1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47      information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
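/* Editorial example (an x86-64 assumption, not from the original source):
   immediately after a CALL insn the callee's CFA is %rsp + 8, because the
   call pushed an 8-byte return address below the point where the CFA was
   defined; readelf would print the opening CFI of such a function roughly
   as "DW_CFA_def_cfa: r7 (rsp) ofs 8".  */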
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150    vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
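/* Editorial worked example (assuming a little-endian target): for a unit
   whose body is 0x152 bytes long, the 32-bit DWARF initial length is the
   4 bytes 52 01 00 00, while the 64-bit DWARF form is the escape
   ff ff ff ff followed by the 8-byte length 52 01 00 00 00 00 00 00,
   matching the 4 vs. 12 byte sizes selected above.  */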
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
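/* Editorial worked example of the macro above:
     DWARF_ROUND (9, 4) == ((9 + 4 - 1) / 4) * 4 == 12
     DWARF_ROUND (8, 4) == ((8 + 4 - 1) / 4) * 4 == 8
   i.e. values already on the boundary are left unchanged.  */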
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248    Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
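/* Editorial worked example (assuming a 64-bit HOST_WIDE_INT): a constant
   whose minimum precision is anywhere from 1 to 64 bits needs a single
   HOST_WIDE_INT ((prec + 63) / 64 == 1), while a 65- to 128-bit value
   needs two.  */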
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
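/* Editorial example tying the two descriptors together: for DW_CFA_offset
   the functions above classify operand 1 as a register number and operand 2
   as an offset ("register R is saved at CFA + N * data alignment factor"),
   while for DW_CFA_register both operands are register numbers ("register
   R1 is saved in register R2").  */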
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696    associated with functions are dragged along with them and not discarded by
697    garbage-collecting links.  We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
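      /* Editorial example: a CIE needing all of the above would carry the
	 augmentation string "zPLR"; the code below builds it in exactly that
	 order, with 'z' prepended last so consumers know an augmentation-size
	 uleb128 follows.  */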
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940   /* Loop through all of the FDEs.  */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the endpoint of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the point where the epilogue code generated for
1156      this function begins.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562      offset.  Don't optimize if a signed integer overflow would happen.  */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
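/* Editorial worked example: if the expression currently ends in
   "DW_OP_fbreg -16", loc_descr_plus_const with a constant offset of 8
   folds the addition into the existing operand, yielding "DW_OP_fbreg -8"
   rather than appending a separate DW_OP_plus_uconst term.  */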
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
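#if 0
/* Editorial usage sketch (not part of the original source; the register
   numbers are illustrative).  Small register numbers get the compact
   one-byte breg opcodes, larger ones fall back to DW_OP_bregx.  */
dw_loc_descr_ref lo = new_reg_loc_descr (6, 8);    /* DW_OP_breg6 8 */
dw_loc_descr_ref hi = new_reg_loc_descr (33, 8);   /* DW_OP_bregx 33, 8 */
#endif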
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extension
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
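/* Editorial example: when emitting DWARF 4 (dwarf_version == 4),
   dwarf_OP (DW_OP_entry_value) degrades to the pre-standard
   DW_OP_GNU_entry_value, whereas under -gdwarf-5 the standard opcode is
   returned unchanged; dwarf_AT and dwarf_TAG below follow the same
   pattern for attributes and tags.  */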
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
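
/* For example, a DW_OP_plus_uconst whose operand is 624485 occupies
   1 + size_of_uleb128 (624485) = 1 + 3 = 4 bytes, since ULEB128 packs
   seven value bits per byte and 624485 needs 20 bits.  */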
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
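
/* For instance, a discriminant list holding the single range 1 .. 4 with
   unsigned bounds takes one descriptor byte plus a one-byte ULEB128 for
   each bound, i.e. 3 bytes in total.  */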
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
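
/* With -dA and an assembler that has LEB128 directives, a simple
   DW_OP_fbreg 16 location comes out roughly as

     .byte 0x91  # DW_OP_fbreg
     .sleb128 16

   though the exact directives and comment syntax depend on the
   dw2_asm_output_* routines and the target assembler.  */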
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
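
/* The raw form is what .cfi_escape consumes: the same DW_OP_fbreg 16
   descriptor from the example above is printed as the byte list
   0x91,0x10.  */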
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
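
/* As an illustration (using x86-64 DWARF register numbering): if the CFA
   is %rbp + 16, i.e. register 6 with offset 16 and no indirection, then
   calling build_cfa_loc with OFFSET 8 yields a single DW_OP_breg6 24.  */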
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. This is done only for
2912 DWARF 4 or higher, and only if the user didn't disable it through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For DWARF 5 or higher .debug_types no longer exists; such DIEs
2917 are instead emitted as DW_UT_type units in .debug_info. For late LTO
2918 debug there should be almost no types emitted, so avoid enabling
2919 -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation unit. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind,
3059 a link to the next attribute in the chain, and an attribute value.
3060 Attributes are typically linked below the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* Die is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* For an external ref to die_symbol if die_offset contains an extra
3094 offset to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. The DIE lookup routines do not consider such
3098 DIEs to be present. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 struct set_early_dwarf {
3108 bool saved;
3109 set_early_dwarf () : saved(early_dwarf)
3110 {
3111 gcc_assert (! early_dwarf_finished);
3112 early_dwarf = true;
3113 }
3114 ~set_early_dwarf () { early_dwarf = saved; }
3115 };
3116
3117 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3118 #define FOR_EACH_CHILD(die, c, expr) do { \
3119 c = die->die_child; \
3120 if (c) do { \
3121 c = c->die_sib; \
3122 expr; \
3123 } while (c != die->die_child); \
3124 } while (0)
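
/* For example, counting the children of a DIE named die:

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);  */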
3125
3126 /* The pubname structure */
3127
3128 typedef struct GTY(()) pubname_struct {
3129 dw_die_ref die;
3130 const char *name;
3131 }
3132 pubname_entry;
3133
3134
3135 struct GTY(()) dw_ranges {
3136 const char *label;
3137 /* If this is positive, it's a block number, otherwise it's a
3138 bitwise-negated index into dw_ranges_by_label. */
3139 int num;
3140 /* Index for the range list for DW_FORM_rnglistx. */
3141 unsigned int idx : 31;
3142 /* True if this range might possibly be in a different section
3143 from the previous entry. */
3144 unsigned int maybe_new_sec : 1;
3145 };
3146
3147 /* A structure to hold a macinfo entry. */
3148
3149 typedef struct GTY(()) macinfo_struct {
3150 unsigned char code;
3151 unsigned HOST_WIDE_INT lineno;
3152 const char *info;
3153 }
3154 macinfo_entry;
3155
3156
3157 struct GTY(()) dw_ranges_by_label {
3158 const char *begin;
3159 const char *end;
3160 };
3161
3162 /* The comdat type node structure. */
3163 struct GTY(()) comdat_type_node
3164 {
3165 dw_die_ref root_die;
3166 dw_die_ref type_die;
3167 dw_die_ref skeleton_die;
3168 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3169 comdat_type_node *next;
3170 };
3171
3172 /* A list of DIEs for which we can't determine ancestry (parent_die
3173 field) just yet. Later in dwarf2out_finish we will fill in the
3174 missing bits. */
3175 typedef struct GTY(()) limbo_die_struct {
3176 dw_die_ref die;
3177 /* The tree for which this DIE was created. We use this to
3178 determine ancestry later. */
3179 tree created_for;
3180 struct limbo_die_struct *next;
3181 }
3182 limbo_die_node;
3183
3184 typedef struct skeleton_chain_struct
3185 {
3186 dw_die_ref old_die;
3187 dw_die_ref new_die;
3188 struct skeleton_chain_struct *parent;
3189 }
3190 skeleton_chain_node;
3191
3192 /* Define a macro which returns nonzero for a TYPE_DECL which was
3193 implicitly generated for a type.
3194
3195 Note that, unlike the C front-end (which generates a NULL named
3196 TYPE_DECL node for each complete tagged type, each array type,
3197 and each function type node created) the C++ front-end generates
3198 a _named_ TYPE_DECL node for each tagged type node created.
3199 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3200 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3201 front-end, but for each type, tagged or not. */
3202
3203 #define TYPE_DECL_IS_STUB(decl) \
3204 (DECL_NAME (decl) == NULL_TREE \
3205 || (DECL_ARTIFICIAL (decl) \
3206 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3207 /* This is necessary for stub decls that \
3208 appear in nested inline functions. */ \
3209 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3210 && (decl_ultimate_origin (decl) \
3211 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3212
3213 /* Information concerning the compilation unit's programming
3214 language, and compiler version. */
3215
3216 /* Fixed size portion of the DWARF compilation unit header. */
3217 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3219 + (dwarf_version >= 5 ? 4 : 3))
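
/* E.g. with 32-bit DWARF this is 4 (initial length) + 4 (.debug_abbrev
   offset) + 3 (2-byte version plus 1-byte address size) = 11 bytes for
   DWARF 2-4, and 12 bytes for DWARF 5, whose extra byte is the unit
   type field.  */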
3220
3221 /* Fixed size portion of the DWARF comdat type unit header. */
3222 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3223 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3224 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3225
3226 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3227 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3228 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3229
3230 /* Fixed size portion of public names info. */
3231 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3232
3233 /* Fixed size portion of the address range info. */
3234 #define DWARF_ARANGES_HEADER_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - DWARF_INITIAL_LENGTH_SIZE)
3238
3239 /* Size of padding portion in the address range info. It must be
3240 aligned to twice the pointer size. */
3241 #define DWARF_ARANGES_PAD_SIZE \
3242 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3243 DWARF2_ADDR_SIZE * 2) \
3244 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
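
/* E.g. with 32-bit DWARF and 8-byte addresses, the 12 bytes of fixed
   fields (4-byte length, 2-byte version, 4-byte .debug_info offset,
   1-byte address size, 1-byte segment size) are rounded up to 16, so
   the pad size is 4 and the header size less the initial length is 12.  */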
3245
3246 /* Use assembler line directives if available. */
3247 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3248 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3249 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3250 #else
3251 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3252 #endif
3253 #endif
3254
3255 /* Use assembler views in line directives if available. */
3256 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3257 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3258 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3259 #else
3260 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3261 #endif
3262 #endif
3263
3264 /* Return true if GCC configure detected assembler support for .loc. */
3265
3266 bool
3267 dwarf2out_default_as_loc_support (void)
3268 {
3269 return DWARF2_ASM_LINE_DEBUG_INFO;
3270 #if (GCC_VERSION >= 3000)
3271 # undef DWARF2_ASM_LINE_DEBUG_INFO
3272 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3273 #endif
3274 }
3275
3276 /* Return true if GCC configure detected assembler support for views
3277 in .loc directives. */
3278
3279 bool
3280 dwarf2out_default_as_locview_support (void)
3281 {
3282 return DWARF2_ASM_VIEW_DEBUG_INFO;
3283 #if (GCC_VERSION >= 3000)
3284 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3285 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3286 #endif
3287 }
3288
3289 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3290 view computation, and it refers to a view identifier for which we
3291 will not emit a label because it is known to map to a view number
3292 zero. We won't allocate the bitmap if we're not using assembler
3293 support for location views, but we have to make the variable
3294 visible for GGC and for code that will be optimized out for lack of
3295 support but that's still parsed and compiled. We could abstract it
3296 out with macros, but it's not worth it. */
3297 static GTY(()) bitmap zero_view_p;
3298
3299 /* Evaluate to TRUE iff N is known to identify the first location view
3300 at its PC. When not using assembler location view computation,
3301 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3302 and views label numbers recorded in it are the ones known to be
3303 zero. */
3304 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3305 || (N) == (var_loc_view)-1 \
3306 || (zero_view_p \
3307 && bitmap_bit_p (zero_view_p, (N))))
3308
3309 /* Return true iff we're to emit .loc directives for the assembler to
3310 generate line number sections.
3311
3312 When we're not emitting views, all we need from the assembler is
3313 support for .loc directives.
3314
3315 If we are emitting views, we can only use the assembler's .loc
3316 support if it also supports views.
3317
3318 When the compiler is emitting the line number programs and
3319 computing view numbers itself, it resets view numbers at known PC
3320 changes and counts from that, and then it emits view numbers as
3321 literal constants in locviewlists. There are cases in which the
3322 compiler is not sure about PC changes, e.g. when extra alignment is
3323 requested for a label. In these cases, the compiler may not reset
3324 the view counter, and the potential PC advance in the line number
3325 program will use an opcode that does not reset the view counter
3326 even if the PC actually changes, so that compiler and debug info
3327 consumer can keep view numbers in sync.
3328
3329 When the compiler defers view computation to the assembler, it
3330 emits symbolic view numbers in locviewlists, with the exception of
3331 views known to be zero (forced resets, or reset after
3332 compiler-visible PC changes): instead of emitting symbols for
3333 these, we emit literal zero and assert the assembler agrees with
3334 the compiler's assessment. We could use symbolic views everywhere,
3335 instead of special-casing zero views, but then we'd be unable to
3336 optimize out locviewlists that contain only zeros. */
3337
3338 static bool
3339 output_asm_line_debug_info (void)
3340 {
3341 return (dwarf2out_as_loc_support
3342 && (dwarf2out_as_locview_support
3343 || !debug_variable_location_views));
3344 }
3345
3346 /* Minimum line offset in a special line information opcode.
3347 This value was chosen to give a reasonable range of values. */
3348 #define DWARF_LINE_BASE -10
3349
3350 /* First special line opcode - leave room for the standard opcodes. */
3351 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3352
3353 /* Range of line offsets in a special line information opcode. */
3354 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
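
/* A special opcode encodes a (line delta, operation advance) pair in one
   byte, following the DWARF line-program formula

     opcode = (delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * advance
              + DWARF_LINE_OPCODE_BASE

   With the settings above, line deltas from -10 up to 231 can be encoded
   this way, as long as the resulting opcode does not exceed 255.  */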
3355
3356 /* Flag that indicates the initial value of the is_stmt_start flag.
3357 In the present implementation, we do not mark any lines as
3358 the beginning of a source statement, because that information
3359 is not made available by the GCC front-end. */
3360 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3361
3362 /* Maximum number of operations per instruction bundle. */
3363 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3364 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3365 #endif
3366
3367 /* This location is used by calc_die_sizes() to keep track of
3368 the offset of each DIE within the .debug_info section. */
3369 static unsigned long next_die_offset;
3370
3371 /* Record the root of the DIE's built for the current compilation unit. */
3372 static GTY(()) dw_die_ref single_comp_unit_die;
3373
3374 /* A list of type DIEs that have been separated into comdat sections. */
3375 static GTY(()) comdat_type_node *comdat_type_list;
3376
3377 /* A list of CU DIEs that have been separated. */
3378 static GTY(()) limbo_die_node *cu_die_list;
3379
3380 /* A list of DIEs with a NULL parent waiting to be relocated. */
3381 static GTY(()) limbo_die_node *limbo_die_list;
3382
3383 /* A list of DIEs for which we may have to generate
3384 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3385 static GTY(()) limbo_die_node *deferred_asm_name;
3386
3387 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3388 {
3389 typedef const char *compare_type;
3390
3391 static hashval_t hash (dwarf_file_data *);
3392 static bool equal (dwarf_file_data *, const char *);
3393 };
3394
3395 /* Filenames referenced by this compilation unit. */
3396 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3397
3398 struct decl_die_hasher : ggc_ptr_hash<die_node>
3399 {
3400 typedef tree compare_type;
3401
3402 static hashval_t hash (die_node *);
3403 static bool equal (die_node *, tree);
3404 };
3405 /* A hash table of references to DIE's that describe declarations.
3406 The key is a DECL_UID() which is a unique number identifying each decl. */
3407 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3408
3409 struct GTY ((for_user)) variable_value_struct {
3410 unsigned int decl_id;
3411 vec<dw_die_ref, va_gc> *dies;
3412 };
3413
3414 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (variable_value_struct *);
3419 static bool equal (variable_value_struct *, tree);
3420 };
3421 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3422 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
3423 DECL_CONTEXT of the referenced VAR_DECLs. */
3424 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3425
3426 struct block_die_hasher : ggc_ptr_hash<die_struct>
3427 {
3428 static hashval_t hash (die_struct *);
3429 static bool equal (die_struct *, die_struct *);
3430 };
3431
3432 /* A hash table of references to DIE's that describe COMMON blocks.
3433 The key is DECL_UID() ^ die_parent. */
3434 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3435
3436 typedef struct GTY(()) die_arg_entry_struct {
3437 dw_die_ref die;
3438 tree arg;
3439 } die_arg_entry;
3440
3441
3442 /* Node of the variable location list. */
3443 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3444 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3445 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3446 in mode of the EXPR_LIST node and first EXPR_LIST operand
3447 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3448 location or NULL for padding. For larger bitsizes,
3449 mode is 0 and first operand is a CONCAT with bitsize
3450 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3451 NULL as second operand. */
3452 rtx GTY (()) loc;
3453 const char * GTY (()) label;
3454 struct var_loc_node * GTY (()) next;
3455 var_loc_view view;
3456 };
3457
3458 /* Variable location list. */
3459 struct GTY ((for_user)) var_loc_list_def {
3460 struct var_loc_node * GTY (()) first;
3461
3462 /* Pointer to the last but one or last element of the
3463 chained list. If the list is empty, both first and
3464 last are NULL. If the list contains just one node,
3465 or if the last node is certainly not redundant, this points
3466 to the last node; otherwise it points to the last but one.
3467 Do not mark it for GC because it is marked through the chain. */
3468 struct var_loc_node * GTY ((skip ("%h"))) last;
3469
3470 /* Pointer to the last element before section switch,
3471 if NULL, either sections weren't switched or first
3472 is after section switch. */
3473 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3474
3475 /* DECL_UID of the variable decl. */
3476 unsigned int decl_id;
3477 };
3478 typedef struct var_loc_list_def var_loc_list;
3479
3480 /* Call argument location list. */
3481 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3482 rtx GTY (()) call_arg_loc_note;
3483 const char * GTY (()) label;
3484 tree GTY (()) block;
3485 bool tail_call_p;
3486 rtx GTY (()) symbol_ref;
3487 struct call_arg_loc_node * GTY (()) next;
3488 };
3489
3490
3491 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3492 {
3493 typedef const_tree compare_type;
3494
3495 static hashval_t hash (var_loc_list *);
3496 static bool equal (var_loc_list *, const_tree);
3497 };
3498
3499 /* Table of decl location linked lists. */
3500 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3501
3502 /* Head and tail of call_arg_loc chain. */
3503 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3504 static struct call_arg_loc_node *call_arg_loc_last;
3505
3506 /* Number of call sites in the current function. */
3507 static int call_site_count = -1;
3508 /* Number of tail call sites in the current function. */
3509 static int tail_call_site_count = -1;
3510
3511 /* A cached location list. */
3512 struct GTY ((for_user)) cached_dw_loc_list_def {
3513 /* The DECL_UID of the decl that this entry describes. */
3514 unsigned int decl_id;
3515
3516 /* The cached location list. */
3517 dw_loc_list_ref loc_list;
3518 };
3519 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3520
3521 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3522 {
3523
3524 typedef const_tree compare_type;
3525
3526 static hashval_t hash (cached_dw_loc_list *);
3527 static bool equal (cached_dw_loc_list *, const_tree);
3528 };
3529
3530 /* Table of cached location lists. */
3531 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3532
3533 /* A vector of references to DIE's that are uniquely identified by their tag,
3534 presence/absence of children DIE's, and list of attribute/value pairs. */
3535 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3536
3537 /* A hash map to remember the stack usage for DWARF procedures. The value
3538 stored is the difference in stack size between just before the DWARF
3539 procedure invocation and just after it returns. In other words, for a
3540 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3541 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
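
/* For example (a worked sketch of the convention above): a DWARF procedure
   that pops two stack entries and pushes one result would store
   1 - 2 = -1 in this map; one that only pushes a single value would
   store 1.  */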
3542
3543 /* A global counter for generating labels for line number data. */
3544 static unsigned int line_info_label_num;
3545
3546 /* The current table to which we should emit line number information
3547 for the current function. This will be set up at the beginning of
3548 assembly for the function. */
3549 static GTY(()) dw_line_info_table *cur_line_info_table;
3550
3551 /* The two default tables of line number info. */
3552 static GTY(()) dw_line_info_table *text_section_line_info;
3553 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3554
3555 /* The set of all non-default tables of line number info. */
3556 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3557
3558 /* A flag to tell pubnames/types export if there is an info section to
3559 refer to. */
3560 static bool info_section_emitted;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible names. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3565
3566 /* A pointer to the base of a table that contains a list of publicly
3567 accessible types. */
3568 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3569
3570 /* A pointer to the base of a table that contains a list of macro
3571 defines/undefines (and file start/end markers). */
3572 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3573
3574 /* True if .debug_macinfo or .debug_macros section is going to be
3575 emitted. */
3576 #define have_macinfo \
3577 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3578 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3579 && !macinfo_table->is_empty ())
3580
3581 /* Vector of dies for which we should generate .debug_ranges info. */
3582 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3583
3584 /* Vector of pairs of labels referenced in ranges_table. */
3585 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3586
3587 /* Whether we have location lists that need outputting. */
3588 static GTY(()) bool have_location_lists;
3589
3590 /* Unique label counter. */
3591 static GTY(()) unsigned int loclabel_num;
3592
3593 /* Unique label counter for point-of-call tables. */
3594 static GTY(()) unsigned int poc_label_num;
3595
3596 /* The last file entry emitted by maybe_emit_file(). */
3597 static GTY(()) struct dwarf_file_data * last_emitted_file;
3598
3599 /* Number of internal labels generated by gen_internal_sym(). */
3600 static GTY(()) int label_num;
3601
3602 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3603
3604 /* Instances of generic types for which we need to generate debug
3605 info that describe their generic parameters and arguments. That
3606 generation needs to happen once all types are properly laid out so
3607 we do it at the end of compilation. */
3608 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3609
3610 /* Offset from the "steady-state frame pointer" to the frame base,
3611 within the current function. */
3612 static poly_int64 frame_pointer_fb_offset;
3613 static bool frame_pointer_fb_offset_valid;
3614
3615 static vec<dw_die_ref> base_types;
3616
3617 /* Flags to represent a set of attribute classes for attributes that represent
3618 a scalar value (bounds, pointers, ...). */
3619 enum dw_scalar_form
3620 {
3621 dw_scalar_form_constant = 0x01,
3622 dw_scalar_form_exprloc = 0x02,
3623 dw_scalar_form_reference = 0x04
3624 };
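
/* These flags are meant to be OR-ed together to describe the forms a
   caller will accept; e.g. a hypothetical call such as

     add_scalar_info (die, DW_AT_upper_bound, bound,
		      dw_scalar_form_constant
		      | dw_scalar_form_exprloc
		      | dw_scalar_form_reference, context);

   (an illustrative sketch, assuming a tree BOUND and a loc_descr_context
   CONTEXT are in scope) allows the bound to be emitted as a constant, an
   expression location, or a reference to a DWARF procedure.  */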
3625
3626 /* Forward declarations for functions defined in this file. */
3627
3628 static int is_pseudo_reg (const_rtx);
3629 static tree type_main_variant (tree);
3630 static int is_tagged_type (const_tree);
3631 static const char *dwarf_tag_name (unsigned);
3632 static const char *dwarf_attr_name (unsigned);
3633 static const char *dwarf_form_name (unsigned);
3634 static tree decl_ultimate_origin (const_tree);
3635 static tree decl_class_context (tree);
3636 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3637 static inline enum dw_val_class AT_class (dw_attr_node *);
3638 static inline unsigned int AT_index (dw_attr_node *);
3639 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3640 static inline unsigned AT_flag (dw_attr_node *);
3641 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3642 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3643 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3644 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3645 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3646 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3647 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3648 unsigned int, unsigned char *);
3649 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3650 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3651 static inline const char *AT_string (dw_attr_node *);
3652 static enum dwarf_form AT_string_form (dw_attr_node *);
3653 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3654 static void add_AT_specification (dw_die_ref, dw_die_ref);
3655 static inline dw_die_ref AT_ref (dw_attr_node *);
3656 static inline int AT_ref_external (dw_attr_node *);
3657 static inline void set_AT_ref_external (dw_attr_node *, int);
3658 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3659 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3660 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3661 dw_loc_list_ref);
3662 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3663 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3664 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3665 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3666 static void remove_addr_table_entry (addr_table_entry *);
3667 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3668 static inline rtx AT_addr (dw_attr_node *);
3669 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3670 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3674 unsigned long, bool);
3675 static inline const char *AT_lbl (dw_attr_node *);
3676 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3677 static const char *get_AT_low_pc (dw_die_ref);
3678 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3679 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3680 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3681 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3682 static bool is_c (void);
3683 static bool is_cxx (void);
3684 static bool is_cxx (const_tree);
3685 static bool is_fortran (void);
3686 static bool is_ada (void);
3687 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3688 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3689 static void add_child_die (dw_die_ref, dw_die_ref);
3690 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3691 static dw_die_ref lookup_type_die (tree);
3692 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3693 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3694 static void equate_type_number_to_die (tree, dw_die_ref);
3695 static dw_die_ref lookup_decl_die (tree);
3696 static var_loc_list *lookup_decl_loc (const_tree);
3697 static void equate_decl_number_to_die (tree, dw_die_ref);
3698 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3699 static void print_spaces (FILE *);
3700 static void print_die (dw_die_ref, FILE *);
3701 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3702 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3703 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3704 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3705 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3706 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3707 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3708 struct md5_ctx *, int *);
3709 struct checksum_attributes;
3710 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3711 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3712 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3713 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3714 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3715 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3716 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3717 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3718 static int is_type_die (dw_die_ref);
3719 static inline bool is_template_instantiation (dw_die_ref);
3720 static int is_declaration_die (dw_die_ref);
3721 static int should_move_die_to_comdat (dw_die_ref);
3722 static dw_die_ref clone_as_declaration (dw_die_ref);
3723 static dw_die_ref clone_die (dw_die_ref);
3724 static dw_die_ref clone_tree (dw_die_ref);
3725 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3726 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3727 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3728 static dw_die_ref generate_skeleton (dw_die_ref);
3729 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3730 dw_die_ref,
3731 dw_die_ref);
3732 static void break_out_comdat_types (dw_die_ref);
3733 static void copy_decls_for_unworthy_types (dw_die_ref);
3734
3735 static void add_sibling_attributes (dw_die_ref);
3736 static void output_location_lists (dw_die_ref);
3737 static int constant_size (unsigned HOST_WIDE_INT);
3738 static unsigned long size_of_die (dw_die_ref);
3739 static void calc_die_sizes (dw_die_ref);
3740 static void calc_base_type_die_sizes (void);
3741 static void mark_dies (dw_die_ref);
3742 static void unmark_dies (dw_die_ref);
3743 static void unmark_all_dies (dw_die_ref);
3744 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3745 static unsigned long size_of_aranges (void);
3746 static enum dwarf_form value_format (dw_attr_node *);
3747 static void output_value_format (dw_attr_node *);
3748 static void output_abbrev_section (void);
3749 static void output_die_abbrevs (unsigned long, dw_die_ref);
3750 static void output_die (dw_die_ref);
3751 static void output_compilation_unit_header (enum dwarf_unit_type);
3752 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3753 static void output_comdat_type_unit (comdat_type_node *, bool);
3754 static const char *dwarf2_name (tree, int);
3755 static void add_pubname (tree, dw_die_ref);
3756 static void add_enumerator_pubname (const char *, dw_die_ref);
3757 static void add_pubname_string (const char *, dw_die_ref);
3758 static void add_pubtype (tree, dw_die_ref);
3759 static void output_pubnames (vec<pubname_entry, va_gc> *);
3760 static void output_aranges (void);
3761 static unsigned int add_ranges (const_tree, bool = false);
3762 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3763 bool *, bool);
3764 static void output_ranges (void);
3765 static dw_line_info_table *new_line_info_table (void);
3766 static void output_line_info (bool);
3767 static void output_file_names (void);
3768 static dw_die_ref base_type_die (tree, bool);
3769 static int is_base_type (tree);
3770 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3771 static int decl_quals (const_tree);
3772 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3773 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3774 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static tree field_type (const_tree);
3798 static unsigned int simple_type_align_in_bits (const_tree);
3799 static unsigned int simple_decl_align_in_bits (const_tree);
3800 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3801 struct vlr_context;
3802 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3803 HOST_WIDE_INT *);
3804 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3805 dw_loc_list_ref);
3806 static void add_data_member_location_attribute (dw_die_ref, tree,
3807 struct vlr_context *);
3808 static bool add_const_value_attribute (dw_die_ref, rtx);
3809 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3810 static void insert_wide_int (const wide_int &, unsigned char *, int);
3811 static void insert_float (const_rtx, unsigned char *);
3812 static rtx rtl_for_decl_location (tree);
3813 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3814 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3815 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3816 static void add_name_attribute (dw_die_ref, const char *);
3817 static void add_desc_attribute (dw_die_ref, tree);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static void add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static dw_die_ref scope_die_for (tree, dw_die_ref);
3839 static inline int local_scope_p (dw_die_ref);
3840 static inline int class_scope_p (dw_die_ref);
3841 static inline int class_or_namespace_scope_p (dw_die_ref);
3842 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3843 static void add_calling_convention_attribute (dw_die_ref, tree);
3844 static const char *type_tag (const_tree);
3845 static tree member_declared_type (const_tree);
3846 #if 0
3847 static const char *decl_start_label (tree);
3848 #endif
3849 static void gen_array_type_die (tree, dw_die_ref);
3850 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3851 #if 0
3852 static void gen_entry_point_die (tree, dw_die_ref);
3853 #endif
3854 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3855 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3857 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3858 static void gen_formal_types_die (tree, dw_die_ref);
3859 static void gen_subprogram_die (tree, dw_die_ref);
3860 static void gen_variable_die (tree, tree, dw_die_ref);
3861 static void gen_const_die (tree, dw_die_ref);
3862 static void gen_label_die (tree, dw_die_ref);
3863 static void gen_lexical_block_die (tree, dw_die_ref);
3864 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3865 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3866 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3867 static dw_die_ref gen_compile_unit_die (const char *);
3868 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3869 static void gen_member_die (tree, dw_die_ref);
3870 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3871 enum debug_info_usage);
3872 static void gen_subroutine_type_die (tree, dw_die_ref);
3873 static void gen_typedef_die (tree, dw_die_ref);
3874 static void gen_type_die (tree, dw_die_ref);
3875 static void gen_block_die (tree, dw_die_ref);
3876 static void decls_for_scope (tree, dw_die_ref, bool = true);
3877 static bool is_naming_typedef_decl (const_tree);
3878 static inline dw_die_ref get_context_die (tree);
3879 static void gen_namespace_die (tree, dw_die_ref);
3880 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3881 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3882 static dw_die_ref force_decl_die (tree);
3883 static dw_die_ref force_type_die (tree);
3884 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3885 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3886 static struct dwarf_file_data * lookup_filename (const char *);
3887 static void retry_incomplete_types (void);
3888 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3889 static void gen_generic_params_dies (tree);
3890 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3891 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3892 static void splice_child_die (dw_die_ref, dw_die_ref);
3893 static int file_info_cmp (const void *, const void *);
3894 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3895 const char *, var_loc_view, const char *);
3896 static void output_loc_list (dw_loc_list_ref);
3897 static char *gen_internal_sym (const char *);
3898 static bool want_pubnames (void);
3899
3900 static void prune_unmark_dies (dw_die_ref);
3901 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3902 static void prune_unused_types_mark (dw_die_ref, int);
3903 static void prune_unused_types_walk (dw_die_ref);
3904 static void prune_unused_types_walk_attribs (dw_die_ref);
3905 static void prune_unused_types_prune (dw_die_ref);
3906 static void prune_unused_types (void);
3907 static int maybe_emit_file (struct dwarf_file_data *fd);
3908 static inline const char *AT_vms_delta1 (dw_attr_node *);
3909 static inline const char *AT_vms_delta2 (dw_attr_node *);
3910 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3911 const char *, const char *);
3912 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3913 static void gen_remaining_tmpl_value_param_die_attribute (void);
3914 static bool generic_type_p (tree);
3915 static void schedule_generic_params_dies_gen (tree t);
3916 static void gen_scheduled_generic_parms_dies (void);
3917 static void resolve_variable_values (void);
3918
3919 static const char *comp_dir_string (void);
3920
3921 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3922
3923 /* enum for tracking thread-local variables whose address is really an offset
3924 relative to the TLS pointer, which will need link-time relocation, but will
3925 not need relocation by the DWARF consumer. */
3926
3927 enum dtprel_bool
3928 {
3929 dtprel_false = 0,
3930 dtprel_true = 1
3931 };
3932
3933 /* Return the operator to use for an address of a variable. For dtprel_true, we
3934 use DW_OP_const*. For regular variables, which need both link-time
3935 relocation and consumer-level relocation (e.g., to account for shared objects
3936 loaded at a random address), we use DW_OP_addr*. */
3937
3938 static inline enum dwarf_location_atom
3939 dw_addr_op (enum dtprel_bool dtprel)
3940 {
3941 if (dtprel == dtprel_true)
3942 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3943 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3944 else
3945 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3946 }
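
/* The choice above, in table form:

			dtprel_true		dtprel_false
     split DWARF	DW_OP_constx		DW_OP_addrx
     non-split		DW_OP_const4u/8u	DW_OP_addr

   where const4u vs. const8u is selected by DWARF2_ADDR_SIZE.  */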
3947
3948 /* Return a pointer to a newly allocated address location description. If
3949 dwarf_split_debug_info is true, then record the address with the appropriate
3950 relocation. */
3951 static inline dw_loc_descr_ref
3952 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3953 {
3954 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3955
3956 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3957 ref->dw_loc_oprnd1.v.val_addr = addr;
3958 ref->dtprel = dtprel;
3959 if (dwarf_split_debug_info)
3960 ref->dw_loc_oprnd1.val_entry
3961 = add_addr_table_entry (addr,
3962 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3963 else
3964 ref->dw_loc_oprnd1.val_entry = NULL;
3965
3966 return ref;
3967 }
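
/* Illustrative use, assuming a SYMBOL_REF rtx SYM is in scope:

     dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);

   yields a single DW_OP_addr (or DW_OP_addrx under split DWARF) whose
   operand is SYM; under split DWARF the address is also entered into
   the .debug_addr table via add_addr_table_entry.  */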
3968
3969 /* Section names used to hold DWARF debugging information. */
3970
3971 #ifndef DEBUG_INFO_SECTION
3972 #define DEBUG_INFO_SECTION ".debug_info"
3973 #endif
3974 #ifndef DEBUG_DWO_INFO_SECTION
3975 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_LTO_INFO_SECTION
3978 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3979 #endif
3980 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3981 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3982 #endif
3983 #ifndef DEBUG_ABBREV_SECTION
3984 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3985 #endif
3986 #ifndef DEBUG_LTO_ABBREV_SECTION
3987 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3988 #endif
3989 #ifndef DEBUG_DWO_ABBREV_SECTION
3990 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3993 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3994 #endif
3995 #ifndef DEBUG_ARANGES_SECTION
3996 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3997 #endif
3998 #ifndef DEBUG_ADDR_SECTION
3999 #define DEBUG_ADDR_SECTION ".debug_addr"
4000 #endif
4001 #ifndef DEBUG_MACINFO_SECTION
4002 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4003 #endif
4004 #ifndef DEBUG_LTO_MACINFO_SECTION
4005 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4006 #endif
4007 #ifndef DEBUG_DWO_MACINFO_SECTION
4008 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4009 #endif
4010 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4011 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4012 #endif
4013 #ifndef DEBUG_MACRO_SECTION
4014 #define DEBUG_MACRO_SECTION ".debug_macro"
4015 #endif
4016 #ifndef DEBUG_LTO_MACRO_SECTION
4017 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4018 #endif
4019 #ifndef DEBUG_DWO_MACRO_SECTION
4020 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4021 #endif
4022 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4023 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4024 #endif
4025 #ifndef DEBUG_LINE_SECTION
4026 #define DEBUG_LINE_SECTION ".debug_line"
4027 #endif
4028 #ifndef DEBUG_LTO_LINE_SECTION
4029 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4030 #endif
4031 #ifndef DEBUG_DWO_LINE_SECTION
4032 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4033 #endif
4034 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4035 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4036 #endif
4037 #ifndef DEBUG_LOC_SECTION
4038 #define DEBUG_LOC_SECTION ".debug_loc"
4039 #endif
4040 #ifndef DEBUG_DWO_LOC_SECTION
4041 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4042 #endif
4043 #ifndef DEBUG_LOCLISTS_SECTION
4044 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4045 #endif
4046 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4047 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4048 #endif
4049 #ifndef DEBUG_PUBNAMES_SECTION
4050 #define DEBUG_PUBNAMES_SECTION \
4051 ((debug_generate_pub_sections == 2) \
4052 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4053 #endif
4054 #ifndef DEBUG_PUBTYPES_SECTION
4055 #define DEBUG_PUBTYPES_SECTION \
4056 ((debug_generate_pub_sections == 2) \
4057 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4058 #endif
4059 #ifndef DEBUG_STR_OFFSETS_SECTION
4060 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4061 #endif
4062 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4063 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4064 #endif
4065 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4066 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4067 #endif
4068 #ifndef DEBUG_STR_SECTION
4069 #define DEBUG_STR_SECTION ".debug_str"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_SECTION
4072 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4073 #endif
4074 #ifndef DEBUG_STR_DWO_SECTION
4075 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4076 #endif
4077 #ifndef DEBUG_LTO_STR_DWO_SECTION
4078 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4079 #endif
4080 #ifndef DEBUG_RANGES_SECTION
4081 #define DEBUG_RANGES_SECTION ".debug_ranges"
4082 #endif
4083 #ifndef DEBUG_RNGLISTS_SECTION
4084 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4085 #endif
4086 #ifndef DEBUG_LINE_STR_SECTION
4087 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4088 #endif
4089 #ifndef DEBUG_LTO_LINE_STR_SECTION
4090 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4091 #endif
4092
4093 /* Standard ELF section names for compiled code and data. */
4094 #ifndef TEXT_SECTION_NAME
4095 #define TEXT_SECTION_NAME ".text"
4096 #endif
4097
4098 /* Section flags for .debug_str section. */
4099 #define DEBUG_STR_SECTION_FLAGS \
4100 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4101 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4102 : SECTION_DEBUG)
4103
4104 /* Section flags for .debug_str.dwo section. */
4105 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4106
4107 /* Attribute used to refer to the macro section. */
4108 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4109 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4110
4111 /* Labels we insert at the beginning of sections, which we can reference
4112 instead of the section names themselves. */
4113
4114 #ifndef TEXT_SECTION_LABEL
4115 #define TEXT_SECTION_LABEL "Ltext"
4116 #endif
4117 #ifndef COLD_TEXT_SECTION_LABEL
4118 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4119 #endif
4120 #ifndef DEBUG_LINE_SECTION_LABEL
4121 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4122 #endif
4123 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4124 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4125 #endif
4126 #ifndef DEBUG_INFO_SECTION_LABEL
4127 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4128 #endif
4129 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4130 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4131 #endif
4132 #ifndef DEBUG_ABBREV_SECTION_LABEL
4133 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4134 #endif
4135 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4136 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4137 #endif
4138 #ifndef DEBUG_ADDR_SECTION_LABEL
4139 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4140 #endif
4141 #ifndef DEBUG_LOC_SECTION_LABEL
4142 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4143 #endif
4144 #ifndef DEBUG_RANGES_SECTION_LABEL
4145 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4146 #endif
4147 #ifndef DEBUG_MACINFO_SECTION_LABEL
4148 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4149 #endif
4150 #ifndef DEBUG_MACRO_SECTION_LABEL
4151 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4152 #endif
4153 #define SKELETON_COMP_DIE_ABBREV 1
4154 #define SKELETON_TYPE_DIE_ABBREV 2
4155
4156 /* Definitions of defaults for formats and names of various special
4157 (artificial) labels which may be generated within this file (when the -g
4158 option is used and DWARF2_DEBUGGING_INFO is in effect).
4159 If necessary, these may be overridden from within the tm.h file, but
4160 typically, overriding these defaults is unnecessary. */
4161
4162 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177
4178 #ifndef TEXT_END_LABEL
4179 #define TEXT_END_LABEL "Letext"
4180 #endif
4181 #ifndef COLD_END_LABEL
4182 #define COLD_END_LABEL "Letext_cold"
4183 #endif
4184 #ifndef BLOCK_BEGIN_LABEL
4185 #define BLOCK_BEGIN_LABEL "LBB"
4186 #endif
4187 #ifndef BLOCK_INLINE_ENTRY_LABEL
4188 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4189 #endif
4190 #ifndef BLOCK_END_LABEL
4191 #define BLOCK_END_LABEL "LBE"
4192 #endif
4193 #ifndef LINE_CODE_LABEL
4194 #define LINE_CODE_LABEL "LM"
4195 #endif
4196
4197 \f
4198 /* Return the root of the DIE's built for the current compilation unit. */
4199 static dw_die_ref
4200 comp_unit_die (void)
4201 {
4202 if (!single_comp_unit_die)
4203 single_comp_unit_die = gen_compile_unit_die (NULL);
4204 return single_comp_unit_die;
4205 }
4206
4207 /* We allow a language front-end to designate a function that is to be
4208 called to "demangle" any name before it is put into a DIE. */
4209
4210 static const char *(*demangle_name_func) (const char *);
4211
4212 void
4213 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4214 {
4215 demangle_name_func = func;
4216 }
4217
4218 /* Test if rtl node points to a pseudo register. */
4219
4220 static inline int
4221 is_pseudo_reg (const_rtx rtl)
4222 {
4223 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4224 || (GET_CODE (rtl) == SUBREG
4225 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4226 }
4227
4228 /* Return a reference to a type, with its const and volatile qualifiers
4229 removed. */
4230
4231 static inline tree
4232 type_main_variant (tree type)
4233 {
4234 type = TYPE_MAIN_VARIANT (type);
4235
4236 /* ??? There really should be only one main variant among any group of
4237 variants of a given type (and all of the MAIN_VARIANT values for all
4238 members of the group should point to that one type) but sometimes the C
4239 front-end messes this up for array types, so we work around that bug
4240 here. */
4241 if (TREE_CODE (type) == ARRAY_TYPE)
4242 while (type != TYPE_MAIN_VARIANT (type))
4243 type = TYPE_MAIN_VARIANT (type);
4244
4245 return type;
4246 }
4247
4248 /* Return nonzero if the given type node represents a tagged type. */
4249
4250 static inline int
4251 is_tagged_type (const_tree type)
4252 {
4253 enum tree_code code = TREE_CODE (type);
4254
4255 return (code == RECORD_TYPE || code == UNION_TYPE
4256 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4257 }
4258
4259 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4260
4261 static void
4262 get_ref_die_offset_label (char *label, dw_die_ref ref)
4263 {
4264 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4265 }
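
/* For example, if debug_info_section_label happened to be "Ldebug_info0"
   and REF had die_offset 42, LABEL would become "Ldebug_info0+42"
   (the exact label spelling is target dependent).  */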
4266
4267 /* Return die_offset of a DIE reference to a base type. */
4268
4269 static unsigned long int
4270 get_base_type_offset (dw_die_ref ref)
4271 {
4272 if (ref->die_offset)
4273 return ref->die_offset;
4274 if (comp_unit_die ()->die_abbrev)
4275 {
4276 calc_base_type_die_sizes ();
4277 gcc_assert (ref->die_offset);
4278 }
4279 return ref->die_offset;
4280 }
4281
4282 /* Return die_offset of a DIE reference other than base type. */
4283
4284 static unsigned long int
4285 get_ref_die_offset (dw_die_ref ref)
4286 {
4287 gcc_assert (ref->die_offset);
4288 return ref->die_offset;
4289 }
4290
4291 /* Convert a DIE tag into its string name. */
4292
4293 static const char *
4294 dwarf_tag_name (unsigned int tag)
4295 {
4296 const char *name = get_DW_TAG_name (tag);
4297
4298 if (name != NULL)
4299 return name;
4300
4301 return "DW_TAG_<unknown>";
4302 }
4303
4304 /* Convert a DWARF attribute code into its string name. */
4305
4306 static const char *
4307 dwarf_attr_name (unsigned int attr)
4308 {
4309 const char *name;
4310
4311 switch (attr)
4312 {
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_prologue:
4315 return "DW_AT_HP_prologue";
4316 #else
4317 case DW_AT_MIPS_loop_unroll_factor:
4318 return "DW_AT_MIPS_loop_unroll_factor";
4319 #endif
4320
4321 #if VMS_DEBUGGING_INFO
4322 case DW_AT_HP_epilogue:
4323 return "DW_AT_HP_epilogue";
4324 #else
4325 case DW_AT_MIPS_stride:
4326 return "DW_AT_MIPS_stride";
4327 #endif
4328 }
4329
4330 name = get_DW_AT_name (attr);
4331
4332 if (name != NULL)
4333 return name;
4334
4335 return "DW_AT_<unknown>";
4336 }
4337
4338 /* Convert a DWARF value form code into its string name. */
4339
4340 static const char *
4341 dwarf_form_name (unsigned int form)
4342 {
4343 const char *name = get_DW_FORM_name (form);
4344
4345 if (name != NULL)
4346 return name;
4347
4348 return "DW_FORM_<unknown>";
4349 }
4350 \f
4351 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4352 instance of an inlined instance of a decl which is local to an inline
4353 function, so we have to trace all of the way back through the origin chain
4354 to find out what sort of node actually served as the original seed for the
4355 given block. */
4356
4357 static tree
4358 decl_ultimate_origin (const_tree decl)
4359 {
4360 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4361 return NULL_TREE;
4362
4363 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4364 we're trying to output the abstract instance of this function. */
4365 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4366 return NULL_TREE;
4367
4368 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4369 most distant ancestor, this should never happen. */
4370 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4371
4372 return DECL_ABSTRACT_ORIGIN (decl);
4373 }
4374
4375 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4376 of a virtual function may refer to a base class, so we check the 'this'
4377 parameter. */
4378
4379 static tree
4380 decl_class_context (tree decl)
4381 {
4382 tree context = NULL_TREE;
4383
4384 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4385 context = DECL_CONTEXT (decl);
4386 else
4387 context = TYPE_MAIN_VARIANT
4388 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4389
4390 if (context && !TYPE_P (context))
4391 context = NULL_TREE;
4392
4393 return context;
4394 }
4395 \f
4396 /* Add an attribute/value pair to a DIE. */
4397
4398 static inline void
4399 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4400 {
4401 /* Maybe this should be an assert? */
4402 if (die == NULL)
4403 return;
4404
4405 if (flag_checking)
4406 {
4407 /* Check we do not add duplicate attrs. Can't use get_AT here
4408 because that recurses to the specification/abstract origin DIE. */
4409 dw_attr_node *a;
4410 unsigned ix;
4411 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4412 gcc_assert (a->dw_attr != attr->dw_attr);
4413 }
4414
4415 vec_safe_reserve (die->die_attr, 1);
4416 vec_safe_push (die->die_attr, *attr);
4417 }
4418
4419 static inline enum dw_val_class
4420 AT_class (dw_attr_node *a)
4421 {
4422 return a->dw_attr_val.val_class;
4423 }
4424
4425 /* Return the index for any attribute that will be referenced with a
4426 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4427 indices are stored in dw_attr_val.v.val_str so that the string table
4428 entries can be reference counted for pruning. */
4429
4430 static inline unsigned int
4431 AT_index (dw_attr_node *a)
4432 {
4433 if (AT_class (a) == dw_val_class_str)
4434 return a->dw_attr_val.v.val_str->index;
4435 else if (a->dw_attr_val.val_entry != NULL)
4436 return a->dw_attr_val.val_entry->index;
4437 return NOT_INDEXED;
4438 }
4439
4440 /* Add a flag value attribute to a DIE. */
4441
4442 static inline void
4443 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4444 {
4445 dw_attr_node attr;
4446
4447 attr.dw_attr = attr_kind;
4448 attr.dw_attr_val.val_class = dw_val_class_flag;
4449 attr.dw_attr_val.val_entry = NULL;
4450 attr.dw_attr_val.v.val_flag = flag;
4451 add_dwarf_attr (die, &attr);
4452 }
4453
4454 static inline unsigned
4455 AT_flag (dw_attr_node *a)
4456 {
4457 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4458 return a->dw_attr_val.v.val_flag;
4459 }
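
/* A typical (illustrative) use of the flag helpers:

     add_AT_flag (die, DW_AT_external, 1);

   records that the entity described by DIE has external linkage; the
   value can later be read back with AT_flag on the attribute node.  */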
4460
4461 /* Add a signed integer attribute value to a DIE. */
4462
4463 static inline void
4464 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4465 {
4466 dw_attr_node attr;
4467
4468 attr.dw_attr = attr_kind;
4469 attr.dw_attr_val.val_class = dw_val_class_const;
4470 attr.dw_attr_val.val_entry = NULL;
4471 attr.dw_attr_val.v.val_int = int_val;
4472 add_dwarf_attr (die, &attr);
4473 }
4474
4475 static inline HOST_WIDE_INT
4476 AT_int (dw_attr_node *a)
4477 {
4478 gcc_assert (a && (AT_class (a) == dw_val_class_const
4479 || AT_class (a) == dw_val_class_const_implicit));
4480 return a->dw_attr_val.v.val_int;
4481 }
4482
4483 /* Add an unsigned integer attribute value to a DIE. */
4484
4485 static inline void
4486 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4487 unsigned HOST_WIDE_INT unsigned_val)
4488 {
4489 dw_attr_node attr;
4490
4491 attr.dw_attr = attr_kind;
4492 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4493 attr.dw_attr_val.val_entry = NULL;
4494 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4495 add_dwarf_attr (die, &attr);
4496 }
4497
4498 static inline unsigned HOST_WIDE_INT
4499 AT_unsigned (dw_attr_node *a)
4500 {
4501 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4502 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4503 return a->dw_attr_val.v.val_unsigned;
4504 }
4505
4506 /* Add an unsigned wide integer attribute value to a DIE. */
4507
4508 static inline void
4509 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4510 const wide_int& w)
4511 {
4512 dw_attr_node attr;
4513
4514 attr.dw_attr = attr_kind;
4515 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4516 attr.dw_attr_val.val_entry = NULL;
4517 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4518 *attr.dw_attr_val.v.val_wide = w;
4519 add_dwarf_attr (die, &attr);
4520 }
4521
4522 /* Add an unsigned double integer attribute value to a DIE. */
4523
4524 static inline void
4525 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4526 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4527 {
4528 dw_attr_node attr;
4529
4530 attr.dw_attr = attr_kind;
4531 attr.dw_attr_val.val_class = dw_val_class_const_double;
4532 attr.dw_attr_val.val_entry = NULL;
4533 attr.dw_attr_val.v.val_double.high = high;
4534 attr.dw_attr_val.v.val_double.low = low;
4535 add_dwarf_attr (die, &attr);
4536 }
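
/* As a worked example, assuming a 64-bit HOST_WIDE_INT, the 128-bit
   constant 2**64 + 5 splits into high = 1 and low = 5, so it could be
   attached (illustratively) as

     add_AT_double (die, DW_AT_const_value, 1, 5);  */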
4537
4538 /* Add a vector of data (e.g. a floating point constant) to a DIE. */
4539
4540 static inline void
4541 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4542 unsigned int length, unsigned int elt_size, unsigned char *array)
4543 {
4544 dw_attr_node attr;
4545
4546 attr.dw_attr = attr_kind;
4547 attr.dw_attr_val.val_class = dw_val_class_vec;
4548 attr.dw_attr_val.val_entry = NULL;
4549 attr.dw_attr_val.v.val_vec.length = length;
4550 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4551 attr.dw_attr_val.v.val_vec.array = array;
4552 add_dwarf_attr (die, &attr);
4553 }
4554
4555 /* Add an 8-byte data attribute value to a DIE. */
4556
4557 static inline void
4558 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4559 unsigned char data8[8])
4560 {
4561 dw_attr_node attr;
4562
4563 attr.dw_attr = attr_kind;
4564 attr.dw_attr_val.val_class = dw_val_class_data8;
4565 attr.dw_attr_val.val_entry = NULL;
4566 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4571 dwarf_split_debug_info, address attributes in dies destined for the
4572 final executable have force_direct set to avoid using indexed
4573 references. */
4574
4575 static inline void
4576 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4577 bool force_direct)
4578 {
4579 dw_attr_node attr;
4580 char * lbl_id;
4581
4582 lbl_id = xstrdup (lbl_low);
4583 attr.dw_attr = DW_AT_low_pc;
4584 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4585 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4586 if (dwarf_split_debug_info && !force_direct)
4587 attr.dw_attr_val.val_entry
4588 = add_addr_table_entry (lbl_id, ate_kind_label);
4589 else
4590 attr.dw_attr_val.val_entry = NULL;
4591 add_dwarf_attr (die, &attr);
4592
4593 attr.dw_attr = DW_AT_high_pc;
4594 if (dwarf_version < 4)
4595 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4596 else
4597 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4598 lbl_id = xstrdup (lbl_high);
4599 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4600 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4601 && dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607 }
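
/* Illustrative call, using the label buffers declared earlier in this file
   (whether force_direct is appropriate depends on the caller):

     add_AT_low_high_pc (comp_unit_die (), text_section_label,
			 text_end_label, true);

   For DWARF 4 and later the DW_AT_high_pc attribute gets the
   dw_val_class_high_pc class, so it can be emitted as an offset from
   DW_AT_low_pc rather than as a second address.  */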
4608
4609 /* Hash and equality functions for debug_str_hash. */
4610
4611 hashval_t
4612 indirect_string_hasher::hash (indirect_string_node *x)
4613 {
4614 return htab_hash_string (x->str);
4615 }
4616
4617 bool
4618 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4619 {
4620 return strcmp (x1->str, x2) == 0;
4621 }
4622
4623 /* Add STR to the given string hash table. */
4624
4625 static struct indirect_string_node *
4626 find_AT_string_in_table (const char *str,
4627 hash_table<indirect_string_hasher> *table,
4628 enum insert_option insert = INSERT)
4629 {
4630 struct indirect_string_node *node;
4631
4632 indirect_string_node **slot
4633 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4634 if (*slot == NULL)
4635 {
4636 node = ggc_cleared_alloc<indirect_string_node> ();
4637 node->str = ggc_strdup (str);
4638 *slot = node;
4639 }
4640 else
4641 node = *slot;
4642
4643 node->refcount++;
4644 return node;
4645 }
4646
4647 /* Add STR to the indirect string hash table. */
4648
4649 static struct indirect_string_node *
4650 find_AT_string (const char *str, enum insert_option insert = INSERT)
4651 {
4652 if (! debug_str_hash)
4653 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4654
4655 return find_AT_string_in_table (str, debug_str_hash, insert);
4656 }
4657
4658 /* Add a string attribute value to a DIE. */
4659
4660 static inline void
4661 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4662 {
4663 dw_attr_node attr;
4664 struct indirect_string_node *node;
4665
4666 node = find_AT_string (str);
4667
4668 attr.dw_attr = attr_kind;
4669 attr.dw_attr_val.val_class = dw_val_class_str;
4670 attr.dw_attr_val.val_entry = NULL;
4671 attr.dw_attr_val.v.val_str = node;
4672 add_dwarf_attr (die, &attr);
4673 }
4674
4675 static inline const char *
4676 AT_string (dw_attr_node *a)
4677 {
4678 gcc_assert (a && AT_class (a) == dw_val_class_str);
4679 return a->dw_attr_val.v.val_str->str;
4680 }
4681
4682 /* Call this function directly to bypass AT_string_form's logic to put
4683 the string inline in the die. */
4684
4685 static void
4686 set_indirect_string (struct indirect_string_node *node)
4687 {
4688 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4689 /* If the string is already indirect, this is a no-op. */
4690 if (node->form == DW_FORM_strp
4691 || node->form == DW_FORM_line_strp
4692 || node->form == dwarf_FORM (DW_FORM_strx))
4693 {
4694 gcc_assert (node->label);
4695 return;
4696 }
4697 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4698 ++dw2_string_counter;
4699 node->label = xstrdup (label);
4700
4701 if (!dwarf_split_debug_info)
4702 {
4703 node->form = DW_FORM_strp;
4704 node->index = NOT_INDEXED;
4705 }
4706 else
4707 {
4708 node->form = dwarf_FORM (DW_FORM_strx);
4709 node->index = NO_INDEX_ASSIGNED;
4710 }
4711 }
4712
4713 /* A helper function for dwarf2out_finish, called to reset indirect
4714 string decisions done for early LTO dwarf output before fat object
4715 dwarf output. */
4716
4717 int
4718 reset_indirect_string (indirect_string_node **h, void *)
4719 {
4720 struct indirect_string_node *node = *h;
4721 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4722 {
4723 free (node->label);
4724 node->label = NULL;
4725 node->form = (dwarf_form) 0;
4726 node->index = 0;
4727 }
4728 return 1;
4729 }
4730
4731 /* Find out whether a string should be output inline in DIE
4732 or out-of-line in .debug_str section. */
4733
4734 static enum dwarf_form
4735 find_string_form (struct indirect_string_node *node)
4736 {
4737 unsigned int len;
4738
4739 if (node->form)
4740 return node->form;
4741
4742 len = strlen (node->str) + 1;
4743
4744 /* If the string is no longer than the size of the reference, it is
4745 always better to put it inline. */
4746 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4747 return node->form = DW_FORM_string;
4748
4749 /* If we cannot expect the linker to merge strings in the .debug_str
4750 section, only put the string into .debug_str if doing so pays off
4751 even within this single module. */
4752 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4753 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4754 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4755 return node->form = DW_FORM_string;
4756
4757 set_indirect_string (node);
4758
4759 return node->form;
4760 }
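
/* Worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no mergeable-string support: a 4-byte string (3 chars plus NUL) is
   never worth an indirect reference, so it stays DW_FORM_string; a
   20-byte string referenced once gives (20 - 4) * 1 = 16 <= 20 and also
   stays inline, but referenced twice gives 32 > 20 and is moved to
   .debug_str as DW_FORM_strp (or DW_FORM_strx with split debug info).  */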
4761
4762 /* Find out whether the string referenced from the attribute should be
4763 output inline in DIE or out-of-line in .debug_str section. */
4764
4765 static enum dwarf_form
4766 AT_string_form (dw_attr_node *a)
4767 {
4768 gcc_assert (a && AT_class (a) == dw_val_class_str);
4769 return find_string_form (a->dw_attr_val.v.val_str);
4770 }
4771
4772 /* Add a DIE reference attribute value to a DIE. */
4773
4774 static inline void
4775 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4776 {
4777 dw_attr_node attr;
4778 gcc_checking_assert (targ_die != NULL);
4779
4780 /* With LTO we can end up trying to reference something we didn't create
4781 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4782 if (targ_die == NULL)
4783 return;
4784
4785 attr.dw_attr = attr_kind;
4786 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4787 attr.dw_attr_val.val_entry = NULL;
4788 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4789 attr.dw_attr_val.v.val_die_ref.external = 0;
4790 add_dwarf_attr (die, &attr);
4791 }
4792
4793 /* Change DIE reference REF to point to NEW_DIE instead. */
4794
4795 static inline void
4796 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4797 {
4798 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4799 ref->dw_attr_val.v.val_die_ref.die = new_die;
4800 ref->dw_attr_val.v.val_die_ref.external = 0;
4801 }
4802
4803 /* Add an AT_specification attribute to a DIE, and also make the back
4804 pointer from the specification to the definition. */
4805
4806 static inline void
4807 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4808 {
4809 add_AT_die_ref (die, DW_AT_specification, targ_die);
4810 gcc_assert (!targ_die->die_definition);
4811 targ_die->die_definition = die;
4812 }
4813
4814 static inline dw_die_ref
4815 AT_ref (dw_attr_node *a)
4816 {
4817 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4818 return a->dw_attr_val.v.val_die_ref.die;
4819 }
4820
4821 static inline int
4822 AT_ref_external (dw_attr_node *a)
4823 {
4824 if (a && AT_class (a) == dw_val_class_die_ref)
4825 return a->dw_attr_val.v.val_die_ref.external;
4826
4827 return 0;
4828 }
4829
4830 static inline void
4831 set_AT_ref_external (dw_attr_node *a, int i)
4832 {
4833 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4834 a->dw_attr_val.v.val_die_ref.external = i;
4835 }
4836
4837 /* Add a location description attribute value to a DIE. */
4838
4839 static inline void
4840 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4841 {
4842 dw_attr_node attr;
4843
4844 attr.dw_attr = attr_kind;
4845 attr.dw_attr_val.val_class = dw_val_class_loc;
4846 attr.dw_attr_val.val_entry = NULL;
4847 attr.dw_attr_val.v.val_loc = loc;
4848 add_dwarf_attr (die, &attr);
4849 }
4850
4851 static inline dw_loc_descr_ref
4852 AT_loc (dw_attr_node *a)
4853 {
4854 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4855 return a->dw_attr_val.v.val_loc;
4856 }
4857
4858 static inline void
4859 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4860 {
4861 dw_attr_node attr;
4862
4863 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4864 return;
4865
4866 attr.dw_attr = attr_kind;
4867 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4868 attr.dw_attr_val.val_entry = NULL;
4869 attr.dw_attr_val.v.val_loc_list = loc_list;
4870 add_dwarf_attr (die, &attr);
4871 have_location_lists = true;
4872 }
4873
4874 static inline dw_loc_list_ref
4875 AT_loc_list (dw_attr_node *a)
4876 {
4877 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4878 return a->dw_attr_val.v.val_loc_list;
4879 }
4880
4881 /* Add a view list attribute to DIE. It must have a DW_AT_location
4882 attribute, because the view list complements the location list. */
4883
4884 static inline void
4885 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4886 {
4887 dw_attr_node attr;
4888
4889 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4890 return;
4891
4892 attr.dw_attr = attr_kind;
4893 attr.dw_attr_val.val_class = dw_val_class_view_list;
4894 attr.dw_attr_val.val_entry = NULL;
4895 attr.dw_attr_val.v.val_view_list = die;
4896 add_dwarf_attr (die, &attr);
4897 gcc_checking_assert (get_AT (die, DW_AT_location));
4898 gcc_assert (have_location_lists);
4899 }
4900
4901 /* Return a pointer to the location list referenced by the attribute.
4902 If the named attribute is a view list, look up the corresponding
4903 DW_AT_location attribute and return its location list. */
4904
4905 static inline dw_loc_list_ref *
4906 AT_loc_list_ptr (dw_attr_node *a)
4907 {
4908 gcc_assert (a);
4909 switch (AT_class (a))
4910 {
4911 case dw_val_class_loc_list:
4912 return &a->dw_attr_val.v.val_loc_list;
4913 case dw_val_class_view_list:
4914 {
4915 dw_attr_node *l;
4916 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4917 if (!l)
4918 return NULL;
4919 gcc_checking_assert (l + 1 == a);
4920 return AT_loc_list_ptr (l);
4921 }
4922 default:
4923 gcc_unreachable ();
4924 }
4925 }
4926
4927 /* Return the location attribute value associated with a view list
4928 attribute value. */
4929
4930 static inline dw_val_node *
4931 view_list_to_loc_list_val_node (dw_val_node *val)
4932 {
4933 gcc_assert (val->val_class == dw_val_class_view_list);
4934 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4935 if (!loc)
4936 return NULL;
4937 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4938 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4939 return &loc->dw_attr_val;
4940 }
4941
4942 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4943 {
4944 static hashval_t hash (addr_table_entry *);
4945 static bool equal (addr_table_entry *, addr_table_entry *);
4946 };
4947
4948 /* Table of entries into the .debug_addr section. */
4949
4950 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4951
4952 /* Hash an address_table_entry. */
4953
4954 hashval_t
4955 addr_hasher::hash (addr_table_entry *a)
4956 {
4957 inchash::hash hstate;
4958 switch (a->kind)
4959 {
4960 case ate_kind_rtx:
4961 hstate.add_int (0);
4962 break;
4963 case ate_kind_rtx_dtprel:
4964 hstate.add_int (1);
4965 break;
4966 case ate_kind_label:
4967 return htab_hash_string (a->addr.label);
4968 default:
4969 gcc_unreachable ();
4970 }
4971 inchash::add_rtx (a->addr.rtl, hstate);
4972 return hstate.end ();
4973 }
4974
4975 /* Determine equality for two address_table_entries. */
4976
4977 bool
4978 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4979 {
4980 if (a1->kind != a2->kind)
4981 return 0;
4982 switch (a1->kind)
4983 {
4984 case ate_kind_rtx:
4985 case ate_kind_rtx_dtprel:
4986 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4987 case ate_kind_label:
4988 return strcmp (a1->addr.label, a2->addr.label) == 0;
4989 default:
4990 gcc_unreachable ();
4991 }
4992 }
4993
4994 /* Initialize an addr_table_entry. */
4995
4996 void
4997 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4998 {
4999 e->kind = kind;
5000 switch (kind)
5001 {
5002 case ate_kind_rtx:
5003 case ate_kind_rtx_dtprel:
5004 e->addr.rtl = (rtx) addr;
5005 break;
5006 case ate_kind_label:
5007 e->addr.label = (char *) addr;
5008 break;
5009 }
5010 e->refcount = 0;
5011 e->index = NO_INDEX_ASSIGNED;
5012 }
5013
5014 /* Add an address table entry for ADDR of kind KIND to the table.
5015 Defer setting an index until output time. */
5016
5017 static addr_table_entry *
5018 add_addr_table_entry (void *addr, enum ate_kind kind)
5019 {
5020 addr_table_entry *node;
5021 addr_table_entry finder;
5022
5023 gcc_assert (dwarf_split_debug_info);
5024 if (! addr_index_table)
5025 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5026 init_addr_table_entry (&finder, kind, addr);
5027 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5028
5029 if (*slot == HTAB_EMPTY_ENTRY)
5030 {
5031 node = ggc_cleared_alloc<addr_table_entry> ();
5032 init_addr_table_entry (node, kind, addr);
5033 *slot = node;
5034 }
5035 else
5036 node = *slot;
5037
5038 node->refcount++;
5039 return node;
5040 }
5041
5042 /* Remove an entry from the addr table by decrementing its refcount.
5043 Strictly, decrementing the refcount would be enough, but the
5044 assertion that the entry is actually in the table has found
5045 bugs. */
5046
5047 static void
5048 remove_addr_table_entry (addr_table_entry *entry)
5049 {
5050 gcc_assert (dwarf_split_debug_info && addr_index_table);
5051 /* After an index is assigned, the table is frozen. */
5052 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5053 entry->refcount--;
5054 }
5055
5056 /* Given a location list, remove all addresses it refers to from the
5057 address_table. */
5058
5059 static void
5060 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5061 {
5062 for (; descr; descr = descr->dw_loc_next)
5063 if (descr->dw_loc_oprnd1.val_entry != NULL)
5064 {
5065 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5066 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5067 }
5068 }
5069
5070 /* A helper function for dwarf2out_finish called through
5071 htab_traverse. Assign an addr_table_entry its index. All entries
5072 must be collected into the table when this function is called,
5073 because the indexing code relies on htab_traverse to traverse nodes
5074 in the same order for each run. */
5075
5076 int
5077 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5078 {
5079 addr_table_entry *node = *h;
5080
5081 /* Don't index unreferenced nodes. */
5082 if (node->refcount == 0)
5083 return 1;
5084
5085 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5086 node->index = *index;
5087 *index += 1;
5088
5089 return 1;
5090 }
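
/* Sketch of how the indexing is driven at output time, assuming the
usual hash_table API (the real call site is in dwarf2out_finish):

unsigned int index = 0;
addr_index_table->traverse_noresize
<unsigned int *, index_addr_table_entry> (&index);

traverse_noresize visits the nodes in a stable order, which is what
the ordering requirement above relies on.  */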
5091
5092 /* Add an address constant attribute value to a DIE. When using
5093 dwarf_split_debug_info, address attributes in dies destined for the
5094 final executable should be direct references--setting the parameter
5095 force_direct ensures this behavior. */
5096
5097 static inline void
5098 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5099 bool force_direct)
5100 {
5101 dw_attr_node attr;
5102
5103 attr.dw_attr = attr_kind;
5104 attr.dw_attr_val.val_class = dw_val_class_addr;
5105 attr.dw_attr_val.v.val_addr = addr;
5106 if (dwarf_split_debug_info && !force_direct)
5107 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5108 else
5109 attr.dw_attr_val.val_entry = NULL;
5110 add_dwarf_attr (die, &attr);
5111 }
5112
5113 /* Get the RTX from an address DIE attribute. */
5114
5115 static inline rtx
5116 AT_addr (dw_attr_node *a)
5117 {
5118 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5119 return a->dw_attr_val.v.val_addr;
5120 }
5121
5122 /* Add a file attribute value to a DIE. */
5123
5124 static inline void
5125 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5126 struct dwarf_file_data *fd)
5127 {
5128 dw_attr_node attr;
5129
5130 attr.dw_attr = attr_kind;
5131 attr.dw_attr_val.val_class = dw_val_class_file;
5132 attr.dw_attr_val.val_entry = NULL;
5133 attr.dw_attr_val.v.val_file = fd;
5134 add_dwarf_attr (die, &attr);
5135 }
5136
5137 /* Get the dwarf_file_data from a file DIE attribute. */
5138
5139 static inline struct dwarf_file_data *
5140 AT_file (dw_attr_node *a)
5141 {
5142 gcc_assert (a && (AT_class (a) == dw_val_class_file
5143 || AT_class (a) == dw_val_class_file_implicit));
5144 return a->dw_attr_val.v.val_file;
5145 }
5146
5147 /* Add a vms delta attribute value to a DIE. */
5148
5149 static inline void
5150 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5151 const char *lbl1, const char *lbl2)
5152 {
5153 dw_attr_node attr;
5154
5155 attr.dw_attr = attr_kind;
5156 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5157 attr.dw_attr_val.val_entry = NULL;
5158 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5159 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5160 add_dwarf_attr (die, &attr);
5161 }
5162
5163 /* Add a symbolic view identifier attribute value to a DIE. */
5164
5165 static inline void
5166 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5167 const char *view_label)
5168 {
5169 dw_attr_node attr;
5170
5171 attr.dw_attr = attr_kind;
5172 attr.dw_attr_val.val_class = dw_val_class_symview;
5173 attr.dw_attr_val.val_entry = NULL;
5174 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a label identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *lbl_id)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5190 if (dwarf_split_debug_info)
5191 attr.dw_attr_val.val_entry
5192 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5193 ate_kind_label);
5194 add_dwarf_attr (die, &attr);
5195 }
5196
5197 /* Add a section offset attribute value to a DIE, an offset into the
5198 debug_line section. */
5199
5200 static inline void
5201 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5202 const char *label)
5203 {
5204 dw_attr_node attr;
5205
5206 attr.dw_attr = attr_kind;
5207 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5208 attr.dw_attr_val.val_entry = NULL;
5209 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5210 add_dwarf_attr (die, &attr);
5211 }
5212
5213 /* Add a section offset attribute value to a DIE, an offset into the
5214 debug_macinfo section. */
5215
5216 static inline void
5217 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5218 const char *label)
5219 {
5220 dw_attr_node attr;
5221
5222 attr.dw_attr = attr_kind;
5223 attr.dw_attr_val.val_class = dw_val_class_macptr;
5224 attr.dw_attr_val.val_entry = NULL;
5225 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5226 add_dwarf_attr (die, &attr);
5227 }
5228
5229 /* Add a range_list attribute value to a DIE. When using
5230 dwarf_split_debug_info, address attributes in dies destined for the
5231 final executable should be direct references--setting the parameter
5232 force_direct ensures this behavior. */
5233
5234 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5235 #define RELOCATED_OFFSET (NULL)
5236
5237 static void
5238 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5239 long unsigned int offset, bool force_direct)
5240 {
5241 dw_attr_node attr;
5242
5243 attr.dw_attr = attr_kind;
5244 attr.dw_attr_val.val_class = dw_val_class_range_list;
5245 /* For the range_list attribute, use val_entry to store whether the
5246 offset should follow split-debug-info or normal semantics. This
5247 value is read in output_range_list_offset. */
5248 if (dwarf_split_debug_info && !force_direct)
5249 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5250 else
5251 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5252 attr.dw_attr_val.v.val_offset = offset;
5253 add_dwarf_attr (die, &attr);
5254 }
5255
5256 /* Return the start label of a delta attribute. */
5257
5258 static inline const char *
5259 AT_vms_delta1 (dw_attr_node *a)
5260 {
5261 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5262 return a->dw_attr_val.v.val_vms_delta.lbl1;
5263 }
5264
5265 /* Return the end label of a delta attribute. */
5266
5267 static inline const char *
5268 AT_vms_delta2 (dw_attr_node *a)
5269 {
5270 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5271 return a->dw_attr_val.v.val_vms_delta.lbl2;
5272 }
5273
5274 static inline const char *
5275 AT_lbl (dw_attr_node *a)
5276 {
5277 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5278 || AT_class (a) == dw_val_class_lineptr
5279 || AT_class (a) == dw_val_class_macptr
5280 || AT_class (a) == dw_val_class_loclistsptr
5281 || AT_class (a) == dw_val_class_high_pc));
5282 return a->dw_attr_val.v.val_lbl_id;
5283 }
5284
5285 /* Get the attribute of type attr_kind. */
5286
5287 static dw_attr_node *
5288 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5289 {
5290 dw_attr_node *a;
5291 unsigned ix;
5292 dw_die_ref spec = NULL;
5293
5294 if (! die)
5295 return NULL;
5296
5297 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5298 if (a->dw_attr == attr_kind)
5299 return a;
5300 else if (a->dw_attr == DW_AT_specification
5301 || a->dw_attr == DW_AT_abstract_origin)
5302 spec = AT_ref (a);
5303
5304 if (spec)
5305 return get_AT (spec, attr_kind);
5306
5307 return NULL;
5308 }
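
/* For illustration: get_AT follows DW_AT_specification and
DW_AT_abstract_origin links.  E.g. for a C++ member function defined
out of class, the definition DIE typically carries only a
DW_AT_specification pointing at the in-class declaration, and asking
the definition DIE for DW_AT_name transparently returns the name found
on that declaration.  */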
5309
5310 /* Returns the parent of the declaration of DIE. */
5311
5312 static dw_die_ref
5313 get_die_parent (dw_die_ref die)
5314 {
5315 dw_die_ref t;
5316
5317 if (!die)
5318 return NULL;
5319
5320 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5321 || (t = get_AT_ref (die, DW_AT_specification)))
5322 die = t;
5323
5324 return die->die_parent;
5325 }
5326
5327 /* Return the "low pc" attribute value, typically associated with a subprogram
5328 DIE. Return null if the "low pc" attribute is either not present, or if it
5329 cannot be represented as an assembler label identifier. */
5330
5331 static inline const char *
5332 get_AT_low_pc (dw_die_ref die)
5333 {
5334 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5335
5336 return a ? AT_lbl (a) : NULL;
5337 }
5338
5339 /* Return the value of the string attribute designated by ATTR_KIND, or
5340 NULL if it is not present. */
5341
5342 static inline const char *
5343 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_string (a) : NULL;
5348 }
5349
5350 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5351 if it is not present. */
5352
5353 static inline int
5354 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5355 {
5356 dw_attr_node *a = get_AT (die, attr_kind);
5357
5358 return a ? AT_flag (a) : 0;
5359 }
5360
5361 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5362 if it is not present. */
5363
5364 static inline unsigned
5365 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5366 {
5367 dw_attr_node *a = get_AT (die, attr_kind);
5368
5369 return a ? AT_unsigned (a) : 0;
5370 }
5371
5372 static inline dw_die_ref
5373 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5374 {
5375 dw_attr_node *a = get_AT (die, attr_kind);
5376
5377 return a ? AT_ref (a) : NULL;
5378 }
5379
5380 static inline struct dwarf_file_data *
5381 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5382 {
5383 dw_attr_node *a = get_AT (die, attr_kind);
5384
5385 return a ? AT_file (a) : NULL;
5386 }
5387
5388 /* Return TRUE if the language is C or Objective-C. */
5389
5390 static inline bool
5391 is_c (void)
5392 {
5393 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5394
5395 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5396 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5399 }
5400
5401 /* Return TRUE if the language is C++ or Objective-C++. */
5402
5403 static inline bool
5404 is_cxx (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5409 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5410 }
5411
5412 /* Return TRUE if DECL was created by the C++ frontend. */
5413
5414 static bool
5415 is_cxx (const_tree decl)
5416 {
5417 if (in_lto_p)
5418 {
5419 const_tree context = get_ultimate_context (decl);
5420 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5421 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5422 }
5423 return is_cxx ();
5424 }
5425
5426 /* Return TRUE if the language is Fortran. */
5427
5428 static inline bool
5429 is_fortran (void)
5430 {
5431 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5432
5433 return (lang == DW_LANG_Fortran77
5434 || lang == DW_LANG_Fortran90
5435 || lang == DW_LANG_Fortran95
5436 || lang == DW_LANG_Fortran03
5437 || lang == DW_LANG_Fortran08);
5438 }
5439
5440 static inline bool
5441 is_fortran (const_tree decl)
5442 {
5443 if (in_lto_p)
5444 {
5445 const_tree context = get_ultimate_context (decl);
5446 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5447 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5448 "GNU Fortran", 11) == 0
5449 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU F77") == 0);
5451 }
5452 return is_fortran ();
5453 }
5454
5455 /* Return TRUE if the language is Ada. */
5456
5457 static inline bool
5458 is_ada (void)
5459 {
5460 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5461
5462 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5463 }
5464
5465 /* Return TRUE if the language is D. */
5466
5467 static inline bool
5468 is_dlang (void)
5469 {
5470 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5471
5472 return lang == DW_LANG_D;
5473 }
5474
5475 /* Remove the specified attribute if present. Return TRUE if removal
5476 was successful. */
5477
5478 static bool
5479 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5480 {
5481 dw_attr_node *a;
5482 unsigned ix;
5483
5484 if (! die)
5485 return false;
5486
5487 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5488 if (a->dw_attr == attr_kind)
5489 {
5490 if (AT_class (a) == dw_val_class_str)
5491 if (a->dw_attr_val.v.val_str->refcount)
5492 a->dw_attr_val.v.val_str->refcount--;
5493
5494 /* vec::ordered_remove should help reduce the number of abbrevs
5495 that are needed. */
5496 die->die_attr->ordered_remove (ix);
5497 return true;
5498 }
5499 return false;
5500 }
5501
5502 /* Remove CHILD from its parent. PREV must have the property that
5503 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5504
5505 static void
5506 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5507 {
5508 gcc_assert (child->die_parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == child);
5510 if (prev == child)
5511 {
5512 gcc_assert (child->die_parent->die_child == child);
5513 prev = NULL;
5514 }
5515 else
5516 prev->die_sib = child->die_sib;
5517 if (child->die_parent->die_child == child)
5518 child->die_parent->die_child = prev;
5519 child->die_sib = NULL;
5520 }
5521
5522 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5523 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5524
5525 static void
5526 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5527 {
5528 dw_die_ref parent = old_child->die_parent;
5529
5530 gcc_assert (parent == prev->die_parent);
5531 gcc_assert (prev->die_sib == old_child);
5532
5533 new_child->die_parent = parent;
5534 if (prev == old_child)
5535 {
5536 gcc_assert (parent->die_child == old_child);
5537 new_child->die_sib = new_child;
5538 }
5539 else
5540 {
5541 prev->die_sib = new_child;
5542 new_child->die_sib = old_child->die_sib;
5543 }
5544 if (old_child->die_parent->die_child == old_child)
5545 old_child->die_parent->die_child = new_child;
5546 old_child->die_sib = NULL;
5547 }
5548
5549 /* Move all children from OLD_PARENT to NEW_PARENT. */
5550
5551 static void
5552 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5553 {
5554 dw_die_ref c;
5555 new_parent->die_child = old_parent->die_child;
5556 old_parent->die_child = NULL;
5557 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5558 }
5559
5560 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5561 matches TAG. */
5562
5563 static void
5564 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5565 {
5566 dw_die_ref c;
5567
5568 c = die->die_child;
5569 if (c) do {
5570 dw_die_ref prev = c;
5571 c = c->die_sib;
5572 while (c->die_tag == tag)
5573 {
5574 remove_child_with_prev (c, prev);
5575 c->die_parent = NULL;
5576 /* Might have removed every child. */
5577 if (die->die_child == NULL)
5578 return;
5579 c = prev->die_sib;
5580 }
5581 } while (c != die->die_child);
5582 }
5583
5584 /* Add a CHILD_DIE as the last child of DIE. */
5585
5586 static void
5587 add_child_die (dw_die_ref die, dw_die_ref child_die)
5588 {
5589 /* FIXME this should probably be an assert. */
5590 if (! die || ! child_die)
5591 return;
5592 gcc_assert (die != child_die);
5593
5594 child_die->die_parent = die;
5595 if (die->die_child)
5596 {
5597 child_die->die_sib = die->die_child->die_sib;
5598 die->die_child->die_sib = child_die;
5599 }
5600 else
5601 child_die->die_sib = child_die;
5602 die->die_child = child_die;
5603 }
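
/* Illustration of the representation: the children of a DIE form a
circular singly-linked list through die_sib, and die_child points at
the LAST child, so die_child->die_sib is the first child.  Adding C to
a DIE that already has children A and B thus turns

die_child = B,  A -> B -> A

into

die_child = C,  A -> B -> C -> A.  */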
5604
5605 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5606
5607 static void
5608 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5609 dw_die_ref after_die)
5610 {
5611 gcc_assert (die
5612 && child_die
5613 && after_die
5614 && die->die_child
5615 && die != child_die);
5616
5617 child_die->die_parent = die;
5618 child_die->die_sib = after_die->die_sib;
5619 after_die->die_sib = child_die;
5620 if (die->die_child == after_die)
5621 die->die_child = child_die;
5622 }
5623
5624 /* Unassociate CHILD from its parent, and make its parent be
5625 NEW_PARENT. */
5626
5627 static void
5628 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5629 {
5630 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5631 if (p->die_sib == child)
5632 {
5633 remove_child_with_prev (child, p);
5634 break;
5635 }
5636 add_child_die (new_parent, child);
5637 }
5638
5639 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5640 is the specification, to the end of PARENT's list of children.
5641 This is done by removing and re-adding it. */
5642
5643 static void
5644 splice_child_die (dw_die_ref parent, dw_die_ref child)
5645 {
5646 /* We want the declaration DIE from inside the class, not the
5647 specification DIE at toplevel. */
5648 if (child->die_parent != parent)
5649 {
5650 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5651
5652 if (tmp)
5653 child = tmp;
5654 }
5655
5656 gcc_assert (child->die_parent == parent
5657 || (child->die_parent
5658 == get_AT_ref (parent, DW_AT_specification)));
5659
5660 reparent_child (child, parent);
5661 }
5662
5663 /* Create and return a new die with TAG_VALUE as tag. */
5664
5665 static inline dw_die_ref
5666 new_die_raw (enum dwarf_tag tag_value)
5667 {
5668 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5669 die->die_tag = tag_value;
5670 return die;
5671 }
5672
5673 /* Create and return a new die with a parent of PARENT_DIE. If
5674 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5675 associated tree T must be supplied to determine parenthood
5676 later. */
5677
5678 static inline dw_die_ref
5679 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5680 {
5681 dw_die_ref die = new_die_raw (tag_value);
5682
5683 if (parent_die != NULL)
5684 add_child_die (parent_die, die);
5685 else
5686 {
5687 limbo_die_node *limbo_node;
5688
5689 /* No DIEs created after early dwarf should end up in limbo,
5690 because the limbo list should not persist past LTO
5691 streaming. */
5692 if (tag_value != DW_TAG_compile_unit
5693 /* These are allowed because they're generated while
5694 breaking out COMDAT units late. */
5695 && tag_value != DW_TAG_type_unit
5696 && tag_value != DW_TAG_skeleton_unit
5697 && !early_dwarf
5698 /* Allow nested functions to live in limbo because they will
5699 only temporarily live there, as decls_for_scope will fix
5700 them up. */
5701 && (TREE_CODE (t) != FUNCTION_DECL
5702 || !decl_function_context (t))
5703 /* Same as nested functions above but for types. Types that
5704 are local to a function will be fixed in
5705 decls_for_scope. */
5706 && (!RECORD_OR_UNION_TYPE_P (t)
5707 || !TYPE_CONTEXT (t)
5708 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5709 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5710 especially in the ltrans stage, but once we implement LTO
5711 dwarf streaming, we should remove this exception. */
5712 && !in_lto_p)
5713 {
5714 fprintf (stderr, "symbol ended up in limbo too late:");
5715 debug_generic_stmt (t);
5716 gcc_unreachable ();
5717 }
5718
5719 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5720 limbo_node->die = die;
5721 limbo_node->created_for = t;
5722 limbo_node->next = limbo_die_list;
5723 limbo_die_list = limbo_node;
5724 }
5725
5726 return die;
5727 }
5728
5729 /* Return the DIE associated with the given type specifier. */
5730
5731 static inline dw_die_ref
5732 lookup_type_die (tree type)
5733 {
5734 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5735 if (die && die->removed)
5736 {
5737 TYPE_SYMTAB_DIE (type) = NULL;
5738 return NULL;
5739 }
5740 return die;
5741 }
5742
5743 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5744 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5745 anonymous type instead of the one of the naming typedef. */
5746
5747 static inline dw_die_ref
5748 strip_naming_typedef (tree type, dw_die_ref type_die)
5749 {
5750 if (type
5751 && TREE_CODE (type) == RECORD_TYPE
5752 && type_die
5753 && type_die->die_tag == DW_TAG_typedef
5754 && is_naming_typedef_decl (TYPE_NAME (type)))
5755 type_die = get_AT_ref (type_die, DW_AT_type);
5756 return type_die;
5757 }
5758
5759 /* Like lookup_type_die, but if type is an anonymous type named by a
5760 typedef[1], return the DIE of the anonymous type instead of the one
5761 of the naming typedef.  This is because in gen_typedef_die, we
5762 equated the anonymous struct named by the typedef with the DIE of
5763 the naming typedef. So by default, lookup_type_die on an anonymous
5764 struct yields the DIE of the naming typedef.
5765
5766 [1]: Read the comment of is_naming_typedef_decl to learn about what
5767 a naming typedef is. */
5768
5769 static inline dw_die_ref
5770 lookup_type_die_strip_naming_typedef (tree type)
5771 {
5772 dw_die_ref die = lookup_type_die (type);
5773 return strip_naming_typedef (type, die);
5774 }
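
/* Example, for illustration: given

typedef struct { int i; } S;

S is a naming typedef for the anonymous struct, and gen_typedef_die
equates that struct with the DW_TAG_typedef DIE.  lookup_type_die on
the struct type therefore yields the typedef's DIE, while
lookup_type_die_strip_naming_typedef follows DW_AT_type to return the
DIE of the anonymous struct itself.  */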
5775
5776 /* Equate a DIE to a given type specifier. */
5777
5778 static inline void
5779 equate_type_number_to_die (tree type, dw_die_ref type_die)
5780 {
5781 TYPE_SYMTAB_DIE (type) = type_die;
5782 }
5783
5784 static dw_die_ref maybe_create_die_with_external_ref (tree);
5785 struct GTY(()) sym_off_pair
5786 {
5787 const char * GTY((skip)) sym;
5788 unsigned HOST_WIDE_INT off;
5789 };
5790 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5791
5792 /* Returns a hash value for X (which really is a die_struct). */
5793
5794 inline hashval_t
5795 decl_die_hasher::hash (die_node *x)
5796 {
5797 return (hashval_t) x->decl_id;
5798 }
5799
5800 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5801
5802 inline bool
5803 decl_die_hasher::equal (die_node *x, tree y)
5804 {
5805 return (x->decl_id == DECL_UID (y));
5806 }
5807
5808 /* Return the DIE associated with a given declaration. */
5809
5810 static inline dw_die_ref
5811 lookup_decl_die (tree decl)
5812 {
5813 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5814 NO_INSERT);
5815 if (!die)
5816 {
5817 if (in_lto_p)
5818 return maybe_create_die_with_external_ref (decl);
5819 return NULL;
5820 }
5821 if ((*die)->removed)
5822 {
5823 decl_die_table->clear_slot (die);
5824 return NULL;
5825 }
5826 return *die;
5827 }
5828
5829
5830 /* Return the DIE associated with BLOCK. */
5831
5832 static inline dw_die_ref
5833 lookup_block_die (tree block)
5834 {
5835 dw_die_ref die = BLOCK_DIE (block);
5836 if (!die && in_lto_p)
5837 return maybe_create_die_with_external_ref (block);
5838 return die;
5839 }
5840
5841 /* Associate DIE with BLOCK. */
5842
5843 static inline void
5844 equate_block_to_die (tree block, dw_die_ref die)
5845 {
5846 BLOCK_DIE (block) = die;
5847 }
5848 #undef BLOCK_DIE
5849
5850
5851 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5852 style reference.  Return true if we found one referring to a DIE for
5853 DECL, otherwise return false. */
5854
5855 static bool
5856 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5857 unsigned HOST_WIDE_INT *off)
5858 {
5859 dw_die_ref die;
5860
5861 if (in_lto_p)
5862 {
5863 /* During WPA stage and incremental linking we use a hash-map
5864 to store the decl <-> label + offset map. */
5865 if (!external_die_map)
5866 return false;
5867 sym_off_pair *desc = external_die_map->get (decl);
5868 if (!desc)
5869 return false;
5870 *sym = desc->sym;
5871 *off = desc->off;
5872 return true;
5873 }
5874
5875 if (TREE_CODE (decl) == BLOCK)
5876 die = lookup_block_die (decl);
5877 else
5878 die = lookup_decl_die (decl);
5879 if (!die)
5880 return false;
5881
5882 /* Similar to get_ref_die_offset_label, but using the "correct"
5883 label. */
5884 *off = die->die_offset;
5885 while (die->die_parent)
5886 die = die->die_parent;
5887 /* For the containing CU DIE we compute a die_symbol in
5888 compute_comp_unit_symbol. */
5889 gcc_assert (die->die_tag == DW_TAG_compile_unit
5890 && die->die_id.die_symbol != NULL);
5891 *sym = die->die_id.die_symbol;
5892 return true;
5893 }
5894
5895 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5896
5897 static void
5898 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5899 const char *symbol, HOST_WIDE_INT offset)
5900 {
5901 /* Create a fake DIE that contains the reference. Don't use
5902 new_die because we don't want to end up in the limbo list. */
5903 /* ??? We probably want to share these, thus put a ref to the DIE
5904 we create here to the external_die_map entry. */
5905 dw_die_ref ref = new_die_raw (die->die_tag);
5906 ref->die_id.die_symbol = symbol;
5907 ref->die_offset = offset;
5908 ref->with_offset = 1;
5909 add_AT_die_ref (die, attr_kind, ref);
5910 }
5911
5912 /* Record that the attributes of DECL were dumped early into a DIE at
5913 SYMBOL + OFFSET; a DIE referencing it is created lazily on lookup. */
5914
5915 static void
5916 dwarf2out_register_external_die (tree decl, const char *sym,
5917 unsigned HOST_WIDE_INT off)
5918 {
5919 if (debug_info_level == DINFO_LEVEL_NONE)
5920 return;
5921
5922 if (!external_die_map)
5923 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5924 gcc_checking_assert (!external_die_map->get (decl));
5925 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5926 external_die_map->put (decl, p);
5927 }
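
/* Sketch of the LTO flow these functions implement, for illustration:
the early/compile stage emits a DIE for DECL and remembers its position
as SYM + OFF; at LTRANS time dwarf2out_register_external_die records
that pair, and a later lookup_decl_die creates a concrete stub DIE (see
maybe_create_die_with_external_ref) whose DW_AT_abstract_origin is an
external reference to SYM + OFF instead of a copy of the early
attributes.  */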
5928
5929 /* If we have a registered external DIE for DECL return a new DIE for
5930 the concrete instance with an appropriate abstract origin. */
5931
5932 static dw_die_ref
5933 maybe_create_die_with_external_ref (tree decl)
5934 {
5935 if (!external_die_map)
5936 return NULL;
5937 sym_off_pair *desc = external_die_map->get (decl);
5938 if (!desc)
5939 return NULL;
5940
5941 const char *sym = desc->sym;
5942 unsigned HOST_WIDE_INT off = desc->off;
5943
5944 in_lto_p = false;
5945 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5946 ? lookup_block_die (decl) : lookup_decl_die (decl));
5947 gcc_assert (!die);
5948 in_lto_p = true;
5949
5950 tree ctx;
5951 dw_die_ref parent = NULL;
5952 /* Need to look up a DIE for the decl's context - the containing
5953 function or translation unit. */
5954 if (TREE_CODE (decl) == BLOCK)
5955 {
5956 ctx = BLOCK_SUPERCONTEXT (decl);
5957 /* ??? We do not output DIEs for all scopes thus skip as
5958 many DIEs as needed. */
5959 while (TREE_CODE (ctx) == BLOCK
5960 && !lookup_block_die (ctx))
5961 ctx = BLOCK_SUPERCONTEXT (ctx);
5962 }
5963 else
5964 ctx = DECL_CONTEXT (decl);
5965 /* Peel types in the context stack. */
5966 while (ctx && TYPE_P (ctx))
5967 ctx = TYPE_CONTEXT (ctx);
5968 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5969 if (debug_info_level <= DINFO_LEVEL_TERSE)
5970 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5971 ctx = DECL_CONTEXT (ctx);
5972 if (ctx)
5973 {
5974 if (TREE_CODE (ctx) == BLOCK)
5975 parent = lookup_block_die (ctx);
5976 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5977 /* Keep the 1:1 association during WPA. */
5978 && !flag_wpa
5979 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5980 /* Otherwise all late annotations go to the main CU which
5981 imports the original CUs. */
5982 parent = comp_unit_die ();
5983 else if (TREE_CODE (ctx) == FUNCTION_DECL
5984 && TREE_CODE (decl) != FUNCTION_DECL
5985 && TREE_CODE (decl) != PARM_DECL
5986 && TREE_CODE (decl) != RESULT_DECL
5987 && TREE_CODE (decl) != BLOCK)
5988 /* Leave function local entities parent determination to when
5989 we process scope vars. */
5990 ;
5991 else
5992 parent = lookup_decl_die (ctx);
5993 }
5994 else
5995 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5996 Handle this case gracefully by globalizing stuff. */
5997 parent = comp_unit_die ();
5998 /* Create a DIE "stub". */
5999 switch (TREE_CODE (decl))
6000 {
6001 case TRANSLATION_UNIT_DECL:
6002 {
6003 die = comp_unit_die ();
6004 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6005 to create a DIE for the original CUs. */
6006 return die;
6007 }
6008 case NAMESPACE_DECL:
6009 if (is_fortran (decl))
6010 die = new_die (DW_TAG_module, parent, decl);
6011 else
6012 die = new_die (DW_TAG_namespace, parent, decl);
6013 break;
6014 case FUNCTION_DECL:
6015 die = new_die (DW_TAG_subprogram, parent, decl);
6016 break;
6017 case VAR_DECL:
6018 die = new_die (DW_TAG_variable, parent, decl);
6019 break;
6020 case RESULT_DECL:
6021 die = new_die (DW_TAG_variable, parent, decl);
6022 break;
6023 case PARM_DECL:
6024 die = new_die (DW_TAG_formal_parameter, parent, decl);
6025 break;
6026 case CONST_DECL:
6027 die = new_die (DW_TAG_constant, parent, decl);
6028 break;
6029 case LABEL_DECL:
6030 die = new_die (DW_TAG_label, parent, decl);
6031 break;
6032 case BLOCK:
6033 die = new_die (DW_TAG_lexical_block, parent, decl);
6034 break;
6035 default:
6036 gcc_unreachable ();
6037 }
6038 if (TREE_CODE (decl) == BLOCK)
6039 equate_block_to_die (decl, die);
6040 else
6041 equate_decl_number_to_die (decl, die);
6042
6043 add_desc_attribute (die, decl);
6044
6045 /* Add a reference to the DIE providing early debug at $sym + off. */
6046 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6047
6048 return die;
6049 }
6050
6051 /* Returns a hash value for X (which really is a var_loc_list). */
6052
6053 inline hashval_t
6054 decl_loc_hasher::hash (var_loc_list *x)
6055 {
6056 return (hashval_t) x->decl_id;
6057 }
6058
6059 /* Return nonzero if decl_id of var_loc_list X is the same as
6060 UID of decl *Y. */
6061
6062 inline bool
6063 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6064 {
6065 return (x->decl_id == DECL_UID (y));
6066 }
6067
6068 /* Return the var_loc list associated with a given declaration. */
6069
6070 static inline var_loc_list *
6071 lookup_decl_loc (const_tree decl)
6072 {
6073 if (!decl_loc_table)
6074 return NULL;
6075 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6076 }
6077
6078 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6079
6080 inline hashval_t
6081 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6082 {
6083 return (hashval_t) x->decl_id;
6084 }
6085
6086 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6087 UID of decl *Y. */
6088
6089 inline bool
6090 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6091 {
6092 return (x->decl_id == DECL_UID (y));
6093 }
6094
6095 /* Equate a DIE to a particular declaration. */
6096
6097 static void
6098 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6099 {
6100 unsigned int decl_id = DECL_UID (decl);
6101
6102 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6103 decl_die->decl_id = decl_id;
6104 }
6105
6106 /* Return how many bits the PIECE EXPR_LIST covers. */
6107
6108 static HOST_WIDE_INT
6109 decl_piece_bitsize (rtx piece)
6110 {
6111 int ret = (int) GET_MODE (piece);
6112 if (ret)
6113 return ret;
6114 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6115 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6116 return INTVAL (XEXP (XEXP (piece, 0), 0));
6117 }
6118
6119 /* Return a pointer to the location note stored in PIECE EXPR_LIST. */
6120
6121 static rtx *
6122 decl_piece_varloc_ptr (rtx piece)
6123 {
6124 if ((int) GET_MODE (piece))
6125 return &XEXP (piece, 0);
6126 else
6127 return &XEXP (XEXP (piece, 0), 1);
6128 }
6129
6130 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6131 NEXT is the chain of following piece nodes. */
6132
6133 static rtx_expr_list *
6134 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6135 {
6136 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6137 return alloc_EXPR_LIST (bitsize, loc_note, next);
6138 else
6139 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6140 GEN_INT (bitsize),
6141 loc_note), next);
6142 }
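
/* Encoding sketch, illustrative: a piece's bit size is normally stashed
in the EXPR_LIST's machine-mode field, so a 32-bit piece is simply
alloc_EXPR_LIST (32, loc_note, next) on targets where 32 is below
MAX_MACHINE_MODE.  Sizes that do not fit fall back to mode 0 with a
CONCAT of GEN_INT (bitsize) and the note as payload, which is why
decl_piece_bitsize and decl_piece_varloc_ptr test GET_MODE first.  */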
6143
6144 /* Return rtx that should be stored into loc field for
6145 LOC_NOTE and BITPOS/BITSIZE. */
6146
6147 static rtx
6148 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6149 HOST_WIDE_INT bitsize)
6150 {
6151 if (bitsize != -1)
6152 {
6153 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6154 if (bitpos != 0)
6155 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6156 }
6157 return loc_note;
6158 }
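
/* Worked example, illustrative: for a fragment living in bits [32, 48),
i.e. BITPOS == 32 and BITSIZE == 16, the list built here is

[32-bit padding piece, loc NULL_RTX] -> [16-bit piece, LOC_NOTE]

whereas BITSIZE == -1 means the whole variable and LOC_NOTE is returned
unwrapped.  */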
6159
6160 /* This function either modifies location piece list *DEST in
6161 place (if SRC and INNER are NULL), or copies location piece list
6162 *SRC to *DEST while modifying it.  Location BITPOS is modified
6163 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6164 when copying, not copied) and, if needed, some padding around it
6165 is added.  When modifying in place, DEST should point to the
6166 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6167 copying, SRC points to the start of the whole list and INNER points
6168 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6169
6170 static void
6171 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6172 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6173 HOST_WIDE_INT bitsize, rtx loc_note)
6174 {
6175 HOST_WIDE_INT diff;
6176 bool copy = inner != NULL;
6177
6178 if (copy)
6179 {
6180 /* First copy all nodes preceding the current bitpos. */
6181 while (src != inner)
6182 {
6183 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6184 decl_piece_bitsize (*src), NULL_RTX);
6185 dest = &XEXP (*dest, 1);
6186 src = &XEXP (*src, 1);
6187 }
6188 }
6189 /* Add padding if needed. */
6190 if (bitpos != piece_bitpos)
6191 {
6192 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6193 copy ? NULL_RTX : *dest);
6194 dest = &XEXP (*dest, 1);
6195 }
6196 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6197 {
6198 gcc_assert (!copy);
6199 /* A piece with the correct bitpos and bitsize already exists;
6200 just update its location and return. */
6201 *decl_piece_varloc_ptr (*dest) = loc_note;
6202 return;
6203 }
6204 /* Add the piece that changed. */
6205 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6206 dest = &XEXP (*dest, 1);
6207 /* Skip over pieces that overlap it. */
6208 diff = bitpos - piece_bitpos + bitsize;
6209 if (!copy)
6210 src = dest;
6211 while (diff > 0 && *src)
6212 {
6213 rtx piece = *src;
6214 diff -= decl_piece_bitsize (piece);
6215 if (copy)
6216 src = &XEXP (piece, 1);
6217 else
6218 {
6219 *src = XEXP (piece, 1);
6220 free_EXPR_LIST_node (piece);
6221 }
6222 }
6223 /* Add padding if needed. */
6224 if (diff < 0 && *src)
6225 {
6226 if (!copy)
6227 dest = src;
6228 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6229 dest = &XEXP (*dest, 1);
6230 }
6231 if (!copy)
6232 return;
6233 /* Finally copy all nodes following it. */
6234 while (*src)
6235 {
6236 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6237 decl_piece_bitsize (*src), NULL_RTX);
6238 dest = &XEXP (*dest, 1);
6239 src = &XEXP (*src, 1);
6240 }
6241 }
6242
6243 /* Add a variable location node to the linked list for DECL. */
6244
6245 static struct var_loc_node *
6246 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6247 {
6248 unsigned int decl_id;
6249 var_loc_list *temp;
6250 struct var_loc_node *loc = NULL;
6251 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6252
6253 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6254 {
6255 tree realdecl = DECL_DEBUG_EXPR (decl);
6256 if (handled_component_p (realdecl)
6257 || (TREE_CODE (realdecl) == MEM_REF
6258 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6259 {
6260 bool reverse;
6261 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6262 &bitsize, &reverse);
6263 if (!innerdecl
6264 || !DECL_P (innerdecl)
6265 || DECL_IGNORED_P (innerdecl)
6266 || TREE_STATIC (innerdecl)
6267 || bitsize == 0
6268 || bitpos + bitsize > 256)
6269 return NULL;
6270 decl = innerdecl;
6271 }
6272 }
6273
6274 decl_id = DECL_UID (decl);
6275 var_loc_list **slot
6276 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6277 if (*slot == NULL)
6278 {
6279 temp = ggc_cleared_alloc<var_loc_list> ();
6280 temp->decl_id = decl_id;
6281 *slot = temp;
6282 }
6283 else
6284 temp = *slot;
6285
6286 /* For PARM_DECLs try to keep around the original incoming value,
6287 even if that means we'll emit a zero-range .debug_loc entry. */
6288 if (temp->last
6289 && temp->first == temp->last
6290 && TREE_CODE (decl) == PARM_DECL
6291 && NOTE_P (temp->first->loc)
6292 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6293 && DECL_INCOMING_RTL (decl)
6294 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6295 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6296 == GET_CODE (DECL_INCOMING_RTL (decl))
6297 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6298 && (bitsize != -1
6299 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6300 NOTE_VAR_LOCATION_LOC (loc_note))
6301 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6302 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6303 {
6304 loc = ggc_cleared_alloc<var_loc_node> ();
6305 temp->first->next = loc;
6306 temp->last = loc;
6307 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6308 }
6309 else if (temp->last)
6310 {
6311 struct var_loc_node *last = temp->last, *unused = NULL;
6312 rtx *piece_loc = NULL, last_loc_note;
6313 HOST_WIDE_INT piece_bitpos = 0;
6314 if (last->next)
6315 {
6316 last = last->next;
6317 gcc_assert (last->next == NULL);
6318 }
6319 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6320 {
6321 piece_loc = &last->loc;
6322 do
6323 {
6324 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6325 if (piece_bitpos + cur_bitsize > bitpos)
6326 break;
6327 piece_bitpos += cur_bitsize;
6328 piece_loc = &XEXP (*piece_loc, 1);
6329 }
6330 while (*piece_loc);
6331 }
6332 /* TEMP->LAST here points either to the last-but-one or to the
6333 last element in the chained list; LAST points to the last
6334 element. */
6335 if (label && strcmp (last->label, label) == 0 && last->view == view)
6336 {
6337 /* For SRA-optimized variables, if there weren't any real
6338 insns since the last note, just modify the last node. */
6339 if (piece_loc != NULL)
6340 {
6341 adjust_piece_list (piece_loc, NULL, NULL,
6342 bitpos, piece_bitpos, bitsize, loc_note);
6343 return NULL;
6344 }
6345 /* If the last note doesn't cover any instructions, remove it. */
6346 if (temp->last != last)
6347 {
6348 temp->last->next = NULL;
6349 unused = last;
6350 last = temp->last;
6351 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6352 }
6353 else
6354 {
6355 gcc_assert (temp->first == temp->last
6356 || (temp->first->next == temp->last
6357 && TREE_CODE (decl) == PARM_DECL));
6358 memset (temp->last, '\0', sizeof (*temp->last));
6359 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6360 return temp->last;
6361 }
6362 }
6363 if (bitsize == -1 && NOTE_P (last->loc))
6364 last_loc_note = last->loc;
6365 else if (piece_loc != NULL
6366 && *piece_loc != NULL_RTX
6367 && piece_bitpos == bitpos
6368 && decl_piece_bitsize (*piece_loc) == bitsize)
6369 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6370 else
6371 last_loc_note = NULL_RTX;
6372 /* If the current location is the same as the end of the list,
6373 and either both or neither of the locations is uninitialized,
6374 we have nothing to do. */
6375 if (last_loc_note == NULL_RTX
6376 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6377 NOTE_VAR_LOCATION_LOC (loc_note)))
6378 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6379 != NOTE_VAR_LOCATION_STATUS (loc_note))
6380 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6381 == VAR_INIT_STATUS_UNINITIALIZED)
6382 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6383 == VAR_INIT_STATUS_UNINITIALIZED))))
6384 {
6385 /* Add LOC to the end of list and update LAST. If the last
6386 element of the list has been removed above, reuse its
6387 memory for the new node, otherwise allocate a new one. */
6388 if (unused)
6389 {
6390 loc = unused;
6391 memset (loc, '\0', sizeof (*loc));
6392 }
6393 else
6394 loc = ggc_cleared_alloc<var_loc_node> ();
6395 if (bitsize == -1 || piece_loc == NULL)
6396 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6397 else
6398 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6399 bitpos, piece_bitpos, bitsize, loc_note);
6400 last->next = loc;
6401 /* Ensure TEMP->LAST will point either to the new last but one
6402 element of the chain, or to the last element in it. */
6403 if (last != temp->last)
6404 temp->last = last;
6405 }
6406 else if (unused)
6407 ggc_free (unused);
6408 }
6409 else
6410 {
6411 loc = ggc_cleared_alloc<var_loc_node> ();
6412 temp->first = loc;
6413 temp->last = loc;
6414 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6415 }
6416 return loc;
6417 }
6418 \f
6419 /* Keep track of the number of spaces used to indent the
6420 output of the debugging routines that print the structure of
6421 the DIE internal representation. */
6422 static int print_indent;
6423
6424 /* Indent the line the number of spaces given by print_indent. */
6425
6426 static inline void
6427 print_spaces (FILE *outfile)
6428 {
6429 fprintf (outfile, "%*s", print_indent, "");
6430 }
6431
6432 /* Print a type signature in hex. */
6433
6434 static inline void
6435 print_signature (FILE *outfile, char *sig)
6436 {
6437 int i;
6438
6439 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6440 fprintf (outfile, "%02x", sig[i] & 0xff);
6441 }
6442
6443 static inline void
6444 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6445 {
6446 if (discr_value->pos)
6447 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6448 else
6449 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6450 }
6451
6452 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6453
6454 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6455 RECURSE, output location descriptor operations. */
6456
6457 static void
6458 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6459 {
6460 switch (val->val_class)
6461 {
6462 case dw_val_class_addr:
6463 fprintf (outfile, "address");
6464 break;
6465 case dw_val_class_offset:
6466 fprintf (outfile, "offset");
6467 break;
6468 case dw_val_class_loc:
6469 fprintf (outfile, "location descriptor");
6470 if (val->v.val_loc == NULL)
6471 fprintf (outfile, " -> <null>\n");
6472 else if (recurse)
6473 {
6474 fprintf (outfile, ":\n");
6475 print_indent += 4;
6476 print_loc_descr (val->v.val_loc, outfile);
6477 print_indent -= 4;
6478 }
6479 else
6480 {
6481 if (flag_dump_noaddr || flag_dump_unnumbered)
6482 fprintf (outfile, " #\n");
6483 else
6484 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6485 }
6486 break;
6487 case dw_val_class_loc_list:
6488 fprintf (outfile, "location list -> label:%s",
6489 val->v.val_loc_list->ll_symbol);
6490 break;
6491 case dw_val_class_view_list:
6492 val = view_list_to_loc_list_val_node (val);
6493 fprintf (outfile, "location list with views -> labels:%s and %s",
6494 val->v.val_loc_list->ll_symbol,
6495 val->v.val_loc_list->vl_symbol);
6496 break;
6497 case dw_val_class_range_list:
6498 fprintf (outfile, "range list");
6499 break;
6500 case dw_val_class_const:
6501 case dw_val_class_const_implicit:
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6503 break;
6504 case dw_val_class_unsigned_const:
6505 case dw_val_class_unsigned_const_implicit:
6506 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6507 break;
6508 case dw_val_class_const_double:
6509 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6510 HOST_WIDE_INT_PRINT_UNSIGNED")",
6511 val->v.val_double.high,
6512 val->v.val_double.low);
6513 break;
6514 case dw_val_class_wide_int:
6515 {
6516 int i = val->v.val_wide->get_len ();
6517 fprintf (outfile, "constant (");
6518 gcc_assert (i > 0);
6519 if (val->v.val_wide->elt (i - 1) == 0)
6520 fprintf (outfile, "0x");
6521 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6522 val->v.val_wide->elt (--i));
6523 while (--i >= 0)
6524 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6525 val->v.val_wide->elt (i));
6526 fprintf (outfile, ")");
6527 break;
6528 }
6529 case dw_val_class_vec:
6530 fprintf (outfile, "floating-point or vector constant");
6531 break;
6532 case dw_val_class_flag:
6533 fprintf (outfile, "%u", val->v.val_flag);
6534 break;
6535 case dw_val_class_die_ref:
6536 if (val->v.val_die_ref.die != NULL)
6537 {
6538 dw_die_ref die = val->v.val_die_ref.die;
6539
6540 if (die->comdat_type_p)
6541 {
6542 fprintf (outfile, "die -> signature: ");
6543 print_signature (outfile,
6544 die->die_id.die_type_node->signature);
6545 }
6546 else if (die->die_id.die_symbol)
6547 {
6548 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6549 if (die->with_offset)
6550 fprintf (outfile, " + %ld", die->die_offset);
6551 }
6552 else
6553 fprintf (outfile, "die -> %ld", die->die_offset);
6554 if (flag_dump_noaddr || flag_dump_unnumbered)
6555 fprintf (outfile, " #");
6556 else
6557 fprintf (outfile, " (%p)", (void *) die);
6558 }
6559 else
6560 fprintf (outfile, "die -> <null>");
6561 break;
6562 case dw_val_class_vms_delta:
6563 fprintf (outfile, "delta: @slotcount(%s-%s)",
6564 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6565 break;
6566 case dw_val_class_symview:
6567 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6568 break;
6569 case dw_val_class_lbl_id:
6570 case dw_val_class_lineptr:
6571 case dw_val_class_macptr:
6572 case dw_val_class_loclistsptr:
6573 case dw_val_class_high_pc:
6574 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6575 break;
6576 case dw_val_class_str:
6577 if (val->v.val_str->str != NULL)
6578 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6579 else
6580 fprintf (outfile, "<null>");
6581 break;
6582 case dw_val_class_file:
6583 case dw_val_class_file_implicit:
6584 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6585 val->v.val_file->emitted_number);
6586 break;
6587 case dw_val_class_data8:
6588 {
6589 int i;
6590
6591 for (i = 0; i < 8; i++)
6592 fprintf (outfile, "%02x", val->v.val_data8[i]);
6593 break;
6594 }
6595 case dw_val_class_discr_value:
6596 print_discr_value (outfile, &val->v.val_discr_value);
6597 break;
6598 case dw_val_class_discr_list:
6599 for (dw_discr_list_ref node = val->v.val_discr_list;
6600 node != NULL;
6601 node = node->dw_discr_next)
6602 {
6603 if (node->dw_discr_range)
6604 {
6605 print_discr_value (outfile, &node->dw_discr_lower_bound);
6606 fprintf (outfile, " .. ");
6607 print_discr_value (outfile, &node->dw_discr_upper_bound);
6608 }
6609 else
6610 print_discr_value (outfile, &node->dw_discr_lower_bound);
6611
6612 if (node->dw_discr_next != NULL)
6613 fprintf (outfile, " | ");
6614 }
6615 default:
6616 break;
6617 }
6618 }
6619
6620 /* Likewise, for a DIE attribute. */
6621
6622 static void
6623 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6624 {
6625 print_dw_val (&a->dw_attr_val, recurse, outfile);
6626 }
6627
6628
6629 /* Print the list of operands in the LOC location description to OUTFILE. This
6630 routine is a debugging aid only. */
6631
6632 static void
6633 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6634 {
6635 dw_loc_descr_ref l = loc;
6636
6637 if (loc == NULL)
6638 {
6639 print_spaces (outfile);
6640 fprintf (outfile, "<null>\n");
6641 return;
6642 }
6643
6644 for (l = loc; l != NULL; l = l->dw_loc_next)
6645 {
6646 print_spaces (outfile);
6647 if (flag_dump_noaddr || flag_dump_unnumbered)
6648 fprintf (outfile, "#");
6649 else
6650 fprintf (outfile, "(%p)", (void *) l);
6651 fprintf (outfile, " %s",
6652 dwarf_stack_op_name (l->dw_loc_opc));
6653 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6654 {
6655 fprintf (outfile, " ");
6656 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6657 }
6658 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6659 {
6660 fprintf (outfile, ", ");
6661 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6662 }
6663 fprintf (outfile, "\n");
6664 }
6665 }
6666
6667 /* Print the information associated with a given DIE, and its children.
6668 This routine is a debugging aid only. */
6669
6670 static void
6671 print_die (dw_die_ref die, FILE *outfile)
6672 {
6673 dw_attr_node *a;
6674 dw_die_ref c;
6675 unsigned ix;
6676
6677 print_spaces (outfile);
6678 fprintf (outfile, "DIE %4ld: %s ",
6679 die->die_offset, dwarf_tag_name (die->die_tag));
6680 if (flag_dump_noaddr || flag_dump_unnumbered)
6681 fprintf (outfile, "#\n");
6682 else
6683 fprintf (outfile, "(%p)\n", (void*) die);
6684 print_spaces (outfile);
6685 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6686 fprintf (outfile, " offset: %ld", die->die_offset);
6687 fprintf (outfile, " mark: %d\n", die->die_mark);
6688
6689 if (die->comdat_type_p)
6690 {
6691 print_spaces (outfile);
6692 fprintf (outfile, " signature: ");
6693 print_signature (outfile, die->die_id.die_type_node->signature);
6694 fprintf (outfile, "\n");
6695 }
6696
6697 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6698 {
6699 print_spaces (outfile);
6700 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6701
6702 print_attribute (a, true, outfile);
6703 fprintf (outfile, "\n");
6704 }
6705
6706 if (die->die_child != NULL)
6707 {
6708 print_indent += 4;
6709 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6710 print_indent -= 4;
6711 }
6712 if (print_indent == 0)
6713 fprintf (outfile, "\n");
6714 }
6715
6716 /* Print the list of operations in the LOC location description. */
6717
6718 DEBUG_FUNCTION void
6719 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6720 {
6721 print_loc_descr (loc, stderr);
6722 }
6723
6724 /* Print the information collected for a given DIE. */
6725
6726 DEBUG_FUNCTION void
6727 debug_dwarf_die (dw_die_ref die)
6728 {
6729 print_die (die, stderr);
6730 }
6731
6732 DEBUG_FUNCTION void
6733 debug (die_struct &ref)
6734 {
6735 print_die (&ref, stderr);
6736 }
6737
6738 DEBUG_FUNCTION void
6739 debug (die_struct *ptr)
6740 {
6741 if (ptr)
6742 debug (*ptr);
6743 else
6744 fprintf (stderr, "<nil>\n");
6745 }
6746
6747
6748 /* Print all DWARF information collected for the compilation unit.
6749 This routine is a debugging aid only. */
6750
6751 DEBUG_FUNCTION void
6752 debug_dwarf (void)
6753 {
6754 print_indent = 0;
6755 print_die (comp_unit_die (), stderr);
6756 }
6757
6758 /* Verify the DIE tree structure. */
6759
6760 DEBUG_FUNCTION void
6761 verify_die (dw_die_ref die)
6762 {
6763 gcc_assert (!die->die_mark);
6764 if (die->die_parent == NULL
6765 && die->die_sib == NULL)
6766 return;
6767 /* Verify the die_sib list is cyclic. */
6768 dw_die_ref x = die;
6769 do
6770 {
6771 x->die_mark = 1;
6772 x = x->die_sib;
6773 }
6774 while (x && !x->die_mark);
6775 gcc_assert (x == die);
6776 x = die;
6777 do
6778 {
6779 /* Verify all dies have the same parent. */
6780 gcc_assert (x->die_parent == die->die_parent);
6781 if (x->die_child)
6782 {
6783 /* Verify the child has the proper parent and recurse. */
6784 gcc_assert (x->die_child->die_parent == x);
6785 verify_die (x->die_child);
6786 }
6787 x->die_mark = 0;
6788 x = x->die_sib;
6789 }
6790 while (x && x->die_mark);
6791 }
6792
6793 /* Sanity checks on DIEs. */
6794
6795 static void
6796 check_die (dw_die_ref die)
6797 {
6798 unsigned ix;
6799 dw_attr_node *a;
6800 bool inline_found = false;
6801 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6802 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6803 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6804 {
6805 switch (a->dw_attr)
6806 {
6807 case DW_AT_inline:
6808 if (a->dw_attr_val.v.val_unsigned)
6809 inline_found = true;
6810 break;
6811 case DW_AT_location:
6812 ++n_location;
6813 break;
6814 case DW_AT_low_pc:
6815 ++n_low_pc;
6816 break;
6817 case DW_AT_high_pc:
6818 ++n_high_pc;
6819 break;
6820 case DW_AT_artificial:
6821 ++n_artificial;
6822 break;
6823 case DW_AT_decl_column:
6824 ++n_decl_column;
6825 break;
6826 case DW_AT_decl_line:
6827 ++n_decl_line;
6828 break;
6829 case DW_AT_decl_file:
6830 ++n_decl_file;
6831 break;
6832 default:
6833 break;
6834 }
6835 }
6836 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6837 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6838 {
6839 fprintf (stderr, "Duplicate attributes in DIE:\n");
6840 debug_dwarf_die (die);
6841 gcc_unreachable ();
6842 }
6843 if (inline_found)
6844 {
6845 /* A debugging information entry that is a member of an abstract
6846 instance tree [that has DW_AT_inline] should not contain any
6847 attributes which describe aspects of the subroutine which vary
6848 between distinct inlined expansions or distinct out-of-line
6849 expansions. */
6850 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6851 gcc_assert (a->dw_attr != DW_AT_low_pc
6852 && a->dw_attr != DW_AT_high_pc
6853 && a->dw_attr != DW_AT_location
6854 && a->dw_attr != DW_AT_frame_base
6855 && a->dw_attr != DW_AT_call_all_calls
6856 && a->dw_attr != DW_AT_GNU_all_call_sites);
6857 }
6858 }
6859 \f
6860 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6861 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6862 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6863
6864 /* Calculate the checksum of a location expression. */
6865
6866 static inline void
6867 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6868 {
6869 int tem;
6870 inchash::hash hstate;
6871 hashval_t hash;
6872
6873 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6874 CHECKSUM (tem);
6875 hash_loc_operands (loc, hstate);
6876 hash = hstate.end();
6877 CHECKSUM (hash);
6878 }
6879
6880 /* Calculate the checksum of an attribute. */
6881
6882 static void
6883 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6884 {
6885 dw_loc_descr_ref loc;
6886 rtx r;
6887
6888 CHECKSUM (at->dw_attr);
6889
6890 /* We don't care that this was compiled with a different compiler
6891 snapshot; if the output is the same, that's what matters. */
6892 if (at->dw_attr == DW_AT_producer)
6893 return;
6894
6895 switch (AT_class (at))
6896 {
6897 case dw_val_class_const:
6898 case dw_val_class_const_implicit:
6899 CHECKSUM (at->dw_attr_val.v.val_int);
6900 break;
6901 case dw_val_class_unsigned_const:
6902 case dw_val_class_unsigned_const_implicit:
6903 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6904 break;
6905 case dw_val_class_const_double:
6906 CHECKSUM (at->dw_attr_val.v.val_double);
6907 break;
6908 case dw_val_class_wide_int:
6909 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6910 get_full_len (*at->dw_attr_val.v.val_wide)
6911 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6912 break;
6913 case dw_val_class_vec:
6914 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6915 (at->dw_attr_val.v.val_vec.length
6916 * at->dw_attr_val.v.val_vec.elt_size));
6917 break;
6918 case dw_val_class_flag:
6919 CHECKSUM (at->dw_attr_val.v.val_flag);
6920 break;
6921 case dw_val_class_str:
6922 CHECKSUM_STRING (AT_string (at));
6923 break;
6924
6925 case dw_val_class_addr:
6926 r = AT_addr (at);
6927 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6928 CHECKSUM_STRING (XSTR (r, 0));
6929 break;
6930
6931 case dw_val_class_offset:
6932 CHECKSUM (at->dw_attr_val.v.val_offset);
6933 break;
6934
6935 case dw_val_class_loc:
6936 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6937 loc_checksum (loc, ctx);
6938 break;
6939
6940 case dw_val_class_die_ref:
6941 die_checksum (AT_ref (at), ctx, mark);
6942 break;
6943
6944 case dw_val_class_fde_ref:
6945 case dw_val_class_vms_delta:
6946 case dw_val_class_symview:
6947 case dw_val_class_lbl_id:
6948 case dw_val_class_lineptr:
6949 case dw_val_class_macptr:
6950 case dw_val_class_loclistsptr:
6951 case dw_val_class_high_pc:
6952 break;
6953
6954 case dw_val_class_file:
6955 case dw_val_class_file_implicit:
6956 CHECKSUM_STRING (AT_file (at)->filename);
6957 break;
6958
6959 case dw_val_class_data8:
6960 CHECKSUM (at->dw_attr_val.v.val_data8);
6961 break;
6962
6963 default:
6964 break;
6965 }
6966 }
6967
6968 /* Calculate the checksum of a DIE. */
6969
6970 static void
6971 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6972 {
6973 dw_die_ref c;
6974 dw_attr_node *a;
6975 unsigned ix;
6976
6977 /* To avoid infinite recursion. */
6978 if (die->die_mark)
6979 {
6980 CHECKSUM (die->die_mark);
6981 return;
6982 }
6983 die->die_mark = ++(*mark);
6984
6985 CHECKSUM (die->die_tag);
6986
6987 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6988 attr_checksum (a, ctx, mark);
6989
6990 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6991 }
6992
6993 #undef CHECKSUM
6994 #undef CHECKSUM_BLOCK
6995 #undef CHECKSUM_STRING
6996
6997 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6998 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6999 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7000 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7001 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7002 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7003 #define CHECKSUM_ATTR(FOO) \
7004 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7005
7006 /* Calculate the checksum of a number in signed LEB128 format. */
7007
7008 static void
7009 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 unsigned char byte;
7012 bool more;
7013
7014 while (1)
7015 {
7016 byte = (value & 0x7f);
7017 value >>= 7;
7018 more = !((value == 0 && (byte & 0x40) == 0)
7019 || (value == -1 && (byte & 0x40) != 0));
7020 if (more)
7021 byte |= 0x80;
7022 CHECKSUM (byte);
7023 if (!more)
7024 break;
7025 }
7026 }
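
/* For example, the signed value -2 is checksummed as the single byte 0x7e:
   the low seven bits are 0x7e, the shifted value is -1, and bit 0x40 of the
   byte is set, so the sign is already encoded and no continuation byte is
   needed.  */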
7027
7028 /* Calculate the checksum of a number in unsigned LEB128 format. */
7029
7030 static void
7031 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7032 {
7033 while (1)
7034 {
7035 unsigned char byte = (value & 0x7f);
7036 value >>= 7;
7037 if (value != 0)
7038 /* More bytes to follow. */
7039 byte |= 0x80;
7040 CHECKSUM (byte);
7041 if (value == 0)
7042 break;
7043 }
7044 }
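
/* For example, the unsigned value 300 is checksummed as the two bytes
   0xac 0x02: 0x2c (the low seven bits) with the continuation bit set,
   followed by 0x02 for the remaining bits.  */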
7045
7046 /* Checksum the context of the DIE. This adds the names of any
7047 surrounding namespaces or structures to the checksum. */
7048
7049 static void
7050 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7051 {
7052 const char *name;
7053 dw_die_ref spec;
7054 int tag = die->die_tag;
7055
7056 if (tag != DW_TAG_namespace
7057 && tag != DW_TAG_structure_type
7058 && tag != DW_TAG_class_type)
7059 return;
7060
7061 name = get_AT_string (die, DW_AT_name);
7062
7063 spec = get_AT_ref (die, DW_AT_specification);
7064 if (spec != NULL)
7065 die = spec;
7066
7067 if (die->die_parent != NULL)
7068 checksum_die_context (die->die_parent, ctx);
7069
7070 CHECKSUM_ULEB128 ('C');
7071 CHECKSUM_ULEB128 (tag);
7072 if (name != NULL)
7073 CHECKSUM_STRING (name);
7074 }
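
/* For example, for a hypothetical type Inner declared as

     namespace N { struct Outer { struct Inner { }; }; }

   checksumming Inner's context adds 'C', DW_TAG_namespace, "N" and then
   'C', DW_TAG_structure_type, "Outer" to the hash, outermost scope
   first.  */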
7075
7076 /* Calculate the checksum of a location expression. */
7077
7078 static inline void
7079 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7080 {
7081 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7082 were emitted as a DW_FORM_sdata instead of a location expression. */
7083 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7084 {
7085 CHECKSUM_ULEB128 (DW_FORM_sdata);
7086 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7087 return;
7088 }
7089
7090 /* Otherwise, just checksum the raw location expression. */
7091 while (loc != NULL)
7092 {
7093 inchash::hash hstate;
7094 hashval_t hash;
7095
7096 CHECKSUM_ULEB128 (loc->dtprel);
7097 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7098 hash_loc_operands (loc, hstate);
7099 hash = hstate.end ();
7100 CHECKSUM (hash);
7101 loc = loc->dw_loc_next;
7102 }
7103 }
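
/* The DW_OP_plus_uconst special case above keeps the signature stable for
   attributes such as DW_AT_data_member_location, which may be emitted
   either as a plain constant offset or as an equivalent single-operator
   location expression.  */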
7104
7105 /* Calculate the checksum of an attribute. */
7106
7107 static void
7108 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7109 struct md5_ctx *ctx, int *mark)
7110 {
7111 dw_loc_descr_ref loc;
7112 rtx r;
7113
7114 if (AT_class (at) == dw_val_class_die_ref)
7115 {
7116 dw_die_ref target_die = AT_ref (at);
7117
7118 /* For pointer and reference types, we checksum only the (qualified)
7119 name of the target type (if there is a name). For friend entries,
7120 we checksum only the (qualified) name of the target type or function.
7121 This allows the checksum to remain the same whether the target type
7122 is complete or not. */
7123 if ((at->dw_attr == DW_AT_type
7124 && (tag == DW_TAG_pointer_type
7125 || tag == DW_TAG_reference_type
7126 || tag == DW_TAG_rvalue_reference_type
7127 || tag == DW_TAG_ptr_to_member_type))
7128 || (at->dw_attr == DW_AT_friend
7129 && tag == DW_TAG_friend))
7130 {
7131 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7132
7133 if (name_attr != NULL)
7134 {
7135 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7136
7137 if (decl == NULL)
7138 decl = target_die;
7139 CHECKSUM_ULEB128 ('N');
7140 CHECKSUM_ULEB128 (at->dw_attr);
7141 if (decl->die_parent != NULL)
7142 checksum_die_context (decl->die_parent, ctx);
7143 CHECKSUM_ULEB128 ('E');
7144 CHECKSUM_STRING (AT_string (name_attr));
7145 return;
7146 }
7147 }
7148
7149 /* For all other references to another DIE, we check to see if the
7150 target DIE has already been visited. If it has, we emit a
7151 backward reference; if not, we descend recursively. */
7152 if (target_die->die_mark > 0)
7153 {
7154 CHECKSUM_ULEB128 ('R');
7155 CHECKSUM_ULEB128 (at->dw_attr);
7156 CHECKSUM_ULEB128 (target_die->die_mark);
7157 }
7158 else
7159 {
7160 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7161
7162 if (decl == NULL)
7163 decl = target_die;
7164 target_die->die_mark = ++(*mark);
7165 CHECKSUM_ULEB128 ('T');
7166 CHECKSUM_ULEB128 (at->dw_attr);
7167 if (decl->die_parent != NULL)
7168 checksum_die_context (decl->die_parent, ctx);
7169 die_checksum_ordered (target_die, ctx, mark);
7170 }
7171 return;
7172 }
7173
7174 CHECKSUM_ULEB128 ('A');
7175 CHECKSUM_ULEB128 (at->dw_attr);
7176
7177 switch (AT_class (at))
7178 {
7179 case dw_val_class_const:
7180 case dw_val_class_const_implicit:
7181 CHECKSUM_ULEB128 (DW_FORM_sdata);
7182 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7183 break;
7184
7185 case dw_val_class_unsigned_const:
7186 case dw_val_class_unsigned_const_implicit:
7187 CHECKSUM_ULEB128 (DW_FORM_sdata);
7188 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7189 break;
7190
7191 case dw_val_class_const_double:
7192 CHECKSUM_ULEB128 (DW_FORM_block);
7193 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7194 CHECKSUM (at->dw_attr_val.v.val_double);
7195 break;
7196
7197 case dw_val_class_wide_int:
7198 CHECKSUM_ULEB128 (DW_FORM_block);
7199 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7200 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7201 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7202 get_full_len (*at->dw_attr_val.v.val_wide)
7203 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7204 break;
7205
7206 case dw_val_class_vec:
7207 CHECKSUM_ULEB128 (DW_FORM_block);
7208 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7209 * at->dw_attr_val.v.val_vec.elt_size);
7210 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7211 (at->dw_attr_val.v.val_vec.length
7212 * at->dw_attr_val.v.val_vec.elt_size));
7213 break;
7214
7215 case dw_val_class_flag:
7216 CHECKSUM_ULEB128 (DW_FORM_flag);
7217 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7218 break;
7219
7220 case dw_val_class_str:
7221 CHECKSUM_ULEB128 (DW_FORM_string);
7222 CHECKSUM_STRING (AT_string (at));
7223 break;
7224
7225 case dw_val_class_addr:
7226 r = AT_addr (at);
7227 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7228 CHECKSUM_ULEB128 (DW_FORM_string);
7229 CHECKSUM_STRING (XSTR (r, 0));
7230 break;
7231
7232 case dw_val_class_offset:
7233 CHECKSUM_ULEB128 (DW_FORM_sdata);
7234 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7235 break;
7236
7237 case dw_val_class_loc:
7238 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7239 loc_checksum_ordered (loc, ctx);
7240 break;
7241
7242 case dw_val_class_fde_ref:
7243 case dw_val_class_symview:
7244 case dw_val_class_lbl_id:
7245 case dw_val_class_lineptr:
7246 case dw_val_class_macptr:
7247 case dw_val_class_loclistsptr:
7248 case dw_val_class_high_pc:
7249 break;
7250
7251 case dw_val_class_file:
7252 case dw_val_class_file_implicit:
7253 CHECKSUM_ULEB128 (DW_FORM_string);
7254 CHECKSUM_STRING (AT_file (at)->filename);
7255 break;
7256
7257 case dw_val_class_data8:
7258 CHECKSUM (at->dw_attr_val.v.val_data8);
7259 break;
7260
7261 default:
7262 break;
7263 }
7264 }
7265
7266 struct checksum_attributes
7267 {
7268 dw_attr_node *at_name;
7269 dw_attr_node *at_type;
7270 dw_attr_node *at_friend;
7271 dw_attr_node *at_accessibility;
7272 dw_attr_node *at_address_class;
7273 dw_attr_node *at_alignment;
7274 dw_attr_node *at_allocated;
7275 dw_attr_node *at_artificial;
7276 dw_attr_node *at_associated;
7277 dw_attr_node *at_binary_scale;
7278 dw_attr_node *at_bit_offset;
7279 dw_attr_node *at_bit_size;
7280 dw_attr_node *at_bit_stride;
7281 dw_attr_node *at_byte_size;
7282 dw_attr_node *at_byte_stride;
7283 dw_attr_node *at_const_value;
7284 dw_attr_node *at_containing_type;
7285 dw_attr_node *at_count;
7286 dw_attr_node *at_data_location;
7287 dw_attr_node *at_data_member_location;
7288 dw_attr_node *at_decimal_scale;
7289 dw_attr_node *at_decimal_sign;
7290 dw_attr_node *at_default_value;
7291 dw_attr_node *at_digit_count;
7292 dw_attr_node *at_discr;
7293 dw_attr_node *at_discr_list;
7294 dw_attr_node *at_discr_value;
7295 dw_attr_node *at_encoding;
7296 dw_attr_node *at_endianity;
7297 dw_attr_node *at_explicit;
7298 dw_attr_node *at_is_optional;
7299 dw_attr_node *at_location;
7300 dw_attr_node *at_lower_bound;
7301 dw_attr_node *at_mutable;
7302 dw_attr_node *at_ordering;
7303 dw_attr_node *at_picture_string;
7304 dw_attr_node *at_prototyped;
7305 dw_attr_node *at_small;
7306 dw_attr_node *at_segment;
7307 dw_attr_node *at_string_length;
7308 dw_attr_node *at_string_length_bit_size;
7309 dw_attr_node *at_string_length_byte_size;
7310 dw_attr_node *at_threads_scaled;
7311 dw_attr_node *at_upper_bound;
7312 dw_attr_node *at_use_location;
7313 dw_attr_node *at_use_UTF8;
7314 dw_attr_node *at_variable_parameter;
7315 dw_attr_node *at_virtuality;
7316 dw_attr_node *at_visibility;
7317 dw_attr_node *at_vtable_elem_location;
7318 };
7319
7320 /* Collect the attributes that we will want to use for the checksum. */
7321
7322 static void
7323 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7324 {
7325 dw_attr_node *a;
7326 unsigned ix;
7327
7328 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7329 {
7330 switch (a->dw_attr)
7331 {
7332 case DW_AT_name:
7333 attrs->at_name = a;
7334 break;
7335 case DW_AT_type:
7336 attrs->at_type = a;
7337 break;
7338 case DW_AT_friend:
7339 attrs->at_friend = a;
7340 break;
7341 case DW_AT_accessibility:
7342 attrs->at_accessibility = a;
7343 break;
7344 case DW_AT_address_class:
7345 attrs->at_address_class = a;
7346 break;
7347 case DW_AT_alignment:
7348 attrs->at_alignment = a;
7349 break;
7350 case DW_AT_allocated:
7351 attrs->at_allocated = a;
7352 break;
7353 case DW_AT_artificial:
7354 attrs->at_artificial = a;
7355 break;
7356 case DW_AT_associated:
7357 attrs->at_associated = a;
7358 break;
7359 case DW_AT_binary_scale:
7360 attrs->at_binary_scale = a;
7361 break;
7362 case DW_AT_bit_offset:
7363 attrs->at_bit_offset = a;
7364 break;
7365 case DW_AT_bit_size:
7366 attrs->at_bit_size = a;
7367 break;
7368 case DW_AT_bit_stride:
7369 attrs->at_bit_stride = a;
7370 break;
7371 case DW_AT_byte_size:
7372 attrs->at_byte_size = a;
7373 break;
7374 case DW_AT_byte_stride:
7375 attrs->at_byte_stride = a;
7376 break;
7377 case DW_AT_const_value:
7378 attrs->at_const_value = a;
7379 break;
7380 case DW_AT_containing_type:
7381 attrs->at_containing_type = a;
7382 break;
7383 case DW_AT_count:
7384 attrs->at_count = a;
7385 break;
7386 case DW_AT_data_location:
7387 attrs->at_data_location = a;
7388 break;
7389 case DW_AT_data_member_location:
7390 attrs->at_data_member_location = a;
7391 break;
7392 case DW_AT_decimal_scale:
7393 attrs->at_decimal_scale = a;
7394 break;
7395 case DW_AT_decimal_sign:
7396 attrs->at_decimal_sign = a;
7397 break;
7398 case DW_AT_default_value:
7399 attrs->at_default_value = a;
7400 break;
7401 case DW_AT_digit_count:
7402 attrs->at_digit_count = a;
7403 break;
7404 case DW_AT_discr:
7405 attrs->at_discr = a;
7406 break;
7407 case DW_AT_discr_list:
7408 attrs->at_discr_list = a;
7409 break;
7410 case DW_AT_discr_value:
7411 attrs->at_discr_value = a;
7412 break;
7413 case DW_AT_encoding:
7414 attrs->at_encoding = a;
7415 break;
7416 case DW_AT_endianity:
7417 attrs->at_endianity = a;
7418 break;
7419 case DW_AT_explicit:
7420 attrs->at_explicit = a;
7421 break;
7422 case DW_AT_is_optional:
7423 attrs->at_is_optional = a;
7424 break;
7425 case DW_AT_location:
7426 attrs->at_location = a;
7427 break;
7428 case DW_AT_lower_bound:
7429 attrs->at_lower_bound = a;
7430 break;
7431 case DW_AT_mutable:
7432 attrs->at_mutable = a;
7433 break;
7434 case DW_AT_ordering:
7435 attrs->at_ordering = a;
7436 break;
7437 case DW_AT_picture_string:
7438 attrs->at_picture_string = a;
7439 break;
7440 case DW_AT_prototyped:
7441 attrs->at_prototyped = a;
7442 break;
7443 case DW_AT_small:
7444 attrs->at_small = a;
7445 break;
7446 case DW_AT_segment:
7447 attrs->at_segment = a;
7448 break;
7449 case DW_AT_string_length:
7450 attrs->at_string_length = a;
7451 break;
7452 case DW_AT_string_length_bit_size:
7453 attrs->at_string_length_bit_size = a;
7454 break;
7455 case DW_AT_string_length_byte_size:
7456 attrs->at_string_length_byte_size = a;
7457 break;
7458 case DW_AT_threads_scaled:
7459 attrs->at_threads_scaled = a;
7460 break;
7461 case DW_AT_upper_bound:
7462 attrs->at_upper_bound = a;
7463 break;
7464 case DW_AT_use_location:
7465 attrs->at_use_location = a;
7466 break;
7467 case DW_AT_use_UTF8:
7468 attrs->at_use_UTF8 = a;
7469 break;
7470 case DW_AT_variable_parameter:
7471 attrs->at_variable_parameter = a;
7472 break;
7473 case DW_AT_virtuality:
7474 attrs->at_virtuality = a;
7475 break;
7476 case DW_AT_visibility:
7477 attrs->at_visibility = a;
7478 break;
7479 case DW_AT_vtable_elem_location:
7480 attrs->at_vtable_elem_location = a;
7481 break;
7482 default:
7483 break;
7484 }
7485 }
7486 }
7487
7488 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7489
7490 static void
7491 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7492 {
7493 dw_die_ref c;
7494 dw_die_ref decl;
7495 struct checksum_attributes attrs;
7496
7497 CHECKSUM_ULEB128 ('D');
7498 CHECKSUM_ULEB128 (die->die_tag);
7499
7500 memset (&attrs, 0, sizeof (attrs));
7501
7502 decl = get_AT_ref (die, DW_AT_specification);
7503 if (decl != NULL)
7504 collect_checksum_attributes (&attrs, decl);
7505 collect_checksum_attributes (&attrs, die);
7506
7507 CHECKSUM_ATTR (attrs.at_name);
7508 CHECKSUM_ATTR (attrs.at_accessibility);
7509 CHECKSUM_ATTR (attrs.at_address_class);
7510 CHECKSUM_ATTR (attrs.at_allocated);
7511 CHECKSUM_ATTR (attrs.at_artificial);
7512 CHECKSUM_ATTR (attrs.at_associated);
7513 CHECKSUM_ATTR (attrs.at_binary_scale);
7514 CHECKSUM_ATTR (attrs.at_bit_offset);
7515 CHECKSUM_ATTR (attrs.at_bit_size);
7516 CHECKSUM_ATTR (attrs.at_bit_stride);
7517 CHECKSUM_ATTR (attrs.at_byte_size);
7518 CHECKSUM_ATTR (attrs.at_byte_stride);
7519 CHECKSUM_ATTR (attrs.at_const_value);
7520 CHECKSUM_ATTR (attrs.at_containing_type);
7521 CHECKSUM_ATTR (attrs.at_count);
7522 CHECKSUM_ATTR (attrs.at_data_location);
7523 CHECKSUM_ATTR (attrs.at_data_member_location);
7524 CHECKSUM_ATTR (attrs.at_decimal_scale);
7525 CHECKSUM_ATTR (attrs.at_decimal_sign);
7526 CHECKSUM_ATTR (attrs.at_default_value);
7527 CHECKSUM_ATTR (attrs.at_digit_count);
7528 CHECKSUM_ATTR (attrs.at_discr);
7529 CHECKSUM_ATTR (attrs.at_discr_list);
7530 CHECKSUM_ATTR (attrs.at_discr_value);
7531 CHECKSUM_ATTR (attrs.at_encoding);
7532 CHECKSUM_ATTR (attrs.at_endianity);
7533 CHECKSUM_ATTR (attrs.at_explicit);
7534 CHECKSUM_ATTR (attrs.at_is_optional);
7535 CHECKSUM_ATTR (attrs.at_location);
7536 CHECKSUM_ATTR (attrs.at_lower_bound);
7537 CHECKSUM_ATTR (attrs.at_mutable);
7538 CHECKSUM_ATTR (attrs.at_ordering);
7539 CHECKSUM_ATTR (attrs.at_picture_string);
7540 CHECKSUM_ATTR (attrs.at_prototyped);
7541 CHECKSUM_ATTR (attrs.at_small);
7542 CHECKSUM_ATTR (attrs.at_segment);
7543 CHECKSUM_ATTR (attrs.at_string_length);
7544 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7545 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7546 CHECKSUM_ATTR (attrs.at_threads_scaled);
7547 CHECKSUM_ATTR (attrs.at_upper_bound);
7548 CHECKSUM_ATTR (attrs.at_use_location);
7549 CHECKSUM_ATTR (attrs.at_use_UTF8);
7550 CHECKSUM_ATTR (attrs.at_variable_parameter);
7551 CHECKSUM_ATTR (attrs.at_virtuality);
7552 CHECKSUM_ATTR (attrs.at_visibility);
7553 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7554 CHECKSUM_ATTR (attrs.at_type);
7555 CHECKSUM_ATTR (attrs.at_friend);
7556 CHECKSUM_ATTR (attrs.at_alignment);
7557
7558 /* Checksum the child DIEs. */
7559 c = die->die_child;
7560 if (c) do {
7561 dw_attr_node *name_attr;
7562
7563 c = c->die_sib;
7564 name_attr = get_AT (c, DW_AT_name);
7565 if (is_template_instantiation (c))
7566 {
7567 /* Ignore instantiations of member type and function templates. */
7568 }
7569 else if (name_attr != NULL
7570 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7571 {
7572 /* Use a shallow checksum for named nested types and member
7573 functions. */
7574 CHECKSUM_ULEB128 ('S');
7575 CHECKSUM_ULEB128 (c->die_tag);
7576 CHECKSUM_STRING (AT_string (name_attr));
7577 }
7578 else
7579 {
7580 /* Use a deep checksum for other children. */
7581 /* Mark this DIE so it gets processed when unmarking. */
7582 if (c->die_mark == 0)
7583 c->die_mark = -1;
7584 die_checksum_ordered (c, ctx, mark);
7585 }
7586 } while (c != die->die_child);
7587
7588 CHECKSUM_ULEB128 (0);
7589 }
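
/* The fixed attribute order used above, together with the 'D', 'A', 'R',
   'N', 'S', 'E' and 'T' markers, follows the type-signature computation
   described in the DWARF 4 standard, the intent being that independent
   producers compute the same signature for the same type definition.  */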
7590
7591 /* Add a type name and tag to a hash. */
7592 static void
7593 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7594 {
7595 CHECKSUM_ULEB128 (tag);
7596 CHECKSUM_STRING (name);
7597 }
7598
7599 #undef CHECKSUM
7600 #undef CHECKSUM_STRING
7601 #undef CHECKSUM_ATTR
7602 #undef CHECKSUM_SLEB128
7603 #undef CHECKSUM_ULEB128
7604
7605 /* Generate the type signature for DIE. This is computed by generating an
7606 MD5 checksum over the DIE's tag, its relevant attributes, and its
7607 children. Attributes that are references to other DIEs are processed
7608 by recursion, using the MARK field to prevent infinite recursion.
7609 If the DIE is nested inside a namespace or another type, we also
7610 need to include that context in the signature. The lower 64 bits
7611 of the resulting MD5 checksum comprise the signature. */
7612
7613 static void
7614 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7615 {
7616 int mark;
7617 const char *name;
7618 unsigned char checksum[16];
7619 struct md5_ctx ctx;
7620 dw_die_ref decl;
7621 dw_die_ref parent;
7622
7623 name = get_AT_string (die, DW_AT_name);
7624 decl = get_AT_ref (die, DW_AT_specification);
7625 parent = get_die_parent (die);
7626
7627 /* First, compute a signature for just the type name (and its surrounding
7628 context, if any).  This is stored in the type unit DIE for link-time
7629 ODR (one-definition rule) checking. */
7630
7631 if (is_cxx () && name != NULL)
7632 {
7633 md5_init_ctx (&ctx);
7634
7635 /* Checksum the names of surrounding namespaces and structures. */
7636 if (parent != NULL)
7637 checksum_die_context (parent, &ctx);
7638
7639 /* Checksum the current DIE. */
7640 die_odr_checksum (die->die_tag, name, &ctx);
7641 md5_finish_ctx (&ctx, checksum);
7642
7643 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7644 }
7645
7646 /* Next, compute the complete type signature. */
7647
7648 md5_init_ctx (&ctx);
7649 mark = 1;
7650 die->die_mark = mark;
7651
7652 /* Checksum the names of surrounding namespaces and structures. */
7653 if (parent != NULL)
7654 checksum_die_context (parent, &ctx);
7655
7656 /* Checksum the DIE and its children. */
7657 die_checksum_ordered (die, &ctx, &mark);
7658 unmark_all_dies (die);
7659 md5_finish_ctx (&ctx, checksum);
7660
7661 /* Store the signature in the type node and link the type DIE and the
7662 type node together. */
7663 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7664 DWARF_TYPE_SIGNATURE_SIZE);
7665 die->comdat_type_p = true;
7666 die->die_id.die_type_node = type_node;
7667 type_node->type_die = die;
7668
7669 /* If the DIE is a specification, link its declaration to the type node
7670 as well. */
7671 if (decl != NULL)
7672 {
7673 decl->comdat_type_p = true;
7674 decl->die_id.die_type_node = type_node;
7675 }
7676 }
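
/* In short: the 16-byte MD5 digest covers the type's context, tag, ordered
   attributes and children, and its final DWARF_TYPE_SIGNATURE_SIZE
   (normally 8) bytes become the COMDAT type signature referenced elsewhere
   via DW_AT_signature, while the separate name-only digest above supplies
   DW_AT_GNU_odr_signature.  */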
7677
7678 /* Do the location expressions look the same? */
7679 static inline int
7680 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7681 {
7682 return loc1->dw_loc_opc == loc2->dw_loc_opc
7683 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7684 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7685 }
7686
7687 /* Do the values look the same? */
7688 static int
7689 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7690 {
7691 dw_loc_descr_ref loc1, loc2;
7692 rtx r1, r2;
7693
7694 if (v1->val_class != v2->val_class)
7695 return 0;
7696
7697 switch (v1->val_class)
7698 {
7699 case dw_val_class_const:
7700 case dw_val_class_const_implicit:
7701 return v1->v.val_int == v2->v.val_int;
7702 case dw_val_class_unsigned_const:
7703 case dw_val_class_unsigned_const_implicit:
7704 return v1->v.val_unsigned == v2->v.val_unsigned;
7705 case dw_val_class_const_double:
7706 return v1->v.val_double.high == v2->v.val_double.high
7707 && v1->v.val_double.low == v2->v.val_double.low;
7708 case dw_val_class_wide_int:
7709 return *v1->v.val_wide == *v2->v.val_wide;
7710 case dw_val_class_vec:
7711 if (v1->v.val_vec.length != v2->v.val_vec.length
7712 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7713 return 0;
7714 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7715 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7716 return 0;
7717 return 1;
7718 case dw_val_class_flag:
7719 return v1->v.val_flag == v2->v.val_flag;
7720 case dw_val_class_str:
7721 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7722
7723 case dw_val_class_addr:
7724 r1 = v1->v.val_addr;
7725 r2 = v2->v.val_addr;
7726 if (GET_CODE (r1) != GET_CODE (r2))
7727 return 0;
7728 return !rtx_equal_p (r1, r2);
7729
7730 case dw_val_class_offset:
7731 return v1->v.val_offset == v2->v.val_offset;
7732
7733 case dw_val_class_loc:
7734 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7735 loc1 && loc2;
7736 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7737 if (!same_loc_p (loc1, loc2, mark))
7738 return 0;
7739 return !loc1 && !loc2;
7740
7741 case dw_val_class_die_ref:
7742 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7743
7744 case dw_val_class_symview:
7745 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7746
7747 case dw_val_class_fde_ref:
7748 case dw_val_class_vms_delta:
7749 case dw_val_class_lbl_id:
7750 case dw_val_class_lineptr:
7751 case dw_val_class_macptr:
7752 case dw_val_class_loclistsptr:
7753 case dw_val_class_high_pc:
7754 return 1;
7755
7756 case dw_val_class_file:
7757 case dw_val_class_file_implicit:
7758 return v1->v.val_file == v2->v.val_file;
7759
7760 case dw_val_class_data8:
7761 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7762
7763 default:
7764 return 1;
7765 }
7766 }
7767
7768 /* Do the attributes look the same? */
7769
7770 static int
7771 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7772 {
7773 if (at1->dw_attr != at2->dw_attr)
7774 return 0;
7775
7776 /* We don't care that this was compiled with a different compiler
7777 snapshot; if the output is the same, that's what matters. */
7778 if (at1->dw_attr == DW_AT_producer)
7779 return 1;
7780
7781 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7782 }
7783
7784 /* Do the DIEs look the same? */
7785
7786 static int
7787 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7788 {
7789 dw_die_ref c1, c2;
7790 dw_attr_node *a1;
7791 unsigned ix;
7792
7793 /* To avoid infinite recursion. */
7794 if (die1->die_mark)
7795 return die1->die_mark == die2->die_mark;
7796 die1->die_mark = die2->die_mark = ++(*mark);
7797
7798 if (die1->die_tag != die2->die_tag)
7799 return 0;
7800
7801 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7802 return 0;
7803
7804 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7805 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7806 return 0;
7807
7808 c1 = die1->die_child;
7809 c2 = die2->die_child;
7810 if (! c1)
7811 {
7812 if (c2)
7813 return 0;
7814 }
7815 else
7816 for (;;)
7817 {
7818 if (!same_die_p (c1, c2, mark))
7819 return 0;
7820 c1 = c1->die_sib;
7821 c2 = c2->die_sib;
7822 if (c1 == die1->die_child)
7823 {
7824 if (c2 == die2->die_child)
7825 break;
7826 else
7827 return 0;
7828 }
7829 }
7830
7831 return 1;
7832 }
7833
7834 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7835 children, and set die_symbol. */
7836
7837 static void
7838 compute_comp_unit_symbol (dw_die_ref unit_die)
7839 {
7840 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7841 const char *base = die_name ? lbasename (die_name) : "anonymous";
7842 char *name = XALLOCAVEC (char, strlen (base) + 64);
7843 char *p;
7844 int i, mark;
7845 unsigned char checksum[16];
7846 struct md5_ctx ctx;
7847
7848 /* Compute the checksum of the DIE, then append part of it as hex digits to
7849 the basename of the unit's filename. */
7850
7851 md5_init_ctx (&ctx);
7852 mark = 0;
7853 die_checksum (unit_die, &ctx, &mark);
7854 unmark_all_dies (unit_die);
7855 md5_finish_ctx (&ctx, checksum);
7856
7857 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7858 not start with a letter but with anything valid for filenames and
7859 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7860 character is not a letter. */
7861 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7862 clean_symbol_name (name);
7863
7864 p = name + strlen (name);
7865 for (i = 0; i < 4; i++)
7866 {
7867 sprintf (p, "%.2x", checksum[i]);
7868 p += 2;
7869 }
7870
7871 unit_die->die_id.die_symbol = xstrdup (name);
7872 }
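
/* For a unit named "foo.c" this typically produces a symbol along the lines
   of "foo_c_" followed by eight hex digits of the checksum; the exact
   spelling depends on how clean_symbol_name rewrites characters that are
   not valid in assembler names.  */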
7873
7874 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7875
7876 static int
7877 is_type_die (dw_die_ref die)
7878 {
7879 switch (die->die_tag)
7880 {
7881 case DW_TAG_array_type:
7882 case DW_TAG_class_type:
7883 case DW_TAG_interface_type:
7884 case DW_TAG_enumeration_type:
7885 case DW_TAG_pointer_type:
7886 case DW_TAG_reference_type:
7887 case DW_TAG_rvalue_reference_type:
7888 case DW_TAG_string_type:
7889 case DW_TAG_structure_type:
7890 case DW_TAG_subroutine_type:
7891 case DW_TAG_union_type:
7892 case DW_TAG_ptr_to_member_type:
7893 case DW_TAG_set_type:
7894 case DW_TAG_subrange_type:
7895 case DW_TAG_base_type:
7896 case DW_TAG_const_type:
7897 case DW_TAG_file_type:
7898 case DW_TAG_packed_type:
7899 case DW_TAG_volatile_type:
7900 case DW_TAG_typedef:
7901 return 1;
7902 default:
7903 return 0;
7904 }
7905 }
7906
7907 /* Returns true iff C is a compile-unit DIE. */
7908
7909 static inline bool
7910 is_cu_die (dw_die_ref c)
7911 {
7912 return c && (c->die_tag == DW_TAG_compile_unit
7913 || c->die_tag == DW_TAG_skeleton_unit);
7914 }
7915
7916 /* Returns true iff C is a unit DIE of some sort. */
7917
7918 static inline bool
7919 is_unit_die (dw_die_ref c)
7920 {
7921 return c && (c->die_tag == DW_TAG_compile_unit
7922 || c->die_tag == DW_TAG_partial_unit
7923 || c->die_tag == DW_TAG_type_unit
7924 || c->die_tag == DW_TAG_skeleton_unit);
7925 }
7926
7927 /* Returns true iff C is a namespace DIE. */
7928
7929 static inline bool
7930 is_namespace_die (dw_die_ref c)
7931 {
7932 return c && c->die_tag == DW_TAG_namespace;
7933 }
7934
7935 /* Return non-zero if this DIE is a template parameter. */
7936
7937 static inline bool
7938 is_template_parameter (dw_die_ref die)
7939 {
7940 switch (die->die_tag)
7941 {
7942 case DW_TAG_template_type_param:
7943 case DW_TAG_template_value_param:
7944 case DW_TAG_GNU_template_template_param:
7945 case DW_TAG_GNU_template_parameter_pack:
7946 return true;
7947 default:
7948 return false;
7949 }
7950 }
7951
7952 /* Return non-zero if this DIE represents a template instantiation. */
7953
7954 static inline bool
7955 is_template_instantiation (dw_die_ref die)
7956 {
7957 dw_die_ref c;
7958
7959 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7960 return false;
7961 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7962 return false;
7963 }
7964
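/* Generate a fresh internal label name from PREFIX and a running counter,
   and return it in newly xstrdup'ed memory.  */
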
7965 static char *
7966 gen_internal_sym (const char *prefix)
7967 {
7968 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7969
7970 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7971 return xstrdup (buf);
7972 }
7973
7974 /* Return non-zero if this DIE is a declaration. */
7975
7976 static int
7977 is_declaration_die (dw_die_ref die)
7978 {
7979 dw_attr_node *a;
7980 unsigned ix;
7981
7982 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7983 if (a->dw_attr == DW_AT_declaration)
7984 return 1;
7985
7986 return 0;
7987 }
7988
7989 /* Return non-zero if this DIE is nested inside a subprogram. */
7990
7991 static int
7992 is_nested_in_subprogram (dw_die_ref die)
7993 {
7994 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7995
7996 if (decl == NULL)
7997 decl = die;
7998 return local_scope_p (decl);
7999 }
8000
8001 /* Return non-zero if this DIE contains a defining declaration of a
8002 subprogram. */
8003
8004 static int
8005 contains_subprogram_definition (dw_die_ref die)
8006 {
8007 dw_die_ref c;
8008
8009 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8010 return 1;
8011 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8012 return 0;
8013 }
8014
8015 /* Return non-zero if this is a type DIE that should be moved to a
8016 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8017 unit type. */
8018
8019 static int
8020 should_move_die_to_comdat (dw_die_ref die)
8021 {
8022 switch (die->die_tag)
8023 {
8024 case DW_TAG_class_type:
8025 case DW_TAG_structure_type:
8026 case DW_TAG_enumeration_type:
8027 case DW_TAG_union_type:
8028 /* Don't move declarations, inlined instances, types nested in a
8029 subprogram, or types that contain subprogram definitions. */
8030 if (is_declaration_die (die)
8031 || get_AT (die, DW_AT_abstract_origin)
8032 || is_nested_in_subprogram (die)
8033 || contains_subprogram_definition (die))
8034 return 0;
8035 return 1;
8036 case DW_TAG_array_type:
8037 case DW_TAG_interface_type:
8038 case DW_TAG_pointer_type:
8039 case DW_TAG_reference_type:
8040 case DW_TAG_rvalue_reference_type:
8041 case DW_TAG_string_type:
8042 case DW_TAG_subroutine_type:
8043 case DW_TAG_ptr_to_member_type:
8044 case DW_TAG_set_type:
8045 case DW_TAG_subrange_type:
8046 case DW_TAG_base_type:
8047 case DW_TAG_const_type:
8048 case DW_TAG_file_type:
8049 case DW_TAG_packed_type:
8050 case DW_TAG_volatile_type:
8051 case DW_TAG_typedef:
8052 default:
8053 return 0;
8054 }
8055 }
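
/* For example, a complete named structure defined at namespace scope is
   broken out into its own type unit, while a mere declaration, an inlined
   instance (one with DW_AT_abstract_origin), a type nested inside a
   subprogram, or a type containing a subprogram definition stays in the
   main CU.  */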
8056
8057 /* Make a clone of DIE. */
8058
8059 static dw_die_ref
8060 clone_die (dw_die_ref die)
8061 {
8062 dw_die_ref clone = new_die_raw (die->die_tag);
8063 dw_attr_node *a;
8064 unsigned ix;
8065
8066 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8067 add_dwarf_attr (clone, a);
8068
8069 return clone;
8070 }
8071
8072 /* Make a clone of the tree rooted at DIE. */
8073
8074 static dw_die_ref
8075 clone_tree (dw_die_ref die)
8076 {
8077 dw_die_ref c;
8078 dw_die_ref clone = clone_die (die);
8079
8080 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8081
8082 return clone;
8083 }
8084
8085 /* Make a clone of DIE as a declaration. */
8086
8087 static dw_die_ref
8088 clone_as_declaration (dw_die_ref die)
8089 {
8090 dw_die_ref clone;
8091 dw_die_ref decl;
8092 dw_attr_node *a;
8093 unsigned ix;
8094
8095 /* If the DIE is already a declaration, just clone it. */
8096 if (is_declaration_die (die))
8097 return clone_die (die);
8098
8099 /* If the DIE is a specification, just clone its declaration DIE. */
8100 decl = get_AT_ref (die, DW_AT_specification);
8101 if (decl != NULL)
8102 {
8103 clone = clone_die (decl);
8104 if (die->comdat_type_p)
8105 add_AT_die_ref (clone, DW_AT_signature, die);
8106 return clone;
8107 }
8108
8109 clone = new_die_raw (die->die_tag);
8110
8111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8112 {
8113 /* We don't want to copy over all attributes.
8114 For example we don't want DW_AT_byte_size because otherwise we will no
8115 longer have a declaration and GDB will treat it as a definition. */
8116
8117 switch (a->dw_attr)
8118 {
8119 case DW_AT_abstract_origin:
8120 case DW_AT_artificial:
8121 case DW_AT_containing_type:
8122 case DW_AT_external:
8123 case DW_AT_name:
8124 case DW_AT_type:
8125 case DW_AT_virtuality:
8126 case DW_AT_linkage_name:
8127 case DW_AT_MIPS_linkage_name:
8128 add_dwarf_attr (clone, a);
8129 break;
8130 case DW_AT_byte_size:
8131 case DW_AT_alignment:
8132 default:
8133 break;
8134 }
8135 }
8136
8137 if (die->comdat_type_p)
8138 add_AT_die_ref (clone, DW_AT_signature, die);
8139
8140 add_AT_flag (clone, DW_AT_declaration, 1);
8141 return clone;
8142 }
8143
8144
8145 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8146
8147 struct decl_table_entry
8148 {
8149 dw_die_ref orig;
8150 dw_die_ref copy;
8151 };
8152
8153 /* Helpers to manipulate hash table of copied declarations. */
8154
8155 /* Hashtable helpers. */
8156
8157 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8158 {
8159 typedef die_struct *compare_type;
8160 static inline hashval_t hash (const decl_table_entry *);
8161 static inline bool equal (const decl_table_entry *, const die_struct *);
8162 };
8163
8164 inline hashval_t
8165 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8166 {
8167 return htab_hash_pointer (entry->orig);
8168 }
8169
8170 inline bool
8171 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8172 const die_struct *entry2)
8173 {
8174 return entry1->orig == entry2;
8175 }
8176
8177 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8178
8179 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8180 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8181 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8182 to check if the ancestor has already been copied into UNIT. */
8183
8184 static dw_die_ref
8185 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8186 decl_hash_type *decl_table)
8187 {
8188 dw_die_ref parent = die->die_parent;
8189 dw_die_ref new_parent = unit;
8190 dw_die_ref copy;
8191 decl_table_entry **slot = NULL;
8192 struct decl_table_entry *entry = NULL;
8193
8194 /* If DIE refers to a stub, unfold that so we get the appropriate
8195 DIE registered as orig in decl_table. */
8196 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8197 die = c;
8198
8199 if (decl_table)
8200 {
8201 /* Check if the entry has already been copied to UNIT. */
8202 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8203 INSERT);
8204 if (*slot != HTAB_EMPTY_ENTRY)
8205 {
8206 entry = *slot;
8207 return entry->copy;
8208 }
8209
8210 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8211 entry = XCNEW (struct decl_table_entry);
8212 entry->orig = die;
8213 entry->copy = NULL;
8214 *slot = entry;
8215 }
8216
8217 if (parent != NULL)
8218 {
8219 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8220 if (spec != NULL)
8221 parent = spec;
8222 if (!is_unit_die (parent))
8223 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8224 }
8225
8226 copy = clone_as_declaration (die);
8227 add_child_die (new_parent, copy);
8228
8229 if (decl_table)
8230 {
8231 /* Record the pointer to the copy. */
8232 entry->copy = copy;
8233 }
8234
8235 return copy;
8236 }
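
/* For example, copying a member DIE for a hypothetical N::S::m into a type
   unit first creates declaration clones of the namespace N and the
   structure S (reusing any clones already recorded in DECL_TABLE), and
   then hangs the declaration copy of m underneath them.  */
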
8237 /* Copy the declaration context to the new type unit DIE. This includes
8238 any surrounding namespace or type declarations. If the DIE has an
8239 AT_specification attribute, it also includes attributes and children
8240 attached to the specification, and returns a pointer to the original
8241 parent of the declaration DIE. Returns NULL otherwise. */
8242
8243 static dw_die_ref
8244 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8245 {
8246 dw_die_ref decl;
8247 dw_die_ref new_decl;
8248 dw_die_ref orig_parent = NULL;
8249
8250 decl = get_AT_ref (die, DW_AT_specification);
8251 if (decl == NULL)
8252 decl = die;
8253 else
8254 {
8255 unsigned ix;
8256 dw_die_ref c;
8257 dw_attr_node *a;
8258
8259 /* The original DIE will be changed to a declaration, and must
8260 be moved to be a child of the original declaration DIE. */
8261 orig_parent = decl->die_parent;
8262
8263 /* Copy the type node pointer from the new DIE to the original
8264 declaration DIE so we can forward references later. */
8265 decl->comdat_type_p = true;
8266 decl->die_id.die_type_node = die->die_id.die_type_node;
8267
8268 remove_AT (die, DW_AT_specification);
8269
8270 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8271 {
8272 if (a->dw_attr != DW_AT_name
8273 && a->dw_attr != DW_AT_declaration
8274 && a->dw_attr != DW_AT_external)
8275 add_dwarf_attr (die, a);
8276 }
8277
8278 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8279 }
8280
8281 if (decl->die_parent != NULL
8282 && !is_unit_die (decl->die_parent))
8283 {
8284 new_decl = copy_ancestor_tree (unit, decl, NULL);
8285 if (new_decl != NULL)
8286 {
8287 remove_AT (new_decl, DW_AT_signature);
8288 add_AT_specification (die, new_decl);
8289 }
8290 }
8291
8292 return orig_parent;
8293 }
8294
8295 /* Generate the skeleton ancestor tree for the given NODE, then clone
8296 the DIE and add the clone into the tree. */
8297
8298 static void
8299 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8300 {
8301 if (node->new_die != NULL)
8302 return;
8303
8304 node->new_die = clone_as_declaration (node->old_die);
8305
8306 if (node->parent != NULL)
8307 {
8308 generate_skeleton_ancestor_tree (node->parent);
8309 add_child_die (node->parent->new_die, node->new_die);
8310 }
8311 }
8312
8313 /* Generate a skeleton tree of DIEs containing any declarations that are
8314 found in the original tree. We traverse the tree looking for declaration
8315 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8316
8317 static void
8318 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8319 {
8320 skeleton_chain_node node;
8321 dw_die_ref c;
8322 dw_die_ref first;
8323 dw_die_ref prev = NULL;
8324 dw_die_ref next = NULL;
8325
8326 node.parent = parent;
8327
8328 first = c = parent->old_die->die_child;
8329 if (c)
8330 next = c->die_sib;
8331 if (c) do {
8332 if (prev == NULL || prev->die_sib == c)
8333 prev = c;
8334 c = next;
8335 next = (c == first ? NULL : c->die_sib);
8336 node.old_die = c;
8337 node.new_die = NULL;
8338 if (is_declaration_die (c))
8339 {
8340 if (is_template_instantiation (c))
8341 {
8342 /* Instantiated templates do not need to be cloned into the
8343 type unit. Just move the DIE and its children back to
8344 the skeleton tree (in the main CU). */
8345 remove_child_with_prev (c, prev);
8346 add_child_die (parent->new_die, c);
8347 c = prev;
8348 }
8349 else if (c->comdat_type_p)
8350 {
8351 /* This is the skeleton of a type broken out earlier by
8352 break_out_comdat_types. Clone the existing DIE, but keep the children
8353 under the original (which is in the main CU). */
8354 dw_die_ref clone = clone_die (c);
8355
8356 replace_child (c, clone, prev);
8357 generate_skeleton_ancestor_tree (parent);
8358 add_child_die (parent->new_die, c);
8359 c = clone;
8360 continue;
8361 }
8362 else
8363 {
8364 /* Clone the existing DIE, move the original to the skeleton
8365 tree (which is in the main CU), and put the clone, with
8366 all the original's children, where the original came from
8367 (which is about to be moved to the type unit). */
8368 dw_die_ref clone = clone_die (c);
8369 move_all_children (c, clone);
8370
8371 /* If the original has a DW_AT_object_pointer attribute,
8372 it would now point to a child DIE just moved to the
8373 cloned tree, so we need to remove that attribute from
8374 the original. */
8375 remove_AT (c, DW_AT_object_pointer);
8376
8377 replace_child (c, clone, prev);
8378 generate_skeleton_ancestor_tree (parent);
8379 add_child_die (parent->new_die, c);
8380 node.old_die = clone;
8381 node.new_die = c;
8382 c = clone;
8383 }
8384 }
8385 generate_skeleton_bottom_up (&node);
8386 } while (next != NULL);
8387 }
8388
8389 /* Wrapper function for generate_skeleton_bottom_up. */
8390
8391 static dw_die_ref
8392 generate_skeleton (dw_die_ref die)
8393 {
8394 skeleton_chain_node node;
8395
8396 node.old_die = die;
8397 node.new_die = NULL;
8398 node.parent = NULL;
8399
8400 /* If this type definition is nested inside another type,
8401 and is not an instantiation of a template, always leave
8402 at least a declaration in its place. */
8403 if (die->die_parent != NULL
8404 && is_type_die (die->die_parent)
8405 && !is_template_instantiation (die))
8406 node.new_die = clone_as_declaration (die);
8407
8408 generate_skeleton_bottom_up (&node);
8409 return node.new_die;
8410 }
8411
8412 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8413 declaration. The original DIE is moved to a new compile unit so that
8414 existing references to it follow it to the new location. If any of the
8415 original DIE's descendants is a declaration, we need to replace the
8416 original DIE with a skeleton tree and move the declarations back into the
8417 skeleton tree. */
8418
8419 static dw_die_ref
8420 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8421 dw_die_ref prev)
8422 {
8423 dw_die_ref skeleton, orig_parent;
8424
8425 /* Copy the declaration context to the type unit DIE. If the returned
8426 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8427 that DIE. */
8428 orig_parent = copy_declaration_context (unit, child);
8429
8430 skeleton = generate_skeleton (child);
8431 if (skeleton == NULL)
8432 remove_child_with_prev (child, prev);
8433 else
8434 {
8435 skeleton->comdat_type_p = true;
8436 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8437
8438 /* If the original DIE was a specification, we need to put
8439 the skeleton under the parent DIE of the declaration.
8440 This leaves the original declaration in the tree, but
8441 it will be pruned later since there are no longer any
8442 references to it. */
8443 if (orig_parent != NULL)
8444 {
8445 remove_child_with_prev (child, prev);
8446 add_child_die (orig_parent, skeleton);
8447 }
8448 else
8449 replace_child (child, skeleton, prev);
8450 }
8451
8452 return skeleton;
8453 }
8454
8455 static void
8456 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8457 comdat_type_node *type_node,
8458 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8459
8460 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8461 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8462 DWARF procedure references in the DW_AT_location attribute. */
8463
8464 static dw_die_ref
8465 copy_dwarf_procedure (dw_die_ref die,
8466 comdat_type_node *type_node,
8467 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8468 {
8469 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8470
8471 /* DWARF procedures are not supposed to have children... */
8472 gcc_assert (die->die_child == NULL);
8473
8474 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8475 gcc_assert (vec_safe_length (die->die_attr) == 1
8476 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8477
8478 /* Do not copy DWARF procedures more than once. */
8479 bool existed;
8480 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8481 if (existed)
8482 return die_copy;
8483
8484 die_copy = clone_die (die);
8485 add_child_die (type_node->root_die, die_copy);
8486 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8487 return die_copy;
8488 }
8489
8490 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8491 procedures in DIE's attributes. */
8492
8493 static void
8494 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8495 comdat_type_node *type_node,
8496 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8497 {
8498 dw_attr_node *a;
8499 unsigned i;
8500
8501 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8502 {
8503 dw_loc_descr_ref loc;
8504
8505 if (a->dw_attr_val.val_class != dw_val_class_loc)
8506 continue;
8507
8508 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8509 {
8510 switch (loc->dw_loc_opc)
8511 {
8512 case DW_OP_call2:
8513 case DW_OP_call4:
8514 case DW_OP_call_ref:
8515 gcc_assert (loc->dw_loc_oprnd1.val_class
8516 == dw_val_class_die_ref);
8517 loc->dw_loc_oprnd1.v.val_die_ref.die
8518 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8519 type_node,
8520 copied_dwarf_procs);
8521
8522 default:
8523 break;
8524 }
8525 }
8526 }
8527 }
8528
8529 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8530 rewrite references to point to the copies.
8531
8532 References are looked for among DIE's location-description attributes and,
8533 recursively, among those of all its children. COPIED_DWARF_PROCS is a
8534 mapping from old DWARF procedures to their copies; it is used to avoid
8535 copying the same DWARF procedure twice under TYPE_NODE. */
8536
8537 static void
8538 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8539 comdat_type_node *type_node,
8540 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8541 {
8542 dw_die_ref c;
8543
8544 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8545 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8546 type_node,
8547 copied_dwarf_procs));
8548 }
8549
8550 /* Traverse the DIE and set up additional .debug_types or .debug_info
8551 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8552 section. */
8553
8554 static void
8555 break_out_comdat_types (dw_die_ref die)
8556 {
8557 dw_die_ref c;
8558 dw_die_ref first;
8559 dw_die_ref prev = NULL;
8560 dw_die_ref next = NULL;
8561 dw_die_ref unit = NULL;
8562
8563 first = c = die->die_child;
8564 if (c)
8565 next = c->die_sib;
8566 if (c) do {
8567 if (prev == NULL || prev->die_sib == c)
8568 prev = c;
8569 c = next;
8570 next = (c == first ? NULL : c->die_sib);
8571 if (should_move_die_to_comdat (c))
8572 {
8573 dw_die_ref replacement;
8574 comdat_type_node *type_node;
8575
8576 /* Break out nested types into their own type units. */
8577 break_out_comdat_types (c);
8578
8579 /* Create a new type unit DIE as the root for the new tree. */
8580 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8581 add_AT_unsigned (unit, DW_AT_language,
8582 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8583
8584 /* Add the new unit's type DIE into the comdat type list. */
8585 type_node = ggc_cleared_alloc<comdat_type_node> ();
8586 type_node->root_die = unit;
8587 type_node->next = comdat_type_list;
8588 comdat_type_list = type_node;
8589
8590 /* Generate the type signature. */
8591 generate_type_signature (c, type_node);
8592
8593 /* Copy the declaration context, attributes, and children of the
8594 declaration into the new type unit DIE, then remove this DIE
8595 from the main CU (or replace it with a skeleton if necessary). */
8596 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8597 type_node->skeleton_die = replacement;
8598
8599 /* Add the DIE to the new compunit. */
8600 add_child_die (unit, c);
8601
8602 /* Types can reference DWARF procedures for type size or data location
8603 expressions. Calls in DWARF expressions cannot target procedures
8604 that are not in the same section. So we must copy DWARF procedures
8605 along with this type and then rewrite references to them. */
8606 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8607 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8608
8609 if (replacement != NULL)
8610 c = replacement;
8611 }
8612 else if (c->die_tag == DW_TAG_namespace
8613 || c->die_tag == DW_TAG_class_type
8614 || c->die_tag == DW_TAG_structure_type
8615 || c->die_tag == DW_TAG_union_type)
8616 {
8617 /* Look for nested types that can be broken out. */
8618 break_out_comdat_types (c);
8619 }
8620 } while (next != NULL);
8621 }
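
/* The net effect: each type deemed worthy of a COMDAT section ends up in
   its own DW_TAG_type_unit (together with any DWARF procedures it uses),
   while the main CU keeps at most a small skeleton that refers to the type
   unit through its signature, allowing the linker to deduplicate identical
   type units across object files.  */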
8622
8623 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8624 Enter all the cloned children into the hash table decl_table. */
8625
8626 static dw_die_ref
8627 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8628 {
8629 dw_die_ref c;
8630 dw_die_ref clone;
8631 struct decl_table_entry *entry;
8632 decl_table_entry **slot;
8633
8634 if (die->die_tag == DW_TAG_subprogram)
8635 clone = clone_as_declaration (die);
8636 else
8637 clone = clone_die (die);
8638
8639 slot = decl_table->find_slot_with_hash (die,
8640 htab_hash_pointer (die), INSERT);
8641
8642 /* Assert that DIE isn't in the hash table yet. If it were already there,
8643 its ancestors would necessarily be there as well, and in that case
8644 clone_tree_partial wouldn't have been called. */
8645 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8646
8647 entry = XCNEW (struct decl_table_entry);
8648 entry->orig = die;
8649 entry->copy = clone;
8650 *slot = entry;
8651
8652 if (die->die_tag != DW_TAG_subprogram)
8653 FOR_EACH_CHILD (die, c,
8654 add_child_die (clone, clone_tree_partial (c, decl_table)));
8655
8656 return clone;
8657 }
8658
8659 /* Walk the DIE and its children, looking for references to incomplete
8660 or trivial types that are unmarked (i.e., that are not in the current
8661 type_unit). */
8662
8663 static void
8664 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8665 {
8666 dw_die_ref c;
8667 dw_attr_node *a;
8668 unsigned ix;
8669
8670 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8671 {
8672 if (AT_class (a) == dw_val_class_die_ref)
8673 {
8674 dw_die_ref targ = AT_ref (a);
8675 decl_table_entry **slot;
8676 struct decl_table_entry *entry;
8677
8678 if (targ->die_mark != 0 || targ->comdat_type_p)
8679 continue;
8680
8681 slot = decl_table->find_slot_with_hash (targ,
8682 htab_hash_pointer (targ),
8683 INSERT);
8684
8685 if (*slot != HTAB_EMPTY_ENTRY)
8686 {
8687 /* TARG has already been copied, so we just need to
8688 modify the reference to point to the copy. */
8689 entry = *slot;
8690 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8691 }
8692 else
8693 {
8694 dw_die_ref parent = unit;
8695 dw_die_ref copy = clone_die (targ);
8696
8697 /* Record in DECL_TABLE that TARG has been copied.
8698 Need to do this now, before the recursive call,
8699 because DECL_TABLE may be expanded and SLOT
8700 would no longer be a valid pointer. */
8701 entry = XCNEW (struct decl_table_entry);
8702 entry->orig = targ;
8703 entry->copy = copy;
8704 *slot = entry;
8705
8706 /* If TARG is not a declaration DIE, we need to copy its
8707 children. */
8708 if (!is_declaration_die (targ))
8709 {
8710 FOR_EACH_CHILD (
8711 targ, c,
8712 add_child_die (copy,
8713 clone_tree_partial (c, decl_table)));
8714 }
8715
8716 /* Make sure the cloned tree is marked as part of the
8717 type unit. */
8718 mark_dies (copy);
8719
8720 /* If TARG has surrounding context, copy its ancestor tree
8721 into the new type unit. */
8722 if (targ->die_parent != NULL
8723 && !is_unit_die (targ->die_parent))
8724 parent = copy_ancestor_tree (unit, targ->die_parent,
8725 decl_table);
8726
8727 add_child_die (parent, copy);
8728 a->dw_attr_val.v.val_die_ref.die = copy;
8729
8730 /* Make sure the newly-copied DIE is walked. If it was
8731 installed in a previously-added context, it won't
8732 get visited otherwise. */
8733 if (parent != unit)
8734 {
8735 /* Find the highest point of the newly-added tree,
8736 mark each node along the way, and walk from there. */
8737 parent->die_mark = 1;
8738 while (parent->die_parent
8739 && parent->die_parent->die_mark == 0)
8740 {
8741 parent = parent->die_parent;
8742 parent->die_mark = 1;
8743 }
8744 copy_decls_walk (unit, parent, decl_table);
8745 }
8746 }
8747 }
8748 }
8749
8750 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8751 }
8752
8753 /* Collect the skeleton DIEs within DIE that were already created by
8754 break_out_comdat_types, and record them in DECL_TABLE. */
8755
8756 static void
8757 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8758 {
8759 dw_die_ref c;
8760
8761 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8762 {
8763 dw_die_ref targ = AT_ref (a);
8764 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8765 decl_table_entry **slot
8766 = decl_table->find_slot_with_hash (targ,
8767 htab_hash_pointer (targ),
8768 INSERT);
8769 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8770 /* Record in DECL_TABLE that TARG has been already copied
8771 by remove_child_or_replace_with_skeleton. */
8772 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8773 entry->orig = targ;
8774 entry->copy = die;
8775 *slot = entry;
8776 }
8777 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8778 }
8779
8780 /* Copy declarations for "unworthy" types into the new comdat section.
8781 Incomplete types, modified types, and certain other types aren't broken
8782 out into comdat sections of their own, so they don't have a signature,
8783 and we need to copy the declaration into the same section so that we
8784 don't have an external reference. */
8785
8786 static void
8787 copy_decls_for_unworthy_types (dw_die_ref unit)
8788 {
8789 mark_dies (unit);
8790 decl_hash_type decl_table (10);
8791 collect_skeleton_dies (unit, &decl_table);
8792 copy_decls_walk (unit, unit, &decl_table);
8793 unmark_dies (unit);
8794 }
8795
8796 /* Traverse the DIE and add a sibling attribute if it may have the
8797 effect of speeding up access to siblings. To save some space,
8798 avoid generating sibling attributes for DIEs without children. */
8799
8800 static void
8801 add_sibling_attributes (dw_die_ref die)
8802 {
8803 dw_die_ref c;
8804
8805 if (! die->die_child)
8806 return;
8807
8808 if (die->die_parent && die != die->die_parent->die_child)
8809 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8810
8811 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8812 }
8813
8814 /* Output all location lists for the DIE and its children. */
8815
8816 static void
8817 output_location_lists (dw_die_ref die)
8818 {
8819 dw_die_ref c;
8820 dw_attr_node *a;
8821 unsigned ix;
8822
8823 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8824 if (AT_class (a) == dw_val_class_loc_list)
8825 output_loc_list (AT_loc_list (a));
8826
8827 FOR_EACH_CHILD (die, c, output_location_lists (c));
8828 }
8829
8830 /* During assign_location_list_indexes and output_loclists_offsets this is
8831 the current index; afterwards it is the number of assigned indexes (i.e.
8832 how large the .debug_loclists* offset table should be). */
8833 static unsigned int loc_list_idx;
8834
8835 /* Output all location list offsets for the DIE and its children. */
8836
8837 static void
8838 output_loclists_offsets (dw_die_ref die)
8839 {
8840 dw_die_ref c;
8841 dw_attr_node *a;
8842 unsigned ix;
8843
8844 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8845 if (AT_class (a) == dw_val_class_loc_list)
8846 {
8847 dw_loc_list_ref l = AT_loc_list (a);
8848 if (l->offset_emitted)
8849 continue;
8850 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8851 loc_section_label, NULL);
8852 gcc_assert (l->hash == loc_list_idx);
8853 loc_list_idx++;
8854 l->offset_emitted = true;
8855 }
8856
8857 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8858 }
8859
8860 /* Recursively set indexes of location lists. */
8861
8862 static void
8863 assign_location_list_indexes (dw_die_ref die)
8864 {
8865 dw_die_ref c;
8866 dw_attr_node *a;
8867 unsigned ix;
8868
8869 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8870 if (AT_class (a) == dw_val_class_loc_list)
8871 {
8872 dw_loc_list_ref list = AT_loc_list (a);
8873 if (!list->num_assigned)
8874 {
8875 list->num_assigned = true;
8876 list->hash = loc_list_idx++;
8877 }
8878 }
8879
8880 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8881 }
8882
8883 /* We want to limit the number of external references, because they are
8884 larger than local references: a relocation takes multiple words, and
8885 even a sig8 reference is always eight bytes, whereas a local reference
8886 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8887 So if we encounter multiple external references to the same type DIE, we
8888 make a local typedef stub for it and redirect all references there.
8889
8890 This is the element of the hash table for keeping track of these
8891 references. */
8892
8893 struct external_ref
8894 {
8895 dw_die_ref type;
8896 dw_die_ref stub;
8897 unsigned n_refs;
8898 };
8899
8900 /* Hashtable helpers. */
8901
8902 struct external_ref_hasher : free_ptr_hash <external_ref>
8903 {
8904 static inline hashval_t hash (const external_ref *);
8905 static inline bool equal (const external_ref *, const external_ref *);
8906 };
8907
8908 inline hashval_t
8909 external_ref_hasher::hash (const external_ref *r)
8910 {
8911 dw_die_ref die = r->type;
8912 hashval_t h = 0;
8913
8914 /* We can't use the address of the DIE for hashing, because
8915 that will make the order of the stub DIEs non-deterministic. */
8916 if (! die->comdat_type_p)
8917 /* We have a symbol; use it to compute a hash. */
8918 h = htab_hash_string (die->die_id.die_symbol);
8919 else
8920 {
8921 /* We have a type signature; use a subset of the bits as the hash.
8922 The 8-byte signature is at least as large as hashval_t. */
8923 comdat_type_node *type_node = die->die_id.die_type_node;
8924 memcpy (&h, type_node->signature, sizeof (h));
8925 }
8926 return h;
8927 }
8928
8929 inline bool
8930 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8931 {
8932 return r1->type == r2->type;
8933 }
8934
8935 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8936
8937 /* Return a pointer to the external_ref for references to DIE. */
8938
8939 static struct external_ref *
8940 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8941 {
8942 struct external_ref ref, *ref_p;
8943 external_ref **slot;
8944
8945 ref.type = die;
8946 slot = map->find_slot (&ref, INSERT);
8947 if (*slot != HTAB_EMPTY_ENTRY)
8948 return *slot;
8949
8950 ref_p = XCNEW (struct external_ref);
8951 ref_p->type = die;
8952 *slot = ref_p;
8953 return ref_p;
8954 }
8955
8956 /* Subroutine of optimize_external_refs, below.
8957
8958 If we see a type skeleton, record it as our stub. If we see external
8959 references, remember how many we've seen. */
8960
8961 static void
8962 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8963 {
8964 dw_die_ref c;
8965 dw_attr_node *a;
8966 unsigned ix;
8967 struct external_ref *ref_p;
8968
8969 if (is_type_die (die)
8970 && (c = get_AT_ref (die, DW_AT_signature)))
8971 {
8972 /* This is a local skeleton; use it for local references. */
8973 ref_p = lookup_external_ref (map, c);
8974 ref_p->stub = die;
8975 }
8976
8977 /* Scan the DIE references, and remember any that refer to DIEs from
8978 other CUs (i.e. those which are not marked). */
8979 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8980 if (AT_class (a) == dw_val_class_die_ref
8981 && (c = AT_ref (a))->die_mark == 0
8982 && is_type_die (c))
8983 {
8984 ref_p = lookup_external_ref (map, c);
8985 ref_p->n_refs++;
8986 }
8987
8988 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8989 }
8990
8991 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8992 points to an external_ref, DATA is the CU we're processing. If we don't
8993 already have a local stub, and we have multiple refs, build a stub. */
8994
8995 int
8996 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8997 {
8998 struct external_ref *ref_p = *slot;
8999
9000 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9001 {
9002 /* We have multiple references to this type, so build a small stub.
9003 Both of these forms are a bit dodgy from the perspective of the
9004 DWARF standard, since technically they should have names. */
9005 dw_die_ref cu = data;
9006 dw_die_ref type = ref_p->type;
9007 dw_die_ref stub = NULL;
9008
9009 if (type->comdat_type_p)
9010 {
9011 /* If we refer to this type via sig8, use AT_signature. */
9012 stub = new_die (type->die_tag, cu, NULL_TREE);
9013 add_AT_die_ref (stub, DW_AT_signature, type);
9014 }
9015 else
9016 {
9017 /* Otherwise, use a typedef with no name. */
9018 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9019 add_AT_die_ref (stub, DW_AT_type, type);
9020 }
9021
9022 stub->die_mark++;
9023 ref_p->stub = stub;
9024 }
9025 return 1;
9026 }
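
/* For illustration: when this unit refers more than once to a type DIE in
   a comdat type unit, the stub built above is an unnamed DIE of the same
   tag carrying only DW_AT_signature; for any other external type it is an
   unnamed DW_TAG_typedef whose DW_AT_type points at the external DIE.
   build_abbrev_table then redirects the unit's references to the stub, so
   they can use a short local DW_FORM_ref instead of DW_FORM_ref_sig8 or
   DW_FORM_ref_addr.  */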
9027
9028 /* DIE is a unit; look through all the DIE references to see if there are
9029 any external references to types, and if so, create local stubs for
9030 them which will be applied in build_abbrev_table. This is useful because
9031 references to local DIEs are smaller. */
9032
9033 static external_ref_hash_type *
9034 optimize_external_refs (dw_die_ref die)
9035 {
9036 external_ref_hash_type *map = new external_ref_hash_type (10);
9037 optimize_external_refs_1 (die, map);
9038 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9039 return map;
9040 }
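
/* A minimal usage sketch; the unit-output code elsewhere in this file does
   roughly the following (details there may differ):

     external_ref_hash_type *extern_map = optimize_external_refs (unit_die);
     build_abbrev_table (unit_die, extern_map);
     delete extern_map;

   i.e. the map only needs to live across the build_abbrev_table call that
   applies the stubs.  */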
9041
9042 /* The following 4 variables are temporaries that are computed only during the
9043 build_abbrev_table call and used and released during the following
9044 optimize_abbrev_table call. */
9045
9046 /* First abbrev_id that can be optimized based on usage. */
9047 static unsigned int abbrev_opt_start;
9048
9049 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9050 abbrev_id smaller than this, because they must already be sized
9051 during build_abbrev_table). */
9052 static unsigned int abbrev_opt_base_type_end;
9053
9054 /* Vector of usage counts during build_abbrev_table. Indexed by
9055 abbrev_id - abbrev_opt_start. */
9056 static vec<unsigned int> abbrev_usage_count;
9057
9058 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9059 static vec<dw_die_ref> sorted_abbrev_dies;
9060
9061 /* The format of each DIE (and its attribute value pairs) is encoded in an
9062 abbreviation table. This routine builds the abbreviation table and assigns
9063 a unique abbreviation id for each abbreviation entry. The children of each
9064 die are visited recursively. */
9065
9066 static void
9067 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9068 {
9069 unsigned int abbrev_id = 0;
9070 dw_die_ref c;
9071 dw_attr_node *a;
9072 unsigned ix;
9073 dw_die_ref abbrev;
9074
9075 /* Scan the DIE references, and replace any that refer to
9076 DIEs from other CUs (i.e. those which are not marked) with
9077 the local stubs we built in optimize_external_refs. */
9078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9079 if (AT_class (a) == dw_val_class_die_ref
9080 && (c = AT_ref (a))->die_mark == 0)
9081 {
9082 struct external_ref *ref_p;
9083 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9084
9085 if (is_type_die (c)
9086 && (ref_p = lookup_external_ref (extern_map, c))
9087 && ref_p->stub && ref_p->stub != die)
9088 {
9089 gcc_assert (a->dw_attr != DW_AT_signature);
9090 change_AT_die_ref (a, ref_p->stub);
9091 }
9092 else
9093 /* We aren't changing this reference, so mark it external. */
9094 set_AT_ref_external (a, 1);
9095 }
9096
9097 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9098 {
9099 dw_attr_node *die_a, *abbrev_a;
9100 unsigned ix;
9101 bool ok = true;
9102
9103 if (abbrev_id == 0)
9104 continue;
9105 if (abbrev->die_tag != die->die_tag)
9106 continue;
9107 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9108 continue;
9109
9110 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9111 continue;
9112
9113 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9114 {
9115 abbrev_a = &(*abbrev->die_attr)[ix];
9116 if ((abbrev_a->dw_attr != die_a->dw_attr)
9117 || (value_format (abbrev_a) != value_format (die_a)))
9118 {
9119 ok = false;
9120 break;
9121 }
9122 }
9123 if (ok)
9124 break;
9125 }
9126
9127 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9128 {
9129 vec_safe_push (abbrev_die_table, die);
9130 if (abbrev_opt_start)
9131 abbrev_usage_count.safe_push (0);
9132 }
9133 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9134 {
9135 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9136 sorted_abbrev_dies.safe_push (die);
9137 }
9138
9139 die->die_abbrev = abbrev_id;
9140 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9141 }
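
/* Note that the abbreviation lookup above is a linear scan over the
   existing entries, comparing the tag, the has-children flag and every
   (attribute, form) pair; a DIE that matches no existing entry gets a
   brand new abbreviation appended to abbrev_die_table.  */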
9142
9143 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9144 by die_abbrev's usage count, from the most commonly used
9145 abbreviation to the least. */
9146
9147 static int
9148 die_abbrev_cmp (const void *p1, const void *p2)
9149 {
9150 dw_die_ref die1 = *(const dw_die_ref *) p1;
9151 dw_die_ref die2 = *(const dw_die_ref *) p2;
9152
9153 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9154 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9155
9156 if (die1->die_abbrev >= abbrev_opt_base_type_end
9157 && die2->die_abbrev >= abbrev_opt_base_type_end)
9158 {
9159 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9160 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9161 return -1;
9162 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9163 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9164 return 1;
9165 }
9166
9167 /* Stabilize the sort. */
9168 if (die1->die_abbrev < die2->die_abbrev)
9169 return -1;
9170 if (die1->die_abbrev > die2->die_abbrev)
9171 return 1;
9172
9173 return 0;
9174 }
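
/* Sorting by usage pays off because each DIE starts with its abbrev code
   encoded as a uleb128: codes 1-127 take one byte, 128-16383 take two,
   and so on.  Handing the most frequently used abbreviations the smallest
   codes therefore minimizes .debug_info size, which is also why
   optimize_abbrev_table below only runs when there are more than 127
   abbreviations (or for DWARF 5, where DW_FORM_implicit_const adds a
   further saving).  */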
9175
9176 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9177 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9178 into dw_val_class_const_implicit or
9179 dw_val_class_unsigned_const_implicit. */
9180
9181 static void
9182 optimize_implicit_const (unsigned int first_id, unsigned int end,
9183 vec<bool> &implicit_consts)
9184 {
9185 /* It never makes sense if there is just one DIE using the abbreviation. */
9186 if (end < first_id + 2)
9187 return;
9188
9189 dw_attr_node *a;
9190 unsigned ix, i;
9191 dw_die_ref die = sorted_abbrev_dies[first_id];
9192 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9193 if (implicit_consts[ix])
9194 {
9195 enum dw_val_class new_class = dw_val_class_none;
9196 switch (AT_class (a))
9197 {
9198 case dw_val_class_unsigned_const:
9199 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9200 continue;
9201
9202 /* The .debug_abbrev section will grow by
9203 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9204 in all the DIEs using that abbreviation. */
9205 if (constant_size (AT_unsigned (a)) * (end - first_id)
9206 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9207 continue;
9208
9209 new_class = dw_val_class_unsigned_const_implicit;
9210 break;
9211
9212 case dw_val_class_const:
9213 new_class = dw_val_class_const_implicit;
9214 break;
9215
9216 case dw_val_class_file:
9217 new_class = dw_val_class_file_implicit;
9218 break;
9219
9220 default:
9221 continue;
9222 }
9223 for (i = first_id; i < end; i++)
9224 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9225 = new_class;
9226 }
9227 }
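
/* A worked example of the trade-off above: a value of 300 stored as an
   unsigned constant takes constant_size (300) == 2 bytes in every DIE.
   If the abbreviation (with that same value) is shared by, say, 10 DIEs,
   DW_FORM_implicit_const moves the value into .debug_abbrev once as a
   size_of_sleb128 (300) == 2 byte sleb128, saving 20 - 2 = 18 bytes
   overall.  With only one or two users the sleb128 can cost as much as
   it saves, hence the size checks in the unsigned case above.  */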
9228
9229 /* Attempt to optimize the abbreviation table for abbreviations numbered
9230 abbrev_opt_start and above. */
9231
9232 static void
9233 optimize_abbrev_table (void)
9234 {
9235 if (abbrev_opt_start
9236 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9237 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9238 {
9239 auto_vec<bool, 32> implicit_consts;
9240 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9241
9242 unsigned int abbrev_id = abbrev_opt_start - 1;
9243 unsigned int first_id = ~0U;
9244 unsigned int last_abbrev_id = 0;
9245 unsigned int i;
9246 dw_die_ref die;
9247 if (abbrev_opt_base_type_end > abbrev_opt_start)
9248 abbrev_id = abbrev_opt_base_type_end - 1;
9249 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9250 most commonly used abbreviations come first. */
9251 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9252 {
9253 dw_attr_node *a;
9254 unsigned ix;
9255
9256 /* If calc_base_type_die_sizes has been called, the CU and
9257 base types after it can't be optimized, because we've already
9258 calculated their DIE offsets. We've sorted them first. */
9259 if (die->die_abbrev < abbrev_opt_base_type_end)
9260 continue;
9261 if (die->die_abbrev != last_abbrev_id)
9262 {
9263 last_abbrev_id = die->die_abbrev;
9264 if (dwarf_version >= 5 && first_id != ~0U)
9265 optimize_implicit_const (first_id, i, implicit_consts);
9266 abbrev_id++;
9267 (*abbrev_die_table)[abbrev_id] = die;
9268 if (dwarf_version >= 5)
9269 {
9270 first_id = i;
9271 implicit_consts.truncate (0);
9272
9273 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9274 switch (AT_class (a))
9275 {
9276 case dw_val_class_const:
9277 case dw_val_class_unsigned_const:
9278 case dw_val_class_file:
9279 implicit_consts.safe_push (true);
9280 break;
9281 default:
9282 implicit_consts.safe_push (false);
9283 break;
9284 }
9285 }
9286 }
9287 else if (dwarf_version >= 5)
9288 {
9289 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9290 if (!implicit_consts[ix])
9291 continue;
9292 else
9293 {
9294 dw_attr_node *other_a
9295 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9296 if (!dw_val_equal_p (&a->dw_attr_val,
9297 &other_a->dw_attr_val))
9298 implicit_consts[ix] = false;
9299 }
9300 }
9301 die->die_abbrev = abbrev_id;
9302 }
9303 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9304 if (dwarf_version >= 5 && first_id != ~0U)
9305 optimize_implicit_const (first_id, i, implicit_consts);
9306 }
9307
9308 abbrev_opt_start = 0;
9309 abbrev_opt_base_type_end = 0;
9310 abbrev_usage_count.release ();
9311 sorted_abbrev_dies.release ();
9312 }
9313 \f
9314 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9315
9316 static int
9317 constant_size (unsigned HOST_WIDE_INT value)
9318 {
9319 int log;
9320
9321 if (value == 0)
9322 log = 0;
9323 else
9324 log = floor_log2 (value);
9325
9326 log = log / 8;
9327 log = 1 << (floor_log2 (log) + 1);
9328
9329 return log;
9330 }
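
/* For example: constant_size (0) == 1, constant_size (255) == 1,
   constant_size (256) == 2, constant_size (0x10000) == 4 and
   constant_size ((unsigned HOST_WIDE_INT) 1 << 32) == 8, matching the
   DW_FORM_data1/2/4/8 choices made in value_format below.  */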
9331
9332 /* Return the size of a DIE as it is represented in the
9333 .debug_info section. */
9334
9335 static unsigned long
9336 size_of_die (dw_die_ref die)
9337 {
9338 unsigned long size = 0;
9339 dw_attr_node *a;
9340 unsigned ix;
9341 enum dwarf_form form;
9342
9343 size += size_of_uleb128 (die->die_abbrev);
9344 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9345 {
9346 switch (AT_class (a))
9347 {
9348 case dw_val_class_addr:
9349 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9350 {
9351 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9352 size += size_of_uleb128 (AT_index (a));
9353 }
9354 else
9355 size += DWARF2_ADDR_SIZE;
9356 break;
9357 case dw_val_class_offset:
9358 size += DWARF_OFFSET_SIZE;
9359 break;
9360 case dw_val_class_loc:
9361 {
9362 unsigned long lsize = size_of_locs (AT_loc (a));
9363
9364 /* Block length. */
9365 if (dwarf_version >= 4)
9366 size += size_of_uleb128 (lsize);
9367 else
9368 size += constant_size (lsize);
9369 size += lsize;
9370 }
9371 break;
9372 case dw_val_class_loc_list:
9373 if (dwarf_split_debug_info && dwarf_version >= 5)
9374 {
9375 gcc_assert (AT_loc_list (a)->num_assigned);
9376 size += size_of_uleb128 (AT_loc_list (a)->hash);
9377 }
9378 else
9379 size += DWARF_OFFSET_SIZE;
9380 break;
9381 case dw_val_class_view_list:
9382 size += DWARF_OFFSET_SIZE;
9383 break;
9384 case dw_val_class_range_list:
9385 if (value_format (a) == DW_FORM_rnglistx)
9386 {
9387 gcc_assert (rnglist_idx);
9388 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9389 size += size_of_uleb128 (r->idx);
9390 }
9391 else
9392 size += DWARF_OFFSET_SIZE;
9393 break;
9394 case dw_val_class_const:
9395 size += size_of_sleb128 (AT_int (a));
9396 break;
9397 case dw_val_class_unsigned_const:
9398 {
9399 int csize = constant_size (AT_unsigned (a));
9400 if (dwarf_version == 3
9401 && a->dw_attr == DW_AT_data_member_location
9402 && csize >= 4)
9403 size += size_of_uleb128 (AT_unsigned (a));
9404 else
9405 size += csize;
9406 }
9407 break;
9408 case dw_val_class_symview:
9409 if (symview_upper_bound <= 0xff)
9410 size += 1;
9411 else if (symview_upper_bound <= 0xffff)
9412 size += 2;
9413 else if (symview_upper_bound <= 0xffffffff)
9414 size += 4;
9415 else
9416 size += 8;
9417 break;
9418 case dw_val_class_const_implicit:
9419 case dw_val_class_unsigned_const_implicit:
9420 case dw_val_class_file_implicit:
9421 /* These occupy no size in the DIE, just an extra sleb128 in
9422 .debug_abbrev. */
9423 break;
9424 case dw_val_class_const_double:
9425 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9426 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9427 size++; /* block */
9428 break;
9429 case dw_val_class_wide_int:
9430 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9431 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9432 if (get_full_len (*a->dw_attr_val.v.val_wide)
9433 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9434 size++; /* block */
9435 break;
9436 case dw_val_class_vec:
9437 size += constant_size (a->dw_attr_val.v.val_vec.length
9438 * a->dw_attr_val.v.val_vec.elt_size)
9439 + a->dw_attr_val.v.val_vec.length
9440 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9441 break;
9442 case dw_val_class_flag:
9443 if (dwarf_version >= 4)
9444 /* Currently all add_AT_flag calls pass in 1 as last argument,
9445 so DW_FORM_flag_present can be used. If that ever changes,
9446 we'll need to use DW_FORM_flag and have some optimization
9447 in build_abbrev_table that will change those to
9448 DW_FORM_flag_present if it is set to 1 in all DIEs using
9449 the same abbrev entry. */
9450 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9451 else
9452 size += 1;
9453 break;
9454 case dw_val_class_die_ref:
9455 if (AT_ref_external (a))
9456 {
9457 /* A reference to a comdat type uses DW_FORM_ref_sig8; any other
9458 external reference uses DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9459 is sized by target address length, whereas in DWARF3 and later
9460 it's always sized as an offset. */
9461 if (AT_ref (a)->comdat_type_p)
9462 size += DWARF_TYPE_SIGNATURE_SIZE;
9463 else if (dwarf_version == 2)
9464 size += DWARF2_ADDR_SIZE;
9465 else
9466 size += DWARF_OFFSET_SIZE;
9467 }
9468 else
9469 size += DWARF_OFFSET_SIZE;
9470 break;
9471 case dw_val_class_fde_ref:
9472 size += DWARF_OFFSET_SIZE;
9473 break;
9474 case dw_val_class_lbl_id:
9475 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9476 {
9477 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9478 size += size_of_uleb128 (AT_index (a));
9479 }
9480 else
9481 size += DWARF2_ADDR_SIZE;
9482 break;
9483 case dw_val_class_lineptr:
9484 case dw_val_class_macptr:
9485 case dw_val_class_loclistsptr:
9486 size += DWARF_OFFSET_SIZE;
9487 break;
9488 case dw_val_class_str:
9489 form = AT_string_form (a);
9490 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9491 size += DWARF_OFFSET_SIZE;
9492 else if (form == dwarf_FORM (DW_FORM_strx))
9493 size += size_of_uleb128 (AT_index (a));
9494 else
9495 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9496 break;
9497 case dw_val_class_file:
9498 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9499 break;
9500 case dw_val_class_data8:
9501 size += 8;
9502 break;
9503 case dw_val_class_vms_delta:
9504 size += DWARF_OFFSET_SIZE;
9505 break;
9506 case dw_val_class_high_pc:
9507 size += DWARF2_ADDR_SIZE;
9508 break;
9509 case dw_val_class_discr_value:
9510 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9511 break;
9512 case dw_val_class_discr_list:
9513 {
9514 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9515
9516 /* This is a block, so we have the block length and then its
9517 data. */
9518 size += constant_size (block_size) + block_size;
9519 }
9520 break;
9521 default:
9522 gcc_unreachable ();
9523 }
9524 }
9525
9526 return size;
9527 }
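
/* size_of_die must agree byte-for-byte with the forms chosen by
   value_format and with what output_die actually emits: calc_die_sizes
   below bakes these sizes into die_offset values that DIE references and
   the unit header length rely on, so any mismatch corrupts the section.  */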
9528
9529 /* Size the debugging information associated with a given DIE. Visits the
9530 DIE's children recursively. Updates the global variable next_die_offset
9531 each time through. Uses the current value of next_die_offset to update the
9532 die_offset field in each DIE. */
9533
9534 static void
9535 calc_die_sizes (dw_die_ref die)
9536 {
9537 dw_die_ref c;
9538
9539 gcc_assert (die->die_offset == 0
9540 || (unsigned long int) die->die_offset == next_die_offset);
9541 die->die_offset = next_die_offset;
9542 next_die_offset += size_of_die (die);
9543
9544 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9545
9546 if (die->die_child != NULL)
9547 /* Count the null byte used to terminate sibling lists. */
9548 next_die_offset += 1;
9549 }
9550
9551 /* Size just the base type children at the start of the CU.
9552 This is needed because build_abbrev_table needs to size locs
9553 and sizing of type-based stack ops needs to know die_offset
9554 values for the base types. */
9555
9556 static void
9557 calc_base_type_die_sizes (void)
9558 {
9559 unsigned long die_offset = (dwarf_split_debug_info
9560 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9561 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9562 unsigned int i;
9563 dw_die_ref base_type;
9564 #if ENABLE_ASSERT_CHECKING
9565 dw_die_ref prev = comp_unit_die ()->die_child;
9566 #endif
9567
9568 die_offset += size_of_die (comp_unit_die ());
9569 for (i = 0; base_types.iterate (i, &base_type); i++)
9570 {
9571 #if ENABLE_ASSERT_CHECKING
9572 gcc_assert (base_type->die_offset == 0
9573 && prev->die_sib == base_type
9574 && base_type->die_child == NULL
9575 && base_type->die_abbrev);
9576 prev = base_type;
9577 #endif
9578 if (abbrev_opt_start
9579 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9580 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9581 base_type->die_offset = die_offset;
9582 die_offset += size_of_die (base_type);
9583 }
9584 }
9585
9586 /* Set the marks for a die and its children. We do this so
9587 that we know whether or not a reference needs to use FORM_ref_addr; only
9588 DIEs in the same CU will be marked. We used to clear out the offset
9589 and use that as the flag, but ran into ordering problems. */
9590
9591 static void
9592 mark_dies (dw_die_ref die)
9593 {
9594 dw_die_ref c;
9595
9596 gcc_assert (!die->die_mark);
9597
9598 die->die_mark = 1;
9599 FOR_EACH_CHILD (die, c, mark_dies (c));
9600 }
9601
9602 /* Clear the marks for a die and its children. */
9603
9604 static void
9605 unmark_dies (dw_die_ref die)
9606 {
9607 dw_die_ref c;
9608
9609 if (! use_debug_types)
9610 gcc_assert (die->die_mark);
9611
9612 die->die_mark = 0;
9613 FOR_EACH_CHILD (die, c, unmark_dies (c));
9614 }
9615
9616 /* Clear the marks for a die, its children and referred dies. */
9617
9618 static void
9619 unmark_all_dies (dw_die_ref die)
9620 {
9621 dw_die_ref c;
9622 dw_attr_node *a;
9623 unsigned ix;
9624
9625 if (!die->die_mark)
9626 return;
9627 die->die_mark = 0;
9628
9629 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9630
9631 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9632 if (AT_class (a) == dw_val_class_die_ref)
9633 unmark_all_dies (AT_ref (a));
9634 }
9635
9636 /* Calculate if the entry should appear in the final output file. It may be
9637 from a pruned type. */
9638
9639 static bool
9640 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9641 {
9642 /* By limiting gnu pubnames to definitions only, gold can generate a
9643 gdb index without entries for declarations, which don't include
9644 enough information to be useful. */
9645 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9646 return false;
9647
9648 if (table == pubname_table)
9649 {
9650 /* Enumerator names are part of the pubname table, but the
9651 parent DW_TAG_enumeration_type die may have been pruned.
9652 Don't output them if that is the case. */
9653 if (p->die->die_tag == DW_TAG_enumerator &&
9654 (p->die->die_parent == NULL
9655 || !p->die->die_parent->die_perennial_p))
9656 return false;
9657
9658 /* Everything else in the pubname table is included. */
9659 return true;
9660 }
9661
9662 /* The pubtypes table shouldn't include types that have been
9663 pruned. */
9664 return (p->die->die_offset != 0
9665 || !flag_eliminate_unused_debug_types);
9666 }
9667
9668 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9669 generated for the compilation unit. */
9670
9671 static unsigned long
9672 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9673 {
9674 unsigned long size;
9675 unsigned i;
9676 pubname_entry *p;
9677 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9678
9679 size = DWARF_PUBNAMES_HEADER_SIZE;
9680 FOR_EACH_VEC_ELT (*names, i, p)
9681 if (include_pubname_in_output (names, p))
9682 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9683
9684 size += DWARF_OFFSET_SIZE;
9685 return size;
9686 }
9687
9688 /* Return the size of the information in the .debug_aranges section. */
9689
9690 static unsigned long
9691 size_of_aranges (void)
9692 {
9693 unsigned long size;
9694
9695 size = DWARF_ARANGES_HEADER_SIZE;
9696
9697 /* Count the address/length pair for this compilation unit. */
9698 if (text_section_used)
9699 size += 2 * DWARF2_ADDR_SIZE;
9700 if (cold_text_section_used)
9701 size += 2 * DWARF2_ADDR_SIZE;
9702 if (have_multiple_function_sections)
9703 {
9704 unsigned fde_idx;
9705 dw_fde_ref fde;
9706
9707 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9708 {
9709 if (DECL_IGNORED_P (fde->decl))
9710 continue;
9711 if (!fde->in_std_section)
9712 size += 2 * DWARF2_ADDR_SIZE;
9713 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9714 size += 2 * DWARF2_ADDR_SIZE;
9715 }
9716 }
9717
9718 /* Count the two zero words used to terminate the address range table. */
9719 size += 2 * DWARF2_ADDR_SIZE;
9720 return size;
9721 }
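
/* E.g. with DWARF2_ADDR_SIZE == 8, each contributing range (the .text
   range, the cold text range, and every non-ignored function placed in
   its own section) adds 16 bytes on top of the header, and the table
   always ends with a 16-byte pair of zero words.  */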
9722 \f
9723 /* Select the encoding of an attribute value. */
9724
9725 static enum dwarf_form
9726 value_format (dw_attr_node *a)
9727 {
9728 switch (AT_class (a))
9729 {
9730 case dw_val_class_addr:
9731 /* Only very few attributes allow DW_FORM_addr. */
9732 switch (a->dw_attr)
9733 {
9734 case DW_AT_low_pc:
9735 case DW_AT_high_pc:
9736 case DW_AT_entry_pc:
9737 case DW_AT_trampoline:
9738 return (AT_index (a) == NOT_INDEXED
9739 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9740 default:
9741 break;
9742 }
9743 switch (DWARF2_ADDR_SIZE)
9744 {
9745 case 1:
9746 return DW_FORM_data1;
9747 case 2:
9748 return DW_FORM_data2;
9749 case 4:
9750 return DW_FORM_data4;
9751 case 8:
9752 return DW_FORM_data8;
9753 default:
9754 gcc_unreachable ();
9755 }
9756 case dw_val_class_loc_list:
9757 if (dwarf_split_debug_info
9758 && dwarf_version >= 5
9759 && AT_loc_list (a)->num_assigned)
9760 return DW_FORM_loclistx;
9761 /* FALLTHRU */
9762 case dw_val_class_view_list:
9763 case dw_val_class_range_list:
9764 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9765 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9766 care about sizes of .debug* sections in shared libraries and
9767 executables and don't take into account relocations that affect just
9768 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9769 table in the .debug_rnglists section. */
9770 if (dwarf_split_debug_info
9771 && dwarf_version >= 5
9772 && AT_class (a) == dw_val_class_range_list
9773 && rnglist_idx
9774 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9775 return DW_FORM_rnglistx;
9776 if (dwarf_version >= 4)
9777 return DW_FORM_sec_offset;
9778 /* FALLTHRU */
9779 case dw_val_class_vms_delta:
9780 case dw_val_class_offset:
9781 switch (DWARF_OFFSET_SIZE)
9782 {
9783 case 4:
9784 return DW_FORM_data4;
9785 case 8:
9786 return DW_FORM_data8;
9787 default:
9788 gcc_unreachable ();
9789 }
9790 case dw_val_class_loc:
9791 if (dwarf_version >= 4)
9792 return DW_FORM_exprloc;
9793 switch (constant_size (size_of_locs (AT_loc (a))))
9794 {
9795 case 1:
9796 return DW_FORM_block1;
9797 case 2:
9798 return DW_FORM_block2;
9799 case 4:
9800 return DW_FORM_block4;
9801 default:
9802 gcc_unreachable ();
9803 }
9804 case dw_val_class_const:
9805 return DW_FORM_sdata;
9806 case dw_val_class_unsigned_const:
9807 switch (constant_size (AT_unsigned (a)))
9808 {
9809 case 1:
9810 return DW_FORM_data1;
9811 case 2:
9812 return DW_FORM_data2;
9813 case 4:
9814 /* In DWARF3 DW_AT_data_member_location with
9815 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9816 constant, so we need to use DW_FORM_udata if we need
9817 a large constant. */
9818 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9819 return DW_FORM_udata;
9820 return DW_FORM_data4;
9821 case 8:
9822 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9823 return DW_FORM_udata;
9824 return DW_FORM_data8;
9825 default:
9826 gcc_unreachable ();
9827 }
9828 case dw_val_class_const_implicit:
9829 case dw_val_class_unsigned_const_implicit:
9830 case dw_val_class_file_implicit:
9831 return DW_FORM_implicit_const;
9832 case dw_val_class_const_double:
9833 switch (HOST_BITS_PER_WIDE_INT)
9834 {
9835 case 8:
9836 return DW_FORM_data2;
9837 case 16:
9838 return DW_FORM_data4;
9839 case 32:
9840 return DW_FORM_data8;
9841 case 64:
9842 if (dwarf_version >= 5)
9843 return DW_FORM_data16;
9844 /* FALLTHRU */
9845 default:
9846 return DW_FORM_block1;
9847 }
9848 case dw_val_class_wide_int:
9849 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9850 {
9851 case 8:
9852 return DW_FORM_data1;
9853 case 16:
9854 return DW_FORM_data2;
9855 case 32:
9856 return DW_FORM_data4;
9857 case 64:
9858 return DW_FORM_data8;
9859 case 128:
9860 if (dwarf_version >= 5)
9861 return DW_FORM_data16;
9862 /* FALLTHRU */
9863 default:
9864 return DW_FORM_block1;
9865 }
9866 case dw_val_class_symview:
9867 /* ??? We might use uleb128, but then we'd have to compute
9868 .debug_info offsets in the assembler. */
9869 if (symview_upper_bound <= 0xff)
9870 return DW_FORM_data1;
9871 else if (symview_upper_bound <= 0xffff)
9872 return DW_FORM_data2;
9873 else if (symview_upper_bound <= 0xffffffff)
9874 return DW_FORM_data4;
9875 else
9876 return DW_FORM_data8;
9877 case dw_val_class_vec:
9878 switch (constant_size (a->dw_attr_val.v.val_vec.length
9879 * a->dw_attr_val.v.val_vec.elt_size))
9880 {
9881 case 1:
9882 return DW_FORM_block1;
9883 case 2:
9884 return DW_FORM_block2;
9885 case 4:
9886 return DW_FORM_block4;
9887 default:
9888 gcc_unreachable ();
9889 }
9890 case dw_val_class_flag:
9891 if (dwarf_version >= 4)
9892 {
9893 /* Currently all add_AT_flag calls pass in 1 as last argument,
9894 so DW_FORM_flag_present can be used. If that ever changes,
9895 we'll need to use DW_FORM_flag and have some optimization
9896 in build_abbrev_table that will change those to
9897 DW_FORM_flag_present if it is set to 1 in all DIEs using
9898 the same abbrev entry. */
9899 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9900 return DW_FORM_flag_present;
9901 }
9902 return DW_FORM_flag;
9903 case dw_val_class_die_ref:
9904 if (AT_ref_external (a))
9905 {
9906 if (AT_ref (a)->comdat_type_p)
9907 return DW_FORM_ref_sig8;
9908 else
9909 return DW_FORM_ref_addr;
9910 }
9911 else
9912 return DW_FORM_ref;
9913 case dw_val_class_fde_ref:
9914 return DW_FORM_data;
9915 case dw_val_class_lbl_id:
9916 return (AT_index (a) == NOT_INDEXED
9917 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9918 case dw_val_class_lineptr:
9919 case dw_val_class_macptr:
9920 case dw_val_class_loclistsptr:
9921 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9922 case dw_val_class_str:
9923 return AT_string_form (a);
9924 case dw_val_class_file:
9925 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9926 {
9927 case 1:
9928 return DW_FORM_data1;
9929 case 2:
9930 return DW_FORM_data2;
9931 case 4:
9932 return DW_FORM_data4;
9933 default:
9934 gcc_unreachable ();
9935 }
9936
9937 case dw_val_class_data8:
9938 return DW_FORM_data8;
9939
9940 case dw_val_class_high_pc:
9941 switch (DWARF2_ADDR_SIZE)
9942 {
9943 case 1:
9944 return DW_FORM_data1;
9945 case 2:
9946 return DW_FORM_data2;
9947 case 4:
9948 return DW_FORM_data4;
9949 case 8:
9950 return DW_FORM_data8;
9951 default:
9952 gcc_unreachable ();
9953 }
9954
9955 case dw_val_class_discr_value:
9956 return (a->dw_attr_val.v.val_discr_value.pos
9957 ? DW_FORM_udata
9958 : DW_FORM_sdata);
9959 case dw_val_class_discr_list:
9960 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9961 {
9962 case 1:
9963 return DW_FORM_block1;
9964 case 2:
9965 return DW_FORM_block2;
9966 case 4:
9967 return DW_FORM_block4;
9968 default:
9969 gcc_unreachable ();
9970 }
9971
9972 default:
9973 gcc_unreachable ();
9974 }
9975 }
9976
9977 /* Output the encoding of an attribute value. */
9978
9979 static void
9980 output_value_format (dw_attr_node *a)
9981 {
9982 enum dwarf_form form = value_format (a);
9983
9984 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9985 }
9986
9987 /* Given a die and id, produce the appropriate abbreviations. */
9988
9989 static void
9990 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9991 {
9992 unsigned ix;
9993 dw_attr_node *a_attr;
9994
9995 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9996 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9997 dwarf_tag_name (abbrev->die_tag));
9998
9999 if (abbrev->die_child != NULL)
10000 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10001 else
10002 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10003
10004 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10005 {
10006 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10007 dwarf_attr_name (a_attr->dw_attr));
10008 output_value_format (a_attr);
10009 if (value_format (a_attr) == DW_FORM_implicit_const)
10010 {
10011 if (AT_class (a_attr) == dw_val_class_file_implicit)
10012 {
10013 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10014 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10015 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10016 }
10017 else
10018 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10019 }
10020 }
10021
10022 dw2_asm_output_data (1, 0, NULL);
10023 dw2_asm_output_data (1, 0, NULL);
10024 }
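
/* For illustration, a single emitted abbreviation entry looks roughly like
   the following (exact codes depend on the DIE and target syntax):

     .uleb128 0x3   (abbrev code)
     .uleb128 0x2e  (TAG: DW_TAG_subprogram)
     .byte    0x1   DW_children_yes
     .uleb128 0x3   (DW_AT_name)
     .uleb128 0xe   (DW_FORM_strp)
     ...
     .byte    0
     .byte    0     terminates the attribute list of this entry  */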
10025
10026
10027 /* Output the .debug_abbrev section which defines the DIE abbreviation
10028 table. */
10029
10030 static void
10031 output_abbrev_section (void)
10032 {
10033 unsigned int abbrev_id;
10034 dw_die_ref abbrev;
10035
10036 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10037 if (abbrev_id != 0)
10038 output_die_abbrevs (abbrev_id, abbrev);
10039
10040 /* Terminate the table. */
10041 dw2_asm_output_data (1, 0, NULL);
10042 }
10043
10044 /* Return a new location list, given the begin and end range, and the
10045 expression. */
10046
10047 static inline dw_loc_list_ref
10048 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10049 const char *end, var_loc_view vend,
10050 const char *section)
10051 {
10052 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10053
10054 retlist->begin = begin;
10055 retlist->begin_entry = NULL;
10056 retlist->end = end;
10057 retlist->expr = expr;
10058 retlist->section = section;
10059 retlist->vbegin = vbegin;
10060 retlist->vend = vend;
10061
10062 return retlist;
10063 }
10064
10065 /* Return true iff there's any nonzero view number in the loc list.
10066
10067 ??? When views are not enabled, we'll often extend a single range
10068 to the entire function, so that we emit a single location
10069 expression rather than a location list. With views, even with a
10070 single range, we'll output a list if start or end have a nonzero
10071 view. If we change this, we may want to stop splitting a single
10072 range in dw_loc_list just because of a nonzero view, even if it
10073 straddles across hot/cold partitions. */
10074
10075 static bool
10076 loc_list_has_views (dw_loc_list_ref list)
10077 {
10078 if (!debug_variable_location_views)
10079 return false;
10080
10081 for (dw_loc_list_ref loc = list;
10082 loc != NULL; loc = loc->dw_loc_next)
10083 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10084 return true;
10085
10086 return false;
10087 }
10088
10089 /* Generate a new internal symbol for this location list node, if it
10090 hasn't got one yet. */
10091
10092 static inline void
10093 gen_llsym (dw_loc_list_ref list)
10094 {
10095 gcc_assert (!list->ll_symbol);
10096 list->ll_symbol = gen_internal_sym ("LLST");
10097
10098 if (!loc_list_has_views (list))
10099 return;
10100
10101 if (dwarf2out_locviews_in_attribute ())
10102 {
10103 /* Use the same label_num for the view list. */
10104 label_num--;
10105 list->vl_symbol = gen_internal_sym ("LVUS");
10106 }
10107 else
10108 list->vl_symbol = list->ll_symbol;
10109 }
10110
10111 /* Generate a symbol for the list, but only if we really want to emit
10112 it as a list. */
10113
10114 static inline void
10115 maybe_gen_llsym (dw_loc_list_ref list)
10116 {
10117 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10118 return;
10119
10120 gen_llsym (list);
10121 }
10122
10123 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10124 NULL, don't consider size of the location expression. If we're not
10125 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10126 representation in *SIZEP. */
10127
10128 static bool
10129 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10130 {
10131 /* Don't output an entry that starts and ends at the same address. */
10132 if (strcmp (curr->begin, curr->end) == 0
10133 && curr->vbegin == curr->vend && !curr->force)
10134 return true;
10135
10136 if (!sizep)
10137 return false;
10138
10139 unsigned long size = size_of_locs (curr->expr);
10140
10141 /* If the expression is too large, drop it on the floor. We could
10142 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10143 in the expression, but >= 64KB expressions for a single value
10144 in a single range are unlikely to be very useful. */
10145 if (dwarf_version < 5 && size > 0xffff)
10146 return true;
10147
10148 *sizep = size;
10149
10150 return false;
10151 }
10152
10153 /* Output a view pair loclist entry for CURR, if it requires one. */
10154
10155 static void
10156 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10157 {
10158 if (!dwarf2out_locviews_in_loclist ())
10159 return;
10160
10161 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10162 return;
10163
10164 #ifdef DW_LLE_view_pair
10165 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10166
10167 if (dwarf2out_as_locview_support)
10168 {
10169 if (ZERO_VIEW_P (curr->vbegin))
10170 dw2_asm_output_data_uleb128 (0, "Location view begin");
10171 else
10172 {
10173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10174 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10175 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10176 }
10177
10178 if (ZERO_VIEW_P (curr->vend))
10179 dw2_asm_output_data_uleb128 (0, "Location view end");
10180 else
10181 {
10182 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10183 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10184 dw2_asm_output_symname_uleb128 (label, "Location view end");
10185 }
10186 }
10187 else
10188 {
10189 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10190 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10191 }
10192 #endif /* DW_LLE_view_pair */
10193
10194 return;
10195 }
10196
10197 /* Output the location list given to us. */
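
/* For DWARF 5 the entry kind is chosen as follows: with -gsplit-dwarf we
   emit DW_LLE_startx_length (a uleb128 .debug_addr index plus a uleb128
   length); with a single text section and .uleb128 support we emit
   DW_LLE_offset_pair against the base address the CU already provides;
   with multiple function sections and .uleb128 support we emit
   DW_LLE_base_address followed by DW_LLE_offset_pair entries, or
   DW_LLE_start_length for an isolated range; without .uleb128 support we
   fall back to DW_LLE_start_end with absolute addresses.  Pre-DWARF 5
   output uses DW_LLE_GNU_start_length_entry for -gsplit-dwarf, and
   otherwise plain address pairs (section-relative when everything is in
   one section, absolute otherwise).  */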
10198
10199 static void
10200 output_loc_list (dw_loc_list_ref list_head)
10201 {
10202 int vcount = 0, lcount = 0;
10203
10204 if (list_head->emitted)
10205 return;
10206 list_head->emitted = true;
10207
10208 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10209 {
10210 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10211
10212 for (dw_loc_list_ref curr = list_head; curr != NULL;
10213 curr = curr->dw_loc_next)
10214 {
10215 unsigned long size;
10216
10217 if (skip_loc_list_entry (curr, &size))
10218 continue;
10219
10220 vcount++;
10221
10222 /* ?? dwarf_split_debug_info? */
10223 if (dwarf2out_as_locview_support)
10224 {
10225 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10226
10227 if (!ZERO_VIEW_P (curr->vbegin))
10228 {
10229 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10230 dw2_asm_output_symname_uleb128 (label,
10231 "View list begin (%s)",
10232 list_head->vl_symbol);
10233 }
10234 else
10235 dw2_asm_output_data_uleb128 (0,
10236 "View list begin (%s)",
10237 list_head->vl_symbol);
10238
10239 if (!ZERO_VIEW_P (curr->vend))
10240 {
10241 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10242 dw2_asm_output_symname_uleb128 (label,
10243 "View list end (%s)",
10244 list_head->vl_symbol);
10245 }
10246 else
10247 dw2_asm_output_data_uleb128 (0,
10248 "View list end (%s)",
10249 list_head->vl_symbol);
10250 }
10251 else
10252 {
10253 dw2_asm_output_data_uleb128 (curr->vbegin,
10254 "View list begin (%s)",
10255 list_head->vl_symbol);
10256 dw2_asm_output_data_uleb128 (curr->vend,
10257 "View list end (%s)",
10258 list_head->vl_symbol);
10259 }
10260 }
10261 }
10262
10263 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10264
10265 const char *last_section = NULL;
10266 const char *base_label = NULL;
10267
10268 /* Walk the location list, and output each range + expression. */
10269 for (dw_loc_list_ref curr = list_head; curr != NULL;
10270 curr = curr->dw_loc_next)
10271 {
10272 unsigned long size;
10273
10274 /* Skip this entry? If we skip it here, we must skip it in the
10275 view list above as well. */
10276 if (skip_loc_list_entry (curr, &size))
10277 continue;
10278
10279 lcount++;
10280
10281 if (dwarf_version >= 5)
10282 {
10283 if (dwarf_split_debug_info)
10284 {
10285 dwarf2out_maybe_output_loclist_view_pair (curr);
10286 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10287 uleb128 index into .debug_addr and uleb128 length. */
10288 dw2_asm_output_data (1, DW_LLE_startx_length,
10289 "DW_LLE_startx_length (%s)",
10290 list_head->ll_symbol);
10291 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10292 "Location list range start index "
10293 "(%s)", curr->begin);
10294 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10295 For that case we probably need to emit DW_LLE_startx_endx,
10296 but we'd need 2 .debug_addr entries rather than just one. */
10297 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10298 "Location list length (%s)",
10299 list_head->ll_symbol);
10300 }
10301 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10302 {
10303 dwarf2out_maybe_output_loclist_view_pair (curr);
10304 /* If all code is in .text section, the base address is
10305 already provided by the CU attributes. Use
10306 DW_LLE_offset_pair where both addresses are uleb128 encoded
10307 offsets against that base. */
10308 dw2_asm_output_data (1, DW_LLE_offset_pair,
10309 "DW_LLE_offset_pair (%s)",
10310 list_head->ll_symbol);
10311 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10312 "Location list begin address (%s)",
10313 list_head->ll_symbol);
10314 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10315 "Location list end address (%s)",
10316 list_head->ll_symbol);
10317 }
10318 else if (HAVE_AS_LEB128)
10319 {
10320 /* Otherwise, find out how many consecutive entries could share
10321 the same base entry. If just one, emit DW_LLE_start_length,
10322 otherwise emit DW_LLE_base_address for the base address
10323 followed by a series of DW_LLE_offset_pair. */
10324 if (last_section == NULL || curr->section != last_section)
10325 {
10326 dw_loc_list_ref curr2;
10327 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10328 curr2 = curr2->dw_loc_next)
10329 {
10330 if (strcmp (curr2->begin, curr2->end) == 0
10331 && !curr2->force)
10332 continue;
10333 break;
10334 }
10335 if (curr2 == NULL || curr->section != curr2->section)
10336 last_section = NULL;
10337 else
10338 {
10339 last_section = curr->section;
10340 base_label = curr->begin;
10341 dw2_asm_output_data (1, DW_LLE_base_address,
10342 "DW_LLE_base_address (%s)",
10343 list_head->ll_symbol);
10344 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10345 "Base address (%s)",
10346 list_head->ll_symbol);
10347 }
10348 }
10349 /* Only one entry with the same base address. Use
10350 DW_LLE_start_length with absolute address and uleb128
10351 length. */
10352 if (last_section == NULL)
10353 {
10354 dwarf2out_maybe_output_loclist_view_pair (curr);
10355 dw2_asm_output_data (1, DW_LLE_start_length,
10356 "DW_LLE_start_length (%s)",
10357 list_head->ll_symbol);
10358 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10359 "Location list begin address (%s)",
10360 list_head->ll_symbol);
10361 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10362 "Location list length "
10363 "(%s)", list_head->ll_symbol);
10364 }
10365 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10366 DW_LLE_base_address. */
10367 else
10368 {
10369 dwarf2out_maybe_output_loclist_view_pair (curr);
10370 dw2_asm_output_data (1, DW_LLE_offset_pair,
10371 "DW_LLE_offset_pair (%s)",
10372 list_head->ll_symbol);
10373 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10374 "Location list begin address "
10375 "(%s)", list_head->ll_symbol);
10376 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10377 "Location list end address "
10378 "(%s)", list_head->ll_symbol);
10379 }
10380 }
10381 /* The assembler does not support the .uleb128 directive. Emit
10382 DW_LLE_start_end with a pair of absolute addresses. */
10383 else
10384 {
10385 dwarf2out_maybe_output_loclist_view_pair (curr);
10386 dw2_asm_output_data (1, DW_LLE_start_end,
10387 "DW_LLE_start_end (%s)",
10388 list_head->ll_symbol);
10389 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10390 "Location list begin address (%s)",
10391 list_head->ll_symbol);
10392 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10393 "Location list end address (%s)",
10394 list_head->ll_symbol);
10395 }
10396 }
10397 else if (dwarf_split_debug_info)
10398 {
10399 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10400 and 4 byte length. */
10401 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10402 "Location list start/length entry (%s)",
10403 list_head->ll_symbol);
10404 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10405 "Location list range start index (%s)",
10406 curr->begin);
10407 /* The length field is 4 bytes. If we ever need to support
10408 an 8-byte length, we can add a new DW_LLE code or fall back
10409 to DW_LLE_GNU_start_end_entry. */
10410 dw2_asm_output_delta (4, curr->end, curr->begin,
10411 "Location list range length (%s)",
10412 list_head->ll_symbol);
10413 }
10414 else if (!have_multiple_function_sections)
10415 {
10416 /* Pair of relative addresses against start of text section. */
10417 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10418 "Location list begin address (%s)",
10419 list_head->ll_symbol);
10420 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10421 "Location list end address (%s)",
10422 list_head->ll_symbol);
10423 }
10424 else
10425 {
10426 /* Pair of absolute addresses. */
10427 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10428 "Location list begin address (%s)",
10429 list_head->ll_symbol);
10430 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10431 "Location list end address (%s)",
10432 list_head->ll_symbol);
10433 }
10434
10435 /* Output the block length for this list of location operations. */
10436 if (dwarf_version >= 5)
10437 dw2_asm_output_data_uleb128 (size, "Location expression size");
10438 else
10439 {
10440 gcc_assert (size <= 0xffff);
10441 dw2_asm_output_data (2, size, "Location expression size");
10442 }
10443
10444 output_loc_sequence (curr->expr, -1);
10445 }
10446
10447 /* And finally list termination. */
10448 if (dwarf_version >= 5)
10449 dw2_asm_output_data (1, DW_LLE_end_of_list,
10450 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10451 else if (dwarf_split_debug_info)
10452 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10453 "Location list terminator (%s)",
10454 list_head->ll_symbol);
10455 else
10456 {
10457 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10458 "Location list terminator begin (%s)",
10459 list_head->ll_symbol);
10460 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10461 "Location list terminator end (%s)",
10462 list_head->ll_symbol);
10463 }
10464
10465 gcc_assert (!list_head->vl_symbol
10466 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10467 }
10468
10469 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10470 section. Emit a relocated reference if val_entry is NULL, otherwise,
10471 emit an indirect reference. */
10472
10473 static void
10474 output_range_list_offset (dw_attr_node *a)
10475 {
10476 const char *name = dwarf_attr_name (a->dw_attr);
10477
10478 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10479 {
10480 if (dwarf_version >= 5)
10481 {
10482 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10483 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10484 debug_ranges_section, "%s", name);
10485 }
10486 else
10487 {
10488 char *p = strchr (ranges_section_label, '\0');
10489 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10490 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10491 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10492 debug_ranges_section, "%s", name);
10493 *p = '\0';
10494 }
10495 }
10496 else if (dwarf_version >= 5)
10497 {
10498 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10499 gcc_assert (rnglist_idx);
10500 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10501 }
10502 else
10503 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10504 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10505 "%s (offset from %s)", name, ranges_section_label);
10506 }
10507
10508 /* Output the offset into the debug_loc section. */
10509
10510 static void
10511 output_loc_list_offset (dw_attr_node *a)
10512 {
10513 char *sym = AT_loc_list (a)->ll_symbol;
10514
10515 gcc_assert (sym);
10516 if (!dwarf_split_debug_info)
10517 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10518 "%s", dwarf_attr_name (a->dw_attr));
10519 else if (dwarf_version >= 5)
10520 {
10521 gcc_assert (AT_loc_list (a)->num_assigned);
10522 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10523 dwarf_attr_name (a->dw_attr),
10524 sym);
10525 }
10526 else
10527 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10528 "%s", dwarf_attr_name (a->dw_attr));
10529 }
10530
10531 /* Output the offset of the view list into the debug_loc section. */
10532
10533 static void
10534 output_view_list_offset (dw_attr_node *a)
10535 {
10536 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10537
10538 gcc_assert (sym);
10539 if (dwarf_split_debug_info)
10540 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10541 "%s", dwarf_attr_name (a->dw_attr));
10542 else
10543 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10544 "%s", dwarf_attr_name (a->dw_attr));
10545 }
10546
10547 /* Output an attribute's index or value appropriately. */
10548
10549 static void
10550 output_attr_index_or_value (dw_attr_node *a)
10551 {
10552 const char *name = dwarf_attr_name (a->dw_attr);
10553
10554 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10555 {
10556 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10557 return;
10558 }
10559 switch (AT_class (a))
10560 {
10561 case dw_val_class_addr:
10562 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10563 break;
10564 case dw_val_class_high_pc:
10565 case dw_val_class_lbl_id:
10566 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10567 break;
10568 default:
10569 gcc_unreachable ();
10570 }
10571 }
10572
10573 /* Output a type signature. */
10574
10575 static inline void
10576 output_signature (const char *sig, const char *name)
10577 {
10578 int i;
10579
10580 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10581 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10582 }
10583
10584 /* Output a discriminant value. */
10585
10586 static inline void
10587 output_discr_value (dw_discr_value *discr_value, const char *name)
10588 {
10589 if (discr_value->pos)
10590 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10591 else
10592 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10593 }
10594
10595 /* Output the DIE and its attributes. Called recursively to generate
10596 the definitions of each child DIE. */
10597
10598 static void
10599 output_die (dw_die_ref die)
10600 {
10601 dw_attr_node *a;
10602 dw_die_ref c;
10603 unsigned long size;
10604 unsigned ix;
10605
10606 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10607 (unsigned long)die->die_offset,
10608 dwarf_tag_name (die->die_tag));
10609
10610 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10611 {
10612 const char *name = dwarf_attr_name (a->dw_attr);
10613
10614 switch (AT_class (a))
10615 {
10616 case dw_val_class_addr:
10617 output_attr_index_or_value (a);
10618 break;
10619
10620 case dw_val_class_offset:
10621 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10622 "%s", name);
10623 break;
10624
10625 case dw_val_class_range_list:
10626 output_range_list_offset (a);
10627 break;
10628
10629 case dw_val_class_loc:
10630 size = size_of_locs (AT_loc (a));
10631
10632 /* Output the block length for this list of location operations. */
10633 if (dwarf_version >= 4)
10634 dw2_asm_output_data_uleb128 (size, "%s", name);
10635 else
10636 dw2_asm_output_data (constant_size (size), size, "%s", name);
10637
10638 output_loc_sequence (AT_loc (a), -1);
10639 break;
10640
10641 case dw_val_class_const:
10642 /* ??? It would be slightly more efficient to use a scheme like the one
10643 used for unsigned constants below, but gdb 4.x does not sign
10644 extend. Gdb 5.x does sign extend. */
10645 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10646 break;
10647
10648 case dw_val_class_unsigned_const:
10649 {
10650 int csize = constant_size (AT_unsigned (a));
10651 if (dwarf_version == 3
10652 && a->dw_attr == DW_AT_data_member_location
10653 && csize >= 4)
10654 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10655 else
10656 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10657 }
10658 break;
10659
10660 case dw_val_class_symview:
10661 {
10662 int vsize;
10663 if (symview_upper_bound <= 0xff)
10664 vsize = 1;
10665 else if (symview_upper_bound <= 0xffff)
10666 vsize = 2;
10667 else if (symview_upper_bound <= 0xffffffff)
10668 vsize = 4;
10669 else
10670 vsize = 8;
10671 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10672 "%s", name);
10673 }
10674 break;
10675
10676 case dw_val_class_const_implicit:
10677 if (flag_debug_asm)
10678 fprintf (asm_out_file, "\t\t\t%s %s ("
10679 HOST_WIDE_INT_PRINT_DEC ")\n",
10680 ASM_COMMENT_START, name, AT_int (a));
10681 break;
10682
10683 case dw_val_class_unsigned_const_implicit:
10684 if (flag_debug_asm)
10685 fprintf (asm_out_file, "\t\t\t%s %s ("
10686 HOST_WIDE_INT_PRINT_HEX ")\n",
10687 ASM_COMMENT_START, name, AT_unsigned (a));
10688 break;
10689
10690 case dw_val_class_const_double:
10691 {
10692 unsigned HOST_WIDE_INT first, second;
10693
10694 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10695 dw2_asm_output_data (1,
10696 HOST_BITS_PER_DOUBLE_INT
10697 / HOST_BITS_PER_CHAR,
10698 NULL);
10699
10700 if (WORDS_BIG_ENDIAN)
10701 {
10702 first = a->dw_attr_val.v.val_double.high;
10703 second = a->dw_attr_val.v.val_double.low;
10704 }
10705 else
10706 {
10707 first = a->dw_attr_val.v.val_double.low;
10708 second = a->dw_attr_val.v.val_double.high;
10709 }
10710
10711 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10712 first, "%s", name);
10713 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10714 second, NULL);
10715 }
10716 break;
10717
10718 case dw_val_class_wide_int:
10719 {
10720 int i;
10721 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10722 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10723 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10724 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10725 * l, NULL);
10726
10727 if (WORDS_BIG_ENDIAN)
10728 for (i = len - 1; i >= 0; --i)
10729 {
10730 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10731 "%s", name);
10732 name = "";
10733 }
10734 else
10735 for (i = 0; i < len; ++i)
10736 {
10737 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10738 "%s", name);
10739 name = "";
10740 }
10741 }
10742 break;
10743
10744 case dw_val_class_vec:
10745 {
10746 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10747 unsigned int len = a->dw_attr_val.v.val_vec.length;
10748 unsigned int i;
10749 unsigned char *p;
10750
10751 dw2_asm_output_data (constant_size (len * elt_size),
10752 len * elt_size, "%s", name);
10753 if (elt_size > sizeof (HOST_WIDE_INT))
10754 {
10755 elt_size /= 2;
10756 len *= 2;
10757 }
10758 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10759 i < len;
10760 i++, p += elt_size)
10761 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10762 "fp or vector constant word %u", i);
10763 break;
10764 }
10765
10766 case dw_val_class_flag:
10767 if (dwarf_version >= 4)
10768 {
10769 /* Currently all add_AT_flag calls pass in 1 as last argument,
10770 so DW_FORM_flag_present can be used. If that ever changes,
10771 we'll need to use DW_FORM_flag and have some optimization
10772 in build_abbrev_table that will change those to
10773 DW_FORM_flag_present if it is set to 1 in all DIEs using
10774 the same abbrev entry. */
10775 gcc_assert (AT_flag (a) == 1);
10776 if (flag_debug_asm)
10777 fprintf (asm_out_file, "\t\t\t%s %s\n",
10778 ASM_COMMENT_START, name);
10779 break;
10780 }
10781 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10782 break;
10783
10784 case dw_val_class_loc_list:
10785 output_loc_list_offset (a);
10786 break;
10787
10788 case dw_val_class_view_list:
10789 output_view_list_offset (a);
10790 break;
10791
10792 case dw_val_class_die_ref:
10793 if (AT_ref_external (a))
10794 {
10795 if (AT_ref (a)->comdat_type_p)
10796 {
10797 comdat_type_node *type_node
10798 = AT_ref (a)->die_id.die_type_node;
10799
10800 gcc_assert (type_node);
10801 output_signature (type_node->signature, name);
10802 }
10803 else
10804 {
10805 const char *sym = AT_ref (a)->die_id.die_symbol;
10806 int size;
10807
10808 gcc_assert (sym);
10809 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10810 length, whereas in DWARF3 it's always sized as an
10811 offset. */
10812 if (dwarf_version == 2)
10813 size = DWARF2_ADDR_SIZE;
10814 else
10815 size = DWARF_OFFSET_SIZE;
10816 	    /* ??? We cannot unconditionally output die_offset if
10817 	       non-zero - others might create references to those
10818 	       DIEs via symbols.
10819 	       And we do not clear its DIE offset after outputting it
10820 	       (also, the label refers to the actual DIE, not to the
10821 	       DWARF CU header; label + offset would only be correct
10822 	       if the label referred to that header).
10823 	       ??? This is the reason for the with_offset flag.  */
10824 if (AT_ref (a)->with_offset)
10825 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10826 debug_info_section, "%s", name);
10827 else
10828 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10829 name);
10830 }
10831 }
10832 else
10833 {
10834 gcc_assert (AT_ref (a)->die_offset);
10835 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10836 "%s", name);
10837 }
10838 break;
10839
10840 case dw_val_class_fde_ref:
10841 {
10842 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10843
10844 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10845 a->dw_attr_val.v.val_fde_index * 2);
10846 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10847 "%s", name);
10848 }
10849 break;
10850
10851 case dw_val_class_vms_delta:
10852 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10853 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10854 AT_vms_delta2 (a), AT_vms_delta1 (a),
10855 "%s", name);
10856 #else
10857 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10858 AT_vms_delta2 (a), AT_vms_delta1 (a),
10859 "%s", name);
10860 #endif
10861 break;
10862
10863 case dw_val_class_lbl_id:
10864 output_attr_index_or_value (a);
10865 break;
10866
10867 case dw_val_class_lineptr:
10868 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10869 debug_line_section, "%s", name);
10870 break;
10871
10872 case dw_val_class_macptr:
10873 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10874 debug_macinfo_section, "%s", name);
10875 break;
10876
10877 case dw_val_class_loclistsptr:
10878 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10879 debug_loc_section, "%s", name);
10880 break;
10881
10882 case dw_val_class_str:
10883 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10884 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10885 a->dw_attr_val.v.val_str->label,
10886 debug_str_section,
10887 "%s: \"%s\"", name, AT_string (a));
10888 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10889 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10890 a->dw_attr_val.v.val_str->label,
10891 debug_line_str_section,
10892 "%s: \"%s\"", name, AT_string (a));
10893 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10894 dw2_asm_output_data_uleb128 (AT_index (a),
10895 "%s: \"%s\"", name, AT_string (a));
10896 else
10897 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10898 break;
10899
10900 case dw_val_class_file:
10901 {
10902 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10903
10904 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10905 a->dw_attr_val.v.val_file->filename);
10906 break;
10907 }
10908
10909 case dw_val_class_file_implicit:
10910 if (flag_debug_asm)
10911 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10912 ASM_COMMENT_START, name,
10913 maybe_emit_file (a->dw_attr_val.v.val_file),
10914 a->dw_attr_val.v.val_file->filename);
10915 break;
10916
10917 case dw_val_class_data8:
10918 {
10919 int i;
10920
10921 for (i = 0; i < 8; i++)
10922 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10923 i == 0 ? "%s" : NULL, name);
10924 break;
10925 }
10926
10927 case dw_val_class_high_pc:
10928 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10929 get_AT_low_pc (die), "DW_AT_high_pc");
10930 break;
10931
10932 case dw_val_class_discr_value:
10933 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10934 break;
10935
10936 case dw_val_class_discr_list:
10937 {
10938 dw_discr_list_ref list = AT_discr_list (a);
10939 const int size = size_of_discr_list (list);
10940
10941 /* This is a block, so output its length first. */
10942 dw2_asm_output_data (constant_size (size), size,
10943 "%s: block size", name);
10944
10945 for (; list != NULL; list = list->dw_discr_next)
10946 {
10947 /* One byte for the discriminant value descriptor, and then as
10948 many LEB128 numbers as required. */
10949 if (list->dw_discr_range)
10950 dw2_asm_output_data (1, DW_DSC_range,
10951 "%s: DW_DSC_range", name);
10952 else
10953 dw2_asm_output_data (1, DW_DSC_label,
10954 "%s: DW_DSC_label", name);
10955
10956 output_discr_value (&list->dw_discr_lower_bound, name);
10957 if (list->dw_discr_range)
10958 output_discr_value (&list->dw_discr_upper_bound, name);
10959 }
10960 break;
10961 }
10962
10963 default:
10964 gcc_unreachable ();
10965 }
10966 }
10967
10968 FOR_EACH_CHILD (die, c, output_die (c));
10969
10970 /* Add null byte to terminate sibling list. */
10971 if (die->die_child != NULL)
10972 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10973 (unsigned long) die->die_offset);
10974 }
10975
10976 /* Output the dwarf version number. */
10977
10978 static void
10979 output_dwarf_version ()
10980 {
10981 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10982 views in loclist. That will change eventually. */
10983 if (dwarf_version == 6)
10984 {
10985 static bool once;
10986 if (!once)
10987 {
10988 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10989 "incompatibilities");
10990 once = true;
10991 }
10992 dw2_asm_output_data (2, 5, "DWARF version number");
10993 }
10994 else
10995 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10996 }
10997
10998 /* Output the compilation unit that appears at the beginning of the
10999 .debug_info section, and precedes the DIE descriptions. */
11000
11001 static void
11002 output_compilation_unit_header (enum dwarf_unit_type ut)
11003 {
11004 if (!XCOFF_DEBUGGING_INFO)
11005 {
11006 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11007 dw2_asm_output_data (4, 0xffffffff,
11008 "Initial length escape value indicating 64-bit DWARF extension");
11009 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11010 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11011 "Length of Compilation Unit Info");
11012 }
11013
11014 output_dwarf_version ();
11015 if (dwarf_version >= 5)
11016 {
11017 const char *name;
11018 switch (ut)
11019 {
11020 case DW_UT_compile: name = "DW_UT_compile"; break;
11021 case DW_UT_type: name = "DW_UT_type"; break;
11022 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11023 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11024 default: gcc_unreachable ();
11025 }
11026 dw2_asm_output_data (1, ut, "%s", name);
11027 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11028 }
11029 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11030 debug_abbrev_section,
11031 "Offset Into Abbrev. Section");
11032 if (dwarf_version < 5)
11033 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11034 }
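
/* For reference, the unit header written by output_compilation_unit_header
   lays out as follows for 32-bit DWARF (DWARF_OFFSET_SIZE == 4); 64-bit
   DWARF prepends the 0xffffffff escape and widens the length and the
   abbrev offset to 8 bytes:

     DWARF 2-4                        DWARF 5
       unit_length          4 bytes     unit_length          4 bytes
       version              2           version              2
       debug_abbrev_offset  4           unit_type            1
       address_size         1           address_size         1
                                        debug_abbrev_offset  4  */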
11035
11036 /* Output the compilation unit DIE and its children. */
11037
11038 static void
11039 output_comp_unit (dw_die_ref die, int output_if_empty,
11040 const unsigned char *dwo_id)
11041 {
11042 const char *secname, *oldsym;
11043 char *tmp;
11044
11045   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11046 if (!output_if_empty && die->die_child == NULL)
11047 return;
11048
11049 /* Even if there are no children of this DIE, we must output the information
11050 about the compilation unit. Otherwise, on an empty translation unit, we
11051 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11052 will then complain when examining the file. First mark all the DIEs in
11053 this CU so we know which get local refs. */
11054 mark_dies (die);
11055
11056 external_ref_hash_type *extern_map = optimize_external_refs (die);
11057
11058   /* For now, optimize only the main CU; to optimize the rest we'd need
11059      to see all of them earlier.  Leave the rest for post-linking tools
11060      like DWZ.  */
11061 if (die == comp_unit_die ())
11062 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11063
11064 build_abbrev_table (die, extern_map);
11065
11066 optimize_abbrev_table ();
11067
11068 delete extern_map;
11069
11070 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11071 next_die_offset = (dwo_id
11072 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11073 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11074 calc_die_sizes (die);
11075
11076 oldsym = die->die_id.die_symbol;
11077 if (oldsym && die->comdat_type_p)
11078 {
11079 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11080
11081 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11082 secname = tmp;
11083 die->die_id.die_symbol = NULL;
11084 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11085 }
11086 else
11087 {
11088 switch_to_section (debug_info_section);
11089 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11090 info_section_emitted = true;
11091 }
11092
11093 /* For LTO cross unit DIE refs we want a symbol on the start of the
11094 debuginfo section, not on the CU DIE. */
11095 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11096 {
11097 /* ??? No way to get visibility assembled without a decl. */
11098 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11099 get_identifier (oldsym), char_type_node);
11100 TREE_PUBLIC (decl) = true;
11101 TREE_STATIC (decl) = true;
11102 DECL_ARTIFICIAL (decl) = true;
11103 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11104 DECL_VISIBILITY_SPECIFIED (decl) = true;
11105 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11106 #ifdef ASM_WEAKEN_LABEL
11107 /* We prefer a .weak because that handles duplicates from duplicate
11108 archive members in a graceful way. */
11109 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11110 #else
11111 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11112 #endif
11113 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11114 }
11115
11116 /* Output debugging information. */
11117 output_compilation_unit_header (dwo_id
11118 ? DW_UT_split_compile : DW_UT_compile);
11119 if (dwarf_version >= 5)
11120 {
11121 if (dwo_id != NULL)
11122 for (int i = 0; i < 8; i++)
11123 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11124 }
11125 output_die (die);
11126
11127 /* Leave the marks on the main CU, so we can check them in
11128 output_pubnames. */
11129 if (oldsym)
11130 {
11131 unmark_dies (die);
11132 die->die_id.die_symbol = oldsym;
11133 }
11134 }
11135
11136 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11137 and .debug_pubtypes. This is configured per-target, but can be
11138 overridden by the -gpubnames or -gno-pubnames options. */
11139
11140 static inline bool
11141 want_pubnames (void)
11142 {
11143 if (debug_info_level <= DINFO_LEVEL_TERSE
11144 /* Names and types go to the early debug part only. */
11145 || in_lto_p)
11146 return false;
11147 if (debug_generate_pub_sections != -1)
11148 return debug_generate_pub_sections;
11149 return targetm.want_debug_pub_sections;
11150 }
11151
11152 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11153
11154 static void
11155 add_AT_pubnames (dw_die_ref die)
11156 {
11157 if (want_pubnames ())
11158 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11159 }
11160
11161 /* Add a string attribute value to a skeleton DIE. */
11162
11163 static inline void
11164 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11165 const char *str)
11166 {
11167 dw_attr_node attr;
11168 struct indirect_string_node *node;
11169
11170 if (! skeleton_debug_str_hash)
11171 skeleton_debug_str_hash
11172 = hash_table<indirect_string_hasher>::create_ggc (10);
11173
11174 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11175 find_string_form (node);
11176 if (node->form == dwarf_FORM (DW_FORM_strx))
11177 node->form = DW_FORM_strp;
11178
11179 attr.dw_attr = attr_kind;
11180 attr.dw_attr_val.val_class = dw_val_class_str;
11181 attr.dw_attr_val.val_entry = NULL;
11182 attr.dw_attr_val.v.val_str = node;
11183 add_dwarf_attr (die, &attr);
11184 }
11185
11186 /* Helper function to generate top-level dies for skeleton debug_info and
11187 debug_types. */
11188
11189 static void
11190 add_top_level_skeleton_die_attrs (dw_die_ref die)
11191 {
11192 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11193 const char *comp_dir = comp_dir_string ();
11194
11195 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11196 if (comp_dir != NULL)
11197 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11198 add_AT_pubnames (die);
11199 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11200 }
11201
11202 /* Output skeleton debug sections that point to the dwo file. */
11203
11204 static void
11205 output_skeleton_debug_sections (dw_die_ref comp_unit,
11206 const unsigned char *dwo_id)
11207 {
11208 /* These attributes will be found in the full debug_info section. */
11209 remove_AT (comp_unit, DW_AT_producer);
11210 remove_AT (comp_unit, DW_AT_language);
11211
11212 switch_to_section (debug_skeleton_info_section);
11213 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11214
11215   /* Produce the skeleton compilation-unit header.  This one differs enough
11216      from a normal CU header that it's better not to call
11217      output_compilation_unit_header.  */
11218 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11219 dw2_asm_output_data (4, 0xffffffff,
11220 "Initial length escape value indicating 64-bit "
11221 "DWARF extension");
11222
11223 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11224 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11225 - DWARF_INITIAL_LENGTH_SIZE
11226 + size_of_die (comp_unit),
11227 "Length of Compilation Unit Info");
11228 output_dwarf_version ();
11229 if (dwarf_version >= 5)
11230 {
11231 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11232 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11233 }
11234 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11235 debug_skeleton_abbrev_section,
11236 "Offset Into Abbrev. Section");
11237 if (dwarf_version < 5)
11238 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11239 else
11240 for (int i = 0; i < 8; i++)
11241 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11242
11243 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11244 output_die (comp_unit);
11245
11246 /* Build the skeleton debug_abbrev section. */
11247 switch_to_section (debug_skeleton_abbrev_section);
11248 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11249
11250 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11251
11252 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11253 }
11254
11255 /* Output a comdat type unit DIE and its children. */
11256
11257 static void
11258 output_comdat_type_unit (comdat_type_node *node,
11259 bool early_lto_debug ATTRIBUTE_UNUSED)
11260 {
11261 const char *secname;
11262 char *tmp;
11263 int i;
11264 #if defined (OBJECT_FORMAT_ELF)
11265 tree comdat_key;
11266 #endif
11267
11268 /* First mark all the DIEs in this CU so we know which get local refs. */
11269 mark_dies (node->root_die);
11270
11271 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11272
11273 build_abbrev_table (node->root_die, extern_map);
11274
11275 delete extern_map;
11276 extern_map = NULL;
11277
11278 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11279 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11280 calc_die_sizes (node->root_die);
11281
11282 #if defined (OBJECT_FORMAT_ELF)
11283 if (dwarf_version >= 5)
11284 {
11285 if (!dwarf_split_debug_info)
11286 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11287 else
11288 secname = (early_lto_debug
11289 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11290 }
11291 else if (!dwarf_split_debug_info)
11292 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11293 else
11294 secname = (early_lto_debug
11295 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11296
11297 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11298 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11299 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11300 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11301 comdat_key = get_identifier (tmp);
11302 targetm.asm_out.named_section (secname,
11303 SECTION_DEBUG | SECTION_LINKONCE,
11304 comdat_key);
11305 #else
11306 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11307 sprintf (tmp, (dwarf_version >= 5
11308 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11309 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11310 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11311 secname = tmp;
11312 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11313 #endif
11314
11315 /* Output debugging information. */
11316 output_compilation_unit_header (dwarf_split_debug_info
11317 ? DW_UT_split_type : DW_UT_type);
11318 output_signature (node->signature, "Type Signature");
11319 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11320 "Offset to Type DIE");
11321 output_die (node->root_die);
11322
11323 unmark_dies (node->root_die);
11324 }
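
/* For reference, the type unit emitted above is a regular unit header
   (DW_UT_type or DW_UT_split_type in DWARF 5, the .debug_types header
   otherwise), followed immediately by the 8-byte type signature, a
   DWARF_OFFSET_SIZE offset from the start of the unit to the type DIE,
   and then the DIE tree rooted at node->root_die.  */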
11325
11326 /* Return the DWARF2/3 pubname associated with a decl. */
11327
11328 static const char *
11329 dwarf2_name (tree decl, int scope)
11330 {
11331 if (DECL_NAMELESS (decl))
11332 return NULL;
11333 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11334 }
11335
11336 /* Add a new entry to .debug_pubnames if appropriate. */
11337
11338 static void
11339 add_pubname_string (const char *str, dw_die_ref die)
11340 {
11341 pubname_entry e;
11342
11343 e.die = die;
11344 e.name = xstrdup (str);
11345 vec_safe_push (pubname_table, e);
11346 }
11347
11348 static void
11349 add_pubname (tree decl, dw_die_ref die)
11350 {
11351 if (!want_pubnames ())
11352 return;
11353
11354 /* Don't add items to the table when we expect that the consumer will have
11355 just read the enclosing die. For example, if the consumer is looking at a
11356 class_member, it will either be inside the class already, or will have just
11357 looked up the class to find the member. Either way, searching the class is
11358 faster than searching the index. */
11359 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11360 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11361 {
11362 const char *name = dwarf2_name (decl, 1);
11363
11364 if (name)
11365 add_pubname_string (name, die);
11366 }
11367 }
11368
11369 /* Add an enumerator to the pubnames section. */
11370
11371 static void
11372 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11373 {
11374 pubname_entry e;
11375
11376 gcc_assert (scope_name);
11377 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11378 e.die = die;
11379 vec_safe_push (pubname_table, e);
11380 }
11381
11382 /* Add a new entry to .debug_pubtypes if appropriate. */
11383
11384 static void
11385 add_pubtype (tree decl, dw_die_ref die)
11386 {
11387 pubname_entry e;
11388
11389 if (!want_pubnames ())
11390 return;
11391
11392 if ((TREE_PUBLIC (decl)
11393 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11394 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11395 {
11396 tree scope = NULL;
11397 const char *scope_name = "";
11398 const char *sep = is_cxx () ? "::" : ".";
11399 const char *name;
11400
11401 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11402 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11403 {
11404 scope_name = lang_hooks.dwarf_name (scope, 1);
11405 if (scope_name != NULL && scope_name[0] != '\0')
11406 scope_name = concat (scope_name, sep, NULL);
11407 else
11408 scope_name = "";
11409 }
11410
11411 if (TYPE_P (decl))
11412 name = type_tag (decl);
11413 else
11414 name = lang_hooks.dwarf_name (decl, 1);
11415
11416 /* If we don't have a name for the type, there's no point in adding
11417 it to the table. */
11418 if (name != NULL && name[0] != '\0')
11419 {
11420 e.die = die;
11421 e.name = concat (scope_name, name, NULL);
11422 vec_safe_push (pubtype_table, e);
11423 }
11424
11425 /* Although it might be more consistent to add the pubinfo for the
11426 enumerators as their dies are created, they should only be added if the
11427 enum type meets the criteria above. So rather than re-check the parent
11428 enum type whenever an enumerator die is created, just output them all
11429 here. This isn't protected by the name conditional because anonymous
11430 enums don't have names. */
11431 if (die->die_tag == DW_TAG_enumeration_type)
11432 {
11433 dw_die_ref c;
11434
11435 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11436 }
11437 }
11438 }
11439
11440 /* Output a single entry in the pubnames table. */
11441
11442 static void
11443 output_pubname (dw_offset die_offset, pubname_entry *entry)
11444 {
11445 dw_die_ref die = entry->die;
11446 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11447
11448 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11449
11450 if (debug_generate_pub_sections == 2)
11451 {
11452 /* This logic follows gdb's method for determining the value of the flag
11453 byte. */
11454 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11455 switch (die->die_tag)
11456 {
11457 case DW_TAG_typedef:
11458 case DW_TAG_base_type:
11459 case DW_TAG_subrange_type:
11460 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11461 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11462 break;
11463 case DW_TAG_enumerator:
11464 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11465 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11466 if (!is_cxx ())
11467 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11468 break;
11469 case DW_TAG_subprogram:
11470 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11471 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11472 if (!is_ada ())
11473 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11474 break;
11475 case DW_TAG_constant:
11476 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11477 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11478 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11479 break;
11480 case DW_TAG_variable:
11481 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11482 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11483 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11484 break;
11485 case DW_TAG_namespace:
11486 case DW_TAG_imported_declaration:
11487 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11488 break;
11489 case DW_TAG_class_type:
11490 case DW_TAG_interface_type:
11491 case DW_TAG_structure_type:
11492 case DW_TAG_union_type:
11493 case DW_TAG_enumeration_type:
11494 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11495 if (!is_cxx ())
11496 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11497 break;
11498 default:
11499 /* An unusual tag. Leave the flag-byte empty. */
11500 break;
11501 }
11502 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11503 "GDB-index flags");
11504 }
11505
11506 dw2_asm_output_nstring (entry->name, -1, "external name");
11507 }
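
/* For reference, each table entry emitted above consists of a
   DWARF_OFFSET_SIZE DIE offset, then (only when
   debug_generate_pub_sections == 2) one flag byte carrying the bits of
   the gdb-index symbol word above GDB_INDEX_CU_BITSIZE, and finally the
   NUL-terminated name.  The table as a whole is closed by a zero DIE
   offset in output_pubnames.  */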
11508
11509
11510 /* Output the public names table used to speed up access to externally
11511 visible names; or the public types table used to find type definitions. */
11512
11513 static void
11514 output_pubnames (vec<pubname_entry, va_gc> *names)
11515 {
11516 unsigned i;
11517 unsigned long pubnames_length = size_of_pubnames (names);
11518 pubname_entry *pub;
11519
11520 if (!XCOFF_DEBUGGING_INFO)
11521 {
11522 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11523 dw2_asm_output_data (4, 0xffffffff,
11524 "Initial length escape value indicating 64-bit DWARF extension");
11525 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11526 "Pub Info Length");
11527 }
11528
11529 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11530 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11531
11532 if (dwarf_split_debug_info)
11533 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11534 debug_skeleton_info_section,
11535 "Offset of Compilation Unit Info");
11536 else
11537 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11538 debug_info_section,
11539 "Offset of Compilation Unit Info");
11540 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11541 "Compilation Unit Length");
11542
11543 FOR_EACH_VEC_ELT (*names, i, pub)
11544 {
11545 if (include_pubname_in_output (names, pub))
11546 {
11547 dw_offset die_offset = pub->die->die_offset;
11548
11549 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11550 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11551 gcc_assert (pub->die->die_mark);
11552
11553 /* If we're putting types in their own .debug_types sections,
11554 the .debug_pubtypes table will still point to the compile
11555 unit (not the type unit), so we want to use the offset of
11556 the skeleton DIE (if there is one). */
11557 if (pub->die->comdat_type_p && names == pubtype_table)
11558 {
11559 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11560
11561 if (type_node != NULL)
11562 die_offset = (type_node->skeleton_die != NULL
11563 ? type_node->skeleton_die->die_offset
11564 : comp_unit_die ()->die_offset);
11565 }
11566
11567 output_pubname (die_offset, pub);
11568 }
11569 }
11570
11571 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11572 }
11573
11574 /* Output public names and types tables if necessary. */
11575
11576 static void
11577 output_pubtables (void)
11578 {
11579 if (!want_pubnames () || !info_section_emitted)
11580 return;
11581
11582 switch_to_section (debug_pubnames_section);
11583 output_pubnames (pubname_table);
11584 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11585 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11586 simply won't look for the section. */
11587 switch_to_section (debug_pubtypes_section);
11588 output_pubnames (pubtype_table);
11589 }
11590
11591
11592 /* Output the information that goes into the .debug_aranges table.
11593 Namely, define the beginning and ending address range of the
11594 text section generated for this compilation unit. */
11595
11596 static void
11597 output_aranges (void)
11598 {
11599 unsigned i;
11600 unsigned long aranges_length = size_of_aranges ();
11601
11602 if (!XCOFF_DEBUGGING_INFO)
11603 {
11604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11605 dw2_asm_output_data (4, 0xffffffff,
11606 "Initial length escape value indicating 64-bit DWARF extension");
11607 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11608 "Length of Address Ranges Info");
11609 }
11610
11611 /* Version number for aranges is still 2, even up to DWARF5. */
11612 dw2_asm_output_data (2, 2, "DWARF aranges version");
11613 if (dwarf_split_debug_info)
11614 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11615 debug_skeleton_info_section,
11616 "Offset of Compilation Unit Info");
11617 else
11618 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11619 debug_info_section,
11620 "Offset of Compilation Unit Info");
11621 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11622 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11623
11624 /* We need to align to twice the pointer size here. */
11625 if (DWARF_ARANGES_PAD_SIZE)
11626 {
11627       /* Pad using 2-byte words so that the padding is correct for any
11628 	 pointer size.  */
11629 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11630 2 * DWARF2_ADDR_SIZE);
11631 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11632 dw2_asm_output_data (2, 0, NULL);
11633 }
11634
11635   /* We must not output these entries if the sections were not used;
11636      otherwise the length will be 0 and the address may end up as 0
11637      if the section is discarded by ld --gc-sections, leaving an
11638      invalid (0, 0) entry that can be confused with the
11639      terminator.  */
11640 if (text_section_used)
11641 {
11642 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11643 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11644 text_section_label, "Length");
11645 }
11646 if (cold_text_section_used)
11647 {
11648 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11649 "Address");
11650 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11651 cold_text_section_label, "Length");
11652 }
11653
11654 if (have_multiple_function_sections)
11655 {
11656 unsigned fde_idx;
11657 dw_fde_ref fde;
11658
11659 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11660 {
11661 if (DECL_IGNORED_P (fde->decl))
11662 continue;
11663 if (!fde->in_std_section)
11664 {
11665 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11666 "Address");
11667 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11668 fde->dw_fde_begin, "Length");
11669 }
11670 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11671 {
11672 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11673 "Address");
11674 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11675 fde->dw_fde_second_begin, "Length");
11676 }
11677 }
11678 }
11679
11680 /* Output the terminator words. */
11681 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11682 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11683 }
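
/* For reference, after the header and its padding to a
   2 * DWARF2_ADDR_SIZE boundary, the section body emitted above is a
   list of (address, length) pairs, one per contributing text range,
   closed by a pair of zero words.  The addresses come from the section
   or FDE begin labels and the lengths are label deltas.  */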
11684
11685 /* Add a new entry to .debug_ranges. Return its index into
11686 ranges_table vector. */
11687
11688 static unsigned int
11689 add_ranges_num (int num, bool maybe_new_sec)
11690 {
11691 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11692 vec_safe_push (ranges_table, r);
11693 return vec_safe_length (ranges_table) - 1;
11694 }
11695
11696 /* Add a new entry to .debug_ranges corresponding to a block, or a
11697    range terminator if BLOCK is NULL.  MAYBE_NEW_SEC is true if
11698    this entry might be in a different section from the previous range.  */
11699
11700 static unsigned int
11701 add_ranges (const_tree block, bool maybe_new_sec)
11702 {
11703 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11704 }
11705
11706 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11707    chain, or a middle entry of a chain that will be directly referred to.  */
11708
11709 static void
11710 note_rnglist_head (unsigned int offset)
11711 {
11712 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11713 return;
11714 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11715 }
11716
11717 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11718 When using dwarf_split_debug_info, address attributes in dies destined
11719 for the final executable should be direct references--setting the
11720 parameter force_direct ensures this behavior. */
11721
11722 static void
11723 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11724 bool *added, bool force_direct)
11725 {
11726 unsigned int in_use = vec_safe_length (ranges_by_label);
11727 unsigned int offset;
11728 dw_ranges_by_label rbl = { begin, end };
11729 vec_safe_push (ranges_by_label, rbl);
11730 offset = add_ranges_num (-(int)in_use - 1, true);
11731 if (!*added)
11732 {
11733 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11734 *added = true;
11735 note_rnglist_head (offset);
11736 }
11737 }
11738
11739 /* Emit .debug_ranges section. */
11740
11741 static void
11742 output_ranges (void)
11743 {
11744 unsigned i;
11745 static const char *const start_fmt = "Offset %#x";
11746 const char *fmt = start_fmt;
11747 dw_ranges *r;
11748
11749 switch_to_section (debug_ranges_section);
11750 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11751 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11752 {
11753 int block_num = r->num;
11754
11755 if (block_num > 0)
11756 {
11757 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11758 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11759
11760 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11761 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11762
11763 /* If all code is in the text section, then the compilation
11764 unit base address defaults to DW_AT_low_pc, which is the
11765 base of the text section. */
11766 if (!have_multiple_function_sections)
11767 {
11768 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11769 text_section_label,
11770 fmt, i * 2 * DWARF2_ADDR_SIZE);
11771 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11772 text_section_label, NULL);
11773 }
11774
11775 /* Otherwise, the compilation unit base address is zero,
11776 which allows us to use absolute addresses, and not worry
11777 about whether the target supports cross-section
11778 arithmetic. */
11779 else
11780 {
11781 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11782 fmt, i * 2 * DWARF2_ADDR_SIZE);
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11784 }
11785
11786 fmt = NULL;
11787 }
11788
11789 /* Negative block_num stands for an index into ranges_by_label. */
11790 else if (block_num < 0)
11791 {
11792 int lab_idx = - block_num - 1;
11793
11794 if (!have_multiple_function_sections)
11795 {
11796 gcc_unreachable ();
11797 #if 0
11798 /* If we ever use add_ranges_by_labels () for a single
11799 function section, all we have to do is to take out
11800 the #if 0 above. */
11801 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11802 (*ranges_by_label)[lab_idx].begin,
11803 text_section_label,
11804 fmt, i * 2 * DWARF2_ADDR_SIZE);
11805 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11806 (*ranges_by_label)[lab_idx].end,
11807 text_section_label, NULL);
11808 #endif
11809 }
11810 else
11811 {
11812 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11813 (*ranges_by_label)[lab_idx].begin,
11814 fmt, i * 2 * DWARF2_ADDR_SIZE);
11815 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11816 (*ranges_by_label)[lab_idx].end,
11817 NULL);
11818 }
11819 }
11820 else
11821 {
11822 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11823 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11824 fmt = start_fmt;
11825 }
11826 }
11827 }
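
/* For reference, each list emitted above is a series of begin/end pairs,
   2 * DWARF2_ADDR_SIZE bytes per pair.  With a single text section the
   pairs are offsets from text_section_label (the CU base address given by
   DW_AT_low_pc); with multiple function sections they are absolute
   addresses against a zero CU base.  A (0, 0) pair ends each list.  */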
11828
11829 /* Non-zero if .debug_line_str should be used for .debug_line section
11830 strings or strings that are likely shareable with those. */
11831 #define DWARF5_USE_DEBUG_LINE_STR \
11832 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11833 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11834 /* FIXME: there is no .debug_line_str.dwo section, \
11835 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11836 && !dwarf_split_debug_info)
11837
11838 /* Assign .debug_rnglists indexes. */
11839
11840 static void
11841 index_rnglists (void)
11842 {
11843 unsigned i;
11844 dw_ranges *r;
11845
11846 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11847 if (r->label)
11848 r->idx = rnglist_idx++;
11849 }
11850
11851 /* Emit .debug_rnglists section. */
11852
11853 static void
11854 output_rnglists (unsigned generation)
11855 {
11856 unsigned i;
11857 dw_ranges *r;
11858 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11859 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11860 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11861
11862 switch_to_section (debug_ranges_section);
11863 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11864 /* There are up to 4 unique ranges labels per generation.
11865 See also init_sections_and_labels. */
11866 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11867 2 + generation * 4);
11868 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11869 3 + generation * 4);
11870 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11871 dw2_asm_output_data (4, 0xffffffff,
11872 "Initial length escape value indicating "
11873 "64-bit DWARF extension");
11874 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11875 "Length of Range Lists");
11876 ASM_OUTPUT_LABEL (asm_out_file, l1);
11877 output_dwarf_version ();
11878 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11879 dw2_asm_output_data (1, 0, "Segment Size");
11880 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11881 about relocation sizes and primarily care about the size of .debug*
11882 sections in linked shared libraries and executables, then
11883 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11884 into it are usually larger than just DW_FORM_sec_offset offsets
11885 into the .debug_rnglists section. */
11886 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11887 "Offset Entry Count");
11888 if (dwarf_split_debug_info)
11889 {
11890 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11891 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11892 if (r->label)
11893 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11894 ranges_base_label, NULL);
11895 }
11896
11897 const char *lab = "";
11898 unsigned int len = vec_safe_length (ranges_table);
11899 const char *base = NULL;
11900 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11901 {
11902 int block_num = r->num;
11903
11904 if (r->label)
11905 {
11906 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11907 lab = r->label;
11908 }
11909 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11910 base = NULL;
11911 if (block_num > 0)
11912 {
11913 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11914 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11915
11916 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11917 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11918
11919 if (HAVE_AS_LEB128)
11920 {
11921 /* If all code is in the text section, then the compilation
11922 unit base address defaults to DW_AT_low_pc, which is the
11923 base of the text section. */
11924 if (!have_multiple_function_sections)
11925 {
11926 dw2_asm_output_data (1, DW_RLE_offset_pair,
11927 "DW_RLE_offset_pair (%s)", lab);
11928 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11929 "Range begin address (%s)", lab);
11930 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11931 "Range end address (%s)", lab);
11932 continue;
11933 }
11934 if (base == NULL)
11935 {
11936 dw_ranges *r2 = NULL;
11937 if (i < len - 1)
11938 r2 = &(*ranges_table)[i + 1];
11939 if (r2
11940 && r2->num != 0
11941 && r2->label == NULL
11942 && !r2->maybe_new_sec)
11943 {
11944 dw2_asm_output_data (1, DW_RLE_base_address,
11945 "DW_RLE_base_address (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11947 "Base address (%s)", lab);
11948 strcpy (basebuf, blabel);
11949 base = basebuf;
11950 }
11951 }
11952 if (base)
11953 {
11954 dw2_asm_output_data (1, DW_RLE_offset_pair,
11955 "DW_RLE_offset_pair (%s)", lab);
11956 dw2_asm_output_delta_uleb128 (blabel, base,
11957 "Range begin address (%s)", lab);
11958 dw2_asm_output_delta_uleb128 (elabel, base,
11959 "Range end address (%s)", lab);
11960 continue;
11961 }
11962 dw2_asm_output_data (1, DW_RLE_start_length,
11963 "DW_RLE_start_length (%s)", lab);
11964 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11965 "Range begin address (%s)", lab);
11966 dw2_asm_output_delta_uleb128 (elabel, blabel,
11967 "Range length (%s)", lab);
11968 }
11969 else
11970 {
11971 dw2_asm_output_data (1, DW_RLE_start_end,
11972 "DW_RLE_start_end (%s)", lab);
11973 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11974 "Range begin address (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11976 "Range end address (%s)", lab);
11977 }
11978 }
11979
11980 /* Negative block_num stands for an index into ranges_by_label. */
11981 else if (block_num < 0)
11982 {
11983 int lab_idx = - block_num - 1;
11984 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11985 const char *elabel = (*ranges_by_label)[lab_idx].end;
11986
11987 if (!have_multiple_function_sections)
11988 gcc_unreachable ();
11989 if (HAVE_AS_LEB128)
11990 {
11991 dw2_asm_output_data (1, DW_RLE_start_length,
11992 "DW_RLE_start_length (%s)", lab);
11993 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11994 "Range begin address (%s)", lab);
11995 dw2_asm_output_delta_uleb128 (elabel, blabel,
11996 "Range length (%s)", lab);
11997 }
11998 else
11999 {
12000 dw2_asm_output_data (1, DW_RLE_start_end,
12001 "DW_RLE_start_end (%s)", lab);
12002 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12003 "Range begin address (%s)", lab);
12004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12005 "Range end address (%s)", lab);
12006 }
12007 }
12008 else
12009 dw2_asm_output_data (1, DW_RLE_end_of_list,
12010 "DW_RLE_end_of_list (%s)", lab);
12011 }
12012 ASM_OUTPUT_LABEL (asm_out_file, l2);
12013 }
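
/* For reference, the loop above emits these range list entry kinds:
   DW_RLE_offset_pair (two uleb128 offsets from the current base address
   or from text_section_label), DW_RLE_base_address (one address that
   establishes the base), DW_RLE_start_length (an address plus a uleb128
   length), DW_RLE_start_end (two addresses, used when the assembler has
   no LEB128 support), and DW_RLE_end_of_list closing each list.  */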
12014
12015 /* Data structure containing information about input files. */
12016 struct file_info
12017 {
12018 const char *path; /* Complete file name. */
12019 const char *fname; /* File name part. */
12020 int length; /* Length of entire string. */
12021 struct dwarf_file_data * file_idx; /* Index in input file table. */
12022 int dir_idx; /* Index in directory table. */
12023 };
12024
12025 /* Data structure containing information about directories with source
12026 files. */
12027 struct dir_info
12028 {
12029 const char *path; /* Path including directory name. */
12030 int length; /* Path length. */
12031 int prefix; /* Index of directory entry which is a prefix. */
12032 int count; /* Number of files in this directory. */
12033 int dir_idx; /* Index of directory used as base. */
12034 };
12035
12036 /* Callback function for file_info comparison. We sort by looking at
12037 the directories in the path. */
12038
12039 static int
12040 file_info_cmp (const void *p1, const void *p2)
12041 {
12042 const struct file_info *const s1 = (const struct file_info *) p1;
12043 const struct file_info *const s2 = (const struct file_info *) p2;
12044 const unsigned char *cp1;
12045 const unsigned char *cp2;
12046
12047   /* Take care of file names without directories.  We need to make sure we
12048      return consistent values to qsort, since some implementations will get
12049      confused if we return the same value when identical operands are passed
12050      in opposite orders.  So if neither has a directory, return 0; otherwise
12051      return 1 or -1 depending on which one has the directory.  We want the
12052      one with the directory to sort after the one without, so all the
12053      directory-less files come first (normally only the compilation unit file).  */
12054 if ((s1->path == s1->fname || s2->path == s2->fname))
12055 return (s2->path == s2->fname) - (s1->path == s1->fname);
12056
12057 cp1 = (const unsigned char *) s1->path;
12058 cp2 = (const unsigned char *) s2->path;
12059
12060 while (1)
12061 {
12062 ++cp1;
12063 ++cp2;
12064       /* Reached the end of either path's directory part?  If so, handle as
12065 	 above, but now we want longer directory prefixes before shorter ones.  */
12066 if ((cp1 == (const unsigned char *) s1->fname)
12067 || (cp2 == (const unsigned char *) s2->fname))
12068 return ((cp1 == (const unsigned char *) s1->fname)
12069 - (cp2 == (const unsigned char *) s2->fname));
12070
12071 /* Character of current path component the same? */
12072 else if (*cp1 != *cp2)
12073 return *cp1 - *cp2;
12074 }
12075 }
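
/* For illustration, a hypothetical set of paths would sort as

     main.c  <  include/sys/b.h  <  include/a.h  <  src/c.c

   directory-less names come first, then paths compare by the bytes of
   their directory part, and a longer directory prefix sorts before a
   shorter one that it extends.  */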
12076
12077 struct file_name_acquire_data
12078 {
12079 struct file_info *files;
12080 int used_files;
12081 int max_files;
12082 };
12083
12084 /* Traversal function for the hash table. */
12085
12086 int
12087 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12088 {
12089 struct dwarf_file_data *d = *slot;
12090 struct file_info *fi;
12091 const char *f;
12092
12093 gcc_assert (fnad->max_files >= d->emitted_number);
12094
12095 if (! d->emitted_number)
12096 return 1;
12097
12098 gcc_assert (fnad->max_files != fnad->used_files);
12099
12100 fi = fnad->files + fnad->used_files++;
12101
12102 /* Skip all leading "./". */
12103 f = d->filename;
12104 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12105 f += 2;
12106
12107 /* Create a new array entry. */
12108 fi->path = f;
12109 fi->length = strlen (f);
12110 fi->file_idx = d;
12111
12112 /* Search for the file name part. */
12113 f = strrchr (f, DIR_SEPARATOR);
12114 #if defined (DIR_SEPARATOR_2)
12115 {
12116 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12117
12118 if (g != NULL)
12119 {
12120 if (f == NULL || f < g)
12121 f = g;
12122 }
12123 }
12124 #endif
12125
12126 fi->fname = f == NULL ? fi->path : f + 1;
12127 return 1;
12128 }
12129
12130 /* Helper function for output_file_names.  Emit a FORM-encoded
12131    string STR, with assembly comment prefix ENTRY_KIND and
12132    index IDX.  */
12133
12134 static void
12135 output_line_string (enum dwarf_form form, const char *str,
12136 const char *entry_kind, unsigned int idx)
12137 {
12138 switch (form)
12139 {
12140 case DW_FORM_string:
12141 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12142 break;
12143 case DW_FORM_line_strp:
12144 if (!debug_line_str_hash)
12145 debug_line_str_hash
12146 = hash_table<indirect_string_hasher>::create_ggc (10);
12147
12148 struct indirect_string_node *node;
12149 node = find_AT_string_in_table (str, debug_line_str_hash);
12150 set_indirect_string (node);
12151 node->form = form;
12152 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12153 debug_line_str_section, "%s: %#x: \"%s\"",
12154 entry_kind, 0, node->str);
12155 break;
12156 default:
12157 gcc_unreachable ();
12158 }
12159 }
12160
12161 /* Output the directory table and the file name table. We try to minimize
12162 the total amount of memory needed. A heuristic is used to avoid large
12163 slowdowns with many input files. */
12164
12165 static void
12166 output_file_names (void)
12167 {
12168 struct file_name_acquire_data fnad;
12169 int numfiles;
12170 struct file_info *files;
12171 struct dir_info *dirs;
12172 int *saved;
12173 int *savehere;
12174 int *backmap;
12175 int ndirs;
12176 int idx_offset;
12177 int i;
12178
12179 if (!last_emitted_file)
12180 {
12181 if (dwarf_version >= 5)
12182 {
12183 dw2_asm_output_data (1, 0, "Directory entry format count");
12184 dw2_asm_output_data_uleb128 (0, "Directories count");
12185 dw2_asm_output_data (1, 0, "File name entry format count");
12186 dw2_asm_output_data_uleb128 (0, "File names count");
12187 }
12188 else
12189 {
12190 dw2_asm_output_data (1, 0, "End directory table");
12191 dw2_asm_output_data (1, 0, "End file name table");
12192 }
12193 return;
12194 }
12195
12196 numfiles = last_emitted_file->emitted_number;
12197
12198 /* Allocate the various arrays we need. */
12199 files = XALLOCAVEC (struct file_info, numfiles);
12200 dirs = XALLOCAVEC (struct dir_info, numfiles);
12201
12202 fnad.files = files;
12203 fnad.used_files = 0;
12204 fnad.max_files = numfiles;
12205 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12206 gcc_assert (fnad.used_files == fnad.max_files);
12207
12208 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12209
12210 /* Find all the different directories used. */
12211 dirs[0].path = files[0].path;
12212 dirs[0].length = files[0].fname - files[0].path;
12213 dirs[0].prefix = -1;
12214 dirs[0].count = 1;
12215 dirs[0].dir_idx = 0;
12216 files[0].dir_idx = 0;
12217 ndirs = 1;
12218
12219 for (i = 1; i < numfiles; i++)
12220 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12221 && memcmp (dirs[ndirs - 1].path, files[i].path,
12222 dirs[ndirs - 1].length) == 0)
12223 {
12224 /* Same directory as last entry. */
12225 files[i].dir_idx = ndirs - 1;
12226 ++dirs[ndirs - 1].count;
12227 }
12228 else
12229 {
12230 int j;
12231
12232 /* This is a new directory. */
12233 dirs[ndirs].path = files[i].path;
12234 dirs[ndirs].length = files[i].fname - files[i].path;
12235 dirs[ndirs].count = 1;
12236 dirs[ndirs].dir_idx = ndirs;
12237 files[i].dir_idx = ndirs;
12238
12239 /* Search for a prefix. */
12240 dirs[ndirs].prefix = -1;
12241 for (j = 0; j < ndirs; j++)
12242 if (dirs[j].length < dirs[ndirs].length
12243 && dirs[j].length > 1
12244 && (dirs[ndirs].prefix == -1
12245 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12246 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12247 dirs[ndirs].prefix = j;
12248
12249 ++ndirs;
12250 }
12251
12252   /* Now to the actual work.  We have to find a subset of the directories
12253      which allows expressing the file names using references to the directory
12254      table with the fewest characters.  We do not do an exhaustive search
12255      where we would have to check every combination of every single possible
12256      prefix.  Instead we use a heuristic which provides nearly optimal results
12257      in most cases and is never far off.  */
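  /* A hypothetical example of the heuristic below: with dirs[i] =
     "/usr/include/" (length 13, 4 files) and dirs[j] = "/usr/include/sys/"
     (3 files, with i in its prefix chain), adopting dirs[i] as a base
     saves 13 characters for each of the 7 file names, 91 in total, which
     easily exceeds the roughly 13 + 1 bytes it costs to emit the
     directory string itself, so both directories get dir_idx = i.  */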
12258 saved = XALLOCAVEC (int, ndirs);
12259 savehere = XALLOCAVEC (int, ndirs);
12260
12261 memset (saved, '\0', ndirs * sizeof (saved[0]));
12262 for (i = 0; i < ndirs; i++)
12263 {
12264 int j;
12265 int total;
12266
12267 /* We can always save some space for the current directory. But this
12268 does not mean it will be enough to justify adding the directory. */
12269 savehere[i] = dirs[i].length;
12270 total = (savehere[i] - saved[i]) * dirs[i].count;
12271
12272 for (j = i + 1; j < ndirs; j++)
12273 {
12274 savehere[j] = 0;
12275 if (saved[j] < dirs[i].length)
12276 {
12277 /* Determine whether the dirs[i] path is a prefix of the
12278 dirs[j] path. */
12279 int k;
12280
12281 k = dirs[j].prefix;
12282 while (k != -1 && k != (int) i)
12283 k = dirs[k].prefix;
12284
12285 if (k == (int) i)
12286 {
12287 /* Yes it is. We can possibly save some memory by
12288 writing the filenames in dirs[j] relative to
12289 dirs[i]. */
12290 savehere[j] = dirs[i].length;
12291 total += (savehere[j] - saved[j]) * dirs[j].count;
12292 }
12293 }
12294 }
12295
12296 /* Check whether we can save enough to justify adding the dirs[i]
12297 directory. */
12298 if (total > dirs[i].length + 1)
12299 {
12300 /* It's worthwhile adding. */
12301 for (j = i; j < ndirs; j++)
12302 if (savehere[j] > 0)
12303 {
12304 /* Remember how much we saved for this directory so far. */
12305 saved[j] = savehere[j];
12306
12307 /* Remember the prefix directory. */
12308 dirs[j].dir_idx = i;
12309 }
12310 }
12311 }
12312
12313 /* Emit the directory name table. */
12314 idx_offset = dirs[0].length > 0 ? 1 : 0;
12315 enum dwarf_form str_form = DW_FORM_string;
12316 enum dwarf_form idx_form = DW_FORM_udata;
12317 if (dwarf_version >= 5)
12318 {
12319 const char *comp_dir = comp_dir_string ();
12320 if (comp_dir == NULL)
12321 comp_dir = "";
12322 dw2_asm_output_data (1, 1, "Directory entry format count");
12323 if (DWARF5_USE_DEBUG_LINE_STR)
12324 str_form = DW_FORM_line_strp;
12325 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12326 dw2_asm_output_data_uleb128 (str_form, "%s",
12327 get_DW_FORM_name (str_form));
12328 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12329 if (str_form == DW_FORM_string)
12330 {
12331 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12332 for (i = 1 - idx_offset; i < ndirs; i++)
12333 dw2_asm_output_nstring (dirs[i].path,
12334 dirs[i].length
12335 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12336 "Directory Entry: %#x", i + idx_offset);
12337 }
12338 else
12339 {
12340 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12341 for (i = 1 - idx_offset; i < ndirs; i++)
12342 {
12343 const char *str
12344 = ggc_alloc_string (dirs[i].path,
12345 dirs[i].length
12346 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12347 output_line_string (str_form, str, "Directory Entry",
12348 (unsigned) i + idx_offset);
12349 }
12350 }
12351 }
12352 else
12353 {
12354 for (i = 1 - idx_offset; i < ndirs; i++)
12355 dw2_asm_output_nstring (dirs[i].path,
12356 dirs[i].length
12357 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12358 "Directory Entry: %#x", i + idx_offset);
12359
12360 dw2_asm_output_data (1, 0, "End directory table");
12361 }
12362
12363 /* We have to emit them in the order of emitted_number since that's
12364 used in the debug info generation. To do this efficiently we
12365 generate a back-mapping of the indices first. */
12366 backmap = XALLOCAVEC (int, numfiles);
12367 for (i = 0; i < numfiles; i++)
12368 backmap[files[i].file_idx->emitted_number - 1] = i;
12369
12370 if (dwarf_version >= 5)
12371 {
12372 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12373 if (filename0 == NULL)
12374 filename0 = "";
12375       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12376 	 DW_FORM_data2.  Choose one based on the number of directories
12377 	 and how much space they would occupy in each encoding.
12378 	 If we have at most 256 directories, all indexes fit into
12379 	 a single byte, so DW_FORM_data1 is most compact (with at
12380 	 most 128 directories, DW_FORM_udata would be just as
12381 	 compact, but no shorter and slower to decode).  */
12382 if (ndirs + idx_offset <= 256)
12383 idx_form = DW_FORM_data1;
12384       /* If there are more than 65536 directories, we have to use
12385 	 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12386 	 Otherwise, compute how much space all the indexes would occupy
12387 	 if they used DW_FORM_udata (sum), compare that to how large the
12388 	 DW_FORM_data2 encoding would be, and pick the more efficient one.  */
12389 else if (ndirs + idx_offset <= 65536)
12390 {
12391 unsigned HOST_WIDE_INT sum = 1;
12392 for (i = 0; i < numfiles; i++)
12393 {
12394 int file_idx = backmap[i];
12395 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12396 sum += size_of_uleb128 (dir_idx);
12397 }
12398 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12399 idx_form = DW_FORM_data2;
12400 }
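      /* A hypothetical example of the choice above: with, say, 20000
	 directories and 1000 files most of which live in directories
	 with index >= 16384, each such uleb128 index costs three bytes,
	 so sum approaches 3001 and exceeds 2 * (numfiles + 1) = 2002,
	 making DW_FORM_data2 the smaller encoding; if the uleb128 total
	 stays below that threshold, DW_FORM_udata is kept.  */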
12401 #ifdef VMS_DEBUGGING_INFO
12402 dw2_asm_output_data (1, 4, "File name entry format count");
12403 #else
12404 dw2_asm_output_data (1, 2, "File name entry format count");
12405 #endif
12406 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12407 dw2_asm_output_data_uleb128 (str_form, "%s",
12408 get_DW_FORM_name (str_form));
12409 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12410 "DW_LNCT_directory_index");
12411 dw2_asm_output_data_uleb128 (idx_form, "%s",
12412 get_DW_FORM_name (idx_form));
12413 #ifdef VMS_DEBUGGING_INFO
12414 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12415 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12416 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12417 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12418 #endif
12419 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12420
12421 output_line_string (str_form, filename0, "File Entry", 0);
12422
12423 /* Include directory index. */
12424 if (idx_form != DW_FORM_udata)
12425 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12426 0, NULL);
12427 else
12428 dw2_asm_output_data_uleb128 (0, NULL);
12429
12430 #ifdef VMS_DEBUGGING_INFO
12431 dw2_asm_output_data_uleb128 (0, NULL);
12432 dw2_asm_output_data_uleb128 (0, NULL);
12433 #endif
12434 }
12435
12436 /* Now write all the file names. */
12437 for (i = 0; i < numfiles; i++)
12438 {
12439 int file_idx = backmap[i];
12440 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12441
12442 #ifdef VMS_DEBUGGING_INFO
12443 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12444
12445 /* Setting these fields can lead to debugger miscomparisons,
12446 but VMS Debug requires them to be set correctly. */
12447
12448 int ver;
12449 long long cdt;
12450 long siz;
12451 int maxfilelen = (strlen (files[file_idx].path)
12452 + dirs[dir_idx].length
12453 + MAX_VMS_VERSION_LEN + 1);
12454 char *filebuf = XALLOCAVEC (char, maxfilelen);
12455
12456 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12457 snprintf (filebuf, maxfilelen, "%s;%d",
12458 files[file_idx].path + dirs[dir_idx].length, ver);
12459
12460 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12461
12462 /* Include directory index. */
12463 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12464 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12465 dir_idx + idx_offset, NULL);
12466 else
12467 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12468
12469 /* Modification time. */
12470 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12471 &cdt, 0, 0, 0) == 0)
12472 ? cdt : 0, NULL);
12473
12474 /* File length in bytes. */
12475 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12476 0, &siz, 0, 0) == 0)
12477 ? siz : 0, NULL);
12478 #else
12479 output_line_string (str_form,
12480 files[file_idx].path + dirs[dir_idx].length,
12481 "File Entry", (unsigned) i + 1);
12482
12483 /* Include directory index. */
12484 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12485 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12486 dir_idx + idx_offset, NULL);
12487 else
12488 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12489
12490 if (dwarf_version >= 5)
12491 continue;
12492
12493 /* Modification time. */
12494 dw2_asm_output_data_uleb128 (0, NULL);
12495
12496 /* File length in bytes. */
12497 dw2_asm_output_data_uleb128 (0, NULL);
12498 #endif /* VMS_DEBUGGING_INFO */
12499 }
12500
12501 if (dwarf_version < 5)
12502 dw2_asm_output_data (1, 0, "End file name table");
12503 }
12504
12505
12506 /* Output one line number table into the .debug_line section. */
12507
12508 static void
12509 output_one_line_info_table (dw_line_info_table *table)
12510 {
12511 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12512 unsigned int current_line = 1;
12513 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12514 dw_line_info_entry *ent, *prev_addr;
12515 size_t i;
12516 unsigned int view;
12517
12518 view = 0;
12519
12520 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12521 {
12522 switch (ent->opcode)
12523 {
12524 case LI_set_address:
12525 /* ??? Unfortunately, we have little choice here currently, and
12526 must always use the most general form. GCC does not know the
12527 address delta itself, so we can't use DW_LNS_advance_pc. Many
12528 ports do have length attributes which will give an upper bound
12529 on the address range. We could perhaps use length attributes
12530 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12531 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12532
12533 view = 0;
12534
          /* This can handle any delta.  This takes
             3 + DWARF2_ADDR_SIZE bytes.  */
12537 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12538 debug_variable_location_views
12539 ? ", reset view to 0" : "");
12540 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12541 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12542 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12543
12544 prev_addr = ent;
12545 break;
12546
12547 case LI_adv_address:
12548 {
12549 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12550 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12551 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12552
12553 view++;
12554
            dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
                                 "fixed advance PC, increment view to %i",
                                 view);
12556 dw2_asm_output_delta (2, line_label, prev_label,
12557 "from %s to %s", prev_label, line_label);
12558
12559 prev_addr = ent;
12560 break;
12561 }
12562
12563 case LI_set_line:
12564 if (ent->val == current_line)
12565 {
12566 /* We still need to start a new row, so output a copy insn. */
12567 dw2_asm_output_data (1, DW_LNS_copy,
12568 "copy line %u", current_line);
12569 }
12570 else
12571 {
12572 int line_offset = ent->val - current_line;
12573 int line_delta = line_offset - DWARF_LINE_BASE;
12574
12575 current_line = ent->val;
12576 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12577 {
12578 /* This can handle deltas from -10 to 234, using the current
12579 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12580 This takes 1 byte. */
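                  For instance, a delta of +1 from the previous line is
                  emitted as the single opcode byte
                  DWARF_LINE_OPCODE_BASE + 1 - DWARF_LINE_BASE, and the
                  special opcode also appends a new row to the line
                  table.  */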
12581 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12582 "line %u", current_line);
12583 }
12584 else
12585 {
                  /* This can handle any delta.  This takes at least 3 bytes,
                     depending on the value being encoded.  */
12588 dw2_asm_output_data (1, DW_LNS_advance_line,
12589 "advance to line %u", current_line);
12590 dw2_asm_output_data_sleb128 (line_offset, NULL);
12591 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12592 }
12593 }
12594 break;
12595
12596 case LI_set_file:
12597 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12598 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12599 break;
12600
12601 case LI_set_column:
12602 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12603 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12604 break;
12605
12606 case LI_negate_stmt:
12607 current_is_stmt = !current_is_stmt;
12608 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12609 "is_stmt %d", current_is_stmt);
12610 break;
12611
12612 case LI_set_prologue_end:
12613 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12614 "set prologue end");
12615 break;
12616
12617 case LI_set_epilogue_begin:
12618 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12619 "set epilogue begin");
12620 break;
12621
12622 case LI_set_discriminator:
12623 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12624 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12625 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12626 dw2_asm_output_data_uleb128 (ent->val, NULL);
12627 break;
12628 }
12629 }
12630
12631 /* Emit debug info for the address of the end of the table. */
12632 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12633 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12634 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12635 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12636
12637 dw2_asm_output_data (1, 0, "end sequence");
12638 dw2_asm_output_data_uleb128 (1, NULL);
12639 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
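  /* (Reminder: an extended opcode is encoded as a zero byte, a ULEB128
     size and then the sub-opcode plus its operands; DW_LNE_end_sequence
     takes no operands, hence the size of 1 just above.)  */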
12640 }
12641
12642 /* Output the source line number correspondence information. This
12643 information goes into the .debug_line section. */
12644
12645 static void
12646 output_line_info (bool prologue_only)
12647 {
12648 static unsigned int generation;
12649 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12650 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12651 bool saw_one = false;
12652 int opc;
12653
12654 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12655 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12656 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12657 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12658
12659 if (!XCOFF_DEBUGGING_INFO)
12660 {
12661 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12662 dw2_asm_output_data (4, 0xffffffff,
12663 "Initial length escape value indicating 64-bit DWARF extension");
12664 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12665 "Length of Source Line Info");
12666 }
12667
12668 ASM_OUTPUT_LABEL (asm_out_file, l1);
12669
12670 output_dwarf_version ();
12671 if (dwarf_version >= 5)
12672 {
12673 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12674 dw2_asm_output_data (1, 0, "Segment Size");
12675 }
12676 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12677 ASM_OUTPUT_LABEL (asm_out_file, p1);
12678
12679 /* Define the architecture-dependent minimum instruction length (in bytes).
12680 In this implementation of DWARF, this field is used for information
12681 purposes only. Since GCC generates assembly language, we have no
12682 a priori knowledge of how many instruction bytes are generated for each
12683 source line, and therefore can use only the DW_LNE_set_address and
12684 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12685 this as '1', which is "correct enough" for all architectures,
12686 and don't let the target override. */
12687 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12688
12689 if (dwarf_version >= 4)
12690 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12691 "Maximum Operations Per Instruction");
12692 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12693 "Default is_stmt_start flag");
12694 dw2_asm_output_data (1, DWARF_LINE_BASE,
12695 "Line Base Value (Special Opcodes)");
12696 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12697 "Line Range Value (Special Opcodes)");
12698 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12699 "Special Opcode Base");
12700
12701 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12702 {
12703 int n_op_args;
12704 switch (opc)
12705 {
12706 case DW_LNS_advance_pc:
12707 case DW_LNS_advance_line:
12708 case DW_LNS_set_file:
12709 case DW_LNS_set_column:
12710 case DW_LNS_fixed_advance_pc:
12711 case DW_LNS_set_isa:
12712 n_op_args = 1;
12713 break;
12714 default:
12715 n_op_args = 0;
12716 break;
12717 }
12718
12719 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12720 opc, n_op_args);
12721 }
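  /* The table just emitted is the standard_opcode_lengths array of the
     line number program header: it records how many ULEB128 operands each
     standard opcode takes, so consumers can skip over opcodes they do not
     implement.  */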
12722
12723 /* Write out the information about the files we use. */
12724 output_file_names ();
12725 ASM_OUTPUT_LABEL (asm_out_file, p2);
12726 if (prologue_only)
12727 {
12728 /* Output the marker for the end of the line number info. */
12729 ASM_OUTPUT_LABEL (asm_out_file, l2);
12730 return;
12731 }
12732
12733 if (separate_line_info)
12734 {
12735 dw_line_info_table *table;
12736 size_t i;
12737
12738 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12739 if (table->in_use)
12740 {
12741 output_one_line_info_table (table);
12742 saw_one = true;
12743 }
12744 }
12745 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12746 {
12747 output_one_line_info_table (cold_text_section_line_info);
12748 saw_one = true;
12749 }
12750
12751 /* ??? Some Darwin linkers crash on a .debug_line section with no
12752 sequences. Further, merely a DW_LNE_end_sequence entry is not
12753 sufficient -- the address column must also be initialized.
12754 Make sure to output at least one set_address/end_sequence pair,
12755 choosing .text since that section is always present. */
12756 if (text_section_line_info->in_use || !saw_one)
12757 output_one_line_info_table (text_section_line_info);
12758
12759 /* Output the marker for the end of the line number info. */
12760 ASM_OUTPUT_LABEL (asm_out_file, l2);
12761 }
12762 \f
12763 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12764
12765 static inline bool
12766 need_endianity_attribute_p (bool reverse)
12767 {
12768 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12769 }
12770
12771 /* Given a pointer to a tree node for some base type, return a pointer to
12772 a DIE that describes the given type. REVERSE is true if the type is
12773 to be interpreted in the reverse storage order wrt the target order.
12774
12775 This routine must only be called for GCC type nodes that correspond to
12776 Dwarf base (fundamental) types. */
12777
12778 static dw_die_ref
12779 base_type_die (tree type, bool reverse)
12780 {
12781 dw_die_ref base_type_result;
12782 enum dwarf_type encoding;
12783 bool fpt_used = false;
12784 struct fixed_point_type_info fpt_info;
12785 tree type_bias = NULL_TREE;
12786
12787 /* If this is a subtype that should not be emitted as a subrange type,
12788 use the base type. See subrange_type_for_debug_p. */
12789 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12790 type = TREE_TYPE (type);
12791
12792 switch (TREE_CODE (type))
12793 {
12794 case INTEGER_TYPE:
12795 if ((dwarf_version >= 4 || !dwarf_strict)
12796 && TYPE_NAME (type)
12797 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12798 && DECL_IS_BUILTIN (TYPE_NAME (type))
12799 && DECL_NAME (TYPE_NAME (type)))
12800 {
12801 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12802 if (strcmp (name, "char16_t") == 0
12803 || strcmp (name, "char32_t") == 0)
12804 {
12805 encoding = DW_ATE_UTF;
12806 break;
12807 }
12808 }
12809 if ((dwarf_version >= 3 || !dwarf_strict)
12810 && lang_hooks.types.get_fixed_point_type_info)
12811 {
12812 memset (&fpt_info, 0, sizeof (fpt_info));
12813 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12814 {
12815 fpt_used = true;
12816 encoding = ((TYPE_UNSIGNED (type))
12817 ? DW_ATE_unsigned_fixed
12818 : DW_ATE_signed_fixed);
12819 break;
12820 }
12821 }
12822 if (TYPE_STRING_FLAG (type))
12823 {
12824 if (TYPE_UNSIGNED (type))
12825 encoding = DW_ATE_unsigned_char;
12826 else
12827 encoding = DW_ATE_signed_char;
12828 }
12829 else if (TYPE_UNSIGNED (type))
12830 encoding = DW_ATE_unsigned;
12831 else
12832 encoding = DW_ATE_signed;
12833
12834 if (!dwarf_strict
12835 && lang_hooks.types.get_type_bias)
12836 type_bias = lang_hooks.types.get_type_bias (type);
12837 break;
12838
12839 case REAL_TYPE:
12840 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12841 {
12842 if (dwarf_version >= 3 || !dwarf_strict)
12843 encoding = DW_ATE_decimal_float;
12844 else
12845 encoding = DW_ATE_lo_user;
12846 }
12847 else
12848 encoding = DW_ATE_float;
12849 break;
12850
12851 case FIXED_POINT_TYPE:
12852 if (!(dwarf_version >= 3 || !dwarf_strict))
12853 encoding = DW_ATE_lo_user;
12854 else if (TYPE_UNSIGNED (type))
12855 encoding = DW_ATE_unsigned_fixed;
12856 else
12857 encoding = DW_ATE_signed_fixed;
12858 break;
12859
12860 /* Dwarf2 doesn't know anything about complex ints, so use
12861 a user defined type for it. */
12862 case COMPLEX_TYPE:
12863 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12864 encoding = DW_ATE_complex_float;
12865 else
12866 encoding = DW_ATE_lo_user;
12867 break;
12868
12869 case BOOLEAN_TYPE:
12870 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12871 encoding = DW_ATE_boolean;
12872 break;
12873
12874 default:
12875 /* No other TREE_CODEs are Dwarf fundamental types. */
12876 gcc_unreachable ();
12877 }
12878
12879 base_type_result = new_die_raw (DW_TAG_base_type);
12880
12881 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12882 int_size_in_bytes (type));
12883 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12884
12885 if (need_endianity_attribute_p (reverse))
12886 add_AT_unsigned (base_type_result, DW_AT_endianity,
12887 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12888
12889 add_alignment_attribute (base_type_result, type);
12890
12891 if (fpt_used)
12892 {
12893 switch (fpt_info.scale_factor_kind)
12894 {
12895 case fixed_point_scale_factor_binary:
12896 add_AT_int (base_type_result, DW_AT_binary_scale,
12897 fpt_info.scale_factor.binary);
12898 break;
12899
12900 case fixed_point_scale_factor_decimal:
12901 add_AT_int (base_type_result, DW_AT_decimal_scale,
12902 fpt_info.scale_factor.decimal);
12903 break;
12904
12905 case fixed_point_scale_factor_arbitrary:
12906 /* Arbitrary scale factors cannot be described in standard DWARF,
12907 yet. */
12908 if (!dwarf_strict)
12909 {
12910 /* Describe the scale factor as a rational constant. */
12911 const dw_die_ref scale_factor
12912 = new_die (DW_TAG_constant, comp_unit_die (), type);
12913
12914 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12915 fpt_info.scale_factor.arbitrary.numerator);
12916 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12917 fpt_info.scale_factor.arbitrary.denominator);
12918
12919 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12920 }
12921 break;
12922
12923 default:
12924 gcc_unreachable ();
12925 }
12926 }
12927
12928 if (type_bias)
12929 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12930 dw_scalar_form_constant
12931 | dw_scalar_form_exprloc
12932 | dw_scalar_form_reference,
12933 NULL);
12934
12935 return base_type_result;
12936 }
12937
12938 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12939 named 'auto' in its type: return true for it, false otherwise. */
12940
12941 static inline bool
12942 is_cxx_auto (tree type)
12943 {
12944 if (is_cxx ())
12945 {
12946 tree name = TYPE_IDENTIFIER (type);
12947 if (name == get_identifier ("auto")
12948 || name == get_identifier ("decltype(auto)"))
12949 return true;
12950 }
12951 return false;
12952 }
12953
/* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
   given input type is a Dwarf "fundamental" type.  Otherwise return zero.  */
12956
12957 static inline int
12958 is_base_type (tree type)
12959 {
12960 switch (TREE_CODE (type))
12961 {
12962 case INTEGER_TYPE:
12963 case REAL_TYPE:
12964 case FIXED_POINT_TYPE:
12965 case COMPLEX_TYPE:
12966 case BOOLEAN_TYPE:
12967 return 1;
12968
12969 case VOID_TYPE:
12970 case ARRAY_TYPE:
12971 case RECORD_TYPE:
12972 case UNION_TYPE:
12973 case QUAL_UNION_TYPE:
12974 case ENUMERAL_TYPE:
12975 case FUNCTION_TYPE:
12976 case METHOD_TYPE:
12977 case POINTER_TYPE:
12978 case REFERENCE_TYPE:
12979 case NULLPTR_TYPE:
12980 case OFFSET_TYPE:
12981 case LANG_TYPE:
12982 case VECTOR_TYPE:
12983 return 0;
12984
12985 default:
12986 if (is_cxx_auto (type))
12987 return 0;
12988 gcc_unreachable ();
12989 }
12990
12991 return 0;
12992 }
12993
/* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
   node, return the size in bits for the type if it is a constant, the
   alignment of the type if its size is not constant, zero if the size is
   unknown, or BITS_PER_WORD if the type actually turns out to be an
   ERROR_MARK node.  */
12999
13000 static inline unsigned HOST_WIDE_INT
13001 simple_type_size_in_bits (const_tree type)
13002 {
13003 if (TREE_CODE (type) == ERROR_MARK)
13004 return BITS_PER_WORD;
13005 else if (TYPE_SIZE (type) == NULL_TREE)
13006 return 0;
13007 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13008 return tree_to_uhwi (TYPE_SIZE (type));
13009 else
13010 return TYPE_ALIGN (type);
13011 }
13012
13013 /* Similarly, but return an offset_int instead of UHWI. */
13014
13015 static inline offset_int
13016 offset_int_type_size_in_bits (const_tree type)
13017 {
13018 if (TREE_CODE (type) == ERROR_MARK)
13019 return BITS_PER_WORD;
13020 else if (TYPE_SIZE (type) == NULL_TREE)
13021 return 0;
13022 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13023 return wi::to_offset (TYPE_SIZE (type));
13024 else
13025 return TYPE_ALIGN (type);
13026 }
13027
13028 /* Given a pointer to a tree node for a subrange type, return a pointer
13029 to a DIE that describes the given type. */
13030
13031 static dw_die_ref
13032 subrange_type_die (tree type, tree low, tree high, tree bias,
13033 dw_die_ref context_die)
13034 {
13035 dw_die_ref subrange_die;
13036 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13037
13038 if (context_die == NULL)
13039 context_die = comp_unit_die ();
13040
13041 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13042
13043 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13044 {
13045 /* The size of the subrange type and its base type do not match,
13046 so we need to generate a size attribute for the subrange type. */
13047 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13048 }
13049
13050 add_alignment_attribute (subrange_die, type);
13051
13052 if (low)
13053 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13054 if (high)
13055 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13056 if (bias && !dwarf_strict)
13057 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13058 dw_scalar_form_constant
13059 | dw_scalar_form_exprloc
13060 | dw_scalar_form_reference,
13061 NULL);
13062
13063 return subrange_die;
13064 }
13065
13066 /* Returns the (const and/or volatile) cv_qualifiers associated with
13067 the decl node. This will normally be augmented with the
13068 cv_qualifiers of the underlying type in add_type_attribute. */
13069
13070 static int
13071 decl_quals (const_tree decl)
13072 {
13073 return ((TREE_READONLY (decl)
13074 /* The C++ front-end correctly marks reference-typed
13075 variables as readonly, but from a language (and debug
13076 info) standpoint they are not const-qualified. */
13077 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13078 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13079 | (TREE_THIS_VOLATILE (decl)
13080 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13081 }
13082
/* Determine the variant of TYPE whose qualifiers form the largest strict
   subset of the given TYPE_QUALS, and return those qualifiers.  Ignore all
   qualifiers outside QUAL_MASK.  */
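/* For instance (an illustrative note): if TYPE_QUALS is const|volatile and
   only a const-qualified variant of TYPE exists, the result is
   TYPE_QUAL_CONST, and modified_type_die then only has to stack a
   DW_TAG_volatile_type DIE on top of the const variant's DIE.  */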
13086
13087 static int
13088 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13089 {
13090 tree t;
13091 int best_rank = 0, best_qual = 0, max_rank;
13092
13093 type_quals &= qual_mask;
13094 max_rank = popcount_hwi (type_quals) - 1;
13095
13096 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13097 t = TYPE_NEXT_VARIANT (t))
13098 {
13099 int q = TYPE_QUALS (t) & qual_mask;
13100
13101 if ((q & type_quals) == q && q != type_quals
13102 && check_base_type (t, type))
13103 {
13104 int rank = popcount_hwi (q);
13105
13106 if (rank > best_rank)
13107 {
13108 best_rank = rank;
13109 best_qual = q;
13110 }
13111 }
13112 }
13113
13114 return best_qual;
13115 }
13116
13117 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13118 static const dwarf_qual_info_t dwarf_qual_info[] =
13119 {
13120 { TYPE_QUAL_CONST, DW_TAG_const_type },
13121 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13122 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13123 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13124 };
13125 static const unsigned int dwarf_qual_info_size
13126 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
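/* Note that the order of dwarf_qual_info[] also defines the canonical
   order in which qualifier DIEs are stacked when type units are emitted;
   see the use_debug_types handling in modified_type_die.  */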
13127
13128 /* If DIE is a qualified DIE of some base DIE with the same parent,
13129 return the base DIE, otherwise return NULL. Set MASK to the
13130 qualifiers added compared to the returned DIE. */
13131
13132 static dw_die_ref
13133 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13134 {
13135 unsigned int i;
13136 for (i = 0; i < dwarf_qual_info_size; i++)
13137 if (die->die_tag == dwarf_qual_info[i].t)
13138 break;
13139 if (i == dwarf_qual_info_size)
13140 return NULL;
13141 if (vec_safe_length (die->die_attr) != 1)
13142 return NULL;
13143 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13144 if (type == NULL || type->die_parent != die->die_parent)
13145 return NULL;
13146 *mask |= dwarf_qual_info[i].q;
13147 if (depth)
13148 {
13149 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13150 if (ret)
13151 return ret;
13152 }
13153 return type;
13154 }
13155
13156 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13157 entry that chains the modifiers specified by CV_QUALS in front of the
13158 given type. REVERSE is true if the type is to be interpreted in the
13159 reverse storage order wrt the target order. */
13160
13161 static dw_die_ref
13162 modified_type_die (tree type, int cv_quals, bool reverse,
13163 dw_die_ref context_die)
13164 {
13165 enum tree_code code = TREE_CODE (type);
13166 dw_die_ref mod_type_die;
13167 dw_die_ref sub_die = NULL;
13168 tree item_type = NULL;
13169 tree qualified_type;
13170 tree name, low, high;
13171 dw_die_ref mod_scope;
13172 /* Only these cv-qualifiers are currently handled. */
  const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
                            | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC
                            | ENCODE_QUAL_ADDR_SPACE (~0U));
13176 const bool reverse_base_type
13177 = need_endianity_attribute_p (reverse) && is_base_type (type);
13178
13179 if (code == ERROR_MARK)
13180 return NULL;
13181
13182 if (lang_hooks.types.get_debug_type)
13183 {
13184 tree debug_type = lang_hooks.types.get_debug_type (type);
13185
13186 if (debug_type != NULL_TREE && debug_type != type)
13187 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13188 }
13189
13190 cv_quals &= cv_qual_mask;
13191
  /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
     tag modifier (and not an attribute) that old consumers won't be
     able to handle.  */
13195 if (dwarf_version < 3)
13196 cv_quals &= ~TYPE_QUAL_RESTRICT;
13197
13198 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13199 if (dwarf_version < 5)
13200 cv_quals &= ~TYPE_QUAL_ATOMIC;
13201
13202 /* See if we already have the appropriately qualified variant of
13203 this type. */
13204 qualified_type = get_qualified_type (type, cv_quals);
13205
13206 if (qualified_type == sizetype)
13207 {
13208 /* Try not to expose the internal sizetype type's name. */
13209 if (TYPE_NAME (qualified_type)
13210 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13211 {
13212 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13213
13214 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13215 && (TYPE_PRECISION (t)
13216 == TYPE_PRECISION (qualified_type))
13217 && (TYPE_UNSIGNED (t)
13218 == TYPE_UNSIGNED (qualified_type)));
13219 qualified_type = t;
13220 }
13221 else if (qualified_type == sizetype
13222 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13223 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13224 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13225 qualified_type = size_type_node;
13226 if (type == sizetype)
13227 type = qualified_type;
13228 }
13229
13230 /* If we do, then we can just use its DIE, if it exists. */
13231 if (qualified_type)
13232 {
13233 mod_type_die = lookup_type_die (qualified_type);
13234
13235 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13236 dealt with specially: the DIE with the attribute, if it exists, is
13237 placed immediately after the regular DIE for the same base type. */
13238 if (mod_type_die
13239 && (!reverse_base_type
13240 || ((mod_type_die = mod_type_die->die_sib) != NULL
13241 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13242 return mod_type_die;
13243 }
13244
13245 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13246
13247 /* Handle C typedef types. */
13248 if (name
13249 && TREE_CODE (name) == TYPE_DECL
13250 && DECL_ORIGINAL_TYPE (name)
13251 && !DECL_ARTIFICIAL (name))
13252 {
13253 tree dtype = TREE_TYPE (name);
13254
13255 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13256 if (qualified_type == dtype && !reverse_base_type)
13257 {
13258 tree origin = decl_ultimate_origin (name);
13259
13260 /* Typedef variants that have an abstract origin don't get their own
13261 type DIE (see gen_typedef_die), so fall back on the ultimate
13262 abstract origin instead. */
13263 if (origin != NULL && origin != name)
13264 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13265 context_die);
13266
13267 /* For a named type, use the typedef. */
13268 gen_type_die (qualified_type, context_die);
13269 return lookup_type_die (qualified_type);
13270 }
13271 else
13272 {
13273 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13274 dquals &= cv_qual_mask;
13275 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13276 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13277 /* cv-unqualified version of named type. Just use
13278 the unnamed type to which it refers. */
13279 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13280 reverse, context_die);
13281 /* Else cv-qualified version of named type; fall through. */
13282 }
13283 }
13284
13285 mod_scope = scope_die_for (type, context_die);
13286
13287 if (cv_quals)
13288 {
13289 int sub_quals = 0, first_quals = 0;
13290 unsigned i;
13291 dw_die_ref first = NULL, last = NULL;
13292
13293 /* Determine a lesser qualified type that most closely matches
13294 this one. Then generate DW_TAG_* entries for the remaining
13295 qualifiers. */
13296 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13297 cv_qual_mask);
13298 if (sub_quals && use_debug_types)
13299 {
13300 bool needed = false;
13301 /* If emitting type units, make sure the order of qualifiers
13302 is canonical. Thus, start from unqualified type if
13303 an earlier qualifier is missing in sub_quals, but some later
13304 one is present there. */
13305 for (i = 0; i < dwarf_qual_info_size; i++)
13306 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13307 needed = true;
13308 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13309 {
13310 sub_quals = 0;
13311 break;
13312 }
13313 }
13314 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13315 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13316 {
13317 /* As not all intermediate qualified DIEs have corresponding
13318 tree types, ensure that qualified DIEs in the same scope
13319 as their DW_AT_type are emitted after their DW_AT_type,
13320 only with other qualified DIEs for the same type possibly
13321 in between them. Determine the range of such qualified
13322 DIEs now (first being the base type, last being corresponding
13323 last qualified DIE for it). */
13324 unsigned int count = 0;
13325 first = qualified_die_p (mod_type_die, &first_quals,
13326 dwarf_qual_info_size);
13327 if (first == NULL)
13328 first = mod_type_die;
13329 gcc_assert ((first_quals & ~sub_quals) == 0);
13330 for (count = 0, last = first;
13331 count < (1U << dwarf_qual_info_size);
13332 count++, last = last->die_sib)
13333 {
13334 int quals = 0;
13335 if (last == mod_scope->die_child)
13336 break;
13337 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13338 != first)
13339 break;
13340 }
13341 }
13342
13343 for (i = 0; i < dwarf_qual_info_size; i++)
13344 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13345 {
13346 dw_die_ref d;
13347 if (first && first != last)
13348 {
13349 for (d = first->die_sib; ; d = d->die_sib)
13350 {
13351 int quals = 0;
13352 qualified_die_p (d, &quals, dwarf_qual_info_size);
13353 if (quals == (first_quals | dwarf_qual_info[i].q))
13354 break;
13355 if (d == last)
13356 {
13357 d = NULL;
13358 break;
13359 }
13360 }
13361 if (d)
13362 {
13363 mod_type_die = d;
13364 continue;
13365 }
13366 }
13367 if (first)
13368 {
13369 d = new_die_raw (dwarf_qual_info[i].t);
13370 add_child_die_after (mod_scope, d, last);
13371 last = d;
13372 }
13373 else
13374 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13375 if (mod_type_die)
13376 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13377 mod_type_die = d;
13378 first_quals |= dwarf_qual_info[i].q;
13379 }
13380 }
13381 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13382 {
13383 dwarf_tag tag = DW_TAG_pointer_type;
13384 if (code == REFERENCE_TYPE)
13385 {
13386 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13387 tag = DW_TAG_rvalue_reference_type;
13388 else
13389 tag = DW_TAG_reference_type;
13390 }
13391 mod_type_die = new_die (tag, mod_scope, type);
13392
13393 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13394 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13395 add_alignment_attribute (mod_type_die, type);
13396 item_type = TREE_TYPE (type);
13397
13398 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13399 if (!ADDR_SPACE_GENERIC_P (as))
13400 {
13401 int action = targetm.addr_space.debug (as);
13402 if (action >= 0)
13403 {
13404 /* Positive values indicate an address_class. */
13405 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13406 }
13407 else
13408 {
13409 /* Negative values indicate an (inverted) segment base reg. */
13410 dw_loc_descr_ref d
13411 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13412 add_AT_loc (mod_type_die, DW_AT_segment, d);
13413 }
13414 }
13415 }
13416 else if (code == INTEGER_TYPE
13417 && TREE_TYPE (type) != NULL_TREE
13418 && subrange_type_for_debug_p (type, &low, &high))
13419 {
13420 tree bias = NULL_TREE;
13421 if (lang_hooks.types.get_type_bias)
13422 bias = lang_hooks.types.get_type_bias (type);
13423 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13424 item_type = TREE_TYPE (type);
13425 }
13426 else if (is_base_type (type))
13427 {
13428 mod_type_die = base_type_die (type, reverse);
13429
13430 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13431 if (reverse_base_type)
13432 {
13433 dw_die_ref after_die
13434 = modified_type_die (type, cv_quals, false, context_die);
13435 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13436 }
13437 else
13438 add_child_die (comp_unit_die (), mod_type_die);
13439
13440 add_pubtype (type, mod_type_die);
13441 }
13442 else
13443 {
13444 gen_type_die (type, context_die);
13445
13446 /* We have to get the type_main_variant here (and pass that to the
13447 `lookup_type_die' routine) because the ..._TYPE node we have
13448 might simply be a *copy* of some original type node (where the
13449 copy was created to help us keep track of typedef names) and
13450 that copy might have a different TYPE_UID from the original
13451 ..._TYPE node. */
13452 if (TREE_CODE (type) == FUNCTION_TYPE
13453 || TREE_CODE (type) == METHOD_TYPE)
13454 {
13455 /* For function/method types, can't just use type_main_variant here,
13456 because that can have different ref-qualifiers for C++,
13457 but try to canonicalize. */
13458 tree main = TYPE_MAIN_VARIANT (type);
13459 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13460 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13461 && check_base_type (t, main)
13462 && check_lang_type (t, type))
13463 return lookup_type_die (t);
13464 return lookup_type_die (type);
13465 }
13466 else if (TREE_CODE (type) != VECTOR_TYPE
13467 && TREE_CODE (type) != ARRAY_TYPE)
13468 return lookup_type_die (type_main_variant (type));
13469 else
13470 /* Vectors have the debugging information in the type,
13471 not the main variant. */
13472 return lookup_type_die (type);
13473 }
13474
13475 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13476 don't output a DW_TAG_typedef, since there isn't one in the
13477 user's program; just attach a DW_AT_name to the type.
13478 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13479 if the base type already has the same name. */
13480 if (name
13481 && ((TREE_CODE (name) != TYPE_DECL
13482 && (qualified_type == TYPE_MAIN_VARIANT (type)
13483 || (cv_quals == TYPE_UNQUALIFIED)))
13484 || (TREE_CODE (name) == TYPE_DECL
13485 && TREE_TYPE (name) == qualified_type
13486 && DECL_NAME (name))))
13487 {
13488 if (TREE_CODE (name) == TYPE_DECL)
13489 /* Could just call add_name_and_src_coords_attributes here,
13490 but since this is a builtin type it doesn't have any
13491 useful source coordinates anyway. */
13492 name = DECL_NAME (name);
13493 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13494 }
13495 /* This probably indicates a bug. */
13496 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13497 {
13498 name = TYPE_IDENTIFIER (type);
13499 add_name_attribute (mod_type_die,
13500 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13501 }
13502
13503 if (qualified_type && !reverse_base_type)
13504 equate_type_number_to_die (qualified_type, mod_type_die);
13505
13506 if (item_type)
13507 /* We must do this after the equate_type_number_to_die call, in case
13508 this is a recursive type. This ensures that the modified_type_die
13509 recursion will terminate even if the type is recursive. Recursive
13510 types are possible in Ada. */
13511 sub_die = modified_type_die (item_type,
13512 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13513 reverse,
13514 context_die);
13515
13516 if (sub_die != NULL)
13517 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13518
13519 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13520 if (TYPE_ARTIFICIAL (type))
13521 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13522
13523 return mod_type_die;
13524 }
13525
13526 /* Generate DIEs for the generic parameters of T.
13527 T must be either a generic type or a generic function.
13528 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13529
13530 static void
13531 gen_generic_params_dies (tree t)
13532 {
13533 tree parms, args;
13534 int parms_num, i;
13535 dw_die_ref die = NULL;
13536 int non_default;
13537
13538 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13539 return;
13540
13541 if (TYPE_P (t))
13542 die = lookup_type_die (t);
13543 else if (DECL_P (t))
13544 die = lookup_decl_die (t);
13545
13546 gcc_assert (die);
13547
13548 parms = lang_hooks.get_innermost_generic_parms (t);
13549 if (!parms)
    /* T has no generic parameter.  It means T is neither a generic type
       nor a generic function.  End of story.  */
13552 return;
13553
13554 parms_num = TREE_VEC_LENGTH (parms);
13555 args = lang_hooks.get_innermost_generic_args (t);
13556 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13557 non_default = int_cst_value (TREE_CHAIN (args));
13558 else
13559 non_default = TREE_VEC_LENGTH (args);
13560 for (i = 0; i < parms_num; i++)
13561 {
13562 tree parm, arg, arg_pack_elems;
13563 dw_die_ref parm_die;
13564
13565 parm = TREE_VEC_ELT (parms, i);
13566 arg = TREE_VEC_ELT (args, i);
13567 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13568 gcc_assert (parm && TREE_VALUE (parm) && arg);
13569
13570 if (parm && TREE_VALUE (parm) && arg)
13571 {
13572 /* If PARM represents a template parameter pack,
13573 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13574 by DW_TAG_template_*_parameter DIEs for the argument
13575 pack elements of ARG. Note that ARG would then be
13576 an argument pack. */
13577 if (arg_pack_elems)
13578 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13579 arg_pack_elems,
13580 die);
13581 else
13582 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13583 true /* emit name */, die);
13584 if (i >= non_default)
13585 add_AT_flag (parm_die, DW_AT_default_value, 1);
13586 }
13587 }
13588 }
13589
/* Create and return a DIE for PARM which should be
   the representation of a generic type parameter.
   For instance, in the C++ front end, PARM would be a template parameter.
   ARG is the argument to PARM.
   EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
   the name of PARM.
   PARENT_DIE is the parent DIE to which the newly created DIE should be
   added as a child node.  */
13598
13599 static dw_die_ref
13600 generic_parameter_die (tree parm, tree arg,
13601 bool emit_name_p,
13602 dw_die_ref parent_die)
13603 {
13604 dw_die_ref tmpl_die = NULL;
13605 const char *name = NULL;
13606
13607 /* C++2a accepts class literals as template parameters, and var
13608 decls with initializers represent them. The VAR_DECLs would be
13609 rejected, but we can take the DECL_INITIAL constructor and
13610 attempt to expand it. */
13611 if (arg && VAR_P (arg))
13612 arg = DECL_INITIAL (arg);
13613
13614 if (!parm || !DECL_NAME (parm) || !arg)
13615 return NULL;
13616
13617 /* We support non-type generic parameters and arguments,
13618 type generic parameters and arguments, as well as
13619 generic generic parameters (a.k.a. template template parameters in C++)
13620 and arguments. */
13621 if (TREE_CODE (parm) == PARM_DECL)
13622 /* PARM is a nontype generic parameter */
13623 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13624 else if (TREE_CODE (parm) == TYPE_DECL)
13625 /* PARM is a type generic parameter. */
13626 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13627 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13628 /* PARM is a generic generic parameter.
13629 Its DIE is a GNU extension. It shall have a
13630 DW_AT_name attribute to represent the name of the template template
13631 parameter, and a DW_AT_GNU_template_name attribute to represent the
13632 name of the template template argument. */
13633 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13634 parent_die, parm);
13635 else
13636 gcc_unreachable ();
13637
13638 if (tmpl_die)
13639 {
13640 tree tmpl_type;
13641
      /* If PARM is a generic parameter pack, it means we are
         emitting debug info for a template argument pack element.
         In other words, ARG is a template argument pack element.
         In that case, we don't emit any DW_AT_name attribute for
         the DIE.  */
13647 if (emit_name_p)
13648 {
13649 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13650 gcc_assert (name);
13651 add_AT_string (tmpl_die, DW_AT_name, name);
13652 }
13653
13654 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13655 {
13656 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13657 TMPL_DIE should have a child DW_AT_type attribute that is set
13658 to the type of the argument to PARM, which is ARG.
13659 If PARM is a type generic parameter, TMPL_DIE should have a
13660 child DW_AT_type that is set to ARG. */
13661 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13662 add_type_attribute (tmpl_die, tmpl_type,
13663 (TREE_THIS_VOLATILE (tmpl_type)
13664 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13665 false, parent_die);
13666 }
13667 else
13668 {
          /* So TMPL_DIE is a DIE representing a generic generic template
             parameter, a.k.a. a template template parameter in C++,
             and ARG is a template.  */
13672
13673 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13674 to the name of the argument. */
13675 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13676 if (name)
13677 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13678 }
13679
13680 if (TREE_CODE (parm) == PARM_DECL)
    /* So PARM is a non-type generic parameter.
       DWARF3 5.6.8 says we must set a DW_AT_const_value child
       attribute of TMPL_DIE whose value represents the value
       of ARG.
       We must be careful here:
       the value of ARG might reference some function decls.
       We might currently be emitting debug info for a generic
       type, and types are emitted before function decls, so we
       don't know whether the function decls referenced by ARG
       will actually be emitted after the cgraph computations.
       So we must defer the generation of the DW_AT_const_value
       until after cgraph is ready.  */
13693 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13694 }
13695
13696 return tmpl_die;
13697 }
13698
/* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
   PARM_PACK, a template parameter pack, whose arguments are given by
   PARM_PACK_ARGS.  The returned DIE will be a child DIE of PARENT_DIE.  */
13702
13703 static dw_die_ref
13704 template_parameter_pack_die (tree parm_pack,
13705 tree parm_pack_args,
13706 dw_die_ref parent_die)
13707 {
13708 dw_die_ref die;
13709 int j;
13710
13711 gcc_assert (parent_die && parm_pack);
13712
13713 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13714 add_name_and_src_coords_attributes (die, parm_pack);
13715 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13716 generic_parameter_die (parm_pack,
13717 TREE_VEC_ELT (parm_pack_args, j),
13718 false /* Don't emit DW_AT_name */,
13719 die);
13720 return die;
13721 }
13722
13723 /* Return the DBX register number described by a given RTL node. */
13724
13725 static unsigned int
13726 dbx_reg_number (const_rtx rtl)
13727 {
13728 unsigned regno = REGNO (rtl);
13729
13730 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13731
13732 #ifdef LEAF_REG_REMAP
13733 if (crtl->uses_only_leaf_regs)
13734 {
13735 int leaf_reg = LEAF_REG_REMAP (regno);
13736 if (leaf_reg != -1)
13737 regno = (unsigned) leaf_reg;
13738 }
13739 #endif
13740
13741 regno = DBX_REGISTER_NUMBER (regno);
13742 gcc_assert (regno != INVALID_REGNUM);
13743 return regno;
13744 }
13745
/* Optionally add a DW_OP_piece term to a location description expression.
   DW_OP_piece is only added if the location description expression does
   not already end with DW_OP_piece.  */
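/* (As a reminder, DW_OP_piece (SIZE) says that the location computed so far
   describes the next SIZE bytes of the enclosing object, so consecutive
   pieces compose a value that is split across registers and/or memory.)  */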
13749
13750 static void
13751 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13752 {
13753 dw_loc_descr_ref loc;
13754
13755 if (*list_head != NULL)
13756 {
13757 /* Find the end of the chain. */
13758 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13759 ;
13760
13761 if (loc->dw_loc_opc != DW_OP_piece)
13762 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13763 }
13764 }
13765
13766 /* Return a location descriptor that designates a machine register or
13767 zero if there is none. */
13768
13769 static dw_loc_descr_ref
13770 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13771 {
13772 rtx regs;
13773
13774 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13775 return 0;
13776
13777 /* We only use "frame base" when we're sure we're talking about the
13778 post-prologue local stack frame. We do this by *not* running
13779 register elimination until this point, and recognizing the special
13780 argument pointer and soft frame pointer rtx's.
13781 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13782 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13783 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13784 {
13785 dw_loc_descr_ref result = NULL;
13786
13787 if (dwarf_version >= 4 || !dwarf_strict)
13788 {
13789 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13790 initialized);
13791 if (result)
13792 add_loc_descr (&result,
13793 new_loc_descr (DW_OP_stack_value, 0, 0));
13794 }
13795 return result;
13796 }
13797
13798 regs = targetm.dwarf_register_span (rtl);
13799
13800 if (REG_NREGS (rtl) > 1 || regs)
13801 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13802 else
13803 {
13804 unsigned int dbx_regnum = dbx_reg_number (rtl);
13805 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13806 return 0;
13807 return one_reg_loc_descriptor (dbx_regnum, initialized);
13808 }
13809 }
13810
13811 /* Return a location descriptor that designates a machine register for
13812 a given hard register number. */
13813
13814 static dw_loc_descr_ref
13815 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13816 {
13817 dw_loc_descr_ref reg_loc_descr;
13818
13819 if (regno <= 31)
13820 reg_loc_descr
13821 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13822 else
13823 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13824
13825 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13826 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13827
13828 return reg_loc_descr;
13829 }
13830
13831 /* Given an RTL of a register, return a location descriptor that
13832 designates a value that spans more than one register. */
13833
13834 static dw_loc_descr_ref
13835 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13836 enum var_init_status initialized)
13837 {
13838 int size, i;
13839 dw_loc_descr_ref loc_result = NULL;
13840
13841 /* Simple, contiguous registers. */
13842 if (regs == NULL_RTX)
13843 {
13844 unsigned reg = REGNO (rtl);
13845 int nregs;
13846
13847 #ifdef LEAF_REG_REMAP
13848 if (crtl->uses_only_leaf_regs)
13849 {
13850 int leaf_reg = LEAF_REG_REMAP (reg);
13851 if (leaf_reg != -1)
13852 reg = (unsigned) leaf_reg;
13853 }
13854 #endif
13855
13856 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13857 nregs = REG_NREGS (rtl);
13858
13859 /* At present we only track constant-sized pieces. */
13860 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13861 return NULL;
13862 size /= nregs;
13863
13864 loc_result = NULL;
13865 while (nregs--)
13866 {
13867 dw_loc_descr_ref t;
13868
13869 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13870 VAR_INIT_STATUS_INITIALIZED);
13871 add_loc_descr (&loc_result, t);
13872 add_loc_descr_op_piece (&loc_result, size);
13873 ++reg;
13874 }
13875 return loc_result;
13876 }
13877
  /* Now onto stupid register sets in non-contiguous locations.  */
13879
13880 gcc_assert (GET_CODE (regs) == PARALLEL);
13881
13882 /* At present we only track constant-sized pieces. */
13883 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13884 return NULL;
13885 loc_result = NULL;
13886
13887 for (i = 0; i < XVECLEN (regs, 0); ++i)
13888 {
13889 dw_loc_descr_ref t;
13890
13891 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13892 VAR_INIT_STATUS_INITIALIZED);
13893 add_loc_descr (&loc_result, t);
13894 add_loc_descr_op_piece (&loc_result, size);
13895 }
13896
13897 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13898 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13899 return loc_result;
13900 }
13901
13902 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13903
13904 /* Return a location descriptor that designates a constant i,
13905 as a compound operation from constant (i >> shift), constant shift
13906 and DW_OP_shl. */
13907
13908 static dw_loc_descr_ref
13909 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13910 {
13911 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13912 add_loc_descr (&ret, int_loc_descriptor (shift));
13913 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13914 return ret;
13915 }
13916
13917 /* Return a location descriptor that designates constant POLY_I. */
13918
13919 static dw_loc_descr_ref
13920 int_loc_descriptor (poly_int64 poly_i)
13921 {
13922 enum dwarf_location_atom op;
13923
13924 HOST_WIDE_INT i;
13925 if (!poly_i.is_constant (&i))
13926 {
13927 /* Create location descriptions for the non-constant part and
13928 add any constant offset at the end. */
13929 dw_loc_descr_ref ret = NULL;
13930 HOST_WIDE_INT constant = poly_i.coeffs[0];
13931 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13932 {
13933 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13934 if (coeff != 0)
13935 {
13936 dw_loc_descr_ref start = ret;
13937 unsigned int factor;
13938 int bias;
13939 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13940 (j, &factor, &bias);
13941
13942 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13943 add COEFF * (REGNO / FACTOR) now and subtract
13944 COEFF * BIAS from the final constant part. */
13945 constant -= coeff * bias;
13946 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13947 if (coeff % factor == 0)
13948 coeff /= factor;
13949 else
13950 {
13951 int amount = exact_log2 (factor);
13952 gcc_assert (amount >= 0);
13953 add_loc_descr (&ret, int_loc_descriptor (amount));
13954 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13955 }
13956 if (coeff != 1)
13957 {
13958 add_loc_descr (&ret, int_loc_descriptor (coeff));
13959 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13960 }
13961 if (start)
13962 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13963 }
13964 }
13965 loc_descr_plus_const (&ret, constant);
13966 return ret;
13967 }
13968
13969 /* Pick the smallest representation of a constant, rather than just
13970 defaulting to the LEB encoding. */
13971 if (i >= 0)
13972 {
13973 int clz = clz_hwi (i);
13974 int ctz = ctz_hwi (i);
13975 if (i <= 31)
13976 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13977 else if (i <= 0xff)
13978 op = DW_OP_const1u;
13979 else if (i <= 0xffff)
13980 op = DW_OP_const2u;
13981 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13982 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13983 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13984 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13985 while DW_OP_const4u is 5 bytes. */
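        /* E.g. (assuming a 64-bit HOST_WIDE_INT) i == 0x80000000 has
           clz == 32 and ctz == 31 and is therefore emitted as
           DW_OP_lit16 DW_OP_lit27 DW_OP_shl, i.e. 3 bytes.  */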
13986 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13987 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13988 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13989 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13990 while DW_OP_const4u is 5 bytes. */
13991 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13992
13993 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13994 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13995 <= 4)
13996 {
13997 /* As i >= 2**31, the double cast above will yield a negative number.
13998 Since wrapping is defined in DWARF expressions we can output big
13999 positive integers as small negative ones, regardless of the size
14000 of host wide ints.
14001
14002 Here, since the evaluator will handle 32-bit values and since i >=
14003 2**31, we know it's going to be interpreted as a negative literal:
14004 store it this way if we can do better than 5 bytes this way. */
14005 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14006 }
14007 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14008 op = DW_OP_const4u;
14009
14010 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14011 least 6 bytes: see if we can do better before falling back to it. */
14012 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14013 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14014 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14015 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14016 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14017 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14018 >= HOST_BITS_PER_WIDE_INT)
14019 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14020 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14021 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14022 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14023 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14024 && size_of_uleb128 (i) > 6)
14025 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14026 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14027 else
14028 op = DW_OP_constu;
14029 }
14030 else
14031 {
14032 if (i >= -0x80)
14033 op = DW_OP_const1s;
14034 else if (i >= -0x8000)
14035 op = DW_OP_const2s;
14036 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14037 {
14038 if (size_of_int_loc_descriptor (i) < 5)
14039 {
14040 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14041 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14042 return ret;
14043 }
14044 op = DW_OP_const4s;
14045 }
14046 else
14047 {
14048 if (size_of_int_loc_descriptor (i)
14049 < (unsigned long) 1 + size_of_sleb128 (i))
14050 {
14051 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14052 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14053 return ret;
14054 }
14055 op = DW_OP_consts;
14056 }
14057 }
14058
14059 return new_loc_descr (op, i, 0);
14060 }
14061
14062 /* Likewise, for unsigned constants. */
14063
14064 static dw_loc_descr_ref
14065 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14066 {
14067 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14068 const unsigned HOST_WIDE_INT max_uint
14069 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14070
14071 /* If possible, use the clever signed constants handling. */
14072 if (i <= max_int)
14073 return int_loc_descriptor ((HOST_WIDE_INT) i);
14074
  /* Here, we are left with positive numbers that cannot be represented as
     HOST_WIDE_INT, i.e.:
         max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)

     Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
     lot of bytes, whereas it may be better to output a negative integer:
     thanks to integer wrapping, we know that:
         x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
           = x - 2 * (max (HOST_WIDE_INT) + 1)
     So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
     small negative integers.  Let's try that in cases where it will clearly
     improve the encoding: there is no gain in turning DW_OP_const4u into
     DW_OP_const4s.  */
14088 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14089 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14090 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14091 {
14092 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14093
14094 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14095 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14096 const HOST_WIDE_INT second_shift
14097 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14098
14099 /* So we finally have:
14100 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14101 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14102 return int_loc_descriptor (second_shift);
14103 }
14104
14105 /* Last chance: fallback to a simple constant operation. */
14106 return new_loc_descr
14107 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14108 ? DW_OP_const4u
14109 : DW_OP_const8u,
14110 i, 0);
14111 }
14112
14113 /* Generate and return a location description that computes the unsigned
14114 comparison of the two stack top entries (a OP b where b is the top-most
14115 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14116 LE_EXPR, GT_EXPR or GE_EXPR. */
14117
14118 static dw_loc_descr_ref
14119 uint_comparison_loc_list (enum tree_code kind)
14120 {
14121 enum dwarf_location_atom op, flip_op;
14122 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14123
14124 switch (kind)
14125 {
14126 case LT_EXPR:
14127 op = DW_OP_lt;
14128 break;
14129 case LE_EXPR:
14130 op = DW_OP_le;
14131 break;
14132 case GT_EXPR:
14133 op = DW_OP_gt;
14134 break;
14135 case GE_EXPR:
14136 op = DW_OP_ge;
14137 break;
14138 default:
14139 gcc_unreachable ();
14140 }
14141
14142 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14143 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14144
14145 /* Up to DWARF 4, operations all work on signed integers. It is nevertheless
14146 possible to perform unsigned comparisons: we just have to distinguish
14147 two cases:
14148
14149 1. when a and b have the same sign (as signed integers); then we should
14150 return: a OP(signed) b;
14151
14152 2. when a is a negative signed integer while b is a positive one, then a
14153 is a greater unsigned integer than b; likewise when a and b's roles
14154 are flipped.
14155
14156 So first, compare the sign of the two operands. */
14157 ret = new_loc_descr (DW_OP_over, 0, 0);
14158 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14159 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14160 /* If they have different signs (i.e. they have different sign bits), then
14161 the stack top value now has the sign bit set and is thus smaller than
14162 zero. */
14163 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14164 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14165 add_loc_descr (&ret, bra_node);
14166
14167 /* We are in case 1. At this point, we know both operands have the same
14168 sign, so it's safe to use the built-in signed comparison. */
14169 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14170 add_loc_descr (&ret, jmp_node);
14171
14172 /* We are in case 2. Here, we know both operands do not have the same sign,
14173 so we have to flip the signed comparison. */
14174 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14175 tmp = new_loc_descr (flip_op, 0, 0);
14176 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14177 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14178 add_loc_descr (&ret, tmp);
14179
14180 /* This dummy operation is necessary to make the two branches join. */
14181 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14182 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14183 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14184 add_loc_descr (&ret, tmp);
14185
14186 return ret;
14187 }
14188
14189 /* Likewise, but takes the location description lists (might be destructive on
14190 them). Return NULL if either is NULL or if concatenation fails. */
14191
14192 static dw_loc_list_ref
14193 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14194 enum tree_code kind)
14195 {
14196 if (left == NULL || right == NULL)
14197 return NULL;
14198
14199 add_loc_list (&left, right);
14200 if (left == NULL)
14201 return NULL;
14202
14203 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14204 return left;
14205 }
14206
14207 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14208 without actually allocating it. */
14209
14210 static unsigned long
14211 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14212 {
14213 return size_of_int_loc_descriptor (i >> shift)
14214 + size_of_int_loc_descriptor (shift)
14215 + 1;
14216 }
14217
14218 /* Return size_of_locs (int_loc_descriptor (i)) without
14219 actually allocating it. */
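   /* For example: 5 costs 1 byte (a DW_OP_lit opcode), 300 costs 3 bytes
      (DW_OP_const2u) and -70000 costs 5 bytes (DW_OP_const4s).  */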
14220
14221 static unsigned long
14222 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14223 {
14224 unsigned long s;
14225
14226 if (i >= 0)
14227 {
14228 int clz, ctz;
14229 if (i <= 31)
14230 return 1;
14231 else if (i <= 0xff)
14232 return 2;
14233 else if (i <= 0xffff)
14234 return 3;
14235 clz = clz_hwi (i);
14236 ctz = ctz_hwi (i);
14237 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14238 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14239 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14240 - clz - 5);
14241 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14242 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14243 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14244 - clz - 8);
14245 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14246 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14247 <= 4)
14248 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14249 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14250 return 5;
14251 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14252 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14253 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14254 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14255 - clz - 8);
14256 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14257 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14258 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14259 - clz - 16);
14260 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14261 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14262 && s > 6)
14263 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14264 - clz - 32);
14265 else
14266 return 1 + s;
14267 }
14268 else
14269 {
14270 if (i >= -0x80)
14271 return 2;
14272 else if (i >= -0x8000)
14273 return 3;
14274 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14275 {
14276 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14277 {
14278 s = size_of_int_loc_descriptor (-i) + 1;
14279 if (s < 5)
14280 return s;
14281 }
14282 return 5;
14283 }
14284 else
14285 {
14286 unsigned long r = 1 + size_of_sleb128 (i);
14287 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14288 {
14289 s = size_of_int_loc_descriptor (-i) + 1;
14290 if (s < r)
14291 return s;
14292 }
14293 return r;
14294 }
14295 }
14296 }
14297
14298 /* Return a location description representing the "address" of the integer
14299 value I of SIZE bytes. This can appear only as a top-level expression. */
14300
14301 static dw_loc_descr_ref
14302 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14303 {
14304 int litsize;
14305 dw_loc_descr_ref loc_result = NULL;
14306
14307 if (!(dwarf_version >= 4 || !dwarf_strict))
14308 return NULL;
14309
14310 litsize = size_of_int_loc_descriptor (i);
14311 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14312 is more compact. For DW_OP_stack_value we need:
14313 litsize + 1 (DW_OP_stack_value)
14314 and for DW_OP_implicit_value:
14315 1 (DW_OP_implicit_value) + 1 (length) + size. */
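  /* E.g. for SIZE == 4 and I == 5, DW_OP_lit5 DW_OP_stack_value needs 2 bytes
     while DW_OP_implicit_value would need 1 + 1 + 4 == 6, so the stack-value
     form is used (assuming DWARF2_ADDR_SIZE >= 4).  */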
14316 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14317 {
14318 loc_result = int_loc_descriptor (i);
14319 add_loc_descr (&loc_result,
14320 new_loc_descr (DW_OP_stack_value, 0, 0));
14321 return loc_result;
14322 }
14323
14324 loc_result = new_loc_descr (DW_OP_implicit_value,
14325 size, 0);
14326 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14327 loc_result->dw_loc_oprnd2.v.val_int = i;
14328 return loc_result;
14329 }
14330
14331 /* Return a location descriptor that designates a base+offset location. */
14332
14333 static dw_loc_descr_ref
14334 based_loc_descr (rtx reg, poly_int64 offset,
14335 enum var_init_status initialized)
14336 {
14337 unsigned int regno;
14338 dw_loc_descr_ref result;
14339 dw_fde_ref fde = cfun->fde;
14340
14341 /* We only use "frame base" when we're sure we're talking about the
14342 post-prologue local stack frame. We do this by *not* running
14343 register elimination until this point, and recognizing the special
14344 argument pointer and soft frame pointer rtx's. */
14345 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14346 {
14347 rtx elim = (ira_use_lra_p
14348 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14349 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14350
14351 if (elim != reg)
14352 {
14353 /* Allow the hard frame pointer here even if the frame pointer
14354 isn't used: the hard frame pointer is encoded with DW_OP_fbreg,
14355 which uses the DW_AT_frame_base attribute rather than the hard
14356 frame pointer directly. */
14357 elim = strip_offset_and_add (elim, &offset);
14358 gcc_assert (elim == hard_frame_pointer_rtx
14359 || elim == stack_pointer_rtx);
14360
14361 /* If drap register is used to align stack, use frame
14362 pointer + offset to access stack variables. If stack
14363 is aligned without drap, use stack pointer + offset to
14364 access stack variables. */
14365 if (crtl->stack_realign_tried
14366 && reg == frame_pointer_rtx)
14367 {
14368 int base_reg
14369 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14370 ? HARD_FRAME_POINTER_REGNUM
14371 : REGNO (elim));
14372 return new_reg_loc_descr (base_reg, offset);
14373 }
14374
14375 gcc_assert (frame_pointer_fb_offset_valid);
14376 offset += frame_pointer_fb_offset;
14377 HOST_WIDE_INT const_offset;
14378 if (offset.is_constant (&const_offset))
14379 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14380 else
14381 {
14382 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14383 loc_descr_plus_const (&ret, offset);
14384 return ret;
14385 }
14386 }
14387 }
14388
14389 regno = REGNO (reg);
14390 #ifdef LEAF_REG_REMAP
14391 if (crtl->uses_only_leaf_regs)
14392 {
14393 int leaf_reg = LEAF_REG_REMAP (regno);
14394 if (leaf_reg != -1)
14395 regno = (unsigned) leaf_reg;
14396 }
14397 #endif
14398 regno = DWARF_FRAME_REGNUM (regno);
14399
14400 HOST_WIDE_INT const_offset;
14401 if (!optimize && fde
14402 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14403 && offset.is_constant (&const_offset))
14404 {
14405 /* Use cfa+offset to represent the location of arguments passed
14406 on the stack when drap is used to align stack.
14407 Only do this when not optimizing; for optimized code, var-tracking
14408 is supposed to track where the arguments live, and the register
14409 used as vdrap or drap in some spot might be used for something
14410 else in other parts of the routine. */
14411 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14412 }
14413
14414 result = new_reg_loc_descr (regno, offset);
14415
14416 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14417 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14418
14419 return result;
14420 }
14421
14422 /* Return true if this RTL expression describes a base+offset calculation. */
14423
14424 static inline int
14425 is_based_loc (const_rtx rtl)
14426 {
14427 return (GET_CODE (rtl) == PLUS
14428 && ((REG_P (XEXP (rtl, 0))
14429 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14430 && CONST_INT_P (XEXP (rtl, 1)))));
14431 }
14432
14433 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14434 failed. */
14435
14436 static dw_loc_descr_ref
14437 tls_mem_loc_descriptor (rtx mem)
14438 {
14439 tree base;
14440 dw_loc_descr_ref loc_result;
14441
14442 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14443 return NULL;
14444
14445 base = get_base_address (MEM_EXPR (mem));
14446 if (base == NULL
14447 || !VAR_P (base)
14448 || !DECL_THREAD_LOCAL_P (base))
14449 return NULL;
14450
14451 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14452 if (loc_result == NULL)
14453 return NULL;
14454
14455 if (maybe_ne (MEM_OFFSET (mem), 0))
14456 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14457
14458 return loc_result;
14459 }
14460
14461 /* Output debug info about reason why we failed to expand expression as dwarf
14462 expression. */
14463
14464 static void
14465 expansion_failed (tree expr, rtx rtl, char const *reason)
14466 {
14467 if (dump_file && (dump_flags & TDF_DETAILS))
14468 {
14469 fprintf (dump_file, "Failed to expand as dwarf: ");
14470 if (expr)
14471 print_generic_expr (dump_file, expr, dump_flags);
14472 if (rtl)
14473 {
14474 fprintf (dump_file, "\n");
14475 print_rtl (dump_file, rtl);
14476 }
14477 fprintf (dump_file, "\nReason: %s\n", reason);
14478 }
14479 }
14480
14481 /* Helper function for const_ok_for_output. */
14482
14483 static bool
14484 const_ok_for_output_1 (rtx rtl)
14485 {
14486 if (targetm.const_not_ok_for_debug_p (rtl))
14487 {
14488 if (GET_CODE (rtl) != UNSPEC)
14489 {
14490 expansion_failed (NULL_TREE, rtl,
14491 "Expression rejected for debug by the backend.\n");
14492 return false;
14493 }
14494
14495 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14496 the target hook doesn't explicitly allow it in debug info, assume
14497 we can't express it in the debug info. */
14498 /* Don't complain about TLS UNSPECs, those are just too hard to
14499 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14500 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14501 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14502 if (flag_checking
14503 && (XVECLEN (rtl, 0) == 0
14504 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14505 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14506 inform (current_function_decl
14507 ? DECL_SOURCE_LOCATION (current_function_decl)
14508 : UNKNOWN_LOCATION,
14509 #if NUM_UNSPEC_VALUES > 0
14510 "non-delegitimized UNSPEC %s (%d) found in variable location",
14511 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14512 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14513 #else
14514 "non-delegitimized UNSPEC %d found in variable location",
14515 #endif
14516 XINT (rtl, 1));
14517 expansion_failed (NULL_TREE, rtl,
14518 "UNSPEC hasn't been delegitimized.\n");
14519 return false;
14520 }
14521
14522 if (CONST_POLY_INT_P (rtl))
14523 return false;
14524
14525 /* FIXME: Refer to PR60655. It is possible for simplification
14526 of rtl expressions in var tracking to produce such expressions.
14527 We should really identify / validate expressions
14528 enclosed in CONST that can be handled by assemblers on various
14529 targets and only handle legitimate cases here. */
14530 switch (GET_CODE (rtl))
14531 {
14532 case SYMBOL_REF:
14533 break;
14534 case NOT:
14535 case NEG:
14536 return false;
14537 case PLUS:
14538 {
14539 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14540 operands. */
14541 subrtx_var_iterator::array_type array;
14542 bool first = false;
14543 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14544 if (SYMBOL_REF_P (*iter)
14545 || LABEL_P (*iter)
14546 || GET_CODE (*iter) == UNSPEC)
14547 {
14548 first = true;
14549 break;
14550 }
14551 if (!first)
14552 return true;
14553 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14554 if (SYMBOL_REF_P (*iter)
14555 || LABEL_P (*iter)
14556 || GET_CODE (*iter) == UNSPEC)
14557 return false;
14558 return true;
14559 }
14560 case MINUS:
14561 {
14562 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14563 appear in the second operand of MINUS. */
14564 subrtx_var_iterator::array_type array;
14565 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14566 if (SYMBOL_REF_P (*iter)
14567 || LABEL_P (*iter)
14568 || GET_CODE (*iter) == UNSPEC)
14569 return false;
14570 return true;
14571 }
14572 default:
14573 return true;
14574 }
14575
14576 if (CONSTANT_POOL_ADDRESS_P (rtl))
14577 {
14578 bool marked;
14579 get_pool_constant_mark (rtl, &marked);
14580 /* If all references to this pool constant were optimized away,
14581 it was not output and thus we can't represent it. */
14582 if (!marked)
14583 {
14584 expansion_failed (NULL_TREE, rtl,
14585 "Constant was removed from constant pool.\n");
14586 return false;
14587 }
14588 }
14589
14590 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14591 return false;
14592
14593 /* Avoid references to external symbols in debug info, on several targets
14594 the linker might even refuse to link when linking a shared library,
14595 and in many other cases the relocations for .debug_info/.debug_loc are
14596 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14597 to be defined within the same shared library or executable are fine. */
14598 if (SYMBOL_REF_EXTERNAL_P (rtl))
14599 {
14600 tree decl = SYMBOL_REF_DECL (rtl);
14601
14602 if (decl == NULL || !targetm.binds_local_p (decl))
14603 {
14604 expansion_failed (NULL_TREE, rtl,
14605 "Symbol not defined in current TU.\n");
14606 return false;
14607 }
14608 }
14609
14610 return true;
14611 }
14612
14613 /* Return true if constant RTL can be emitted in DW_OP_addr or
14614 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14615 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14616
14617 static bool
14618 const_ok_for_output (rtx rtl)
14619 {
14620 if (GET_CODE (rtl) == SYMBOL_REF)
14621 return const_ok_for_output_1 (rtl);
14622
14623 if (GET_CODE (rtl) == CONST)
14624 {
14625 subrtx_var_iterator::array_type array;
14626 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14627 if (!const_ok_for_output_1 (*iter))
14628 return false;
14629 return true;
14630 }
14631
14632 return true;
14633 }
14634
14635 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14636 if possible, NULL otherwise. */
14637
14638 static dw_die_ref
14639 base_type_for_mode (machine_mode mode, bool unsignedp)
14640 {
14641 dw_die_ref type_die;
14642 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14643
14644 if (type == NULL)
14645 return NULL;
14646 switch (TREE_CODE (type))
14647 {
14648 case INTEGER_TYPE:
14649 case REAL_TYPE:
14650 break;
14651 default:
14652 return NULL;
14653 }
14654 type_die = lookup_type_die (type);
14655 if (!type_die)
14656 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14657 comp_unit_die ());
14658 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14659 return NULL;
14660 return type_die;
14661 }
14662
14663 /* For the OP descriptor, assumed to be in unsigned MODE, convert it to an
14664 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14665 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14666 possible. */
14667
14668 static dw_loc_descr_ref
14669 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14670 {
14671 machine_mode outer_mode = mode;
14672 dw_die_ref type_die;
14673 dw_loc_descr_ref cvt;
14674
14675 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14676 {
14677 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14678 return op;
14679 }
14680 type_die = base_type_for_mode (outer_mode, 1);
14681 if (type_die == NULL)
14682 return NULL;
14683 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14684 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14685 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14686 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14687 add_loc_descr (&op, cvt);
14688 return op;
14689 }
14690
14691 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14692
14693 static dw_loc_descr_ref
14694 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14695 dw_loc_descr_ref op1)
14696 {
14697 dw_loc_descr_ref ret = op0;
14698 add_loc_descr (&ret, op1);
14699 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
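  /* A DWARF comparison pushes 0 or 1; if the target's STORE_FLAG_VALUE is
     something else (e.g. -1), scale the result so it matches the value the
     target's comparison would have produced.  */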
14700 if (STORE_FLAG_VALUE != 1)
14701 {
14702 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14703 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14704 }
14705 return ret;
14706 }
14707
14708 /* Subroutine of scompare_loc_descriptor for the case in which we're
14709 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14710 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14711
14712 static dw_loc_descr_ref
14713 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14714 scalar_int_mode op_mode,
14715 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14716 {
14717 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14718 dw_loc_descr_ref cvt;
14719
14720 if (type_die == NULL)
14721 return NULL;
14722 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14723 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14724 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14725 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14726 add_loc_descr (&op0, cvt);
14727 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14728 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14729 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14730 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14731 add_loc_descr (&op1, cvt);
14732 return compare_loc_descriptor (op, op0, op1);
14733 }
14734
14735 /* Subroutine of scompare_loc_descriptor for the case in which we're
14736 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14737 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
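/* The general strategy below is to shift both operands up so that the OP_MODE
   sign bit becomes the DWARF2_ADDR_SIZE sign bit: e.g. for an HImode
   comparison with DWARF2_ADDR_SIZE == 8, each operand is shifted with
   DW_OP_const1u 48 DW_OP_shl (or the constant is pre-shifted) before the
   signed comparison is applied.  */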
14738
14739 static dw_loc_descr_ref
14740 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14741 scalar_int_mode op_mode,
14742 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14743 {
14744 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14745 /* For eq/ne, if the operands are known to be zero-extended,
14746 there is no need to do the fancy shifting up. */
14747 if (op == DW_OP_eq || op == DW_OP_ne)
14748 {
14749 dw_loc_descr_ref last0, last1;
14750 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14751 ;
14752 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14753 ;
14754 /* deref_size zero extends, and for constants we can check
14755 whether they are zero extended or not. */
14756 if (((last0->dw_loc_opc == DW_OP_deref_size
14757 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14758 || (CONST_INT_P (XEXP (rtl, 0))
14759 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14760 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14761 && ((last1->dw_loc_opc == DW_OP_deref_size
14762 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14763 || (CONST_INT_P (XEXP (rtl, 1))
14764 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14765 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14766 return compare_loc_descriptor (op, op0, op1);
14767
14768 /* EQ/NE comparison against constant in narrower type than
14769 DWARF2_ADDR_SIZE can be performed either as
14770 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14771 DW_OP_{eq,ne}
14772 or
14773 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14774 DW_OP_{eq,ne}. Pick whatever is shorter. */
14775 if (CONST_INT_P (XEXP (rtl, 1))
14776 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14777 && (size_of_int_loc_descriptor (shift) + 1
14778 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14779 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14780 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14781 & GET_MODE_MASK (op_mode))))
14782 {
14783 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14784 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14785 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14786 & GET_MODE_MASK (op_mode));
14787 return compare_loc_descriptor (op, op0, op1);
14788 }
14789 }
14790 add_loc_descr (&op0, int_loc_descriptor (shift));
14791 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14792 if (CONST_INT_P (XEXP (rtl, 1)))
14793 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14794 else
14795 {
14796 add_loc_descr (&op1, int_loc_descriptor (shift));
14797 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14798 }
14799 return compare_loc_descriptor (op, op0, op1);
14800 }
14801
14802 /* Return location descriptor for signed comparison OP RTL. */
14803
14804 static dw_loc_descr_ref
14805 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14806 machine_mode mem_mode)
14807 {
14808 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14809 dw_loc_descr_ref op0, op1;
14810
14811 if (op_mode == VOIDmode)
14812 op_mode = GET_MODE (XEXP (rtl, 1));
14813 if (op_mode == VOIDmode)
14814 return NULL;
14815
14816 scalar_int_mode int_op_mode;
14817 if (dwarf_strict
14818 && dwarf_version < 5
14819 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14820 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14821 return NULL;
14822
14823 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14824 VAR_INIT_STATUS_INITIALIZED);
14825 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827
14828 if (op0 == NULL || op1 == NULL)
14829 return NULL;
14830
14831 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14832 {
14833 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14834 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14835
14836 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14837 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14838 }
14839 return compare_loc_descriptor (op, op0, op1);
14840 }
14841
14842 /* Return location descriptor for unsigned comparison OP RTL. */
14843
14844 static dw_loc_descr_ref
14845 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14846 machine_mode mem_mode)
14847 {
14848 dw_loc_descr_ref op0, op1;
14849
14850 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14851 if (test_op_mode == VOIDmode)
14852 test_op_mode = GET_MODE (XEXP (rtl, 1));
14853
14854 scalar_int_mode op_mode;
14855 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14856 return NULL;
14857
14858 if (dwarf_strict
14859 && dwarf_version < 5
14860 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14861 return NULL;
14862
14863 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14864 VAR_INIT_STATUS_INITIALIZED);
14865 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14866 VAR_INIT_STATUS_INITIALIZED);
14867
14868 if (op0 == NULL || op1 == NULL)
14869 return NULL;
14870
14871 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14874 dw_loc_descr_ref last0, last1;
14875 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14876 ;
14877 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14878 ;
14879 if (CONST_INT_P (XEXP (rtl, 0)))
14880 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14881 /* deref_size zero extends, so no need to mask it again. */
14882 else if (last0->dw_loc_opc != DW_OP_deref_size
14883 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14884 {
14885 add_loc_descr (&op0, int_loc_descriptor (mask));
14886 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14887 }
14888 if (CONST_INT_P (XEXP (rtl, 1)))
14889 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14890 /* deref_size zero extends, so no need to mask it again. */
14891 else if (last1->dw_loc_opc != DW_OP_deref_size
14892 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14893 {
14894 add_loc_descr (&op1, int_loc_descriptor (mask));
14895 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14896 }
14897 }
14898 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14899 {
14900 HOST_WIDE_INT bias = 1;
14901 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
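      /* Adding 2^(N-1) (modulo 2^N, N being the address size in bits) to both
         operands turns the unsigned comparison into a signed one: e.g. with
         DWARF2_ADDR_SIZE == 4, 0xfffffffe and 1 become 0x7ffffffe and
         0x80000001, and the signed DWARF comparison then yields the unsigned
         result.  */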
14902 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14903 if (CONST_INT_P (XEXP (rtl, 1)))
14904 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14905 + INTVAL (XEXP (rtl, 1)));
14906 else
14907 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14908 bias, 0));
14909 }
14910 return compare_loc_descriptor (op, op0, op1);
14911 }
14912
14913 /* Return location descriptor for {U,S}{MIN,MAX}. */
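/* For SMIN in a mode as wide as DWARF2_ADDR_SIZE, for example, the emitted
   sequence is roughly:
     <op0> DW_OP_dup <op1> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
     DW_OP_swap L1: DW_OP_drop
   i.e. keep whichever of the two values is smaller on the stack.  */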
14914
14915 static dw_loc_descr_ref
14916 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14917 machine_mode mem_mode)
14918 {
14919 enum dwarf_location_atom op;
14920 dw_loc_descr_ref op0, op1, ret;
14921 dw_loc_descr_ref bra_node, drop_node;
14922
14923 scalar_int_mode int_mode;
14924 if (dwarf_strict
14925 && dwarf_version < 5
14926 && (!is_a <scalar_int_mode> (mode, &int_mode)
14927 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14928 return NULL;
14929
14930 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14931 VAR_INIT_STATUS_INITIALIZED);
14932 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14933 VAR_INIT_STATUS_INITIALIZED);
14934
14935 if (op0 == NULL || op1 == NULL)
14936 return NULL;
14937
14938 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14939 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14940 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14941 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14942 {
14943 /* Checked by the caller. */
14944 int_mode = as_a <scalar_int_mode> (mode);
14945 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14946 {
14947 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14948 add_loc_descr (&op0, int_loc_descriptor (mask));
14949 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14950 add_loc_descr (&op1, int_loc_descriptor (mask));
14951 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14952 }
14953 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14954 {
14955 HOST_WIDE_INT bias = 1;
14956 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14957 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14958 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14959 }
14960 }
14961 else if (is_a <scalar_int_mode> (mode, &int_mode)
14962 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14963 {
14964 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14965 add_loc_descr (&op0, int_loc_descriptor (shift));
14966 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14967 add_loc_descr (&op1, int_loc_descriptor (shift));
14968 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14969 }
14970 else if (is_a <scalar_int_mode> (mode, &int_mode)
14971 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14972 {
14973 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14974 dw_loc_descr_ref cvt;
14975 if (type_die == NULL)
14976 return NULL;
14977 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14978 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14979 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14980 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14981 add_loc_descr (&op0, cvt);
14982 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14983 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14984 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14985 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14986 add_loc_descr (&op1, cvt);
14987 }
14988
14989 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14990 op = DW_OP_lt;
14991 else
14992 op = DW_OP_gt;
14993 ret = op0;
14994 add_loc_descr (&ret, op1);
14995 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14996 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14997 add_loc_descr (&ret, bra_node);
14998 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14999 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15000 add_loc_descr (&ret, drop_node);
15001 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15002 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15003 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15004 && is_a <scalar_int_mode> (mode, &int_mode)
15005 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15006 ret = convert_descriptor_to_mode (int_mode, ret);
15007 return ret;
15008 }
15009
15010 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15011 after converting both arguments to TYPE_DIE, then convert the result back
15012 to unsigned MODE (untyped if MODE is no wider than DWARF2_ADDR_SIZE). */
15013
15014 static dw_loc_descr_ref
15015 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15016 scalar_int_mode mode, machine_mode mem_mode)
15017 {
15018 dw_loc_descr_ref cvt, op0, op1;
15019
15020 if (type_die == NULL)
15021 return NULL;
15022 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15023 VAR_INIT_STATUS_INITIALIZED);
15024 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 if (op0 == NULL || op1 == NULL)
15027 return NULL;
15028 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15029 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15030 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15031 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15032 add_loc_descr (&op0, cvt);
15033 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15034 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15035 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15036 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15037 add_loc_descr (&op1, cvt);
15038 add_loc_descr (&op0, op1);
15039 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15040 return convert_descriptor_to_mode (mode, op0);
15041 }
15042
15043 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15044 const0 is DW_OP_lit0 or corresponding typed constant,
15045 const1 is DW_OP_lit1 or corresponding typed constant
15046 and constMSB is constant with just the MSB bit set
15047 for the mode):
15048 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15049 L1: const0 DW_OP_swap
15050 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15051 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15052 L3: DW_OP_drop
15053 L4: DW_OP_nop
15054
15055 CTZ is similar:
15056 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15057 L1: const0 DW_OP_swap
15058 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15059 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15060 L3: DW_OP_drop
15061 L4: DW_OP_nop
15062
15063 FFS is similar:
15064 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15065 L1: const1 DW_OP_swap
15066 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15067 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15068 L3: DW_OP_drop
15069 L4: DW_OP_nop */
15070
15071 static dw_loc_descr_ref
15072 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15073 machine_mode mem_mode)
15074 {
15075 dw_loc_descr_ref op0, ret, tmp;
15076 HOST_WIDE_INT valv;
15077 dw_loc_descr_ref l1jump, l1label;
15078 dw_loc_descr_ref l2jump, l2label;
15079 dw_loc_descr_ref l3jump, l3label;
15080 dw_loc_descr_ref l4jump, l4label;
15081 rtx msb;
15082
15083 if (GET_MODE (XEXP (rtl, 0)) != mode)
15084 return NULL;
15085
15086 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15087 VAR_INIT_STATUS_INITIALIZED);
15088 if (op0 == NULL)
15089 return NULL;
15090 ret = op0;
15091 if (GET_CODE (rtl) == CLZ)
15092 {
15093 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15094 valv = GET_MODE_BITSIZE (mode);
15095 }
15096 else if (GET_CODE (rtl) == FFS)
15097 valv = 0;
15098 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15099 valv = GET_MODE_BITSIZE (mode);
15100 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15101 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15102 add_loc_descr (&ret, l1jump);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15104 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15105 VAR_INIT_STATUS_INITIALIZED);
15106 if (tmp == NULL)
15107 return NULL;
15108 add_loc_descr (&ret, tmp);
15109 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15110 add_loc_descr (&ret, l4jump);
15111 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15112 ? const1_rtx : const0_rtx,
15113 mode, mem_mode,
15114 VAR_INIT_STATUS_INITIALIZED);
15115 if (l1label == NULL)
15116 return NULL;
15117 add_loc_descr (&ret, l1label);
15118 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15119 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15120 add_loc_descr (&ret, l2label);
15121 if (GET_CODE (rtl) != CLZ)
15122 msb = const1_rtx;
15123 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15124 msb = GEN_INT (HOST_WIDE_INT_1U
15125 << (GET_MODE_BITSIZE (mode) - 1));
15126 else
15127 msb = immed_wide_int_const
15128 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15129 GET_MODE_PRECISION (mode)), mode);
15130 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15131 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15132 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15133 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15134 else
15135 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15136 VAR_INIT_STATUS_INITIALIZED);
15137 if (tmp == NULL)
15138 return NULL;
15139 add_loc_descr (&ret, tmp);
15140 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15141 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15142 add_loc_descr (&ret, l3jump);
15143 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (tmp == NULL)
15146 return NULL;
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15149 ? DW_OP_shl : DW_OP_shr, 0, 0));
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15151 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15152 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15153 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15154 add_loc_descr (&ret, l2jump);
15155 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15156 add_loc_descr (&ret, l3label);
15157 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15158 add_loc_descr (&ret, l4label);
15159 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15160 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15161 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15162 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15163 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15164 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15165 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15166 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15167 return ret;
15168 }
15169
15170 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15171 const1 is DW_OP_lit1 or corresponding typed constant):
15172 const0 DW_OP_swap
15173 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15174 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15175 L2: DW_OP_drop
15176
15177 PARITY is similar:
15178 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15179 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15180 L2: DW_OP_drop */
15181
15182 static dw_loc_descr_ref
15183 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15184 machine_mode mem_mode)
15185 {
15186 dw_loc_descr_ref op0, ret, tmp;
15187 dw_loc_descr_ref l1jump, l1label;
15188 dw_loc_descr_ref l2jump, l2label;
15189
15190 if (GET_MODE (XEXP (rtl, 0)) != mode)
15191 return NULL;
15192
15193 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (op0 == NULL)
15196 return NULL;
15197 ret = op0;
15198 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15199 VAR_INIT_STATUS_INITIALIZED);
15200 if (tmp == NULL)
15201 return NULL;
15202 add_loc_descr (&ret, tmp);
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15204 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15205 add_loc_descr (&ret, l1label);
15206 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15207 add_loc_descr (&ret, l2jump);
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15209 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15210 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15211 VAR_INIT_STATUS_INITIALIZED);
15212 if (tmp == NULL)
15213 return NULL;
15214 add_loc_descr (&ret, tmp);
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15217 ? DW_OP_plus : DW_OP_xor, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15219 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15220 VAR_INIT_STATUS_INITIALIZED);
15221 add_loc_descr (&ret, tmp);
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15223 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15224 add_loc_descr (&ret, l1jump);
15225 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15226 add_loc_descr (&ret, l2label);
15227 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15228 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15229 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15230 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15231 return ret;
15232 }
15233
15234 /* BSWAP (constS is initial shift count, either 56 or 24):
15235 constS const0
15236 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15237 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15238 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15239 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15240 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15241
15242 static dw_loc_descr_ref
15243 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15244 machine_mode mem_mode)
15245 {
15246 dw_loc_descr_ref op0, ret, tmp;
15247 dw_loc_descr_ref l1jump, l1label;
15248 dw_loc_descr_ref l2jump, l2label;
15249
15250 if (BITS_PER_UNIT != 8
15251 || (GET_MODE_BITSIZE (mode) != 32
15252 && GET_MODE_BITSIZE (mode) != 64))
15253 return NULL;
15254
15255 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15256 VAR_INIT_STATUS_INITIALIZED);
15257 if (op0 == NULL)
15258 return NULL;
15259
15260 ret = op0;
15261 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15262 mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 if (tmp == NULL)
15265 return NULL;
15266 add_loc_descr (&ret, tmp);
15267 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15268 VAR_INIT_STATUS_INITIALIZED);
15269 if (tmp == NULL)
15270 return NULL;
15271 add_loc_descr (&ret, tmp);
15272 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15273 add_loc_descr (&ret, l1label);
15274 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15275 mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 add_loc_descr (&ret, tmp);
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15281 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15282 VAR_INIT_STATUS_INITIALIZED);
15283 if (tmp == NULL)
15284 return NULL;
15285 add_loc_descr (&ret, tmp);
15286 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15292 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15293 VAR_INIT_STATUS_INITIALIZED);
15294 add_loc_descr (&ret, tmp);
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15296 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15297 add_loc_descr (&ret, l2jump);
15298 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15299 VAR_INIT_STATUS_INITIALIZED);
15300 add_loc_descr (&ret, tmp);
15301 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15303 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15304 add_loc_descr (&ret, l1jump);
15305 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15306 add_loc_descr (&ret, l2label);
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15309 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15310 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15311 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15312 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15313 return ret;
15314 }
15315
15316 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15317 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15318 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15319 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15320
15321 ROTATERT is similar:
15322 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15323 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15324 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15325
15326 static dw_loc_descr_ref
15327 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15328 machine_mode mem_mode)
15329 {
15330 rtx rtlop1 = XEXP (rtl, 1);
15331 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15332 int i;
15333
15334 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15335 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15336 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15337 VAR_INIT_STATUS_INITIALIZED);
15338 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15339 VAR_INIT_STATUS_INITIALIZED);
15340 if (op0 == NULL || op1 == NULL)
15341 return NULL;
15342 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15343 for (i = 0; i < 2; i++)
15344 {
15345 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15346 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15347 mode, mem_mode,
15348 VAR_INIT_STATUS_INITIALIZED);
15349 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15350 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15351 ? DW_OP_const4u
15352 : HOST_BITS_PER_WIDE_INT == 64
15353 ? DW_OP_const8u : DW_OP_constu,
15354 GET_MODE_MASK (mode), 0);
15355 else
15356 mask[i] = NULL;
15357 if (mask[i] == NULL)
15358 return NULL;
15359 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15360 }
15361 ret = op0;
15362 add_loc_descr (&ret, op1);
15363 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15364 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15365 if (GET_CODE (rtl) == ROTATERT)
15366 {
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15368 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15369 GET_MODE_BITSIZE (mode), 0));
15370 }
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15372 if (mask[0] != NULL)
15373 add_loc_descr (&ret, mask[0]);
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15375 if (mask[1] != NULL)
15376 {
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15378 add_loc_descr (&ret, mask[1]);
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15380 }
15381 if (GET_CODE (rtl) == ROTATE)
15382 {
15383 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15384 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15385 GET_MODE_BITSIZE (mode), 0));
15386 }
15387 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15388 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15389 return ret;
15390 }
15391
15392 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15393 for DEBUG_PARAMETER_REF RTL. */
15394
15395 static dw_loc_descr_ref
15396 parameter_ref_descriptor (rtx rtl)
15397 {
15398 dw_loc_descr_ref ret;
15399 dw_die_ref ref;
15400
15401 if (dwarf_strict)
15402 return NULL;
15403 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15404 /* With LTO during LTRANS we get the late DIE that refers to the early
15405 DIE, thus we add another indirection here. This seems to confuse
15406 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15407 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15408 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15409 if (ref)
15410 {
15411 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15412 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15413 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15414 }
15415 else
15416 {
15417 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15418 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15419 }
15420 return ret;
15421 }
15422
15423 /* The following routine converts the RTL for a variable or parameter
15424 (resident in memory) into an equivalent Dwarf representation of a
15425 mechanism for getting the address of that same variable onto the top of a
15426 hypothetical "address evaluation" stack.
15427
15428 When creating memory location descriptors, we are effectively transforming
15429 the RTL for a memory-resident object into its Dwarf postfix expression
15430 equivalent. This routine recursively descends an RTL tree, turning
15431 it into Dwarf postfix code as it goes.
15432
15433 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15434
15435 MEM_MODE is the mode of the memory reference, needed to handle some
15436 autoincrement addressing modes.
15437
15438 Return 0 if we can't represent the location. */
15439
15440 dw_loc_descr_ref
15441 mem_loc_descriptor (rtx rtl, machine_mode mode,
15442 machine_mode mem_mode,
15443 enum var_init_status initialized)
15444 {
15445 dw_loc_descr_ref mem_loc_result = NULL;
15446 enum dwarf_location_atom op;
15447 dw_loc_descr_ref op0, op1;
15448 rtx inner = NULL_RTX;
15449 poly_int64 offset;
15450
15451 if (mode == VOIDmode)
15452 mode = GET_MODE (rtl);
15453
15454 /* Note that for a dynamically sized array, the location we will generate a
15455 description of here will be the lowest numbered location which is
15456 actually within the array. That's *not* necessarily the same as the
15457 zeroth element of the array. */
15458
15459 rtl = targetm.delegitimize_address (rtl);
15460
15461 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15462 return NULL;
15463
15464 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15465 switch (GET_CODE (rtl))
15466 {
15467 case POST_INC:
15468 case POST_DEC:
15469 case POST_MODIFY:
15470 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15471
15472 case SUBREG:
15473 /* The case of a subreg may arise when we have a local (register)
15474 variable or a formal (register) parameter which doesn't quite fill
15475 up an entire register. For now, just assume that it is
15476 legitimate to make the Dwarf info refer to the whole register which
15477 contains the given subreg. */
15478 if (!subreg_lowpart_p (rtl))
15479 break;
15480 inner = SUBREG_REG (rtl);
15481 /* FALLTHRU */
15482 case TRUNCATE:
15483 if (inner == NULL_RTX)
15484 inner = XEXP (rtl, 0);
15485 if (is_a <scalar_int_mode> (mode, &int_mode)
15486 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15487 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15488 #ifdef POINTERS_EXTEND_UNSIGNED
15489 || (int_mode == Pmode && mem_mode != VOIDmode)
15490 #endif
15491 )
15492 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15493 {
15494 mem_loc_result = mem_loc_descriptor (inner,
15495 inner_mode,
15496 mem_mode, initialized);
15497 break;
15498 }
15499 if (dwarf_strict && dwarf_version < 5)
15500 break;
15501 if (is_a <scalar_int_mode> (mode, &int_mode)
15502 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15503 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15504 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15505 {
15506 dw_die_ref type_die;
15507 dw_loc_descr_ref cvt;
15508
15509 mem_loc_result = mem_loc_descriptor (inner,
15510 GET_MODE (inner),
15511 mem_mode, initialized);
15512 if (mem_loc_result == NULL)
15513 break;
15514 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15515 if (type_die == NULL)
15516 {
15517 mem_loc_result = NULL;
15518 break;
15519 }
15520 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15521 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15522 else
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15524 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15525 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15527 add_loc_descr (&mem_loc_result, cvt);
15528 if (is_a <scalar_int_mode> (mode, &int_mode)
15529 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15530 {
15531 /* Convert it to untyped afterwards. */
15532 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15533 add_loc_descr (&mem_loc_result, cvt);
15534 }
15535 }
15536 break;
15537
15538 case REG:
15539 if (!is_a <scalar_int_mode> (mode, &int_mode)
15540 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15541 && rtl != arg_pointer_rtx
15542 && rtl != frame_pointer_rtx
15543 #ifdef POINTERS_EXTEND_UNSIGNED
15544 && (int_mode != Pmode || mem_mode == VOIDmode)
15545 #endif
15546 ))
15547 {
15548 dw_die_ref type_die;
15549 unsigned int dbx_regnum;
15550
15551 if (dwarf_strict && dwarf_version < 5)
15552 break;
15553 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15554 break;
15555 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15556 if (type_die == NULL)
15557 break;
15558
15559 dbx_regnum = dbx_reg_number (rtl);
15560 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15561 break;
15562 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15563 dbx_regnum, 0);
15564 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15565 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15566 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15567 break;
15568 }
15569 /* Whenever a register number forms a part of the description of the
15570 method for calculating the (dynamic) address of a memory resident
15571 object, DWARF rules require the register number be referred to as
15572 a "base register". This distinction is not based in any way upon
15573 what category of register the hardware believes the given register
15574 belongs to. This is strictly DWARF terminology we're dealing with
15575 here. Note that in cases where the location of a memory-resident
15576 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15577 OP_CONST (0)) the actual DWARF location descriptor that we generate
15578 may just be OP_BASEREG (basereg). This may look deceptively like
15579 the object in question was allocated to a register (rather than in
15580 memory) so DWARF consumers need to be aware of the subtle
15581 distinction between OP_REG and OP_BASEREG. */
15582 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15583 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15584 else if (stack_realign_drap
15585 && crtl->drap_reg
15586 && crtl->args.internal_arg_pointer == rtl
15587 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15588 {
15589 /* If RTL is internal_arg_pointer, which has been optimized
15590 out, use DRAP instead. */
15591 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15592 VAR_INIT_STATUS_INITIALIZED);
15593 }
15594 break;
15595
15596 case SIGN_EXTEND:
15597 case ZERO_EXTEND:
15598 if (!is_a <scalar_int_mode> (mode, &int_mode)
15599 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15600 break;
15601 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15602 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15603 if (op0 == 0)
15604 break;
15605 else if (GET_CODE (rtl) == ZERO_EXTEND
15606 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15607 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15608 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15609 to expand zero extend as two shifts instead of
15610 masking. */
15611 && GET_MODE_SIZE (inner_mode) <= 4)
15612 {
15613 mem_loc_result = op0;
15614 add_loc_descr (&mem_loc_result,
15615 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15616 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15617 }
15618 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15619 {
15620 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15621 shift *= BITS_PER_UNIT;
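          /* Shift the value up so the inner mode's sign bit becomes the
             address-size sign bit, then shift back down arithmetically for
             SIGN_EXTEND or logically for ZERO_EXTEND: e.g. sign-extending a
             QImode value with DWARF2_ADDR_SIZE == 4 emits DW_OP_lit24
             DW_OP_shl DW_OP_lit24 DW_OP_shra.  */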
15622 if (GET_CODE (rtl) == SIGN_EXTEND)
15623 op = DW_OP_shra;
15624 else
15625 op = DW_OP_shr;
15626 mem_loc_result = op0;
15627 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15628 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15629 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15630 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15631 }
15632 else if (!dwarf_strict || dwarf_version >= 5)
15633 {
15634 dw_die_ref type_die1, type_die2;
15635 dw_loc_descr_ref cvt;
15636
15637 type_die1 = base_type_for_mode (inner_mode,
15638 GET_CODE (rtl) == ZERO_EXTEND);
15639 if (type_die1 == NULL)
15640 break;
15641 type_die2 = base_type_for_mode (int_mode, 1);
15642 if (type_die2 == NULL)
15643 break;
15644 mem_loc_result = op0;
15645 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15646 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15647 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15648 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15649 add_loc_descr (&mem_loc_result, cvt);
15650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15654 add_loc_descr (&mem_loc_result, cvt);
15655 }
15656 break;
15657
15658 case MEM:
15659 {
15660 rtx new_rtl = avoid_constant_pool_reference (rtl);
15661 if (new_rtl != rtl)
15662 {
15663 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15664 initialized);
15665 if (mem_loc_result != NULL)
15666 return mem_loc_result;
15667 }
15668 }
15669 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15670 get_address_mode (rtl), mode,
15671 VAR_INIT_STATUS_INITIALIZED);
15672 if (mem_loc_result == NULL)
15673 mem_loc_result = tls_mem_loc_descriptor (rtl);
15674 if (mem_loc_result != NULL)
15675 {
15676 if (!is_a <scalar_int_mode> (mode, &int_mode)
15677 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15678 {
15679 dw_die_ref type_die;
15680 dw_loc_descr_ref deref;
15681 HOST_WIDE_INT size;
15682
15683 if (dwarf_strict && dwarf_version < 5)
15684 return NULL;
15685 if (!GET_MODE_SIZE (mode).is_constant (&size))
15686 return NULL;
15687 type_die
15688 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15689 if (type_die == NULL)
15690 return NULL;
15691 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15692 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15693 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15694 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15695 add_loc_descr (&mem_loc_result, deref);
15696 }
15697 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15698 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15699 else
15700 add_loc_descr (&mem_loc_result,
15701 new_loc_descr (DW_OP_deref_size,
15702 GET_MODE_SIZE (int_mode), 0));
15703 }
15704 break;
15705
15706 case LO_SUM:
15707 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15708
15709 case LABEL_REF:
15710 /* Some ports can transform a symbol ref into a label ref, because
15711 the symbol ref is too far away and has to be dumped into a constant
15712 pool. */
15713 case CONST:
15714 case SYMBOL_REF:
15715 case UNSPEC:
15716 if (!is_a <scalar_int_mode> (mode, &int_mode)
15717 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15718 #ifdef POINTERS_EXTEND_UNSIGNED
15719 && (int_mode != Pmode || mem_mode == VOIDmode)
15720 #endif
15721 ))
15722 break;
15723
15724 if (GET_CODE (rtl) == UNSPEC)
15725 {
15726 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15727 can't express it in the debug info. This can happen e.g. with some
15728 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15729 approves. */
15730 bool not_ok = false;
15731 subrtx_var_iterator::array_type array;
15732 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15733 if (*iter != rtl && !CONSTANT_P (*iter))
15734 {
15735 not_ok = true;
15736 break;
15737 }
15738
15739 if (not_ok)
15740 break;
15741
15742 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15743 if (!const_ok_for_output_1 (*iter))
15744 {
15745 not_ok = true;
15746 break;
15747 }
15748
15749 if (not_ok)
15750 break;
15751
15752 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15753 goto symref;
15754 }
15755
15756 if (GET_CODE (rtl) == SYMBOL_REF
15757 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15758 {
15759 dw_loc_descr_ref temp;
15760
15761 /* If this is not defined, we have no way to emit the data. */
15762 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15763 break;
15764
15765 temp = new_addr_loc_descr (rtl, dtprel_true);
15766
15767 /* We check for DWARF 5 here because gdb did not implement
15768 DW_OP_form_tls_address until after 7.12. */
15769 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15770 ? DW_OP_form_tls_address
15771 : DW_OP_GNU_push_tls_address),
15772 0, 0);
15773 add_loc_descr (&mem_loc_result, temp);
15774
15775 break;
15776 }
15777
15778 if (!const_ok_for_output (rtl))
15779 {
15780 if (GET_CODE (rtl) == CONST)
15781 switch (GET_CODE (XEXP (rtl, 0)))
15782 {
15783 case NOT:
15784 op = DW_OP_not;
15785 goto try_const_unop;
15786 case NEG:
15787 op = DW_OP_neg;
15788 goto try_const_unop;
15789 try_const_unop:
15790 rtx arg;
15791 arg = XEXP (XEXP (rtl, 0), 0);
15792 if (!CONSTANT_P (arg))
15793 arg = gen_rtx_CONST (int_mode, arg);
15794 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15795 initialized);
15796 if (op0)
15797 {
15798 mem_loc_result = op0;
15799 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15800 }
15801 break;
15802 default:
15803 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15804 mem_mode, initialized);
15805 break;
15806 }
15807 break;
15808 }
15809
15810 symref:
15811 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15812 vec_safe_push (used_rtx_array, rtl);
15813 break;
15814
15815 case CONCAT:
15816 case CONCATN:
15817 case VAR_LOCATION:
15818 case DEBUG_IMPLICIT_PTR:
15819 expansion_failed (NULL_TREE, rtl,
15820 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15821 return 0;
15822
15823 case ENTRY_VALUE:
15824 if (dwarf_strict && dwarf_version < 5)
15825 return NULL;
15826 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15827 {
15828 if (!is_a <scalar_int_mode> (mode, &int_mode)
15829 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15830 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15831 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15832 else
15833 {
15834 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15835 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15836 return NULL;
15837 op0 = one_reg_loc_descriptor (dbx_regnum,
15838 VAR_INIT_STATUS_INITIALIZED);
15839 }
15840 }
15841 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15842 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15843 {
15844 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15845 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15846 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15847 return NULL;
15848 }
15849 else
15850 gcc_unreachable ();
15851 if (op0 == NULL)
15852 return NULL;
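/* Wrap OP0 in an entry-value operation (dwarf_OP may map DW_OP_entry_value
to the DW_OP_GNU_entry_value extension before DWARF 5); consumers evaluate
the nested expression as it would have been at the moment the containing
function was entered. */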
15853 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15854 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15855 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15856 break;
15857
15858 case DEBUG_PARAMETER_REF:
15859 mem_loc_result = parameter_ref_descriptor (rtl);
15860 break;
15861
15862 case PRE_MODIFY:
15863 /* Extract the PLUS expression nested inside and fall into
15864 PLUS code below. */
15865 rtl = XEXP (rtl, 1);
15866 goto plus;
15867
15868 case PRE_INC:
15869 case PRE_DEC:
15870 /* Turn these into a PLUS expression and fall into the PLUS code
15871 below. */
15872 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15873 gen_int_mode (GET_CODE (rtl) == PRE_INC
15874 ? GET_MODE_UNIT_SIZE (mem_mode)
15875 : -GET_MODE_UNIT_SIZE (mem_mode),
15876 mode));
15877
15878 /* fall through */
15879
15880 case PLUS:
15881 plus:
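/* A (base_reg + const) address accepted by is_based_loc can typically be
emitted as a single register-relative operation (DW_OP_fbreg or a
DW_OP_breg<n> with offset) via based_loc_descr; anything else is built
explicitly from the operands and DW_OP_plus below. */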
15882 if (is_based_loc (rtl)
15883 && is_a <scalar_int_mode> (mode, &int_mode)
15884 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15885 || XEXP (rtl, 0) == arg_pointer_rtx
15886 || XEXP (rtl, 0) == frame_pointer_rtx))
15887 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15888 INTVAL (XEXP (rtl, 1)),
15889 VAR_INIT_STATUS_INITIALIZED);
15890 else
15891 {
15892 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15893 VAR_INIT_STATUS_INITIALIZED);
15894 if (mem_loc_result == 0)
15895 break;
15896
15897 if (CONST_INT_P (XEXP (rtl, 1))
15898 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15899 <= DWARF2_ADDR_SIZE))
15900 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15901 else
15902 {
15903 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15904 VAR_INIT_STATUS_INITIALIZED);
15905 if (op1 == 0)
15906 return NULL;
15907 add_loc_descr (&mem_loc_result, op1);
15908 add_loc_descr (&mem_loc_result,
15909 new_loc_descr (DW_OP_plus, 0, 0));
15910 }
15911 }
15912 break;
15913
15914 /* If a pseudo-reg is optimized away, it is possible for it to
15915 be replaced with a MEM containing a multiply or shift. */
15916 case MINUS:
15917 op = DW_OP_minus;
15918 goto do_binop;
15919
15920 case MULT:
15921 op = DW_OP_mul;
15922 goto do_binop;
15923
15924 case DIV:
15925 if ((!dwarf_strict || dwarf_version >= 5)
15926 && is_a <scalar_int_mode> (mode, &int_mode)
15927 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 mem_loc_result = typed_binop (DW_OP_div, rtl,
15930 base_type_for_mode (mode, 0),
15931 int_mode, mem_mode);
15932 break;
15933 }
15934 op = DW_OP_div;
15935 goto do_binop;
15936
15937 case UMOD:
15938 op = DW_OP_mod;
15939 goto do_binop;
15940
15941 case ASHIFT:
15942 op = DW_OP_shl;
15943 goto do_shift;
15944
15945 case ASHIFTRT:
15946 op = DW_OP_shra;
15947 goto do_shift;
15948
15949 case LSHIFTRT:
15950 op = DW_OP_shr;
15951 goto do_shift;
15952
15953 do_shift:
15954 if (!is_a <scalar_int_mode> (mode, &int_mode))
15955 break;
15956 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15957 VAR_INIT_STATUS_INITIALIZED);
15958 {
15959 rtx rtlop1 = XEXP (rtl, 1);
15960 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15961 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15962 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15963 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15964 VAR_INIT_STATUS_INITIALIZED);
15965 }
15966
15967 if (op0 == 0 || op1 == 0)
15968 break;
15969
15970 mem_loc_result = op0;
15971 add_loc_descr (&mem_loc_result, op1);
15972 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15973 break;
15974
15975 case AND:
15976 op = DW_OP_and;
15977 goto do_binop;
15978
15979 case IOR:
15980 op = DW_OP_or;
15981 goto do_binop;
15982
15983 case XOR:
15984 op = DW_OP_xor;
15985 goto do_binop;
15986
15987 do_binop:
15988 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15989 VAR_INIT_STATUS_INITIALIZED);
15990 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15991 VAR_INIT_STATUS_INITIALIZED);
15992
15993 if (op0 == 0 || op1 == 0)
15994 break;
15995
15996 mem_loc_result = op0;
15997 add_loc_descr (&mem_loc_result, op1);
15998 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15999 break;
16000
16001 case MOD:
16002 if ((!dwarf_strict || dwarf_version >= 5)
16003 && is_a <scalar_int_mode> (mode, &int_mode)
16004 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16005 {
16006 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16007 base_type_for_mode (mode, 0),
16008 int_mode, mem_mode);
16009 break;
16010 }
16011
16012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16013 VAR_INIT_STATUS_INITIALIZED);
16014 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16015 VAR_INIT_STATUS_INITIALIZED);
16016
16017 if (op0 == 0 || op1 == 0)
16018 break;
16019
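/* Open-code the modulus with basic DWARF operations: with OP0 and OP1 on
the stack, DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus leaves
OP0 - (OP0 / OP1) * OP1. */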
16020 mem_loc_result = op0;
16021 add_loc_descr (&mem_loc_result, op1);
16022 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16023 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16024 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16027 break;
16028
16029 case UDIV:
16030 if ((!dwarf_strict || dwarf_version >= 5)
16031 && is_a <scalar_int_mode> (mode, &int_mode))
16032 {
16033 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16034 {
16035 op = DW_OP_div;
16036 goto do_binop;
16037 }
16038 mem_loc_result = typed_binop (DW_OP_div, rtl,
16039 base_type_for_mode (int_mode, 1),
16040 int_mode, mem_mode);
16041 }
16042 break;
16043
16044 case NOT:
16045 op = DW_OP_not;
16046 goto do_unop;
16047
16048 case ABS:
16049 op = DW_OP_abs;
16050 goto do_unop;
16051
16052 case NEG:
16053 op = DW_OP_neg;
16054 goto do_unop;
16055
16056 do_unop:
16057 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16058 VAR_INIT_STATUS_INITIALIZED);
16059
16060 if (op0 == 0)
16061 break;
16062
16063 mem_loc_result = op0;
16064 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16065 break;
16066
16067 case CONST_INT:
16068 if (!is_a <scalar_int_mode> (mode, &int_mode)
16069 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16070 #ifdef POINTERS_EXTEND_UNSIGNED
16071 || (int_mode == Pmode
16072 && mem_mode != VOIDmode
16073 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16074 #endif
16075 )
16076 {
16077 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16078 break;
16079 }
16080 if ((!dwarf_strict || dwarf_version >= 5)
16081 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16082 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16083 {
16084 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16085 scalar_int_mode amode;
16086 if (type_die == NULL)
16087 return NULL;
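/* A non-negative constant that also fits the address-sized mode can be
emitted either as a small constant followed by DW_OP_convert to the wide
base type, or directly as DW_OP_const_type; compare the encoded sizes and
use whichever form is shorter. */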
16088 if (INTVAL (rtl) >= 0
16089 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16090 .exists (&amode))
16091 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16092 /* const DW_OP_convert <XXX> vs.
16093 DW_OP_const_type <XXX, 1, const>. */
16094 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16095 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16096 {
16097 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16098 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16099 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16100 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16101 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16102 add_loc_descr (&mem_loc_result, op0);
16103 return mem_loc_result;
16104 }
16105 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16106 INTVAL (rtl));
16107 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16108 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16109 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16110 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16111 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16112 else
16113 {
16114 mem_loc_result->dw_loc_oprnd2.val_class
16115 = dw_val_class_const_double;
16116 mem_loc_result->dw_loc_oprnd2.v.val_double
16117 = double_int::from_shwi (INTVAL (rtl));
16118 }
16119 }
16120 break;
16121
16122 case CONST_DOUBLE:
16123 if (!dwarf_strict || dwarf_version >= 5)
16124 {
16125 dw_die_ref type_die;
16126
16127 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16128 CONST_DOUBLE rtx could represent either a large integer
16129 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16130 the value is always a floating point constant.
16131
16132 When it is an integer, a CONST_DOUBLE is used whenever
16133 the constant requires 2 HWIs to be adequately represented.
16134 We output CONST_DOUBLEs as blocks. */
16135 if (mode == VOIDmode
16136 || (GET_MODE (rtl) == VOIDmode
16137 && maybe_ne (GET_MODE_BITSIZE (mode),
16138 HOST_BITS_PER_DOUBLE_INT)))
16139 break;
16140 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16141 if (type_die == NULL)
16142 return NULL;
16143 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16144 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16145 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16146 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16147 #if TARGET_SUPPORTS_WIDE_INT == 0
16148 if (!SCALAR_FLOAT_MODE_P (mode))
16149 {
16150 mem_loc_result->dw_loc_oprnd2.val_class
16151 = dw_val_class_const_double;
16152 mem_loc_result->dw_loc_oprnd2.v.val_double
16153 = rtx_to_double_int (rtl);
16154 }
16155 else
16156 #endif
16157 {
16158 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16159 unsigned int length = GET_MODE_SIZE (float_mode);
16160 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16161
16162 insert_float (rtl, array);
16163 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16164 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16165 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16166 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16167 }
16168 }
16169 break;
16170
16171 case CONST_WIDE_INT:
16172 if (!dwarf_strict || dwarf_version >= 5)
16173 {
16174 dw_die_ref type_die;
16175
16176 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16177 if (type_die == NULL)
16178 return NULL;
16179 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16180 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16181 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16182 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16183 mem_loc_result->dw_loc_oprnd2.val_class
16184 = dw_val_class_wide_int;
16185 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16186 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16187 }
16188 break;
16189
16190 case CONST_POLY_INT:
16191 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16192 break;
16193
16194 case EQ:
16195 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16196 break;
16197
16198 case GE:
16199 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16200 break;
16201
16202 case GT:
16203 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16204 break;
16205
16206 case LE:
16207 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16208 break;
16209
16210 case LT:
16211 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16212 break;
16213
16214 case NE:
16215 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16216 break;
16217
16218 case GEU:
16219 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16220 break;
16221
16222 case GTU:
16223 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16224 break;
16225
16226 case LEU:
16227 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16228 break;
16229
16230 case LTU:
16231 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16232 break;
16233
16234 case UMIN:
16235 case UMAX:
16236 if (!SCALAR_INT_MODE_P (mode))
16237 break;
16238 /* FALLTHRU */
16239 case SMIN:
16240 case SMAX:
16241 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16242 break;
16243
16244 case ZERO_EXTRACT:
16245 case SIGN_EXTRACT:
16246 if (CONST_INT_P (XEXP (rtl, 1))
16247 && CONST_INT_P (XEXP (rtl, 2))
16248 && is_a <scalar_int_mode> (mode, &int_mode)
16249 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16250 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16251 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16252 && ((unsigned) INTVAL (XEXP (rtl, 1))
16253 + (unsigned) INTVAL (XEXP (rtl, 2))
16254 <= GET_MODE_BITSIZE (int_mode)))
16255 {
16256 int shift, size;
16257 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16258 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16259 if (op0 == 0)
16260 break;
16261 if (GET_CODE (rtl) == SIGN_EXTRACT)
16262 op = DW_OP_shra;
16263 else
16264 op = DW_OP_shr;
16265 mem_loc_result = op0;
16266 size = INTVAL (XEXP (rtl, 1));
16267 shift = INTVAL (XEXP (rtl, 2));
16268 if (BITS_BIG_ENDIAN)
16269 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
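/* Isolate the field by shifting it to the top of the stack slot with
DW_OP_shl and then back down with DW_OP_shr (or DW_OP_shra for a
SIGN_EXTRACT), which masks and extends it at the same time. */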
16270 if (shift + size != (int) DWARF2_ADDR_SIZE)
16271 {
16272 add_loc_descr (&mem_loc_result,
16273 int_loc_descriptor (DWARF2_ADDR_SIZE
16274 - shift - size));
16275 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16276 }
16277 if (size != (int) DWARF2_ADDR_SIZE)
16278 {
16279 add_loc_descr (&mem_loc_result,
16280 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16281 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16282 }
16283 }
16284 break;
16285
16286 case IF_THEN_ELSE:
16287 {
16288 dw_loc_descr_ref op2, bra_node, drop_node;
16289 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16290 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16291 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16292 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16293 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16294 VAR_INIT_STATUS_INITIALIZED);
16295 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16296 VAR_INIT_STATUS_INITIALIZED);
16297 if (op0 == NULL || op1 == NULL || op2 == NULL)
16298 break;
16299
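/* Push <then> (OP1), <else> (OP2) and the condition (OP0). DW_OP_bra pops
the condition and, when it is nonzero, branches past DW_OP_swap straight
to DW_OP_drop, discarding the else value; when it is zero, DW_OP_swap
followed by DW_OP_drop discards the then value instead. */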
16300 mem_loc_result = op1;
16301 add_loc_descr (&mem_loc_result, op2);
16302 add_loc_descr (&mem_loc_result, op0);
16303 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16304 add_loc_descr (&mem_loc_result, bra_node);
16305 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16306 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16307 add_loc_descr (&mem_loc_result, drop_node);
16308 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16309 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16310 }
16311 break;
16312
16313 case FLOAT_EXTEND:
16314 case FLOAT_TRUNCATE:
16315 case FLOAT:
16316 case UNSIGNED_FLOAT:
16317 case FIX:
16318 case UNSIGNED_FIX:
16319 if (!dwarf_strict || dwarf_version >= 5)
16320 {
16321 dw_die_ref type_die;
16322 dw_loc_descr_ref cvt;
16323
16324 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16325 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16326 if (op0 == NULL)
16327 break;
16328 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16329 && (GET_CODE (rtl) == FLOAT
16330 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16331 {
16332 type_die = base_type_for_mode (int_mode,
16333 GET_CODE (rtl) == UNSIGNED_FLOAT);
16334 if (type_die == NULL)
16335 break;
16336 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16337 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16338 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16339 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16340 add_loc_descr (&op0, cvt);
16341 }
16342 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16343 if (type_die == NULL)
16344 break;
16345 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16346 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16347 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16348 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16349 add_loc_descr (&op0, cvt);
16350 if (is_a <scalar_int_mode> (mode, &int_mode)
16351 && (GET_CODE (rtl) == FIX
16352 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16353 {
16354 op0 = convert_descriptor_to_mode (int_mode, op0);
16355 if (op0 == NULL)
16356 break;
16357 }
16358 mem_loc_result = op0;
16359 }
16360 break;
16361
16362 case CLZ:
16363 case CTZ:
16364 case FFS:
16365 if (is_a <scalar_int_mode> (mode, &int_mode))
16366 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16367 break;
16368
16369 case POPCOUNT:
16370 case PARITY:
16371 if (is_a <scalar_int_mode> (mode, &int_mode))
16372 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16373 break;
16374
16375 case BSWAP:
16376 if (is_a <scalar_int_mode> (mode, &int_mode))
16377 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16378 break;
16379
16380 case ROTATE:
16381 case ROTATERT:
16382 if (is_a <scalar_int_mode> (mode, &int_mode))
16383 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16384 break;
16385
16386 case COMPARE:
16387 /* In theory, we could implement the above. */
16388 /* DWARF cannot represent the unsigned compare operations
16389 natively. */
16390 case SS_MULT:
16391 case US_MULT:
16392 case SS_DIV:
16393 case US_DIV:
16394 case SS_PLUS:
16395 case US_PLUS:
16396 case SS_MINUS:
16397 case US_MINUS:
16398 case SS_NEG:
16399 case US_NEG:
16400 case SS_ABS:
16401 case SS_ASHIFT:
16402 case US_ASHIFT:
16403 case SS_TRUNCATE:
16404 case US_TRUNCATE:
16405 case UNORDERED:
16406 case ORDERED:
16407 case UNEQ:
16408 case UNGE:
16409 case UNGT:
16410 case UNLE:
16411 case UNLT:
16412 case LTGT:
16413 case FRACT_CONVERT:
16414 case UNSIGNED_FRACT_CONVERT:
16415 case SAT_FRACT:
16416 case UNSIGNED_SAT_FRACT:
16417 case SQRT:
16418 case ASM_OPERANDS:
16419 case VEC_MERGE:
16420 case VEC_SELECT:
16421 case VEC_CONCAT:
16422 case VEC_DUPLICATE:
16423 case VEC_SERIES:
16424 case HIGH:
16425 case FMA:
16426 case STRICT_LOW_PART:
16427 case CONST_VECTOR:
16428 case CONST_FIXED:
16429 case CLRSB:
16430 case CLOBBER:
16431 case CLOBBER_HIGH:
16432 break;
16433
16434 case CONST_STRING:
16435 resolve_one_addr (&rtl);
16436 goto symref;
16437
16438 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16439 the expression. An UNSPEC rtx represents a raw DWARF operation;
16440 new_loc_descr is called for it to build the operation directly.
16441 Otherwise mem_loc_descriptor is called recursively. */
16442 case PARALLEL:
16443 {
16444 int index = 0;
16445 dw_loc_descr_ref exp_result = NULL;
16446
16447 for (; index < XVECLEN (rtl, 0); index++)
16448 {
16449 rtx elem = XVECEXP (rtl, 0, index);
16450 if (GET_CODE (elem) == UNSPEC)
16451 {
16452 /* Each DWARF operation UNSPEC contains two operands; if
16453 an operand is not used for the operation, const0_rtx is
16454 passed. */
16455 gcc_assert (XVECLEN (elem, 0) == 2);
16456
16457 HOST_WIDE_INT dw_op = XINT (elem, 1);
16458 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16459 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16460 exp_result
16461 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16462 oprnd2);
16463 }
16464 else
16465 exp_result
16466 = mem_loc_descriptor (elem, mode, mem_mode,
16467 VAR_INIT_STATUS_INITIALIZED);
16468
16469 if (!mem_loc_result)
16470 mem_loc_result = exp_result;
16471 else
16472 add_loc_descr (&mem_loc_result, exp_result);
16473 }
16474
16475 break;
16476 }
16477
16478 default:
16479 if (flag_checking)
16480 {
16481 print_rtl (stderr, rtl);
16482 gcc_unreachable ();
16483 }
16484 break;
16485 }
16486
16487 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16488 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16489
16490 return mem_loc_result;
16491 }
16492
16493 /* Return a descriptor that describes the concatenation of two locations.
16494 This is typically a complex variable. */
16495
16496 static dw_loc_descr_ref
16497 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16498 {
16499 /* At present we only track constant-sized pieces. */
16500 unsigned int size0, size1;
16501 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16502 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16503 return 0;
16504
16505 dw_loc_descr_ref cc_loc_result = NULL;
16506 dw_loc_descr_ref x0_ref
16507 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16508 dw_loc_descr_ref x1_ref
16509 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510
16511 if (x0_ref == 0 || x1_ref == 0)
16512 return 0;
16513
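/* Describe the concatenation piecewise:
<location of X0> DW_OP_piece size0 <location of X1> DW_OP_piece size1. */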
16514 cc_loc_result = x0_ref;
16515 add_loc_descr_op_piece (&cc_loc_result, size0);
16516
16517 add_loc_descr (&cc_loc_result, x1_ref);
16518 add_loc_descr_op_piece (&cc_loc_result, size1);
16519
16520 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16521 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16522
16523 return cc_loc_result;
16524 }
16525
16526 /* Return a descriptor that describes the concatenation of N
16527 locations. */
16528
16529 static dw_loc_descr_ref
16530 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16531 {
16532 unsigned int i;
16533 dw_loc_descr_ref cc_loc_result = NULL;
16534 unsigned int n = XVECLEN (concatn, 0);
16535 unsigned int size;
16536
16537 for (i = 0; i < n; ++i)
16538 {
16539 dw_loc_descr_ref ref;
16540 rtx x = XVECEXP (concatn, 0, i);
16541
16542 /* At present we only track constant-sized pieces. */
16543 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16544 return NULL;
16545
16546 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16547 if (ref == NULL)
16548 return NULL;
16549
16550 add_loc_descr (&cc_loc_result, ref);
16551 add_loc_descr_op_piece (&cc_loc_result, size);
16552 }
16553
16554 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16555 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16556
16557 return cc_loc_result;
16558 }
16559
16560 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16561 for DEBUG_IMPLICIT_PTR RTL. */
16562
16563 static dw_loc_descr_ref
16564 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16565 {
16566 dw_loc_descr_ref ret;
16567 dw_die_ref ref;
16568
16569 if (dwarf_strict && dwarf_version < 5)
16570 return NULL;
16571 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16572 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16573 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16574 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16575 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16576 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16577 if (ref)
16578 {
16579 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16580 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16581 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16582 }
16583 else
16584 {
16585 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16586 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16587 }
16588 return ret;
16589 }
16590
16591 /* Output a proper Dwarf location descriptor for a variable or parameter
16592 which is either allocated in a register or in a memory location. For a
16593 register, we just generate an OP_REG and the register number. For a
16594 memory location we provide a Dwarf postfix expression describing how to
16595 generate the (dynamic) address of the object onto the address stack.
16596
16597 MODE is mode of the decl if this loc_descriptor is going to be used in
16598 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16599 allowed, VOIDmode otherwise.
16600
16601 If we don't know how to describe it, return 0. */
16602
16603 static dw_loc_descr_ref
16604 loc_descriptor (rtx rtl, machine_mode mode,
16605 enum var_init_status initialized)
16606 {
16607 dw_loc_descr_ref loc_result = NULL;
16608 scalar_int_mode int_mode;
16609
16610 switch (GET_CODE (rtl))
16611 {
16612 case SUBREG:
16613 /* The case of a subreg may arise when we have a local (register)
16614 variable or a formal (register) parameter which doesn't quite fill
16615 up an entire register. For now, just assume that it is
16616 legitimate to make the Dwarf info refer to the whole register which
16617 contains the given subreg. */
16618 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16619 loc_result = loc_descriptor (SUBREG_REG (rtl),
16620 GET_MODE (SUBREG_REG (rtl)), initialized);
16621 else
16622 goto do_default;
16623 break;
16624
16625 case REG:
16626 loc_result = reg_loc_descriptor (rtl, initialized);
16627 break;
16628
16629 case MEM:
16630 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16631 GET_MODE (rtl), initialized);
16632 if (loc_result == NULL)
16633 loc_result = tls_mem_loc_descriptor (rtl);
16634 if (loc_result == NULL)
16635 {
16636 rtx new_rtl = avoid_constant_pool_reference (rtl);
16637 if (new_rtl != rtl)
16638 loc_result = loc_descriptor (new_rtl, mode, initialized);
16639 }
16640 break;
16641
16642 case CONCAT:
16643 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16644 initialized);
16645 break;
16646
16647 case CONCATN:
16648 loc_result = concatn_loc_descriptor (rtl, initialized);
16649 break;
16650
16651 case VAR_LOCATION:
16652 /* Single part. */
16653 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16654 {
16655 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16656 if (GET_CODE (loc) == EXPR_LIST)
16657 loc = XEXP (loc, 0);
16658 loc_result = loc_descriptor (loc, mode, initialized);
16659 break;
16660 }
16661
16662 rtl = XEXP (rtl, 1);
16663 /* FALLTHRU */
16664
16665 case PARALLEL:
16666 {
16667 rtvec par_elems = XVEC (rtl, 0);
16668 int num_elem = GET_NUM_ELEM (par_elems);
16669 machine_mode mode;
16670 int i, size;
16671
16672 /* Create the first one, so we have something to add to. */
16673 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16674 VOIDmode, initialized);
16675 if (loc_result == NULL)
16676 return NULL;
16677 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16678 /* At present we only track constant-sized pieces. */
16679 if (!GET_MODE_SIZE (mode).is_constant (&size))
16680 return NULL;
16681 add_loc_descr_op_piece (&loc_result, size);
16682 for (i = 1; i < num_elem; i++)
16683 {
16684 dw_loc_descr_ref temp;
16685
16686 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16687 VOIDmode, initialized);
16688 if (temp == NULL)
16689 return NULL;
16690 add_loc_descr (&loc_result, temp);
16691 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16692 /* At present we only track constant-sized pieces. */
16693 if (!GET_MODE_SIZE (mode).is_constant (&size))
16694 return NULL;
16695 add_loc_descr_op_piece (&loc_result, size);
16696 }
16697 }
16698 break;
16699
16700 case CONST_INT:
16701 if (mode != VOIDmode && mode != BLKmode)
16702 {
16703 int_mode = as_a <scalar_int_mode> (mode);
16704 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16705 INTVAL (rtl));
16706 }
16707 break;
16708
16709 case CONST_DOUBLE:
16710 if (mode == VOIDmode)
16711 mode = GET_MODE (rtl);
16712
16713 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16714 {
16715 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16716
16717 /* Note that a CONST_DOUBLE rtx could represent either an integer
16718 or a floating-point constant. A CONST_DOUBLE is used whenever
16719 the constant requires more than one word in order to be
16720 adequately represented. We output CONST_DOUBLEs as blocks. */
16721 scalar_mode smode = as_a <scalar_mode> (mode);
16722 loc_result = new_loc_descr (DW_OP_implicit_value,
16723 GET_MODE_SIZE (smode), 0);
16724 #if TARGET_SUPPORTS_WIDE_INT == 0
16725 if (!SCALAR_FLOAT_MODE_P (smode))
16726 {
16727 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16728 loc_result->dw_loc_oprnd2.v.val_double
16729 = rtx_to_double_int (rtl);
16730 }
16731 else
16732 #endif
16733 {
16734 unsigned int length = GET_MODE_SIZE (smode);
16735 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16736
16737 insert_float (rtl, array);
16738 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16739 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16740 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16741 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16742 }
16743 }
16744 break;
16745
16746 case CONST_WIDE_INT:
16747 if (mode == VOIDmode)
16748 mode = GET_MODE (rtl);
16749
16750 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16751 {
16752 int_mode = as_a <scalar_int_mode> (mode);
16753 loc_result = new_loc_descr (DW_OP_implicit_value,
16754 GET_MODE_SIZE (int_mode), 0);
16755 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16756 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16757 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16758 }
16759 break;
16760
16761 case CONST_VECTOR:
16762 if (mode == VOIDmode)
16763 mode = GET_MODE (rtl);
16764
16765 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16766 {
16767 unsigned int length;
16768 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16769 return NULL;
16770
16771 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16772 unsigned char *array
16773 = ggc_vec_alloc<unsigned char> (length * elt_size);
16774 unsigned int i;
16775 unsigned char *p;
16776 machine_mode imode = GET_MODE_INNER (mode);
16777
16778 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16779 switch (GET_MODE_CLASS (mode))
16780 {
16781 case MODE_VECTOR_INT:
16782 for (i = 0, p = array; i < length; i++, p += elt_size)
16783 {
16784 rtx elt = CONST_VECTOR_ELT (rtl, i);
16785 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16786 }
16787 break;
16788
16789 case MODE_VECTOR_FLOAT:
16790 for (i = 0, p = array; i < length; i++, p += elt_size)
16791 {
16792 rtx elt = CONST_VECTOR_ELT (rtl, i);
16793 insert_float (elt, p);
16794 }
16795 break;
16796
16797 default:
16798 gcc_unreachable ();
16799 }
16800
16801 loc_result = new_loc_descr (DW_OP_implicit_value,
16802 length * elt_size, 0);
16803 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16804 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16805 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16806 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16807 }
16808 break;
16809
16810 case CONST:
16811 if (mode == VOIDmode
16812 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16813 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16814 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16815 {
16816 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16817 break;
16818 }
16819 /* FALLTHROUGH */
16820 case SYMBOL_REF:
16821 if (!const_ok_for_output (rtl))
16822 break;
16823 /* FALLTHROUGH */
16824 case LABEL_REF:
16825 if (is_a <scalar_int_mode> (mode, &int_mode)
16826 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16827 && (dwarf_version >= 4 || !dwarf_strict))
16828 {
16829 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16830 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16831 vec_safe_push (used_rtx_array, rtl);
16832 }
16833 break;
16834
16835 case DEBUG_IMPLICIT_PTR:
16836 loc_result = implicit_ptr_descriptor (rtl, 0);
16837 break;
16838
16839 case PLUS:
16840 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16841 && CONST_INT_P (XEXP (rtl, 1)))
16842 {
16843 loc_result
16844 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16845 break;
16846 }
16847 /* FALLTHRU */
16848 do_default:
16849 default:
16850 if ((is_a <scalar_int_mode> (mode, &int_mode)
16851 && GET_MODE (rtl) == int_mode
16852 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16853 && dwarf_version >= 4)
16854 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16855 {
16856 /* Value expression. */
16857 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16858 if (loc_result)
16859 add_loc_descr (&loc_result,
16860 new_loc_descr (DW_OP_stack_value, 0, 0));
16861 }
16862 break;
16863 }
16864
16865 return loc_result;
16866 }
16867
16868 /* We need to figure out what section we should use as the base for the
16869 address ranges where a given location is valid.
16870 1. If this particular DECL has a section associated with it, use that.
16871 2. If this function has a section associated with it, use that.
16872 3. Otherwise, use the text section.
16873 XXX: If you split a variable across multiple sections, we won't notice. */
16874
16875 static const char *
16876 secname_for_decl (const_tree decl)
16877 {
16878 const char *secname;
16879
16880 if (VAR_OR_FUNCTION_DECL_P (decl)
16881 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16882 && DECL_SECTION_NAME (decl))
16883 secname = DECL_SECTION_NAME (decl);
16884 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16885 {
16886 if (in_cold_section_p)
16887 {
16888 section *sec = current_function_section ();
16889 if (sec->common.flags & SECTION_NAMED)
16890 return sec->named.name;
16891 }
16892 secname = DECL_SECTION_NAME (current_function_decl);
16893 }
16894 else if (cfun && in_cold_section_p)
16895 secname = crtl->subsections.cold_section_label;
16896 else
16897 secname = text_section_label;
16898
16899 return secname;
16900 }
16901
16902 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16903
16904 static bool
16905 decl_by_reference_p (tree decl)
16906 {
16907 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16908 || VAR_P (decl))
16909 && DECL_BY_REFERENCE (decl));
16910 }
16911
16912 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16913 for VARLOC. */
16914
16915 static dw_loc_descr_ref
16916 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16917 enum var_init_status initialized)
16918 {
16919 int have_address = 0;
16920 dw_loc_descr_ref descr;
16921 machine_mode mode;
16922
16923 if (want_address != 2)
16924 {
16925 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16926 /* Single part. */
16927 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16928 {
16929 varloc = PAT_VAR_LOCATION_LOC (varloc);
16930 if (GET_CODE (varloc) == EXPR_LIST)
16931 varloc = XEXP (varloc, 0);
16932 mode = GET_MODE (varloc);
16933 if (MEM_P (varloc))
16934 {
16935 rtx addr = XEXP (varloc, 0);
16936 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16937 mode, initialized);
16938 if (descr)
16939 have_address = 1;
16940 else
16941 {
16942 rtx x = avoid_constant_pool_reference (varloc);
16943 if (x != varloc)
16944 descr = mem_loc_descriptor (x, mode, VOIDmode,
16945 initialized);
16946 }
16947 }
16948 else
16949 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16950 }
16951 else
16952 return 0;
16953 }
16954 else
16955 {
16956 if (GET_CODE (varloc) == VAR_LOCATION)
16957 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16958 else
16959 mode = DECL_MODE (loc);
16960 descr = loc_descriptor (varloc, mode, initialized);
16961 have_address = 1;
16962 }
16963
16964 if (!descr)
16965 return 0;
16966
16967 if (want_address == 2 && !have_address
16968 && (dwarf_version >= 4 || !dwarf_strict))
16969 {
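/* The caller asked for an address but we only have a value; as long as the
value fits into an address-sized slot, describe it as an implicit value by
appending DW_OP_stack_value. */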
16970 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16971 {
16972 expansion_failed (loc, NULL_RTX,
16973 "DWARF address size mismatch");
16974 return 0;
16975 }
16976 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16977 have_address = 1;
16978 }
16979 /* Show if we can't fill the request for an address. */
16980 if (want_address && !have_address)
16981 {
16982 expansion_failed (loc, NULL_RTX,
16983 "Want address and only have value");
16984 return 0;
16985 }
16986
16987 /* If we've got an address and don't want one, dereference. */
16988 if (!want_address && have_address)
16989 {
16990 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16991 enum dwarf_location_atom op;
16992
16993 if (size > DWARF2_ADDR_SIZE || size == -1)
16994 {
16995 expansion_failed (loc, NULL_RTX,
16996 "DWARF address size mismatch");
16997 return 0;
16998 }
16999 else if (size == DWARF2_ADDR_SIZE)
17000 op = DW_OP_deref;
17001 else
17002 op = DW_OP_deref_size;
17003
17004 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17005 }
17006
17007 return descr;
17008 }
17009
17010 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17011 if it is not possible. */
17012
17013 static dw_loc_descr_ref
17014 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17015 {
17016 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17017 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17018 else if (dwarf_version >= 3 || !dwarf_strict)
17019 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17020 else
17021 return NULL;
17022 }
17023
17024 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17025 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
17026
17027 static dw_loc_descr_ref
17028 dw_sra_loc_expr (tree decl, rtx loc)
17029 {
17030 rtx p;
17031 unsigned HOST_WIDE_INT padsize = 0;
17032 dw_loc_descr_ref descr, *descr_tail;
17033 unsigned HOST_WIDE_INT decl_size;
17034 rtx varloc;
17035 enum var_init_status initialized;
17036
17037 if (DECL_SIZE (decl) == NULL
17038 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17039 return NULL;
17040
17041 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17042 descr = NULL;
17043 descr_tail = &descr;
17044
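/* Walk the list of pieces: each piece either contributes its own DWARF
expression followed by a DW_OP_*piece of its size, or, when no location is
known for it, accumulates into PADSIZE so that a hole can be emitted
before the next live piece. */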
17045 for (p = loc; p; p = XEXP (p, 1))
17046 {
17047 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17048 rtx loc_note = *decl_piece_varloc_ptr (p);
17049 dw_loc_descr_ref cur_descr;
17050 dw_loc_descr_ref *tail, last = NULL;
17051 unsigned HOST_WIDE_INT opsize = 0;
17052
17053 if (loc_note == NULL_RTX
17054 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17055 {
17056 padsize += bitsize;
17057 continue;
17058 }
17059 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17060 varloc = NOTE_VAR_LOCATION (loc_note);
17061 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17062 if (cur_descr == NULL)
17063 {
17064 padsize += bitsize;
17065 continue;
17066 }
17067
17068 /* Check that cur_descr either doesn't use
17069 DW_OP_*piece operations, or their sum is equal
17070 to bitsize. Otherwise we can't embed it. */
17071 for (tail = &cur_descr; *tail != NULL;
17072 tail = &(*tail)->dw_loc_next)
17073 if ((*tail)->dw_loc_opc == DW_OP_piece)
17074 {
17075 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17076 * BITS_PER_UNIT;
17077 last = *tail;
17078 }
17079 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17080 {
17081 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17082 last = *tail;
17083 }
17084
17085 if (last != NULL && opsize != bitsize)
17086 {
17087 padsize += bitsize;
17088 /* Discard the current piece of the descriptor and release any
17089 addr_table entries it uses. */
17090 remove_loc_list_addr_table_entries (cur_descr);
17091 continue;
17092 }
17093
17094 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17095 expression, which means that those bits are optimized out. */
17096 if (padsize)
17097 {
17098 if (padsize > decl_size)
17099 {
17100 remove_loc_list_addr_table_entries (cur_descr);
17101 goto discard_descr;
17102 }
17103 decl_size -= padsize;
17104 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17105 if (*descr_tail == NULL)
17106 {
17107 remove_loc_list_addr_table_entries (cur_descr);
17108 goto discard_descr;
17109 }
17110 descr_tail = &(*descr_tail)->dw_loc_next;
17111 padsize = 0;
17112 }
17113 *descr_tail = cur_descr;
17114 descr_tail = tail;
17115 if (bitsize > decl_size)
17116 goto discard_descr;
17117 decl_size -= bitsize;
17118 if (last == NULL)
17119 {
17120 HOST_WIDE_INT offset = 0;
17121 if (GET_CODE (varloc) == VAR_LOCATION
17122 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17123 {
17124 varloc = PAT_VAR_LOCATION_LOC (varloc);
17125 if (GET_CODE (varloc) == EXPR_LIST)
17126 varloc = XEXP (varloc, 0);
17127 }
17128 do
17129 {
17130 if (GET_CODE (varloc) == CONST
17131 || GET_CODE (varloc) == SIGN_EXTEND
17132 || GET_CODE (varloc) == ZERO_EXTEND)
17133 varloc = XEXP (varloc, 0);
17134 else if (GET_CODE (varloc) == SUBREG)
17135 varloc = SUBREG_REG (varloc);
17136 else
17137 break;
17138 }
17139 while (1);
17140 /* The DW_OP_bit_piece offset should be zero for register
17141 or implicit location descriptions and empty location
17142 descriptions, but for memory addresses it needs big-endian
17143 adjustment. */
17144 if (MEM_P (varloc))
17145 {
17146 unsigned HOST_WIDE_INT memsize;
17147 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17148 goto discard_descr;
17149 memsize *= BITS_PER_UNIT;
17150 if (memsize != bitsize)
17151 {
17152 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17153 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17154 goto discard_descr;
17155 if (memsize < bitsize)
17156 goto discard_descr;
17157 if (BITS_BIG_ENDIAN)
17158 offset = memsize - bitsize;
17159 }
17160 }
17161
17162 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17163 if (*descr_tail == NULL)
17164 goto discard_descr;
17165 descr_tail = &(*descr_tail)->dw_loc_next;
17166 }
17167 }
17168
17169 /* If there were any non-empty expressions, add padding till the end of
17170 the decl. */
17171 if (descr != NULL && decl_size != 0)
17172 {
17173 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17174 if (*descr_tail == NULL)
17175 goto discard_descr;
17176 }
17177 return descr;
17178
17179 discard_descr:
17180 /* Discard the descriptor and release any addr_table entries it uses. */
17181 remove_loc_list_addr_table_entries (descr);
17182 return NULL;
17183 }
17184
17185 /* Return the dwarf representation of the location list LOC_LIST of
17186 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17187 function. */
17188
17189 static dw_loc_list_ref
17190 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17191 {
17192 const char *endname, *secname;
17193 var_loc_view endview;
17194 rtx varloc;
17195 enum var_init_status initialized;
17196 struct var_loc_node *node;
17197 dw_loc_descr_ref descr;
17198 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17199 dw_loc_list_ref list = NULL;
17200 dw_loc_list_ref *listp = &list;
17201
17202 /* Now that we know what section we are using for a base,
17203 actually construct the list of locations.
17204 The first location information is what is passed to the
17205 function that creates the location list, and the remaining
17206 locations just get added on to that list.
17207 Note that we only know the start address for a location
17208 (i.e. location changes), so to build the range, we use
17209 the range [current location start, next location start].
17210 This means we have to special case the last node, and generate
17211 a range of [last location start, end of function label]. */
17212
17213 if (cfun && crtl->has_bb_partition)
17214 {
17215 bool save_in_cold_section_p = in_cold_section_p;
17216 in_cold_section_p = first_function_block_is_cold;
17217 if (loc_list->last_before_switch == NULL)
17218 in_cold_section_p = !in_cold_section_p;
17219 secname = secname_for_decl (decl);
17220 in_cold_section_p = save_in_cold_section_p;
17221 }
17222 else
17223 secname = secname_for_decl (decl);
17224
17225 for (node = loc_list->first; node; node = node->next)
17226 {
17227 bool range_across_switch = false;
17228 if (GET_CODE (node->loc) == EXPR_LIST
17229 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17230 {
17231 if (GET_CODE (node->loc) == EXPR_LIST)
17232 {
17233 descr = NULL;
17234 /* This requires DW_OP_{,bit_}piece, which is not usable
17235 inside DWARF expressions. */
17236 if (want_address == 2)
17237 descr = dw_sra_loc_expr (decl, node->loc);
17238 }
17239 else
17240 {
17241 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17242 varloc = NOTE_VAR_LOCATION (node->loc);
17243 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17244 }
17245 if (descr)
17246 {
17247 /* If a section switch happens between node->label
17248 and node->next->label (or the end of the function) and
17249 we can't emit it as a single entry list,
17250 emit two ranges: the first one ending at the end
17251 of the first partition and the second one starting at the
17252 beginning of the second partition. */
17253 if (node == loc_list->last_before_switch
17254 && (node != loc_list->first || loc_list->first->next
17255 /* If we are to emit a view number, we will emit
17256 a loclist rather than a single location
17257 expression for the entire function (see
17258 loc_list_has_views), so we have to split the
17259 range that straddles across partitions. */
17260 || !ZERO_VIEW_P (node->view))
17261 && current_function_decl)
17262 {
17263 endname = cfun->fde->dw_fde_end;
17264 endview = 0;
17265 range_across_switch = true;
17266 }
17267 /* The variable has a location between NODE->LABEL and
17268 NODE->NEXT->LABEL. */
17269 else if (node->next)
17270 endname = node->next->label, endview = node->next->view;
17271 /* If the variable has a location at the last label
17272 it keeps its location until the end of function. */
17273 else if (!current_function_decl)
17274 endname = text_end_label, endview = 0;
17275 else
17276 {
17277 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17278 current_function_funcdef_no);
17279 endname = ggc_strdup (label_id);
17280 endview = 0;
17281 }
17282
17283 *listp = new_loc_list (descr, node->label, node->view,
17284 endname, endview, secname);
17285 if (TREE_CODE (decl) == PARM_DECL
17286 && node == loc_list->first
17287 && NOTE_P (node->loc)
17288 && strcmp (node->label, endname) == 0)
17289 (*listp)->force = true;
17290 listp = &(*listp)->dw_loc_next;
17291 }
17292 }
17293
17294 if (cfun
17295 && crtl->has_bb_partition
17296 && node == loc_list->last_before_switch)
17297 {
17298 bool save_in_cold_section_p = in_cold_section_p;
17299 in_cold_section_p = !first_function_block_is_cold;
17300 secname = secname_for_decl (decl);
17301 in_cold_section_p = save_in_cold_section_p;
17302 }
17303
17304 if (range_across_switch)
17305 {
17306 if (GET_CODE (node->loc) == EXPR_LIST)
17307 descr = dw_sra_loc_expr (decl, node->loc);
17308 else
17309 {
17310 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17311 varloc = NOTE_VAR_LOCATION (node->loc);
17312 descr = dw_loc_list_1 (decl, varloc, want_address,
17313 initialized);
17314 }
17315 gcc_assert (descr);
17316 /* The variable has a location between NODE->LABEL and
17317 NODE->NEXT->LABEL. */
17318 if (node->next)
17319 endname = node->next->label, endview = node->next->view;
17320 else
17321 endname = cfun->fde->dw_fde_second_end, endview = 0;
17322 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17323 endname, endview, secname);
17324 listp = &(*listp)->dw_loc_next;
17325 }
17326 }
17327
17328 /* Try to avoid the overhead of a location list by emitting a location
17329 expression instead, but only if we didn't have more than one
17330 location entry in the first place. If some entries were not
17331 representable, we don't want to pretend that a single entry that was
17332 representable applies to the entire scope in which the variable is
17333 available. */
17334 if (list && loc_list->first->next)
17335 gen_llsym (list);
17336 else
17337 maybe_gen_llsym (list);
17338
17339 return list;
17340 }
17341
17342 /* Return true if the loc_list has only a single element and thus can be
17343 represented as a location description. */
17344
17345 static bool
17346 single_element_loc_list_p (dw_loc_list_ref list)
17347 {
17348 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17349 return !list->ll_symbol;
17350 }
17351
17352 /* Duplicate a single element of location list. */
17353
17354 static inline dw_loc_descr_ref
17355 copy_loc_descr (dw_loc_descr_ref ref)
17356 {
17357 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17358 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17359 return copy;
17360 }
17361
17362 /* To each location in list LIST append loc descr REF. */
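/* REF itself is appended to the first location; every other location gets
a deep copy of the whole chain, since each location expression must own
its descriptors. */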
17363
17364 static void
17365 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17366 {
17367 dw_loc_descr_ref copy;
17368 add_loc_descr (&list->expr, ref);
17369 list = list->dw_loc_next;
17370 while (list)
17371 {
17372 copy = copy_loc_descr (ref);
17373 add_loc_descr (&list->expr, copy);
17374 while (copy->dw_loc_next)
17375 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17376 list = list->dw_loc_next;
17377 }
17378 }
17379
17380 /* To each location in list LIST prepend loc descr REF. */
17381
17382 static void
17383 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17384 {
17385 dw_loc_descr_ref copy;
17386 dw_loc_descr_ref ref_end = list->expr;
17387 add_loc_descr (&ref, list->expr);
17388 list->expr = ref;
17389 list = list->dw_loc_next;
17390 while (list)
17391 {
17392 dw_loc_descr_ref end = list->expr;
17393 list->expr = copy = copy_loc_descr (ref);
17394 while (copy->dw_loc_next != ref_end)
17395 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17396 copy->dw_loc_next = end;
17397 list = list->dw_loc_next;
17398 }
17399 }
17400
17401 /* Given two lists RET and LIST, produce a location list that is the
17402 result of adding the expression in LIST to the expression in RET
17403 at each position in the program.
17404 Might be destructive on both RET and LIST.
17405 
17406 TODO: We handle only the simple cases of RET or LIST having at most one
17407 element. The general case would involve sorting the lists in program
17408 order and merging them, which will need some additional work.
17409 Adding that will improve the quality of debug info, especially for
17410 SRA-ed structures. */
17411
17412 static void
17413 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17414 {
17415 if (!list)
17416 return;
17417 if (!*ret)
17418 {
17419 *ret = list;
17420 return;
17421 }
17422 if (!list->dw_loc_next)
17423 {
17424 add_loc_descr_to_each (*ret, list->expr);
17425 return;
17426 }
17427 if (!(*ret)->dw_loc_next)
17428 {
17429 prepend_loc_descr_to_each (list, (*ret)->expr);
17430 *ret = list;
17431 return;
17432 }
17433 expansion_failed (NULL_TREE, NULL_RTX,
17434 "Don't know how to merge two non-trivial"
17435 " location lists.\n");
17436 *ret = NULL;
17437 return;
17438 }
17439
17440 /* LOC is a constant expression. Try our luck: look it up in the constant
17441 pool and return a loc_descr for its address. */
17442
17443 static dw_loc_descr_ref
17444 cst_pool_loc_descr (tree loc)
17445 {
17446 /* Get an RTL for this, if something has been emitted. */
17447 rtx rtl = lookup_constant_def (loc);
17448
17449 if (!rtl || !MEM_P (rtl))
17450 {
17451 gcc_assert (!rtl);
17452 return 0;
17453 }
17454 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17455
17456 /* TODO: We might get more coverage if we were actually delaying expansion
17457 of all expressions till the end of compilation, when constant pools are
17458 fully populated. */
17459 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17460 {
17461 expansion_failed (loc, NULL_RTX,
17462 "CST value in contant pool but not marked.");
17463 return 0;
17464 }
17465 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17466 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17467 }
17468
17469 /* Return a dw_loc_list representing the address of addr_expr LOC
17470 by looking for an inner INDIRECT_REF expression and turning
17471 it into simple arithmetic.
17472
17473 See loc_list_from_tree for the meaning of CONTEXT. */
17474
17475 static dw_loc_list_ref
17476 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17477 loc_descr_context *context)
17478 {
17479 tree obj, offset;
17480 poly_int64 bitsize, bitpos, bytepos;
17481 machine_mode mode;
17482 int unsignedp, reversep, volatilep = 0;
17483 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17484
17485 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17486 &bitsize, &bitpos, &offset, &mode,
17487 &unsignedp, &reversep, &volatilep);
17488 STRIP_NOPS (obj);
17489 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17490 {
17491 expansion_failed (loc, NULL_RTX, "bitfield access");
17492 return 0;
17493 }
17494 if (!INDIRECT_REF_P (obj))
17495 {
17496 expansion_failed (obj,
17497 			NULL_RTX, "no indirect ref in inner reference");
17498 return 0;
17499 }
17500 if (!offset && known_eq (bitpos, 0))
17501 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17502 context);
17503 else if (toplev
17504 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17505 && (dwarf_version >= 4 || !dwarf_strict))
17506 {
17507 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17508 if (!list_ret)
17509 return 0;
17510 if (offset)
17511 {
17512 /* Variable offset. */
17513 list_ret1 = loc_list_from_tree (offset, 0, context);
17514 if (list_ret1 == 0)
17515 return 0;
17516 add_loc_list (&list_ret, list_ret1);
17517 if (!list_ret)
17518 return 0;
17519 add_loc_descr_to_each (list_ret,
17520 new_loc_descr (DW_OP_plus, 0, 0));
17521 }
17522 HOST_WIDE_INT value;
17523 if (bytepos.is_constant (&value) && value > 0)
17524 add_loc_descr_to_each (list_ret,
17525 new_loc_descr (DW_OP_plus_uconst, value, 0));
17526 else if (maybe_ne (bytepos, 0))
17527 loc_list_plus_const (list_ret, bytepos);
17528 add_loc_descr_to_each (list_ret,
17529 new_loc_descr (DW_OP_stack_value, 0, 0));
17530 }
17531 return list_ret;
17532 }
17533
17534 /* Set LOC to the next operation that is not a DW_OP_nop operation.  In case
17535    all operations from LOC are nops, move to the last one.  Insert into NOPS
17536    all operations that are skipped. */
17537
17538 static void
17539 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17540 hash_set<dw_loc_descr_ref> &nops)
17541 {
17542 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17543 {
17544 nops.add (loc);
17545 loc = loc->dw_loc_next;
17546 }
17547 }
17548
17549 /* Helper for loc_descr_without_nops: free the location description operation
17550 P. */
17551
17552 bool
17553 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17554 {
17555 ggc_free (loc);
17556 return true;
17557 }
17558
17559 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17560 finishes LOC. */
17561
17562 static void
17563 loc_descr_without_nops (dw_loc_descr_ref &loc)
17564 {
17565 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17566 return;
17567
17568 /* Set of all DW_OP_nop operations we remove. */
17569 hash_set<dw_loc_descr_ref> nops;
17570
17571 /* First, strip all prefix NOP operations in order to keep the head of the
17572 operations list. */
17573 loc_descr_to_next_no_nop (loc, nops);
17574
17575 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17576 {
17577 /* For control flow operations: strip "prefix" nops in destination
17578 labels. */
17579 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17580 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17581 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17582 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17583
17584 /* Do the same for the operations that follow, then move to the next
17585 iteration. */
17586 if (cur->dw_loc_next != NULL)
17587 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17588 cur = cur->dw_loc_next;
17589 }
17590
17591 nops.traverse<void *, free_loc_descr> (NULL);
17592 }
17593
17594
17595 struct dwarf_procedure_info;
17596
17597 /* Helper structure for location descriptions generation. */
17598 struct loc_descr_context
17599 {
17600 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17601      NULL_TREE if DW_OP_push_object_address is invalid for this location
17602 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17603 tree context_type;
17604 /* The ..._DECL node that should be translated as a
17605 DW_OP_push_object_address operation. */
17606 tree base_decl;
17607 /* Information about the DWARF procedure we are currently generating. NULL if
17608 we are not generating a DWARF procedure. */
17609 struct dwarf_procedure_info *dpi;
17610 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17611 by consumer. Used for DW_TAG_generic_subrange attributes. */
17612 bool placeholder_arg;
17613 /* True if PLACEHOLDER_EXPR has been seen. */
17614 bool placeholder_seen;
17615 };
17616
17617 /* DWARF procedures generation
17618
17619    DWARF expressions (aka location descriptions) are used to encode variable
17620    quantities such as sizes or offsets.  Such computations can have redundant parts
17621 that can be factorized in order to reduce the size of the output debug
17622 information. This is the whole point of DWARF procedures.
17623
17624 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17625 already factorized into functions ("size functions") in order to handle very
17626 big and complex types. Such functions are quite simple: they have integral
17627 arguments, they return an integral result and their body contains only a
17628 return statement with arithmetic expressions. This is the only kind of
17629 function we are interested in translating into DWARF procedures, here.
17630
17631    DWARF expressions and DWARF procedures are executed using a stack, so we have
17632 to define some calling convention for them to interact. Let's say that:
17633
17634 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17635 all arguments in reverse order (right-to-left) so that when the DWARF
17636 procedure execution starts, the first argument is the top of the stack.
17637
17638 - Then, when returning, the DWARF procedure must have consumed all arguments
17639 on the stack, must have pushed the result and touched nothing else.
17640
17641    - Each argument and the result have integral types and thus can be held in
17642      a single stack slot.
17643
17644 - We call "frame offset" the number of stack slots that are "under DWARF
17645      procedure control": it includes the argument slots, the temporaries and
17646 the result slot. Thus, it is equal to the number of arguments when the
17647 procedure execution starts and must be equal to one (the result) when it
17648 returns. */
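/* As a sketch of this convention, calling a two-argument DWARF procedure P
   with arguments A and B could look roughly like this (stack shown top
   first):

       <push B>            stack: B
       <push A>            stack: A B
       DW_OP_call4 <P>     stack: result

   Inside P, the frame offset starts at 2 (the two argument slots) and must be
   back down to 1 (just the result) by the time P returns.  */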
17649
17650 /* Helper structure used when generating operations for a DWARF procedure. */
17651 struct dwarf_procedure_info
17652 {
17653 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17654 currently translated. */
17655 tree fndecl;
17656 /* The number of arguments FNDECL takes. */
17657 unsigned args_count;
17658 };
17659
17660 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17661 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17662 equate it to this DIE. */
17663
17664 static dw_die_ref
17665 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17666 dw_die_ref parent_die)
17667 {
17668 dw_die_ref dwarf_proc_die;
17669
17670 if ((dwarf_version < 3 && dwarf_strict)
17671 || location == NULL)
17672 return NULL;
17673
17674 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17675 if (fndecl)
17676 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17677 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17678 return dwarf_proc_die;
17679 }
17680
17681 /* Return whether TYPE is a supported type as a DWARF procedure argument
17682 type or return type (we handle only scalar types and pointer types that
17683    aren't wider than the DWARF expression evaluation stack). */
17684
17685 static bool
17686 is_handled_procedure_type (tree type)
17687 {
17688 return ((INTEGRAL_TYPE_P (type)
17689 || TREE_CODE (type) == OFFSET_TYPE
17690 || TREE_CODE (type) == POINTER_TYPE)
17691 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17692 }
17693
17694 /* Helper for resolve_args_picking: do the same but stop when coming across
17695 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17696 offset *before* evaluating the corresponding operation. */
17697
17698 static bool
17699 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17700 struct dwarf_procedure_info *dpi,
17701 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17702 {
17703 /* The "frame_offset" identifier is already used to name a macro... */
17704 unsigned frame_offset_ = initial_frame_offset;
17705 dw_loc_descr_ref l;
17706
17707 for (l = loc; l != NULL;)
17708 {
17709 bool existed;
17710 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17711
17712 /* If we already met this node, there is nothing to compute anymore. */
17713 if (existed)
17714 {
17715 /* Make sure that the stack size is consistent wherever the execution
17716 flow comes from. */
17717 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17718 break;
17719 }
17720 l_frame_offset = frame_offset_;
17721
17722 /* If needed, relocate the picking offset with respect to the frame
17723 offset. */
17724 if (l->frame_offset_rel)
17725 {
17726 unsigned HOST_WIDE_INT off;
17727 switch (l->dw_loc_opc)
17728 {
17729 case DW_OP_pick:
17730 off = l->dw_loc_oprnd1.v.val_unsigned;
17731 break;
17732 case DW_OP_dup:
17733 off = 0;
17734 break;
17735 case DW_OP_over:
17736 off = 1;
17737 break;
17738 default:
17739 gcc_unreachable ();
17740 }
17741 /* frame_offset_ is the size of the current stack frame, including
17742 incoming arguments. Besides, the arguments are pushed
17743 right-to-left. Thus, in order to access the Nth argument from
17744 this operation node, the picking has to skip temporaries *plus*
17745 one stack slot per argument (0 for the first one, 1 for the second
17746 one, etc.).
17747
17748 	     The targeted argument number (N) is already set as the operand,
17749 	     and the number of temporaries can be computed with:
17750 	       frame_offset_ - dpi->args_count */
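	  /* A small worked example with illustrative numbers: if frame_offset_
	     is 3, dpi->args_count is 2 and this DW_OP_pick targets argument
	     N == 1, the relocated offset becomes 1 + (3 - 2) == 2, which is
	     emitted below as DW_OP_pick 2.  */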
17751 off += frame_offset_ - dpi->args_count;
17752
17753 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17754 if (off > 255)
17755 return false;
17756
17757 if (off == 0)
17758 {
17759 l->dw_loc_opc = DW_OP_dup;
17760 l->dw_loc_oprnd1.v.val_unsigned = 0;
17761 }
17762 else if (off == 1)
17763 {
17764 l->dw_loc_opc = DW_OP_over;
17765 l->dw_loc_oprnd1.v.val_unsigned = 0;
17766 }
17767 else
17768 {
17769 l->dw_loc_opc = DW_OP_pick;
17770 l->dw_loc_oprnd1.v.val_unsigned = off;
17771 }
17772 }
17773
17774 /* Update frame_offset according to the effect the current operation has
17775 on the stack. */
17776 switch (l->dw_loc_opc)
17777 {
17778 case DW_OP_deref:
17779 case DW_OP_swap:
17780 case DW_OP_rot:
17781 case DW_OP_abs:
17782 case DW_OP_neg:
17783 case DW_OP_not:
17784 case DW_OP_plus_uconst:
17785 case DW_OP_skip:
17786 case DW_OP_reg0:
17787 case DW_OP_reg1:
17788 case DW_OP_reg2:
17789 case DW_OP_reg3:
17790 case DW_OP_reg4:
17791 case DW_OP_reg5:
17792 case DW_OP_reg6:
17793 case DW_OP_reg7:
17794 case DW_OP_reg8:
17795 case DW_OP_reg9:
17796 case DW_OP_reg10:
17797 case DW_OP_reg11:
17798 case DW_OP_reg12:
17799 case DW_OP_reg13:
17800 case DW_OP_reg14:
17801 case DW_OP_reg15:
17802 case DW_OP_reg16:
17803 case DW_OP_reg17:
17804 case DW_OP_reg18:
17805 case DW_OP_reg19:
17806 case DW_OP_reg20:
17807 case DW_OP_reg21:
17808 case DW_OP_reg22:
17809 case DW_OP_reg23:
17810 case DW_OP_reg24:
17811 case DW_OP_reg25:
17812 case DW_OP_reg26:
17813 case DW_OP_reg27:
17814 case DW_OP_reg28:
17815 case DW_OP_reg29:
17816 case DW_OP_reg30:
17817 case DW_OP_reg31:
17818 case DW_OP_bregx:
17819 case DW_OP_piece:
17820 case DW_OP_deref_size:
17821 case DW_OP_nop:
17822 case DW_OP_bit_piece:
17823 case DW_OP_implicit_value:
17824 case DW_OP_stack_value:
17825 break;
17826
17827 case DW_OP_addr:
17828 case DW_OP_const1u:
17829 case DW_OP_const1s:
17830 case DW_OP_const2u:
17831 case DW_OP_const2s:
17832 case DW_OP_const4u:
17833 case DW_OP_const4s:
17834 case DW_OP_const8u:
17835 case DW_OP_const8s:
17836 case DW_OP_constu:
17837 case DW_OP_consts:
17838 case DW_OP_dup:
17839 case DW_OP_over:
17840 case DW_OP_pick:
17841 case DW_OP_lit0:
17842 case DW_OP_lit1:
17843 case DW_OP_lit2:
17844 case DW_OP_lit3:
17845 case DW_OP_lit4:
17846 case DW_OP_lit5:
17847 case DW_OP_lit6:
17848 case DW_OP_lit7:
17849 case DW_OP_lit8:
17850 case DW_OP_lit9:
17851 case DW_OP_lit10:
17852 case DW_OP_lit11:
17853 case DW_OP_lit12:
17854 case DW_OP_lit13:
17855 case DW_OP_lit14:
17856 case DW_OP_lit15:
17857 case DW_OP_lit16:
17858 case DW_OP_lit17:
17859 case DW_OP_lit18:
17860 case DW_OP_lit19:
17861 case DW_OP_lit20:
17862 case DW_OP_lit21:
17863 case DW_OP_lit22:
17864 case DW_OP_lit23:
17865 case DW_OP_lit24:
17866 case DW_OP_lit25:
17867 case DW_OP_lit26:
17868 case DW_OP_lit27:
17869 case DW_OP_lit28:
17870 case DW_OP_lit29:
17871 case DW_OP_lit30:
17872 case DW_OP_lit31:
17873 case DW_OP_breg0:
17874 case DW_OP_breg1:
17875 case DW_OP_breg2:
17876 case DW_OP_breg3:
17877 case DW_OP_breg4:
17878 case DW_OP_breg5:
17879 case DW_OP_breg6:
17880 case DW_OP_breg7:
17881 case DW_OP_breg8:
17882 case DW_OP_breg9:
17883 case DW_OP_breg10:
17884 case DW_OP_breg11:
17885 case DW_OP_breg12:
17886 case DW_OP_breg13:
17887 case DW_OP_breg14:
17888 case DW_OP_breg15:
17889 case DW_OP_breg16:
17890 case DW_OP_breg17:
17891 case DW_OP_breg18:
17892 case DW_OP_breg19:
17893 case DW_OP_breg20:
17894 case DW_OP_breg21:
17895 case DW_OP_breg22:
17896 case DW_OP_breg23:
17897 case DW_OP_breg24:
17898 case DW_OP_breg25:
17899 case DW_OP_breg26:
17900 case DW_OP_breg27:
17901 case DW_OP_breg28:
17902 case DW_OP_breg29:
17903 case DW_OP_breg30:
17904 case DW_OP_breg31:
17905 case DW_OP_fbreg:
17906 case DW_OP_push_object_address:
17907 case DW_OP_call_frame_cfa:
17908 case DW_OP_GNU_variable_value:
17909 ++frame_offset_;
17910 break;
17911
17912 case DW_OP_drop:
17913 case DW_OP_xderef:
17914 case DW_OP_and:
17915 case DW_OP_div:
17916 case DW_OP_minus:
17917 case DW_OP_mod:
17918 case DW_OP_mul:
17919 case DW_OP_or:
17920 case DW_OP_plus:
17921 case DW_OP_shl:
17922 case DW_OP_shr:
17923 case DW_OP_shra:
17924 case DW_OP_xor:
17925 case DW_OP_bra:
17926 case DW_OP_eq:
17927 case DW_OP_ge:
17928 case DW_OP_gt:
17929 case DW_OP_le:
17930 case DW_OP_lt:
17931 case DW_OP_ne:
17932 case DW_OP_regx:
17933 case DW_OP_xderef_size:
17934 --frame_offset_;
17935 break;
17936
17937 case DW_OP_call2:
17938 case DW_OP_call4:
17939 case DW_OP_call_ref:
17940 {
17941 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17942 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17943
17944 if (stack_usage == NULL)
17945 return false;
17946 frame_offset_ += *stack_usage;
17947 break;
17948 }
17949
17950 case DW_OP_implicit_pointer:
17951 case DW_OP_entry_value:
17952 case DW_OP_const_type:
17953 case DW_OP_regval_type:
17954 case DW_OP_deref_type:
17955 case DW_OP_convert:
17956 case DW_OP_reinterpret:
17957 case DW_OP_form_tls_address:
17958 case DW_OP_GNU_push_tls_address:
17959 case DW_OP_GNU_uninit:
17960 case DW_OP_GNU_encoded_addr:
17961 case DW_OP_GNU_implicit_pointer:
17962 case DW_OP_GNU_entry_value:
17963 case DW_OP_GNU_const_type:
17964 case DW_OP_GNU_regval_type:
17965 case DW_OP_GNU_deref_type:
17966 case DW_OP_GNU_convert:
17967 case DW_OP_GNU_reinterpret:
17968 case DW_OP_GNU_parameter_ref:
17969 /* loc_list_from_tree will probably not output these operations for
17970 size functions, so assume they will not appear here. */
17971 /* Fall through... */
17972
17973 default:
17974 gcc_unreachable ();
17975 }
17976
17977 /* Now, follow the control flow (except subroutine calls). */
17978 switch (l->dw_loc_opc)
17979 {
17980 case DW_OP_bra:
17981 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17982 frame_offsets))
17983 return false;
17984 /* Fall through. */
17985
17986 case DW_OP_skip:
17987 l = l->dw_loc_oprnd1.v.val_loc;
17988 break;
17989
17990 case DW_OP_stack_value:
17991 return true;
17992
17993 default:
17994 l = l->dw_loc_next;
17995 break;
17996 }
17997 }
17998
17999 return true;
18000 }
18001
18002 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18003 operations) in order to resolve the operand of DW_OP_pick operations that
18004 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18005    offset *before* LOC is executed.  Return whether all relocations were
18006    successful. */
18007
18008 static bool
18009 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18010 struct dwarf_procedure_info *dpi)
18011 {
18012   /* Associate with each visited operation the frame offset *before* evaluating
18013      that operation. */
18014 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18015
18016 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18017 frame_offsets);
18018 }
18019
18020 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18021 Return NULL if it is not possible. */
18022
18023 static dw_die_ref
18024 function_to_dwarf_procedure (tree fndecl)
18025 {
18026 struct loc_descr_context ctx;
18027 struct dwarf_procedure_info dpi;
18028 dw_die_ref dwarf_proc_die;
18029 tree tree_body = DECL_SAVED_TREE (fndecl);
18030 dw_loc_descr_ref loc_body, epilogue;
18031
18032 tree cursor;
18033 unsigned i;
18034
18035 /* Do not generate multiple DWARF procedures for the same function
18036 declaration. */
18037 dwarf_proc_die = lookup_decl_die (fndecl);
18038 if (dwarf_proc_die != NULL)
18039 return dwarf_proc_die;
18040
18041 /* DWARF procedures are available starting with the DWARFv3 standard. */
18042 if (dwarf_version < 3 && dwarf_strict)
18043 return NULL;
18044
18045 /* We handle only functions for which we still have a body, that return a
18046      supported type and that take arguments with supported types.  Note that
18047      there is no point in translating functions that return nothing. */
18048 if (tree_body == NULL_TREE
18049 || DECL_RESULT (fndecl) == NULL_TREE
18050 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18051 return NULL;
18052
18053 for (cursor = DECL_ARGUMENTS (fndecl);
18054 cursor != NULL_TREE;
18055 cursor = TREE_CHAIN (cursor))
18056 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18057 return NULL;
18058
18059 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18060 if (TREE_CODE (tree_body) != RETURN_EXPR)
18061 return NULL;
18062 tree_body = TREE_OPERAND (tree_body, 0);
18063 if (TREE_CODE (tree_body) != MODIFY_EXPR
18064 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18065 return NULL;
18066 tree_body = TREE_OPERAND (tree_body, 1);
18067
18068 /* Try to translate the body expression itself. Note that this will probably
18069 cause an infinite recursion if its call graph has a cycle. This is very
18070 unlikely for size functions, however, so don't bother with such things at
18071 the moment. */
18072 ctx.context_type = NULL_TREE;
18073 ctx.base_decl = NULL_TREE;
18074 ctx.dpi = &dpi;
18075 ctx.placeholder_arg = false;
18076 ctx.placeholder_seen = false;
18077 dpi.fndecl = fndecl;
18078 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18079 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18080 if (!loc_body)
18081 return NULL;
18082
18083 /* After evaluating all operands in "loc_body", we should still have on the
18084 stack all arguments plus the desired function result (top of the stack).
18085 Generate code in order to keep only the result in our stack frame. */
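  /* For example, with two arguments the epilogue built below is
     DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop: each swap/drop pair pops
     one argument slot from under the result while keeping the result on
     top.  */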
18086 epilogue = NULL;
18087 for (i = 0; i < dpi.args_count; ++i)
18088 {
18089 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18090 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18091 op_couple->dw_loc_next->dw_loc_next = epilogue;
18092 epilogue = op_couple;
18093 }
18094 add_loc_descr (&loc_body, epilogue);
18095 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18096 return NULL;
18097
18098   /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18099      earlier because they were considered useful.  Now that there is an epilogue,
18100      they are not useful anymore, so give it another try. */
18101 loc_descr_without_nops (loc_body);
18102
18103 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18104      a DW_TAG_dwarf_procedure, so we may have a conflict here.  It's unlikely,
18105 though, given that size functions do not come from source, so they should
18106 not have a dedicated DW_TAG_subprogram DIE. */
18107 dwarf_proc_die
18108 = new_dwarf_proc_die (loc_body, fndecl,
18109 get_context_die (DECL_CONTEXT (fndecl)));
18110
18111 /* The called DWARF procedure consumes one stack slot per argument and
18112 returns one stack slot. */
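  /* E.g. for a two-argument procedure the recorded net stack effect is
     1 - 2 == -1: a call pops two argument slots and pushes one result.  */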
18113 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18114
18115 return dwarf_proc_die;
18116 }
18117
18118
18119 /* Generate a Dwarf location list representing LOC.
18120    If WANT_ADDRESS is 0, an expression computing the value of LOC is returned;
18121    if WANT_ADDRESS is 1, an expression computing the address of LOC is returned;
18122    if WANT_ADDRESS is 2, an expression computing an address usable in a
18123      location description is returned (i.e. DW_OP_reg can be used
18124      to refer to register values).
18125
18126 CONTEXT provides information to customize the location descriptions
18127 generation. Its context_type field specifies what type is implicitly
18128 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18129 will not be generated.
18130
18131 Its DPI field determines whether we are generating a DWARF expression for a
18132 DWARF procedure, so PARM_DECL references are processed specifically.
18133
18134 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18135 and dpi fields were null. */
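/* As an illustration (the exact operators depend on where LOC lives): for a
   local variable stored at frame offset 8, WANT_ADDRESS == 1 might produce
   DW_OP_fbreg 8, while WANT_ADDRESS == 0 would append a dereference, e.g.
   DW_OP_fbreg 8; DW_OP_deref.  */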
18136
18137 static dw_loc_list_ref
18138 loc_list_from_tree_1 (tree loc, int want_address,
18139 struct loc_descr_context *context)
18140 {
18141 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18142 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18143 int have_address = 0;
18144 enum dwarf_location_atom op;
18145
18146   /* ??? Most of the time we do not take proper care of sign/zero
18147      extending the values.  Hopefully this won't be a real
18148 problem... */
18149
18150 if (context != NULL
18151 && context->base_decl == loc
18152 && want_address == 0)
18153 {
18154 if (dwarf_version >= 3 || !dwarf_strict)
18155 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18156 NULL, 0, NULL, 0, NULL);
18157 else
18158 return NULL;
18159 }
18160
18161 switch (TREE_CODE (loc))
18162 {
18163 case ERROR_MARK:
18164 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18165 return 0;
18166
18167 case PLACEHOLDER_EXPR:
18168 /* This case involves extracting fields from an object to determine the
18169 position of other fields. It is supposed to appear only as the first
18170 operand of COMPONENT_REF nodes and to reference precisely the type
18171 that the context allows. */
18172 if (context != NULL
18173 && TREE_TYPE (loc) == context->context_type
18174 && want_address >= 1)
18175 {
18176 if (dwarf_version >= 3 || !dwarf_strict)
18177 {
18178 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18179 have_address = 1;
18180 break;
18181 }
18182 else
18183 return NULL;
18184 }
18185 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18186 the single argument passed by consumer. */
18187 else if (context != NULL
18188 && context->placeholder_arg
18189 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18190 && want_address == 0)
18191 {
18192 ret = new_loc_descr (DW_OP_pick, 0, 0);
18193 ret->frame_offset_rel = 1;
18194 context->placeholder_seen = true;
18195 break;
18196 }
18197 else
18198 expansion_failed (loc, NULL_RTX,
18199 "PLACEHOLDER_EXPR for an unexpected type");
18200 break;
18201
18202 case CALL_EXPR:
18203 {
18204 const int nargs = call_expr_nargs (loc);
18205 tree callee = get_callee_fndecl (loc);
18206 int i;
18207 dw_die_ref dwarf_proc;
18208
18209 if (callee == NULL_TREE)
18210 goto call_expansion_failed;
18211
18212 /* We handle only functions that return an integer. */
18213 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18214 goto call_expansion_failed;
18215
18216 dwarf_proc = function_to_dwarf_procedure (callee);
18217 if (dwarf_proc == NULL)
18218 goto call_expansion_failed;
18219
18220 /* Evaluate arguments right-to-left so that the first argument will
18221 be the top-most one on the stack. */
18222 for (i = nargs - 1; i >= 0; --i)
18223 {
18224 dw_loc_descr_ref loc_descr
18225 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18226 context);
18227
18228 if (loc_descr == NULL)
18229 goto call_expansion_failed;
18230
18231 add_loc_descr (&ret, loc_descr);
18232 }
18233
18234 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18235 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18236 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18237 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18238 add_loc_descr (&ret, ret1);
18239 break;
18240
18241 call_expansion_failed:
18242 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18243 /* There are no opcodes for these operations. */
18244 return 0;
18245 }
18246
18247 case PREINCREMENT_EXPR:
18248 case PREDECREMENT_EXPR:
18249 case POSTINCREMENT_EXPR:
18250 case POSTDECREMENT_EXPR:
18251       expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18252 /* There are no opcodes for these operations. */
18253 return 0;
18254
18255 case ADDR_EXPR:
18256       /* If we already want an address, see if there is an INDIRECT_REF inside,
18257          e.g. for &this->field. */
18258 if (want_address)
18259 {
18260 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18261 (loc, want_address == 2, context);
18262 if (list_ret)
18263 have_address = 1;
18264 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18265 && (ret = cst_pool_loc_descr (loc)))
18266 have_address = 1;
18267 }
18268 /* Otherwise, process the argument and look for the address. */
18269 if (!list_ret && !ret)
18270 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18271 else
18272 {
18273 if (want_address)
18274 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18275 return NULL;
18276 }
18277 break;
18278
18279 case VAR_DECL:
18280 if (DECL_THREAD_LOCAL_P (loc))
18281 {
18282 rtx rtl;
18283 enum dwarf_location_atom tls_op;
18284 enum dtprel_bool dtprel = dtprel_false;
18285
18286 if (targetm.have_tls)
18287 {
18288 /* If this is not defined, we have no way to emit the
18289 data. */
18290 if (!targetm.asm_out.output_dwarf_dtprel)
18291 return 0;
18292
18293 /* The way DW_OP_GNU_push_tls_address is specified, we
18294 can only look up addresses of objects in the current
18295 module. We used DW_OP_addr as first op, but that's
18296 wrong, because DW_OP_addr is relocated by the debug
18297 info consumer, while DW_OP_GNU_push_tls_address
18298 operand shouldn't be. */
18299 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18300 return 0;
18301 dtprel = dtprel_true;
18302 /* We check for DWARF 5 here because gdb did not implement
18303 DW_OP_form_tls_address until after 7.12. */
18304 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18305 : DW_OP_GNU_push_tls_address);
18306 }
18307 else
18308 {
18309 if (!targetm.emutls.debug_form_tls_address
18310 || !(dwarf_version >= 3 || !dwarf_strict))
18311 return 0;
18312 /* We stuffed the control variable into the DECL_VALUE_EXPR
18313 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18314 no longer appear in gimple code. We used the control
18315 		 variable specifically so that we could pick it up here.  */
18316 loc = DECL_VALUE_EXPR (loc);
18317 tls_op = DW_OP_form_tls_address;
18318 }
18319
18320 rtl = rtl_for_decl_location (loc);
18321 if (rtl == NULL_RTX)
18322 return 0;
18323
18324 if (!MEM_P (rtl))
18325 return 0;
18326 rtl = XEXP (rtl, 0);
18327 if (! CONSTANT_P (rtl))
18328 return 0;
18329
18330 ret = new_addr_loc_descr (rtl, dtprel);
18331 ret1 = new_loc_descr (tls_op, 0, 0);
18332 add_loc_descr (&ret, ret1);
18333
18334 have_address = 1;
18335 break;
18336 }
18337 /* FALLTHRU */
18338
18339 case PARM_DECL:
18340 if (context != NULL && context->dpi != NULL
18341 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18342 {
18343 /* We are generating code for a DWARF procedure and we want to access
18344 one of its arguments: find the appropriate argument offset and let
18345 the resolve_args_picking pass compute the offset that complies
18346 with the stack frame size. */
18347 unsigned i = 0;
18348 tree cursor;
18349
18350 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18351 cursor != NULL_TREE && cursor != loc;
18352 cursor = TREE_CHAIN (cursor), ++i)
18353 ;
18354 /* If we are translating a DWARF procedure, all referenced parameters
18355 must belong to the current function. */
18356 gcc_assert (cursor != NULL_TREE);
18357
18358 ret = new_loc_descr (DW_OP_pick, i, 0);
18359 ret->frame_offset_rel = 1;
18360 break;
18361 }
18362 /* FALLTHRU */
18363
18364 case RESULT_DECL:
18365 if (DECL_HAS_VALUE_EXPR_P (loc))
18366 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18367 want_address, context);
18368 /* FALLTHRU */
18369
18370 case FUNCTION_DECL:
18371 {
18372 rtx rtl;
18373 var_loc_list *loc_list = lookup_decl_loc (loc);
18374
18375 if (loc_list && loc_list->first)
18376 {
18377 list_ret = dw_loc_list (loc_list, loc, want_address);
18378 have_address = want_address != 0;
18379 break;
18380 }
18381 rtl = rtl_for_decl_location (loc);
18382 if (rtl == NULL_RTX)
18383 {
18384 if (TREE_CODE (loc) != FUNCTION_DECL
18385 && early_dwarf
18386 && current_function_decl
18387 && want_address != 1
18388 && ! DECL_IGNORED_P (loc)
18389 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18390 || POINTER_TYPE_P (TREE_TYPE (loc)))
18391 && DECL_CONTEXT (loc) == current_function_decl
18392 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18393 <= DWARF2_ADDR_SIZE))
18394 {
18395 dw_die_ref ref = lookup_decl_die (loc);
18396 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18397 if (ref)
18398 {
18399 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18400 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18401 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18402 }
18403 else
18404 {
18405 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18406 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18407 }
18408 break;
18409 }
18410 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18411 return 0;
18412 }
18413 else if (CONST_INT_P (rtl))
18414 {
18415 HOST_WIDE_INT val = INTVAL (rtl);
18416 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18417 val &= GET_MODE_MASK (DECL_MODE (loc));
18418 ret = int_loc_descriptor (val);
18419 }
18420 else if (GET_CODE (rtl) == CONST_STRING)
18421 {
18422 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18423 return 0;
18424 }
18425 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18426 ret = new_addr_loc_descr (rtl, dtprel_false);
18427 else
18428 {
18429 machine_mode mode, mem_mode;
18430
18431 /* Certain constructs can only be represented at top-level. */
18432 if (want_address == 2)
18433 {
18434 ret = loc_descriptor (rtl, VOIDmode,
18435 VAR_INIT_STATUS_INITIALIZED);
18436 have_address = 1;
18437 }
18438 else
18439 {
18440 mode = GET_MODE (rtl);
18441 mem_mode = VOIDmode;
18442 if (MEM_P (rtl))
18443 {
18444 mem_mode = mode;
18445 mode = get_address_mode (rtl);
18446 rtl = XEXP (rtl, 0);
18447 have_address = 1;
18448 }
18449 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18450 VAR_INIT_STATUS_INITIALIZED);
18451 }
18452 if (!ret)
18453 expansion_failed (loc, rtl,
18454 "failed to produce loc descriptor for rtl");
18455 }
18456 }
18457 break;
18458
18459 case MEM_REF:
18460 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18461 {
18462 have_address = 1;
18463 goto do_plus;
18464 }
18465 /* Fallthru. */
18466 case INDIRECT_REF:
18467 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18468 have_address = 1;
18469 break;
18470
18471 case TARGET_MEM_REF:
18472 case SSA_NAME:
18473 case DEBUG_EXPR_DECL:
18474 return NULL;
18475
18476 case COMPOUND_EXPR:
18477 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18478 context);
18479
18480 CASE_CONVERT:
18481 case VIEW_CONVERT_EXPR:
18482 case SAVE_EXPR:
18483 case MODIFY_EXPR:
18484 case NON_LVALUE_EXPR:
18485 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18486 context);
18487
18488 case COMPONENT_REF:
18489 case BIT_FIELD_REF:
18490 case ARRAY_REF:
18491 case ARRAY_RANGE_REF:
18492 case REALPART_EXPR:
18493 case IMAGPART_EXPR:
18494 {
18495 tree obj, offset;
18496 poly_int64 bitsize, bitpos, bytepos;
18497 machine_mode mode;
18498 int unsignedp, reversep, volatilep = 0;
18499
18500 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18501 &unsignedp, &reversep, &volatilep);
18502
18503 gcc_assert (obj != loc);
18504
18505 list_ret = loc_list_from_tree_1 (obj,
18506 want_address == 2
18507 && known_eq (bitpos, 0)
18508 && !offset ? 2 : 1,
18509 context);
18510       /* TODO: We can extract the value of a small expression via shifting even
18511 for nonzero bitpos. */
18512 if (list_ret == 0)
18513 return 0;
18514 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18515 || !multiple_p (bitsize, BITS_PER_UNIT))
18516 {
18517 expansion_failed (loc, NULL_RTX,
18518 "bitfield access");
18519 return 0;
18520 }
18521
18522 if (offset != NULL_TREE)
18523 {
18524 /* Variable offset. */
18525 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18526 if (list_ret1 == 0)
18527 return 0;
18528 add_loc_list (&list_ret, list_ret1);
18529 if (!list_ret)
18530 return 0;
18531 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18532 }
18533
18534 HOST_WIDE_INT value;
18535 if (bytepos.is_constant (&value) && value > 0)
18536 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18537 value, 0));
18538 else if (maybe_ne (bytepos, 0))
18539 loc_list_plus_const (list_ret, bytepos);
18540
18541 have_address = 1;
18542 break;
18543 }
18544
18545 case INTEGER_CST:
18546 if ((want_address || !tree_fits_shwi_p (loc))
18547 && (ret = cst_pool_loc_descr (loc)))
18548 have_address = 1;
18549 else if (want_address == 2
18550 && tree_fits_shwi_p (loc)
18551 && (ret = address_of_int_loc_descriptor
18552 (int_size_in_bytes (TREE_TYPE (loc)),
18553 tree_to_shwi (loc))))
18554 have_address = 1;
18555 else if (tree_fits_shwi_p (loc))
18556 ret = int_loc_descriptor (tree_to_shwi (loc));
18557 else if (tree_fits_uhwi_p (loc))
18558 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18559 else
18560 {
18561 expansion_failed (loc, NULL_RTX,
18562 "Integer operand is not host integer");
18563 return 0;
18564 }
18565 break;
18566
18567 case CONSTRUCTOR:
18568 case REAL_CST:
18569 case STRING_CST:
18570 case COMPLEX_CST:
18571 if ((ret = cst_pool_loc_descr (loc)))
18572 have_address = 1;
18573 else if (TREE_CODE (loc) == CONSTRUCTOR)
18574 {
18575 tree type = TREE_TYPE (loc);
18576 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18577 unsigned HOST_WIDE_INT offset = 0;
18578 unsigned HOST_WIDE_INT cnt;
18579 constructor_elt *ce;
18580
18581 if (TREE_CODE (type) == RECORD_TYPE)
18582 {
18583 /* This is very limited, but it's enough to output
18584 pointers to member functions, as long as the
18585 referenced function is defined in the current
18586 translation unit. */
18587 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18588 {
18589 tree val = ce->value;
18590
18591 tree field = ce->index;
18592
18593 if (val)
18594 STRIP_NOPS (val);
18595
18596 if (!field || DECL_BIT_FIELD (field))
18597 {
18598 expansion_failed (loc, NULL_RTX,
18599 "bitfield in record type constructor");
18600 size = offset = (unsigned HOST_WIDE_INT)-1;
18601 ret = NULL;
18602 break;
18603 }
18604
18605 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18606 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18607 gcc_assert (pos + fieldsize <= size);
18608 if (pos < offset)
18609 {
18610 expansion_failed (loc, NULL_RTX,
18611 "out-of-order fields in record constructor");
18612 size = offset = (unsigned HOST_WIDE_INT)-1;
18613 ret = NULL;
18614 break;
18615 }
18616 if (pos > offset)
18617 {
18618 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18619 add_loc_descr (&ret, ret1);
18620 offset = pos;
18621 }
18622 if (val && fieldsize != 0)
18623 {
18624 ret1 = loc_descriptor_from_tree (val, want_address, context);
18625 if (!ret1)
18626 {
18627 expansion_failed (loc, NULL_RTX,
18628 "unsupported expression in field");
18629 size = offset = (unsigned HOST_WIDE_INT)-1;
18630 ret = NULL;
18631 break;
18632 }
18633 add_loc_descr (&ret, ret1);
18634 }
18635 if (fieldsize)
18636 {
18637 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18638 add_loc_descr (&ret, ret1);
18639 offset = pos + fieldsize;
18640 }
18641 }
18642
18643 if (offset != size)
18644 {
18645 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18646 add_loc_descr (&ret, ret1);
18647 offset = size;
18648 }
18649
18650 have_address = !!want_address;
18651 }
18652 else
18653 expansion_failed (loc, NULL_RTX,
18654 "constructor of non-record type");
18655 }
18656 else
18657 /* We can construct small constants here using int_loc_descriptor. */
18658 expansion_failed (loc, NULL_RTX,
18659 "constructor or constant not in constant pool");
18660 break;
18661
18662 case TRUTH_AND_EXPR:
18663 case TRUTH_ANDIF_EXPR:
18664 case BIT_AND_EXPR:
18665 op = DW_OP_and;
18666 goto do_binop;
18667
18668 case TRUTH_XOR_EXPR:
18669 case BIT_XOR_EXPR:
18670 op = DW_OP_xor;
18671 goto do_binop;
18672
18673 case TRUTH_OR_EXPR:
18674 case TRUTH_ORIF_EXPR:
18675 case BIT_IOR_EXPR:
18676 op = DW_OP_or;
18677 goto do_binop;
18678
18679 case FLOOR_DIV_EXPR:
18680 case CEIL_DIV_EXPR:
18681 case ROUND_DIV_EXPR:
18682 case TRUNC_DIV_EXPR:
18683 case EXACT_DIV_EXPR:
18684 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18685 return 0;
18686 op = DW_OP_div;
18687 goto do_binop;
18688
18689 case MINUS_EXPR:
18690 op = DW_OP_minus;
18691 goto do_binop;
18692
18693 case FLOOR_MOD_EXPR:
18694 case CEIL_MOD_EXPR:
18695 case ROUND_MOD_EXPR:
18696 case TRUNC_MOD_EXPR:
18697 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18698 {
18699 op = DW_OP_mod;
18700 goto do_binop;
18701 }
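      /* For signed operands, open-code a % b as a - (a / b) * b instead of
	 relying on DW_OP_mod; the sequence built below is
	 DW_OP_over; DW_OP_over; DW_OP_div; DW_OP_mul; DW_OP_minus.  */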
18702 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18703 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18704 if (list_ret == 0 || list_ret1 == 0)
18705 return 0;
18706
18707 add_loc_list (&list_ret, list_ret1);
18708 if (list_ret == 0)
18709 return 0;
18710 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18711 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18712 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18713 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18714 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18715 break;
18716
18717 case MULT_EXPR:
18718 op = DW_OP_mul;
18719 goto do_binop;
18720
18721 case LSHIFT_EXPR:
18722 op = DW_OP_shl;
18723 goto do_binop;
18724
18725 case RSHIFT_EXPR:
18726 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18727 goto do_binop;
18728
18729 case POINTER_PLUS_EXPR:
18730 case PLUS_EXPR:
18731 do_plus:
18732 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18733 {
18734 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18735 smarter to encode their opposite. The DW_OP_plus_uconst operation
18736 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18737 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18738 bytes, Y being the size of the operation that pushes the opposite
18739 of the addend. So let's choose the smallest representation. */
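	  /* A rough illustration, assuming a 64-bit target: for an addend of
	     -1, DW_OP_plus_uconst would take 1 + 10 bytes (a ten-byte ULEB128
	     for 0xffffffffffffffff), whereas DW_OP_lit1; DW_OP_minus takes
	     only 2 bytes.  */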
18740 const tree tree_addend = TREE_OPERAND (loc, 1);
18741 offset_int wi_addend;
18742 HOST_WIDE_INT shwi_addend;
18743 dw_loc_descr_ref loc_naddend;
18744
18745 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18746 if (list_ret == 0)
18747 return 0;
18748
18749 /* Try to get the literal to push. It is the opposite of the addend,
18750 	     and since we rely on wrapping during DWARF evaluation, first decode
18751 the literal as a "DWARF-sized" signed number. */
18752 wi_addend = wi::to_offset (tree_addend);
18753 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18754 shwi_addend = wi_addend.to_shwi ();
18755 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18756 ? int_loc_descriptor (-shwi_addend)
18757 : NULL;
18758
18759 if (loc_naddend != NULL
18760 && ((unsigned) size_of_uleb128 (shwi_addend)
18761 > size_of_loc_descr (loc_naddend)))
18762 {
18763 add_loc_descr_to_each (list_ret, loc_naddend);
18764 add_loc_descr_to_each (list_ret,
18765 new_loc_descr (DW_OP_minus, 0, 0));
18766 }
18767 else
18768 {
18769 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18770 {
18771 loc_naddend = loc_cur;
18772 loc_cur = loc_cur->dw_loc_next;
18773 ggc_free (loc_naddend);
18774 }
18775 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18776 }
18777 break;
18778 }
18779
18780 op = DW_OP_plus;
18781 goto do_binop;
18782
18783 case LE_EXPR:
18784 op = DW_OP_le;
18785 goto do_comp_binop;
18786
18787 case GE_EXPR:
18788 op = DW_OP_ge;
18789 goto do_comp_binop;
18790
18791 case LT_EXPR:
18792 op = DW_OP_lt;
18793 goto do_comp_binop;
18794
18795 case GT_EXPR:
18796 op = DW_OP_gt;
18797 goto do_comp_binop;
18798
18799 do_comp_binop:
18800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18801 {
18802 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18803 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18804 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18805 TREE_CODE (loc));
18806 break;
18807 }
18808 else
18809 goto do_binop;
18810
18811 case EQ_EXPR:
18812 op = DW_OP_eq;
18813 goto do_binop;
18814
18815 case NE_EXPR:
18816 op = DW_OP_ne;
18817 goto do_binop;
18818
18819 do_binop:
18820 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18821 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18822 if (list_ret == 0 || list_ret1 == 0)
18823 return 0;
18824
18825 add_loc_list (&list_ret, list_ret1);
18826 if (list_ret == 0)
18827 return 0;
18828 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18829 break;
18830
18831 case TRUTH_NOT_EXPR:
18832 case BIT_NOT_EXPR:
18833 op = DW_OP_not;
18834 goto do_unop;
18835
18836 case ABS_EXPR:
18837 op = DW_OP_abs;
18838 goto do_unop;
18839
18840 case NEGATE_EXPR:
18841 op = DW_OP_neg;
18842 goto do_unop;
18843
18844 do_unop:
18845 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18846 if (list_ret == 0)
18847 return 0;
18848
18849 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18850 break;
18851
18852 case MIN_EXPR:
18853 case MAX_EXPR:
18854 {
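	/* Rewrite MIN (a, b) as a > b ? b : a and MAX (a, b) as a < b ? b : a,
	   then let the COND_EXPR handling below emit the conditional.  */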
18855 const enum tree_code code =
18856 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18857
18858 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18859 build2 (code, integer_type_node,
18860 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18861 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18862 }
18863
18864 /* fall through */
18865
18866 case COND_EXPR:
18867 {
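	/* Emit roughly: <cond>; DW_OP_bra L1; <else>; DW_OP_skip L2;
	   L1: <then>; L2: DW_OP_nop.  DW_OP_bra jumps to the "then" expression
	   when the popped condition is nonzero; otherwise execution falls
	   through to the "else" expression and then skips over "then".  */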
18868 dw_loc_descr_ref lhs
18869 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18870 dw_loc_list_ref rhs
18871 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18872 dw_loc_descr_ref bra_node, jump_node, tmp;
18873
18874 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18875 if (list_ret == 0 || lhs == 0 || rhs == 0)
18876 return 0;
18877
18878 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18879 add_loc_descr_to_each (list_ret, bra_node);
18880
18881 add_loc_list (&list_ret, rhs);
18882 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18883 add_loc_descr_to_each (list_ret, jump_node);
18884
18885 add_loc_descr_to_each (list_ret, lhs);
18886 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18887 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18888
18889 /* ??? Need a node to point the skip at. Use a nop. */
18890 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18891 add_loc_descr_to_each (list_ret, tmp);
18892 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18893 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18894 }
18895 break;
18896
18897 case FIX_TRUNC_EXPR:
18898 return 0;
18899
18900 default:
18901 /* Leave front-end specific codes as simply unknown. This comes
18902 up, for instance, with the C STMT_EXPR. */
18903 if ((unsigned int) TREE_CODE (loc)
18904 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18905 {
18906 expansion_failed (loc, NULL_RTX,
18907 "language specific tree node");
18908 return 0;
18909 }
18910
18911       /* Otherwise this is a generic code; we should just list all of
18912 	 these explicitly.  We forgot one. */
18913 if (flag_checking)
18914 gcc_unreachable ();
18915
18916 /* In a release build, we want to degrade gracefully: better to
18917 generate incomplete debugging information than to crash. */
18918 return NULL;
18919 }
18920
18921 if (!ret && !list_ret)
18922 return 0;
18923
18924 if (want_address == 2 && !have_address
18925 && (dwarf_version >= 4 || !dwarf_strict))
18926 {
18927 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18928 {
18929 expansion_failed (loc, NULL_RTX,
18930 "DWARF address size mismatch");
18931 return 0;
18932 }
18933 if (ret)
18934 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18935 else
18936 add_loc_descr_to_each (list_ret,
18937 new_loc_descr (DW_OP_stack_value, 0, 0));
18938 have_address = 1;
18939 }
18940 /* Show if we can't fill the request for an address. */
18941 if (want_address && !have_address)
18942 {
18943 expansion_failed (loc, NULL_RTX,
18944 "Want address and only have value");
18945 return 0;
18946 }
18947
18948 gcc_assert (!ret || !list_ret);
18949
18950 /* If we've got an address and don't want one, dereference. */
18951 if (!want_address && have_address)
18952 {
18953 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18954
18955 if (size > DWARF2_ADDR_SIZE || size == -1)
18956 {
18957 expansion_failed (loc, NULL_RTX,
18958 "DWARF address size mismatch");
18959 return 0;
18960 }
18961 else if (size == DWARF2_ADDR_SIZE)
18962 op = DW_OP_deref;
18963 else
18964 op = DW_OP_deref_size;
18965
18966 if (ret)
18967 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18968 else
18969 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18970 }
18971 if (ret)
18972 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18973
18974 return list_ret;
18975 }
18976
18977 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18978 expressions. */
18979
18980 static dw_loc_list_ref
18981 loc_list_from_tree (tree loc, int want_address,
18982 struct loc_descr_context *context)
18983 {
18984 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18985
18986 for (dw_loc_list_ref loc_cur = result;
18987 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18988 loc_descr_without_nops (loc_cur->expr);
18989 return result;
18990 }
18991
18992 /* Same as above but return only a single location expression. */
18993 static dw_loc_descr_ref
18994 loc_descriptor_from_tree (tree loc, int want_address,
18995 struct loc_descr_context *context)
18996 {
18997 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18998 if (!ret)
18999 return NULL;
19000 if (ret->dw_loc_next)
19001 {
19002 expansion_failed (loc, NULL_RTX,
19003 "Location list where only loc descriptor needed");
19004 return NULL;
19005 }
19006 return ret->expr;
19007 }
19008
19009 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19010 pointer to the declared type for the relevant field variable, or return
19011 `integer_type_node' if the given node turns out to be an
19012 ERROR_MARK node. */
19013
19014 static inline tree
19015 field_type (const_tree decl)
19016 {
19017 tree type;
19018
19019 if (TREE_CODE (decl) == ERROR_MARK)
19020 return integer_type_node;
19021
19022 type = DECL_BIT_FIELD_TYPE (decl);
19023 if (type == NULL_TREE)
19024 type = TREE_TYPE (decl);
19025
19026 return type;
19027 }
19028
19029 /* Given a pointer to a tree node, return the alignment in bits for
19030 it, or else return BITS_PER_WORD if the node actually turns out to
19031 be an ERROR_MARK node. */
19032
19033 static inline unsigned
19034 simple_type_align_in_bits (const_tree type)
19035 {
19036 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19037 }
19038
19039 static inline unsigned
19040 simple_decl_align_in_bits (const_tree decl)
19041 {
19042 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19043 }
19044
19045 /* Return the result of rounding T up to ALIGN. */
19046
19047 static inline offset_int
19048 round_up_to_align (const offset_int &t, unsigned int align)
19049 {
19050 return wi::udiv_trunc (t + align - 1, align) * align;
19051 }
19052
19053 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19054 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19055 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19056 if we fail to return the size in one of these two forms. */
19057
19058 static dw_loc_descr_ref
19059 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19060 {
19061 tree tree_size;
19062 struct loc_descr_context ctx;
19063
19064   /* Prefer to return a constant integer, if possible. */
19065 *cst_size = int_size_in_bytes (type);
19066 if (*cst_size != -1)
19067 return NULL;
19068
19069 ctx.context_type = const_cast<tree> (type);
19070 ctx.base_decl = NULL_TREE;
19071 ctx.dpi = NULL;
19072 ctx.placeholder_arg = false;
19073 ctx.placeholder_seen = false;
19074
19075 type = TYPE_MAIN_VARIANT (type);
19076 tree_size = TYPE_SIZE_UNIT (type);
19077 return ((tree_size != NULL_TREE)
19078 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19079 : NULL);
19080 }
19081
19082 /* Helper structure for RECORD_TYPE processing. */
19083 struct vlr_context
19084 {
19085 /* Root RECORD_TYPE. It is needed to generate data member location
19086 descriptions in variable-length records (VLR), but also to cope with
19087 variants, which are composed of nested structures multiplexed with
19088 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19089      function processing a FIELD_DECL, it is required to be non-null. */
19090 tree struct_type;
19091 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19092 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19093 this variant part as part of the root record (in storage units). For
19094 regular records, it must be NULL_TREE. */
19095 tree variant_part_offset;
19096 };
19097
19098 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19099 addressed byte of the "containing object" for the given FIELD_DECL. If
19100 possible, return a native constant through CST_OFFSET (in which case NULL is
19101 returned); otherwise return a DWARF expression that computes the offset.
19102
19103 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19104 that offset is, either because the argument turns out to be a pointer to an
19105 ERROR_MARK node, or because the offset expression is too complex for us.
19106
19107 CTX is required: see the comment for VLR_CONTEXT. */
19108
19109 static dw_loc_descr_ref
19110 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19111 HOST_WIDE_INT *cst_offset)
19112 {
19113 tree tree_result;
19114 dw_loc_list_ref loc_result;
19115
19116 *cst_offset = 0;
19117
19118 if (TREE_CODE (decl) == ERROR_MARK)
19119 return NULL;
19120 else
19121 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19122
19123   /* We cannot handle variable bit offsets at the moment, so abort if that's
19124      the case. */
19125 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19126 return NULL;
19127
19128   /* We used to handle only constant offsets in all cases.  Now, we properly
19129      handle dynamic byte offsets only when the PCC bitfield layout rules don't
19130 matter. */
19131 if (PCC_BITFIELD_TYPE_MATTERS
19132 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19133 {
19134 offset_int object_offset_in_bits;
19135 offset_int object_offset_in_bytes;
19136 offset_int bitpos_int;
19137 tree type;
19138 tree field_size_tree;
19139 offset_int deepest_bitpos;
19140 offset_int field_size_in_bits;
19141 unsigned int type_align_in_bits;
19142 unsigned int decl_align_in_bits;
19143 offset_int type_size_in_bits;
19144
19145 bitpos_int = wi::to_offset (bit_position (decl));
19146 type = field_type (decl);
19147 type_size_in_bits = offset_int_type_size_in_bits (type);
19148 type_align_in_bits = simple_type_align_in_bits (type);
19149
19150 field_size_tree = DECL_SIZE (decl);
19151
19152 /* The size could be unspecified if there was an error, or for
19153 a flexible array member. */
19154 if (!field_size_tree)
19155 field_size_tree = bitsize_zero_node;
19156
19157 /* If the size of the field is not constant, use the type size. */
19158 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19159 field_size_in_bits = wi::to_offset (field_size_tree);
19160 else
19161 field_size_in_bits = type_size_in_bits;
19162
19163 decl_align_in_bits = simple_decl_align_in_bits (decl);
19164
19165 /* The GCC front-end doesn't make any attempt to keep track of the
19166 starting bit offset (relative to the start of the containing
19167 structure type) of the hypothetical "containing object" for a
19168 bit-field. Thus, when computing the byte offset value for the
19169 start of the "containing object" of a bit-field, we must deduce
19170 this information on our own. This can be rather tricky to do in
19171 some cases. For example, handling the following structure type
19172 definition when compiling for an i386/i486 target (which only
19173 aligns long long's to 32-bit boundaries) can be very tricky:
19174
19175 struct S { int field1; long long field2:31; };
19176
19177 Fortunately, there is a simple rule-of-thumb which can be used
19178 in such cases. When compiling for an i386/i486, GCC will
19179 allocate 8 bytes for the structure shown above. It decides to
19180 do this based upon one simple rule for bit-field allocation.
19181 GCC allocates each "containing object" for each bit-field at
19182 the first (i.e. lowest addressed) legitimate alignment boundary
19183 (based upon the required minimum alignment for the declared
19184 type of the field) which it can possibly use, subject to the
19185 condition that there is still enough available space remaining
19186 in the containing object (when allocated at the selected point)
19187 to fully accommodate all of the bits of the bit-field itself.
19188
19189 This simple rule makes it obvious why GCC allocates 8 bytes for
19190 each object of the structure type shown above. When looking
19191 for a place to allocate the "containing object" for `field2',
19192 the compiler simply tries to allocate a 64-bit "containing
19193 object" at each successive 32-bit boundary (starting at zero)
19194 	 until it finds a place to allocate that 64-bit field such that
19195 	 at least 31 contiguous (and previously unallocated) bits remain
19196 	 within that selected 64-bit field.  (As it turns out, for the
19197 example above, the compiler finds it is OK to allocate the
19198 "containing object" 64-bit field at bit-offset zero within the
19199 structure type.)
19200
19201 Here we attempt to work backwards from the limited set of facts
19202 we're given, and we try to deduce from those facts, where GCC
19203 must have believed that the containing object started (within
19204 the structure type). The value we deduce is then used (by the
19205 callers of this routine) to generate DW_AT_location and
19206 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19207 the case of DW_AT_location, regular fields as well). */
19208
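      /* Illustrative numbers for the i386 example above (assuming field2 sits
	 at bit position 32, with DECL_SIZE 31 bits, a 64-bit type and 32-bit
	 type alignment): deepest_bitpos = 32 + 31 = 63, so the first guess for
	 the containing object is 63 - 64 = -1 bits, which rounds up to bit
	 offset 0; i.e. the 64-bit containing object starts at byte 0 of the
	 structure.  */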
19209 /* Figure out the bit-distance from the start of the structure to
19210 the "deepest" bit of the bit-field. */
19211 deepest_bitpos = bitpos_int + field_size_in_bits;
19212
19213 /* This is the tricky part. Use some fancy footwork to deduce
19214 where the lowest addressed bit of the containing object must
19215 be. */
19216 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19217
19218 /* Round up to type_align by default. This works best for
19219 bitfields. */
19220 object_offset_in_bits
19221 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19222
19223 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19224 {
19225 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19226
19227 /* Round up to decl_align instead. */
19228 object_offset_in_bits
19229 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19230 }
19231
19232 object_offset_in_bytes
19233 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19234 if (ctx->variant_part_offset == NULL_TREE)
19235 {
19236 *cst_offset = object_offset_in_bytes.to_shwi ();
19237 return NULL;
19238 }
19239 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19240 }
19241 else
19242 tree_result = byte_position (decl);
19243
19244 if (ctx->variant_part_offset != NULL_TREE)
19245 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19246 ctx->variant_part_offset, tree_result);
19247
19248 /* If the byte offset is a constant, it's simpler to handle a native
19249 constant than a DWARF expression. */
19250 if (TREE_CODE (tree_result) == INTEGER_CST)
19251 {
19252 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19253 return NULL;
19254 }
19255 struct loc_descr_context loc_ctx = {
19256 ctx->struct_type, /* context_type */
19257 NULL_TREE, /* base_decl */
19258 NULL, /* dpi */
19259 false, /* placeholder_arg */
19260 false /* placeholder_seen */
19261 };
19262 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19263
19264 /* We want a DWARF expression: abort if we only have a location list with
19265 multiple elements. */
19266 if (!loc_result || !single_element_loc_list_p (loc_result))
19267 return NULL;
19268 else
19269 return loc_result->expr;
19270 }
19271 \f
19272 /* The following routines define various Dwarf attributes and any data
19273 associated with them. */
19274
19275 /* Add a location description attribute value to a DIE.
19276
19277 This emits location attributes suitable for whole variables and
19278 whole parameters. Note that the location attributes for struct fields are
19279 generated by the routine `data_member_location_attribute' below. */
19280
19281 static inline void
19282 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19283 dw_loc_list_ref descr)
19284 {
19285 bool check_no_locviews = true;
19286 if (descr == 0)
19287 return;
19288 if (single_element_loc_list_p (descr))
19289 add_AT_loc (die, attr_kind, descr->expr);
19290 else
19291 {
19292 add_AT_loc_list (die, attr_kind, descr);
19293 gcc_assert (descr->ll_symbol);
19294 if (attr_kind == DW_AT_location && descr->vl_symbol
19295 && dwarf2out_locviews_in_attribute ())
19296 {
19297 add_AT_view_list (die, DW_AT_GNU_locviews);
19298 check_no_locviews = false;
19299 }
19300 }
19301
19302 if (check_no_locviews)
19303 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19304 }
19305
19306 /* Add DW_AT_accessibility attribute to DIE if needed. */
19307
19308 static void
19309 add_accessibility_attribute (dw_die_ref die, tree decl)
19310 {
19311 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19312 children, otherwise the default is DW_ACCESS_public. In DWARF2
19313 the default has always been DW_ACCESS_public. */
19314 if (TREE_PROTECTED (decl))
19315 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19316 else if (TREE_PRIVATE (decl))
19317 {
19318 if (dwarf_version == 2
19319 || die->die_parent == NULL
19320 || die->die_parent->die_tag != DW_TAG_class_type)
19321 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19322 }
19323 else if (dwarf_version > 2
19324 && die->die_parent
19325 && die->die_parent->die_tag == DW_TAG_class_type)
19326 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19327 }
19328
19329 /* Attach the specialized form of location attribute used for data members of
19330 struct and union types. In the special case of a FIELD_DECL node which
19331 represents a bit-field, the "offset" part of this special location
19332 descriptor must indicate the distance in bytes from the lowest-addressed
19333 byte of the containing struct or union type to the lowest-addressed byte of
19334 the "containing object" for the bit-field. (See the `field_byte_offset'
19335 function above).
19336
19337 For any given bit-field, the "containing object" is a hypothetical object
19338 (of some integral or enum type) within which the given bit-field lives. The
19339 type of this hypothetical "containing object" is always the same as the
19340 declared type of the individual bit-field itself (for GCC anyway... the
19341 DWARF spec doesn't actually mandate this). Note that it is the size (in
19342 bytes) of the hypothetical "containing object" which will be given in the
19343 DW_AT_byte_size attribute for this bit-field. (See the
19344 `byte_size_attribute' function below.) It is also used when calculating the
19345 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19346 function below.)
19347
19348 CTX is required: see the comment for VLR_CONTEXT. */
19349
19350 static void
19351 add_data_member_location_attribute (dw_die_ref die,
19352 tree decl,
19353 struct vlr_context *ctx)
19354 {
19355 HOST_WIDE_INT offset;
19356 dw_loc_descr_ref loc_descr = 0;
19357
19358 if (TREE_CODE (decl) == TREE_BINFO)
19359 {
19360 /* We're working on the TAG_inheritance for a base class. */
19361 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19362 {
19363 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19364 aren't at a fixed offset from all (sub)objects of the same
19365 type. We need to extract the appropriate offset from our
19366 vtable. The following dwarf expression means
19367
19368 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19369
19370 This is specific to the V3 ABI, of course. */
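	  /* As an illustration (a sketch of what the code below builds), if
	     BINFO_VPTR_FIELD is -Offset, the resulting expression is roughly

	       DW_OP_dup	ObAddr ObAddr
	       DW_OP_deref	ObAddr *ObAddr
	       <push Offset>	ObAddr *ObAddr Offset
	       DW_OP_minus	ObAddr (*ObAddr - Offset)
	       DW_OP_deref	ObAddr *((*ObAddr) - Offset)
	       DW_OP_plus	BaseAddr

	     where <push Offset> is whatever int_loc_descriptor chooses
	     (DW_OP_lit*, DW_OP_const*, ...) and the right-hand column shows
	     the DWARF stack after each operation, top at the right.  */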
19371
19372 dw_loc_descr_ref tmp;
19373
19374 /* Make a copy of the object address. */
19375 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19376 add_loc_descr (&loc_descr, tmp);
19377
19378 /* Extract the vtable address. */
19379 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19380 add_loc_descr (&loc_descr, tmp);
19381
19382 /* Calculate the address of the offset. */
19383 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19384 gcc_assert (offset < 0);
19385
19386 tmp = int_loc_descriptor (-offset);
19387 add_loc_descr (&loc_descr, tmp);
19388 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19389 add_loc_descr (&loc_descr, tmp);
19390
19391 /* Extract the offset. */
19392 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19393 add_loc_descr (&loc_descr, tmp);
19394
19395 /* Add it to the object address. */
19396 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19397 add_loc_descr (&loc_descr, tmp);
19398 }
19399 else
19400 offset = tree_to_shwi (BINFO_OFFSET (decl));
19401 }
19402 else
19403 {
19404 loc_descr = field_byte_offset (decl, ctx, &offset);
19405
19406 /* If loc_descr is available then we know the field offset is dynamic.
19407 However, GDB does not handle dynamic field offsets very well at the
19408 moment. */
19409 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19410 {
19411 loc_descr = NULL;
19412 offset = 0;
19413 }
19414
19415 /* Data member location evaluation starts with the base address on the
19416 stack. Compute the field offset and add it to this base address. */
19417 else if (loc_descr != NULL)
19418 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19419 }
19420
19421 if (! loc_descr)
19422 {
19423 /* Although DW_AT_data_bit_offset was already added in DWARF4,
19424 consumers such as GDB only added support for it in November 2016.
19425 For DWARF5 we need newer debug info consumers anyway. We might
19426 change this to dwarf_version >= 4 once most consumers have caught up. */
19427 if (dwarf_version >= 5
19428 && TREE_CODE (decl) == FIELD_DECL
19429 && DECL_BIT_FIELD_TYPE (decl))
19430 {
19431 tree off = bit_position (decl);
19432 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19433 {
19434 remove_AT (die, DW_AT_byte_size);
19435 remove_AT (die, DW_AT_bit_offset);
19436 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19437 return;
19438 }
19439 }
19440 if (dwarf_version > 2)
19441 {
19442 /* Don't need to output a location expression, just the constant. */
19443 if (offset < 0)
19444 add_AT_int (die, DW_AT_data_member_location, offset);
19445 else
19446 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19447 return;
19448 }
19449 else
19450 {
19451 enum dwarf_location_atom op;
19452
19453 /* The DWARF2 standard says that we should assume that the structure
19454 address is already on the stack, so we can specify a structure
19455 field address by using DW_OP_plus_uconst. */
19456 op = DW_OP_plus_uconst;
19457 loc_descr = new_loc_descr (op, offset, 0);
19458 }
19459 }
19460
19461 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19462 }
19463
19464 /* Writes integer values to dw_vec_const array. */
19465
19466 static void
19467 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19468 {
19469 while (size != 0)
19470 {
19471 *dest++ = val & 0xff;
19472 val >>= 8;
19473 --size;
19474 }
19475 }
19476
19477 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19478
19479 static HOST_WIDE_INT
19480 extract_int (const unsigned char *src, unsigned int size)
19481 {
19482 HOST_WIDE_INT val = 0;
19483
19484 src += size;
19485 while (size != 0)
19486 {
19487 val <<= 8;
19488 val |= *--src & 0xff;
19489 --size;
19490 }
19491 return val;
19492 }
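/* For example (purely illustrative): insert_int (0x0102, 2, buf) stores
   buf[0] = 0x02 and buf[1] = 0x01, and extract_int (buf, 2) then returns
   0x0102 again; insert_int always stores the least significant byte first,
   regardless of host or target byte order.  */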
19493
19494 /* Writes wide_int values to dw_vec_const array. */
19495
19496 static void
19497 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19498 {
19499 int i;
19500
19501 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19502 {
19503 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19504 return;
19505 }
19506
19507 /* We'd have to extend this code to support odd sizes. */
19508 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19509
19510 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19511
19512 if (WORDS_BIG_ENDIAN)
19513 for (i = n - 1; i >= 0; i--)
19514 {
19515 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19516 dest += sizeof (HOST_WIDE_INT);
19517 }
19518 else
19519 for (i = 0; i < n; i++)
19520 {
19521 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19522 dest += sizeof (HOST_WIDE_INT);
19523 }
19524 }
19525
19526 /* Writes floating point values to dw_vec_const array. */
19527
19528 static void
19529 insert_float (const_rtx rtl, unsigned char *array)
19530 {
19531 long val[4];
19532 int i;
19533 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19534
19535 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19536
19537 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19538 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19539 {
19540 insert_int (val[i], 4, array);
19541 array += 4;
19542 }
19543 }
19544
19545 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19546 does not have a "location" either in memory or in a register. These
19547 things can arise in GNU C when a constant is passed as an actual parameter
19548 to an inlined function. They can also arise in C++ where declared
19549 constants do not necessarily get memory "homes". */
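/* For instance (an illustrative case only): when a call such as f (42) to an
   inline function is inlined, the formal parameter of the inlined instance
   may have no run-time location at all; the CONST_INT handling below then
   lets its DIE carry DW_AT_const_value 42 instead of a DW_AT_location.  */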
19550
19551 static bool
19552 add_const_value_attribute (dw_die_ref die, rtx rtl)
19553 {
19554 switch (GET_CODE (rtl))
19555 {
19556 case CONST_INT:
19557 {
19558 HOST_WIDE_INT val = INTVAL (rtl);
19559
19560 if (val < 0)
19561 add_AT_int (die, DW_AT_const_value, val);
19562 else
19563 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19564 }
19565 return true;
19566
19567 case CONST_WIDE_INT:
19568 {
19569 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19570 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19571 (unsigned int) CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19572 wide_int w = wi::zext (w1, prec);
19573 add_AT_wide (die, DW_AT_const_value, w);
19574 }
19575 return true;
19576
19577 case CONST_DOUBLE:
19578 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19579 floating-point constant. A CONST_DOUBLE is used whenever the
19580 constant requires more than one word in order to be adequately
19581 represented. */
19582 if (TARGET_SUPPORTS_WIDE_INT == 0
19583 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19584 add_AT_double (die, DW_AT_const_value,
19585 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19586 else
19587 {
19588 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19589 unsigned int length = GET_MODE_SIZE (mode);
19590 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19591
19592 insert_float (rtl, array);
19593 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19594 }
19595 return true;
19596
19597 case CONST_VECTOR:
19598 {
19599 unsigned int length;
19600 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19601 return false;
19602
19603 machine_mode mode = GET_MODE (rtl);
19604 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19605 unsigned char *array
19606 = ggc_vec_alloc<unsigned char> (length * elt_size);
19607 unsigned int i;
19608 unsigned char *p;
19609 machine_mode imode = GET_MODE_INNER (mode);
19610
19611 switch (GET_MODE_CLASS (mode))
19612 {
19613 case MODE_VECTOR_INT:
19614 for (i = 0, p = array; i < length; i++, p += elt_size)
19615 {
19616 rtx elt = CONST_VECTOR_ELT (rtl, i);
19617 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19618 }
19619 break;
19620
19621 case MODE_VECTOR_FLOAT:
19622 for (i = 0, p = array; i < length; i++, p += elt_size)
19623 {
19624 rtx elt = CONST_VECTOR_ELT (rtl, i);
19625 insert_float (elt, p);
19626 }
19627 break;
19628
19629 default:
19630 gcc_unreachable ();
19631 }
19632
19633 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19634 }
19635 return true;
19636
19637 case CONST_STRING:
19638 if (dwarf_version >= 4 || !dwarf_strict)
19639 {
19640 dw_loc_descr_ref loc_result;
19641 resolve_one_addr (&rtl);
19642 rtl_addr:
19643 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19644 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19645 add_AT_loc (die, DW_AT_location, loc_result);
19646 vec_safe_push (used_rtx_array, rtl);
19647 return true;
19648 }
19649 return false;
19650
19651 case CONST:
19652 if (CONSTANT_P (XEXP (rtl, 0)))
19653 return add_const_value_attribute (die, XEXP (rtl, 0));
19654 /* FALLTHROUGH */
19655 case SYMBOL_REF:
19656 if (!const_ok_for_output (rtl))
19657 return false;
19658 /* FALLTHROUGH */
19659 case LABEL_REF:
19660 if (dwarf_version >= 4 || !dwarf_strict)
19661 goto rtl_addr;
19662 return false;
19663
19664 case PLUS:
19665 /* In cases where an inlined instance of an inline function is passed
19666 the address of an `auto' variable (which is local to the caller) we
19667 can get a situation where the DECL_RTL of the artificial local
19668 variable (for the inlining) which acts as a stand-in for the
19669 corresponding formal parameter (of the inline function) will look
19670 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19671 exactly a compile-time constant expression, but it isn't the address
19672 of the (artificial) local variable either. Rather, it represents the
19673 *value* which the artificial local variable always has during its
19674 lifetime. We currently have no way to represent such quasi-constant
19675 values in Dwarf, so for now we just punt and generate nothing. */
19676 return false;
19677
19678 case HIGH:
19679 case CONST_FIXED:
19680 case MINUS:
19681 case SIGN_EXTEND:
19682 case ZERO_EXTEND:
19683 return false;
19684
19685 case MEM:
19686 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19687 && MEM_READONLY_P (rtl)
19688 && GET_MODE (rtl) == BLKmode)
19689 {
19690 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19691 return true;
19692 }
19693 return false;
19694
19695 default:
19696 /* No other kinds of rtx should be possible here. */
19697 gcc_unreachable ();
19698 }
19699 return false;
19700 }
19701
19702 /* Determine whether the evaluation of EXPR references any variables
19703 or functions which aren't otherwise used (and therefore may not be
19704 output). */
19705 static tree
19706 reference_to_unused (tree * tp, int * walk_subtrees,
19707 void * data ATTRIBUTE_UNUSED)
19708 {
19709 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19710 *walk_subtrees = 0;
19711
19712 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19713 && ! TREE_ASM_WRITTEN (*tp))
19714 return *tp;
19715 /* ??? The C++ FE emits debug information for using decls, so
19716 putting gcc_unreachable here falls over. See PR31899. For now
19717 be conservative. */
19718 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19719 return *tp;
19720 else if (VAR_P (*tp))
19721 {
19722 varpool_node *node = varpool_node::get (*tp);
19723 if (!node || !node->definition)
19724 return *tp;
19725 }
19726 else if (TREE_CODE (*tp) == FUNCTION_DECL
19727 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19728 {
19729 /* The call graph machinery must have finished analyzing,
19730 optimizing and gimplifying the CU by now.
19731 So if *TP has no call graph node associated
19732 to it, it means *TP will not be emitted. */
19733 if (!cgraph_node::get (*tp))
19734 return *tp;
19735 }
19736 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19737 return *tp;
19738
19739 return NULL_TREE;
19740 }
19741
19742 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19743 for use in a later add_const_value_attribute call. */
19744
19745 static rtx
19746 rtl_for_decl_init (tree init, tree type)
19747 {
19748 rtx rtl = NULL_RTX;
19749
19750 STRIP_NOPS (init);
19751
19752 /* If a variable is initialized with a string constant without embedded
19753 zeros, build CONST_STRING. */
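  /* For example (illustrative only): for

       static const char msg[6] = "hello";

     the checks below all pass (one-byte element mode, domain 0 .. 5, no
     embedded zeros), so we wrap a CONST_STRING in a read-only BLKmode MEM.
     Something like `char buf[8] = "hi"' fails the domain/length comparison
     and rtl simply stays NULL_RTX.  */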
19754 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19755 {
19756 tree enttype = TREE_TYPE (type);
19757 tree domain = TYPE_DOMAIN (type);
19758 scalar_int_mode mode;
19759
19760 if (is_int_mode (TYPE_MODE (enttype), &mode)
19761 && GET_MODE_SIZE (mode) == 1
19762 && domain
19763 && TYPE_MAX_VALUE (domain)
19764 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19765 && integer_zerop (TYPE_MIN_VALUE (domain))
19766 && compare_tree_int (TYPE_MAX_VALUE (domain),
19767 TREE_STRING_LENGTH (init) - 1) == 0
19768 && ((size_t) TREE_STRING_LENGTH (init)
19769 == strlen (TREE_STRING_POINTER (init)) + 1))
19770 {
19771 rtl = gen_rtx_CONST_STRING (VOIDmode,
19772 ggc_strdup (TREE_STRING_POINTER (init)));
19773 rtl = gen_rtx_MEM (BLKmode, rtl);
19774 MEM_READONLY_P (rtl) = 1;
19775 }
19776 }
19777 /* Other aggregates, and complex values, could be represented using
19778 CONCAT: FIXME! */
19779 else if (AGGREGATE_TYPE_P (type)
19780 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19781 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19782 || TREE_CODE (type) == COMPLEX_TYPE)
19783 ;
19784 /* Vectors only work if their mode is supported by the target.
19785 FIXME: generic vectors ought to work too. */
19786 else if (TREE_CODE (type) == VECTOR_TYPE
19787 && !VECTOR_MODE_P (TYPE_MODE (type)))
19788 ;
19789 /* If the initializer is something that we know will expand into an
19790 immediate RTL constant, expand it now. We must be careful not to
19791 reference variables which won't be output. */
19792 else if (initializer_constant_valid_p (init, type)
19793 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19794 {
19795 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19796 possible. */
19797 if (TREE_CODE (type) == VECTOR_TYPE)
19798 switch (TREE_CODE (init))
19799 {
19800 case VECTOR_CST:
19801 break;
19802 case CONSTRUCTOR:
19803 if (TREE_CONSTANT (init))
19804 {
19805 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19806 bool constant_p = true;
19807 tree value;
19808 unsigned HOST_WIDE_INT ix;
19809
19810 /* Even when ctor is constant, it might contain non-*_CST
19811 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19812 belong in VECTOR_CST nodes. */
19813 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19814 if (!CONSTANT_CLASS_P (value))
19815 {
19816 constant_p = false;
19817 break;
19818 }
19819
19820 if (constant_p)
19821 {
19822 init = build_vector_from_ctor (type, elts);
19823 break;
19824 }
19825 }
19826 /* FALLTHRU */
19827
19828 default:
19829 return NULL;
19830 }
19831
19832 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19833
19834 /* If expand_expr returns a MEM, it wasn't immediate. */
19835 gcc_assert (!rtl || !MEM_P (rtl));
19836 }
19837
19838 return rtl;
19839 }
19840
19841 /* Generate RTL for the variable DECL to represent its location. */
19842
19843 static rtx
19844 rtl_for_decl_location (tree decl)
19845 {
19846 rtx rtl;
19847
19848 /* Here we have to decide where we are going to say the parameter "lives"
19849 (as far as the debugger is concerned). We only have a couple of
19850 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19851
19852 DECL_RTL normally indicates where the parameter lives during most of the
19853 activation of the function. If optimization is enabled however, this
19854 could be either NULL or else a pseudo-reg. Both of those cases indicate
19855 that the parameter doesn't really live anywhere (as far as the code
19856 generation parts of GCC are concerned) during most of the function's
19857 activation. That will happen (for example) if the parameter is never
19858 referenced within the function.
19859
19860 We could just generate a location descriptor here for all non-NULL
19861 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19862 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19863 where DECL_RTL is NULL or is a pseudo-reg.
19864
19865 Note however that we can only get away with using DECL_INCOMING_RTL as
19866 a backup substitute for DECL_RTL in certain limited cases. In cases
19867 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19868 we can be sure that the parameter was passed using the same type as it is
19869 declared to have within the function, and that its DECL_INCOMING_RTL
19870 points us to a place where a value of that type is passed.
19871
19872 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19873 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19874 because in these cases DECL_INCOMING_RTL points us to a value of some
19875 type which is *different* from the type of the parameter itself. Thus,
19876 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19877 such cases, the debugger would end up (for example) trying to fetch a
19878 `float' from a place which actually contains the first part of a
19879 `double'. That would lead to really incorrect and confusing
19880 output at debug-time.
19881
19882 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19883 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19884 are a couple of exceptions however. On little-endian machines we can
19885 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19886 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19887 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19888 when (on a little-endian machine) a non-prototyped function has a
19889 parameter declared to be of type `short' or `char'. In such cases,
19890 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19891 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19892 passed `int' value. If the debugger then uses that address to fetch
19893 a `short' or a `char' (on a little-endian machine) the result will be
19894 the correct data, so we allow for such exceptional cases below.
19895
19896 Note that our goal here is to describe the place where the given formal
19897 parameter lives during most of the function's activation (i.e. between the
19898 end of the prologue and the start of the epilogue). We'll do that as best
19899 as we can. Note however that if the given formal parameter is modified
19900 sometime during the execution of the function, then a stack backtrace (at
19901 debug-time) will show the function as having been called with the *new*
19902 value rather than the value which was originally passed in. This happens
19903 rarely enough that it is not a major problem, but it *is* a problem, and
19904 I'd like to fix it.
19905
19906 A future version of dwarf2out.c may generate two additional attributes for
19907 any given DW_TAG_formal_parameter DIE which will describe the "passed
19908 type" and the "passed location" for the given formal parameter in addition
19909 to the attributes we now generate to indicate the "declared type" and the
19910 "active location" for each parameter. This additional set of attributes
19911 could be used by debuggers for stack backtraces. Separately, note that
19912 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19913 This happens (for example) for inlined-instances of inline function formal
19914 parameters which are never referenced. This really shouldn't be
19915 happening. All PARM_DECL nodes should get valid non-NULL
19916 DECL_INCOMING_RTL values. FIXME. */
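  /* A concrete illustration of the exceptional case above (a sketch): for a
     non-prototyped function with a `short' parameter, TREE_TYPE (decl) is
     `short' while DECL_ARG_TYPE (decl) is the promoted `int'.  On a
     little-endian target the low-order bytes of the passed `int' slot hold
     the `short' value, so DECL_INCOMING_RTL can be used as-is; on a
     big-endian target the code below offsets the MEM by
     GET_MODE_SIZE (pmode) - GET_MODE_SIZE (dmode) bytes so that it points
     at the bytes which actually hold the `short'.  */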
19917
19918 /* Use DECL_RTL as the "location" unless we find something better. */
19919 rtl = DECL_RTL_IF_SET (decl);
19920
19921 /* When generating abstract instances, ignore everything except
19922 constants, symbols living in memory, and symbols living in
19923 fixed registers. */
19924 if (! reload_completed)
19925 {
19926 if (rtl
19927 && (CONSTANT_P (rtl)
19928 || (MEM_P (rtl)
19929 && CONSTANT_P (XEXP (rtl, 0)))
19930 || (REG_P (rtl)
19931 && VAR_P (decl)
19932 && TREE_STATIC (decl))))
19933 {
19934 rtl = targetm.delegitimize_address (rtl);
19935 return rtl;
19936 }
19937 rtl = NULL_RTX;
19938 }
19939 else if (TREE_CODE (decl) == PARM_DECL)
19940 {
19941 if (rtl == NULL_RTX
19942 || is_pseudo_reg (rtl)
19943 || (MEM_P (rtl)
19944 && is_pseudo_reg (XEXP (rtl, 0))
19945 && DECL_INCOMING_RTL (decl)
19946 && MEM_P (DECL_INCOMING_RTL (decl))
19947 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19948 {
19949 tree declared_type = TREE_TYPE (decl);
19950 tree passed_type = DECL_ARG_TYPE (decl);
19951 machine_mode dmode = TYPE_MODE (declared_type);
19952 machine_mode pmode = TYPE_MODE (passed_type);
19953
19954 /* This decl represents a formal parameter which was optimized out.
19955 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19956 all cases where (rtl == NULL_RTX) just below. */
19957 if (dmode == pmode)
19958 rtl = DECL_INCOMING_RTL (decl);
19959 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19960 && SCALAR_INT_MODE_P (dmode)
19961 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19962 && DECL_INCOMING_RTL (decl))
19963 {
19964 rtx inc = DECL_INCOMING_RTL (decl);
19965 if (REG_P (inc))
19966 rtl = inc;
19967 else if (MEM_P (inc))
19968 {
19969 if (BYTES_BIG_ENDIAN)
19970 rtl = adjust_address_nv (inc, dmode,
19971 GET_MODE_SIZE (pmode)
19972 - GET_MODE_SIZE (dmode));
19973 else
19974 rtl = inc;
19975 }
19976 }
19977 }
19978
19979 /* If the parm was passed in registers, but lives on the stack, then
19980 make a big endian correction if the mode of the type of the
19981 parameter is not the same as the mode of the rtl. */
19982 /* ??? This is the same series of checks that are made in dbxout.c before
19983 we reach the big endian correction code there. It isn't clear if all
19984 of these checks are necessary here, but keeping them all is the safe
19985 thing to do. */
19986 else if (MEM_P (rtl)
19987 && XEXP (rtl, 0) != const0_rtx
19988 && ! CONSTANT_P (XEXP (rtl, 0))
19989 /* Not passed in memory. */
19990 && !MEM_P (DECL_INCOMING_RTL (decl))
19991 /* Not passed by invisible reference. */
19992 && (!REG_P (XEXP (rtl, 0))
19993 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19994 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19995 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19996 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19997 #endif
19998 )
19999 /* Big endian correction check. */
20000 && BYTES_BIG_ENDIAN
20001 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20002 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20003 UNITS_PER_WORD))
20004 {
20005 machine_mode addr_mode = get_address_mode (rtl);
20006 poly_int64 offset = (UNITS_PER_WORD
20007 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20008
20009 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20010 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20011 }
20012 }
20013 else if (VAR_P (decl)
20014 && rtl
20015 && MEM_P (rtl)
20016 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20017 {
20018 machine_mode addr_mode = get_address_mode (rtl);
20019 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20020 GET_MODE (rtl));
20021
20022 /* If a variable is declared "register" yet is smaller than
20023 a register, then if we store the variable to memory, it
20024 looks like we're storing a register-sized value, when in
20025 fact we are not. We need to adjust the offset of the
20026 storage location to reflect the actual value's bytes,
20027 else gdb will not be able to display it. */
20028 if (maybe_ne (offset, 0))
20029 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20030 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20031 }
20032
20033 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20034 and will have been substituted directly into all expressions that use it.
20035 C does not have such a concept, but C++ and other languages do. */
20036 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20037 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20038
20039 if (rtl)
20040 rtl = targetm.delegitimize_address (rtl);
20041
20042 /* If we don't look past the constant pool, we risk emitting a
20043 reference to a constant pool entry that isn't referenced from
20044 code, and thus is not emitted. */
20045 if (rtl)
20046 rtl = avoid_constant_pool_reference (rtl);
20047
20048 /* Try harder to get a rtl. If this symbol ends up not being emitted
20049 in the current CU, resolve_addr will remove the expression referencing
20050 it. */
20051 if (rtl == NULL_RTX
20052 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20053 && VAR_P (decl)
20054 && !DECL_EXTERNAL (decl)
20055 && TREE_STATIC (decl)
20056 && DECL_NAME (decl)
20057 && !DECL_HARD_REGISTER (decl)
20058 && DECL_MODE (decl) != VOIDmode)
20059 {
20060 rtl = make_decl_rtl_for_debug (decl);
20061 if (!MEM_P (rtl)
20062 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20063 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20064 rtl = NULL_RTX;
20065 }
20066
20067 return rtl;
20068 }
20069
20070 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20071 returned. If so, the decl for the COMMON block is returned, and the
20072 value is the offset into the common block for the symbol. */
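/* As an illustration (hypothetical Fortran source): given

	COMMON /BLK/ I, J

   the gfortran front end gives J a DECL_VALUE_EXPR that is a COMPONENT_REF
   into the variable representing the whole common block, so this function
   would return that block's decl and set *VALUE to J's byte offset within
   it (4 with a 4-byte default INTEGER).  */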
20073
20074 static tree
20075 fortran_common (tree decl, HOST_WIDE_INT *value)
20076 {
20077 tree val_expr, cvar;
20078 machine_mode mode;
20079 poly_int64 bitsize, bitpos;
20080 tree offset;
20081 HOST_WIDE_INT cbitpos;
20082 int unsignedp, reversep, volatilep = 0;
20083
20084 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20085 it does not have a value (the offset into the common area), or if it
20086 is thread local (as opposed to global) then it isn't common, and shouldn't
20087 be handled as such. */
20088 if (!VAR_P (decl)
20089 || !TREE_STATIC (decl)
20090 || !DECL_HAS_VALUE_EXPR_P (decl)
20091 || !is_fortran ())
20092 return NULL_TREE;
20093
20094 val_expr = DECL_VALUE_EXPR (decl);
20095 if (TREE_CODE (val_expr) != COMPONENT_REF)
20096 return NULL_TREE;
20097
20098 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20099 &unsignedp, &reversep, &volatilep);
20100
20101 if (cvar == NULL_TREE
20102 || !VAR_P (cvar)
20103 || DECL_ARTIFICIAL (cvar)
20104 || !TREE_PUBLIC (cvar)
20105 /* We don't expect to have to cope with variable offsets,
20106 since at present all static data must have a constant size. */
20107 || !bitpos.is_constant (&cbitpos))
20108 return NULL_TREE;
20109
20110 *value = 0;
20111 if (offset != NULL)
20112 {
20113 if (!tree_fits_shwi_p (offset))
20114 return NULL_TREE;
20115 *value = tree_to_shwi (offset);
20116 }
20117 if (cbitpos != 0)
20118 *value += cbitpos / BITS_PER_UNIT;
20119
20120 return cvar;
20121 }
20122
20123 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20124 data attribute for a variable or a parameter. We generate the
20125 DW_AT_const_value attribute only in those cases where the given variable
20126 or parameter does not have a true "location" either in memory or in a
20127 register. This can happen (for example) when a constant is passed as an
20128 actual argument in a call to an inline function. (It's possible that
20129 these things can crop up in other ways also.) Note that one type of
20130 constant value which can be passed into an inlined function is a constant
20131 pointer. This can happen for example if an actual argument in an inlined
20132 function call evaluates to a compile-time constant address.
20133
20134 CACHE_P is true if it is worth caching the location list for DECL,
20135 so that future calls can reuse it rather than regenerate it from scratch.
20136 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20137 since we will need to refer to them each time the function is inlined. */
20138
20139 static bool
20140 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20141 {
20142 rtx rtl;
20143 dw_loc_list_ref list;
20144 var_loc_list *loc_list;
20145 cached_dw_loc_list *cache;
20146
20147 if (early_dwarf)
20148 return false;
20149
20150 if (TREE_CODE (decl) == ERROR_MARK)
20151 return false;
20152
20153 if (get_AT (die, DW_AT_location)
20154 || get_AT (die, DW_AT_const_value))
20155 return true;
20156
20157 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20158 || TREE_CODE (decl) == RESULT_DECL);
20159
20160 /* Try to get some constant RTL for this decl, and use that as the value of
20161 the location. */
20162
20163 rtl = rtl_for_decl_location (decl);
20164 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20165 && add_const_value_attribute (die, rtl))
20166 return true;
20167
20168 /* See if we have a single-element location list that is equivalent to
20169 a constant value. In that case it is better to use
20170 add_const_value_attribute rather than to expand the constant value. */
20171 loc_list = lookup_decl_loc (decl);
20172 if (loc_list
20173 && loc_list->first
20174 && loc_list->first->next == NULL
20175 && NOTE_P (loc_list->first->loc)
20176 && NOTE_VAR_LOCATION (loc_list->first->loc)
20177 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20178 {
20179 struct var_loc_node *node;
20180
20181 node = loc_list->first;
20182 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20183 if (GET_CODE (rtl) == EXPR_LIST)
20184 rtl = XEXP (rtl, 0);
20185 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20186 && add_const_value_attribute (die, rtl))
20187 return true;
20188 }
20189 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20190 list several times. See if we've already cached the contents. */
20191 list = NULL;
20192 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20193 cache_p = false;
20194 if (cache_p)
20195 {
20196 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20197 if (cache)
20198 list = cache->loc_list;
20199 }
20200 if (list == NULL)
20201 {
20202 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20203 NULL);
20204 /* It is usually worth caching this result if the decl is from
20205 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20206 if (cache_p && list && list->dw_loc_next)
20207 {
20208 cached_dw_loc_list **slot
20209 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20210 DECL_UID (decl),
20211 INSERT);
20212 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20213 cache->decl_id = DECL_UID (decl);
20214 cache->loc_list = list;
20215 *slot = cache;
20216 }
20217 }
20218 if (list)
20219 {
20220 add_AT_location_description (die, DW_AT_location, list);
20221 return true;
20222 }
20223 /* None of that worked, so it must not really have a location;
20224 try adding a constant value attribute from the DECL_INITIAL. */
20225 return tree_add_const_value_attribute_for_decl (die, decl);
20226 }
20227
20228 /* Helper function for tree_add_const_value_attribute. Natively encode
20229 initializer INIT into an array. Return true if successful. */
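/* For example (an illustrative sketch, assuming a little-endian target with
   2-byte shorts): for

	struct { short a; short b; } x = { 1, 2 };

   a successful call with SIZE == 4 fills ARRAY with
   { 0x01, 0x00, 0x02, 0x00 }.  Bit-fields, missing array bounds and size
   mismatches make the function return false instead.  */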
20230
20231 static bool
20232 native_encode_initializer (tree init, unsigned char *array, int size)
20233 {
20234 tree type;
20235
20236 if (init == NULL_TREE)
20237 return false;
20238
20239 STRIP_NOPS (init);
20240 switch (TREE_CODE (init))
20241 {
20242 case STRING_CST:
20243 type = TREE_TYPE (init);
20244 if (TREE_CODE (type) == ARRAY_TYPE)
20245 {
20246 tree enttype = TREE_TYPE (type);
20247 scalar_int_mode mode;
20248
20249 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20250 || GET_MODE_SIZE (mode) != 1)
20251 return false;
20252 if (int_size_in_bytes (type) != size)
20253 return false;
20254 if (size > TREE_STRING_LENGTH (init))
20255 {
20256 memcpy (array, TREE_STRING_POINTER (init),
20257 TREE_STRING_LENGTH (init));
20258 memset (array + TREE_STRING_LENGTH (init),
20259 '\0', size - TREE_STRING_LENGTH (init));
20260 }
20261 else
20262 memcpy (array, TREE_STRING_POINTER (init), size);
20263 return true;
20264 }
20265 return false;
20266 case CONSTRUCTOR:
20267 type = TREE_TYPE (init);
20268 if (int_size_in_bytes (type) != size)
20269 return false;
20270 if (TREE_CODE (type) == ARRAY_TYPE)
20271 {
20272 HOST_WIDE_INT min_index;
20273 unsigned HOST_WIDE_INT cnt;
20274 int curpos = 0, fieldsize;
20275 constructor_elt *ce;
20276
20277 if (TYPE_DOMAIN (type) == NULL_TREE
20278 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20279 return false;
20280
20281 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20282 if (fieldsize <= 0)
20283 return false;
20284
20285 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20286 memset (array, '\0', size);
20287 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20288 {
20289 tree val = ce->value;
20290 tree index = ce->index;
20291 int pos = curpos;
20292 if (index && TREE_CODE (index) == RANGE_EXPR)
20293 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20294 * fieldsize;
20295 else if (index)
20296 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20297
20298 if (val)
20299 {
20300 STRIP_NOPS (val);
20301 if (!native_encode_initializer (val, array + pos, fieldsize))
20302 return false;
20303 }
20304 curpos = pos + fieldsize;
20305 if (index && TREE_CODE (index) == RANGE_EXPR)
20306 {
20307 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20308 - tree_to_shwi (TREE_OPERAND (index, 0));
20309 while (count-- > 0)
20310 {
20311 if (val)
20312 memcpy (array + curpos, array + pos, fieldsize);
20313 curpos += fieldsize;
20314 }
20315 }
20316 gcc_assert (curpos <= size);
20317 }
20318 return true;
20319 }
20320 else if (TREE_CODE (type) == RECORD_TYPE
20321 || TREE_CODE (type) == UNION_TYPE)
20322 {
20323 tree field = NULL_TREE;
20324 unsigned HOST_WIDE_INT cnt;
20325 constructor_elt *ce;
20326
20327 if (int_size_in_bytes (type) != size)
20328 return false;
20329
20330 if (TREE_CODE (type) == RECORD_TYPE)
20331 field = TYPE_FIELDS (type);
20332
20333 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20334 {
20335 tree val = ce->value;
20336 int pos, fieldsize;
20337
20338 if (ce->index != 0)
20339 field = ce->index;
20340
20341 if (val)
20342 STRIP_NOPS (val);
20343
20344 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20345 return false;
20346
20347 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20348 && TYPE_DOMAIN (TREE_TYPE (field))
20349 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20350 return false;
20351 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20352 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20353 return false;
20354 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20355 pos = int_byte_position (field);
20356 gcc_assert (pos + fieldsize <= size);
20357 if (val && fieldsize != 0
20358 && !native_encode_initializer (val, array + pos, fieldsize))
20359 return false;
20360 }
20361 return true;
20362 }
20363 return false;
20364 case VIEW_CONVERT_EXPR:
20365 case NON_LVALUE_EXPR:
20366 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20367 default:
20368 return native_encode_expr (init, array, size) == size;
20369 }
20370 }
20371
20372 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20373 attribute is the const value T. */
20374
20375 static bool
20376 tree_add_const_value_attribute (dw_die_ref die, tree t)
20377 {
20378 tree init;
20379 tree type = TREE_TYPE (t);
20380 rtx rtl;
20381
20382 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20383 return false;
20384
20385 init = t;
20386 gcc_assert (!DECL_P (init));
20387
20388 if (TREE_CODE (init) == INTEGER_CST)
20389 {
20390 if (tree_fits_uhwi_p (init))
20391 {
20392 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20393 return true;
20394 }
20395 if (tree_fits_shwi_p (init))
20396 {
20397 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20398 return true;
20399 }
20400 }
20401 if (! early_dwarf)
20402 {
20403 rtl = rtl_for_decl_init (init, type);
20404 if (rtl)
20405 return add_const_value_attribute (die, rtl);
20406 }
20407 /* If the host and target are sane, try harder. */
20408 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20409 && initializer_constant_valid_p (init, type))
20410 {
20411 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20412 if (size > 0 && (int) size == size)
20413 {
20414 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20415
20416 if (native_encode_initializer (init, array, size))
20417 {
20418 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20419 return true;
20420 }
20421 ggc_free (array);
20422 }
20423 }
20424 return false;
20425 }
20426
20427 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20428 attribute is the const value of T, where T is an integral constant
20429 variable with static storage duration
20430 (so it can't be a PARM_DECL or a RESULT_DECL). */
20431
20432 static bool
20433 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20434 {
20435
20436 if (!decl
20437 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20438 || (VAR_P (decl) && !TREE_STATIC (decl)))
20439 return false;
20440
20441 if (TREE_READONLY (decl)
20442 && ! TREE_THIS_VOLATILE (decl)
20443 && DECL_INITIAL (decl))
20444 /* OK */;
20445 else
20446 return false;
20447
20448 /* Don't add DW_AT_const_value if abstract origin already has one. */
20449 if (get_AT (var_die, DW_AT_const_value))
20450 return false;
20451
20452 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20453 }
20454
20455 /* Convert the CFI instructions for the current function into a
20456 location list. This is used for DW_AT_frame_base when we are targeting
20457 a dwarf2 consumer that does not support the dwarf3
20458 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20459 expressions. */
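/* For illustration only (a sketch for a typical 32-bit x86 prologue
   `push %ebp; mov %esp, %ebp', with esp = DWARF register 4 and ebp =
   register 5): the CFA is esp+4 on entry, esp+8 after the push and ebp+8
   after the move, so the frame-base location list built below looks roughly
   like

	[entry, L1)  DW_OP_breg4 <4 + OFFSET>
	[L1, L2)     DW_OP_breg4 <8 + OFFSET>
	[L2, end)    DW_OP_breg5 <8 + OFFSET>

   with one range per region over which the CFA expression stays
   constant.  */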
20460
20461 static dw_loc_list_ref
20462 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20463 {
20464 int ix;
20465 dw_fde_ref fde;
20466 dw_loc_list_ref list, *list_tail;
20467 dw_cfi_ref cfi;
20468 dw_cfa_location last_cfa, next_cfa;
20469 const char *start_label, *last_label, *section;
20470 dw_cfa_location remember;
20471
20472 fde = cfun->fde;
20473 gcc_assert (fde != NULL);
20474
20475 section = secname_for_decl (current_function_decl);
20476 list_tail = &list;
20477 list = NULL;
20478
20479 memset (&next_cfa, 0, sizeof (next_cfa));
20480 next_cfa.reg = INVALID_REGNUM;
20481 remember = next_cfa;
20482
20483 start_label = fde->dw_fde_begin;
20484
20485 /* ??? Bald assumption that the CIE opcode list does not contain
20486 advance opcodes. */
20487 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20488 lookup_cfa_1 (cfi, &next_cfa, &remember);
20489
20490 last_cfa = next_cfa;
20491 last_label = start_label;
20492
20493 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20494 {
20495 /* If the first partition contained no CFI adjustments, the
20496 CIE opcodes apply to the whole first partition. */
20497 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20498 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20499 list_tail = &(*list_tail)->dw_loc_next;
20500 start_label = last_label = fde->dw_fde_second_begin;
20501 }
20502
20503 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20504 {
20505 switch (cfi->dw_cfi_opc)
20506 {
20507 case DW_CFA_set_loc:
20508 case DW_CFA_advance_loc1:
20509 case DW_CFA_advance_loc2:
20510 case DW_CFA_advance_loc4:
20511 if (!cfa_equal_p (&last_cfa, &next_cfa))
20512 {
20513 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20514 start_label, 0, last_label, 0, section);
20515
20516 list_tail = &(*list_tail)->dw_loc_next;
20517 last_cfa = next_cfa;
20518 start_label = last_label;
20519 }
20520 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20521 break;
20522
20523 case DW_CFA_advance_loc:
20524 /* The encoding is complex enough that we should never emit this. */
20525 gcc_unreachable ();
20526
20527 default:
20528 lookup_cfa_1 (cfi, &next_cfa, &remember);
20529 break;
20530 }
20531 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20532 {
20533 if (!cfa_equal_p (&last_cfa, &next_cfa))
20534 {
20535 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20536 start_label, 0, last_label, 0, section);
20537
20538 list_tail = &(*list_tail)->dw_loc_next;
20539 last_cfa = next_cfa;
20540 start_label = last_label;
20541 }
20542 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20543 start_label, 0, fde->dw_fde_end, 0, section);
20544 list_tail = &(*list_tail)->dw_loc_next;
20545 start_label = last_label = fde->dw_fde_second_begin;
20546 }
20547 }
20548
20549 if (!cfa_equal_p (&last_cfa, &next_cfa))
20550 {
20551 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20552 start_label, 0, last_label, 0, section);
20553 list_tail = &(*list_tail)->dw_loc_next;
20554 start_label = last_label;
20555 }
20556
20557 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20558 start_label, 0,
20559 fde->dw_fde_second_begin
20560 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20561 section);
20562
20563 maybe_gen_llsym (list);
20564
20565 return list;
20566 }
20567
20568 /* Compute a displacement from the "steady-state frame pointer" to the
20569 frame base (often the same as the CFA), and store it in
20570 frame_pointer_fb_offset. OFFSET is added to the displacement
20571 before the latter is negated. */
20572
20573 static void
20574 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20575 {
20576 rtx reg, elim;
20577
20578 #ifdef FRAME_POINTER_CFA_OFFSET
20579 reg = frame_pointer_rtx;
20580 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20581 #else
20582 reg = arg_pointer_rtx;
20583 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20584 #endif
20585
20586 elim = (ira_use_lra_p
20587 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20588 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20589 elim = strip_offset_and_add (elim, &offset);
20590
20591 frame_pointer_fb_offset = -offset;
20592
20593 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20594 in which to eliminate. This is because its stack pointer isn't
20595 directly accessible as a register within the ISA. To work around
20596 this, assume that while we cannot provide a proper value for
20597 frame_pointer_fb_offset, we won't need one either. We can use the
20598 hard frame pointer in debug info even if the frame pointer isn't used,
20599 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20600 which uses the DW_AT_frame_base attribute rather than the hard frame
20601 pointer directly. */
20602 frame_pointer_fb_offset_valid
20603 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20604 }
20605
20606 /* Generate a DW_AT_name attribute given some string value to be included as
20607 the value of the attribute. */
20608
20609 static void
20610 add_name_attribute (dw_die_ref die, const char *name_string)
20611 {
20612 if (name_string != NULL && *name_string != 0)
20613 {
20614 if (demangle_name_func)
20615 name_string = (*demangle_name_func) (name_string);
20616
20617 add_AT_string (die, DW_AT_name, name_string);
20618 }
20619 }
20620
20621 /* Generate a DW_AT_description attribute given some string value to be included
20622 as the value of the attribute. */
20623
20624 static void
20625 add_desc_attribute (dw_die_ref die, const char *name_string)
20626 {
20627 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20628 return;
20629
20630 if (name_string == NULL || *name_string == 0)
20631 return;
20632
20633 if (demangle_name_func)
20634 name_string = (*demangle_name_func) (name_string);
20635
20636 add_AT_string (die, DW_AT_description, name_string);
20637 }
20638
20639 /* Generate a DW_AT_description attribute given some decl to be included
20640 as the value of the attribute. */
20641
20642 static void
20643 add_desc_attribute (dw_die_ref die, tree decl)
20644 {
20645 tree decl_name;
20646
20647 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20648 return;
20649
20650 if (decl == NULL_TREE || !DECL_P (decl))
20651 return;
20652 decl_name = DECL_NAME (decl);
20653
20654 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20655 {
20656 const char *name = dwarf2_name (decl, 0);
20657 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20658 }
20659 else
20660 {
20661 char *desc = print_generic_expr_to_str (decl);
20662 add_desc_attribute (die, desc);
20663 free (desc);
20664 }
20665 }
20666
20667 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20668 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20669 of TYPE accordingly.
20670
20671 ??? This is a temporary measure until after we're able to generate
20672 regular DWARF for the complex Ada type system. */
20673
20674 static void
20675 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20676 dw_die_ref context_die)
20677 {
20678 tree dtype;
20679 dw_die_ref dtype_die;
20680
20681 if (!lang_hooks.types.descriptive_type)
20682 return;
20683
20684 dtype = lang_hooks.types.descriptive_type (type);
20685 if (!dtype)
20686 return;
20687
20688 dtype_die = lookup_type_die (dtype);
20689 if (!dtype_die)
20690 {
20691 gen_type_die (dtype, context_die);
20692 dtype_die = lookup_type_die (dtype);
20693 gcc_assert (dtype_die);
20694 }
20695
20696 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20697 }
20698
20699 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20700
20701 static const char *
20702 comp_dir_string (void)
20703 {
20704 const char *wd;
20705 char *wd_plus_sep = NULL;
20706 static const char *cached_wd = NULL;
20707
20708 if (cached_wd != NULL)
20709 return cached_wd;
20710
20711 wd = get_src_pwd ();
20712 if (wd == NULL)
20713 return NULL;
20714
20715 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20716 {
20717 size_t wdlen = strlen (wd);
20718 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20719 strcpy (wd_plus_sep, wd);
20720 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20721 wd_plus_sep [wdlen + 1] = 0;
20722 wd = wd_plus_sep;
20723 }
20724
20725 cached_wd = remap_debug_filename (wd);
20726
20727 /* remap_debug_filename can just pass through wd or return a new gc string.
20728 These two types can't both be stored in a GTY(())-tagged string, but since
20729 the cached value lives forever just copy it if needed. */
20730 if (cached_wd != wd)
20731 {
20732 cached_wd = xstrdup (cached_wd);
20733 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20734 free (wd_plus_sep);
20735 }
20736
20737 return cached_wd;
20738 }
20739
20740 /* Generate a DW_AT_comp_dir attribute for DIE. */
20741
20742 static void
20743 add_comp_dir_attribute (dw_die_ref die)
20744 {
20745 const char * wd = comp_dir_string ();
20746 if (wd != NULL)
20747 add_AT_string (die, DW_AT_comp_dir, wd);
20748 }
20749
20750 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20751 pointer computation, ...), output a representation for that value according
20752 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20753 loc_list_from_tree for the meaning of CONTEXT. */
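/* For example (a purely illustrative Ada-like case): the upper bound of an
   array nested in a record may be the COMPONENT_REF
   <PLACEHOLDER_EXPR>.bound_field.  Depending on FORMS and on what can be
   computed, the bound comes out below as a plain constant (when it folds to
   an INTEGER_CST), as a reference to an existing DIE for the bound decl, as
   a DWARF expression, or as a reference to an artificial DW_TAG_variable
   that carries the location.  */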
20754
20755 static void
20756 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20757 int forms, struct loc_descr_context *context)
20758 {
20759 dw_die_ref context_die, decl_die = NULL;
20760 dw_loc_list_ref list;
20761 bool strip_conversions = true;
20762 bool placeholder_seen = false;
20763
20764 while (strip_conversions)
20765 switch (TREE_CODE (value))
20766 {
20767 case ERROR_MARK:
20768 case SAVE_EXPR:
20769 return;
20770
20771 CASE_CONVERT:
20772 case VIEW_CONVERT_EXPR:
20773 value = TREE_OPERAND (value, 0);
20774 break;
20775
20776 default:
20777 strip_conversions = false;
20778 break;
20779 }
20780
20781 /* If possible and permitted, output the attribute as a constant. */
20782 if ((forms & dw_scalar_form_constant) != 0
20783 && TREE_CODE (value) == INTEGER_CST)
20784 {
20785 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20786
20787 /* If HOST_WIDE_INT is big enough then represent the bound as
20788 a constant value. We need to choose a form based on
20789 whether the type is signed or unsigned. We cannot just
20790 call add_AT_unsigned if the value itself is positive
20791 (add_AT_unsigned might add the unsigned value encoded as
20792 DW_FORM_data[1248]). Some DWARF consumers will look up the
20793 bounds type and then sign extend any unsigned values found
20794 for signed types. This is needed only for
20795 DW_AT_{lower,upper}_bound, since for most other attributes,
20796 consumers will treat DW_FORM_data[1248] as unsigned values,
20797 regardless of the underlying type. */
20798 if (prec <= HOST_BITS_PER_WIDE_INT
20799 || tree_fits_uhwi_p (value))
20800 {
20801 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20802 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20803 else
20804 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20805 }
20806 else
20807 /* Otherwise represent the bound as an unsigned value with
20808 the precision of its type. The precision and signedness
20809 of the type will be necessary to re-interpret it
20810 unambiguously. */
20811 add_AT_wide (die, attr, wi::to_wide (value));
20812 return;
20813 }
20814
20815 /* Otherwise, if it's possible and permitted too, output a reference to
20816 another DIE. */
20817 if ((forms & dw_scalar_form_reference) != 0)
20818 {
20819 tree decl = NULL_TREE;
20820
20821 /* Some type attributes reference an outer type. For instance, the upper
20822 bound of an array may reference an embedding record (this happens in
20823 Ada). */
20824 if (TREE_CODE (value) == COMPONENT_REF
20825 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20826 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20827 decl = TREE_OPERAND (value, 1);
20828
20829 else if (VAR_P (value)
20830 || TREE_CODE (value) == PARM_DECL
20831 || TREE_CODE (value) == RESULT_DECL)
20832 decl = value;
20833
20834 if (decl != NULL_TREE)
20835 {
20836 decl_die = lookup_decl_die (decl);
20837
20838 /* ??? Can this happen, or should the variable have been bound
20839 first? Probably it can, since I imagine that we try to create
20840 the types of parameters in the order in which they exist in
20841 the list, and won't have created a forward reference to a
20842 later parameter. */
20843 if (decl_die != NULL)
20844 {
20845 if (get_AT (decl_die, DW_AT_location)
20846 || get_AT (decl_die, DW_AT_const_value))
20847 {
20848 add_AT_die_ref (die, attr, decl_die);
20849 return;
20850 }
20851 }
20852 }
20853 }
20854
20855 /* Last chance: try to create a stack operation procedure to evaluate the
20856 value. Do nothing if even that is not possible or permitted. */
20857 if ((forms & dw_scalar_form_exprloc) == 0)
20858 return;
20859
20860 list = loc_list_from_tree (value, 2, context);
20861 if (context && context->placeholder_arg)
20862 {
20863 placeholder_seen = context->placeholder_seen;
20864 context->placeholder_seen = false;
20865 }
20866 if (list == NULL || single_element_loc_list_p (list))
20867 {
20868 /* If this attribute is neither a reference nor a constant, it is
20869 a DWARF expression rather than a location description. For that,
20870 loc_list_from_tree (value, 0, &context) is needed. */
20871 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20872 if (list2 && single_element_loc_list_p (list2))
20873 {
20874 if (placeholder_seen)
20875 {
20876 struct dwarf_procedure_info dpi;
20877 dpi.fndecl = NULL_TREE;
20878 dpi.args_count = 1;
20879 if (!resolve_args_picking (list2->expr, 1, &dpi))
20880 return;
20881 }
20882 add_AT_loc (die, attr, list2->expr);
20883 return;
20884 }
20885 }
20886
20887 /* If that failed to give a single element location list, fall back to
20888 outputting this as a reference... still if permitted. */
20889 if (list == NULL
20890 || (forms & dw_scalar_form_reference) == 0
20891 || placeholder_seen)
20892 return;
20893
20894 if (!decl_die)
20895 {
20896 if (current_function_decl == 0)
20897 context_die = comp_unit_die ();
20898 else
20899 context_die = lookup_decl_die (current_function_decl);
20900
20901 decl_die = new_die (DW_TAG_variable, context_die, value);
20902 add_AT_flag (decl_die, DW_AT_artificial, 1);
20903 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20904 context_die);
20905 }
20906
20907 add_AT_location_description (decl_die, DW_AT_location, list);
20908 add_AT_die_ref (die, attr, decl_die);
20909 }
20910
20911 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20912 default. */
20913
20914 static int
20915 lower_bound_default (void)
20916 {
20917 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20918 {
20919 case DW_LANG_C:
20920 case DW_LANG_C89:
20921 case DW_LANG_C99:
20922 case DW_LANG_C11:
20923 case DW_LANG_C_plus_plus:
20924 case DW_LANG_C_plus_plus_11:
20925 case DW_LANG_C_plus_plus_14:
20926 case DW_LANG_ObjC:
20927 case DW_LANG_ObjC_plus_plus:
20928 return 0;
20929 case DW_LANG_Fortran77:
20930 case DW_LANG_Fortran90:
20931 case DW_LANG_Fortran95:
20932 case DW_LANG_Fortran03:
20933 case DW_LANG_Fortran08:
20934 return 1;
20935 case DW_LANG_UPC:
20936 case DW_LANG_D:
20937 case DW_LANG_Python:
20938 return dwarf_version >= 4 ? 0 : -1;
20939 case DW_LANG_Ada95:
20940 case DW_LANG_Ada83:
20941 case DW_LANG_Cobol74:
20942 case DW_LANG_Cobol85:
20943 case DW_LANG_Modula2:
20944 case DW_LANG_PLI:
20945 return dwarf_version >= 4 ? 1 : -1;
20946 default:
20947 return -1;
20948 }
20949 }
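
/* An illustrative example (not in the original source): for a DW_LANG_C99
   compilation unit the function above returns 0 and for DW_LANG_Fortran95
   it returns 1, so the default lower bound of a C array such as

     int a[10];

   matches and add_bound_info below can omit DW_AT_lower_bound entirely;
   for DW_LANG_Ada95 a default of 1 is only assumed for DWARF 4 and up. */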
20950
20951 /* Given a tree node describing an array bound (either lower or upper), output
20952 a representation for that bound. */
20953
20954 static void
20955 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20956 tree bound, struct loc_descr_context *context)
20957 {
20958 int dflt;
20959
20960 while (1)
20961 switch (TREE_CODE (bound))
20962 {
20963 /* Strip all conversions. */
20964 CASE_CONVERT:
20965 case VIEW_CONVERT_EXPR:
20966 bound = TREE_OPERAND (bound, 0);
20967 break;
20968
20969 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20970 are even omitted when they are the default. */
20971 case INTEGER_CST:
20972 /* If the value for this bound is the default one, we can even omit the
20973 attribute. */
20974 if (bound_attr == DW_AT_lower_bound
20975 && tree_fits_shwi_p (bound)
20976 && (dflt = lower_bound_default ()) != -1
20977 && tree_to_shwi (bound) == dflt)
20978 return;
20979
20980 /* FALLTHRU */
20981
20982 default:
20983 /* Because of the complex interactions there can be with other GNAT
20984 encodings, GDB isn't ready yet to handle a proper DWARF description
20985 for self-referential subrange bounds: let GNAT encodings do the
20986 magic in such a case. */
20987 if (is_ada ()
20988 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20989 && contains_placeholder_p (bound))
20990 return;
20991
20992 add_scalar_info (subrange_die, bound_attr, bound,
20993 dw_scalar_form_constant
20994 | dw_scalar_form_exprloc
20995 | dw_scalar_form_reference,
20996 context);
20997 return;
20998 }
20999 }
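
/* An illustrative example (not in the original source): for a C declaration

     int arr[10];

   the index domain is [0, 9]. The lower bound 0 equals the C default from
   lower_bound_default, so DW_AT_lower_bound is omitted and only
   DW_AT_upper_bound is emitted, as the constant 9, on the enclosing
   DW_TAG_subrange_type DIE. */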
21000
21001 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
21002 possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
21003 Note that the block of subscript information for an array type also
21004 includes information about the element type of the given array type.
21005
21006 This function reuses previously set type and bound information if
21007 available. */
21008
21009 static void
21010 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
21011 {
21012 unsigned dimension_number;
21013 tree lower, upper;
21014 dw_die_ref child = type_die->die_child;
21015
21016 for (dimension_number = 0;
21017 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21018 type = TREE_TYPE (type), dimension_number++)
21019 {
21020 tree domain = TYPE_DOMAIN (type);
21021
21022 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21023 break;
21024
21025 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21026 and (in GNU C only) variable bounds. Handle all three forms
21027 here. */
21028
21029 /* Find and reuse a previously generated DW_TAG_subrange_type if
21030 available.
21031
21032 For multi-dimensional arrays, as we iterate through the
21033 various dimensions in the enclosing for loop above, we also
21034 iterate through the DIE children and pick at each
21035 DW_TAG_subrange_type previously generated (if available).
21036 Each child DW_TAG_subrange_type DIE describes the range of
21037 the current dimension. At this point we should have as many
21038 DW_TAG_subrange_type's as we have dimensions in the
21039 array. */
21040 dw_die_ref subrange_die = NULL;
21041 if (child)
21042 while (1)
21043 {
21044 child = child->die_sib;
21045 if (child->die_tag == DW_TAG_subrange_type)
21046 subrange_die = child;
21047 if (child == type_die->die_child)
21048 {
21049 /* If we wrapped around, stop looking next time. */
21050 child = NULL;
21051 break;
21052 }
21053 if (child->die_tag == DW_TAG_subrange_type)
21054 break;
21055 }
21056 if (!subrange_die)
21057 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21058
21059 if (domain)
21060 {
21061 /* We have an array type with specified bounds. */
21062 lower = TYPE_MIN_VALUE (domain);
21063 upper = TYPE_MAX_VALUE (domain);
21064
21065 /* Define the index type. */
21066 if (TREE_TYPE (domain)
21067 && !get_AT (subrange_die, DW_AT_type))
21068 {
21069 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21070 TREE_TYPE field. We can't emit debug info for this
21071 because it is an unnamed integral type. */
21072 if (TREE_CODE (domain) == INTEGER_TYPE
21073 && TYPE_NAME (domain) == NULL_TREE
21074 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21075 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21076 ;
21077 else
21078 add_type_attribute (subrange_die, TREE_TYPE (domain),
21079 TYPE_UNQUALIFIED, false, type_die);
21080 }
21081
21082 /* ??? If upper is NULL, the array has unspecified length,
21083 but it does have a lower bound. This happens with Fortran
21084 dimension arr(N:*).
21085 Since the debugger is definitely going to need to know N
21086 to produce useful results, go ahead and output the lower
21087 bound solo, and hope the debugger can cope. */
21088
21089 if (!get_AT (subrange_die, DW_AT_lower_bound))
21090 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21091 if (!get_AT (subrange_die, DW_AT_upper_bound)
21092 && !get_AT (subrange_die, DW_AT_count))
21093 {
21094 if (upper)
21095 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21096 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21097 /* Zero-length array. */
21098 add_bound_info (subrange_die, DW_AT_count,
21099 build_int_cst (TREE_TYPE (lower), 0), NULL);
21100 }
21101 }
21102
21103 /* Otherwise we have an array type with an unspecified length. The
21104 DWARF-2 spec does not say how to handle this; let's just leave out the
21105 bounds. */
21106 }
21107 }
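
/* An illustrative example (not in the original source): for a C declaration

     int m[4][5];

   with COLLAPSE_P true the loop above emits a single DW_TAG_array_type
   containing two DW_TAG_subrange_type children, one per dimension, with
   upper bounds 3 and 4 (the lower bounds of 0 being the C default and
   therefore omitted). */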
21108
21109 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21110
21111 static void
21112 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21113 {
21114 dw_die_ref decl_die;
21115 HOST_WIDE_INT size;
21116 dw_loc_descr_ref size_expr = NULL;
21117
21118 switch (TREE_CODE (tree_node))
21119 {
21120 case ERROR_MARK:
21121 size = 0;
21122 break;
21123 case ENUMERAL_TYPE:
21124 case RECORD_TYPE:
21125 case UNION_TYPE:
21126 case QUAL_UNION_TYPE:
21127 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21128 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21129 {
21130 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21131 return;
21132 }
21133 size_expr = type_byte_size (tree_node, &size);
21134 break;
21135 case FIELD_DECL:
21136 /* For a data member of a struct or union, the DW_AT_byte_size is
21137 generally given as the number of bytes normally allocated for an
21138 object of the *declared* type of the member itself. This is true
21139 even for bit-fields. */
21140 size = int_size_in_bytes (field_type (tree_node));
21141 break;
21142 default:
21143 gcc_unreachable ();
21144 }
21145
21146 /* Support for dynamically-sized objects was introduced in DWARF 3.
21147 At the moment, GDB does not handle variable byte sizes very well,
21148 though. */
21149 if ((dwarf_version >= 3 || !dwarf_strict)
21150 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21151 && size_expr != NULL)
21152 add_AT_loc (die, DW_AT_byte_size, size_expr);
21153
21154 /* Note that `size' might be -1 when we get to this point. If it is, that
21155 indicates that the byte size of the entity in question is variable and
21156 that we could not generate a DWARF expression that computes it. */
21157 if (size >= 0)
21158 add_AT_unsigned (die, DW_AT_byte_size, size);
21159 }
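
/* An illustrative example (not in the original source), assuming a target
   with 4-byte int: for the bit-field member in

     struct s { int b : 5; };

   the FIELD_DECL case above uses the *declared* type int, so the member's
   DIE gets DW_AT_byte_size 4 even though the field itself occupies only
   5 bits. */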
21160
21161 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21162 alignment. */
21163
21164 static void
21165 add_alignment_attribute (dw_die_ref die, tree tree_node)
21166 {
21167 if (dwarf_version < 5 && dwarf_strict)
21168 return;
21169
21170 unsigned align;
21171
21172 if (DECL_P (tree_node))
21173 {
21174 if (!DECL_USER_ALIGN (tree_node))
21175 return;
21176
21177 align = DECL_ALIGN_UNIT (tree_node);
21178 }
21179 else if (TYPE_P (tree_node))
21180 {
21181 if (!TYPE_USER_ALIGN (tree_node))
21182 return;
21183
21184 align = TYPE_ALIGN_UNIT (tree_node);
21185 }
21186 else
21187 gcc_unreachable ();
21188
21189 add_AT_unsigned (die, DW_AT_alignment, align);
21190 }
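
/* An illustrative example (not in the original source): for

     struct s { int i; } __attribute__ ((aligned (16)));

   TYPE_USER_ALIGN is set and TYPE_ALIGN_UNIT is 16, so the type's DIE gets
   DW_AT_alignment 16; types and decls that only have their default
   alignment get no DW_AT_alignment attribute at all. */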
21191
21192 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21193 which specifies the distance in bits from the highest order bit of the
21194 "containing object" for the bit-field to the highest order bit of the
21195 bit-field itself.
21196
21197 For any given bit-field, the "containing object" is a hypothetical object
21198 (of some integral or enum type) within which the given bit-field lives. The
21199 type of this hypothetical "containing object" is always the same as the
21200 declared type of the individual bit-field itself. The determination of the
21201 exact location of the "containing object" for a bit-field is rather
21202 complicated. It's handled by the `field_byte_offset' function (above).
21203
21204 CTX is required: see the comment for VLR_CONTEXT.
21205
21206 Note that it is the size (in bytes) of the hypothetical "containing object"
21207 which will be given in the DW_AT_byte_size attribute for this bit-field.
21208 (See `add_byte_size_attribute' above). */
21209
21210 static inline void
21211 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21212 {
21213 HOST_WIDE_INT object_offset_in_bytes;
21214 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21215 HOST_WIDE_INT bitpos_int;
21216 HOST_WIDE_INT highest_order_object_bit_offset;
21217 HOST_WIDE_INT highest_order_field_bit_offset;
21218 HOST_WIDE_INT bit_offset;
21219
21220 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21221
21222 /* Must be a field and a bit field. */
21223 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21224
21225 /* We can't yet handle bit-fields whose offsets are variable, so if we
21226 encounter such things, just return without generating any attribute
21227 whatsoever. Likewise if the size is variable or too large. */
21228 if (! tree_fits_shwi_p (bit_position (decl))
21229 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21230 return;
21231
21232 bitpos_int = int_bit_position (decl);
21233
21234 /* Note that the bit offset is always the distance (in bits) from the
21235 highest-order bit of the "containing object" to the highest-order bit of
21236 the bit-field itself. Since the "high-order end" of any object or field
21237 is different on big-endian and little-endian machines, the computation
21238 below must take account of these differences. */
21239 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21240 highest_order_field_bit_offset = bitpos_int;
21241
21242 if (! BYTES_BIG_ENDIAN)
21243 {
21244 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21245 highest_order_object_bit_offset +=
21246 simple_type_size_in_bits (original_type);
21247 }
21248
21249 bit_offset
21250 = (! BYTES_BIG_ENDIAN
21251 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21252 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21253
21254 if (bit_offset < 0)
21255 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21256 else
21257 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21258 }
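
/* A worked example (not in the original source), assuming a little-endian
   target with 32-bit int and a containing object placed at byte offset 0:
   for

     struct s { int a : 3; int b : 5; };

   field b has bit position 3 and size 5, so the code above computes
   highest_order_field_bit_offset = 3 + 5 = 8 and
   highest_order_object_bit_offset = 0 + 32 = 32, and emits
   DW_AT_bit_offset 32 - 8 = 24, i.e. the distance from the MSB of the
   containing int to the MSB of the bit-field; on a big-endian target the
   attribute would simply be 3. */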
21259
21260 /* For a FIELD_DECL node which represents a bit field, output an attribute
21261 which specifies the length in bits of the given field. */
21262
21263 static inline void
21264 add_bit_size_attribute (dw_die_ref die, tree decl)
21265 {
21266 /* Must be a field and a bit field. */
21267 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21268 && DECL_BIT_FIELD_TYPE (decl));
21269
21270 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21271 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21272 }
21273
21274 /* If the compiled language is ANSI C, add a 'prototyped' attribute if
21275 argument types are given for the parameters of a function. */
21276
21277 static inline void
21278 add_prototyped_attribute (dw_die_ref die, tree func_type)
21279 {
21280 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21281 {
21282 case DW_LANG_C:
21283 case DW_LANG_C89:
21284 case DW_LANG_C99:
21285 case DW_LANG_C11:
21286 case DW_LANG_ObjC:
21287 if (prototype_p (func_type))
21288 add_AT_flag (die, DW_AT_prototyped, 1);
21289 break;
21290 default:
21291 break;
21292 }
21293 }
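
/* An illustrative example (not in the original source): in a C compilation
   unit a function declared as

     int f (int);

   is prototyped, so its DIE gets DW_AT_prototyped 1, whereas an old-style
   declaration

     int g ();

   supplies no parameter types, prototype_p is false, and no attribute is
   added. */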
21294
21295 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21296 by looking in the type declaration, the object declaration equate table or
21297 the block mapping. */
21298
21299 static inline void
21300 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21301 {
21302 dw_die_ref origin_die = NULL;
21303
21304 /* For late LTO debug output we want to refer directly to the abstract
21305 DIE in the early debug rather than to the possibly existing concrete
21306 instance, and avoid creating the latter just for this purpose. */
21307 sym_off_pair *desc;
21308 if (in_lto_p
21309 && external_die_map
21310 && (desc = external_die_map->get (origin)))
21311 {
21312 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21313 desc->sym, desc->off);
21314 return;
21315 }
21316
21317 if (DECL_P (origin))
21318 origin_die = lookup_decl_die (origin);
21319 else if (TYPE_P (origin))
21320 origin_die = lookup_type_die (origin);
21321 else if (TREE_CODE (origin) == BLOCK)
21322 origin_die = lookup_block_die (origin);
21323
21324 /* XXX: Functions that are never lowered don't always have correct block
21325 trees (in the case of Java, and in some other languages, they simply
21326 have no block tree). For these functions, there is nothing we can really do to
21327 output correct debug info for inlined functions in all cases. Rather
21328 than die, we'll just produce deficient debug info now, in that we will
21329 have variables without a proper abstract origin. In the future, when all
21330 functions are lowered, we should re-add a gcc_assert (origin_die)
21331 here. */
21332
21333 if (origin_die)
21334 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21335 }
21336
21337 /* We do not currently support the pure_virtual attribute. */
21338
21339 static inline void
21340 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21341 {
21342 if (DECL_VINDEX (func_decl))
21343 {
21344 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21345
21346 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21347 add_AT_loc (die, DW_AT_vtable_elem_location,
21348 new_loc_descr (DW_OP_constu,
21349 tree_to_shwi (DECL_VINDEX (func_decl)),
21350 0));
21351
21352 /* GNU extension: Record what type this method came from originally. */
21353 if (debug_info_level > DINFO_LEVEL_TERSE
21354 && DECL_CONTEXT (func_decl))
21355 add_AT_die_ref (die, DW_AT_containing_type,
21356 lookup_type_die (DECL_CONTEXT (func_decl)));
21357 }
21358 }
21359 \f
21360 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21361 given decl. This was a vendor extension until DWARF 4
21362 standardized it. */
21363
21364 static void
21365 add_linkage_attr (dw_die_ref die, tree decl)
21366 {
21367 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21368
21369 /* Mimic what assemble_name_raw does with a leading '*'. */
21370 if (name[0] == '*')
21371 name = &name[1];
21372
21373 if (dwarf_version >= 4)
21374 add_AT_string (die, DW_AT_linkage_name, name);
21375 else
21376 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21377 }
21378
21379 /* Add source coordinate attributes for the given decl. */
21380
21381 static void
21382 add_src_coords_attributes (dw_die_ref die, tree decl)
21383 {
21384 expanded_location s;
21385
21386 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21387 return;
21388 s = expand_location (DECL_SOURCE_LOCATION (decl));
21389 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21390 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21391 if (debug_column_info && s.column)
21392 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21393 }
21394
21395 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21396
21397 static void
21398 add_linkage_name_raw (dw_die_ref die, tree decl)
21399 {
21400 /* Defer until we have an assembler name set. */
21401 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21402 {
21403 limbo_die_node *asm_name;
21404
21405 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21406 asm_name->die = die;
21407 asm_name->created_for = decl;
21408 asm_name->next = deferred_asm_name;
21409 deferred_asm_name = asm_name;
21410 }
21411 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21412 add_linkage_attr (die, decl);
21413 }
21414
21415 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21416
21417 static void
21418 add_linkage_name (dw_die_ref die, tree decl)
21419 {
21420 if (debug_info_level > DINFO_LEVEL_NONE
21421 && VAR_OR_FUNCTION_DECL_P (decl)
21422 && TREE_PUBLIC (decl)
21423 && !(VAR_P (decl) && DECL_REGISTER (decl))
21424 && die->die_tag != DW_TAG_member)
21425 add_linkage_name_raw (die, decl);
21426 }
21427
21428 /* Add a DW_AT_name attribute and source coordinate attribute for the
21429 given decl, but only if it actually has a name. */
21430
21431 static void
21432 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21433 bool no_linkage_name)
21434 {
21435 tree decl_name;
21436
21437 decl_name = DECL_NAME (decl);
21438 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21439 {
21440 const char *name = dwarf2_name (decl, 0);
21441 if (name)
21442 add_name_attribute (die, name);
21443 else
21444 add_desc_attribute (die, decl);
21445
21446 if (! DECL_ARTIFICIAL (decl))
21447 add_src_coords_attributes (die, decl);
21448
21449 if (!no_linkage_name)
21450 add_linkage_name (die, decl);
21451 }
21452 else
21453 add_desc_attribute (die, decl);
21454
21455 #ifdef VMS_DEBUGGING_INFO
21456 /* Get the function's name, as described by its RTL. This may be different
21457 from the DECL_NAME name used in the source file. */
21458 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21459 {
21460 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21461 XEXP (DECL_RTL (decl), 0), false);
21462 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21463 }
21464 #endif /* VMS_DEBUGGING_INFO */
21465 }
21466
21467 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21468
21469 static void
21470 add_discr_value (dw_die_ref die, dw_discr_value *value)
21471 {
21472 dw_attr_node attr;
21473
21474 attr.dw_attr = DW_AT_discr_value;
21475 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21476 attr.dw_attr_val.val_entry = NULL;
21477 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21478 if (value->pos)
21479 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21480 else
21481 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21482 add_dwarf_attr (die, &attr);
21483 }
21484
21485 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21486
21487 static void
21488 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21489 {
21490 dw_attr_node attr;
21491
21492 attr.dw_attr = DW_AT_discr_list;
21493 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21494 attr.dw_attr_val.val_entry = NULL;
21495 attr.dw_attr_val.v.val_discr_list = discr_list;
21496 add_dwarf_attr (die, &attr);
21497 }
21498
21499 static inline dw_discr_list_ref
21500 AT_discr_list (dw_attr_node *attr)
21501 {
21502 return attr->dw_attr_val.v.val_discr_list;
21503 }
21504
21505 #ifdef VMS_DEBUGGING_INFO
21506 /* Output the debug main pointer die for VMS */
21507
21508 void
21509 dwarf2out_vms_debug_main_pointer (void)
21510 {
21511 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21512 dw_die_ref die;
21513
21514 /* Allocate the VMS debug main subprogram die. */
21515 die = new_die_raw (DW_TAG_subprogram);
21516 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21517 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21518 current_function_funcdef_no);
21519 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21520
21521 /* Make it the first child of comp_unit_die (). */
21522 die->die_parent = comp_unit_die ();
21523 if (comp_unit_die ()->die_child)
21524 {
21525 die->die_sib = comp_unit_die ()->die_child->die_sib;
21526 comp_unit_die ()->die_child->die_sib = die;
21527 }
21528 else
21529 {
21530 die->die_sib = die;
21531 comp_unit_die ()->die_child = die;
21532 }
21533 }
21534 #endif /* VMS_DEBUGGING_INFO */
21535
21536 /* walk_tree helper function for uses_local_type, below. */
21537
21538 static tree
21539 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21540 {
21541 if (!TYPE_P (*tp))
21542 *walk_subtrees = 0;
21543 else
21544 {
21545 tree name = TYPE_NAME (*tp);
21546 if (name && DECL_P (name) && decl_function_context (name))
21547 return *tp;
21548 }
21549 return NULL_TREE;
21550 }
21551
21552 /* If TYPE involves a function-local type (including a local typedef to a
21553 non-local type), returns that type; otherwise returns NULL_TREE. */
21554
21555 static tree
21556 uses_local_type (tree type)
21557 {
21558 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21559 return used;
21560 }
21561
21562 /* Return the DIE for the scope that immediately contains this type.
21563 Non-named types that do not involve a function-local type get global
21564 scope. Named types nested in namespaces or other types get their
21565 containing scope. All other types (i.e. function-local named types) get
21566 the current active scope. */
21567
21568 static dw_die_ref
21569 scope_die_for (tree t, dw_die_ref context_die)
21570 {
21571 dw_die_ref scope_die = NULL;
21572 tree containing_scope;
21573
21574 /* Non-types always go in the current scope. */
21575 gcc_assert (TYPE_P (t));
21576
21577 /* Use the scope of the typedef, rather than the scope of the type
21578 it refers to. */
21579 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21580 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21581 else
21582 containing_scope = TYPE_CONTEXT (t);
21583
21584 /* Use the containing namespace if there is one. */
21585 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21586 {
21587 if (context_die == lookup_decl_die (containing_scope))
21588 /* OK */;
21589 else if (debug_info_level > DINFO_LEVEL_TERSE)
21590 context_die = get_context_die (containing_scope);
21591 else
21592 containing_scope = NULL_TREE;
21593 }
21594
21595 /* Ignore function type "scopes" from the C frontend. They mean that
21596 a tagged type is local to a parmlist of a function declarator, but
21597 that isn't useful to DWARF. */
21598 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21599 containing_scope = NULL_TREE;
21600
21601 if (SCOPE_FILE_SCOPE_P (containing_scope))
21602 {
21603 /* If T uses a local type keep it local as well, to avoid references
21604 to function-local DIEs from outside the function. */
21605 if (current_function_decl && uses_local_type (t))
21606 scope_die = context_die;
21607 else
21608 scope_die = comp_unit_die ();
21609 }
21610 else if (TYPE_P (containing_scope))
21611 {
21612 /* For types, we can just look up the appropriate DIE. */
21613 if (debug_info_level > DINFO_LEVEL_TERSE)
21614 scope_die = get_context_die (containing_scope);
21615 else
21616 {
21617 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21618 if (scope_die == NULL)
21619 scope_die = comp_unit_die ();
21620 }
21621 }
21622 else
21623 scope_die = context_die;
21624
21625 return scope_die;
21626 }
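
/* An illustrative example (not in the original source): for a C++ type

     namespace N { struct S { int i; }; }

   the containing scope of S is the NAMESPACE_DECL for N, so (at debug
   levels above terse) S's DIE is placed under the DW_TAG_namespace DIE for
   N, while a named type declared at file scope that does not use any
   function-local type goes directly under the compilation unit DIE. */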
21627
21628 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21629
21630 static inline int
21631 local_scope_p (dw_die_ref context_die)
21632 {
21633 for (; context_die; context_die = context_die->die_parent)
21634 if (context_die->die_tag == DW_TAG_inlined_subroutine
21635 || context_die->die_tag == DW_TAG_subprogram)
21636 return 1;
21637
21638 return 0;
21639 }
21640
21641 /* Returns nonzero if CONTEXT_DIE is a class. */
21642
21643 static inline int
21644 class_scope_p (dw_die_ref context_die)
21645 {
21646 return (context_die
21647 && (context_die->die_tag == DW_TAG_structure_type
21648 || context_die->die_tag == DW_TAG_class_type
21649 || context_die->die_tag == DW_TAG_interface_type
21650 || context_die->die_tag == DW_TAG_union_type));
21651 }
21652
21653 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21654 whether or not to treat a DIE in this context as a declaration. */
21655
21656 static inline int
21657 class_or_namespace_scope_p (dw_die_ref context_die)
21658 {
21659 return (class_scope_p (context_die)
21660 || (context_die && context_die->die_tag == DW_TAG_namespace));
21661 }
21662
21663 /* Many forms of DIEs require a "type description" attribute. This
21664 routine locates the proper "type descriptor" die for the type given
21665 by 'type' plus any additional qualifiers given by 'cv_quals', and
21666 adds a DW_AT_type attribute below the given die. */
21667
21668 static void
21669 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21670 bool reverse, dw_die_ref context_die)
21671 {
21672 enum tree_code code = TREE_CODE (type);
21673 dw_die_ref type_die = NULL;
21674
21675 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21676 or fixed-point type, use the inner type. This is because we have no
21677 support for unnamed types in base_type_die. This can happen if this is
21678 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21679 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21680 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21681 type = TREE_TYPE (type), code = TREE_CODE (type);
21682
21683 if (code == ERROR_MARK
21684 /* Handle a special case. For functions whose return type is void, we
21685 generate *no* type attribute. (Note that no object may have type
21686 `void', so this only applies to function return types). */
21687 || code == VOID_TYPE)
21688 return;
21689
21690 type_die = modified_type_die (type,
21691 cv_quals | TYPE_QUALS (type),
21692 reverse,
21693 context_die);
21694
21695 if (type_die != NULL)
21696 add_AT_die_ref (object_die, DW_AT_type, type_die);
21697 }
21698
21699 /* Given an object die, add the calling convention attribute for the
21700 function call type. */
21701 static void
21702 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21703 {
21704 enum dwarf_calling_convention value = DW_CC_normal;
21705
21706 value = ((enum dwarf_calling_convention)
21707 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21708
21709 if (is_fortran ()
21710 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21711 {
21712 /* DWARF 2 doesn't provide a way to identify a program's source-level
21713 entry point. DW_AT_calling_convention attributes are only meant
21714 to describe functions' calling conventions. However, lacking a
21715 better way to signal the Fortran main program, we used this for
21716 a long time, following existing custom. Now, DWARF 4 has
21717 DW_AT_main_subprogram, which we add below, but some tools still
21718 rely on the old way, which we thus keep. */
21719 value = DW_CC_program;
21720
21721 if (dwarf_version >= 4 || !dwarf_strict)
21722 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21723 }
21724
21725 /* Only add the attribute if the backend requests it and the value
21726 is not DW_CC_normal. */
21727 if (value && (value != DW_CC_normal))
21728 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21729 }
21730
21731 /* Given a tree pointer to a struct, class, union, or enum type node, return
21732 a pointer to the (string) tag name for the given type, or zero if the type
21733 was declared without a tag. */
21734
21735 static const char *
21736 type_tag (const_tree type)
21737 {
21738 const char *name = 0;
21739
21740 if (TYPE_NAME (type) != 0)
21741 {
21742 tree t = 0;
21743
21744 /* Find the IDENTIFIER_NODE for the type name. */
21745 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21746 && !TYPE_NAMELESS (type))
21747 t = TYPE_NAME (type);
21748
21749 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21750 a TYPE_DECL node, regardless of whether or not a `typedef' was
21751 involved. */
21752 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21753 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21754 {
21755 /* We want to be extra verbose. Don't call dwarf_name if
21756 DECL_NAME isn't set. The default hook for decl_printable_name
21757 doesn't like that, and in this context it's correct to return
21758 0, instead of "<anonymous>" or the like. */
21759 if (DECL_NAME (TYPE_NAME (type))
21760 && !DECL_NAMELESS (TYPE_NAME (type)))
21761 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21762 }
21763
21764 /* Now get the name as a string, or invent one. */
21765 if (!name && t != 0)
21766 name = IDENTIFIER_POINTER (t);
21767 }
21768
21769 return (name == 0 || *name == '\0') ? 0 : name;
21770 }
21771
21772 /* Return the type associated with a data member, making a special check
21773 for bit-field types. */
21774
21775 static inline tree
21776 member_declared_type (const_tree member)
21777 {
21778 return (DECL_BIT_FIELD_TYPE (member)
21779 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21780 }
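
/* An illustrative example (not in the original source): for a bit-field
   member such as

     unsigned int b : 3;

   TREE_TYPE may be a narrowed 3-bit integral type, but DECL_BIT_FIELD_TYPE
   is the declared type unsigned int, so member_declared_type returns
   unsigned int. */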
21781
21782 /* Get the decl's label, as described by its RTL. This may be different
21783 from the DECL_NAME name used in the source file. */
21784
21785 #if 0
21786 static const char *
21787 decl_start_label (tree decl)
21788 {
21789 rtx x;
21790 const char *fnname;
21791
21792 x = DECL_RTL (decl);
21793 gcc_assert (MEM_P (x));
21794
21795 x = XEXP (x, 0);
21796 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21797
21798 fnname = XSTR (x, 0);
21799 return fnname;
21800 }
21801 #endif
21802 \f
21803 /* For variable-length arrays that have been previously generated, but
21804 may be incomplete due to missing subscript info, fill the subscript
21805 info. Return TRUE if this is one of those cases. */
21806 static bool
21807 fill_variable_array_bounds (tree type)
21808 {
21809 if (TREE_ASM_WRITTEN (type)
21810 && TREE_CODE (type) == ARRAY_TYPE
21811 && variably_modified_type_p (type, NULL))
21812 {
21813 dw_die_ref array_die = lookup_type_die (type);
21814 if (!array_die)
21815 return false;
21816 add_subscript_info (array_die, type, !is_ada ());
21817 return true;
21818 }
21819 return false;
21820 }
21821
21822 /* These routines generate the internal representation of the DIE's for
21823 the compilation unit. Debugging information is collected by walking
21824 the declaration trees passed in from dwarf2out_decl(). */
21825
21826 static void
21827 gen_array_type_die (tree type, dw_die_ref context_die)
21828 {
21829 dw_die_ref array_die;
21830
21831 /* GNU compilers represent multidimensional array types as sequences of one
21832 dimensional array types whose element types are themselves array types.
21833 We sometimes squish that down to a single array_type DIE with multiple
21834 subscripts in the Dwarf debugging info. The draft Dwarf specification
21835 says that we are allowed to do this kind of compression in C, because
21836 there is no difference between an array of arrays and a multidimensional
21837 array. We don't do this for Ada, to remain as close as possible to the
21838 actual representation, which is especially important given the language's
21839 flexibility with respect to arrays of variable size. */
21840
21841 bool collapse_nested_arrays = !is_ada ();
21842
21843 if (fill_variable_array_bounds (type))
21844 return;
21845
21846 dw_die_ref scope_die = scope_die_for (type, context_die);
21847 tree element_type;
21848
21849 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21850 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21851 if (TYPE_STRING_FLAG (type)
21852 && TREE_CODE (type) == ARRAY_TYPE
21853 && is_fortran ()
21854 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21855 {
21856 HOST_WIDE_INT size;
21857
21858 array_die = new_die (DW_TAG_string_type, scope_die, type);
21859 add_name_attribute (array_die, type_tag (type));
21860 equate_type_number_to_die (type, array_die);
21861 size = int_size_in_bytes (type);
21862 if (size >= 0)
21863 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21864 /* ??? We can't annotate types late, but for LTO we may not
21865 generate a location early either (gfortran.dg/save_6.f90). */
21866 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21867 && TYPE_DOMAIN (type) != NULL_TREE
21868 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21869 {
21870 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21871 tree rszdecl = szdecl;
21872
21873 size = int_size_in_bytes (TREE_TYPE (szdecl));
21874 if (!DECL_P (szdecl))
21875 {
21876 if (TREE_CODE (szdecl) == INDIRECT_REF
21877 && DECL_P (TREE_OPERAND (szdecl, 0)))
21878 {
21879 rszdecl = TREE_OPERAND (szdecl, 0);
21880 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21881 != DWARF2_ADDR_SIZE)
21882 size = 0;
21883 }
21884 else
21885 size = 0;
21886 }
21887 if (size > 0)
21888 {
21889 dw_loc_list_ref loc
21890 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21891 NULL);
21892 if (loc)
21893 {
21894 add_AT_location_description (array_die, DW_AT_string_length,
21895 loc);
21896 if (size != DWARF2_ADDR_SIZE)
21897 add_AT_unsigned (array_die, dwarf_version >= 5
21898 ? DW_AT_string_length_byte_size
21899 : DW_AT_byte_size, size);
21900 }
21901 }
21902 }
21903 return;
21904 }
21905
21906 array_die = new_die (DW_TAG_array_type, scope_die, type);
21907 add_name_attribute (array_die, type_tag (type));
21908 equate_type_number_to_die (type, array_die);
21909
21910 if (TREE_CODE (type) == VECTOR_TYPE)
21911 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21912
21913 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21914 if (is_fortran ()
21915 && TREE_CODE (type) == ARRAY_TYPE
21916 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21917 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21918 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21919
21920 #if 0
21921 /* We default the array ordering. Debuggers will probably do the right
21922 things even if DW_AT_ordering is not present. It's not even an issue
21923 until we start to get into multidimensional arrays anyway. If a debugger
21924 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21925 then we'll have to put the DW_AT_ordering attribute back in. (But if
21926 and when we find out that we need to put these in, we will only do so
21927 for multidimensional arrays.) */
21928 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21929 #endif
21930
21931 if (TREE_CODE (type) == VECTOR_TYPE)
21932 {
21933 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21934 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21935 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21936 add_bound_info (subrange_die, DW_AT_upper_bound,
21937 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21938 }
21939 else
21940 add_subscript_info (array_die, type, collapse_nested_arrays);
21941
21942 /* Add representation of the type of the elements of this array type and
21943 emit the corresponding DIE if we haven't done it already. */
21944 element_type = TREE_TYPE (type);
21945 if (collapse_nested_arrays)
21946 while (TREE_CODE (element_type) == ARRAY_TYPE)
21947 {
21948 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21949 break;
21950 element_type = TREE_TYPE (element_type);
21951 }
21952
21953 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21954 TREE_CODE (type) == ARRAY_TYPE
21955 && TYPE_REVERSE_STORAGE_ORDER (type),
21956 context_die);
21957
21958 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21959 if (TYPE_ARTIFICIAL (type))
21960 add_AT_flag (array_die, DW_AT_artificial, 1);
21961
21962 if (get_AT (array_die, DW_AT_name))
21963 add_pubtype (type, array_die);
21964
21965 add_alignment_attribute (array_die, type);
21966 }
21967
21968 /* This routine generates a DIE for an array with a hidden descriptor;
21969 the details are filled into *info by a langhook. */
21970
21971 static void
21972 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21973 dw_die_ref context_die)
21974 {
21975 const dw_die_ref scope_die = scope_die_for (type, context_die);
21976 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21977 struct loc_descr_context context = { type, info->base_decl, NULL,
21978 false, false };
21979 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21980 int dim;
21981
21982 add_name_attribute (array_die, type_tag (type));
21983 equate_type_number_to_die (type, array_die);
21984
21985 if (info->ndimensions > 1)
21986 switch (info->ordering)
21987 {
21988 case array_descr_ordering_row_major:
21989 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21990 break;
21991 case array_descr_ordering_column_major:
21992 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21993 break;
21994 default:
21995 break;
21996 }
21997
21998 if (dwarf_version >= 3 || !dwarf_strict)
21999 {
22000 if (info->data_location)
22001 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22002 dw_scalar_form_exprloc, &context);
22003 if (info->associated)
22004 add_scalar_info (array_die, DW_AT_associated, info->associated,
22005 dw_scalar_form_constant
22006 | dw_scalar_form_exprloc
22007 | dw_scalar_form_reference, &context);
22008 if (info->allocated)
22009 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22010 dw_scalar_form_constant
22011 | dw_scalar_form_exprloc
22012 | dw_scalar_form_reference, &context);
22013 if (info->stride)
22014 {
22015 const enum dwarf_attribute attr
22016 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22017 const int forms
22018 = (info->stride_in_bits)
22019 ? dw_scalar_form_constant
22020 : (dw_scalar_form_constant
22021 | dw_scalar_form_exprloc
22022 | dw_scalar_form_reference);
22023
22024 add_scalar_info (array_die, attr, info->stride, forms, &context);
22025 }
22026 }
22027 if (dwarf_version >= 5)
22028 {
22029 if (info->rank)
22030 {
22031 add_scalar_info (array_die, DW_AT_rank, info->rank,
22032 dw_scalar_form_constant
22033 | dw_scalar_form_exprloc, &context);
22034 subrange_tag = DW_TAG_generic_subrange;
22035 context.placeholder_arg = true;
22036 }
22037 }
22038
22039 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22040
22041 for (dim = 0; dim < info->ndimensions; dim++)
22042 {
22043 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22044
22045 if (info->dimen[dim].bounds_type)
22046 add_type_attribute (subrange_die,
22047 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22048 false, context_die);
22049 if (info->dimen[dim].lower_bound)
22050 add_bound_info (subrange_die, DW_AT_lower_bound,
22051 info->dimen[dim].lower_bound, &context);
22052 if (info->dimen[dim].upper_bound)
22053 add_bound_info (subrange_die, DW_AT_upper_bound,
22054 info->dimen[dim].upper_bound, &context);
22055 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22056 add_scalar_info (subrange_die, DW_AT_byte_stride,
22057 info->dimen[dim].stride,
22058 dw_scalar_form_constant
22059 | dw_scalar_form_exprloc
22060 | dw_scalar_form_reference,
22061 &context);
22062 }
22063
22064 gen_type_die (info->element_type, context_die);
22065 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22066 TREE_CODE (type) == ARRAY_TYPE
22067 && TYPE_REVERSE_STORAGE_ORDER (type),
22068 context_die);
22069
22070 if (get_AT (array_die, DW_AT_name))
22071 add_pubtype (type, array_die);
22072
22073 add_alignment_attribute (array_die, type);
22074 }
22075
22076 #if 0
22077 static void
22078 gen_entry_point_die (tree decl, dw_die_ref context_die)
22079 {
22080 tree origin = decl_ultimate_origin (decl);
22081 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22082
22083 if (origin != NULL)
22084 add_abstract_origin_attribute (decl_die, origin);
22085 else
22086 {
22087 add_name_and_src_coords_attributes (decl_die, decl);
22088 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22089 TYPE_UNQUALIFIED, false, context_die);
22090 }
22091
22092 if (DECL_ABSTRACT_P (decl))
22093 equate_decl_number_to_die (decl, decl_die);
22094 else
22095 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22096 }
22097 #endif
22098
22099 /* Walk through the list of incomplete types again, trying once more to
22100 emit full debugging info for them. */
22101
22102 static void
22103 retry_incomplete_types (void)
22104 {
22105 set_early_dwarf s;
22106 int i;
22107
22108 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22109 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22110 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22111 vec_safe_truncate (incomplete_types, 0);
22112 }
22113
22114 /* Determine what tag to use for a record type. */
22115
22116 static enum dwarf_tag
22117 record_type_tag (tree type)
22118 {
22119 if (! lang_hooks.types.classify_record)
22120 return DW_TAG_structure_type;
22121
22122 switch (lang_hooks.types.classify_record (type))
22123 {
22124 case RECORD_IS_STRUCT:
22125 return DW_TAG_structure_type;
22126
22127 case RECORD_IS_CLASS:
22128 return DW_TAG_class_type;
22129
22130 case RECORD_IS_INTERFACE:
22131 if (dwarf_version >= 3 || !dwarf_strict)
22132 return DW_TAG_interface_type;
22133 return DW_TAG_structure_type;
22134
22135 default:
22136 gcc_unreachable ();
22137 }
22138 }
22139
22140 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22141 include all of the information about the enumeration values also. Each
22142 enumerated type name/value is listed as a child of the enumerated type
22143 DIE. */
22144
22145 static dw_die_ref
22146 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22147 {
22148 dw_die_ref type_die = lookup_type_die (type);
22149 dw_die_ref orig_type_die = type_die;
22150
22151 if (type_die == NULL)
22152 {
22153 type_die = new_die (DW_TAG_enumeration_type,
22154 scope_die_for (type, context_die), type);
22155 equate_type_number_to_die (type, type_die);
22156 add_name_attribute (type_die, type_tag (type));
22157 if ((dwarf_version >= 4 || !dwarf_strict)
22158 && ENUM_IS_SCOPED (type))
22159 add_AT_flag (type_die, DW_AT_enum_class, 1);
22160 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22161 add_AT_flag (type_die, DW_AT_declaration, 1);
22162 if (!dwarf_strict)
22163 add_AT_unsigned (type_die, DW_AT_encoding,
22164 TYPE_UNSIGNED (type)
22165 ? DW_ATE_unsigned
22166 : DW_ATE_signed);
22167 }
22168 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22169 return type_die;
22170 else
22171 remove_AT (type_die, DW_AT_declaration);
22172
22173 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22174 given enum type is incomplete, do not generate the DW_AT_byte_size
22175 attribute or the DW_AT_element_list attribute. */
22176 if (TYPE_SIZE (type))
22177 {
22178 tree link;
22179
22180 if (!ENUM_IS_OPAQUE (type))
22181 TREE_ASM_WRITTEN (type) = 1;
22182 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22183 add_byte_size_attribute (type_die, type);
22184 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22185 add_alignment_attribute (type_die, type);
22186 if ((dwarf_version >= 3 || !dwarf_strict)
22187 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22188 {
22189 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22190 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22191 context_die);
22192 }
22193 if (TYPE_STUB_DECL (type) != NULL_TREE)
22194 {
22195 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22196 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22197 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22198 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22199 }
22200
22201 /* If the first reference to this type was as the return type of an
22202 inline function, then it may not have a parent. Fix this now. */
22203 if (type_die->die_parent == NULL)
22204 add_child_die (scope_die_for (type, context_die), type_die);
22205
22206 for (link = TYPE_VALUES (type);
22207 link != NULL; link = TREE_CHAIN (link))
22208 {
22209 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22210 tree value = TREE_VALUE (link);
22211
22212 gcc_assert (!ENUM_IS_OPAQUE (type));
22213 add_name_attribute (enum_die,
22214 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22215
22216 if (TREE_CODE (value) == CONST_DECL)
22217 value = DECL_INITIAL (value);
22218
22219 if (simple_type_size_in_bits (TREE_TYPE (value))
22220 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22221 {
22222 /* For constant forms created by add_AT_unsigned, DWARF
22223 consumers (GDB, elfutils, etc.) always zero-extend
22224 the value. Only when the actual value is negative
22225 do we need to use add_AT_int to generate a constant
22226 form that can represent negative values. */
22227 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22228 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22229 add_AT_unsigned (enum_die, DW_AT_const_value,
22230 (unsigned HOST_WIDE_INT) val);
22231 else
22232 add_AT_int (enum_die, DW_AT_const_value, val);
22233 }
22234 else
22235 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22236 that here. TODO: This should be re-worked to use correct
22237 signed/unsigned double tags for all cases. */
22238 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22239 }
22240
22241 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22242 if (TYPE_ARTIFICIAL (type)
22243 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22244 add_AT_flag (type_die, DW_AT_artificial, 1);
22245 }
22246 else
22247 add_AT_flag (type_die, DW_AT_declaration, 1);
22248
22249 add_pubtype (type, type_die);
22250
22251 return type_die;
22252 }
22253
22254 /* Generate a DIE to represent either a real live formal parameter decl or to
22255 represent just the type of some formal parameter position in some function
22256 type.
22257
22258 Note that this routine is a bit unusual because its argument may be a
22259 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22260 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22261 node. If it's the former then this function is being called to output a
22262 DIE to represent a formal parameter object (or some inlining thereof). If
22263 it's the latter, then this function is only being called to output a
22264 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22265 argument type of some subprogram type.
22266 If EMIT_NAME_P is true, name and source coordinate attributes
22267 are emitted. */
22268
22269 static dw_die_ref
22270 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22271 dw_die_ref context_die)
22272 {
22273 tree node_or_origin = node ? node : origin;
22274 tree ultimate_origin;
22275 dw_die_ref parm_die = NULL;
22276
22277 if (DECL_P (node_or_origin))
22278 {
22279 parm_die = lookup_decl_die (node);
22280
22281 /* If the contexts differ, we may not be talking about the same
22282 thing.
22283 ??? In LTO, the DIE parent is the "abstract" copy and the
22284 context_die is the specification "copy". But this whole block
22285 should eventually no longer be needed. */
22286 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22287 {
22288 if (!DECL_ABSTRACT_P (node))
22289 {
22290 /* This can happen when creating an inlined instance, in
22291 which case we need to create a new DIE that will get
22292 annotated with DW_AT_abstract_origin. */
22293 parm_die = NULL;
22294 }
22295 else
22296 gcc_unreachable ();
22297 }
22298
22299 if (parm_die && parm_die->die_parent == NULL)
22300 {
22301 /* Check that parm_die already has the right attributes that
22302 we would have added below. If any attributes are
22303 missing, fall through to add them. */
22304 if (! DECL_ABSTRACT_P (node_or_origin)
22305 && !get_AT (parm_die, DW_AT_location)
22306 && !get_AT (parm_die, DW_AT_const_value))
22307 /* We are missing location info, and are about to add it. */
22308 ;
22309 else
22310 {
22311 add_child_die (context_die, parm_die);
22312 return parm_die;
22313 }
22314 }
22315 }
22316
22317 /* If we have a previously generated DIE, use it, unless this is a
22318 concrete instance (origin != NULL), in which case we need a new
22319 DIE with a corresponding DW_AT_abstract_origin. */
22320 bool reusing_die;
22321 if (parm_die && origin == NULL)
22322 reusing_die = true;
22323 else
22324 {
22325 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22326 reusing_die = false;
22327 }
22328
22329 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22330 {
22331 case tcc_declaration:
22332 ultimate_origin = decl_ultimate_origin (node_or_origin);
22333 if (node || ultimate_origin)
22334 origin = ultimate_origin;
22335
22336 if (reusing_die)
22337 goto add_location;
22338
22339 if (origin != NULL)
22340 add_abstract_origin_attribute (parm_die, origin);
22341 else if (emit_name_p)
22342 add_name_and_src_coords_attributes (parm_die, node);
22343 if (origin == NULL
22344 || (! DECL_ABSTRACT_P (node_or_origin)
22345 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22346 decl_function_context
22347 (node_or_origin))))
22348 {
22349 tree type = TREE_TYPE (node_or_origin);
22350 if (decl_by_reference_p (node_or_origin))
22351 add_type_attribute (parm_die, TREE_TYPE (type),
22352 TYPE_UNQUALIFIED,
22353 false, context_die);
22354 else
22355 add_type_attribute (parm_die, type,
22356 decl_quals (node_or_origin),
22357 false, context_die);
22358 }
22359 if (origin == NULL && DECL_ARTIFICIAL (node))
22360 add_AT_flag (parm_die, DW_AT_artificial, 1);
22361 add_location:
22362 if (node && node != origin)
22363 equate_decl_number_to_die (node, parm_die);
22364 if (! DECL_ABSTRACT_P (node_or_origin))
22365 add_location_or_const_value_attribute (parm_die, node_or_origin,
22366 node == NULL);
22367
22368 break;
22369
22370 case tcc_type:
22371 /* We were called with some kind of a ..._TYPE node. */
22372 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22373 context_die);
22374 break;
22375
22376 default:
22377 gcc_unreachable ();
22378 }
22379
22380 return parm_die;
22381 }
22382
22383 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22384 children DW_TAG_formal_parameter DIEs representing the arguments of the
22385 parameter pack.
22386
22387 PARM_PACK must be a function parameter pack.
22388 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22389 must point to the subsequent arguments of the function PACK_ARG belongs to.
22390 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22391 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22392 following the last one for which a DIE was generated. */
22393
22394 static dw_die_ref
22395 gen_formal_parameter_pack_die (tree parm_pack,
22396 tree pack_arg,
22397 dw_die_ref subr_die,
22398 tree *next_arg)
22399 {
22400 tree arg;
22401 dw_die_ref parm_pack_die;
22402
22403 gcc_assert (parm_pack
22404 && lang_hooks.function_parameter_pack_p (parm_pack)
22405 && subr_die);
22406
22407 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22408 add_src_coords_attributes (parm_pack_die, parm_pack);
22409
22410 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22411 {
22412 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22413 parm_pack))
22414 break;
22415 gen_formal_parameter_die (arg, NULL,
22416 false /* Don't emit name attribute. */,
22417 parm_pack_die);
22418 }
22419 if (next_arg)
22420 *next_arg = arg;
22421 return parm_pack_die;
22422 }
22423
22424 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22425 at the end of an (ANSI prototyped) formal parameter list. */
22426
22427 static void
22428 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22429 {
22430 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22431 }
22432
22433 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22434 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22435 parameters as specified in some function type specification (except for
22436 those which appear as part of a function *definition*). */
22437
22438 static void
22439 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22440 {
22441 tree link;
22442 tree formal_type = NULL;
22443 tree first_parm_type;
22444 tree arg;
22445
22446 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22447 {
22448 arg = DECL_ARGUMENTS (function_or_method_type);
22449 function_or_method_type = TREE_TYPE (function_or_method_type);
22450 }
22451 else
22452 arg = NULL_TREE;
22453
22454 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22455
22456 /* Make our first pass over the list of formal parameter types and output a
22457 DW_TAG_formal_parameter DIE for each one. */
22458 for (link = first_parm_type; link; )
22459 {
22460 dw_die_ref parm_die;
22461
22462 formal_type = TREE_VALUE (link);
22463 if (formal_type == void_type_node)
22464 break;
22465
22466 /* Output a (nameless) DIE to represent the formal parameter itself. */
22467 parm_die = gen_formal_parameter_die (formal_type, NULL,
22468 true /* Emit name attribute. */,
22469 context_die);
22470 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22471 && link == first_parm_type)
22472 {
22473 add_AT_flag (parm_die, DW_AT_artificial, 1);
22474 if (dwarf_version >= 3 || !dwarf_strict)
22475 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22476 }
22477 else if (arg && DECL_ARTIFICIAL (arg))
22478 add_AT_flag (parm_die, DW_AT_artificial, 1);
22479
22480 link = TREE_CHAIN (link);
22481 if (arg)
22482 arg = DECL_CHAIN (arg);
22483 }
22484
22485 /* If this function type has an ellipsis, add a
22486 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22487 if (formal_type != void_type_node)
22488 gen_unspecified_parameters_die (function_or_method_type, context_die);
22489
22490 /* Make our second (and final) pass over the list of formal parameter types
22491 and output DIEs to represent those types (as necessary). */
22492 for (link = TYPE_ARG_TYPES (function_or_method_type);
22493 link && TREE_VALUE (link);
22494 link = TREE_CHAIN (link))
22495 gen_type_die (TREE_VALUE (link), context_die);
22496 }
22497
22498 /* We want to generate the DIE for TYPE so that we can generate the
22499 DIE for MEMBER, which has been defined; we will need to refer back
22500 to the member declaration nested within TYPE. If we're trying to
22501 generate minimal debug info for TYPE, processing TYPE won't do the
22502 trick; we need to attach the member declaration by hand. */
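/* A simplified illustration: with

       struct S { void f (); };
       void S::f () { }

   if S's own debug info was suppressed here (TYPE_DECL_SUPPRESS_DEBUG on
   its stub decl), the declaration DIE for S::f is still created under S's
   DIE below, so the DIE for the definition has something to refer back
   to.  */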
22503
22504 static void
22505 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22506 {
22507 gen_type_die (type, context_die);
22508
22509 /* If we're trying to avoid duplicate debug info, we may not have
22510 emitted the member decl for this function. Emit it now. */
22511 if (TYPE_STUB_DECL (type)
22512 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22513 && ! lookup_decl_die (member))
22514 {
22515 dw_die_ref type_die;
22516 gcc_assert (!decl_ultimate_origin (member));
22517
22518 type_die = lookup_type_die_strip_naming_typedef (type);
22519 if (TREE_CODE (member) == FUNCTION_DECL)
22520 gen_subprogram_die (member, type_die);
22521 else if (TREE_CODE (member) == FIELD_DECL)
22522 {
22523 /* Ignore the nameless fields that are used to skip bits but handle
22524 C++ anonymous unions and structs. */
22525 if (DECL_NAME (member) != NULL_TREE
22526 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22527 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22528 {
22529 struct vlr_context vlr_ctx = {
22530 DECL_CONTEXT (member), /* struct_type */
22531 NULL_TREE /* variant_part_offset */
22532 };
22533 gen_type_die (member_declared_type (member), type_die);
22534 gen_field_die (member, &vlr_ctx, type_die);
22535 }
22536 }
22537 else
22538 gen_variable_die (member, NULL_TREE, type_die);
22539 }
22540 }
22541 \f
22542 /* Forward declare this function, because it is mutually recursive
22543 with its set_block_* pairing function. */
22544 static void set_decl_origin_self (tree);
22545
22546 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22547 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22548 that it points to the node itself, thus indicating that the node is its
22549 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22550 the given node is NULL, recursively descend the decl/block tree which
22551 it is the root of, and for each other ..._DECL or BLOCK node contained
22552 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22553 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22554 values to point to themselves. */
22555
22556 static void
22557 set_block_origin_self (tree stmt)
22558 {
22559 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22560 {
22561 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22562
22563 {
22564 tree local_decl;
22565
22566 for (local_decl = BLOCK_VARS (stmt);
22567 local_decl != NULL_TREE;
22568 local_decl = DECL_CHAIN (local_decl))
22569 /* Do not recurse on nested functions since the inlining status
22570 of parent and child can be different as per the DWARF spec. */
22571 if (TREE_CODE (local_decl) != FUNCTION_DECL
22572 && !DECL_EXTERNAL (local_decl))
22573 set_decl_origin_self (local_decl);
22574 }
22575
22576 {
22577 tree subblock;
22578
22579 for (subblock = BLOCK_SUBBLOCKS (stmt);
22580 subblock != NULL_TREE;
22581 subblock = BLOCK_CHAIN (subblock))
22582 set_block_origin_self (subblock); /* Recurse. */
22583 }
22584 }
22585 }
22586
22587 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22588 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22589 node so that it points to the node itself, thus indicating that the
22590 node represents its own (abstract) origin. Additionally, if the
22591 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22592 the decl/block tree of which the given node is the root, and for
22593 each other ..._DECL or BLOCK node contained therein whose
22594 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22595 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22596 point to themselves. */
22597
22598 static void
22599 set_decl_origin_self (tree decl)
22600 {
22601 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22602 {
22603 DECL_ABSTRACT_ORIGIN (decl) = decl;
22604 if (TREE_CODE (decl) == FUNCTION_DECL)
22605 {
22606 tree arg;
22607
22608 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22609 DECL_ABSTRACT_ORIGIN (arg) = arg;
22610 if (DECL_INITIAL (decl) != NULL_TREE
22611 && DECL_INITIAL (decl) != error_mark_node)
22612 set_block_origin_self (DECL_INITIAL (decl));
22613 }
22614 }
22615 }
22616 \f
22617 /* Mark the early DIE for DECL as the abstract instance. */
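/* A simplified illustration: for

       static inline int twice (int x) { return 2 * x; }

   the early DIE for TWICE becomes the abstract instance; it gets
   DW_AT_inline (DW_INL_declared_inlined, or DW_INL_declared_not_inlined if
   cgraph never considered it inlined), and the concrete and inlined
   instances emitted later refer back to it via DW_AT_abstract_origin.  */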
22618
22619 static void
22620 dwarf2out_abstract_function (tree decl)
22621 {
22622 dw_die_ref old_die;
22623
22624 /* Make sure we have the actual abstract inline, not a clone. */
22625 decl = DECL_ORIGIN (decl);
22626
22627 if (DECL_IGNORED_P (decl))
22628 return;
22629
22630 /* In LTO we're all set. We already created abstract instances
22631 early and we want to avoid creating a concrete instance of that
22632 if we don't output it. */
22633 if (in_lto_p)
22634 return;
22635
22636 old_die = lookup_decl_die (decl);
22637 gcc_assert (old_die != NULL);
22638 if (get_AT (old_die, DW_AT_inline))
22639 /* We've already generated the abstract instance. */
22640 return;
22641
22642 /* Go ahead and put DW_AT_inline on the DIE. */
22643 if (DECL_DECLARED_INLINE_P (decl))
22644 {
22645 if (cgraph_function_possibly_inlined_p (decl))
22646 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22647 else
22648 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22649 }
22650 else
22651 {
22652 if (cgraph_function_possibly_inlined_p (decl))
22653 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22654 else
22655 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22656 }
22657
22658 if (DECL_DECLARED_INLINE_P (decl)
22659 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22660 add_AT_flag (old_die, DW_AT_artificial, 1);
22661
22662 set_decl_origin_self (decl);
22663 }
22664
22665 /* Helper function of premark_used_types() which gets called through
22666 htab_traverse.
22667
22668 Marks the DIE of the given TYPE as perennial, so it never gets
22669 marked as unused by prune_unused_types. */
22670
22671 bool
22672 premark_used_types_helper (tree const &type, void *)
22673 {
22674 dw_die_ref die;
22675
22676 die = lookup_type_die (type);
22677 if (die != NULL)
22678 die->die_perennial_p = 1;
22679 return true;
22680 }
22681
22682 /* Helper function of premark_types_used_by_global_vars which gets called
22683 through htab_traverse.
22684
22685 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22686 marked as unused by prune_unused_types. The DIE of the type is marked
22687 only if the global variable using the type will actually be emitted. */
22688
22689 int
22690 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22691 void *)
22692 {
22693 struct types_used_by_vars_entry *entry;
22694 dw_die_ref die;
22695
22696 entry = (struct types_used_by_vars_entry *) *slot;
22697 gcc_assert (entry->type != NULL
22698 && entry->var_decl != NULL);
22699 die = lookup_type_die (entry->type);
22700 if (die)
22701 {
22702 /* Ask cgraph if the global variable really is to be emitted.
22703 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22704 varpool_node *node = varpool_node::get (entry->var_decl);
22705 if (node && node->definition)
22706 {
22707 die->die_perennial_p = 1;
22708 /* Keep the parent DIEs as well. */
22709 while ((die = die->die_parent) && die->die_perennial_p == 0)
22710 die->die_perennial_p = 1;
22711 }
22712 }
22713 return 1;
22714 }
22715
22716 /* Mark all members of used_types_hash as perennial. */
22717
22718 static void
22719 premark_used_types (struct function *fun)
22720 {
22721 if (fun && fun->used_types_hash)
22722 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22723 }
22724
22725 /* Mark all members of types_used_by_vars_entry as perennial. */
22726
22727 static void
22728 premark_types_used_by_global_vars (void)
22729 {
22730 if (types_used_by_vars_hash)
22731 types_used_by_vars_hash
22732 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22733 }
22734
22735 /* Mark all variables used by the symtab as perennial. */
22736
22737 static void
22738 premark_used_variables (void)
22739 {
22740 /* Mark DIEs in the symtab as used. */
22741 varpool_node *var;
22742 FOR_EACH_VARIABLE (var)
22743 {
22744 dw_die_ref die = lookup_decl_die (var->decl);
22745 if (die)
22746 die->die_perennial_p = 1;
22747 }
22748 }
22749
22750 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22751 for CA_LOC call arg loc node. */
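/* A simplified illustration: for

       extern int bar (int);
       int foo (int x) { return bar (x); }

   the call to BAR typically yields a DW_TAG_call_site DIE (or the
   DW_TAG_GNU_call_site extension before DWARF 5) under FOO's DIE, carrying
   DW_AT_call_return_pc for the return address, DW_AT_call_tail_call when
   the call was turned into a tail call, and, since the callee is known
   statically, a DW_AT_call_origin reference to BAR's DIE.  */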
22752
22753 static dw_die_ref
22754 gen_call_site_die (tree decl, dw_die_ref subr_die,
22755 struct call_arg_loc_node *ca_loc)
22756 {
22757 dw_die_ref stmt_die = NULL, die;
22758 tree block = ca_loc->block;
22759
22760 while (block
22761 && block != DECL_INITIAL (decl)
22762 && TREE_CODE (block) == BLOCK)
22763 {
22764 stmt_die = lookup_block_die (block);
22765 if (stmt_die)
22766 break;
22767 block = BLOCK_SUPERCONTEXT (block);
22768 }
22769 if (stmt_die == NULL)
22770 stmt_die = subr_die;
22771 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22772 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22773 if (ca_loc->tail_call_p)
22774 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22775 if (ca_loc->symbol_ref)
22776 {
22777 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22778 if (tdie)
22779 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22780 else
22781 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22782 false);
22783 }
22784 return die;
22785 }
22786
22787 /* Generate a DIE to represent a declared function (either file-scope or
22788 block-local). */
22789
22790 static void
22791 gen_subprogram_die (tree decl, dw_die_ref context_die)
22792 {
22793 tree origin = decl_ultimate_origin (decl);
22794 dw_die_ref subr_die;
22795 dw_die_ref old_die = lookup_decl_die (decl);
22796
22797 /* This function gets called multiple times for different stages of
22798 the debug process. For example, for func() in this code:
22799
22800 namespace S
22801 {
22802 void func() { ... }
22803 }
22804
22805 ...we get called 4 times. Twice in early debug and twice in
22806 late debug:
22807
22808 Early debug
22809 -----------
22810
22811 1. Once while generating func() within the namespace. This is
22812 the declaration. The declaration bit below is set, as the
22813 context is the namespace.
22814
22815 A new DIE will be generated with DW_AT_declaration set.
22816
22817 2. Once for func() itself. This is the specification. The
22818 declaration bit below is clear as the context is the CU.
22819
22820 We will use the cached DIE from (1) to create a new DIE with
22821 DW_AT_specification pointing to the declaration in (1).
22822
22823 Late debug via rest_of_handle_final()
22824 -------------------------------------
22825
22826 3. Once generating func() within the namespace. This is also the
22827 declaration, as in (1), but this time we will early exit below
22828 as we have a cached DIE and a declaration needs no additional
22829 annotations (no locations), as the source declaration line
22830 info is enough.
22831
22832 4. Once for func() itself. As in (2), this is the specification,
22833 but this time we will re-use the cached DIE, and just annotate
22834 it with the location information that should now be available.
22835
22836 For something without namespaces, but with abstract instances, we
22837 are also called multiple times:
22838
22839 class Base
22840 {
22841 public:
22842 Base (); // constructor declaration (1)
22843 };
22844
22845 Base::Base () { } // constructor specification (2)
22846
22847 Early debug
22848 -----------
22849
22850 1. Once for the Base() constructor by virtue of it being a
22851 member of the Base class. This is done via
22852 rest_of_type_compilation.
22853
22854 This is a declaration, so a new DIE will be created with
22855 DW_AT_declaration.
22856
22857 2. Once for the Base() constructor definition, but this time
22858 while generating the abstract instance of the base
22859 constructor (__base_ctor) which is being generated via early
22860 debug of reachable functions.
22861
22862 Even though we have a cached version of the declaration (1),
22863 we will create a DW_AT_specification of the declaration DIE
22864 in (1).
22865
22866 3. Once for the __base_ctor itself, but this time, we generate
22867 a DW_AT_abstract_origin version of the DW_AT_specification in
22868 (2).
22869
22870 Late debug via rest_of_handle_final
22871 -----------------------------------
22872
22873 4. One final time for the __base_ctor (which will have a cached
22874 DIE with DW_AT_abstract_origin created in (3)). This time,
22875 we will just annotate the location information now
22876 available.
22877 */
22878 int declaration = (current_function_decl != decl
22879 || class_or_namespace_scope_p (context_die));
22880
22881 /* A declaration that has been previously dumped needs no
22882 additional information. */
22883 if (old_die && declaration)
22884 return;
22885
22886 /* Now that the C++ front end lazily declares artificial member fns, we
22887 might need to retrofit the declaration into its class. */
22888 if (!declaration && !origin && !old_die
22889 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22890 && !class_or_namespace_scope_p (context_die)
22891 && debug_info_level > DINFO_LEVEL_TERSE)
22892 old_die = force_decl_die (decl);
22893
22894 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22895 if (origin != NULL)
22896 {
22897 gcc_assert (!declaration || local_scope_p (context_die));
22898
22899 /* Fixup die_parent for the abstract instance of a nested
22900 inline function. */
22901 if (old_die && old_die->die_parent == NULL)
22902 add_child_die (context_die, old_die);
22903
22904 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22905 {
22906 /* If we have a DW_AT_abstract_origin we have a working
22907 cached version. */
22908 subr_die = old_die;
22909 }
22910 else
22911 {
22912 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22913 add_abstract_origin_attribute (subr_die, origin);
22914 /* This is where the actual code for a cloned function is.
22915 Let's emit linkage name attribute for it. This helps
22916 debuggers to e.g, set breakpoints into
22917 constructors/destructors when the user asks "break
22918 K::K". */
22919 add_linkage_name (subr_die, decl);
22920 }
22921 }
22922 /* A cached copy, possibly from early dwarf generation. Reuse as
22923 much as possible. */
22924 else if (old_die)
22925 {
22926 if (!get_AT_flag (old_die, DW_AT_declaration)
22927 /* We can have a normal definition following an inline one in the
22928 case of redefinition of GNU C extern inlines.
22929 It seems reasonable to use DW_AT_specification in this case. */
22930 && !get_AT (old_die, DW_AT_inline))
22931 {
22932 /* Detect and ignore this case, where we are trying to output
22933 something we have already output. */
22934 if (get_AT (old_die, DW_AT_low_pc)
22935 || get_AT (old_die, DW_AT_ranges))
22936 return;
22937
22938 /* If we have no location information, this must be a
22939 partially generated DIE from early dwarf generation.
22940 Fall through and generate it. */
22941 }
22942
22943 /* If the definition comes from the same place as the declaration,
22944 maybe use the old DIE. We always want the DIE for this function
22945 that has the *_pc attributes to be under comp_unit_die so the
22946 debugger can find it. We also need to do this for abstract
22947 instances of inlines, since the spec requires the out-of-line copy
22948 to have the same parent. For local class methods, this doesn't
22949 apply; we just use the old DIE. */
22950 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22951 struct dwarf_file_data * file_index = lookup_filename (s.file);
22952 if (((is_unit_die (old_die->die_parent)
22953 /* This condition fixes the inconsistency/ICE with the
22954 following Fortran test (or some derivative thereof) while
22955 building libgfortran:
22956
22957 module some_m
22958 contains
22959 logical function funky (FLAG)
22960 funky = .true.
22961 end function
22962 end module
22963 */
22964 || (old_die->die_parent
22965 && old_die->die_parent->die_tag == DW_TAG_module)
22966 || local_scope_p (old_die->die_parent)
22967 || context_die == NULL)
22968 && (DECL_ARTIFICIAL (decl)
22969 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22970 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22971 == (unsigned) s.line)
22972 && (!debug_column_info
22973 || s.column == 0
22974 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22975 == (unsigned) s.column)))))
22976 /* With LTO if there's an abstract instance for
22977 the old DIE, this is a concrete instance and
22978 thus re-use the DIE. */
22979 || get_AT (old_die, DW_AT_abstract_origin))
22980 {
22981 subr_die = old_die;
22982
22983 /* Clear out the declaration attribute, but leave the
22984 parameters so they can be augmented with location
22985 information later. Unless this was a declaration, in
22986 which case, wipe out the nameless parameters and recreate
22987 them further down. */
22988 if (remove_AT (subr_die, DW_AT_declaration))
22989 {
22991 remove_AT (subr_die, DW_AT_object_pointer);
22992 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22993 }
22994 }
22995 /* Make a specification pointing to the previously built
22996 declaration. */
22997 else
22998 {
22999 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23000 add_AT_specification (subr_die, old_die);
23001 add_pubname (decl, subr_die);
23002 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23003 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23004 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23005 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23006 if (debug_column_info
23007 && s.column
23008 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23009 != (unsigned) s.column))
23010 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23011
23012 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
23013 emit the real type on the definition die. */
23014 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23015 {
23016 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23017 if (die == auto_die || die == decltype_auto_die)
23018 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23019 TYPE_UNQUALIFIED, false, context_die);
23020 }
23021
23022 /* When we process the method declaration, we haven't seen
23023 the out-of-class defaulted definition yet, so we have to
23024 recheck now. */
23025 if ((dwarf_version >= 5 || ! dwarf_strict)
23026 && !get_AT (subr_die, DW_AT_defaulted))
23027 {
23028 int defaulted
23029 = lang_hooks.decls.decl_dwarf_attribute (decl,
23030 DW_AT_defaulted);
23031 if (defaulted != -1)
23032 {
23033 /* Other values must have been handled before. */
23034 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23035 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23036 }
23037 }
23038 }
23039 }
23040 /* Create a fresh DIE for anything else. */
23041 else
23042 {
23043 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23044
23045 if (TREE_PUBLIC (decl))
23046 add_AT_flag (subr_die, DW_AT_external, 1);
23047
23048 add_name_and_src_coords_attributes (subr_die, decl);
23049 add_pubname (decl, subr_die);
23050 if (debug_info_level > DINFO_LEVEL_TERSE)
23051 {
23052 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23053 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23054 TYPE_UNQUALIFIED, false, context_die);
23055 }
23056
23057 add_pure_or_virtual_attribute (subr_die, decl);
23058 if (DECL_ARTIFICIAL (decl))
23059 add_AT_flag (subr_die, DW_AT_artificial, 1);
23060
23061 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23062 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23063
23064 add_alignment_attribute (subr_die, decl);
23065
23066 add_accessibility_attribute (subr_die, decl);
23067 }
23068
23069 /* Unless we have an existing non-declaration DIE, equate the new
23070 DIE. */
23071 if (!old_die || is_declaration_die (old_die))
23072 equate_decl_number_to_die (decl, subr_die);
23073
23074 if (declaration)
23075 {
23076 if (!old_die || !get_AT (old_die, DW_AT_inline))
23077 {
23078 add_AT_flag (subr_die, DW_AT_declaration, 1);
23079
23080 /* If this is an explicit function declaration then generate
23081 a DW_AT_explicit attribute. */
23082 if ((dwarf_version >= 3 || !dwarf_strict)
23083 && lang_hooks.decls.decl_dwarf_attribute (decl,
23084 DW_AT_explicit) == 1)
23085 add_AT_flag (subr_die, DW_AT_explicit, 1);
23086
23087 /* If this is a C++11 deleted special function member then generate
23088 a DW_AT_deleted attribute. */
23089 if ((dwarf_version >= 5 || !dwarf_strict)
23090 && lang_hooks.decls.decl_dwarf_attribute (decl,
23091 DW_AT_deleted) == 1)
23092 add_AT_flag (subr_die, DW_AT_deleted, 1);
23093
23094 /* If this is a C++11 defaulted special function member then
23095 generate a DW_AT_defaulted attribute. */
23096 if (dwarf_version >= 5 || !dwarf_strict)
23097 {
23098 int defaulted
23099 = lang_hooks.decls.decl_dwarf_attribute (decl,
23100 DW_AT_defaulted);
23101 if (defaulted != -1)
23102 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23103 }
23104
23105 /* If this is a C++11 non-static member function with & ref-qualifier
23106 then generate a DW_AT_reference attribute. */
23107 if ((dwarf_version >= 5 || !dwarf_strict)
23108 && lang_hooks.decls.decl_dwarf_attribute (decl,
23109 DW_AT_reference) == 1)
23110 add_AT_flag (subr_die, DW_AT_reference, 1);
23111
23112 /* If this is a C++11 non-static member function with &&
23113 ref-qualifier then generate a DW_AT_reference attribute. */
23114 if ((dwarf_version >= 5 || !dwarf_strict)
23115 && lang_hooks.decls.decl_dwarf_attribute (decl,
23116 DW_AT_rvalue_reference)
23117 == 1)
23118 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23119 }
23120 }
23121 /* For non DECL_EXTERNALs, if range information is available, fill
23122 the DIE with it. */
23123 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23124 {
23125 HOST_WIDE_INT cfa_fb_offset;
23126
23127 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23128
23129 if (!crtl->has_bb_partition)
23130 {
23131 dw_fde_ref fde = fun->fde;
23132 if (fde->dw_fde_begin)
23133 {
23134 /* We have already generated the labels. */
23135 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23136 fde->dw_fde_end, false);
23137 }
23138 else
23139 {
23140 /* Create start/end labels and add the range. */
23141 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23142 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23143 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23144 current_function_funcdef_no);
23145 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23146 current_function_funcdef_no);
23147 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23148 false);
23149 }
23150
23151 #if VMS_DEBUGGING_INFO
23152 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23153 Section 2.3 Prologue and Epilogue Attributes:
23154 When a breakpoint is set on entry to a function, it is generally
23155 desirable for execution to be suspended, not on the very first
23156 instruction of the function, but rather at a point after the
23157 function's frame has been set up, after any language defined local
23158 declaration processing has been completed, and before execution of
23159 the first statement of the function begins. Debuggers generally
23160 cannot properly determine where this point is. Similarly for a
23161 breakpoint set on exit from a function. The prologue and epilogue
23162 attributes allow a compiler to communicate the location(s) to use. */
23163
23164 {
23165 if (fde->dw_fde_vms_end_prologue)
23166 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23167 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23168
23169 if (fde->dw_fde_vms_begin_epilogue)
23170 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23171 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23172 }
23173 #endif
23174
23175 }
23176 else
23177 {
23178 /* Generate pubnames entries for the split function code ranges. */
23179 dw_fde_ref fde = fun->fde;
23180
23181 if (fde->dw_fde_second_begin)
23182 {
23183 if (dwarf_version >= 3 || !dwarf_strict)
23184 {
23185 /* We should use ranges for non-contiguous code section
23186 addresses. Use the actual code range for the initial
23187 section, since the HOT/COLD labels might precede an
23188 alignment offset. */
23189 bool range_list_added = false;
23190 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23191 fde->dw_fde_end, &range_list_added,
23192 false);
23193 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23194 fde->dw_fde_second_end,
23195 &range_list_added, false);
23196 if (range_list_added)
23197 add_ranges (NULL);
23198 }
23199 else
23200 {
23201 /* There is no real support in DW2 for this ... so we make
23202 a work-around. First, emit the pub name for the segment
23203 containing the function label. Then make and emit a
23204 simplified subprogram DIE for the second segment with the
23205 name prefixed by __second_sect_of_. We use the same
23206 linkage name for the second DIE so that gdb will find both
23207 sections when given "b foo". */
23208 const char *name = NULL;
23209 tree decl_name = DECL_NAME (decl);
23210 dw_die_ref seg_die;
23211
23212 /* Do the 'primary' section. */
23213 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23214 fde->dw_fde_end, false);
23215
23216 /* Build a minimal DIE for the secondary section. */
23217 seg_die = new_die (DW_TAG_subprogram,
23218 subr_die->die_parent, decl);
23219
23220 if (TREE_PUBLIC (decl))
23221 add_AT_flag (seg_die, DW_AT_external, 1);
23222
23223 if (decl_name != NULL
23224 && IDENTIFIER_POINTER (decl_name) != NULL)
23225 {
23226 name = dwarf2_name (decl, 1);
23227 if (! DECL_ARTIFICIAL (decl))
23228 add_src_coords_attributes (seg_die, decl);
23229
23230 add_linkage_name (seg_die, decl);
23231 }
23232 gcc_assert (name != NULL);
23233 add_pure_or_virtual_attribute (seg_die, decl);
23234 if (DECL_ARTIFICIAL (decl))
23235 add_AT_flag (seg_die, DW_AT_artificial, 1);
23236
23237 name = concat ("__second_sect_of_", name, NULL);
23238 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23239 fde->dw_fde_second_end, false);
23240 add_name_attribute (seg_die, name);
23241 if (want_pubnames ())
23242 add_pubname_string (name, seg_die);
23243 }
23244 }
23245 else
23246 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23247 false);
23248 }
23249
23250 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23251
23252 /* We define the "frame base" as the function's CFA. This is more
23253 convenient for several reasons: (1) It's stable across the prologue
23254 and epilogue, which makes it better than just a frame pointer,
23255 (2) With dwarf3, there exists a one-byte encoding that allows us
23256 to reference the .debug_frame data by proxy, but failing that,
23257 (3) We can at least reuse the code inspection and interpretation
23258 code that determines the CFA position at various points in the
23259 function. */
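/* A rough illustration: when the single-operation form below is usable,
DW_AT_frame_base is simply DW_OP_call_frame_cfa, and a local's
DW_AT_location can then be a DW_OP_fbreg expression whose offset is
adjusted by the displacement computed further below.  */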
23260 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23261 {
23262 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23263 add_AT_loc (subr_die, DW_AT_frame_base, op);
23264 }
23265 else
23266 {
23267 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23268 if (list->dw_loc_next)
23269 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23270 else
23271 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23272 }
23273
23274 /* Compute a displacement from the "steady-state frame pointer" to
23275 the CFA. The former is what all stack slots and argument slots
23276 will reference in the rtl; the latter is what we've told the
23277 debugger about. We'll need to adjust all frame_base references
23278 by this displacement. */
23279 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23280
23281 if (fun->static_chain_decl)
23282 {
23283 /* DWARF requires here a location expression that computes the
23284 address of the enclosing subprogram's frame base. The machinery
23285 in tree-nested.c is supposed to store this specific address in the
23286 last field of the FRAME record. */
23287 const tree frame_type
23288 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23289 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23290
23291 tree fb_expr
23292 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23293 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23294 fb_expr, fb_decl, NULL_TREE);
23295
23296 add_AT_location_description (subr_die, DW_AT_static_link,
23297 loc_list_from_tree (fb_expr, 0, NULL));
23298 }
23299
23300 resolve_variable_values ();
23301 }
23302
23303 /* Generate child DIEs for template parameters. */
23304 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23305 gen_generic_params_dies (decl);
23306
23307 /* Now output descriptions of the arguments for this function. This gets
23308 (unnecessarily?) complex because the DECL_ARGUMENTS list for a
23309 FUNCTION_DECL doesn't indicate cases where there was a trailing
23310 `...' at the end of the formal parameter list. In order to find out if
23311 there was a trailing ellipsis or not, we must instead look at the type
23312 associated with the FUNCTION_DECL. This will be a node of type
23313 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23314 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23315 an ellipsis at the end. */
23316
23317 /* In the case where we are describing a mere function declaration, all we
23318 need to do here (and all we *can* do here) is to describe the *types* of
23319 its formal parameters. */
23320 if (debug_info_level <= DINFO_LEVEL_TERSE)
23321 ;
23322 else if (declaration)
23323 gen_formal_types_die (decl, subr_die);
23324 else
23325 {
23326 /* Generate DIEs to represent all known formal parameters. */
23327 tree parm = DECL_ARGUMENTS (decl);
23328 tree generic_decl = early_dwarf
23329 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23330 tree generic_decl_parm = generic_decl
23331 ? DECL_ARGUMENTS (generic_decl)
23332 : NULL;
23333
23334 /* Now we want to walk the list of parameters of the function and
23335 emit their relevant DIEs.
23336
23337 We consider the case of DECL being an instance of a generic function
23338 as well as it being a normal function.
23339
23340 If DECL is an instance of a generic function we walk the
23341 parameters of the generic function declaration _and_ the parameters of
23342 DECL itself. This is useful because we want to emit specific DIEs for
23343 function parameter packs and those are declared as part of the
23344 generic function declaration. In that particular case,
23345 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23346 That DIE has children DIEs representing the set of arguments
23347 of the pack. Note that the set of pack arguments can be empty.
23348 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23349 child DIEs.
23350
23351 Otherwise, we just consider the parameters of DECL. */
23352 while (generic_decl_parm || parm)
23353 {
23354 if (generic_decl_parm
23355 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23356 gen_formal_parameter_pack_die (generic_decl_parm,
23357 parm, subr_die,
23358 &parm);
23359 else if (parm)
23360 {
23361 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23362
23363 if (early_dwarf
23364 && parm == DECL_ARGUMENTS (decl)
23365 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23366 && parm_die
23367 && (dwarf_version >= 3 || !dwarf_strict))
23368 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23369
23370 parm = DECL_CHAIN (parm);
23371 }
23372
23373 if (generic_decl_parm)
23374 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23375 }
23376
23377 /* Decide whether we need an unspecified_parameters DIE at the end.
23378 There are 2 more cases to do this for: 1) the ANSI `...' declaration -
23379 this is detectable when the end of the arg list is not a
23380 void_type_node; 2) an unprototyped function declaration (not a
23381 definition). This just means that we have no info about the
23382 parameters at all. */
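/* For instance (illustrative only):

       int f (int, ...);    (case 1: trailing ellipsis)
       int g ();            (case 2: unprototyped declaration, in C)

   both of these call for a DW_TAG_unspecified_parameters child here.  */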
23383 if (early_dwarf)
23384 {
23385 if (prototype_p (TREE_TYPE (decl)))
23386 {
23387 /* This is the prototyped case, check for.... */
23388 if (stdarg_p (TREE_TYPE (decl)))
23389 gen_unspecified_parameters_die (decl, subr_die);
23390 }
23391 else if (DECL_INITIAL (decl) == NULL_TREE)
23392 gen_unspecified_parameters_die (decl, subr_die);
23393 }
23394 }
23395
23396 if (subr_die != old_die)
23397 /* Add the calling convention attribute if requested. */
23398 add_calling_convention_attribute (subr_die, decl);
23399
23400 /* Output Dwarf info for all of the stuff within the body of the function
23401 (if it has one - it may be just a declaration).
23402
23403 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23404 a function. This BLOCK actually represents the outermost binding contour
23405 for the function, i.e. the contour in which the function's formal
23406 parameters and labels get declared. Curiously, it appears that the front
23407 end doesn't actually put the PARM_DECL nodes for the current function onto
23408 the BLOCK_VARS list for this outer scope, but are strung off of the
23409 DECL_ARGUMENTS list for the function instead.
23410
23411 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23412 the LABEL_DECL nodes for the function however, and we output DWARF info
23413 for those in decls_for_scope. Just within the `outer_scope' there will be
23414 a BLOCK node representing the function's outermost pair of curly braces,
23415 and any blocks used for the base and member initializers of a C++
23416 constructor function. */
23417 tree outer_scope = DECL_INITIAL (decl);
23418 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23419 {
23420 int call_site_note_count = 0;
23421 int tail_call_site_note_count = 0;
23422
23423 /* Emit a DW_TAG_variable DIE for a named return value. */
23424 if (DECL_NAME (DECL_RESULT (decl)))
23425 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23426
23427 /* The first time through decls_for_scope we will generate the
23428 DIEs for the locals. The second time, we fill in the
23429 location info. */
23430 decls_for_scope (outer_scope, subr_die);
23431
23432 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23433 {
23434 struct call_arg_loc_node *ca_loc;
23435 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23436 {
23437 dw_die_ref die = NULL;
23438 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23439 rtx arg, next_arg;
23440 tree arg_decl = NULL_TREE;
23441
23442 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23443 ? XEXP (ca_loc->call_arg_loc_note, 0)
23444 : NULL_RTX);
23445 arg; arg = next_arg)
23446 {
23447 dw_loc_descr_ref reg, val;
23448 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23449 dw_die_ref cdie, tdie = NULL;
23450
23451 next_arg = XEXP (arg, 1);
23452 if (REG_P (XEXP (XEXP (arg, 0), 0))
23453 && next_arg
23454 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23455 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23456 && REGNO (XEXP (XEXP (arg, 0), 0))
23457 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23458 next_arg = XEXP (next_arg, 1);
23459 if (mode == VOIDmode)
23460 {
23461 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23462 if (mode == VOIDmode)
23463 mode = GET_MODE (XEXP (arg, 0));
23464 }
23465 if (mode == VOIDmode || mode == BLKmode)
23466 continue;
23467 /* Get dynamic information about call target only if we
23468 have no static information: we cannot generate both
23469 DW_AT_call_origin and DW_AT_call_target
23470 attributes. */
23471 if (ca_loc->symbol_ref == NULL_RTX)
23472 {
23473 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23474 {
23475 tloc = XEXP (XEXP (arg, 0), 1);
23476 continue;
23477 }
23478 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23479 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23480 {
23481 tlocc = XEXP (XEXP (arg, 0), 1);
23482 continue;
23483 }
23484 }
23485 reg = NULL;
23486 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23487 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23488 VAR_INIT_STATUS_INITIALIZED);
23489 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23490 {
23491 rtx mem = XEXP (XEXP (arg, 0), 0);
23492 reg = mem_loc_descriptor (XEXP (mem, 0),
23493 get_address_mode (mem),
23494 GET_MODE (mem),
23495 VAR_INIT_STATUS_INITIALIZED);
23496 }
23497 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23498 == DEBUG_PARAMETER_REF)
23499 {
23500 tree tdecl
23501 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23502 tdie = lookup_decl_die (tdecl);
23503 if (tdie == NULL)
23504 continue;
23505 arg_decl = tdecl;
23506 }
23507 else
23508 continue;
23509 if (reg == NULL
23510 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23511 != DEBUG_PARAMETER_REF)
23512 continue;
23513 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23514 VOIDmode,
23515 VAR_INIT_STATUS_INITIALIZED);
23516 if (val == NULL)
23517 continue;
23518 if (die == NULL)
23519 die = gen_call_site_die (decl, subr_die, ca_loc);
23520 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23521 NULL_TREE);
23522 add_desc_attribute (cdie, arg_decl);
23523 if (reg != NULL)
23524 add_AT_loc (cdie, DW_AT_location, reg);
23525 else if (tdie != NULL)
23526 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23527 tdie);
23528 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23529 if (next_arg != XEXP (arg, 1))
23530 {
23531 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23532 if (mode == VOIDmode)
23533 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23534 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23535 0), 1),
23536 mode, VOIDmode,
23537 VAR_INIT_STATUS_INITIALIZED);
23538 if (val != NULL)
23539 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23540 val);
23541 }
23542 }
23543 if (die == NULL
23544 && (ca_loc->symbol_ref || tloc))
23545 die = gen_call_site_die (decl, subr_die, ca_loc);
23546 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23547 {
23548 dw_loc_descr_ref tval = NULL;
23549
23550 if (tloc != NULL_RTX)
23551 tval = mem_loc_descriptor (tloc,
23552 GET_MODE (tloc) == VOIDmode
23553 ? Pmode : GET_MODE (tloc),
23554 VOIDmode,
23555 VAR_INIT_STATUS_INITIALIZED);
23556 if (tval)
23557 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23558 else if (tlocc != NULL_RTX)
23559 {
23560 tval = mem_loc_descriptor (tlocc,
23561 GET_MODE (tlocc) == VOIDmode
23562 ? Pmode : GET_MODE (tlocc),
23563 VOIDmode,
23564 VAR_INIT_STATUS_INITIALIZED);
23565 if (tval)
23566 add_AT_loc (die,
23567 dwarf_AT (DW_AT_call_target_clobbered),
23568 tval);
23569 }
23570 }
23571 if (die != NULL)
23572 {
23573 call_site_note_count++;
23574 if (ca_loc->tail_call_p)
23575 tail_call_site_note_count++;
23576 }
23577 }
23578 }
23579 call_arg_locations = NULL;
23580 call_arg_loc_last = NULL;
23581 if (tail_call_site_count >= 0
23582 && tail_call_site_count == tail_call_site_note_count
23583 && (!dwarf_strict || dwarf_version >= 5))
23584 {
23585 if (call_site_count >= 0
23586 && call_site_count == call_site_note_count)
23587 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23588 else
23589 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23590 }
23591 call_site_count = -1;
23592 tail_call_site_count = -1;
23593 }
23594
23595 /* Mark used types after we have created DIEs for the function's scopes. */
23596 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23597 }
23598
23599 /* Returns a hash value for X (which really is a die_struct). */
23600
23601 hashval_t
23602 block_die_hasher::hash (die_struct *d)
23603 {
23604 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23605 }
23606
23607 /* Return true if the decl_id and die_parent of die_struct X are the same
23608 as those of die_struct Y. */
23609
23610 bool
23611 block_die_hasher::equal (die_struct *x, die_struct *y)
23612 {
23613 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23614 }
23615
23616 /* Hold information about markers for inlined entry points. */
23617 struct GTY ((for_user)) inline_entry_data
23618 {
23619 /* The block that's the inlined_function_outer_scope for an inlined
23620 function. */
23621 tree block;
23622
23623 /* The label at the inlined entry point. */
23624 const char *label_pfx;
23625 unsigned int label_num;
23626
23627 /* The view number to be used as the inlined entry point. */
23628 var_loc_view view;
23629 };
23630
23631 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23632 {
23633 typedef tree compare_type;
23634 static inline hashval_t hash (const inline_entry_data *);
23635 static inline bool equal (const inline_entry_data *, const_tree);
23636 };
23637
23638 /* Hash table routines for inline_entry_data. */
23639
23640 inline hashval_t
23641 inline_entry_data_hasher::hash (const inline_entry_data *data)
23642 {
23643 return htab_hash_pointer (data->block);
23644 }
23645
23646 inline bool
23647 inline_entry_data_hasher::equal (const inline_entry_data *data,
23648 const_tree block)
23649 {
23650 return data->block == block;
23651 }
23652
23653 /* Inlined entry points pending DIE creation in this compilation unit. */
23654
23655 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23656
23657
23658 /* Return TRUE if DECL, which may have been previously generated as
23659 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23660 true if decl (or its origin) is either an extern declaration or a
23661 class/namespace scoped declaration.
23662
23663 The declare_in_namespace support causes us to get two DIEs for one
23664 variable, both of which are declarations. We want to avoid
23665 considering one to be a specification, so we must test for
23666 DECLARATION and DW_AT_declaration. */
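/* A simplified illustration: for a class-level static such as

       struct S { static int x; };
       int S::x = 1;

   the out-of-class definition gets a fresh DIE with DW_AT_specification
   pointing back at the in-class declaration DIE, instead of repeating its
   name and type there.  */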
23667 static inline bool
23668 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23669 {
23670 return (old_die && TREE_STATIC (decl) && !declaration
23671 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23672 }
23673
23674 /* Return true if DECL is a local static. */
23675
23676 static inline bool
23677 local_function_static (tree decl)
23678 {
23679 gcc_assert (VAR_P (decl));
23680 return TREE_STATIC (decl)
23681 && DECL_CONTEXT (decl)
23682 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23683 }
23684
23685 /* Generate a DIE to represent a declared data object.
23686 Either DECL or ORIGIN must be non-null. */
23687
23688 static void
23689 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23690 {
23691 HOST_WIDE_INT off = 0;
23692 tree com_decl;
23693 tree decl_or_origin = decl ? decl : origin;
23694 tree ultimate_origin;
23695 dw_die_ref var_die;
23696 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23697 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23698 || class_or_namespace_scope_p (context_die));
23699 bool specialization_p = false;
23700 bool no_linkage_name = false;
23701
23702 /* While C++ inline static data members have definitions inside the
23703 class, force the first DIE to be a declaration, then let gen_member_die
23704 reparent it to the class context and call gen_variable_die again
23705 to create the outside-of-class DIE for the definition. */
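/* A simplified illustration: for a C++17 inline static data member

       struct S { static inline int counter = 0; };

   the DIE created by this first call is forced to be a declaration inside
   S; the DIE for the definition outside the class is created by the later
   call described above.  */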
23706 if (!declaration
23707 && old_die == NULL
23708 && decl
23709 && DECL_CONTEXT (decl)
23710 && TYPE_P (DECL_CONTEXT (decl))
23711 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23712 {
23713 declaration = true;
23714 if (dwarf_version < 5)
23715 no_linkage_name = true;
23716 }
23717
23718 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23719 if (decl || ultimate_origin)
23720 origin = ultimate_origin;
23721 com_decl = fortran_common (decl_or_origin, &off);
23722
23723 /* A symbol in a common block gets emitted as a child of the common block,
23724 in the form of a data member. */
23725 if (com_decl)
23726 {
23727 dw_die_ref com_die;
23728 dw_loc_list_ref loc = NULL;
23729 die_node com_die_arg;
23730
23731 var_die = lookup_decl_die (decl_or_origin);
23732 if (var_die)
23733 {
23734 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23735 {
23736 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23737 if (loc)
23738 {
23739 if (off)
23740 {
23741 /* Optimize the common case. */
23742 if (single_element_loc_list_p (loc)
23743 && loc->expr->dw_loc_opc == DW_OP_addr
23744 && loc->expr->dw_loc_next == NULL
23745 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23746 == SYMBOL_REF)
23747 {
23748 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23749 loc->expr->dw_loc_oprnd1.v.val_addr
23750 = plus_constant (GET_MODE (x), x , off);
23751 }
23752 else
23753 loc_list_plus_const (loc, off);
23754 }
23755 add_AT_location_description (var_die, DW_AT_location, loc);
23756 remove_AT (var_die, DW_AT_declaration);
23757 }
23758 }
23759 return;
23760 }
23761
23762 if (common_block_die_table == NULL)
23763 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23764
23765 com_die_arg.decl_id = DECL_UID (com_decl);
23766 com_die_arg.die_parent = context_die;
23767 com_die = common_block_die_table->find (&com_die_arg);
23768 if (! early_dwarf)
23769 loc = loc_list_from_tree (com_decl, 2, NULL);
23770 if (com_die == NULL)
23771 {
23772 const char *cnam
23773 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23774 die_node **slot;
23775
23776 com_die = new_die (DW_TAG_common_block, context_die, decl);
23777 add_name_and_src_coords_attributes (com_die, com_decl);
23778 if (loc)
23779 {
23780 add_AT_location_description (com_die, DW_AT_location, loc);
23781 /* Avoid sharing the same loc descriptor between
23782 DW_TAG_common_block and DW_TAG_variable. */
23783 loc = loc_list_from_tree (com_decl, 2, NULL);
23784 }
23785 else if (DECL_EXTERNAL (decl_or_origin))
23786 add_AT_flag (com_die, DW_AT_declaration, 1);
23787 if (want_pubnames ())
23788 add_pubname_string (cnam, com_die); /* ??? needed? */
23789 com_die->decl_id = DECL_UID (com_decl);
23790 slot = common_block_die_table->find_slot (com_die, INSERT);
23791 *slot = com_die;
23792 }
23793 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23794 {
23795 add_AT_location_description (com_die, DW_AT_location, loc);
23796 loc = loc_list_from_tree (com_decl, 2, NULL);
23797 remove_AT (com_die, DW_AT_declaration);
23798 }
23799 var_die = new_die (DW_TAG_variable, com_die, decl);
23800 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23801 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23802 decl_quals (decl_or_origin), false,
23803 context_die);
23804 add_alignment_attribute (var_die, decl);
23805 add_AT_flag (var_die, DW_AT_external, 1);
23806 if (loc)
23807 {
23808 if (off)
23809 {
23810 /* Optimize the common case. */
23811 if (single_element_loc_list_p (loc)
23812 && loc->expr->dw_loc_opc == DW_OP_addr
23813 && loc->expr->dw_loc_next == NULL
23814 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23815 {
23816 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23817 loc->expr->dw_loc_oprnd1.v.val_addr
23818 = plus_constant (GET_MODE (x), x, off);
23819 }
23820 else
23821 loc_list_plus_const (loc, off);
23822 }
23823 add_AT_location_description (var_die, DW_AT_location, loc);
23824 }
23825 else if (DECL_EXTERNAL (decl_or_origin))
23826 add_AT_flag (var_die, DW_AT_declaration, 1);
23827 if (decl)
23828 equate_decl_number_to_die (decl, var_die);
23829 return;
23830 }
23831
23832 if (old_die)
23833 {
23834 if (declaration)
23835 {
23836 /* A declaration that has been previously dumped needs no
23837 further annotations, since it doesn't need location on
23838 the second pass. */
23839 return;
23840 }
23841 else if (decl_will_get_specification_p (old_die, decl, declaration)
23842 && !get_AT (old_die, DW_AT_specification))
23843 {
23844 /* Fall-thru so we can make a new variable die along with a
23845 DW_AT_specification. */
23846 }
23847 else if (origin && old_die->die_parent != context_die)
23848 {
23849 /* If we will be creating an inlined instance, we need a
23850 new DIE that will get annotated with
23851 DW_AT_abstract_origin. */
23852 gcc_assert (!DECL_ABSTRACT_P (decl));
23853 }
23854 else
23855 {
23856 /* If a DIE was dumped early, it still needs location info.
23857 Skip to where we fill the location bits. */
23858 var_die = old_die;
23859
23860 /* ??? In LTRANS we cannot annotate early created variably
23861 modified type DIEs without copying them and adjusting all
23862 references to them. Thus we dumped them again. Also add a
23863 reference to them, but beware of a -g0 compile and -g link,
23864 in which case the reference will already be present. */
23865 tree type = TREE_TYPE (decl_or_origin);
23866 if (in_lto_p
23867 && ! get_AT (var_die, DW_AT_type)
23868 && variably_modified_type_p
23869 (type, decl_function_context (decl_or_origin)))
23870 {
23871 if (decl_by_reference_p (decl_or_origin))
23872 add_type_attribute (var_die, TREE_TYPE (type),
23873 TYPE_UNQUALIFIED, false, context_die);
23874 else
23875 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23876 false, context_die);
23877 }
23878
23879 goto gen_variable_die_location;
23880 }
23881 }
23882
23883 /* For static data members, the declaration in the class is supposed
23884 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23885 also in DWARF2; the specification should still be DW_TAG_variable
23886 referencing the DW_TAG_member DIE. */
23887 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23888 var_die = new_die (DW_TAG_member, context_die, decl);
23889 else
23890 var_die = new_die (DW_TAG_variable, context_die, decl);
23891
23892 if (origin != NULL)
23893 add_abstract_origin_attribute (var_die, origin);
23894
23895 /* Loop unrolling can create multiple blocks that refer to the same
23896 static variable, so we must test for the DW_AT_declaration flag.
23897
23898 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23899 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23900 sharing them.
23901
23902 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23903 else if (decl_will_get_specification_p (old_die, decl, declaration))
23904 {
23905 /* This is a definition of a C++ class level static. */
23906 add_AT_specification (var_die, old_die);
23907 specialization_p = true;
23908 if (DECL_NAME (decl))
23909 {
23910 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23911 struct dwarf_file_data * file_index = lookup_filename (s.file);
23912
23913 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23914 add_AT_file (var_die, DW_AT_decl_file, file_index);
23915
23916 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23917 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23918
23919 if (debug_column_info
23920 && s.column
23921 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23922 != (unsigned) s.column))
23923 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23924
23925 if (old_die->die_tag == DW_TAG_member)
23926 add_linkage_name (var_die, decl);
23927 }
23928 }
23929 else
23930 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23931
23932 if ((origin == NULL && !specialization_p)
23933 || (origin != NULL
23934 && !DECL_ABSTRACT_P (decl_or_origin)
23935 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23936 decl_function_context
23937 (decl_or_origin))))
23938 {
23939 tree type = TREE_TYPE (decl_or_origin);
23940
23941 if (decl_by_reference_p (decl_or_origin))
23942 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23943 context_die);
23944 else
23945 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23946 context_die);
23947 }
23948
23949 if (origin == NULL && !specialization_p)
23950 {
23951 if (TREE_PUBLIC (decl))
23952 add_AT_flag (var_die, DW_AT_external, 1);
23953
23954 if (DECL_ARTIFICIAL (decl))
23955 add_AT_flag (var_die, DW_AT_artificial, 1);
23956
23957 add_alignment_attribute (var_die, decl);
23958
23959 add_accessibility_attribute (var_die, decl);
23960 }
23961
23962 if (declaration)
23963 add_AT_flag (var_die, DW_AT_declaration, 1);
23964
23965 if (decl && (DECL_ABSTRACT_P (decl)
23966 || !old_die || is_declaration_die (old_die)))
23967 equate_decl_number_to_die (decl, var_die);
23968
23969 gen_variable_die_location:
23970 if (! declaration
23971 && (! DECL_ABSTRACT_P (decl_or_origin)
23972 /* Local static vars are shared between all clones/inlines,
23973 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23974 already set. */
23975 || (VAR_P (decl_or_origin)
23976 && TREE_STATIC (decl_or_origin)
23977 && DECL_RTL_SET_P (decl_or_origin))))
23978 {
23979 if (early_dwarf)
23980 add_pubname (decl_or_origin, var_die);
23981 else
23982 add_location_or_const_value_attribute (var_die, decl_or_origin,
23983 decl == NULL);
23984 }
23985 else
23986 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23987
23988 if ((dwarf_version >= 4 || !dwarf_strict)
23989 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23990 DW_AT_const_expr) == 1
23991 && !get_AT (var_die, DW_AT_const_expr)
23992 && !specialization_p)
23993 add_AT_flag (var_die, DW_AT_const_expr, 1);
23994
23995 if (!dwarf_strict)
23996 {
23997 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23998 DW_AT_inline);
23999 if (inl != -1
24000 && !get_AT (var_die, DW_AT_inline)
24001 && !specialization_p)
24002 add_AT_unsigned (var_die, DW_AT_inline, inl);
24003 }
24004 }
24005
24006 /* Generate a DIE to represent a named constant. */
24007
24008 static void
24009 gen_const_die (tree decl, dw_die_ref context_die)
24010 {
24011 dw_die_ref const_die;
24012 tree type = TREE_TYPE (decl);
24013
24014 const_die = lookup_decl_die (decl);
24015 if (const_die)
24016 return;
24017
24018 const_die = new_die (DW_TAG_constant, context_die, decl);
24019 equate_decl_number_to_die (decl, const_die);
24020 add_name_and_src_coords_attributes (const_die, decl);
24021 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24022 if (TREE_PUBLIC (decl))
24023 add_AT_flag (const_die, DW_AT_external, 1);
24024 if (DECL_ARTIFICIAL (decl))
24025 add_AT_flag (const_die, DW_AT_artificial, 1);
24026 tree_add_const_value_attribute_for_decl (const_die, decl);
24027 }
24028
24029 /* Generate a DIE to represent a label identifier. */
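/* A simplified illustration: in

       void f (int *p) { again: ++*p; goto again; }

   the user label AGAIN gets a DW_TAG_label DIE; outside of early debug its
   DW_AT_low_pc comes from the CODE_LABEL (or deleted-label note) emitted
   for it, so a breakpoint can still be placed on the label.  */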
24030
24031 static void
24032 gen_label_die (tree decl, dw_die_ref context_die)
24033 {
24034 tree origin = decl_ultimate_origin (decl);
24035 dw_die_ref lbl_die = lookup_decl_die (decl);
24036 rtx insn;
24037 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24038
24039 if (!lbl_die)
24040 {
24041 lbl_die = new_die (DW_TAG_label, context_die, decl);
24042 equate_decl_number_to_die (decl, lbl_die);
24043
24044 if (origin != NULL)
24045 add_abstract_origin_attribute (lbl_die, origin);
24046 else
24047 add_name_and_src_coords_attributes (lbl_die, decl);
24048 }
24049
24050 if (DECL_ABSTRACT_P (decl))
24051 equate_decl_number_to_die (decl, lbl_die);
24052 else if (! early_dwarf)
24053 {
24054 insn = DECL_RTL_IF_SET (decl);
24055
24056 /* Deleted labels are programmer-specified labels which have been
24057 eliminated because of various optimizations. We still emit them
24058 here so that it is possible to put breakpoints on them. */
24059 if (insn
24060 && (LABEL_P (insn)
24061 || ((NOTE_P (insn)
24062 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24063 {
24064 /* When optimization is enabled (via -O) some parts of the compiler
24065 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24066 represent source-level labels which were explicitly declared by
24067 the user. This really shouldn't be happening though, so catch
24068 it if it ever does happen. */
24069 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24070
24071 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24072 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24073 }
24074 else if (insn
24075 && NOTE_P (insn)
24076 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24077 && CODE_LABEL_NUMBER (insn) != -1)
24078 {
24079 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24080 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24081 }
24082 }
24083 }
24084
24085 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24086 attributes to the DIE for a block STMT, to describe where the inlined
24087 function was called from. This is similar to add_src_coords_attributes. */
24088
24089 static inline void
24090 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24091 {
24092 /* We can end up with BUILTINS_LOCATION here. */
24093 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24094 return;
24095
24096 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24097
24098 if (dwarf_version >= 3 || !dwarf_strict)
24099 {
24100 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24101 add_AT_unsigned (die, DW_AT_call_line, s.line);
24102 if (debug_column_info && s.column)
24103 add_AT_unsigned (die, DW_AT_call_column, s.column);
24104 }
24105 }
24106
24107
24108 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24109 Add low_pc and high_pc attributes to the DIE for a block STMT. */
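/* Illustrative sketch of the two cases handled below: a contiguous block
   gets a low/high pc pair built from its begin/end labels, roughly

       DW_AT_low_pc   .LBB<n>
       DW_AT_high_pc  .LBE<n>

   while a block split across hot/cold sections (BLOCK_FRAGMENT_CHAIN) gets
   a DW_AT_ranges list covering every fragment instead, when the DWARF
   version allows it.  The label prefix is target-dependent, so treat the
   names above only as a sketch.  */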
24110
24111 static inline void
24112 add_high_low_attributes (tree stmt, dw_die_ref die)
24113 {
24114 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24115
24116 if (inline_entry_data **iedp
24117 = !inline_entry_data_table ? NULL
24118 : inline_entry_data_table->find_slot_with_hash (stmt,
24119 htab_hash_pointer (stmt),
24120 NO_INSERT))
24121 {
24122 inline_entry_data *ied = *iedp;
24123 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24124 gcc_assert (debug_inline_points);
24125 gcc_assert (inlined_function_outer_scope_p (stmt));
24126
24127 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24128 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24129
24130 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24131 && !dwarf_strict)
24132 {
24133 if (!output_asm_line_debug_info ())
24134 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24135 else
24136 {
24137 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24138 /* FIXME: this will resolve to a small number. Could we
24139 possibly emit smaller data? Ideally we'd emit a
24140 uleb128, but that would make the size of DIEs
24141 impossible for the compiler to compute, since it's
24142 the assembler that computes the value of the view
24143 label in this case. Ideally, we'd have a single form
24144 encompassing both the address and the view, and
24145 indirecting them through a table might make things
24146 easier, but even that would be more wasteful,
24147 space-wise, than what we have now. */
24148 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24149 }
24150 }
24151
24152 inline_entry_data_table->clear_slot (iedp);
24153 }
24154
24155 if (BLOCK_FRAGMENT_CHAIN (stmt)
24156 && (dwarf_version >= 3 || !dwarf_strict))
24157 {
24158 tree chain, superblock = NULL_TREE;
24159 dw_die_ref pdie;
24160 dw_attr_node *attr = NULL;
24161
24162 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24163 {
24164 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24165 BLOCK_NUMBER (stmt));
24166 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24167 }
24168
24169 /* Optimize duplicate .debug_ranges lists or even tails of
24170 lists.  If this BLOCK has the same ranges as its supercontext,
24171 look up the DW_AT_ranges attribute in the supercontext (and
24172 recursively so), verify that the ranges_table contains the
24173 right values and use it instead of adding a new .debug_ranges list. */
24174 for (chain = stmt, pdie = die;
24175 BLOCK_SAME_RANGE (chain);
24176 chain = BLOCK_SUPERCONTEXT (chain))
24177 {
24178 dw_attr_node *new_attr;
24179
24180 pdie = pdie->die_parent;
24181 if (pdie == NULL)
24182 break;
24183 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24184 break;
24185 new_attr = get_AT (pdie, DW_AT_ranges);
24186 if (new_attr == NULL
24187 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24188 break;
24189 attr = new_attr;
24190 superblock = BLOCK_SUPERCONTEXT (chain);
24191 }
24192 if (attr != NULL
24193 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24194 == (int)BLOCK_NUMBER (superblock))
24195 && BLOCK_FRAGMENT_CHAIN (superblock))
24196 {
24197 unsigned long off = attr->dw_attr_val.v.val_offset;
24198 unsigned long supercnt = 0, thiscnt = 0;
24199 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24200 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24201 {
24202 ++supercnt;
24203 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24204 == (int)BLOCK_NUMBER (chain));
24205 }
24206 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24207 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24208 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24209 ++thiscnt;
24210 gcc_assert (supercnt >= thiscnt);
24211 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24212 false);
24213 note_rnglist_head (off + supercnt - thiscnt);
24214 return;
24215 }
24216
24217 unsigned int offset = add_ranges (stmt, true);
24218 add_AT_range_list (die, DW_AT_ranges, offset, false);
24219 note_rnglist_head (offset);
24220
24221 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24222 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24223 do
24224 {
24225 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24226 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24227 chain = BLOCK_FRAGMENT_CHAIN (chain);
24228 }
24229 while (chain);
24230 add_ranges (NULL);
24231 }
24232 else
24233 {
24234 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24235 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24236 BLOCK_NUMBER (stmt));
24237 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24238 BLOCK_NUMBER (stmt));
24239 add_AT_low_high_pc (die, label, label_high, false);
24240 }
24241 }
24242
24243 /* Generate a DIE for a lexical block. */
24244
24245 static void
24246 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24247 {
24248 dw_die_ref old_die = lookup_block_die (stmt);
24249 dw_die_ref stmt_die = NULL;
24250 if (!old_die)
24251 {
24252 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24253 equate_block_to_die (stmt, stmt_die);
24254 }
24255
24256 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24257 {
24258 /* If this is an inlined or concrete instance, create a new lexical
24259 block DIE for anything below to attach DW_AT_abstract_origin to. */
24260 if (old_die)
24261 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24262
24263 tree origin = block_ultimate_origin (stmt);
24264 if (origin != NULL_TREE && (origin != stmt || old_die))
24265 add_abstract_origin_attribute (stmt_die, origin);
24266
24267 old_die = NULL;
24268 }
24269
24270 if (old_die)
24271 stmt_die = old_die;
24272
24273 /* A non-abstract block whose blocks have already been reordered
24274 should have the instruction range for this block. If so, set the
24275 high/low attributes. */
24276 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24277 {
24278 gcc_assert (stmt_die);
24279 add_high_low_attributes (stmt, stmt_die);
24280 }
24281
24282 decls_for_scope (stmt, stmt_die);
24283 }
24284
24285 /* Generate a DIE for an inlined subprogram. */
24286
24287 static void
24288 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24289 {
24290 tree decl = block_ultimate_origin (stmt);
24291
24292 /* Make sure any inlined functions are known to be inlineable. */
24293 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24294 || cgraph_function_possibly_inlined_p (decl));
24295
24296 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24297
24298 if (call_arg_locations || debug_inline_points)
24299 equate_block_to_die (stmt, subr_die);
24300 add_abstract_origin_attribute (subr_die, decl);
24301 if (TREE_ASM_WRITTEN (stmt))
24302 add_high_low_attributes (stmt, subr_die);
24303 add_call_src_coords_attributes (stmt, subr_die);
24304
24305 /* The inliner creates an extra BLOCK for the parameter setup,
24306 we want to merge that with the actual outermost BLOCK of the
24307 inlined function to avoid duplicate locals in consumers.
24308 Do that by doing the recursion to subblocks on the single subblock
24309 of STMT. */
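  /* Illustrative sketch (names made up): after inlining a call to

       static inline int f (int x) { int y = x + 1; return y; }

     the BLOCK tree rooted at the call site looks roughly like

       STMT    <- extra BLOCK created by the inliner for parameter setup
         BLOCK <- copy of f's outermost scope, its origin points into f
           'y' ...

     and we recurse into the single subblock directly, so the parameter
     setup and 'y' both become children of the one
     DW_TAG_inlined_subroutine DIE.  */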
24310 bool unwrap_one = false;
24311 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24312 {
24313 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24314 if (origin
24315 && TREE_CODE (origin) == BLOCK
24316 && BLOCK_SUPERCONTEXT (origin) == decl)
24317 unwrap_one = true;
24318 }
24319 decls_for_scope (stmt, subr_die, !unwrap_one);
24320 if (unwrap_one)
24321 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24322 }
24323
24324 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24325 the comment for VLR_CONTEXT. */
24326
24327 static void
24328 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24329 {
24330 dw_die_ref decl_die;
24331
24332 if (TREE_TYPE (decl) == error_mark_node)
24333 return;
24334
24335 decl_die = new_die (DW_TAG_member, context_die, decl);
24336 add_name_and_src_coords_attributes (decl_die, decl);
24337 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24338 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24339 context_die);
24340
24341 if (DECL_BIT_FIELD_TYPE (decl))
24342 {
24343 add_byte_size_attribute (decl_die, decl);
24344 add_bit_size_attribute (decl_die, decl);
24345 add_bit_offset_attribute (decl_die, decl, ctx);
24346 }
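  /* For example (illustrative only), `struct { unsigned f : 3; }' gives the
     member DIE for `f' a DW_AT_byte_size for its declared type, a
     DW_AT_bit_size of 3 and a bit offset attribute locating the field
     within its containing unit.  */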
24347
24348 add_alignment_attribute (decl_die, decl);
24349
24350 /* If we have a variant part offset, then we are supposed to process a member
24351 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24352 trees. */
24353 gcc_assert (ctx->variant_part_offset == NULL_TREE
24354 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24355 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24356 add_data_member_location_attribute (decl_die, decl, ctx);
24357
24358 if (DECL_ARTIFICIAL (decl))
24359 add_AT_flag (decl_die, DW_AT_artificial, 1);
24360
24361 add_accessibility_attribute (decl_die, decl);
24362
24363 /* Equate decl number to die, so that we can look up this decl later on. */
24364 equate_decl_number_to_die (decl, decl_die);
24365 }
24366
24367 /* Generate a DIE for a pointer to a member type. TYPE can be an
24368 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24369 pointer to member function. */
24370
24371 static void
24372 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24373 {
24374 if (lookup_type_die (type))
24375 return;
24376
24377 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24378 scope_die_for (type, context_die), type);
24379
24380 equate_type_number_to_die (type, ptr_die);
24381 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24382 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24383 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24384 context_die);
24385 add_alignment_attribute (ptr_die, type);
24386
24387 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24388 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24389 {
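      /* Illustrative note: for a pointer to data member such as `int S::*pm'
         in C++, DW_AT_use_location is evaluated with the object address and
         the value of `pm' (a byte offset in the common C++ ABI) on the
         expression stack, so the single DW_OP_plus below yields the member's
         address.  */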
24390 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24391 add_AT_loc (ptr_die, DW_AT_use_location, op);
24392 }
24393 }
24394
24395 static char *producer_string;
24396
24397 /* Return a heap-allocated producer string, including the command line
24398 options when -grecord-gcc-switches is in effect. */
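/* Purely for illustration (the exact contents depend on the compiler version
   and command line): with -grecord-gcc-switches the result might look
   roughly like

       "GNU C17 9.2.0 -march=x86-64 -O2 -g"

   i.e. "<language> <version string>" followed by the recorded options, with
   driver, dump and diagnostic options filtered out below.  */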
24399
24400 static char *
24401 gen_producer_string (void)
24402 {
24403 size_t j;
24404 auto_vec<const char *> switches;
24405 const char *language_string = lang_hooks.name;
24406 char *producer, *tail;
24407 const char *p;
24408 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24409 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24410
24411 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24412 switch (save_decoded_options[j].opt_index)
24413 {
24414 case OPT_o:
24415 case OPT_d:
24416 case OPT_dumpbase:
24417 case OPT_dumpdir:
24418 case OPT_auxbase:
24419 case OPT_auxbase_strip:
24420 case OPT_quiet:
24421 case OPT_version:
24422 case OPT_v:
24423 case OPT_w:
24424 case OPT_L:
24425 case OPT_D:
24426 case OPT_I:
24427 case OPT_U:
24428 case OPT_SPECIAL_unknown:
24429 case OPT_SPECIAL_ignore:
24430 case OPT_SPECIAL_deprecated:
24431 case OPT_SPECIAL_program_name:
24432 case OPT_SPECIAL_input_file:
24433 case OPT_grecord_gcc_switches:
24434 case OPT__output_pch_:
24435 case OPT_fdiagnostics_show_location_:
24436 case OPT_fdiagnostics_show_option:
24437 case OPT_fdiagnostics_show_caret:
24438 case OPT_fdiagnostics_show_labels:
24439 case OPT_fdiagnostics_show_line_numbers:
24440 case OPT_fdiagnostics_color_:
24441 case OPT_fdiagnostics_format_:
24442 case OPT_fverbose_asm:
24443 case OPT____:
24444 case OPT__sysroot_:
24445 case OPT_nostdinc:
24446 case OPT_nostdinc__:
24447 case OPT_fpreprocessed:
24448 case OPT_fltrans_output_list_:
24449 case OPT_fresolution_:
24450 case OPT_fdebug_prefix_map_:
24451 case OPT_fmacro_prefix_map_:
24452 case OPT_ffile_prefix_map_:
24453 case OPT_fcompare_debug:
24454 case OPT_fchecking:
24455 case OPT_fchecking_:
24456 /* Ignore these. */
24457 continue;
24458 default:
24459 if (cl_options[save_decoded_options[j].opt_index].flags
24460 & CL_NO_DWARF_RECORD)
24461 continue;
24462 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24463 == '-');
24464 switch (save_decoded_options[j].canonical_option[0][1])
24465 {
24466 case 'M':
24467 case 'i':
24468 case 'W':
24469 continue;
24470 case 'f':
24471 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24472 "dump", 4) == 0)
24473 continue;
24474 break;
24475 default:
24476 break;
24477 }
24478 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24479 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24480 break;
24481 }
24482
24483 producer = XNEWVEC (char, plen + 1 + len + 1);
24484 tail = producer;
24485 sprintf (tail, "%s %s", language_string, version_string);
24486 tail += plen;
24487
24488 FOR_EACH_VEC_ELT (switches, j, p)
24489 {
24490 len = strlen (p);
24491 *tail = ' ';
24492 memcpy (tail + 1, p, len);
24493 tail += len + 1;
24494 }
24495
24496 *tail = '\0';
24497 return producer;
24498 }
24499
24500 /* Given two C and/or C++ language/version strings, return the "highest"
24501 one.  C++ is assumed to be "higher" than C in this case.  Used for
24502 merging LTO translation unit languages. */
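/* For example, highest_c_language ("GNU C11", "GNU C++14") returns
   "GNU C++14" and highest_c_language ("GNU C99", "GNU C89") returns
   "GNU C99"; if neither string is one of the versions listed below, we hit
   gcc_unreachable.  */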
24503 static const char *
24504 highest_c_language (const char *lang1, const char *lang2)
24505 {
24506 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24507 return "GNU C++17";
24508 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24509 return "GNU C++14";
24510 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24511 return "GNU C++11";
24512 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24513 return "GNU C++98";
24514
24515 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24516 return "GNU C2X";
24517 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24518 return "GNU C17";
24519 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24520 return "GNU C11";
24521 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24522 return "GNU C99";
24523 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24524 return "GNU C89";
24525
24526 gcc_unreachable ();
24527 }
24528
24529
24530 /* Generate the DIE for the compilation unit. */
24531
24532 static dw_die_ref
24533 gen_compile_unit_die (const char *filename)
24534 {
24535 dw_die_ref die;
24536 const char *language_string = lang_hooks.name;
24537 int language;
24538
24539 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24540
24541 if (filename)
24542 {
24543 add_name_attribute (die, filename);
24544 /* Don't add cwd for <built-in>. */
24545 if (filename[0] != '<')
24546 add_comp_dir_attribute (die);
24547 }
24548
24549 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24550
24551 /* If our producer is LTO, try to figure out a common language to use
24552 from the global list of translation units. */
24553 if (strcmp (language_string, "GNU GIMPLE") == 0)
24554 {
24555 unsigned i;
24556 tree t;
24557 const char *common_lang = NULL;
24558
24559 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24560 {
24561 if (!TRANSLATION_UNIT_LANGUAGE (t))
24562 continue;
24563 if (!common_lang)
24564 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24565 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24566 ;
24567 else if (strncmp (common_lang, "GNU C", 5) == 0
24568 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24569 /* Mixing C and C++ is ok, use C++ in that case. */
24570 common_lang = highest_c_language (common_lang,
24571 TRANSLATION_UNIT_LANGUAGE (t));
24572 else
24573 {
24574 /* Fall back to C. */
24575 common_lang = NULL;
24576 break;
24577 }
24578 }
24579
24580 if (common_lang)
24581 language_string = common_lang;
24582 }
24583
24584 language = DW_LANG_C;
24585 if (strncmp (language_string, "GNU C", 5) == 0
24586 && ISDIGIT (language_string[5]))
24587 {
24588 language = DW_LANG_C89;
24589 if (dwarf_version >= 3 || !dwarf_strict)
24590 {
24591 if (strcmp (language_string, "GNU C89") != 0)
24592 language = DW_LANG_C99;
24593
24594 if (dwarf_version >= 5 /* || !dwarf_strict */)
24595 if (strcmp (language_string, "GNU C11") == 0
24596 || strcmp (language_string, "GNU C17") == 0
24597 || strcmp (language_string, "GNU C2X") == 0)
24598 language = DW_LANG_C11;
24599 }
24600 }
24601 else if (strncmp (language_string, "GNU C++", 7) == 0)
24602 {
24603 language = DW_LANG_C_plus_plus;
24604 if (dwarf_version >= 5 /* || !dwarf_strict */)
24605 {
24606 if (strcmp (language_string, "GNU C++11") == 0)
24607 language = DW_LANG_C_plus_plus_11;
24608 else if (strcmp (language_string, "GNU C++14") == 0)
24609 language = DW_LANG_C_plus_plus_14;
24610 else if (strcmp (language_string, "GNU C++17") == 0)
24611 /* For now. */
24612 language = DW_LANG_C_plus_plus_14;
24613 }
24614 }
24615 else if (strcmp (language_string, "GNU F77") == 0)
24616 language = DW_LANG_Fortran77;
24617 else if (dwarf_version >= 3 || !dwarf_strict)
24618 {
24619 if (strcmp (language_string, "GNU Ada") == 0)
24620 language = DW_LANG_Ada95;
24621 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24622 {
24623 language = DW_LANG_Fortran95;
24624 if (dwarf_version >= 5 /* || !dwarf_strict */)
24625 {
24626 if (strcmp (language_string, "GNU Fortran2003") == 0)
24627 language = DW_LANG_Fortran03;
24628 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24629 language = DW_LANG_Fortran08;
24630 }
24631 }
24632 else if (strcmp (language_string, "GNU Objective-C") == 0)
24633 language = DW_LANG_ObjC;
24634 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24635 language = DW_LANG_ObjC_plus_plus;
24636 else if (strcmp (language_string, "GNU D") == 0)
24637 language = DW_LANG_D;
24638 else if (dwarf_version >= 5 || !dwarf_strict)
24639 {
24640 if (strcmp (language_string, "GNU Go") == 0)
24641 language = DW_LANG_Go;
24642 }
24643 }
24644 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24645 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24646 language = DW_LANG_Fortran90;
24647 /* Likewise for Ada. */
24648 else if (strcmp (language_string, "GNU Ada") == 0)
24649 language = DW_LANG_Ada83;
24650
24651 add_AT_unsigned (die, DW_AT_language, language);
24652
24653 switch (language)
24654 {
24655 case DW_LANG_Fortran77:
24656 case DW_LANG_Fortran90:
24657 case DW_LANG_Fortran95:
24658 case DW_LANG_Fortran03:
24659 case DW_LANG_Fortran08:
24660 /* Fortran has case-insensitive identifiers and the front-end
24661 lowercases everything. */
24662 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24663 break;
24664 default:
24665 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24666 break;
24667 }
24668 return die;
24669 }
24670
24671 /* Generate the DIE for a base class. */
24672
24673 static void
24674 gen_inheritance_die (tree binfo, tree access, tree type,
24675 dw_die_ref context_die)
24676 {
24677 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24678 struct vlr_context ctx = { type, NULL };
24679
24680 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24681 context_die);
24682 add_data_member_location_attribute (die, binfo, &ctx);
24683
24684 if (BINFO_VIRTUAL_P (binfo))
24685 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24686
24687 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24688 children, otherwise the default is DW_ACCESS_public. In DWARF2
24689 the default has always been DW_ACCESS_private. */
24690 if (access == access_public_node)
24691 {
24692 if (dwarf_version == 2
24693 || context_die->die_tag == DW_TAG_class_type)
24694 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24695 }
24696 else if (access == access_protected_node)
24697 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24698 else if (dwarf_version > 2
24699 && context_die->die_tag != DW_TAG_class_type)
24700 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24701 }
24702
24703 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24704 structure. */
24705
24706 static bool
24707 is_variant_part (tree decl)
24708 {
24709 return (TREE_CODE (decl) == FIELD_DECL
24710 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24711 }
24712
24713 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24714 return the FIELD_DECL. Return NULL_TREE otherwise. */
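/* For instance, given a qualifier operand of the form

     (NOP_EXPR (COMPONENT_REF (PLACEHOLDER_EXPR struct_type) discr_field))

   the conversions are stripped and discr_field is returned; anything that is
   not such a self-referential field access yields NULL_TREE.  */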
24715
24716 static tree
24717 analyze_discr_in_predicate (tree operand, tree struct_type)
24718 {
24719 while (CONVERT_EXPR_P (operand))
24720 operand = TREE_OPERAND (operand, 0);
24721
24722 /* Match field access to members of struct_type only. */
24723 if (TREE_CODE (operand) == COMPONENT_REF
24724 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24725 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24726 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24727 return TREE_OPERAND (operand, 1);
24728 else
24729 return NULL_TREE;
24730 }
24731
24732 /* Check that SRC is a constant integer that can be represented as a native
24733 integer constant (either signed or unsigned). If so, store it into DEST and
24734 return true. Return false otherwise. */
24735
24736 static bool
24737 get_discr_value (tree src, dw_discr_value *dest)
24738 {
24739 tree discr_type = TREE_TYPE (src);
24740
24741 if (lang_hooks.types.get_debug_type)
24742 {
24743 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24744 if (debug_type != NULL)
24745 discr_type = debug_type;
24746 }
24747
24748 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24749 return false;
24750
24751 /* Signedness can vary between the original type and the debug type. This
24752 can happen for character types in Ada for instance: the character type
24753 used for code generation can be signed, to be compatible with the C one,
24754 but from a debugger point of view, it must be unsigned. */
24755 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24756 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24757
24758 if (is_orig_unsigned != is_debug_unsigned)
24759 src = fold_convert (discr_type, src);
24760
24761 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24762 return false;
24763
24764 dest->pos = is_debug_unsigned;
24765 if (is_debug_unsigned)
24766 dest->v.uval = tree_to_uhwi (src);
24767 else
24768 dest->v.sval = tree_to_shwi (src);
24769
24770 return true;
24771 }
24772
24773 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24774 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24775 store NULL_TREE in DISCR_DECL. Otherwise:
24776
24777 - store the discriminant field in STRUCT_TYPE that controls the variant
24778 part to *DISCR_DECL
24779
24780 - put in *DISCR_LISTS_P an array where for each variant, the item
24781 represents the corresponding matching list of discriminant values.
24782
24783 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24784 the above array.
24785
24786 Note that when the array is allocated (i.e. when the analysis is
24787 successful), it is up to the caller to free the array. */
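/* Illustrative sketch (Ada-like source with made-up names): for

     type Rec (K : Integer) is record
        case K is
           when 1      => A : Integer;
           when 2 .. 5 => B : Float;
           when others => null;
        end case;
     end record;

   the variant part is a FIELD_DECL of QUAL_UNION_TYPE whose members carry
   DECL_QUALIFIER predicates roughly equivalent to K == 1, K >= 2 && K <= 5
   and boolean_true_node for the default variant, from which this function
   deduces the discriminant K and one list of matching values per variant.  */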
24788
24789 static void
24790 analyze_variants_discr (tree variant_part_decl,
24791 tree struct_type,
24792 tree *discr_decl,
24793 dw_discr_list_ref **discr_lists_p,
24794 unsigned *discr_lists_length)
24795 {
24796 tree variant_part_type = TREE_TYPE (variant_part_decl);
24797 tree variant;
24798 dw_discr_list_ref *discr_lists;
24799 unsigned i;
24800
24801 /* Compute how many variants there are in this variant part. */
24802 *discr_lists_length = 0;
24803 for (variant = TYPE_FIELDS (variant_part_type);
24804 variant != NULL_TREE;
24805 variant = DECL_CHAIN (variant))
24806 ++*discr_lists_length;
24807
24808 *discr_decl = NULL_TREE;
24809 *discr_lists_p
24810 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24811 sizeof (**discr_lists_p));
24812 discr_lists = *discr_lists_p;
24813
24814 /* And then analyze all variants to extract discriminant information for all
24815 of them. This analysis is conservative: as soon as we detect something we
24816 do not support, abort everything and pretend we found nothing. */
24817 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24818 variant != NULL_TREE;
24819 variant = DECL_CHAIN (variant), ++i)
24820 {
24821 tree match_expr = DECL_QUALIFIER (variant);
24822
24823 /* Now, try to analyze the predicate and deduce a discriminant for
24824 it. */
24825 if (match_expr == boolean_true_node)
24826 /* Typically happens for the default variant: it matches all cases that
24827 previous variants rejected. Don't output any matching value for
24828 this one. */
24829 continue;
24830
24831 /* The following loop tries to iterate over each discriminant
24832 possibility: single values or ranges. */
24833 while (match_expr != NULL_TREE)
24834 {
24835 tree next_round_match_expr;
24836 tree candidate_discr = NULL_TREE;
24837 dw_discr_list_ref new_node = NULL;
24838
24839 /* Possibilities are matched one after the other by nested
24840 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24841 continue with the rest at next iteration. */
24842 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24843 {
24844 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24845 match_expr = TREE_OPERAND (match_expr, 1);
24846 }
24847 else
24848 next_round_match_expr = NULL_TREE;
24849
24850 if (match_expr == boolean_false_node)
24851 /* This sub-expression matches nothing: just wait for the next
24852 one. */
24853 ;
24854
24855 else if (TREE_CODE (match_expr) == EQ_EXPR)
24856 {
24857 /* We are matching: <discr_field> == <integer_cst>
24858 This sub-expression matches a single value. */
24859 tree integer_cst = TREE_OPERAND (match_expr, 1);
24860
24861 candidate_discr
24862 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24863 struct_type);
24864
24865 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24866 if (!get_discr_value (integer_cst,
24867 &new_node->dw_discr_lower_bound))
24868 goto abort;
24869 new_node->dw_discr_range = false;
24870 }
24871
24872 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24873 {
24874 /* We are matching:
24875 <discr_field> > <integer_cst>
24876 && <discr_field> < <integer_cst>.
24877 This sub-expression matches the range of values between the
24878 two matched integer constants. Note that comparisons can be
24879 inclusive or exclusive. */
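              /* E.g. (illustrative) `K >= 2 && K <= 5' directly yields the
                 range 2 .. 5, while `K > 1 && K < 6' is normalized to the
                 same range by the increment/decrement below.  */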
24880 tree candidate_discr_1, candidate_discr_2;
24881 tree lower_cst, upper_cst;
24882 bool lower_cst_included, upper_cst_included;
24883 tree lower_op = TREE_OPERAND (match_expr, 0);
24884 tree upper_op = TREE_OPERAND (match_expr, 1);
24885
24886 /* When the comparison is exclusive, the integer constant is not
24887 the discriminant range bound we are looking for: we will have
24888 to increment or decrement it. */
24889 if (TREE_CODE (lower_op) == GE_EXPR)
24890 lower_cst_included = true;
24891 else if (TREE_CODE (lower_op) == GT_EXPR)
24892 lower_cst_included = false;
24893 else
24894 goto abort;
24895
24896 if (TREE_CODE (upper_op) == LE_EXPR)
24897 upper_cst_included = true;
24898 else if (TREE_CODE (upper_op) == LT_EXPR)
24899 upper_cst_included = false;
24900 else
24901 goto abort;
24902
24903 /* Extract the discriminant from the first operand and check it
24904 is consistent with the same analysis in the second
24905 operand. */
24906 candidate_discr_1
24907 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24908 struct_type);
24909 candidate_discr_2
24910 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24911 struct_type);
24912 if (candidate_discr_1 == candidate_discr_2)
24913 candidate_discr = candidate_discr_1;
24914 else
24915 goto abort;
24916
24917 /* Extract bounds from both. */
24918 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24919 lower_cst = TREE_OPERAND (lower_op, 1);
24920 upper_cst = TREE_OPERAND (upper_op, 1);
24921
24922 if (!lower_cst_included)
24923 lower_cst
24924 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24925 build_int_cst (TREE_TYPE (lower_cst), 1));
24926 if (!upper_cst_included)
24927 upper_cst
24928 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24929 build_int_cst (TREE_TYPE (upper_cst), 1));
24930
24931 if (!get_discr_value (lower_cst,
24932 &new_node->dw_discr_lower_bound)
24933 || !get_discr_value (upper_cst,
24934 &new_node->dw_discr_upper_bound))
24935 goto abort;
24936
24937 new_node->dw_discr_range = true;
24938 }
24939
24940 else if ((candidate_discr
24941 = analyze_discr_in_predicate (match_expr, struct_type))
24942 && TREE_TYPE (candidate_discr) == boolean_type_node)
24943 {
24944 /* We are matching: <discr_field> for a boolean discriminant.
24945 This sub-expression matches boolean_true_node. */
24946 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24947 if (!get_discr_value (boolean_true_node,
24948 &new_node->dw_discr_lower_bound))
24949 goto abort;
24950 new_node->dw_discr_range = false;
24951 }
24952
24953 else
24954 /* Unsupported sub-expression: we cannot determine the set of
24955 matching discriminant values. Abort everything. */
24956 goto abort;
24957
24958 /* If the discriminant info is not consistent with what we saw so
24959 far, consider the analysis failed and abort everything. */
24960 if (candidate_discr == NULL_TREE
24961 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24962 goto abort;
24963 else
24964 *discr_decl = candidate_discr;
24965
24966 if (new_node != NULL)
24967 {
24968 new_node->dw_discr_next = discr_lists[i];
24969 discr_lists[i] = new_node;
24970 }
24971 match_expr = next_round_match_expr;
24972 }
24973 }
24974
24975 /* If we reach this point, we could match everything we were interested
24976 in. */
24977 return;
24978
24979 abort:
24980 /* Clean up all data structures and return no result. */
24981 free (*discr_lists_p);
24982 *discr_lists_p = NULL;
24983 *discr_decl = NULL_TREE;
24984 }
24985
24986 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24987 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24988 under CONTEXT_DIE.
24989
24990 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24991 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24992 this type, which are record types, represent the available variants and each
24993 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24994 values are inferred from these attributes.
24995
24996 In trees, the offsets for the fields inside these sub-records are relative
24997 to the variant part itself, whereas the corresponding DIEs should have
24998 offset attributes that are relative to the embedding record base address.
24999 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
25000 must be an expression that computes the offset of the variant part to
25001 describe in DWARF. */
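/* Rough shape of the output for the record sketched above
   analyze_variants_discr (forms and sibling attributes elided):

     DW_TAG_variant_part
       DW_AT_discr          -> DIE of the discriminant member K
       DW_TAG_variant
         DW_AT_discr_value  1
         DW_TAG_member      A
       DW_TAG_variant
         DW_AT_discr_list   range 2 .. 5
         DW_TAG_member      B
       DW_TAG_variant       <- default variant, no discriminant attribute

   This is only a sketch; the exact attributes depend on the DWARF version
   and on whether a DIE for the discriminant could be found.  */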
25002
25003 static void
25004 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
25005 dw_die_ref context_die)
25006 {
25007 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25008 tree variant_part_offset = vlr_ctx->variant_part_offset;
25009 struct loc_descr_context ctx = {
25010 vlr_ctx->struct_type, /* context_type */
25011 NULL_TREE, /* base_decl */
25012 NULL, /* dpi */
25013 false, /* placeholder_arg */
25014 false /* placeholder_seen */
25015 };
25016
25017 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25018 NULL_TREE if there is no such field. */
25019 tree discr_decl = NULL_TREE;
25020 dw_discr_list_ref *discr_lists;
25021 unsigned discr_lists_length = 0;
25022 unsigned i;
25023
25024 dw_die_ref dwarf_proc_die = NULL;
25025 dw_die_ref variant_part_die
25026 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25027
25028 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25029
25030 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25031 &discr_decl, &discr_lists, &discr_lists_length);
25032
25033 if (discr_decl != NULL_TREE)
25034 {
25035 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25036
25037 if (discr_die)
25038 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25039 else
25040 /* We have no DIE for the discriminant, so just discard all
25041 discriminant information in the output. */
25042 discr_decl = NULL_TREE;
25043 }
25044
25045 /* If the offset for this variant part is more complex than a constant,
25046 create a DWARF procedure for it so that we will not have to generate DWARF
25047 expressions for it for each member. */
25048 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25049 && (dwarf_version >= 3 || !dwarf_strict))
25050 {
25051 const tree dwarf_proc_fndecl
25052 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25053 build_function_type (TREE_TYPE (variant_part_offset),
25054 NULL_TREE));
25055 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25056 const dw_loc_descr_ref dwarf_proc_body
25057 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25058
25059 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25060 dwarf_proc_fndecl, context_die);
25061 if (dwarf_proc_die != NULL)
25062 variant_part_offset = dwarf_proc_call;
25063 }
25064
25065 /* Output DIEs for all variants. */
25066 i = 0;
25067 for (tree variant = TYPE_FIELDS (variant_part_type);
25068 variant != NULL_TREE;
25069 variant = DECL_CHAIN (variant), ++i)
25070 {
25071 tree variant_type = TREE_TYPE (variant);
25072 dw_die_ref variant_die;
25073
25074 /* All variants (i.e. members of a variant part) are supposed to be
25075 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25076 under these records. */
25077 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25078
25079 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25080 equate_decl_number_to_die (variant, variant_die);
25081
25082 /* Output discriminant values this variant matches, if any. */
25083 if (discr_decl == NULL || discr_lists[i] == NULL)
25084 /* If we have no discriminant information, or (more likely) this is
25085 the default variant, don't output any discriminant value/list
25086 attribute, as the standard says. */
25087 ;
25088 else if (discr_lists[i]->dw_discr_next == NULL
25089 && !discr_lists[i]->dw_discr_range)
25090 /* If there is only one accepted value, don't bother outputting a
25091 list. */
25092 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25093 else
25094 add_discr_list (variant_die, discr_lists[i]);
25095
25096 for (tree member = TYPE_FIELDS (variant_type);
25097 member != NULL_TREE;
25098 member = DECL_CHAIN (member))
25099 {
25100 struct vlr_context vlr_sub_ctx = {
25101 vlr_ctx->struct_type, /* struct_type */
25102 NULL /* variant_part_offset */
25103 };
25104 if (is_variant_part (member))
25105 {
25106 /* All offsets for fields inside variant parts are relative to
25107 the top-level embedding RECORD_TYPE's base address. On the
25108 other hand, offsets in GCC's types are relative to the
25109 nested-most variant part. So we have to sum offsets each time
25110 we recurse. */
25111
25112 vlr_sub_ctx.variant_part_offset
25113 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25114 variant_part_offset, byte_position (member));
25115 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25116 }
25117 else
25118 {
25119 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25120 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25121 }
25122 }
25123 }
25124
25125 free (discr_lists);
25126 }
25127
25128 /* Generate a DIE for a class member. */
25129
25130 static void
25131 gen_member_die (tree type, dw_die_ref context_die)
25132 {
25133 tree member;
25134 tree binfo = TYPE_BINFO (type);
25135
25136 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25137
25138 /* If this is not an incomplete type, output descriptions of each of its
25139 members. Note that as we output the DIEs necessary to represent the
25140 members of this record or union type, we will also be trying to output
25141 DIEs to represent the *types* of those members. However the `type'
25142 function (above) will specifically avoid generating type DIEs for member
25143 types *within* the list of member DIEs for this (containing) type except
25144 for those types (of members) which are explicitly marked as also being
25145 members of this (containing) type themselves.  The g++ front end can
25146 force any given type to be treated as a member of some other (containing)
25147 type by setting the TYPE_CONTEXT of the given (member) type to point to
25148 the TREE node representing the appropriate (containing) type. */
25149
25150 /* First output info about the base classes. */
25151 if (binfo && early_dwarf)
25152 {
25153 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25154 int i;
25155 tree base;
25156
25157 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25158 gen_inheritance_die (base,
25159 (accesses ? (*accesses)[i] : access_public_node),
25160 type,
25161 context_die);
25162 }
25163
25164 /* Now output info about the members. */
25165 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25166 {
25167 /* Ignore clones. */
25168 if (DECL_ABSTRACT_ORIGIN (member))
25169 continue;
25170
25171 struct vlr_context vlr_ctx = { type, NULL_TREE };
25172 bool static_inline_p
25173 = (VAR_P (member)
25174 && TREE_STATIC (member)
25175 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25176 != -1));
25177
25178 /* If we thought we were generating minimal debug info for TYPE
25179 and then changed our minds, some of the member declarations
25180 may have already been defined. Don't define them again, but
25181 do put them in the right order. */
25182
25183 if (dw_die_ref child = lookup_decl_die (member))
25184 {
25185 /* Handle inline static data members, which only have in-class
25186 declarations. */
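          /* For instance (illustrative only):

               struct S { static inline int i = 0; };   // C++17

             has only the in-class declaration; the DIE for the definition is
             a DW_TAG_variable directly under the CU and is reparented or
             given a DW_AT_specification below.  */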
25187 bool splice = true;
25188
25189 dw_die_ref ref = NULL;
25190 if (child->die_tag == DW_TAG_variable
25191 && child->die_parent == comp_unit_die ())
25192 {
25193 ref = get_AT_ref (child, DW_AT_specification);
25194
25195 /* For C++17 inline static data members followed by redundant
25196 out of class redeclaration, we might get here with
25197 child being the DIE created for the out of class
25198 redeclaration and with its DW_AT_specification being
25199 the DIE created for in-class definition. We want to
25200 reparent the latter, and don't want to create another
25201 DIE with DW_AT_specification in that case, because
25202 we already have one. */
25203 if (ref
25204 && static_inline_p
25205 && ref->die_tag == DW_TAG_variable
25206 && ref->die_parent == comp_unit_die ()
25207 && get_AT (ref, DW_AT_specification) == NULL)
25208 {
25209 child = ref;
25210 ref = NULL;
25211 static_inline_p = false;
25212 }
25213
25214 if (!ref)
25215 {
25216 reparent_child (child, context_die);
25217 if (dwarf_version < 5)
25218 child->die_tag = DW_TAG_member;
25219 splice = false;
25220 }
25221 }
25222
25223 if (splice)
25224 splice_child_die (context_die, child);
25225 }
25226
25227 /* Do not generate standard DWARF for variant parts if we are generating
25228 the corresponding GNAT encodings: DIEs generated for both would
25229 conflict in our mappings. */
25230 else if (is_variant_part (member)
25231 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25232 {
25233 vlr_ctx.variant_part_offset = byte_position (member);
25234 gen_variant_part (member, &vlr_ctx, context_die);
25235 }
25236 else
25237 {
25238 vlr_ctx.variant_part_offset = NULL_TREE;
25239 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25240 }
25241
25242 /* For C++ inline static data members emit immediately a DW_TAG_variable
25243 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25244 DW_AT_specification. */
25245 if (static_inline_p)
25246 {
25247 int old_extern = DECL_EXTERNAL (member);
25248 DECL_EXTERNAL (member) = 0;
25249 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25250 DECL_EXTERNAL (member) = old_extern;
25251 }
25252 }
25253 }
25254
25255 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25256 is set, we pretend that the type was never defined, so we only get the
25257 member DIEs needed by later specification DIEs. */
25258
25259 static void
25260 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25261 enum debug_info_usage usage)
25262 {
25263 if (TREE_ASM_WRITTEN (type))
25264 {
25265 /* Fill in the bounds of variable-length fields in late DWARF if
25266 still incomplete. */
25267 if (!early_dwarf && variably_modified_type_p (type, NULL))
25268 for (tree member = TYPE_FIELDS (type);
25269 member;
25270 member = DECL_CHAIN (member))
25271 fill_variable_array_bounds (TREE_TYPE (member));
25272 return;
25273 }
25274
25275 dw_die_ref type_die = lookup_type_die (type);
25276 dw_die_ref scope_die = 0;
25277 int nested = 0;
25278 int complete = (TYPE_SIZE (type)
25279 && (! TYPE_STUB_DECL (type)
25280 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25281 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25282 complete = complete && should_emit_struct_debug (type, usage);
25283
25284 if (type_die && ! complete)
25285 return;
25286
25287 if (TYPE_CONTEXT (type) != NULL_TREE
25288 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25289 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25290 nested = 1;
25291
25292 scope_die = scope_die_for (type, context_die);
25293
25294 /* Generate child DIEs for template parameters. */
25295 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25296 schedule_generic_params_dies_gen (type);
25297
25298 if (! type_die || (nested && is_cu_die (scope_die)))
25299 /* First occurrence of type or toplevel definition of nested class. */
25300 {
25301 dw_die_ref old_die = type_die;
25302
25303 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25304 ? record_type_tag (type) : DW_TAG_union_type,
25305 scope_die, type);
25306 equate_type_number_to_die (type, type_die);
25307 if (old_die)
25308 add_AT_specification (type_die, old_die);
25309 else
25310 add_name_attribute (type_die, type_tag (type));
25311 }
25312 else
25313 remove_AT (type_die, DW_AT_declaration);
25314
25315 /* If this type has been completed, then give it a byte_size attribute and
25316 then give a list of members. */
25317 if (complete && !ns_decl)
25318 {
25319 /* Prevent infinite recursion in cases where the type of some member of
25320 this type is expressed in terms of this type itself. */
25321 TREE_ASM_WRITTEN (type) = 1;
25322 add_byte_size_attribute (type_die, type);
25323 add_alignment_attribute (type_die, type);
25324 if (TYPE_STUB_DECL (type) != NULL_TREE)
25325 {
25326 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25327 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25328 }
25329
25330 /* If the first reference to this type was as the return type of an
25331 inline function, then it may not have a parent. Fix this now. */
25332 if (type_die->die_parent == NULL)
25333 add_child_die (scope_die, type_die);
25334
25335 gen_member_die (type, type_die);
25336
25337 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25338 if (TYPE_ARTIFICIAL (type))
25339 add_AT_flag (type_die, DW_AT_artificial, 1);
25340
25341 /* GNU extension: Record what type our vtable lives in. */
25342 if (TYPE_VFIELD (type))
25343 {
25344 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25345
25346 gen_type_die (vtype, context_die);
25347 add_AT_die_ref (type_die, DW_AT_containing_type,
25348 lookup_type_die (vtype));
25349 }
25350 }
25351 else
25352 {
25353 add_AT_flag (type_die, DW_AT_declaration, 1);
25354
25355 /* We don't need to do this for function-local types. */
25356 if (TYPE_STUB_DECL (type)
25357 && ! decl_function_context (TYPE_STUB_DECL (type)))
25358 vec_safe_push (incomplete_types, type);
25359 }
25360
25361 if (get_AT (type_die, DW_AT_name))
25362 add_pubtype (type, type_die);
25363 }
25364
25365 /* Generate a DIE for a subroutine _type_. */
25366
25367 static void
25368 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25369 {
25370 tree return_type = TREE_TYPE (type);
25371 dw_die_ref subr_die
25372 = new_die (DW_TAG_subroutine_type,
25373 scope_die_for (type, context_die), type);
25374
25375 equate_type_number_to_die (type, subr_die);
25376 add_prototyped_attribute (subr_die, type);
25377 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25378 context_die);
25379 add_alignment_attribute (subr_die, type);
25380 gen_formal_types_die (type, subr_die);
25381
25382 if (get_AT (subr_die, DW_AT_name))
25383 add_pubtype (type, subr_die);
25384 if ((dwarf_version >= 5 || !dwarf_strict)
25385 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25386 add_AT_flag (subr_die, DW_AT_reference, 1);
25387 if ((dwarf_version >= 5 || !dwarf_strict)
25388 && lang_hooks.types.type_dwarf_attribute (type,
25389 DW_AT_rvalue_reference) != -1)
25390 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25391 }
25392
25393 /* Generate a DIE for a type definition. */
25394
25395 static void
25396 gen_typedef_die (tree decl, dw_die_ref context_die)
25397 {
25398 dw_die_ref type_die;
25399 tree type;
25400
25401 if (TREE_ASM_WRITTEN (decl))
25402 {
25403 if (DECL_ORIGINAL_TYPE (decl))
25404 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25405 return;
25406 }
25407
25408 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25409 checks in process_scope_var and modified_type_die), this should be called
25410 only for original types. */
25411 gcc_assert (decl_ultimate_origin (decl) == NULL
25412 || decl_ultimate_origin (decl) == decl);
25413
25414 TREE_ASM_WRITTEN (decl) = 1;
25415 type_die = new_die (DW_TAG_typedef, context_die, decl);
25416
25417 add_name_and_src_coords_attributes (type_die, decl);
25418 if (DECL_ORIGINAL_TYPE (decl))
25419 {
25420 type = DECL_ORIGINAL_TYPE (decl);
25421 if (type == error_mark_node)
25422 return;
25423
25424 gcc_assert (type != TREE_TYPE (decl));
25425 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25426 }
25427 else
25428 {
25429 type = TREE_TYPE (decl);
25430 if (type == error_mark_node)
25431 return;
25432
25433 if (is_naming_typedef_decl (TYPE_NAME (type)))
25434 {
25435 /* Here, we are in the case of decl being a typedef naming
25436 an anonymous type, e.g:
25437 typedef struct {...} foo;
25438 In that case TREE_TYPE (decl) is not a typedef variant
25439 type and TYPE_NAME of the anonymous type is set to the
25440 TYPE_DECL of the typedef. This construct is emitted by
25441 the C++ FE.
25442
25443 TYPE is the anonymous struct named by the typedef
25444 DECL. As we need the DW_AT_type attribute of the
25445 DW_TAG_typedef to point to the DIE of TYPE, let's
25446 generate that DIE right away. add_type_attribute
25447 called below will then pick (via lookup_type_die) that
25448 anonymous struct DIE. */
25449 if (!TREE_ASM_WRITTEN (type))
25450 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25451
25452 /* This is a GNU Extension. We are adding a
25453 DW_AT_linkage_name attribute to the DIE of the
25454 anonymous struct TYPE. The value of that attribute
25455 is the name of the typedef decl naming the anonymous
25456 struct. This greatly eases the work of consumers of
25457 this debug info. */
25458 add_linkage_name_raw (lookup_type_die (type), decl);
25459 }
25460 }
25461
25462 add_type_attribute (type_die, type, decl_quals (decl), false,
25463 context_die);
25464
25465 if (is_naming_typedef_decl (decl))
25466 /* We want that all subsequent calls to lookup_type_die with
25467 TYPE in argument yield the DW_TAG_typedef we have just
25468 created. */
25469 equate_type_number_to_die (type, type_die);
25470
25471 add_alignment_attribute (type_die, TREE_TYPE (decl));
25472
25473 add_accessibility_attribute (type_die, decl);
25474
25475 if (DECL_ABSTRACT_P (decl))
25476 equate_decl_number_to_die (decl, type_die);
25477
25478 if (get_AT (type_die, DW_AT_name))
25479 add_pubtype (decl, type_die);
25480 }
25481
25482 /* Generate a DIE for a struct, class, enum or union type. */
25483
25484 static void
25485 gen_tagged_type_die (tree type,
25486 dw_die_ref context_die,
25487 enum debug_info_usage usage)
25488 {
25489 if (type == NULL_TREE
25490 || !is_tagged_type (type))
25491 return;
25492
25493 if (TREE_ASM_WRITTEN (type))
25494 ;
25495 /* If this is a nested type whose containing class hasn't been written
25496 out yet, writing it out will cover this one, too. This does not apply
25497 to instantiations of member class templates; they need to be added to
25498 the containing class as they are generated. FIXME: This hurts the
25499 idea of combining type decls from multiple TUs, since we can't predict
25500 what set of template instantiations we'll get. */
25501 else if (TYPE_CONTEXT (type)
25502 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25503 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25504 {
25505 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25506
25507 if (TREE_ASM_WRITTEN (type))
25508 return;
25509
25510 /* If that failed, attach ourselves to the stub. */
25511 context_die = lookup_type_die (TYPE_CONTEXT (type));
25512 }
25513 else if (TYPE_CONTEXT (type) != NULL_TREE
25514 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25515 {
25516 /* If this type is local to a function that hasn't been written
25517 out yet, use a NULL context for now; it will be fixed up in
25518 decls_for_scope. */
25519 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25520 /* A declaration DIE doesn't count; nested types need to go in the
25521 specification. */
25522 if (context_die && is_declaration_die (context_die))
25523 context_die = NULL;
25524 }
25525 else
25526 context_die = declare_in_namespace (type, context_die);
25527
25528 if (TREE_CODE (type) == ENUMERAL_TYPE)
25529 {
25530 /* This might have been written out by the call to
25531 declare_in_namespace. */
25532 if (!TREE_ASM_WRITTEN (type))
25533 gen_enumeration_type_die (type, context_die);
25534 }
25535 else
25536 gen_struct_or_union_type_die (type, context_die, usage);
25537
25538 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25539 it up if it is ever completed. gen_*_type_die will set it for us
25540 when appropriate. */
25541 }
25542
25543 /* Generate a type description DIE. */
25544
25545 static void
25546 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25547 enum debug_info_usage usage)
25548 {
25549 struct array_descr_info info;
25550
25551 if (type == NULL_TREE || type == error_mark_node)
25552 return;
25553
25554 if (flag_checking && type)
25555 verify_type (type);
25556
25557 if (TYPE_NAME (type) != NULL_TREE
25558 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25559 && is_redundant_typedef (TYPE_NAME (type))
25560 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25561 /* The DECL of this type is a typedef we don't want to emit debug
25562 info for, but we want debug info for its underlying typedef.
25563 This can happen, e.g., for the injected-class-name of a C++
25564 type. */
25565 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25566
25567 /* If TYPE is a typedef type variant, let's generate debug info
25568 for the parent typedef which TYPE is a type of. */
25569 if (typedef_variant_p (type))
25570 {
25571 if (TREE_ASM_WRITTEN (type))
25572 return;
25573
25574 tree name = TYPE_NAME (type);
25575 tree origin = decl_ultimate_origin (name);
25576 if (origin != NULL && origin != name)
25577 {
25578 gen_decl_die (origin, NULL, NULL, context_die);
25579 return;
25580 }
25581
25582 /* Prevent broken recursion; we can't hand off to the same type. */
25583 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25584
25585 /* Give typedefs the right scope. */
25586 context_die = scope_die_for (type, context_die);
25587
25588 TREE_ASM_WRITTEN (type) = 1;
25589
25590 gen_decl_die (name, NULL, NULL, context_die);
25591 return;
25592 }
25593
25594 /* If type is an anonymous tagged type named by a typedef, let's
25595 generate debug info for the typedef. */
25596 if (is_naming_typedef_decl (TYPE_NAME (type)))
25597 {
25598 /* Give typedefs the right scope. */
25599 context_die = scope_die_for (type, context_die);
25600
25601 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25602 return;
25603 }
25604
25605 if (lang_hooks.types.get_debug_type)
25606 {
25607 tree debug_type = lang_hooks.types.get_debug_type (type);
25608
25609 if (debug_type != NULL_TREE && debug_type != type)
25610 {
25611 gen_type_die_with_usage (debug_type, context_die, usage);
25612 return;
25613 }
25614 }
25615
25616 /* We are going to output a DIE to represent the unqualified version
25617 of this type (i.e. without any const or volatile qualifiers) so
25618 get the main variant (i.e. the unqualified version) of this type
25619 now. (Vectors and arrays are special because the debugging info is in the
25620 cloned type itself. Similarly function/method types can contain extra
25621 ref-qualification). */
25622 if (TREE_CODE (type) == FUNCTION_TYPE
25623 || TREE_CODE (type) == METHOD_TYPE)
25624 {
25625 /* For function/method types, can't use type_main_variant here,
25626 because that can have different ref-qualifiers for C++,
25627 but try to canonicalize. */
25628 tree main = TYPE_MAIN_VARIANT (type);
25629 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25630 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25631 && check_base_type (t, main)
25632 && check_lang_type (t, type))
25633 {
25634 type = t;
25635 break;
25636 }
25637 }
25638 else if (TREE_CODE (type) != VECTOR_TYPE
25639 && TREE_CODE (type) != ARRAY_TYPE)
25640 type = type_main_variant (type);
25641
25642 /* If this is an array type with hidden descriptor, handle it first. */
25643 if (!TREE_ASM_WRITTEN (type)
25644 && lang_hooks.types.get_array_descr_info)
25645 {
25646 memset (&info, 0, sizeof (info));
25647 if (lang_hooks.types.get_array_descr_info (type, &info))
25648 {
25649 /* Fortran sometimes emits array types with no dimension. */
25650 gcc_assert (info.ndimensions >= 0
25651 && (info.ndimensions
25652 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25653 gen_descr_array_type_die (type, &info, context_die);
25654 TREE_ASM_WRITTEN (type) = 1;
25655 return;
25656 }
25657 }
25658
25659 if (TREE_ASM_WRITTEN (type))
25660 {
25661 /* Variable-length types may be incomplete even if
25662 TREE_ASM_WRITTEN. For such types, fall through to
25663 gen_array_type_die() and possibly fill in
25664 DW_AT_{upper,lower}_bound attributes. */
25665 if ((TREE_CODE (type) != ARRAY_TYPE
25666 && TREE_CODE (type) != RECORD_TYPE
25667 && TREE_CODE (type) != UNION_TYPE
25668 && TREE_CODE (type) != QUAL_UNION_TYPE)
25669 || !variably_modified_type_p (type, NULL))
25670 return;
25671 }
25672
25673 switch (TREE_CODE (type))
25674 {
25675 case ERROR_MARK:
25676 break;
25677
25678 case POINTER_TYPE:
25679 case REFERENCE_TYPE:
25680 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25681 ensures that the gen_type_die recursion will terminate even if the
25682 type is recursive. Recursive types are possible in Ada. */
25683 /* ??? We could perhaps do this for all types before the switch
25684 statement. */
25685 TREE_ASM_WRITTEN (type) = 1;
25686
25687 /* For these types, all that is required is that we output a DIE (or a
25688 set of DIEs) to represent the "basis" type. */
25689 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25690 DINFO_USAGE_IND_USE);
25691 break;
25692
25693 case OFFSET_TYPE:
25694 /* This code is used for C++ pointer-to-data-member types.
25695 Output a description of the relevant class type. */
25696 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25697 DINFO_USAGE_IND_USE);
25698
25699 /* Output a description of the type of the object pointed to. */
25700 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25701 DINFO_USAGE_IND_USE);
25702
25703 /* Now output a DIE to represent this pointer-to-data-member type
25704 itself. */
25705 gen_ptr_to_mbr_type_die (type, context_die);
25706 break;
25707
25708 case FUNCTION_TYPE:
25709 /* Force out return type (in case it wasn't forced out already). */
25710 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25711 DINFO_USAGE_DIR_USE);
25712 gen_subroutine_type_die (type, context_die);
25713 break;
25714
25715 case METHOD_TYPE:
25716 /* Force out return type (in case it wasn't forced out already). */
25717 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25718 DINFO_USAGE_DIR_USE);
25719 gen_subroutine_type_die (type, context_die);
25720 break;
25721
25722 case ARRAY_TYPE:
25723 case VECTOR_TYPE:
25724 gen_array_type_die (type, context_die);
25725 break;
25726
25727 case ENUMERAL_TYPE:
25728 case RECORD_TYPE:
25729 case UNION_TYPE:
25730 case QUAL_UNION_TYPE:
25731 gen_tagged_type_die (type, context_die, usage);
25732 return;
25733
25734 case VOID_TYPE:
25735 case INTEGER_TYPE:
25736 case REAL_TYPE:
25737 case FIXED_POINT_TYPE:
25738 case COMPLEX_TYPE:
25739 case BOOLEAN_TYPE:
25740 /* No DIEs needed for fundamental types. */
25741 break;
25742
25743 case NULLPTR_TYPE:
25744 case LANG_TYPE:
25745 /* Just use DW_TAG_unspecified_type. */
25746 {
25747 dw_die_ref type_die = lookup_type_die (type);
25748 if (type_die == NULL)
25749 {
25750 tree name = TYPE_IDENTIFIER (type);
25751 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25752 type);
25753 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25754 equate_type_number_to_die (type, type_die);
25755 }
25756 }
25757 break;
25758
25759 default:
25760 if (is_cxx_auto (type))
25761 {
25762 tree name = TYPE_IDENTIFIER (type);
25763 dw_die_ref *die = (name == get_identifier ("auto")
25764 ? &auto_die : &decltype_auto_die);
25765 if (!*die)
25766 {
25767 *die = new_die (DW_TAG_unspecified_type,
25768 comp_unit_die (), NULL_TREE);
25769 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25770 }
25771 equate_type_number_to_die (type, *die);
25772 break;
25773 }
25774 gcc_unreachable ();
25775 }
25776
25777 TREE_ASM_WRITTEN (type) = 1;
25778 }
25779
25780 static void
25781 gen_type_die (tree type, dw_die_ref context_die)
25782 {
25783 if (type != error_mark_node)
25784 {
25785 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25786 if (flag_checking)
25787 {
25788 dw_die_ref die = lookup_type_die (type);
25789 if (die)
25790 check_die (die);
25791 }
25792 }
25793 }
25794
25795 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25796 things which are local to the given block. */
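/* A hedged illustration (identifiers invented, not from the source): in

        void f (void) { { int i = 0; use (&i); } }

   the inner braces would normally become a DW_TAG_lexical_block DIE holding
   the DW_TAG_variable for i, but only if the block is judged "significant"
   below; otherwise we just walk its children via decls_for_scope.  */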
25797
25798 static void
25799 gen_block_die (tree stmt, dw_die_ref context_die)
25800 {
25801 int must_output_die = 0;
25802 bool inlined_func;
25803
25804 /* Ignore blocks that are NULL. */
25805 if (stmt == NULL_TREE)
25806 return;
25807
25808 inlined_func = inlined_function_outer_scope_p (stmt);
25809
25810 /* If the block is one fragment of a non-contiguous block, do not
25811 process the variables, since they will have been done by the
25812 origin block. Do process subblocks. */
25813 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25814 {
25815 tree sub;
25816
25817 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25818 gen_block_die (sub, context_die);
25819
25820 return;
25821 }
25822
25823 /* Determine if we need to output any Dwarf DIEs at all to represent this
25824 block. */
25825 if (inlined_func)
25826 /* The outer scopes for inlinings *must* always be represented. We
25827 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25828 must_output_die = 1;
25829 else if (lookup_block_die (stmt))
25830 /* If we already have a DIE then it was filled early. Meanwhile
25831 we might have pruned all BLOCK_VARS as optimized out, but we
25832 still want to generate high/low PC attributes, so output it. */
25833 must_output_die = 1;
25834 else if (TREE_USED (stmt)
25835 || TREE_ASM_WRITTEN (stmt))
25836 {
25837 /* Determine if this block directly contains any "significant"
25838 local declarations which we will need to output DIEs for. */
25839 if (debug_info_level > DINFO_LEVEL_TERSE)
25840 {
25841 /* We are not in terse mode so any local declaration that
25842 is not ignored for debug purposes counts as being a
25843 "significant" one. */
25844 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25845 must_output_die = 1;
25846 else
25847 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25848 if (!DECL_IGNORED_P (var))
25849 {
25850 must_output_die = 1;
25851 break;
25852 }
25853 }
25854 else if (!dwarf2out_ignore_block (stmt))
25855 must_output_die = 1;
25856 }
25857
25858 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25859 DIE for any block which contains no significant local declarations at
25860 all. Rather, in such cases we just call `decls_for_scope' so that any
25861 needed Dwarf info for any sub-blocks will get properly generated. Note
25862 that in terse mode, our definition of what constitutes a "significant"
25863 local declaration gets restricted to include only inlined function
25864 instances and local (nested) function definitions. */
25865 if (must_output_die)
25866 {
25867 if (inlined_func)
25868 gen_inlined_subroutine_die (stmt, context_die);
25869 else
25870 gen_lexical_block_die (stmt, context_die);
25871 }
25872 else
25873 decls_for_scope (stmt, context_die);
25874 }
25875
25876 /* Process variable DECL (or variable with origin ORIGIN) within
25877 block STMT and add it to CONTEXT_DIE. */
25878 static void
25879 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25880 {
25881 dw_die_ref die;
25882 tree decl_or_origin = decl ? decl : origin;
25883
25884 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25885 die = lookup_decl_die (decl_or_origin);
25886 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25887 {
25888 if (TYPE_DECL_IS_STUB (decl_or_origin))
25889 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25890 else
25891 die = lookup_decl_die (decl_or_origin);
25892 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25893 if (! die && ! early_dwarf)
25894 return;
25895 }
25896 else
25897 die = NULL;
25898
25899 /* Avoid creating DIEs for local typedefs and concrete static variables that
25900 will only be pruned later. */
25901 if ((origin || decl_ultimate_origin (decl))
25902 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25903 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25904 {
25905 origin = decl_ultimate_origin (decl_or_origin);
25906 if (decl && VAR_P (decl) && die != NULL)
25907 {
25908 die = lookup_decl_die (origin);
25909 if (die != NULL)
25910 equate_decl_number_to_die (decl, die);
25911 }
25912 return;
25913 }
25914
25915 if (die != NULL && die->die_parent == NULL)
25916 add_child_die (context_die, die);
25917 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25918 {
25919 if (early_dwarf)
25920 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25921 stmt, context_die);
25922 }
25923 else
25924 {
25925 if (decl && DECL_P (decl))
25926 {
25927 die = lookup_decl_die (decl);
25928
25929 /* Early created DIEs do not have a parent as the decls refer
25930 to the function as DECL_CONTEXT rather than the BLOCK. */
25931 if (die && die->die_parent == NULL)
25932 {
25933 gcc_assert (in_lto_p);
25934 add_child_die (context_die, die);
25935 }
25936 }
25937
25938 gen_decl_die (decl, origin, NULL, context_die);
25939 }
25940 }
25941
25942 /* Generate all of the decls declared within a given scope and (recursively)
25943 all of its sub-blocks. */
25944
25945 static void
25946 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25947 {
25948 tree decl;
25949 unsigned int i;
25950 tree subblocks;
25951
25952 /* Ignore NULL blocks. */
25953 if (stmt == NULL_TREE)
25954 return;
25955
25956 /* Output the DIEs to represent all of the data objects and typedefs
25957 declared directly within this block but not within any nested
25958 sub-blocks. Also, nested function and tag DIEs have been
25959 generated with a parent of NULL; fix that up now. We don't
25960 have to do this if we're at -g1. */
25961 if (debug_info_level > DINFO_LEVEL_TERSE)
25962 {
25963 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25964 process_scope_var (stmt, decl, NULL_TREE, context_die);
25965 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25966 origin - avoid doing this twice as we have no good way to see
25967 if we've done it once already. */
25968 if (! early_dwarf)
25969 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25970 {
25971 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25972 if (decl == current_function_decl)
25973 /* Ignore declarations of the current function, while they
25974 are declarations, gen_subprogram_die would treat them
25975 as definitions again, because they are equal to
25976 current_function_decl and endlessly recurse. */;
25977 else if (TREE_CODE (decl) == FUNCTION_DECL)
25978 process_scope_var (stmt, decl, NULL_TREE, context_die);
25979 else
25980 process_scope_var (stmt, NULL_TREE, decl, context_die);
25981 }
25982 }
25983
25984 /* Even if we're at -g1, we need to process the subblocks in order to get
25985 inlined call information. */
25986
25987 /* Output the DIEs to represent all sub-blocks (and the items declared
25988 therein) of this block. */
25989 if (recurse)
25990 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25991 subblocks != NULL;
25992 subblocks = BLOCK_CHAIN (subblocks))
25993 gen_block_die (subblocks, context_die);
25994 }
25995
25996 /* Is this a typedef we can avoid emitting? */
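/* A hedged C++ illustration (not from the source): for

        struct S { int i; };

   the front end injects an artificial member TYPE_DECL naming S inside S
   (the injected-class-name); a DW_TAG_typedef for it would merely repeat the
   DW_TAG_structure_type's name, so it is treated as redundant here, as are
   TYPE_DECL stubs for tagged types.  */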
25997
25998 static bool
25999 is_redundant_typedef (const_tree decl)
26000 {
26001 if (TYPE_DECL_IS_STUB (decl))
26002 return true;
26003
26004 if (DECL_ARTIFICIAL (decl)
26005 && DECL_CONTEXT (decl)
26006 && is_tagged_type (DECL_CONTEXT (decl))
26007 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26008 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26009 /* Also ignore the artificial member typedef for the class name. */
26010 return true;
26011
26012 return false;
26013 }
26014
26015 /* Return TRUE if TYPE is a typedef that names a type for linkage
26016 purposes. This kind of typedefs is produced by the C++ FE for
26017 constructs like:
26018
26019 typedef struct {...} foo;
26020
26021 In that case, there is no typedef variant type produced for foo.
26022 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26023 struct type. */
26024
26025 static bool
26026 is_naming_typedef_decl (const_tree decl)
26027 {
26028 if (decl == NULL_TREE
26029 || TREE_CODE (decl) != TYPE_DECL
26030 || DECL_NAMELESS (decl)
26031 || !is_tagged_type (TREE_TYPE (decl))
26032 || DECL_IS_BUILTIN (decl)
26033 || is_redundant_typedef (decl)
26034 /* It looks like Ada produces TYPE_DECLs that are very similar
26035 to C++ naming typedefs but that have different
26036 semantics. Let's be specific to C++ for now. */
26037 || !is_cxx (decl))
26038 return FALSE;
26039
26040 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26041 && TYPE_NAME (TREE_TYPE (decl)) == decl
26042 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26043 != TYPE_NAME (TREE_TYPE (decl))));
26044 }
26045
26046 /* Looks up the DIE for a context. */
26047
26048 static inline dw_die_ref
26049 lookup_context_die (tree context)
26050 {
26051 if (context)
26052 {
26053 /* Find die that represents this context. */
26054 if (TYPE_P (context))
26055 {
26056 context = TYPE_MAIN_VARIANT (context);
26057 dw_die_ref ctx = lookup_type_die (context);
26058 if (!ctx)
26059 return NULL;
26060 return strip_naming_typedef (context, ctx);
26061 }
26062 else
26063 return lookup_decl_die (context);
26064 }
26065 return comp_unit_die ();
26066 }
26067
26068 /* Returns the DIE for a context. */
26069
26070 static inline dw_die_ref
26071 get_context_die (tree context)
26072 {
26073 if (context)
26074 {
26075 /* Find die that represents this context. */
26076 if (TYPE_P (context))
26077 {
26078 context = TYPE_MAIN_VARIANT (context);
26079 return strip_naming_typedef (context, force_type_die (context));
26080 }
26081 else
26082 return force_decl_die (context);
26083 }
26084 return comp_unit_die ();
26085 }
26086
26087 /* Returns the DIE for decl. A DIE will always be returned. */
26088
26089 static dw_die_ref
26090 force_decl_die (tree decl)
26091 {
26092 dw_die_ref decl_die;
26093 unsigned saved_external_flag;
26094 tree save_fn = NULL_TREE;
26095 decl_die = lookup_decl_die (decl);
26096 if (!decl_die)
26097 {
26098 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26099
26100 decl_die = lookup_decl_die (decl);
26101 if (decl_die)
26102 return decl_die;
26103
26104 switch (TREE_CODE (decl))
26105 {
26106 case FUNCTION_DECL:
26107 /* Clear current_function_decl, so that gen_subprogram_die thinks
26108 that this is a declaration. At this point, we just want to force
26109 declaration die. */
26110 save_fn = current_function_decl;
26111 current_function_decl = NULL_TREE;
26112 gen_subprogram_die (decl, context_die);
26113 current_function_decl = save_fn;
26114 break;
26115
26116 case VAR_DECL:
26117 /* Set external flag to force declaration die. Restore it after
26118 gen_decl_die() call. */
26119 saved_external_flag = DECL_EXTERNAL (decl);
26120 DECL_EXTERNAL (decl) = 1;
26121 gen_decl_die (decl, NULL, NULL, context_die);
26122 DECL_EXTERNAL (decl) = saved_external_flag;
26123 break;
26124
26125 case NAMESPACE_DECL:
26126 if (dwarf_version >= 3 || !dwarf_strict)
26127 dwarf2out_decl (decl);
26128 else
26129 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26130 decl_die = comp_unit_die ();
26131 break;
26132
26133 case TRANSLATION_UNIT_DECL:
26134 decl_die = comp_unit_die ();
26135 break;
26136
26137 default:
26138 gcc_unreachable ();
26139 }
26140
26141 /* We should be able to find the DIE now. */
26142 if (!decl_die)
26143 decl_die = lookup_decl_die (decl);
26144 gcc_assert (decl_die);
26145 }
26146
26147 return decl_die;
26148 }
26149
26150 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26151 always returned. */
26152
26153 static dw_die_ref
26154 force_type_die (tree type)
26155 {
26156 dw_die_ref type_die;
26157
26158 type_die = lookup_type_die (type);
26159 if (!type_die)
26160 {
26161 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26162
26163 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26164 false, context_die);
26165 gcc_assert (type_die);
26166 }
26167 return type_die;
26168 }
26169
26170 /* Force out any required namespaces to be able to output DECL,
26171 and return the new context_die for it, if it's changed. */
26172
26173 static dw_die_ref
26174 setup_namespace_context (tree thing, dw_die_ref context_die)
26175 {
26176 tree context = (DECL_P (thing)
26177 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26178 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26179 /* Force out the namespace. */
26180 context_die = force_decl_die (context);
26181
26182 return context_die;
26183 }
26184
26185 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26186 type) within its namespace, if appropriate.
26187
26188 For compatibility with older debuggers, namespace DIEs only contain
26189 declarations; all definitions are emitted at CU scope, with
26190 DW_AT_specification pointing to the declaration (like with class
26191 members). */
26192
26193 static dw_die_ref
26194 declare_in_namespace (tree thing, dw_die_ref context_die)
26195 {
26196 dw_die_ref ns_context;
26197
26198 if (debug_info_level <= DINFO_LEVEL_TERSE)
26199 return context_die;
26200
26201 /* External declarations in the local scope only need to be emitted
26202 once, not once in the namespace and once in the scope.
26203
26204 This avoids declaring the `extern' below in the
26205 namespace DIE as well as in the innermost scope:
26206
26207 namespace S
26208 {
26209 int i=5;
26210 int foo()
26211 {
26212 int i=8;
26213 extern int i;
26214 return i;
26215 }
26216 }
26217 */
26218 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26219 return context_die;
26220
26221 /* If this decl is from an inlined function, then don't try to emit it in its
26222 namespace, as we will get confused. It would have already been emitted
26223 when the abstract instance of the inline function was emitted anyway. */
26224 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26225 return context_die;
26226
26227 ns_context = setup_namespace_context (thing, context_die);
26228
26229 if (ns_context != context_die)
26230 {
26231 if (is_fortran () || is_dlang ())
26232 return ns_context;
26233 if (DECL_P (thing))
26234 gen_decl_die (thing, NULL, NULL, ns_context);
26235 else
26236 gen_type_die (thing, ns_context);
26237 }
26238 return context_die;
26239 }
26240
26241 /* Generate a DIE for a namespace or namespace alias. */
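/* A hedged C++ illustration (identifiers invented): given

        namespace N { int v; }
        namespace M = N;

   N gets a DW_TAG_namespace DIE (DW_TAG_module for Fortran/D modules), while
   the alias M gets a DW_TAG_imported_declaration whose DW_AT_import points
   at N's DIE.  */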
26242
26243 static void
26244 gen_namespace_die (tree decl, dw_die_ref context_die)
26245 {
26246 dw_die_ref namespace_die;
26247
26248 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26249 they are an alias of. */
26250 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26251 {
26252 /* Output a real namespace or module. */
26253 context_die = setup_namespace_context (decl, comp_unit_die ());
26254 namespace_die = new_die (is_fortran () || is_dlang ()
26255 ? DW_TAG_module : DW_TAG_namespace,
26256 context_die, decl);
26257 /* For Fortran modules defined in a different CU, don't add src coords. */
26258 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26259 {
26260 const char *name = dwarf2_name (decl, 0);
26261 if (name)
26262 add_name_attribute (namespace_die, name);
26263 }
26264 else
26265 add_name_and_src_coords_attributes (namespace_die, decl);
26266 if (DECL_EXTERNAL (decl))
26267 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26268 equate_decl_number_to_die (decl, namespace_die);
26269 }
26270 else
26271 {
26272 /* Output a namespace alias. */
26273
26274 /* Force out the namespace we are an alias of, if necessary. */
26275 dw_die_ref origin_die
26276 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26277
26278 if (DECL_FILE_SCOPE_P (decl)
26279 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26280 context_die = setup_namespace_context (decl, comp_unit_die ());
26281 /* Now create the namespace alias DIE. */
26282 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26283 add_name_and_src_coords_attributes (namespace_die, decl);
26284 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26285 equate_decl_number_to_die (decl, namespace_die);
26286 }
26287 if ((dwarf_version >= 5 || !dwarf_strict)
26288 && lang_hooks.decls.decl_dwarf_attribute (decl,
26289 DW_AT_export_symbols) == 1)
26290 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26291
26292 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26293 if (want_pubnames ())
26294 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26295 }
26296
26297 /* Generate Dwarf debug information for a decl described by DECL.
26298 The return value is currently only meaningful for PARM_DECLs,
26299 for all other decls it returns NULL.
26300
26301 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26302 It can be NULL otherwise. */
26303
26304 static dw_die_ref
26305 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26306 dw_die_ref context_die)
26307 {
26308 tree decl_or_origin = decl ? decl : origin;
26309 tree class_origin = NULL, ultimate_origin;
26310
26311 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26312 return NULL;
26313
26314 switch (TREE_CODE (decl_or_origin))
26315 {
26316 case ERROR_MARK:
26317 break;
26318
26319 case CONST_DECL:
26320 if (!is_fortran () && !is_ada () && !is_dlang ())
26321 {
26322 /* The individual enumerators of an enum type get output when we output
26323 the Dwarf representation of the relevant enum type itself. */
26324 break;
26325 }
26326
26327 /* Emit its type. */
26328 gen_type_die (TREE_TYPE (decl), context_die);
26329
26330 /* And its containing namespace. */
26331 context_die = declare_in_namespace (decl, context_die);
26332
26333 gen_const_die (decl, context_die);
26334 break;
26335
26336 case FUNCTION_DECL:
26337 #if 0
26338 /* FIXME */
26339 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26340 on local redeclarations of global functions. That seems broken. */
26341 if (current_function_decl != decl)
26342 /* This is only a declaration. */;
26343 #endif
26344
26345 /* We should have abstract copies already and should not generate
26346 stray type DIEs in late LTO dumping. */
26347 if (! early_dwarf)
26348 ;
26349
26350 /* If we're emitting a clone, emit info for the abstract instance. */
26351 else if (origin || DECL_ORIGIN (decl) != decl)
26352 dwarf2out_abstract_function (origin
26353 ? DECL_ORIGIN (origin)
26354 : DECL_ABSTRACT_ORIGIN (decl));
26355
26356 /* If we're emitting a possibly inlined function emit it as
26357 abstract instance. */
26358 else if (cgraph_function_possibly_inlined_p (decl)
26359 && ! DECL_ABSTRACT_P (decl)
26360 && ! class_or_namespace_scope_p (context_die)
26361 /* dwarf2out_abstract_function won't emit a die if this is just
26362 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26363 that case, because that works only if we have a die. */
26364 && DECL_INITIAL (decl) != NULL_TREE)
26365 dwarf2out_abstract_function (decl);
26366
26367 /* Otherwise we're emitting the primary DIE for this decl. */
26368 else if (debug_info_level > DINFO_LEVEL_TERSE)
26369 {
26370 /* Before we describe the FUNCTION_DECL itself, make sure that we
26371 have its containing type. */
26372 if (!origin)
26373 origin = decl_class_context (decl);
26374 if (origin != NULL_TREE)
26375 gen_type_die (origin, context_die);
26376
26377 /* And its return type. */
26378 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26379
26380 /* And its virtual context. */
26381 if (DECL_VINDEX (decl) != NULL_TREE)
26382 gen_type_die (DECL_CONTEXT (decl), context_die);
26383
26384 /* Make sure we have a member DIE for decl. */
26385 if (origin != NULL_TREE)
26386 gen_type_die_for_member (origin, decl, context_die);
26387
26388 /* And its containing namespace. */
26389 context_die = declare_in_namespace (decl, context_die);
26390 }
26391
26392 /* Now output a DIE to represent the function itself. */
26393 if (decl)
26394 gen_subprogram_die (decl, context_die);
26395 break;
26396
26397 case TYPE_DECL:
26398 /* If we are in terse mode, don't generate any DIEs to represent any
26399 actual typedefs. */
26400 if (debug_info_level <= DINFO_LEVEL_TERSE)
26401 break;
26402
26403 /* In the special case of a TYPE_DECL node representing the declaration
26404 of some type tag, if the given TYPE_DECL is marked as having been
26405 instantiated from some other (original) TYPE_DECL node (e.g. one which
26406 was generated within the original definition of an inline function) we
26407 used to generate a special (abbreviated) DW_TAG_structure_type,
26408 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26409 should actually be referencing those DIEs, as variable DIEs with that
26410 type would already be emitted in the abstract origin, so it was always
26411 removed during unused type pruning. Don't add anything in this
26412 case. */
26413 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26414 break;
26415
26416 if (is_redundant_typedef (decl))
26417 gen_type_die (TREE_TYPE (decl), context_die);
26418 else
26419 /* Output a DIE to represent the typedef itself. */
26420 gen_typedef_die (decl, context_die);
26421 break;
26422
26423 case LABEL_DECL:
26424 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26425 gen_label_die (decl, context_die);
26426 break;
26427
26428 case VAR_DECL:
26429 case RESULT_DECL:
26430 /* If we are in terse mode, don't generate any DIEs to represent any
26431 variable declarations or definitions. */
26432 if (debug_info_level <= DINFO_LEVEL_TERSE)
26433 break;
26434
26435 /* Avoid generating stray type DIEs during late dwarf dumping.
26436 All types have been dumped early. */
26437 if (early_dwarf
26438 /* ??? But in LTRANS we cannot annotate early created variably
26439 modified type DIEs without copying them and adjusting all
26440 references to them. Dump them again as happens for inlining
26441 which copies both the decl and the types. */
26442 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26443 in VLA bound information for example. */
26444 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26445 current_function_decl)))
26446 {
26447 /* Output any DIEs that are needed to specify the type of this data
26448 object. */
26449 if (decl_by_reference_p (decl_or_origin))
26450 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26451 else
26452 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26453 }
26454
26455 if (early_dwarf)
26456 {
26457 /* And its containing type. */
26458 class_origin = decl_class_context (decl_or_origin);
26459 if (class_origin != NULL_TREE)
26460 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26461
26462 /* And its containing namespace. */
26463 context_die = declare_in_namespace (decl_or_origin, context_die);
26464 }
26465
26466 /* Now output the DIE to represent the data object itself. This gets
26467 complicated because of the possibility that the VAR_DECL really
26468 represents an inlined instance of a formal parameter for an inline
26469 function. */
26470 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26471 if (ultimate_origin != NULL_TREE
26472 && TREE_CODE (ultimate_origin) == PARM_DECL)
26473 gen_formal_parameter_die (decl, origin,
26474 true /* Emit name attribute. */,
26475 context_die);
26476 else
26477 gen_variable_die (decl, origin, context_die);
26478 break;
26479
26480 case FIELD_DECL:
26481 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26482 /* Ignore the nameless fields that are used to skip bits but handle C++
26483 anonymous unions and structs. */
26484 if (DECL_NAME (decl) != NULL_TREE
26485 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26486 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26487 {
26488 gen_type_die (member_declared_type (decl), context_die);
26489 gen_field_die (decl, ctx, context_die);
26490 }
26491 break;
26492
26493 case PARM_DECL:
26494 /* Avoid generating stray type DIEs during late dwarf dumping.
26495 All types have been dumped early. */
26496 if (early_dwarf
26497 /* ??? But in LTRANS we cannot annotate early created variably
26498 modified type DIEs without copying them and adjusting all
26499 references to them. Dump them again as happens for inlining
26500 which copies both the decl and the types. */
26501 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26502 in VLA bound information for example. */
26503 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26504 current_function_decl)))
26505 {
26506 if (DECL_BY_REFERENCE (decl_or_origin))
26507 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26508 else
26509 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26510 }
26511 return gen_formal_parameter_die (decl, origin,
26512 true /* Emit name attribute. */,
26513 context_die);
26514
26515 case NAMESPACE_DECL:
26516 if (dwarf_version >= 3 || !dwarf_strict)
26517 gen_namespace_die (decl, context_die);
26518 break;
26519
26520 case IMPORTED_DECL:
26521 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26522 DECL_CONTEXT (decl), context_die);
26523 break;
26524
26525 case NAMELIST_DECL:
26526 gen_namelist_decl (DECL_NAME (decl), context_die,
26527 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26528 break;
26529
26530 default:
26531 /* Probably some frontend-internal decl. Assume we don't care. */
26532 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26533 break;
26534 }
26535
26536 return NULL;
26537 }
26538 \f
26539 /* Output initial debug information for global DECL. Called at the
26540 end of the parsing process.
26541
26542 This is the initial debug generation process. As such, the DIEs
26543 generated may be incomplete. A later debug generation pass
26544 (dwarf2out_late_global_decl) will augment the information generated
26545 in this pass (e.g., with complete location info). */
26546
26547 static void
26548 dwarf2out_early_global_decl (tree decl)
26549 {
26550 set_early_dwarf s;
26551
26552 /* gen_decl_die() will set DECL_ABSTRACT because
26553 cgraph_function_possibly_inlined_p() returns true. This in
26554 turn will cause DW_AT_inline attributes to be set.
26555
26556 This happens because at early dwarf generation, there is no
26557 cgraph information, causing cgraph_function_possibly_inlined_p()
26558 to return true. Trick cgraph_function_possibly_inlined_p()
26559 while we generate dwarf early. */
26560 bool save = symtab->global_info_ready;
26561 symtab->global_info_ready = true;
26562
26563 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26564 other DECLs and they can point to template types or other things
26565 that dwarf2out can't handle when done via dwarf2out_decl. */
26566 if (TREE_CODE (decl) != TYPE_DECL
26567 && TREE_CODE (decl) != PARM_DECL)
26568 {
26569 if (TREE_CODE (decl) == FUNCTION_DECL)
26570 {
26571 tree save_fndecl = current_function_decl;
26572
26573 /* For nested functions, make sure we have DIEs for the parents first
26574 so that all nested DIEs are generated at the proper scope in the
26575 first shot. */
26576 tree context = decl_function_context (decl);
26577 if (context != NULL)
26578 {
26579 dw_die_ref context_die = lookup_decl_die (context);
26580 current_function_decl = context;
26581
26582 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26583 enough so that it lands in its own context. This avoids type
26584 pruning issues later on. */
26585 if (context_die == NULL || is_declaration_die (context_die))
26586 dwarf2out_early_global_decl (context);
26587 }
26588
26589 /* Emit an abstract origin of a function first. This happens
26590 with C++ constructor clones for example and makes
26591 dwarf2out_abstract_function happy which requires the early
26592 DIE of the abstract instance to be present. */
26593 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26594 dw_die_ref origin_die;
26595 if (origin != NULL
26596 /* Do not emit the DIE multiple times but make sure to
26597 process it fully here in case we just saw a declaration. */
26598 && ((origin_die = lookup_decl_die (origin)) == NULL
26599 || is_declaration_die (origin_die)))
26600 {
26601 current_function_decl = origin;
26602 dwarf2out_decl (origin);
26603 }
26604
26605 /* Emit the DIE for decl but avoid doing that multiple times. */
26606 dw_die_ref old_die;
26607 if ((old_die = lookup_decl_die (decl)) == NULL
26608 || is_declaration_die (old_die))
26609 {
26610 current_function_decl = decl;
26611 dwarf2out_decl (decl);
26612 }
26613
26614 current_function_decl = save_fndecl;
26615 }
26616 else
26617 dwarf2out_decl (decl);
26618 }
26619 symtab->global_info_ready = save;
26620 }
26621
26622 /* Return whether EXPR is an expression with the following pattern:
26623 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
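/* A hedged sketch of a match (the address is invented): a decl whose
   DECL_VALUE_EXPR dereferences a constant address, roughly

        *(volatile int *) 0x40021000

   is INDIRECT_REF (NOP_EXPR (INTEGER_CST)) and is accepted, whereas a
   DECL_VALUE_EXPR that dereferences another decl is not.  */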
26624
26625 static bool
26626 is_trivial_indirect_ref (tree expr)
26627 {
26628 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26629 return false;
26630
26631 tree nop = TREE_OPERAND (expr, 0);
26632 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26633 return false;
26634
26635 tree int_cst = TREE_OPERAND (nop, 0);
26636 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26637 }
26638
26639 /* Output debug information for global decl DECL. Called from
26640 toplev.c after compilation proper has finished. */
26641
26642 static void
26643 dwarf2out_late_global_decl (tree decl)
26644 {
26645 /* Fill-in any location information we were unable to determine
26646 on the first pass. */
26647 if (VAR_P (decl))
26648 {
26649 dw_die_ref die = lookup_decl_die (decl);
26650
26651 /* We may have to generate early debug late for LTO in case debug
26652 was not enabled at compile-time or the target doesn't support
26653 the LTO early debug scheme. */
26654 if (! die && in_lto_p)
26655 {
26656 dwarf2out_decl (decl);
26657 die = lookup_decl_die (decl);
26658 }
26659
26660 if (die)
26661 {
26662 /* We get called via the symtab code invoking late_global_decl
26663 for symbols that are optimized out.
26664
26665 Do not add locations for those, except if they have a
26666 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26667 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26668 INDIRECT_REF expression, as this could generate relocations to
26669 text symbols in LTO object files, which is invalid. */
26670 varpool_node *node = varpool_node::get (decl);
26671 if ((! node || ! node->definition)
26672 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26673 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26674 tree_add_const_value_attribute_for_decl (die, decl);
26675 else
26676 add_location_or_const_value_attribute (die, decl, false);
26677 }
26678 }
26679 }
26680
26681 /* Output debug information for type decl DECL. Called from toplev.c
26682 and from language front ends (to record built-in types). */
26683 static void
26684 dwarf2out_type_decl (tree decl, int local)
26685 {
26686 if (!local)
26687 {
26688 set_early_dwarf s;
26689 dwarf2out_decl (decl);
26690 }
26691 }
26692
26693 /* Output debug information for imported module or decl DECL.
26694 NAME is non-NULL name in the lexical block if the decl has been renamed.
26695 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26696 that DECL belongs to.
26697 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
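/* A hedged C++ illustration (identifiers invented): inside a block,

        using namespace N;   // -> DW_TAG_imported_module
        using N::T;          // -> DW_TAG_imported_declaration

   each directive yields a DIE of the indicated tag under LEXICAL_BLOCK_DIE,
   with DW_AT_import referring to the DIE of N or T and DW_AT_decl_file/line
   recording where the directive appeared.  */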
26698 static void
26699 dwarf2out_imported_module_or_decl_1 (tree decl,
26700 tree name,
26701 tree lexical_block,
26702 dw_die_ref lexical_block_die)
26703 {
26704 expanded_location xloc;
26705 dw_die_ref imported_die = NULL;
26706 dw_die_ref at_import_die;
26707
26708 if (TREE_CODE (decl) == IMPORTED_DECL)
26709 {
26710 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26711 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26712 gcc_assert (decl);
26713 }
26714 else
26715 xloc = expand_location (input_location);
26716
26717 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26718 {
26719 at_import_die = force_type_die (TREE_TYPE (decl));
26720 /* For namespace N { typedef void T; } using N::T; base_type_die
26721 returns NULL, but DW_TAG_imported_declaration requires
26722 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26723 if (!at_import_die)
26724 {
26725 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26726 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26727 at_import_die = lookup_type_die (TREE_TYPE (decl));
26728 gcc_assert (at_import_die);
26729 }
26730 }
26731 else
26732 {
26733 at_import_die = lookup_decl_die (decl);
26734 if (!at_import_die)
26735 {
26736 /* If we're trying to avoid duplicate debug info, we may not have
26737 emitted the member decl for this field. Emit it now. */
26738 if (TREE_CODE (decl) == FIELD_DECL)
26739 {
26740 tree type = DECL_CONTEXT (decl);
26741
26742 if (TYPE_CONTEXT (type)
26743 && TYPE_P (TYPE_CONTEXT (type))
26744 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26745 DINFO_USAGE_DIR_USE))
26746 return;
26747 gen_type_die_for_member (type, decl,
26748 get_context_die (TYPE_CONTEXT (type)));
26749 }
26750 if (TREE_CODE (decl) == NAMELIST_DECL)
26751 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26752 get_context_die (DECL_CONTEXT (decl)),
26753 NULL_TREE);
26754 else
26755 at_import_die = force_decl_die (decl);
26756 }
26757 }
26758
26759 if (TREE_CODE (decl) == NAMESPACE_DECL)
26760 {
26761 if (dwarf_version >= 3 || !dwarf_strict)
26762 imported_die = new_die (DW_TAG_imported_module,
26763 lexical_block_die,
26764 lexical_block);
26765 else
26766 return;
26767 }
26768 else
26769 imported_die = new_die (DW_TAG_imported_declaration,
26770 lexical_block_die,
26771 lexical_block);
26772
26773 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26774 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26775 if (debug_column_info && xloc.column)
26776 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26777 if (name)
26778 add_AT_string (imported_die, DW_AT_name,
26779 IDENTIFIER_POINTER (name));
26780 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26781 }
26782
26783 /* Output debug information for imported module or decl DECL.
26784 NAME is non-NULL name in context if the decl has been renamed.
26785 CHILD is true if decl is one of the renamed decls as part of
26786 importing whole module.
26787 IMPLICIT is set if this hook is called for an implicit import
26788 such as inline namespace. */
26789
26790 static void
26791 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26792 bool child, bool implicit)
26793 {
26794 /* dw_die_ref at_import_die; */
26795 dw_die_ref scope_die;
26796
26797 if (debug_info_level <= DINFO_LEVEL_TERSE)
26798 return;
26799
26800 gcc_assert (decl);
26801
26802 /* For DWARF 5, just DW_AT_export_symbols on the DW_TAG_namespace should
26803 be enough. For DWARF 4 and older, even if we emit DW_AT_export_symbols
26804 as an extension, add the implicit DW_TAG_imported_module anyway for the
26805 benefit of consumers unaware of DW_AT_export_symbols. */
26806 if (implicit
26807 && dwarf_version >= 5
26808 && lang_hooks.decls.decl_dwarf_attribute (decl,
26809 DW_AT_export_symbols) == 1)
26810 return;
26811
26812 set_early_dwarf s;
26813
26814 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26815 two DIEs: the DIE of the decl being imported and the scope DIE. First,
26816 get the DIE for the decl itself. */
26817
26818 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
26819 module or decl. If no DIE is found for a non-global, force a new one. */
26820 if (context
26821 && TYPE_P (context)
26822 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26823 return;
26824
26825 scope_die = get_context_die (context);
26826
26827 if (child)
26828 {
26829 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26830 there is nothing we can do, here. */
26831 if (dwarf_version < 3 && dwarf_strict)
26832 return;
26833
26834 gcc_assert (scope_die->die_child);
26835 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26836 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26837 scope_die = scope_die->die_child;
26838 }
26839
26840 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26841 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26842 }
26843
26844 /* Output debug information for namelists. */
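/* A hedged Fortran illustration (identifiers invented): for

        NAMELIST /CONFIG/ A, B

   this emits a DW_TAG_namelist DIE named CONFIG with one DW_TAG_namelist_item
   child per member, each referring to the DIE of the corresponding variable
   through DW_AT_namelist_items.  */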
26845
26846 static dw_die_ref
26847 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26848 {
26849 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26850 tree value;
26851 unsigned i;
26852
26853 if (debug_info_level <= DINFO_LEVEL_TERSE)
26854 return NULL;
26855
26856 gcc_assert (scope_die != NULL);
26857 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26858 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26859
26860 /* If there are no item_decls, we have a nondefining namelist, e.g.
26861 with USE association; hence, set DW_AT_declaration. */
26862 if (item_decls == NULL_TREE)
26863 {
26864 add_AT_flag (nml_die, DW_AT_declaration, 1);
26865 return nml_die;
26866 }
26867
26868 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26869 {
26870 nml_item_ref_die = lookup_decl_die (value);
26871 if (!nml_item_ref_die)
26872 nml_item_ref_die = force_decl_die (value);
26873
26874 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26875 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26876 }
26877 return nml_die;
26878 }
26879
26880
26881 /* Write the debugging output for DECL. */
26882
26883 static void
26884 dwarf2out_decl (tree decl)
26885 {
26886 dw_die_ref context_die = comp_unit_die ();
26887
26888 switch (TREE_CODE (decl))
26889 {
26890 case ERROR_MARK:
26891 return;
26892
26893 case FUNCTION_DECL:
26894 /* If we're a nested function, initially use a parent of NULL; if we're
26895 a plain function, this will be fixed up in decls_for_scope. If
26896 we're a method, it will be ignored, since we already have a DIE.
26897 Avoid doing this late though since clones of class methods may
26898 otherwise end up in limbo and create type DIEs late. */
26899 if (early_dwarf
26900 && decl_function_context (decl)
26901 /* But if we're in terse mode, we don't care about scope. */
26902 && debug_info_level > DINFO_LEVEL_TERSE)
26903 context_die = NULL;
26904 break;
26905
26906 case VAR_DECL:
26907 /* For local statics, look up the proper context DIE. */
26908 if (local_function_static (decl))
26909 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26910
26911 /* If we are in terse mode, don't generate any DIEs to represent any
26912 variable declarations or definitions. */
26913 if (debug_info_level <= DINFO_LEVEL_TERSE)
26914 return;
26915 break;
26916
26917 case CONST_DECL:
26918 if (debug_info_level <= DINFO_LEVEL_TERSE)
26919 return;
26920 if (!is_fortran () && !is_ada () && !is_dlang ())
26921 return;
26922 if (TREE_STATIC (decl) && decl_function_context (decl))
26923 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26924 break;
26925
26926 case NAMESPACE_DECL:
26927 case IMPORTED_DECL:
26928 if (debug_info_level <= DINFO_LEVEL_TERSE)
26929 return;
26930 if (lookup_decl_die (decl) != NULL)
26931 return;
26932 break;
26933
26934 case TYPE_DECL:
26935 /* Don't emit stubs for types unless they are needed by other DIEs. */
26936 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26937 return;
26938
26939 /* Don't bother trying to generate any DIEs to represent any of the
26940 normal built-in types for the language we are compiling. */
26941 if (DECL_IS_BUILTIN (decl))
26942 return;
26943
26944 /* If we are in terse mode, don't generate any DIEs for types. */
26945 if (debug_info_level <= DINFO_LEVEL_TERSE)
26946 return;
26947
26948 /* If we're a function-scope tag, initially use a parent of NULL;
26949 this will be fixed up in decls_for_scope. */
26950 if (decl_function_context (decl))
26951 context_die = NULL;
26952
26953 break;
26954
26955 case NAMELIST_DECL:
26956 break;
26957
26958 default:
26959 return;
26960 }
26961
26962 gen_decl_die (decl, NULL, NULL, context_die);
26963
26964 if (flag_checking)
26965 {
26966 dw_die_ref die = lookup_decl_die (decl);
26967 if (die)
26968 check_die (die);
26969 }
26970 }
26971
26972 /* Write the debugging output for DECL. */
26973
26974 static void
26975 dwarf2out_function_decl (tree decl)
26976 {
26977 dwarf2out_decl (decl);
26978 call_arg_locations = NULL;
26979 call_arg_loc_last = NULL;
26980 call_site_count = -1;
26981 tail_call_site_count = -1;
26982 decl_loc_table->empty ();
26983 cached_dw_loc_list_table->empty ();
26984 }
26985
26986 /* Output a marker (i.e. a label) for the beginning of the generated code for
26987 a lexical block. */
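/* A hedged sketch of the output (exact spelling is target-dependent): for
   block number 3 this emits a label along the lines of

        .LBB3:

   at the start of the block's code, assuming the usual BLOCK_BEGIN_LABEL
   prefix; dwarf2out_end_block emits the matching .LBE3 end label.  */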
26988
26989 static void
26990 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26991 unsigned int blocknum)
26992 {
26993 switch_to_section (current_function_section ());
26994 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26995 }
26996
26997 /* Output a marker (i.e. a label) for the end of the generated code for a
26998 lexical block. */
26999
27000 static void
27001 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
27002 {
27003 switch_to_section (current_function_section ());
27004 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27005 }
27006
27007 /* Returns true if it is appropriate not to emit any debugging
27008 information for BLOCK, because it doesn't contain any instructions.
27009
27010 Don't allow this for blocks with nested functions or local classes
27011 as we would end up with orphans, and in the presence of scheduling
27012 we may end up calling them anyway. */
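/* A hedged illustration (not from the source): a block such as

        { int tmp = 0; }

   that contributes no instructions may be ignored, whereas

        { struct local { int i; }; ... }

   may not, since the DIE for the local class would otherwise be orphaned.  */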
27013
27014 static bool
27015 dwarf2out_ignore_block (const_tree block)
27016 {
27017 tree decl;
27018 unsigned int i;
27019
27020 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27021 if (TREE_CODE (decl) == FUNCTION_DECL
27022 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27023 return false;
27024 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27025 {
27026 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27027 if (TREE_CODE (decl) == FUNCTION_DECL
27028 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27029 return false;
27030 }
27031
27032 return true;
27033 }
27034
27035 /* Hash table routines for file_hash. */
27036
27037 bool
27038 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27039 {
27040 return filename_cmp (p1->filename, p2) == 0;
27041 }
27042
27043 hashval_t
27044 dwarf_file_hasher::hash (dwarf_file_data *p)
27045 {
27046 return htab_hash_string (p->filename);
27047 }
27048
27049 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27050 dwarf2out.c) and return its "index". The index of each (known) filename is
27051 just a unique number which is associated with only that one filename. We
27052 need such numbers for the sake of generating labels (in the .debug_sfnames
27053 section) and references to those files numbers (in the .debug_srcinfo
27054 and .debug_macinfo sections). If the filename given as an argument is not
27055 found in our current list, add it to the list and assign it the next
27056 available unique index number. */
27057
27058 static struct dwarf_file_data *
27059 lookup_filename (const char *file_name)
27060 {
27061 struct dwarf_file_data * created;
27062
27063 if (!file_name)
27064 return NULL;
27065
27066 dwarf_file_data **slot
27067 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27068 INSERT);
27069 if (*slot)
27070 return *slot;
27071
27072 created = ggc_alloc<dwarf_file_data> ();
27073 created->filename = file_name;
27074 created->emitted_number = 0;
27075 *slot = created;
27076 return created;
27077 }
27078
27079 /* If the assembler will construct the file table, then translate the compiler
27080 internal file table number into the assembler file table number, and emit
27081 a .file directive if we haven't already emitted one yet. The file table
27082 numbers are different because we prune debug info for unused variables and
27083 types, which may include filenames. */
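/* A hedged sketch of the effect (file name and number invented): the first
   time a file is referenced when the assembler builds the line table, this
   might emit

        .file 2 "src/foo.c"

   and every later reference just reuses the cached emitted_number 2.  */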
27084
27085 static int
27086 maybe_emit_file (struct dwarf_file_data * fd)
27087 {
27088 if (! fd->emitted_number)
27089 {
27090 if (last_emitted_file)
27091 fd->emitted_number = last_emitted_file->emitted_number + 1;
27092 else
27093 fd->emitted_number = 1;
27094 last_emitted_file = fd;
27095
27096 if (output_asm_line_debug_info ())
27097 {
27098 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27099 output_quoted_string (asm_out_file,
27100 remap_debug_filename (fd->filename));
27101 fputc ('\n', asm_out_file);
27102 }
27103 }
27104
27105 return fd->emitted_number;
27106 }
27107
27108 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27109 That generation should happen after function debug info has been
27110 generated. The value of the attribute is the constant value of ARG. */
27111
27112 static void
27113 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27114 {
27115 die_arg_entry entry;
27116
27117 if (!die || !arg)
27118 return;
27119
27120 gcc_assert (early_dwarf);
27121
27122 if (!tmpl_value_parm_die_table)
27123 vec_alloc (tmpl_value_parm_die_table, 32);
27124
27125 entry.die = die;
27126 entry.arg = arg;
27127 vec_safe_push (tmpl_value_parm_die_table, entry);
27128 }
27129
27130 /* Return TRUE if T is an instance of generic type, FALSE
27131 otherwise. */
27132
27133 static bool
27134 generic_type_p (tree t)
27135 {
27136 if (t == NULL_TREE || !TYPE_P (t))
27137 return false;
27138 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27139 }
27140
27141 /* Schedule the generation of the generic parameter DIEs for the
27142 instance of generic type T. The proper generation itself is later
27143 done by gen_scheduled_generic_parms_dies. */
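/* A hedged C++ illustration (identifiers invented): instantiating

        template <typename T> struct vec {};
        vec<int> v;

   makes vec<int> a generic type instance, so it is queued here and its
   template parameter DIEs (e.g. a DW_TAG_template_type_parameter for T bound
   to int) are generated later, once the CU's types have been laid out.  */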
27144
27145 static void
27146 schedule_generic_params_dies_gen (tree t)
27147 {
27148 if (!generic_type_p (t))
27149 return;
27150
27151 gcc_assert (early_dwarf);
27152
27153 if (!generic_type_instances)
27154 vec_alloc (generic_type_instances, 256);
27155
27156 vec_safe_push (generic_type_instances, t);
27157 }
27158
27159 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27160 by append_entry_to_tmpl_value_parm_die_table. This function must
27161 be called after function DIEs have been generated. */
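/* A hedged C++ illustration (identifiers invented): for

        template <int N> struct A {};
        A<3> a;

   the DIE of the value parameter N in A<3> was scheduled above, and here it
   receives DW_AT_const_value 3, or, failing that, possibly a DW_AT_location
   computed at late finish.  */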
27162
27163 static void
27164 gen_remaining_tmpl_value_param_die_attribute (void)
27165 {
27166 if (tmpl_value_parm_die_table)
27167 {
27168 unsigned i, j;
27169 die_arg_entry *e;
27170
27171 /* We do this in two phases - first get the cases we can
27172 handle during early-finish, preserving those we cannot
27173 (containing symbolic constants where we don't yet know
27174 whether we are going to output the referenced symbols).
27175 For those we try again at late-finish. */
27176 j = 0;
27177 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27178 {
27179 if (!e->die->removed
27180 && !tree_add_const_value_attribute (e->die, e->arg))
27181 {
27182 dw_loc_descr_ref loc = NULL;
27183 if (! early_dwarf
27184 && (dwarf_version >= 5 || !dwarf_strict))
27185 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27186 if (loc)
27187 add_AT_loc (e->die, DW_AT_location, loc);
27188 else
27189 (*tmpl_value_parm_die_table)[j++] = *e;
27190 }
27191 }
27192 tmpl_value_parm_die_table->truncate (j);
27193 }
27194 }
27195
27196 /* Generate generic parameter DIEs for instances of generic types
27197 that have been previously scheduled by
27198 schedule_generic_params_dies_gen. This function must be called
27199 after all the types of the CU have been laid out. */
27200
27201 static void
27202 gen_scheduled_generic_parms_dies (void)
27203 {
27204 unsigned i;
27205 tree t;
27206
27207 if (!generic_type_instances)
27208 return;
27209
27210 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27211 if (COMPLETE_TYPE_P (t))
27212 gen_generic_params_dies (t);
27213
27214 generic_type_instances = NULL;
27215 }
27216
27217
27218 /* Replace DW_AT_name for the decl with name. */
27219
27220 static void
27221 dwarf2out_set_name (tree decl, tree name)
27222 {
27223 dw_die_ref die;
27224 dw_attr_node *attr;
27225 const char *dname;
27226
27227 die = TYPE_SYMTAB_DIE (decl);
27228 if (!die)
27229 return;
27230
27231 dname = dwarf2_name (name, 0);
27232 if (!dname)
27233 return;
27234
27235 attr = get_AT (die, DW_AT_name);
27236 if (attr)
27237 {
27238 struct indirect_string_node *node;
27239
27240 node = find_AT_string (dname);
27241 /* Replace the string. */
27242 attr->dw_attr_val.v.val_str = node;
27243 }
27244
27245 else
27246 add_name_attribute (die, dname);
27247 }
27248
27249 /* True if before or during processing of the first function being emitted. */
27250 static bool in_first_function_p = true;
27251 /* True if loc_note during dwarf2out_var_location call might still be
27252 before first real instruction at address equal to .Ltext0. */
27253 static bool maybe_at_text_label_p = true;
27254 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27255 static unsigned int first_loclabel_num_not_at_text_label;
27256
27257 /* Look ahead for a real insn, or for a begin stmt marker. */
27258
27259 static rtx_insn *
27260 dwarf2out_next_real_insn (rtx_insn *loc_note)
27261 {
27262 rtx_insn *next_real = NEXT_INSN (loc_note);
27263
27264 while (next_real)
27265 if (INSN_P (next_real))
27266 break;
27267 else
27268 next_real = NEXT_INSN (next_real);
27269
27270 return next_real;
27271 }
27272
27273 /* Called by the final INSN scan whenever we see a var location. We
27274 use it to drop labels in the right places, and throw the location in
27275 our lookup table. */
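/* A hedged sketch of the effect (label spelling is target-dependent): each
   variable-location or call-site note gets a label in the code stream, e.g.

        .LVL7:

   which is recorded with the note so that the location lists emitted later
   can use it as a range boundary; consecutive notes at the same address
   share one label.  */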
27276
27277 static void
27278 dwarf2out_var_location (rtx_insn *loc_note)
27279 {
27280 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27281 struct var_loc_node *newloc;
27282 rtx_insn *next_real, *next_note;
27283 rtx_insn *call_insn = NULL;
27284 static const char *last_label;
27285 static const char *last_postcall_label;
27286 static bool last_in_cold_section_p;
27287 static rtx_insn *expected_next_loc_note;
27288 tree decl;
27289 bool var_loc_p;
27290 var_loc_view view = 0;
27291
27292 if (!NOTE_P (loc_note))
27293 {
27294 if (CALL_P (loc_note))
27295 {
27296 maybe_reset_location_view (loc_note, cur_line_info_table);
27297 call_site_count++;
27298 if (SIBLING_CALL_P (loc_note))
27299 tail_call_site_count++;
27300 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27301 {
27302 call_insn = loc_note;
27303 loc_note = NULL;
27304 var_loc_p = false;
27305
27306 next_real = dwarf2out_next_real_insn (call_insn);
27307 next_note = NULL;
27308 cached_next_real_insn = NULL;
27309 goto create_label;
27310 }
27311 if (optimize == 0 && !flag_var_tracking)
27312 {
27313 /* When the var-tracking pass is not running, there is no note
27314 for indirect calls whose target is compile-time known. In this
27315 case, process such calls specifically so that we generate call
27316 sites for them anyway. */
27317 rtx x = PATTERN (loc_note);
27318 if (GET_CODE (x) == PARALLEL)
27319 x = XVECEXP (x, 0, 0);
27320 if (GET_CODE (x) == SET)
27321 x = SET_SRC (x);
27322 if (GET_CODE (x) == CALL)
27323 x = XEXP (x, 0);
27324 if (!MEM_P (x)
27325 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27326 || !SYMBOL_REF_DECL (XEXP (x, 0))
27327 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27328 != FUNCTION_DECL))
27329 {
27330 call_insn = loc_note;
27331 loc_note = NULL;
27332 var_loc_p = false;
27333
27334 next_real = dwarf2out_next_real_insn (call_insn);
27335 next_note = NULL;
27336 cached_next_real_insn = NULL;
27337 goto create_label;
27338 }
27339 }
27340 }
27341 else if (!debug_variable_location_views)
27342 gcc_unreachable ();
27343 else
27344 maybe_reset_location_view (loc_note, cur_line_info_table);
27345
27346 return;
27347 }
27348
27349 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27350 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27351 return;
27352
27353 /* Optimize processing a large consecutive sequence of location
27354 notes so we don't spend too much time in next_real_insn. If the
27355 next insn is another location note, remember the next_real_insn
27356 calculation for next time. */
27357 next_real = cached_next_real_insn;
27358 if (next_real)
27359 {
27360 if (expected_next_loc_note != loc_note)
27361 next_real = NULL;
27362 }
27363
27364 next_note = NEXT_INSN (loc_note);
27365 if (! next_note
27366 || next_note->deleted ()
27367 || ! NOTE_P (next_note)
27368 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27369 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27370 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27371 next_note = NULL;
27372
27373 if (! next_real)
27374 next_real = dwarf2out_next_real_insn (loc_note);
27375
27376 if (next_note)
27377 {
27378 expected_next_loc_note = next_note;
27379 cached_next_real_insn = next_real;
27380 }
27381 else
27382 cached_next_real_insn = NULL;
27383
27384 /* If there are no instructions which would be affected by this note,
27385 don't do anything. */
27386 if (var_loc_p
27387 && next_real == NULL_RTX
27388 && !NOTE_DURING_CALL_P (loc_note))
27389 return;
27390
27391 create_label:
27392
27393 if (next_real == NULL_RTX)
27394 next_real = get_last_insn ();
27395
27396 /* If there were any real insns between the note we processed last time
27397 and this note (or if it is the first note), clear
27398 last_{,postcall_}label so that they are not reused this time. */
27399 if (last_var_location_insn == NULL_RTX
27400 || last_var_location_insn != next_real
27401 || last_in_cold_section_p != in_cold_section_p)
27402 {
27403 last_label = NULL;
27404 last_postcall_label = NULL;
27405 }
27406
27407 if (var_loc_p)
27408 {
27409 const char *label
27410 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27411 view = cur_line_info_table->view;
27412 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27413 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27414 if (newloc == NULL)
27415 return;
27416 }
27417 else
27418 {
27419 decl = NULL_TREE;
27420 newloc = NULL;
27421 }
27422
27423 /* If there were no real insns between the note we processed last time
27424 and this note, reuse the label we emitted last time. Otherwise
27425 create a new label and emit it. */
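 /* For illustration only: with the usual ASM_GENERATE_INTERNAL_LABEL
 behaviour on ELF targets, successive location labels come out as
 .LVL0, .LVL1, ...; the exact spelling is target-dependent. */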
27426 if (last_label == NULL)
27427 {
27428 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27429 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27430 loclabel_num++;
27431 last_label = ggc_strdup (loclabel);
27432 /* See if loclabel might be equal to .Ltext0. If yes,
27433 bump first_loclabel_num_not_at_text_label. */
27434 if (!have_multiple_function_sections
27435 && in_first_function_p
27436 && maybe_at_text_label_p)
27437 {
27438 static rtx_insn *last_start;
27439 rtx_insn *insn;
27440 for (insn = loc_note; insn; insn = previous_insn (insn))
27441 if (insn == last_start)
27442 break;
27443 else if (!NONDEBUG_INSN_P (insn))
27444 continue;
27445 else
27446 {
27447 rtx body = PATTERN (insn);
27448 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27449 continue;
27450 /* Inline asm could occupy zero bytes. */
27451 else if (GET_CODE (body) == ASM_INPUT
27452 || asm_noperands (body) >= 0)
27453 continue;
27454 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27455 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27456 continue;
27457 #endif
27458 else
27459 {
27460 /* Assume insn has non-zero length. */
27461 maybe_at_text_label_p = false;
27462 break;
27463 }
27464 }
27465 if (maybe_at_text_label_p)
27466 {
27467 last_start = loc_note;
27468 first_loclabel_num_not_at_text_label = loclabel_num;
27469 }
27470 }
27471 }
27472
27473 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27474 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27475
27476 if (!var_loc_p)
27477 {
27478 struct call_arg_loc_node *ca_loc
27479 = ggc_cleared_alloc<call_arg_loc_node> ();
27480 rtx_insn *prev = call_insn;
27481
27482 ca_loc->call_arg_loc_note
27483 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27484 ca_loc->next = NULL;
27485 ca_loc->label = last_label;
27486 gcc_assert (prev
27487 && (CALL_P (prev)
27488 || (NONJUMP_INSN_P (prev)
27489 && GET_CODE (PATTERN (prev)) == SEQUENCE
27490 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27491 if (!CALL_P (prev))
27492 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27493 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27494
27495 /* Look for a SYMBOL_REF in the "prev" instruction. */
27496 rtx x = get_call_rtx_from (PATTERN (prev));
27497 if (x)
27498 {
27499 /* Try to get the call symbol, if any. */
27500 if (MEM_P (XEXP (x, 0)))
27501 x = XEXP (x, 0);
27502 /* First, look for a memory access to a symbol_ref. */
27503 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27504 && SYMBOL_REF_DECL (XEXP (x, 0))
27505 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27506 ca_loc->symbol_ref = XEXP (x, 0);
27507 /* Otherwise, look at a compile-time known user-level function
27508 declaration. */
27509 else if (MEM_P (x)
27510 && MEM_EXPR (x)
27511 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27512 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27513 }
27514
27515 ca_loc->block = insn_scope (prev);
27516 if (call_arg_locations)
27517 call_arg_loc_last->next = ca_loc;
27518 else
27519 call_arg_locations = ca_loc;
27520 call_arg_loc_last = ca_loc;
27521 }
27522 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27523 {
27524 newloc->label = last_label;
27525 newloc->view = view;
27526 }
27527 else
27528 {
27529 if (!last_postcall_label)
27530 {
27531 sprintf (loclabel, "%s-1", last_label);
27532 last_postcall_label = ggc_strdup (loclabel);
27533 }
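 /* Illustration (hypothetical label name): if last_label is ".LVL7",
 the post-call label becomes the assembler expression ".LVL7-1",
 i.e. an address one byte before the label following the call. */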
27534 newloc->label = last_postcall_label;
27535 /* ??? This view is at last_label, not last_label-1, but we
27536 could only assume view at last_label-1 is zero if we could
27537 assume calls always have length greater than one. This is
27538 probably true in general, though there might be a rare
27539 exception to this rule, e.g. if a call insn is optimized out
27540 by target magic. Then, even the -1 in the label will be
27541 wrong, which might invalidate the range. Anyway, using view,
27542 though technically possibly incorrect, will work as far as
27543 ranges go: since L-1 is in the middle of the call insn,
27544 (L-1).0 and (L-1).V shouldn't make any difference, and having
27545 the loclist entry refer to the .loc entry might be useful, so
27546 leave it like this. */
27547 newloc->view = view;
27548 }
27549
27550 if (var_loc_p && flag_debug_asm)
27551 {
27552 const char *name, *sep, *patstr;
27553 if (decl && DECL_NAME (decl))
27554 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27555 else
27556 name = "";
27557 if (NOTE_VAR_LOCATION_LOC (loc_note))
27558 {
27559 sep = " => ";
27560 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27561 }
27562 else
27563 {
27564 sep = " ";
27565 patstr = "RESET";
27566 }
27567 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27568 name, sep, patstr);
27569 }
27570
27571 last_var_location_insn = next_real;
27572 last_in_cold_section_p = in_cold_section_p;
27573 }
27574
27575 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27576 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27577 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27578 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27579 BLOCK_FRAGMENT_ORIGIN links. */
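/* Illustrative (hypothetical) block tree:

     OUTER
      +-- B1
      |    +-- B2
      +-- B3

   block_within_block_p (B2, OUTER, true) returns true, whereas
   block_within_block_p (B2, B3, false) returns false. */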
27580 static bool
27581 block_within_block_p (tree block, tree outer, bool bothways)
27582 {
27583 if (block == outer)
27584 return true;
27585
27586 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27587 for (tree context = BLOCK_SUPERCONTEXT (block);
27588 context != outer;
27589 context = BLOCK_SUPERCONTEXT (context))
27590 if (!context || TREE_CODE (context) != BLOCK)
27591 return false;
27592
27593 if (!bothways)
27594 return true;
27595
27596 /* Now check that each block is actually referenced by its
27597 parent. */
27598 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27599 context = BLOCK_SUPERCONTEXT (context))
27600 {
27601 if (BLOCK_FRAGMENT_ORIGIN (context))
27602 {
27603 gcc_assert (!BLOCK_SUBBLOCKS (context));
27604 context = BLOCK_FRAGMENT_ORIGIN (context);
27605 }
27606 for (tree sub = BLOCK_SUBBLOCKS (context);
27607 sub != block;
27608 sub = BLOCK_CHAIN (sub))
27609 if (!sub)
27610 return false;
27611 if (context == outer)
27612 return true;
27613 else
27614 block = context;
27615 }
27616 }
27617
27618 /* Called during final while assembling the marker of the entry point
27619 for an inlined function. */
27620
27621 static void
27622 dwarf2out_inline_entry (tree block)
27623 {
27624 gcc_assert (debug_inline_points);
27625
27626 /* If we can't represent it, don't bother. */
27627 if (!(dwarf_version >= 3 || !dwarf_strict))
27628 return;
27629
27630 gcc_assert (DECL_P (block_ultimate_origin (block)));
27631
27632 /* Sanity check the block tree. This would catch a case in which
27633 BLOCK got removed from the tree reachable from the outermost
27634 lexical block, but got retained in markers. It would still link
27635 back to its parents, but some ancestor would be missing a link
27636 down the path to the sub BLOCK. If the block got removed, its
27637 BLOCK_NUMBER will not be a usable value. */
27638 if (flag_checking)
27639 gcc_assert (block_within_block_p (block,
27640 DECL_INITIAL (current_function_decl),
27641 true));
27642
27643 gcc_assert (inlined_function_outer_scope_p (block));
27644 gcc_assert (!lookup_block_die (block));
27645
27646 if (BLOCK_FRAGMENT_ORIGIN (block))
27647 block = BLOCK_FRAGMENT_ORIGIN (block);
27648 /* Can the entry point ever not be at the beginning of an
27649 unfragmented lexical block? */
27650 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27651 || (cur_line_info_table
27652 && !ZERO_VIEW_P (cur_line_info_table->view))))
27653 return;
27654
27655 if (!inline_entry_data_table)
27656 inline_entry_data_table
27657 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27658
27659
27660 inline_entry_data **iedp
27661 = inline_entry_data_table->find_slot_with_hash (block,
27662 htab_hash_pointer (block),
27663 INSERT);
27664 if (*iedp)
27665 /* ??? Ideally, we'd record all entry points for the same inlined
27666 function (some may have been duplicated by e.g. unrolling), but
27667 we have no way to represent that ATM. */
27668 return;
27669
27670 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27671 ied->block = block;
27672 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27673 ied->label_num = BLOCK_NUMBER (block);
27674 if (cur_line_info_table)
27675 ied->view = cur_line_info_table->view;
27676
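 /* The marker label emitted below is BLOCK_INLINE_ENTRY_LABEL plus the
 block number, e.g. something like .LBI1234 on a typical ELF target
 (illustrative; the prefix and spelling are configuration-dependent). */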
27677 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27678
27679 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27680 BLOCK_NUMBER (block));
27681 ASM_OUTPUT_LABEL (asm_out_file, label);
27682 }
27683
27684 /* Called from finalize_size_functions for size functions so that their body
27685 can be encoded in the debug info to describe the layout of variable-length
27686 structures. */
27687
27688 static void
27689 dwarf2out_size_function (tree decl)
27690 {
27691 function_to_dwarf_procedure (decl);
27692 }
27693
27694 /* Note in one location list that the text section has changed. */
27695
27696 int
27697 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27698 {
27699 var_loc_list *list = *slot;
27700 if (list->first)
27701 list->last_before_switch
27702 = list->last->next ? list->last->next : list->last;
27703 return 1;
27704 }
27705
27706 /* Note in all location lists that the text section has changed. */
27707
27708 static void
27709 var_location_switch_text_section (void)
27710 {
27711 if (decl_loc_table == NULL)
27712 return;
27713
27714 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27715 }
27716
27717 /* Create a new line number table. */
27718
27719 static dw_line_info_table *
27720 new_line_info_table (void)
27721 {
27722 dw_line_info_table *table;
27723
27724 table = ggc_cleared_alloc<dw_line_info_table> ();
27725 table->file_num = 1;
27726 table->line_num = 1;
27727 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27728 FORCE_RESET_NEXT_VIEW (table->view);
27729 table->symviews_since_reset = 0;
27730
27731 return table;
27732 }
27733
27734 /* Look up the "current" table into which we emit line info, so
27735 that we don't have to do it for every source line. */
27736
27737 static void
27738 set_cur_line_info_table (section *sec)
27739 {
27740 dw_line_info_table *table;
27741
27742 if (sec == text_section)
27743 table = text_section_line_info;
27744 else if (sec == cold_text_section)
27745 {
27746 table = cold_text_section_line_info;
27747 if (!table)
27748 {
27749 cold_text_section_line_info = table = new_line_info_table ();
27750 table->end_label = cold_end_label;
27751 }
27752 }
27753 else
27754 {
27755 const char *end_label;
27756
27757 if (crtl->has_bb_partition)
27758 {
27759 if (in_cold_section_p)
27760 end_label = crtl->subsections.cold_section_end_label;
27761 else
27762 end_label = crtl->subsections.hot_section_end_label;
27763 }
27764 else
27765 {
27766 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27767 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27768 current_function_funcdef_no);
27769 end_label = ggc_strdup (label);
27770 }
27771
27772 table = new_line_info_table ();
27773 table->end_label = end_label;
27774
27775 vec_safe_push (separate_line_info, table);
27776 }
27777
27778 if (output_asm_line_debug_info ())
27779 table->is_stmt = (cur_line_info_table
27780 ? cur_line_info_table->is_stmt
27781 : DWARF_LINE_DEFAULT_IS_STMT_START);
27782 cur_line_info_table = table;
27783 }
27784
27785
27786 /* We need to reset the locations at the beginning of each
27787 function. We can't do this in the end_function hook, because the
27788 declarations that use the locations won't have been output when
27789 that hook is called. Also compute have_multiple_function_sections here. */
27790
27791 static void
27792 dwarf2out_begin_function (tree fun)
27793 {
27794 section *sec = function_section (fun);
27795
27796 if (sec != text_section)
27797 have_multiple_function_sections = true;
27798
27799 if (crtl->has_bb_partition && !cold_text_section)
27800 {
27801 gcc_assert (current_function_decl == fun);
27802 cold_text_section = unlikely_text_section ();
27803 switch_to_section (cold_text_section);
27804 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27805 switch_to_section (sec);
27806 }
27807
27808 dwarf2out_note_section_used ();
27809 call_site_count = 0;
27810 tail_call_site_count = 0;
27811
27812 set_cur_line_info_table (sec);
27813 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27814 }
27815
27816 /* Helper function of dwarf2out_end_function, called only after emitting
27817 the very first function into assembly. Check if some .debug_loc range
27818 might end with a .LVL* label that could be equal to .Ltext0.
27819 In that case we must force using absolute addresses in .debug_loc ranges,
27820 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27821 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27822 list terminator.
27823 Set have_multiple_function_sections to true in that case and
27824 terminate htab traversal. */
27825
27826 int
27827 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27828 {
27829 var_loc_list *entry = *slot;
27830 struct var_loc_node *node;
27831
27832 node = entry->first;
27833 if (node && node->next && node->next->label)
27834 {
27835 unsigned int i;
27836 const char *label = node->next->label;
27837 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27838
27839 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27840 {
27841 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27842 if (strcmp (label, loclabel) == 0)
27843 {
27844 have_multiple_function_sections = true;
27845 return 0;
27846 }
27847 }
27848 }
27849 return 1;
27850 }
27851
27852 /* Hook called after emitting a function into assembly.
27853 This does something only for the very first function emitted. */
27854
27855 static void
27856 dwarf2out_end_function (unsigned int)
27857 {
27858 if (in_first_function_p
27859 && !have_multiple_function_sections
27860 && first_loclabel_num_not_at_text_label
27861 && decl_loc_table)
27862 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27863 in_first_function_p = false;
27864 maybe_at_text_label_p = false;
27865 }
27866
27867 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27868 front-ends register a translation unit even before dwarf2out_init is
27869 called. */
27870 static tree main_translation_unit = NULL_TREE;
27871
27872 /* Hook called by front-ends after they have built their main translation
27873 unit. Associate comp_unit_die with UNIT. */
27874
27875 static void
27876 dwarf2out_register_main_translation_unit (tree unit)
27877 {
27878 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27879 && main_translation_unit == NULL_TREE);
27880 main_translation_unit = unit;
27881 /* If dwarf2out_init has not been called yet, it will perform the association
27882 itself looking at main_translation_unit. */
27883 if (decl_die_table != NULL)
27884 equate_decl_number_to_die (unit, comp_unit_die ());
27885 }
27886
27887 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27888
27889 static void
27890 push_dw_line_info_entry (dw_line_info_table *table,
27891 enum dw_line_info_opcode opcode, unsigned int val)
27892 {
27893 dw_line_info_entry e;
27894 e.opcode = opcode;
27895 e.val = val;
27896 vec_safe_push (table->entries, e);
27897 }
27898
27899 /* Output a label to mark the beginning of a source code line entry
27900 and record information relating to this source line, in
27901 'line_info_table' for later output of the .debug_line section. */
27902 /* ??? The discriminator parameter ought to be unsigned. */
27903
27904 static void
27905 dwarf2out_source_line (unsigned int line, unsigned int column,
27906 const char *filename,
27907 int discriminator, bool is_stmt)
27908 {
27909 unsigned int file_num;
27910 dw_line_info_table *table;
27911 static var_loc_view lvugid;
27912
27913 if (debug_info_level < DINFO_LEVEL_TERSE)
27914 return;
27915
27916 table = cur_line_info_table;
27917
27918 if (line == 0)
27919 {
27920 if (debug_variable_location_views
27921 && output_asm_line_debug_info ()
27922 && table && !RESETTING_VIEW_P (table->view))
27923 {
27924 /* If we're using the assembler to compute view numbers, we
27925 can't issue a .loc directive for line zero, so we can't
27926 get a view number at this point. We might attempt to
27927 compute it from the previous view, or equate it to a
27928 subsequent view (though it might not be there!), but
27929 since we're omitting the line number entry, we might as
27930 well omit the view number as well. That means pretending
27931 it's a view number zero, which might very well turn out
27932 to be correct. ??? Extend the assembler so that the
27933 compiler could emit e.g. ".locview .LVU#", to output a
27934 view without changing line number information. We'd then
27935 have to count it in symviews_since_reset; when it's omitted,
27936 it doesn't count. */
27937 if (!zero_view_p)
27938 zero_view_p = BITMAP_GGC_ALLOC ();
27939 bitmap_set_bit (zero_view_p, table->view);
27940 if (flag_debug_asm)
27941 {
27942 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27943 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27944 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27945 ASM_COMMENT_START);
27946 assemble_name (asm_out_file, label);
27947 putc ('\n', asm_out_file);
27948 }
27949 table->view = ++lvugid;
27950 }
27951 return;
27952 }
27953
27954 /* The discriminator column was added in DWARF 4. Simplify the code
27955 below by clearing the discriminator if we're not supposed to output it. */
27956 if (dwarf_version < 4 && dwarf_strict)
27957 discriminator = 0;
27958
27959 if (!debug_column_info)
27960 column = 0;
27961
27962 file_num = maybe_emit_file (lookup_filename (filename));
27963
27964 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27965 the debugger has used the second (possibly duplicate) line number
27966 at the beginning of the function to mark the end of the prologue.
27967 We could eliminate any other duplicates within the function. For
27968 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27969 that second line number entry. */
27970 /* Recall that this end-of-prologue indication is *not* the same thing
27971 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27972 to which the hook corresponds, follows the last insn that was
27973 emitted by gen_prologue. What we need is to precede the first insn
27974 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27975 insn that corresponds to something the user wrote. These may be
27976 very different locations once scheduling is enabled. */
27977
27978 if (0 && file_num == table->file_num
27979 && line == table->line_num
27980 && column == table->column_num
27981 && discriminator == table->discrim_num
27982 && is_stmt == table->is_stmt)
27983 return;
27984
27985 switch_to_section (current_function_section ());
27986
27987 /* If requested, emit something human-readable. */
27988 if (flag_debug_asm)
27989 {
27990 if (debug_column_info)
27991 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27992 filename, line, column);
27993 else
27994 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27995 filename, line);
27996 }
27997
27998 if (output_asm_line_debug_info ())
27999 {
28000 /* Emit the .loc directive understood by GNU as. */
28001 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
28002 file_num, line, is_stmt, discriminator */
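 /* For example (illustrative values), the full directive may end up as
 ".loc 1 42 7 is_stmt 0 discriminator 3 view .LVU5"; the is_stmt,
 discriminator and view operands below are appended only when needed. */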
28003 fputs ("\t.loc ", asm_out_file);
28004 fprint_ul (asm_out_file, file_num);
28005 putc (' ', asm_out_file);
28006 fprint_ul (asm_out_file, line);
28007 putc (' ', asm_out_file);
28008 fprint_ul (asm_out_file, column);
28009
28010 if (is_stmt != table->is_stmt)
28011 {
28012 #if HAVE_GAS_LOC_STMT
28013 fputs (" is_stmt ", asm_out_file);
28014 putc (is_stmt ? '1' : '0', asm_out_file);
28015 #endif
28016 }
28017 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28018 {
28019 gcc_assert (discriminator > 0);
28020 fputs (" discriminator ", asm_out_file);
28021 fprint_ul (asm_out_file, (unsigned long) discriminator);
28022 }
28023 if (debug_variable_location_views)
28024 {
28025 if (!RESETTING_VIEW_P (table->view))
28026 {
28027 table->symviews_since_reset++;
28028 if (table->symviews_since_reset > symview_upper_bound)
28029 symview_upper_bound = table->symviews_since_reset;
28030 /* When we're using the assembler to compute view
28031 numbers, we output symbolic labels after "view" in
28032 .loc directives, and the assembler will set them for
28033 us, so that we can refer to the view numbers in
28034 location lists. The only exceptions are when we know
28035 a view will be zero: "-0" is a forced reset, used
28036 e.g. in the beginning of functions, whereas "0" tells
28037 the assembler to check that there was a PC change
28038 since the previous view, in a way that implicitly
28039 resets the next view. */
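 /* Concretely, the view operand takes one of three forms here: a
 symbolic label such as ".LVU7" for a tracked view, "-0" for a
 forced reset, and "0" for a reset the assembler checks against a
 PC change. */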
28040 fputs (" view ", asm_out_file);
28041 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28042 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28043 assemble_name (asm_out_file, label);
28044 table->view = ++lvugid;
28045 }
28046 else
28047 {
28048 table->symviews_since_reset = 0;
28049 if (FORCE_RESETTING_VIEW_P (table->view))
28050 fputs (" view -0", asm_out_file);
28051 else
28052 fputs (" view 0", asm_out_file);
28053 /* Mark the present view as a zero view. Earlier debug
28054 binds may have already added its id to loclists to be
28055 emitted later, so we can't reuse the id for something
28056 else. However, it's good to know whether a view is
28057 known to be zero, because then we may be able to
28058 optimize out locviews that are all zeros, so take
28059 note of it in zero_view_p. */
28060 if (!zero_view_p)
28061 zero_view_p = BITMAP_GGC_ALLOC ();
28062 bitmap_set_bit (zero_view_p, lvugid);
28063 table->view = ++lvugid;
28064 }
28065 }
28066 putc ('\n', asm_out_file);
28067 }
28068 else
28069 {
28070 unsigned int label_num = ++line_info_label_num;
28071
28072 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28073
28074 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28075 push_dw_line_info_entry (table, LI_adv_address, label_num);
28076 else
28077 push_dw_line_info_entry (table, LI_set_address, label_num);
28078 if (debug_variable_location_views)
28079 {
28080 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28081 if (resetting)
28082 table->view = 0;
28083
28084 if (flag_debug_asm)
28085 fprintf (asm_out_file, "\t%s view %s%d\n",
28086 ASM_COMMENT_START,
28087 resetting ? "-" : "",
28088 table->view);
28089
28090 table->view++;
28091 }
28092 if (file_num != table->file_num)
28093 push_dw_line_info_entry (table, LI_set_file, file_num);
28094 if (discriminator != table->discrim_num)
28095 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28096 if (is_stmt != table->is_stmt)
28097 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28098 push_dw_line_info_entry (table, LI_set_line, line);
28099 if (debug_column_info)
28100 push_dw_line_info_entry (table, LI_set_column, column);
28101 }
28102
28103 table->file_num = file_num;
28104 table->line_num = line;
28105 table->column_num = column;
28106 table->discrim_num = discriminator;
28107 table->is_stmt = is_stmt;
28108 table->in_use = true;
28109 }
28110
28111 /* Record the beginning of a new source file. */
28112
28113 static void
28114 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28115 {
28116 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28117 {
28118 macinfo_entry e;
28119 e.code = DW_MACINFO_start_file;
28120 e.lineno = lineno;
28121 e.info = ggc_strdup (filename);
28122 vec_safe_push (macinfo_table, e);
28123 }
28124 }
28125
28126 /* Record the end of a source file. */
28127
28128 static void
28129 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28130 {
28131 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28132 {
28133 macinfo_entry e;
28134 e.code = DW_MACINFO_end_file;
28135 e.lineno = lineno;
28136 e.info = NULL;
28137 vec_safe_push (macinfo_table, e);
28138 }
28139 }
28140
28141 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28142 the tail of the directive line, i.e. the part past the initial
28143 whitespace, '#', whitespace, directive name and whitespace. */
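 /* For example, for the directive "#define FOO 1" the incoming buffer
 is "FOO 1". */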
28144
28145 static void
28146 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28147 const char *buffer ATTRIBUTE_UNUSED)
28148 {
28149 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28150 {
28151 macinfo_entry e;
28152 /* Insert a dummy first entry to be able to optimize the whole
28153 predefined macro block using DW_MACRO_import. */
28154 if (macinfo_table->is_empty () && lineno <= 1)
28155 {
28156 e.code = 0;
28157 e.lineno = 0;
28158 e.info = NULL;
28159 vec_safe_push (macinfo_table, e);
28160 }
28161 e.code = DW_MACINFO_define;
28162 e.lineno = lineno;
28163 e.info = ggc_strdup (buffer);
28164 vec_safe_push (macinfo_table, e);
28165 }
28166 }
28167
28168 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28169 the tail of the directive line, i.e. the part past the initial
28170 whitespace, '#', whitespace, directive name and whitespace. */
28171
28172 static void
28173 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28174 const char *buffer ATTRIBUTE_UNUSED)
28175 {
28176 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28177 {
28178 macinfo_entry e;
28179 /* Insert a dummy first entry to be able to optimize the whole
28180 predefined macro block using DW_MACRO_import. */
28181 if (macinfo_table->is_empty () && lineno <= 1)
28182 {
28183 e.code = 0;
28184 e.lineno = 0;
28185 e.info = NULL;
28186 vec_safe_push (macinfo_table, e);
28187 }
28188 e.code = DW_MACINFO_undef;
28189 e.lineno = lineno;
28190 e.info = ggc_strdup (buffer);
28191 vec_safe_push (macinfo_table, e);
28192 }
28193 }
28194
28195 /* Helpers to manipulate the hash table of macinfo entries. */
28196
28197 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28198 {
28199 static inline hashval_t hash (const macinfo_entry *);
28200 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28201 };
28202
28203 inline hashval_t
28204 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28205 {
28206 return htab_hash_string (entry->info);
28207 }
28208
28209 inline bool
28210 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28211 const macinfo_entry *entry2)
28212 {
28213 return !strcmp (entry1->info, entry2->info);
28214 }
28215
28216 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28217
28218 /* Output a single .debug_macinfo entry. */
28219
28220 static void
28221 output_macinfo_op (macinfo_entry *ref)
28222 {
28223 int file_num;
28224 size_t len;
28225 struct indirect_string_node *node;
28226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28227 struct dwarf_file_data *fd;
28228
28229 switch (ref->code)
28230 {
28231 case DW_MACINFO_start_file:
28232 fd = lookup_filename (ref->info);
28233 file_num = maybe_emit_file (fd);
28234 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28235 dw2_asm_output_data_uleb128 (ref->lineno,
28236 "Included from line number %lu",
28237 (unsigned long) ref->lineno);
28238 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28239 break;
28240 case DW_MACINFO_end_file:
28241 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28242 break;
28243 case DW_MACINFO_define:
28244 case DW_MACINFO_undef:
28245 len = strlen (ref->info) + 1;
28246 if (!dwarf_strict
28247 && len > DWARF_OFFSET_SIZE
28248 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28249 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28250 {
28251 ref->code = ref->code == DW_MACINFO_define
28252 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28253 output_macinfo_op (ref);
28254 return;
28255 }
28256 dw2_asm_output_data (1, ref->code,
28257 ref->code == DW_MACINFO_define
28258 ? "Define macro" : "Undefine macro");
28259 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28260 (unsigned long) ref->lineno);
28261 dw2_asm_output_nstring (ref->info, -1, "The macro");
28262 break;
28263 case DW_MACRO_define_strp:
28264 case DW_MACRO_undef_strp:
28265 /* NB: dwarf2out_finish performs:
28266 1. save_macinfo_strings
28267 2. hash table traverse of index_string
28268 3. output_macinfo -> output_macinfo_op
28269 4. output_indirect_strings
28270 -> hash table traverse of output_index_string
28271
28272 When output_macinfo_op is called, all index strings have already been
28273 added to the hash table by save_macinfo_strings, so we must not pass
28274 INSERT to find_slot_with_hash: it could expand the hash table even
28275 when no insertion is needed, and thereby change the traversal order
28276 between index_string and output_index_string. */
28277 node = find_AT_string (ref->info, NO_INSERT);
28278 gcc_assert (node
28279 && (node->form == DW_FORM_strp
28280 || node->form == dwarf_FORM (DW_FORM_strx)));
28281 dw2_asm_output_data (1, ref->code,
28282 ref->code == DW_MACRO_define_strp
28283 ? "Define macro strp"
28284 : "Undefine macro strp");
28285 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28286 (unsigned long) ref->lineno);
28287 if (node->form == DW_FORM_strp)
28288 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28289 debug_str_section, "The macro: \"%s\"",
28290 ref->info);
28291 else
28292 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28293 ref->info);
28294 break;
28295 case DW_MACRO_import:
28296 dw2_asm_output_data (1, ref->code, "Import");
28297 ASM_GENERATE_INTERNAL_LABEL (label,
28298 DEBUG_MACRO_SECTION_LABEL,
28299 ref->lineno + macinfo_label_base);
28300 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28301 break;
28302 default:
28303 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28304 ASM_COMMENT_START, (unsigned long) ref->code);
28305 break;
28306 }
28307 }
28308
28309 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28310 other compilation units' .debug_macinfo sections. IDX is the index
28311 of the first define/undef op; return the number of ops that should be
28312 emitted in a comdat .debug_macinfo section and emit
28313 a DW_MACRO_import entry referencing it.
28314 If the define/undef entry should be emitted normally, return 0. */
28315
28316 static unsigned
28317 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28318 macinfo_hash_type **macinfo_htab)
28319 {
28320 macinfo_entry *first, *second, *cur, *inc;
28321 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28322 unsigned char checksum[16];
28323 struct md5_ctx ctx;
28324 char *grp_name, *tail;
28325 const char *base;
28326 unsigned int i, count, encoded_filename_len, linebuf_len;
28327 macinfo_entry **slot;
28328
28329 first = &(*macinfo_table)[idx];
28330 second = &(*macinfo_table)[idx + 1];
28331
28332 /* Optimize only if there are at least two consecutive define/undef ops,
28333 and either all of them are before first DW_MACINFO_start_file
28334 with lineno {0,1} (i.e. predefined macro block), or all of them are
28335 in some included header file. */
28336 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28337 return 0;
28338 if (vec_safe_is_empty (files))
28339 {
28340 if (first->lineno > 1 || second->lineno > 1)
28341 return 0;
28342 }
28343 else if (first->lineno == 0)
28344 return 0;
28345
28346 /* Find the last define/undef entry that can be grouped together
28347 with the first one, and at the same time compute the MD5 checksum
28348 of their codes, line numbers and strings. */
28349 md5_init_ctx (&ctx);
28350 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28351 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28352 break;
28353 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28354 break;
28355 else
28356 {
28357 unsigned char code = cur->code;
28358 md5_process_bytes (&code, 1, &ctx);
28359 checksum_uleb128 (cur->lineno, &ctx);
28360 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28361 }
28362 md5_finish_ctx (&ctx, checksum);
28363 count = i - idx;
28364
28365 /* From the containing include filename (if any) pick up just
28366 usable characters from its basename. */
28367 if (vec_safe_is_empty (files))
28368 base = "";
28369 else
28370 base = lbasename (files->last ().info);
28371 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28372 if (ISIDNUM (base[i]) || base[i] == '.')
28373 encoded_filename_len++;
28374 /* Count . at the end. */
28375 if (encoded_filename_len)
28376 encoded_filename_len++;
28377
28378 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28379 linebuf_len = strlen (linebuf);
28380
28381 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
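 /* E.g. (illustrative): with 4-byte DWARF offsets, a group of defines
 from a header "foo.h" starting at line 10 is named
 "wm4.foo.h.10.<32 hex md5 digits>". */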
28382 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28383 + 16 * 2 + 1);
28384 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28385 tail = grp_name + 4;
28386 if (encoded_filename_len)
28387 {
28388 for (i = 0; base[i]; i++)
28389 if (ISIDNUM (base[i]) || base[i] == '.')
28390 *tail++ = base[i];
28391 *tail++ = '.';
28392 }
28393 memcpy (tail, linebuf, linebuf_len);
28394 tail += linebuf_len;
28395 *tail++ = '.';
28396 for (i = 0; i < 16; i++)
28397 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28398
28399 /* Construct a macinfo_entry for DW_MACRO_import
28400 in the empty vector entry before the first define/undef. */
28401 inc = &(*macinfo_table)[idx - 1];
28402 inc->code = DW_MACRO_import;
28403 inc->lineno = 0;
28404 inc->info = ggc_strdup (grp_name);
28405 if (!*macinfo_htab)
28406 *macinfo_htab = new macinfo_hash_type (10);
28407 /* Avoid emitting duplicates. */
28408 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28409 if (*slot != NULL)
28410 {
28411 inc->code = 0;
28412 inc->info = NULL;
28413 /* If such an entry has been used before, just emit
28414 a DW_MACRO_import op. */
28415 inc = *slot;
28416 output_macinfo_op (inc);
28417 /* And clear all macinfo_entry in the range to avoid emitting them
28418 in the second pass. */
28419 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28420 {
28421 cur->code = 0;
28422 cur->info = NULL;
28423 }
28424 }
28425 else
28426 {
28427 *slot = inc;
28428 inc->lineno = (*macinfo_htab)->elements ();
28429 output_macinfo_op (inc);
28430 }
28431 return count;
28432 }
28433
28434 /* Save any strings needed by the macinfo table in the debug str
28435 table. All strings must be collected into the table by the time
28436 index_string is called. */
28437
28438 static void
28439 save_macinfo_strings (void)
28440 {
28441 unsigned len;
28442 unsigned i;
28443 macinfo_entry *ref;
28444
28445 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28446 {
28447 switch (ref->code)
28448 {
28449 /* Match the logic in output_macinfo_op to decide on
28450 indirect strings. */
28451 case DW_MACINFO_define:
28452 case DW_MACINFO_undef:
28453 len = strlen (ref->info) + 1;
28454 if (!dwarf_strict
28455 && len > DWARF_OFFSET_SIZE
28456 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28457 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28458 set_indirect_string (find_AT_string (ref->info));
28459 break;
28460 case DW_MACINFO_start_file:
28461 /* -gsplit-dwarf -g3 will also output filename as indirect
28462 string. */
28463 if (!dwarf_split_debug_info)
28464 break;
28465 /* Fall through. */
28466 case DW_MACRO_define_strp:
28467 case DW_MACRO_undef_strp:
28468 set_indirect_string (find_AT_string (ref->info));
28469 break;
28470 default:
28471 break;
28472 }
28473 }
28474 }
28475
28476 /* Output macinfo section(s). */
28477
28478 static void
28479 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28480 {
28481 unsigned i;
28482 unsigned long length = vec_safe_length (macinfo_table);
28483 macinfo_entry *ref;
28484 vec<macinfo_entry, va_gc> *files = NULL;
28485 macinfo_hash_type *macinfo_htab = NULL;
28486 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28487
28488 if (! length)
28489 return;
28490
28491 /* output_macinfo* uses these interchangeably. */
28492 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28493 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28494 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28495 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28496
28497 /* AIX Assembler inserts the length, so adjust the reference to match the
28498 offset expected by debuggers. */
28499 strcpy (dl_section_ref, debug_line_label);
28500 if (XCOFF_DEBUGGING_INFO)
28501 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28502
28503 /* For .debug_macro emit the section header. */
28504 if (!dwarf_strict || dwarf_version >= 5)
28505 {
28506 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28507 "DWARF macro version number");
28508 if (DWARF_OFFSET_SIZE == 8)
28509 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28510 else
28511 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28512 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28513 debug_line_section, NULL);
28514 }
28515
28516 /* The first loop emits the primary .debug_macinfo section,
28517 clearing each macinfo_entry after its op has been emitted.
28518 If a longer range of define/undef ops can be optimized using
28519 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28520 the vector before the first define/undef in the range, and the
28521 whole range of define/undef ops is kept but not emitted. */
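 /* Sketch of the optimization (illustrative): a run of consecutive
 define/undef ops preceded by a dummy (code 0) slot,

      [0] [define] [undef] [define] ...,

 has the dummy slot turned into a DW_MACRO_import op naming the
 comdat group; only that import is emitted by this loop, and the
 second loop below emits the retained define/undef ops into the
 comdat .debug_macinfo section of that name. */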
28522 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28523 {
28524 switch (ref->code)
28525 {
28526 case DW_MACINFO_start_file:
28527 vec_safe_push (files, *ref);
28528 break;
28529 case DW_MACINFO_end_file:
28530 if (!vec_safe_is_empty (files))
28531 files->pop ();
28532 break;
28533 case DW_MACINFO_define:
28534 case DW_MACINFO_undef:
28535 if ((!dwarf_strict || dwarf_version >= 5)
28536 && HAVE_COMDAT_GROUP
28537 && vec_safe_length (files) != 1
28538 && i > 0
28539 && i + 1 < length
28540 && (*macinfo_table)[i - 1].code == 0)
28541 {
28542 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28543 if (count)
28544 {
28545 i += count - 1;
28546 continue;
28547 }
28548 }
28549 break;
28550 case 0:
28551 /* A dummy entry may be inserted at the beginning to be able
28552 to optimize the whole block of predefined macros. */
28553 if (i == 0)
28554 continue;
28555 default:
28556 break;
28557 }
28558 output_macinfo_op (ref);
28559 ref->info = NULL;
28560 ref->code = 0;
28561 }
28562
28563 if (!macinfo_htab)
28564 return;
28565
28566 /* Save the number of transparent includes so we can adjust the
28567 label number for the fat LTO object DWARF. */
28568 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28569
28570 delete macinfo_htab;
28571 macinfo_htab = NULL;
28572
28573 /* If any DW_MACRO_import ops were used, then at each such entry
28574 terminate the current chain, switch to a new comdat .debug_macinfo
28575 section and emit the retained define/undef entries within it. */
28576 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28577 switch (ref->code)
28578 {
28579 case 0:
28580 continue;
28581 case DW_MACRO_import:
28582 {
28583 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28584 tree comdat_key = get_identifier (ref->info);
28585 /* Terminate the previous .debug_macinfo section. */
28586 dw2_asm_output_data (1, 0, "End compilation unit");
28587 targetm.asm_out.named_section (debug_macinfo_section_name,
28588 SECTION_DEBUG
28589 | SECTION_LINKONCE
28590 | (early_lto_debug
28591 ? SECTION_EXCLUDE : 0),
28592 comdat_key);
28593 ASM_GENERATE_INTERNAL_LABEL (label,
28594 DEBUG_MACRO_SECTION_LABEL,
28595 ref->lineno + macinfo_label_base);
28596 ASM_OUTPUT_LABEL (asm_out_file, label);
28597 ref->code = 0;
28598 ref->info = NULL;
28599 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28600 "DWARF macro version number");
28601 if (DWARF_OFFSET_SIZE == 8)
28602 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28603 else
28604 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28605 }
28606 break;
28607 case DW_MACINFO_define:
28608 case DW_MACINFO_undef:
28609 output_macinfo_op (ref);
28610 ref->code = 0;
28611 ref->info = NULL;
28612 break;
28613 default:
28614 gcc_unreachable ();
28615 }
28616
28617 macinfo_label_base += macinfo_label_base_adj;
28618 }
28619
28620 /* Initialize the various sections and labels for dwarf output; when
28621 EARLY_LTO_DEBUG is set, use the early LTO debug variants. Returns the
28622 generation (zero-based number of times the function has been called). */
28623
28624 static unsigned
28625 init_sections_and_labels (bool early_lto_debug)
28626 {
28627 /* As we may get called multiple times, keep a generation count for
28628 labels. */
28629 static unsigned generation = 0;
28630
28631 if (early_lto_debug)
28632 {
28633 if (!dwarf_split_debug_info)
28634 {
28635 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28636 SECTION_DEBUG | SECTION_EXCLUDE,
28637 NULL);
28638 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28639 SECTION_DEBUG | SECTION_EXCLUDE,
28640 NULL);
28641 debug_macinfo_section_name
28642 = ((dwarf_strict && dwarf_version < 5)
28643 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28644 debug_macinfo_section = get_section (debug_macinfo_section_name,
28645 SECTION_DEBUG
28646 | SECTION_EXCLUDE, NULL);
28647 }
28648 else
28649 {
28650 /* ??? Which of the following do we need early? */
28651 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28652 SECTION_DEBUG | SECTION_EXCLUDE,
28653 NULL);
28654 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28655 SECTION_DEBUG | SECTION_EXCLUDE,
28656 NULL);
28657 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28658 SECTION_DEBUG
28659 | SECTION_EXCLUDE, NULL);
28660 debug_skeleton_abbrev_section
28661 = get_section (DEBUG_LTO_ABBREV_SECTION,
28662 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28663 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28664 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28665 generation);
28666
28667 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28668 stay in the main .o, but the skeleton_line goes into the split
28669 off dwo. */
28670 debug_skeleton_line_section
28671 = get_section (DEBUG_LTO_LINE_SECTION,
28672 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28673 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28674 DEBUG_SKELETON_LINE_SECTION_LABEL,
28675 generation);
28676 debug_str_offsets_section
28677 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28678 SECTION_DEBUG | SECTION_EXCLUDE,
28679 NULL);
28680 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28681 DEBUG_SKELETON_INFO_SECTION_LABEL,
28682 generation);
28683 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28684 DEBUG_STR_DWO_SECTION_FLAGS,
28685 NULL);
28686 debug_macinfo_section_name
28687 = ((dwarf_strict && dwarf_version < 5)
28688 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28689 debug_macinfo_section = get_section (debug_macinfo_section_name,
28690 SECTION_DEBUG | SECTION_EXCLUDE,
28691 NULL);
28692 }
28693 /* For macro info and the file table we have to refer to a
28694 debug_line section. */
28695 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28696 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28697 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28698 DEBUG_LINE_SECTION_LABEL, generation);
28699
28700 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28701 DEBUG_STR_SECTION_FLAGS
28702 | SECTION_EXCLUDE, NULL);
28703 if (!dwarf_split_debug_info)
28704 debug_line_str_section
28705 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28706 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28707 }
28708 else
28709 {
28710 if (!dwarf_split_debug_info)
28711 {
28712 debug_info_section = get_section (DEBUG_INFO_SECTION,
28713 SECTION_DEBUG, NULL);
28714 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28715 SECTION_DEBUG, NULL);
28716 debug_loc_section = get_section (dwarf_version >= 5
28717 ? DEBUG_LOCLISTS_SECTION
28718 : DEBUG_LOC_SECTION,
28719 SECTION_DEBUG, NULL);
28720 debug_macinfo_section_name
28721 = ((dwarf_strict && dwarf_version < 5)
28722 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28723 debug_macinfo_section = get_section (debug_macinfo_section_name,
28724 SECTION_DEBUG, NULL);
28725 }
28726 else
28727 {
28728 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28729 SECTION_DEBUG | SECTION_EXCLUDE,
28730 NULL);
28731 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28732 SECTION_DEBUG | SECTION_EXCLUDE,
28733 NULL);
28734 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28735 SECTION_DEBUG, NULL);
28736 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28737 SECTION_DEBUG, NULL);
28738 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28739 SECTION_DEBUG, NULL);
28740 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28741 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28742 generation);
28743
28744 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28745 stay in the main .o, but the skeleton_line goes into the
28746 split off dwo. */
28747 debug_skeleton_line_section
28748 = get_section (DEBUG_DWO_LINE_SECTION,
28749 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28750 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28751 DEBUG_SKELETON_LINE_SECTION_LABEL,
28752 generation);
28753 debug_str_offsets_section
28754 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28755 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28756 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28757 DEBUG_SKELETON_INFO_SECTION_LABEL,
28758 generation);
28759 debug_loc_section = get_section (dwarf_version >= 5
28760 ? DEBUG_DWO_LOCLISTS_SECTION
28761 : DEBUG_DWO_LOC_SECTION,
28762 SECTION_DEBUG | SECTION_EXCLUDE,
28763 NULL);
28764 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28765 DEBUG_STR_DWO_SECTION_FLAGS,
28766 NULL);
28767 debug_macinfo_section_name
28768 = ((dwarf_strict && dwarf_version < 5)
28769 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28770 debug_macinfo_section = get_section (debug_macinfo_section_name,
28771 SECTION_DEBUG | SECTION_EXCLUDE,
28772 NULL);
28773 }
28774 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28775 SECTION_DEBUG, NULL);
28776 debug_line_section = get_section (DEBUG_LINE_SECTION,
28777 SECTION_DEBUG, NULL);
28778 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28779 SECTION_DEBUG, NULL);
28780 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28781 SECTION_DEBUG, NULL);
28782 debug_str_section = get_section (DEBUG_STR_SECTION,
28783 DEBUG_STR_SECTION_FLAGS, NULL);
28784 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28785 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28786 DEBUG_STR_SECTION_FLAGS, NULL);
28787
28788 debug_ranges_section = get_section (dwarf_version >= 5
28789 ? DEBUG_RNGLISTS_SECTION
28790 : DEBUG_RANGES_SECTION,
28791 SECTION_DEBUG, NULL);
28792 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28793 SECTION_DEBUG, NULL);
28794 }
28795
28796 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28797 DEBUG_ABBREV_SECTION_LABEL, generation);
28798 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28799 DEBUG_INFO_SECTION_LABEL, generation);
28800 info_section_emitted = false;
28801 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28802 DEBUG_LINE_SECTION_LABEL, generation);
28803 /* There are up to 4 unique ranges labels per generation.
28804 See also output_rnglists. */
28805 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28806 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28807 if (dwarf_version >= 5 && dwarf_split_debug_info)
28808 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28809 DEBUG_RANGES_SECTION_LABEL,
28810 1 + generation * 4);
28811 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28812 DEBUG_ADDR_SECTION_LABEL, generation);
28813 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28814 (dwarf_strict && dwarf_version < 5)
28815 ? DEBUG_MACINFO_SECTION_LABEL
28816 : DEBUG_MACRO_SECTION_LABEL, generation);
28817 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28818 generation);
28819
28820 ++generation;
28821 return generation - 1;
28822 }
28823
28824 /* Set up for Dwarf output at the start of compilation. */
28825
28826 static void
28827 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28828 {
28829 /* Allocate the file_table. */
28830 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28831
28832 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28833 /* Allocate the decl_die_table. */
28834 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28835
28836 /* Allocate the decl_loc_table. */
28837 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28838
28839 /* Allocate the cached_dw_loc_list_table. */
28840 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28841
28842 /* Allocate the initial hunk of the abbrev_die_table. */
28843 vec_alloc (abbrev_die_table, 256);
28844 /* Zero-th entry is allocated, but unused. */
28845 abbrev_die_table->quick_push (NULL);
28846
28847 /* Allocate the dwarf_proc_stack_usage_map. */
28848 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28849
28850 /* Allocate the pubtypes and pubnames vectors. */
28851 vec_alloc (pubname_table, 32);
28852 vec_alloc (pubtype_table, 32);
28853
28854 vec_alloc (incomplete_types, 64);
28855
28856 vec_alloc (used_rtx_array, 32);
28857
28858 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28859 vec_alloc (macinfo_table, 64);
28860 #endif
28861
28862 /* If front-ends already registered a main translation unit but we were not
28863 ready to perform the association, do this now. */
28864 if (main_translation_unit != NULL_TREE)
28865 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28866 }
28867
28868 /* Called before compile () starts outputting functions, variables
28869 and toplevel asms into assembly. */
28870
28871 static void
28872 dwarf2out_assembly_start (void)
28873 {
28874 if (text_section_line_info)
28875 return;
28876
28877 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28878 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28879 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28880 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28881 COLD_TEXT_SECTION_LABEL, 0);
28882 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28883
28884 switch_to_section (text_section);
28885 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28886 #endif
28887
28888 /* Make sure the line number table for .text always exists. */
28889 text_section_line_info = new_line_info_table ();
28890 text_section_line_info->end_label = text_end_label;
28891
28892 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28893 cur_line_info_table = text_section_line_info;
28894 #endif
28895
28896 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28897 && dwarf2out_do_cfi_asm ()
28898 && !dwarf2out_do_eh_frame ())
28899 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28900 }
28901
28902 /* A helper function for dwarf2out_finish called through
28903 htab_traverse. Assign a string its index. All strings must be
28904 collected into the table by the time index_string is called,
28905 because the indexing code relies on htab_traverse to traverse nodes
28906 in the same order for each run. */
28907
28908 int
28909 index_string (indirect_string_node **h, unsigned int *index)
28910 {
28911 indirect_string_node *node = *h;
28912
28913 find_string_form (node);
28914 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28915 {
28916 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28917 node->index = *index;
28918 *index += 1;
28919 }
28920 return 1;
28921 }
28922
28923 /* A helper function for output_indirect_strings called through
28924 htab_traverse. Output the offset to a string and update the
28925 current offset. */
28926
28927 int
28928 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28929 {
28930 indirect_string_node *node = *h;
28931
28932 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28933 {
28934 /* Assert that this node has been assigned an index. */
28935 gcc_assert (node->index != NO_INDEX_ASSIGNED
28936 && node->index != NOT_INDEXED);
28937 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28938 "indexed string 0x%x: %s", node->index, node->str);
28939 *offset += strlen (node->str) + 1;
28940 }
28941 return 1;
28942 }
28943
28944 /* A helper function for dwarf2out_finish called through
28945 htab_traverse. Output the indexed string. */
28946
28947 int
28948 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28949 {
28950 struct indirect_string_node *node = *h;
28951
28952 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28953 {
28954 /* Assert that the strings are output in the same order as their
28955 indexes were assigned. */
28956 gcc_assert (*cur_idx == node->index);
28957 assemble_string (node->str, strlen (node->str) + 1);
28958 *cur_idx += 1;
28959 }
28960 return 1;
28961 }
28962
28963 /* A helper function for output_indirect_strings. Counts the number
28964 of indexed string offsets. Must match the logic of the functions
28965 output_index_string[_offset] above. */
28966 int
28967 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28968 {
28969 struct indirect_string_node *node = *h;
28970
28971 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28972 *last_idx += 1;
28973 return 1;
28974 }
28975
28976 /* A helper function for dwarf2out_finish called through
28977 htab_traverse. Emit one queued .debug_str string. */
28978
28979 int
28980 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28981 {
28982 struct indirect_string_node *node = *h;
28983
28984 node->form = find_string_form (node);
28985 if (node->form == form && node->refcount > 0)
28986 {
28987 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28988 assemble_string (node->str, strlen (node->str) + 1);
28989 }
28990
28991 return 1;
28992 }
28993
28994 /* Output the indexed string table. */
28995
28996 static void
28997 output_indirect_strings (void)
28998 {
28999 switch_to_section (debug_str_section);
29000 if (!dwarf_split_debug_info)
29001 debug_str_hash->traverse<enum dwarf_form,
29002 output_indirect_string> (DW_FORM_strp);
29003 else
29004 {
29005 unsigned int offset = 0;
29006 unsigned int cur_idx = 0;
29007
29008 if (skeleton_debug_str_hash)
29009 skeleton_debug_str_hash->traverse<enum dwarf_form,
29010 output_indirect_string> (DW_FORM_strp);
29011
29012 switch_to_section (debug_str_offsets_section);
29013 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29014 header. Note that we don't need to generate a label for the
29015 actual index table following the header here, because this is
29016 for the split dwarf case only. In a .dwo file there is only
29017 one string offsets table (and one debug info section). But
29018 if we were to start using string offset tables for the main (or
29019 skeleton) unit, we would have to add a DW_AT_str_offsets_base
29020 pointing to the actual index after the header. Split dwarf
29021 units will never have a string offsets base attribute. When
29022 a split unit is moved into a .dwp file, the string offsets can
29023 be found through the .debug_cu_index section table. */
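 /* For illustration, with 32-bit DWARF the header emitted below is a
 4-byte unit length (number of indexed strings times the offset size,
 plus 4 for the version and padding fields), a 2-byte version (5) and
 2 bytes of zero padding. */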
29024 if (dwarf_version >= 5)
29025 {
29026 unsigned int last_idx = 0;
29027 unsigned long str_offsets_length;
29028
29029 debug_str_hash->traverse_noresize
29030 <unsigned int *, count_index_strings> (&last_idx);
29031 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
29032 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29033 dw2_asm_output_data (4, 0xffffffff,
29034 "Escape value for 64-bit DWARF extension");
29035 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
29036 "Length of string offsets unit");
29037 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29038 dw2_asm_output_data (2, 0, "Header zero padding");
29039 }
29040 debug_str_hash->traverse_noresize
29041 <unsigned int *, output_index_string_offset> (&offset);
29042 switch_to_section (debug_str_dwo_section);
29043 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29044 (&cur_idx);
29045 }
29046 }
29047
29048 /* Callback for htab_traverse to assign an index to an entry in the
29049 table, and to write that entry to the .debug_addr section. */
29050
29051 int
29052 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29053 {
29054 addr_table_entry *entry = *slot;
29055
29056 if (entry->refcount == 0)
29057 {
29058 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29059 || entry->index == NOT_INDEXED);
29060 return 1;
29061 }
29062
29063 gcc_assert (entry->index == *cur_index);
29064 (*cur_index)++;
29065
29066 switch (entry->kind)
29067 {
29068 case ate_kind_rtx:
29069 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29070 "0x%x", entry->index);
29071 break;
29072 case ate_kind_rtx_dtprel:
29073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29074 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29075 DWARF2_ADDR_SIZE,
29076 entry->addr.rtl);
29077 fputc ('\n', asm_out_file);
29078 break;
29079 case ate_kind_label:
29080 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29081 "0x%x", entry->index);
29082 break;
29083 default:
29084 gcc_unreachable ();
29085 }
29086 return 1;
29087 }
29088
29089 /* A helper function for dwarf2out_finish. Counts the number
29090    of indexed addresses.  Must match the logic of the function
29091    output_addr_table_entry above.  */
29092 int
29093 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29094 {
29095 addr_table_entry *entry = *slot;
29096
29097 if (entry->refcount > 0)
29098 *last_idx += 1;
29099 return 1;
29100 }
29101
29102 /* Produce the .debug_addr section. */
29103
29104 static void
29105 output_addr_table (void)
29106 {
29107 unsigned int index = 0;
29108 if (addr_index_table == NULL || addr_index_table->size () == 0)
29109 return;
29110
29111 switch_to_section (debug_addr_section);
29112 addr_index_table
29113 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29114 }
29115
29116 #if ENABLE_ASSERT_CHECKING
29117 /* Verify that all marks are clear. */
29118
29119 static void
29120 verify_marks_clear (dw_die_ref die)
29121 {
29122 dw_die_ref c;
29123
29124 gcc_assert (! die->die_mark);
29125 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29126 }
29127 #endif /* ENABLE_ASSERT_CHECKING */
29128
29129 /* Clear the marks for a die and its children.
29130 Be cool if the mark isn't set. */
29131
29132 static void
29133 prune_unmark_dies (dw_die_ref die)
29134 {
29135 dw_die_ref c;
29136
29137 if (die->die_mark)
29138 die->die_mark = 0;
29139 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29140 }
29141
29142 /* Given LOC that is referenced by a DIE we're marking as used, find all
29143    the DWARF procedures it references and mark them as used.  */
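/* For example (sketch only), an expression such as
     DW_OP_dup DW_OP_call4 <DW_TAG_dwarf_procedure DIE>
   keeps the referenced procedure DIE alive, which is why the DW_OP_call*
   and typed-operation operands are followed below.  */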
29144
29145 static void
29146 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29147 {
29148 for (; loc != NULL; loc = loc->dw_loc_next)
29149 switch (loc->dw_loc_opc)
29150 {
29151 case DW_OP_implicit_pointer:
29152 case DW_OP_convert:
29153 case DW_OP_reinterpret:
29154 case DW_OP_GNU_implicit_pointer:
29155 case DW_OP_GNU_convert:
29156 case DW_OP_GNU_reinterpret:
29157 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29158 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29159 break;
29160 case DW_OP_GNU_variable_value:
29161 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29162 {
29163 dw_die_ref ref
29164 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29165 if (ref == NULL)
29166 break;
29167 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29168 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29169 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29170 }
29171 /* FALLTHRU */
29172 case DW_OP_call2:
29173 case DW_OP_call4:
29174 case DW_OP_call_ref:
29175 case DW_OP_const_type:
29176 case DW_OP_GNU_const_type:
29177 case DW_OP_GNU_parameter_ref:
29178 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29179 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29180 break;
29181 case DW_OP_regval_type:
29182 case DW_OP_deref_type:
29183 case DW_OP_GNU_regval_type:
29184 case DW_OP_GNU_deref_type:
29185 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29186 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29187 break;
29188 case DW_OP_entry_value:
29189 case DW_OP_GNU_entry_value:
29190 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29191 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29192 break;
29193 default:
29194 break;
29195 }
29196 }
29197
29198 /* Given DIE that we're marking as used, find any other dies
29199 it references as attributes and mark them as used. */
29200
29201 static void
29202 prune_unused_types_walk_attribs (dw_die_ref die)
29203 {
29204 dw_attr_node *a;
29205 unsigned ix;
29206
29207 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29208 {
29209 switch (AT_class (a))
29210 {
29211 /* Make sure DWARF procedures referenced by location descriptions will
29212 get emitted. */
29213 case dw_val_class_loc:
29214 prune_unused_types_walk_loc_descr (AT_loc (a));
29215 break;
29216 case dw_val_class_loc_list:
29217 for (dw_loc_list_ref list = AT_loc_list (a);
29218 list != NULL;
29219 list = list->dw_loc_next)
29220 prune_unused_types_walk_loc_descr (list->expr);
29221 break;
29222
29223 case dw_val_class_view_list:
29224 /* This points to a loc_list in another attribute, so it's
29225 already covered. */
29226 break;
29227
29228 case dw_val_class_die_ref:
29229 /* A reference to another DIE.
29230 Make sure that it will get emitted.
29231 If it was broken out into a comdat group, don't follow it. */
29232 if (! AT_ref (a)->comdat_type_p
29233 || a->dw_attr == DW_AT_specification)
29234 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29235 break;
29236
29237 case dw_val_class_str:
29238 /* Set the string's refcount to 0 so that prune_unused_types_mark
29239 accounts properly for it. */
29240 a->dw_attr_val.v.val_str->refcount = 0;
29241 break;
29242
29243 default:
29244 break;
29245 }
29246 }
29247 }
29248
29249 /* Mark the children DIEs of DIE that describe its generic parms and args.  */
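/* E.g. for the DIE of an instantiation foo<int>, the
   DW_TAG_template_type_parameter child describing "int" is marked here
   even if nothing else references it.  (Illustrative example.)  */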
29250
29251 static void
29252 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29253 {
29254 dw_die_ref c;
29255
29256 if (die == NULL || die->die_child == NULL)
29257 return;
29258 c = die->die_child;
29259 do
29260 {
29261 if (is_template_parameter (c))
29262 prune_unused_types_mark (c, 1);
29263 c = c->die_sib;
29264 } while (c && c != die->die_child);
29265 }
29266
29267 /* Mark DIE as being used. If DOKIDS is true, then walk down
29268 to DIE's children. */
29269
29270 static void
29271 prune_unused_types_mark (dw_die_ref die, int dokids)
29272 {
29273 dw_die_ref c;
29274
29275 if (die->die_mark == 0)
29276 {
29277 /* We haven't done this node yet. Mark it as used. */
29278 die->die_mark = 1;
29279 /* If this is the DIE of a generic type instantiation,
29280 mark the children DIEs that describe its generic parms and
29281 args. */
29282 prune_unused_types_mark_generic_parms_dies (die);
29283
29284 /* We also have to mark its parents as used.
29285 (But we don't want to mark our parent's kids due to this,
29286 unless it is a class.) */
29287 if (die->die_parent)
29288 prune_unused_types_mark (die->die_parent,
29289 class_scope_p (die->die_parent));
29290
29291 /* Mark any referenced nodes. */
29292 prune_unused_types_walk_attribs (die);
29293
29294 /* If this node is a specification,
29295 also mark the definition, if it exists. */
29296 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29297 prune_unused_types_mark (die->die_definition, 1);
29298 }
29299
29300 if (dokids && die->die_mark != 2)
29301 {
29302 /* We need to walk the children, but haven't done so yet.
29303 Remember that we've walked the kids. */
29304 die->die_mark = 2;
29305
29306 /* If this is an array type, we need to make sure our
29307 kids get marked, even if they're types. If we're
29308 breaking out types into comdat sections, do this
29309 for all type definitions. */
29310 if (die->die_tag == DW_TAG_array_type
29311 || (use_debug_types
29312 && is_type_die (die) && ! is_declaration_die (die)))
29313 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29314 else
29315 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29316 }
29317 }
29318
29319 /* For local classes, check whether any static member functions were emitted
29320    and if so, mark them.  */
29321
29322 static void
29323 prune_unused_types_walk_local_classes (dw_die_ref die)
29324 {
29325 dw_die_ref c;
29326
29327 if (die->die_mark == 2)
29328 return;
29329
29330 switch (die->die_tag)
29331 {
29332 case DW_TAG_structure_type:
29333 case DW_TAG_union_type:
29334 case DW_TAG_class_type:
29335 case DW_TAG_interface_type:
29336 break;
29337
29338 case DW_TAG_subprogram:
29339 if (!get_AT_flag (die, DW_AT_declaration)
29340 || die->die_definition != NULL)
29341 prune_unused_types_mark (die, 1);
29342 return;
29343
29344 default:
29345 return;
29346 }
29347
29348 /* Mark children. */
29349 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29350 }
29351
29352 /* Walk the tree DIE and mark types that we actually use. */
29353
29354 static void
29355 prune_unused_types_walk (dw_die_ref die)
29356 {
29357 dw_die_ref c;
29358
29359 /* Don't do anything if this node is already marked and
29360 children have been marked as well. */
29361 if (die->die_mark == 2)
29362 return;
29363
29364 switch (die->die_tag)
29365 {
29366 case DW_TAG_structure_type:
29367 case DW_TAG_union_type:
29368 case DW_TAG_class_type:
29369 case DW_TAG_interface_type:
29370 if (die->die_perennial_p)
29371 break;
29372
29373 for (c = die->die_parent; c; c = c->die_parent)
29374 if (c->die_tag == DW_TAG_subprogram)
29375 break;
29376
29377 /* Finding used static member functions inside of classes
29378 is needed just for local classes, because for other classes
29379 static member function DIEs with DW_AT_specification
29380 are emitted outside of the DW_TAG_*_type. If we ever change
29381 it, we'd need to call this even for non-local classes. */
29382 if (c)
29383 prune_unused_types_walk_local_classes (die);
29384
29385 /* It's a type node --- don't mark it. */
29386 return;
29387
29388 case DW_TAG_const_type:
29389 case DW_TAG_packed_type:
29390 case DW_TAG_pointer_type:
29391 case DW_TAG_reference_type:
29392 case DW_TAG_rvalue_reference_type:
29393 case DW_TAG_volatile_type:
29394 case DW_TAG_typedef:
29395 case DW_TAG_array_type:
29396 case DW_TAG_friend:
29397 case DW_TAG_enumeration_type:
29398 case DW_TAG_subroutine_type:
29399 case DW_TAG_string_type:
29400 case DW_TAG_set_type:
29401 case DW_TAG_subrange_type:
29402 case DW_TAG_ptr_to_member_type:
29403 case DW_TAG_file_type:
29404 /* Type nodes are useful only when other DIEs reference them --- don't
29405 mark them. */
29406 /* FALLTHROUGH */
29407
29408 case DW_TAG_dwarf_procedure:
29409 /* Likewise for DWARF procedures. */
29410
29411 if (die->die_perennial_p)
29412 break;
29413
29414 return;
29415
29416 case DW_TAG_variable:
29417 if (flag_debug_only_used_symbols)
29418 {
29419 if (die->die_perennial_p)
29420 break;
29421
29422 /* premark_used_variables marks external variables --- don't mark
29423 them here. */
29424 if (get_AT (die, DW_AT_external))
29425 return;
29426 }
29427 /* FALLTHROUGH */
29428
29429 default:
29430 /* Mark everything else. */
29431 break;
29432 }
29433
29434 if (die->die_mark == 0)
29435 {
29436 die->die_mark = 1;
29437
29438 /* Now, mark any dies referenced from here. */
29439 prune_unused_types_walk_attribs (die);
29440 }
29441
29442 die->die_mark = 2;
29443
29444 /* Mark children. */
29445 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29446 }
29447
29448 /* Increment the string counts on strings referred to from DIE's
29449 attributes. */
29450
29451 static void
29452 prune_unused_types_update_strings (dw_die_ref die)
29453 {
29454 dw_attr_node *a;
29455 unsigned ix;
29456
29457 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29458 if (AT_class (a) == dw_val_class_str)
29459 {
29460 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29461 s->refcount++;
29462 /* Avoid unnecessarily putting strings that are used less than
29463 twice in the hash table. */
29464 if (s->refcount
29465 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29466 {
29467 indirect_string_node **slot
29468 = debug_str_hash->find_slot_with_hash (s->str,
29469 htab_hash_string (s->str),
29470 INSERT);
29471 gcc_assert (*slot == NULL);
29472 *slot = s;
29473 }
29474 }
29475 }
29476
29477 /* Mark DIE and its children as removed. */
29478
29479 static void
29480 mark_removed (dw_die_ref die)
29481 {
29482 dw_die_ref c;
29483 die->removed = true;
29484 FOR_EACH_CHILD (die, c, mark_removed (c));
29485 }
29486
29487 /* Remove from the tree DIE any dies that aren't marked. */
29488
29489 static void
29490 prune_unused_types_prune (dw_die_ref die)
29491 {
29492 dw_die_ref c;
29493
29494 gcc_assert (die->die_mark);
29495 prune_unused_types_update_strings (die);
29496
29497 if (! die->die_child)
29498 return;
29499
29500 c = die->die_child;
29501 do {
29502 dw_die_ref prev = c, next;
29503 for (c = c->die_sib; ! c->die_mark; c = next)
29504 if (c == die->die_child)
29505 {
29506 /* No marked children between 'prev' and the end of the list. */
29507 if (prev == c)
29508 /* No marked children at all. */
29509 die->die_child = NULL;
29510 else
29511 {
29512 prev->die_sib = c->die_sib;
29513 die->die_child = prev;
29514 }
29515 c->die_sib = NULL;
29516 mark_removed (c);
29517 return;
29518 }
29519 else
29520 {
29521 next = c->die_sib;
29522 c->die_sib = NULL;
29523 mark_removed (c);
29524 }
29525
29526 if (c != prev->die_sib)
29527 prev->die_sib = c;
29528 prune_unused_types_prune (c);
29529 } while (c != die->die_child);
29530 }
29531
29532 /* Remove dies representing declarations that we never use. */
29533
29534 static void
29535 prune_unused_types (void)
29536 {
29537 unsigned int i;
29538 limbo_die_node *node;
29539 comdat_type_node *ctnode;
29540 pubname_entry *pub;
29541 dw_die_ref base_type;
29542
29543 #if ENABLE_ASSERT_CHECKING
29544 /* All the marks should already be clear. */
29545 verify_marks_clear (comp_unit_die ());
29546 for (node = limbo_die_list; node; node = node->next)
29547 verify_marks_clear (node->die);
29548 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29549 verify_marks_clear (ctnode->root_die);
29550 #endif /* ENABLE_ASSERT_CHECKING */
29551
29552 /* Mark types that are used in global variables. */
29553 premark_types_used_by_global_vars ();
29554
29555 /* Mark variables used in the symtab. */
29556 if (flag_debug_only_used_symbols)
29557 premark_used_variables ();
29558
29559 /* Set the mark on nodes that are actually used. */
29560 prune_unused_types_walk (comp_unit_die ());
29561 for (node = limbo_die_list; node; node = node->next)
29562 prune_unused_types_walk (node->die);
29563 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29564 {
29565 prune_unused_types_walk (ctnode->root_die);
29566 prune_unused_types_mark (ctnode->type_die, 1);
29567 }
29568
29569 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29570 are unusual in that they are pubnames that are the children of pubtypes.
29571 They should only be marked via their parent DW_TAG_enumeration_type die,
29572 not as roots in themselves. */
29573 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29574 if (pub->die->die_tag != DW_TAG_enumerator)
29575 prune_unused_types_mark (pub->die, 1);
29576 for (i = 0; base_types.iterate (i, &base_type); i++)
29577 prune_unused_types_mark (base_type, 1);
29578
29579 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29580 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29581 callees). */
29582 cgraph_node *cnode;
29583 FOR_EACH_FUNCTION (cnode)
29584 if (cnode->referred_to_p (false))
29585 {
29586 dw_die_ref die = lookup_decl_die (cnode->decl);
29587 if (die == NULL || die->die_mark)
29588 continue;
29589 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29590 if (e->caller != cnode
29591 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29592 {
29593 prune_unused_types_mark (die, 1);
29594 break;
29595 }
29596 }
29597
29598 if (debug_str_hash)
29599 debug_str_hash->empty ();
29600 if (skeleton_debug_str_hash)
29601 skeleton_debug_str_hash->empty ();
29602 prune_unused_types_prune (comp_unit_die ());
29603 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29604 {
29605 node = *pnode;
29606 if (!node->die->die_mark)
29607 *pnode = node->next;
29608 else
29609 {
29610 prune_unused_types_prune (node->die);
29611 pnode = &node->next;
29612 }
29613 }
29614 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29615 prune_unused_types_prune (ctnode->root_die);
29616
29617 /* Leave the marks clear. */
29618 prune_unmark_dies (comp_unit_die ());
29619 for (node = limbo_die_list; node; node = node->next)
29620 prune_unmark_dies (node->die);
29621 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29622 prune_unmark_dies (ctnode->root_die);
29623 }
29624
29625 /* Helpers to manipulate hash table of comdat type units. */
29626
29627 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29628 {
29629 static inline hashval_t hash (const comdat_type_node *);
29630 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29631 };
29632
29633 inline hashval_t
29634 comdat_type_hasher::hash (const comdat_type_node *type_node)
29635 {
29636 hashval_t h;
29637 memcpy (&h, type_node->signature, sizeof (h));
29638 return h;
29639 }
29640
29641 inline bool
29642 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29643 const comdat_type_node *type_node_2)
29644 {
29645 return (! memcmp (type_node_1->signature, type_node_2->signature,
29646 DWARF_TYPE_SIGNATURE_SIZE));
29647 }
29648
29649 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29650    to the location where it would have been added had we known its
29651    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29652    probably improve the compactness of debug info by removing equivalent
29653    abbrevs, and hide any differences caused by deferring the computation
29654    of the assembler name, triggered by e.g. PCH.  */
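/* Sketch: if the attribute vector currently is
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, ..., DW_AT_linkage_name
   the linkage name is popped and re-inserted right after DW_AT_decl_line
   (or DW_AT_decl_column/DW_AT_name, whichever of those is found last),
   i.e. where it would have been added originally.  */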
29655
29656 static inline void
29657 move_linkage_attr (dw_die_ref die)
29658 {
29659 unsigned ix = vec_safe_length (die->die_attr);
29660 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29661
29662 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29663 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29664
29665 while (--ix > 0)
29666 {
29667 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29668
29669 if (prev->dw_attr == DW_AT_decl_line
29670 || prev->dw_attr == DW_AT_decl_column
29671 || prev->dw_attr == DW_AT_name)
29672 break;
29673 }
29674
29675 if (ix != vec_safe_length (die->die_attr) - 1)
29676 {
29677 die->die_attr->pop ();
29678 die->die_attr->quick_insert (ix, linkage);
29679 }
29680 }
29681
29682 /* Helper function for resolve_addr.  Mark DW_TAG_base_type nodes
29683    referenced from typed stack ops and count how often they are used.  */
29684
29685 static void
29686 mark_base_types (dw_loc_descr_ref loc)
29687 {
29688 dw_die_ref base_type = NULL;
29689
29690 for (; loc; loc = loc->dw_loc_next)
29691 {
29692 switch (loc->dw_loc_opc)
29693 {
29694 case DW_OP_regval_type:
29695 case DW_OP_deref_type:
29696 case DW_OP_GNU_regval_type:
29697 case DW_OP_GNU_deref_type:
29698 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29699 break;
29700 case DW_OP_convert:
29701 case DW_OP_reinterpret:
29702 case DW_OP_GNU_convert:
29703 case DW_OP_GNU_reinterpret:
29704 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29705 continue;
29706 /* FALLTHRU */
29707 case DW_OP_const_type:
29708 case DW_OP_GNU_const_type:
29709 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29710 break;
29711 case DW_OP_entry_value:
29712 case DW_OP_GNU_entry_value:
29713 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29714 continue;
29715 default:
29716 continue;
29717 }
29718 gcc_assert (base_type->die_parent == comp_unit_die ());
29719 if (base_type->die_mark)
29720 base_type->die_mark++;
29721 else
29722 {
29723 base_types.safe_push (base_type);
29724 base_type->die_mark = 1;
29725 }
29726 }
29727 }
29728
29729 /* Comparison function for sorting marked base types. */
29730
29731 static int
29732 base_type_cmp (const void *x, const void *y)
29733 {
29734 dw_die_ref dx = *(const dw_die_ref *) x;
29735 dw_die_ref dy = *(const dw_die_ref *) y;
29736 unsigned int byte_size1, byte_size2;
29737 unsigned int encoding1, encoding2;
29738 unsigned int align1, align2;
29739 if (dx->die_mark > dy->die_mark)
29740 return -1;
29741 if (dx->die_mark < dy->die_mark)
29742 return 1;
29743 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29744 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29745 if (byte_size1 < byte_size2)
29746 return 1;
29747 if (byte_size1 > byte_size2)
29748 return -1;
29749 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29750 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29751 if (encoding1 < encoding2)
29752 return 1;
29753 if (encoding1 > encoding2)
29754 return -1;
29755 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29756 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29757 if (align1 < align2)
29758 return 1;
29759 if (align1 > align2)
29760 return -1;
29761 return 0;
29762 }
29763
29764 /* Move base types marked by mark_base_types as early as possible
29765    in the CU, sorted by decreasing usage count, both to make the
29766    uleb128 references as small as possible and to make sure they
29767    will have die_offset already computed by calc_die_sizes when
29768    the sizes of typed stack loc ops are computed.  */
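/* Rough illustration: the operand of e.g. DW_OP_convert is an unsigned
   LEB128 DIE offset relative to the CU start, so a heavily used base
   type placed early (offset <= 0x7f) costs one byte per reference,
   while the same DIE left at, say, offset 0x2345 would cost two.
   (Offsets here are made up for the example.)  */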
29769
29770 static void
29771 move_marked_base_types (void)
29772 {
29773 unsigned int i;
29774 dw_die_ref base_type, die, c;
29775
29776 if (base_types.is_empty ())
29777 return;
29778
29779 /* Sort by decreasing usage count, they will be added again in that
29780 order later on. */
29781 base_types.qsort (base_type_cmp);
29782 die = comp_unit_die ();
29783 c = die->die_child;
29784 do
29785 {
29786 dw_die_ref prev = c;
29787 c = c->die_sib;
29788 while (c->die_mark)
29789 {
29790 remove_child_with_prev (c, prev);
29791 /* As base types got marked, there must be at least
29792 one node other than DW_TAG_base_type. */
29793 gcc_assert (die->die_child != NULL);
29794 c = prev->die_sib;
29795 }
29796 }
29797 while (c != die->die_child);
29798 gcc_assert (die->die_child);
29799 c = die->die_child;
29800 for (i = 0; base_types.iterate (i, &base_type); i++)
29801 {
29802 base_type->die_mark = 0;
29803 base_type->die_sib = c->die_sib;
29804 c->die_sib = base_type;
29805 c = base_type;
29806 }
29807 }
29808
29809 /* Helper function for resolve_addr.  Attempt to resolve one CONST_STRING
29810    and return true if successful.  Similarly verify that SYMBOL_REFs refer
29811    to variables emitted in the current CU.  */
29812
29813 static bool
29814 resolve_one_addr (rtx *addr)
29815 {
29816 rtx rtl = *addr;
29817
29818 if (GET_CODE (rtl) == CONST_STRING)
29819 {
29820 size_t len = strlen (XSTR (rtl, 0)) + 1;
29821 tree t = build_string (len, XSTR (rtl, 0));
29822 tree tlen = size_int (len - 1);
29823 TREE_TYPE (t)
29824 = build_array_type (char_type_node, build_index_type (tlen));
29825 rtl = lookup_constant_def (t);
29826 if (!rtl || !MEM_P (rtl))
29827 return false;
29828 rtl = XEXP (rtl, 0);
29829 if (GET_CODE (rtl) == SYMBOL_REF
29830 && SYMBOL_REF_DECL (rtl)
29831 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29832 return false;
29833 vec_safe_push (used_rtx_array, rtl);
29834 *addr = rtl;
29835 return true;
29836 }
29837
29838 if (GET_CODE (rtl) == SYMBOL_REF
29839 && SYMBOL_REF_DECL (rtl))
29840 {
29841 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29842 {
29843 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29844 return false;
29845 }
29846 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29847 return false;
29848 }
29849
29850 if (GET_CODE (rtl) == CONST)
29851 {
29852 subrtx_ptr_iterator::array_type array;
29853 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29854 if (!resolve_one_addr (*iter))
29855 return false;
29856 }
29857
29858 return true;
29859 }
29860
29861 /* For STRING_CST, return the SYMBOL_REF of its constant pool entry,
29862    if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29863    from DW_OP_implicit_pointer if the string hasn't been seen yet.  */
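/* Sketch of the intent: the first time a pooled literal such as "hi"
   is seen, this creates roughly

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value 3, bytes 'h' 'i' '\0'

   so that later DW_OP_implicit_pointer uses can refer to that DIE.  */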
29864
29865 static rtx
29866 string_cst_pool_decl (tree t)
29867 {
29868 rtx rtl = output_constant_def (t, 1);
29869 unsigned char *array;
29870 dw_loc_descr_ref l;
29871 tree decl;
29872 size_t len;
29873 dw_die_ref ref;
29874
29875 if (!rtl || !MEM_P (rtl))
29876 return NULL_RTX;
29877 rtl = XEXP (rtl, 0);
29878 if (GET_CODE (rtl) != SYMBOL_REF
29879 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29880 return NULL_RTX;
29881
29882 decl = SYMBOL_REF_DECL (rtl);
29883 if (!lookup_decl_die (decl))
29884 {
29885 len = TREE_STRING_LENGTH (t);
29886 vec_safe_push (used_rtx_array, rtl);
29887 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29888 array = ggc_vec_alloc<unsigned char> (len);
29889 memcpy (array, TREE_STRING_POINTER (t), len);
29890 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29891 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29892 l->dw_loc_oprnd2.v.val_vec.length = len;
29893 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29894 l->dw_loc_oprnd2.v.val_vec.array = array;
29895 add_AT_loc (ref, DW_AT_location, l);
29896 equate_decl_number_to_die (decl, ref);
29897 }
29898 return rtl;
29899 }
29900
29901 /* Helper function of resolve_addr_in_expr.  LOC is
29902    a DW_OP_addr followed by DW_OP_stack_value, either at the start
29903    of an exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29904    resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
29905    with DW_OP_implicit_pointer if possible and return true;
29906    if unsuccessful, return false.  */
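/* Illustrative sketch: an unresolvable
     DW_OP_addr <var + 16> DW_OP_stack_value
   becomes
     DW_OP_implicit_pointer <DIE of var> 16
   provided var's DIE already has DW_AT_location or DW_AT_const_value
   (the variable name is made up).  */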
29907
29908 static bool
29909 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29910 {
29911 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29912 HOST_WIDE_INT offset = 0;
29913 dw_die_ref ref = NULL;
29914 tree decl;
29915
29916 if (GET_CODE (rtl) == CONST
29917 && GET_CODE (XEXP (rtl, 0)) == PLUS
29918 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29919 {
29920 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29921 rtl = XEXP (XEXP (rtl, 0), 0);
29922 }
29923 if (GET_CODE (rtl) == CONST_STRING)
29924 {
29925 size_t len = strlen (XSTR (rtl, 0)) + 1;
29926 tree t = build_string (len, XSTR (rtl, 0));
29927 tree tlen = size_int (len - 1);
29928
29929 TREE_TYPE (t)
29930 = build_array_type (char_type_node, build_index_type (tlen));
29931 rtl = string_cst_pool_decl (t);
29932 if (!rtl)
29933 return false;
29934 }
29935 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29936 {
29937 decl = SYMBOL_REF_DECL (rtl);
29938 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29939 {
29940 ref = lookup_decl_die (decl);
29941 if (ref && (get_AT (ref, DW_AT_location)
29942 || get_AT (ref, DW_AT_const_value)))
29943 {
29944 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29945 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29946 loc->dw_loc_oprnd1.val_entry = NULL;
29947 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29948 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29949 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29950 loc->dw_loc_oprnd2.v.val_int = offset;
29951 return true;
29952 }
29953 }
29954 }
29955 return false;
29956 }
29957
29958 /* Helper function for resolve_addr.  Handle one location expression and
29959    return false if at least one CONST_STRING or SYMBOL_REF in the
29960    expression couldn't be resolved.  */
29961
29962 static bool
29963 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29964 {
29965 dw_loc_descr_ref keep = NULL;
29966 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29967 switch (loc->dw_loc_opc)
29968 {
29969 case DW_OP_addr:
29970 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29971 {
29972 if ((prev == NULL
29973 || prev->dw_loc_opc == DW_OP_piece
29974 || prev->dw_loc_opc == DW_OP_bit_piece)
29975 && loc->dw_loc_next
29976 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29977 && (!dwarf_strict || dwarf_version >= 5)
29978 && optimize_one_addr_into_implicit_ptr (loc))
29979 break;
29980 return false;
29981 }
29982 break;
29983 case DW_OP_GNU_addr_index:
29984 case DW_OP_addrx:
29985 case DW_OP_GNU_const_index:
29986 case DW_OP_constx:
29987 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29988 || loc->dw_loc_opc == DW_OP_addrx)
29989 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29990 || loc->dw_loc_opc == DW_OP_constx)
29991 && loc->dtprel))
29992 {
29993 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29994 if (!resolve_one_addr (&rtl))
29995 return false;
29996 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29997 loc->dw_loc_oprnd1.val_entry
29998 = add_addr_table_entry (rtl, ate_kind_rtx);
29999 }
30000 break;
30001 case DW_OP_const4u:
30002 case DW_OP_const8u:
30003 if (loc->dtprel
30004 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30005 return false;
30006 break;
30007 case DW_OP_plus_uconst:
30008 if (size_of_loc_descr (loc)
30009 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30010 + 1
30011 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30012 {
30013 dw_loc_descr_ref repl
30014 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30015 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30016 add_loc_descr (&repl, loc->dw_loc_next);
30017 *loc = *repl;
30018 }
30019 break;
30020 case DW_OP_implicit_value:
30021 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30022 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30023 return false;
30024 break;
30025 case DW_OP_implicit_pointer:
30026 case DW_OP_GNU_implicit_pointer:
30027 case DW_OP_GNU_parameter_ref:
30028 case DW_OP_GNU_variable_value:
30029 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30030 {
30031 dw_die_ref ref
30032 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30033 if (ref == NULL)
30034 return false;
30035 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30036 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30037 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30038 }
30039 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30040 {
30041 if (prev == NULL
30042 && loc->dw_loc_next == NULL
30043 && AT_class (a) == dw_val_class_loc)
30044 switch (a->dw_attr)
30045 {
30046 		  /* The following attributes allow both exprloc and reference,
30047 		     so if the whole expression is DW_OP_GNU_variable_value
30048 		     alone we can transform it into a reference.  */
30049 case DW_AT_byte_size:
30050 case DW_AT_bit_size:
30051 case DW_AT_lower_bound:
30052 case DW_AT_upper_bound:
30053 case DW_AT_bit_stride:
30054 case DW_AT_count:
30055 case DW_AT_allocated:
30056 case DW_AT_associated:
30057 case DW_AT_byte_stride:
30058 a->dw_attr_val.val_class = dw_val_class_die_ref;
30059 a->dw_attr_val.val_entry = NULL;
30060 a->dw_attr_val.v.val_die_ref.die
30061 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30062 a->dw_attr_val.v.val_die_ref.external = 0;
30063 return true;
30064 default:
30065 break;
30066 }
30067 if (dwarf_strict)
30068 return false;
30069 }
30070 break;
30071 case DW_OP_const_type:
30072 case DW_OP_regval_type:
30073 case DW_OP_deref_type:
30074 case DW_OP_convert:
30075 case DW_OP_reinterpret:
30076 case DW_OP_GNU_const_type:
30077 case DW_OP_GNU_regval_type:
30078 case DW_OP_GNU_deref_type:
30079 case DW_OP_GNU_convert:
30080 case DW_OP_GNU_reinterpret:
30081 while (loc->dw_loc_next
30082 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30083 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30084 {
30085 dw_die_ref base1, base2;
30086 unsigned enc1, enc2, size1, size2;
30087 if (loc->dw_loc_opc == DW_OP_regval_type
30088 || loc->dw_loc_opc == DW_OP_deref_type
30089 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30090 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30091 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30092 else if (loc->dw_loc_oprnd1.val_class
30093 == dw_val_class_unsigned_const)
30094 break;
30095 else
30096 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30097 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30098 == dw_val_class_unsigned_const)
30099 break;
30100 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30101 gcc_assert (base1->die_tag == DW_TAG_base_type
30102 && base2->die_tag == DW_TAG_base_type);
30103 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30104 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30105 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30106 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30107 if (size1 == size2
30108 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30109 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30110 && loc != keep)
30111 || enc1 == enc2))
30112 {
30113 /* Optimize away next DW_OP_convert after
30114 adjusting LOC's base type die reference. */
30115 if (loc->dw_loc_opc == DW_OP_regval_type
30116 || loc->dw_loc_opc == DW_OP_deref_type
30117 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30118 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30119 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30120 else
30121 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30122 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30123 continue;
30124 }
30125 /* Don't change integer DW_OP_convert after e.g. floating
30126 point typed stack entry. */
30127 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30128 keep = loc->dw_loc_next;
30129 break;
30130 }
30131 break;
30132 default:
30133 break;
30134 }
30135 return true;
30136 }
30137
30138 /* Helper function of resolve_addr.  DIE had a DW_AT_location attribute
30139    of DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
30140    and that DW_OP_addr couldn't be resolved.  resolve_addr has already
30141    removed the DW_AT_location attribute.  This function attempts to add
30142    a new DW_AT_location attribute with DW_OP_implicit_pointer, or a
30143    DW_AT_const_value attribute, if possible.  */
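/* Rough example of the transformation attempted here: for

     static int *p = &q + 1;

   where p itself was optimized away but q's DIE has a location, DIE can
   be given

     DW_AT_location: DW_OP_implicit_pointer <DIE of q> <byte offset>

   instead of losing its location entirely.  (Names are made up.)  */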
30144
30145 static void
30146 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30147 {
30148 if (!VAR_P (decl)
30149 || lookup_decl_die (decl) != die
30150 || DECL_EXTERNAL (decl)
30151 || !TREE_STATIC (decl)
30152 || DECL_INITIAL (decl) == NULL_TREE
30153 || DECL_P (DECL_INITIAL (decl))
30154 || get_AT (die, DW_AT_const_value))
30155 return;
30156
30157 tree init = DECL_INITIAL (decl);
30158 HOST_WIDE_INT offset = 0;
30159 /* For variables that have been optimized away and thus
30160 don't have a memory location, see if we can emit
30161 DW_AT_const_value instead. */
30162 if (tree_add_const_value_attribute (die, init))
30163 return;
30164 if (dwarf_strict && dwarf_version < 5)
30165 return;
30166 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30167 and ADDR_EXPR refers to a decl that has DW_AT_location or
30168 DW_AT_const_value (but isn't addressable, otherwise
30169 resolving the original DW_OP_addr wouldn't fail), see if
30170 we can add DW_OP_implicit_pointer. */
30171 STRIP_NOPS (init);
30172 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30173 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30174 {
30175 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30176 init = TREE_OPERAND (init, 0);
30177 STRIP_NOPS (init);
30178 }
30179 if (TREE_CODE (init) != ADDR_EXPR)
30180 return;
30181 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30182 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30183 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30184 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30185 && TREE_OPERAND (init, 0) != decl))
30186 {
30187 dw_die_ref ref;
30188 dw_loc_descr_ref l;
30189
30190 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30191 {
30192 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30193 if (!rtl)
30194 return;
30195 decl = SYMBOL_REF_DECL (rtl);
30196 }
30197 else
30198 decl = TREE_OPERAND (init, 0);
30199 ref = lookup_decl_die (decl);
30200 if (ref == NULL
30201 || (!get_AT (ref, DW_AT_location)
30202 && !get_AT (ref, DW_AT_const_value)))
30203 return;
30204 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30205 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30206 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30207 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30208 add_AT_loc (die, DW_AT_location, l);
30209 }
30210 }
30211
30212 /* Return NULL if L is a valid DWARF expression, or the first op
30213    that is not a valid DWARF expression.  */
30214
30215 static dw_loc_descr_ref
30216 non_dwarf_expression (dw_loc_descr_ref l)
30217 {
30218 while (l)
30219 {
30220 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30221 return l;
30222 switch (l->dw_loc_opc)
30223 {
30224 case DW_OP_regx:
30225 case DW_OP_implicit_value:
30226 case DW_OP_stack_value:
30227 case DW_OP_implicit_pointer:
30228 case DW_OP_GNU_implicit_pointer:
30229 case DW_OP_GNU_parameter_ref:
30230 case DW_OP_piece:
30231 case DW_OP_bit_piece:
30232 return l;
30233 default:
30234 break;
30235 }
30236 l = l->dw_loc_next;
30237 }
30238 return NULL;
30239 }
30240
30241 /* Return an adjusted copy of EXPR:
30242    If it is an empty DWARF expression, return it.
30243    If it is a valid non-empty DWARF expression,
30244    return a copy of EXPR with DW_OP_deref appended to it.
30245    If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30246    a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30247    If it is a DWARF expression followed by DW_OP_stack_value, return
30248    a copy of the DWARF expression without anything appended.
30249    Otherwise, return NULL.  */
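/* Illustrative cases (sketch only):
     DW_OP_fbreg -16               -> DW_OP_fbreg -16 DW_OP_deref
     DW_OP_reg3                    -> DW_OP_breg3 0
     DW_OP_lit1 DW_OP_stack_value  -> DW_OP_lit1  */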
30250
30251 static dw_loc_descr_ref
30252 copy_deref_exprloc (dw_loc_descr_ref expr)
30253 {
30254 dw_loc_descr_ref tail = NULL;
30255
30256 if (expr == NULL)
30257 return NULL;
30258
30259 dw_loc_descr_ref l = non_dwarf_expression (expr);
30260 if (l && l->dw_loc_next)
30261 return NULL;
30262
30263 if (l)
30264 {
30265 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30266 tail = new_loc_descr ((enum dwarf_location_atom)
30267 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30268 0, 0);
30269 else
30270 switch (l->dw_loc_opc)
30271 {
30272 case DW_OP_regx:
30273 tail = new_loc_descr (DW_OP_bregx,
30274 l->dw_loc_oprnd1.v.val_unsigned, 0);
30275 break;
30276 case DW_OP_stack_value:
30277 break;
30278 default:
30279 return NULL;
30280 }
30281 }
30282 else
30283 tail = new_loc_descr (DW_OP_deref, 0, 0);
30284
30285 dw_loc_descr_ref ret = NULL, *p = &ret;
30286 while (expr != l)
30287 {
30288 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30289 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30290 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30291 p = &(*p)->dw_loc_next;
30292 expr = expr->dw_loc_next;
30293 }
30294 *p = tail;
30295 return ret;
30296 }
30297
30298 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30299    reference to a variable or argument, adjust it if needed and return:
30300    -1 if the DW_AT_string_length attribute and, if present, the
30301    DW_AT_{string_length_,}byte_size attribute should be removed,
30302    0 to keep the attribute, perhaps with minor modifications (no rescan needed),
30303    1 if the attribute has been successfully adjusted.  */
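/* Illustrative sketch of a common case: a DW_AT_string_length of

     DW_OP_GNU_variable_value <DIE of the length variable> DW_OP_stack_value

   is rewritten below, when that variable's location is a plain DWARF
   expression, into the standard

     DW_OP_call4 <DIE of the length variable>  */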
30304
30305 static int
30306 optimize_string_length (dw_attr_node *a)
30307 {
30308 dw_loc_descr_ref l = AT_loc (a), lv;
30309 dw_die_ref die;
30310 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30311 {
30312 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30313 die = lookup_decl_die (decl);
30314 if (die)
30315 {
30316 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30317 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30318 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30319 }
30320 else
30321 return -1;
30322 }
30323 else
30324 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30325
30326 /* DWARF5 allows reference class, so we can then reference the DIE.
30327 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30328 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30329 {
30330 a->dw_attr_val.val_class = dw_val_class_die_ref;
30331 a->dw_attr_val.val_entry = NULL;
30332 a->dw_attr_val.v.val_die_ref.die = die;
30333 a->dw_attr_val.v.val_die_ref.external = 0;
30334 return 0;
30335 }
30336
30337 dw_attr_node *av = get_AT (die, DW_AT_location);
30338 dw_loc_list_ref d;
30339 bool non_dwarf_expr = false;
30340
30341 if (av == NULL)
30342 return dwarf_strict ? -1 : 0;
30343 switch (AT_class (av))
30344 {
30345 case dw_val_class_loc_list:
30346 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30347 if (d->expr && non_dwarf_expression (d->expr))
30348 non_dwarf_expr = true;
30349 break;
30350 case dw_val_class_view_list:
30351 gcc_unreachable ();
30352 case dw_val_class_loc:
30353 lv = AT_loc (av);
30354 if (lv == NULL)
30355 return dwarf_strict ? -1 : 0;
30356 if (non_dwarf_expression (lv))
30357 non_dwarf_expr = true;
30358 break;
30359 default:
30360 return dwarf_strict ? -1 : 0;
30361 }
30362
30363 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30364 into DW_OP_call4 or DW_OP_GNU_variable_value into
30365 DW_OP_call4 DW_OP_deref, do so. */
30366 if (!non_dwarf_expr
30367 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30368 {
30369 l->dw_loc_opc = DW_OP_call4;
30370 if (l->dw_loc_next)
30371 l->dw_loc_next = NULL;
30372 else
30373 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30374 return 0;
30375 }
30376
30377 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30378 copy over the DW_AT_location attribute from die to a. */
30379 if (l->dw_loc_next != NULL)
30380 {
30381 a->dw_attr_val = av->dw_attr_val;
30382 return 1;
30383 }
30384
30385 dw_loc_list_ref list, *p;
30386 switch (AT_class (av))
30387 {
30388 case dw_val_class_loc_list:
30389 p = &list;
30390 list = NULL;
30391 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30392 {
30393 lv = copy_deref_exprloc (d->expr);
30394 if (lv)
30395 {
30396 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30397 p = &(*p)->dw_loc_next;
30398 }
30399 else if (!dwarf_strict && d->expr)
30400 return 0;
30401 }
30402 if (list == NULL)
30403 return dwarf_strict ? -1 : 0;
30404 a->dw_attr_val.val_class = dw_val_class_loc_list;
30405 gen_llsym (list);
30406 *AT_loc_list_ptr (a) = list;
30407 return 1;
30408 case dw_val_class_loc:
30409 lv = copy_deref_exprloc (AT_loc (av));
30410 if (lv == NULL)
30411 return dwarf_strict ? -1 : 0;
30412 a->dw_attr_val.v.val_loc = lv;
30413 return 1;
30414 default:
30415 gcc_unreachable ();
30416 }
30417 }
30418
30419 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30420    an address in the .rodata section if the string literal is emitted
30421    there, or remove the containing location list or replace
30422    DW_AT_const_value with DW_AT_location and an empty location
30423    expression, if it isn't found in .rodata.  Similarly for SYMBOL_REFs,
30424    keep only those that refer to something emitted in the current CU.  */
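/* For instance (sketch only): a DW_AT_location consisting of
   DW_OP_addr <symbol this CU never emitted> either gets a
   DW_OP_implicit_pointer or DW_AT_const_value substitute via the
   helpers above, or is removed together with its attribute, so
   consumers never see an address that does not exist in the output.  */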
30425
30426 static void
30427 resolve_addr (dw_die_ref die)
30428 {
30429 dw_die_ref c;
30430 dw_attr_node *a;
30431 dw_loc_list_ref *curr, *start, loc;
30432 unsigned ix;
30433 bool remove_AT_byte_size = false;
30434
30435 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30436 switch (AT_class (a))
30437 {
30438 case dw_val_class_loc_list:
30439 start = curr = AT_loc_list_ptr (a);
30440 loc = *curr;
30441 gcc_assert (loc);
30442 /* The same list can be referenced more than once. See if we have
30443 already recorded the result from a previous pass. */
30444 if (loc->replaced)
30445 *curr = loc->dw_loc_next;
30446 else if (!loc->resolved_addr)
30447 {
30448 /* As things stand, we do not expect or allow one die to
30449 reference a suffix of another die's location list chain.
30450 References must be identical or completely separate.
30451 There is therefore no need to cache the result of this
30452 pass on any list other than the first; doing so
30453 would lead to unnecessary writes. */
30454 while (*curr)
30455 {
30456 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30457 if (!resolve_addr_in_expr (a, (*curr)->expr))
30458 {
30459 dw_loc_list_ref next = (*curr)->dw_loc_next;
30460 dw_loc_descr_ref l = (*curr)->expr;
30461
30462 if (next && (*curr)->ll_symbol)
30463 {
30464 gcc_assert (!next->ll_symbol);
30465 next->ll_symbol = (*curr)->ll_symbol;
30466 next->vl_symbol = (*curr)->vl_symbol;
30467 }
30468 if (dwarf_split_debug_info)
30469 remove_loc_list_addr_table_entries (l);
30470 *curr = next;
30471 }
30472 else
30473 {
30474 mark_base_types ((*curr)->expr);
30475 curr = &(*curr)->dw_loc_next;
30476 }
30477 }
30478 if (loc == *start)
30479 loc->resolved_addr = 1;
30480 else
30481 {
30482 loc->replaced = 1;
30483 loc->dw_loc_next = *start;
30484 }
30485 }
30486 if (!*start)
30487 {
30488 remove_AT (die, a->dw_attr);
30489 ix--;
30490 }
30491 break;
30492 case dw_val_class_view_list:
30493 {
30494 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30495 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30496 dw_val_node *llnode
30497 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30498 /* If we no longer have a loclist, or it no longer needs
30499 views, drop this attribute. */
30500 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30501 {
30502 remove_AT (die, a->dw_attr);
30503 ix--;
30504 }
30505 break;
30506 }
30507 case dw_val_class_loc:
30508 {
30509 dw_loc_descr_ref l = AT_loc (a);
30510 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30511 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30512 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30513 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30514 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30515 with DW_FORM_ref referencing the same DIE as
30516 DW_OP_GNU_variable_value used to reference. */
30517 if (a->dw_attr == DW_AT_string_length
30518 && l
30519 && l->dw_loc_opc == DW_OP_GNU_variable_value
30520 && (l->dw_loc_next == NULL
30521 || (l->dw_loc_next->dw_loc_next == NULL
30522 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30523 {
30524 switch (optimize_string_length (a))
30525 {
30526 case -1:
30527 remove_AT (die, a->dw_attr);
30528 ix--;
30529 /* If we drop DW_AT_string_length, we need to drop also
30530 DW_AT_{string_length_,}byte_size. */
30531 remove_AT_byte_size = true;
30532 continue;
30533 default:
30534 break;
30535 case 1:
30536 /* Even if we keep the optimized DW_AT_string_length,
30537 it might have changed AT_class, so process it again. */
30538 ix--;
30539 continue;
30540 }
30541 }
30542 /* For -gdwarf-2 don't attempt to optimize
30543 DW_AT_data_member_location containing
30544 DW_OP_plus_uconst - older consumers might
30545 rely on it being that op instead of a more complex,
30546 but shorter, location description. */
30547 if ((dwarf_version > 2
30548 || a->dw_attr != DW_AT_data_member_location
30549 || l == NULL
30550 || l->dw_loc_opc != DW_OP_plus_uconst
30551 || l->dw_loc_next != NULL)
30552 && !resolve_addr_in_expr (a, l))
30553 {
30554 if (dwarf_split_debug_info)
30555 remove_loc_list_addr_table_entries (l);
30556 if (l != NULL
30557 && l->dw_loc_next == NULL
30558 && l->dw_loc_opc == DW_OP_addr
30559 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30560 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30561 && a->dw_attr == DW_AT_location)
30562 {
30563 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30564 remove_AT (die, a->dw_attr);
30565 ix--;
30566 optimize_location_into_implicit_ptr (die, decl);
30567 break;
30568 }
30569 if (a->dw_attr == DW_AT_string_length)
30570 /* If we drop DW_AT_string_length, we need to drop also
30571 DW_AT_{string_length_,}byte_size. */
30572 remove_AT_byte_size = true;
30573 remove_AT (die, a->dw_attr);
30574 ix--;
30575 }
30576 else
30577 mark_base_types (l);
30578 }
30579 break;
30580 case dw_val_class_addr:
30581 if (a->dw_attr == DW_AT_const_value
30582 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30583 {
30584 if (AT_index (a) != NOT_INDEXED)
30585 remove_addr_table_entry (a->dw_attr_val.val_entry);
30586 remove_AT (die, a->dw_attr);
30587 ix--;
30588 }
30589 if ((die->die_tag == DW_TAG_call_site
30590 && a->dw_attr == DW_AT_call_origin)
30591 || (die->die_tag == DW_TAG_GNU_call_site
30592 && a->dw_attr == DW_AT_abstract_origin))
30593 {
30594 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30595 dw_die_ref tdie = lookup_decl_die (tdecl);
30596 dw_die_ref cdie;
30597 if (tdie == NULL
30598 && DECL_EXTERNAL (tdecl)
30599 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30600 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30601 {
30602 dw_die_ref pdie = cdie;
30603 /* Make sure we don't add these DIEs into type units.
30604 We could emit skeleton DIEs for context (namespaces,
30605 outer structs/classes) and a skeleton DIE for the
30606 innermost context with DW_AT_signature pointing to the
30607 type unit. See PR78835. */
30608 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30609 pdie = pdie->die_parent;
30610 if (pdie == NULL)
30611 {
30612 /* Creating a full DIE for tdecl is overly expensive and
30613 at this point even wrong when in the LTO phase
30614 as it can end up generating new type DIEs we didn't
30615 output and thus optimize_external_refs will crash. */
30616 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30617 add_AT_flag (tdie, DW_AT_external, 1);
30618 add_AT_flag (tdie, DW_AT_declaration, 1);
30619 add_linkage_attr (tdie, tdecl);
30620 add_name_and_src_coords_attributes (tdie, tdecl, true);
30621 equate_decl_number_to_die (tdecl, tdie);
30622 }
30623 }
30624 if (tdie)
30625 {
30626 a->dw_attr_val.val_class = dw_val_class_die_ref;
30627 a->dw_attr_val.v.val_die_ref.die = tdie;
30628 a->dw_attr_val.v.val_die_ref.external = 0;
30629 }
30630 else
30631 {
30632 if (AT_index (a) != NOT_INDEXED)
30633 remove_addr_table_entry (a->dw_attr_val.val_entry);
30634 remove_AT (die, a->dw_attr);
30635 ix--;
30636 }
30637 }
30638 break;
30639 default:
30640 break;
30641 }
30642
30643 if (remove_AT_byte_size)
30644 remove_AT (die, dwarf_version >= 5
30645 ? DW_AT_string_length_byte_size
30646 : DW_AT_byte_size);
30647
30648 FOR_EACH_CHILD (die, c, resolve_addr (c));
30649 }
30650 \f
30651 /* Helper routines for optimize_location_lists.
30652    This pass tries to share identical location lists in the .debug_loc
30653    section.  */
30654
30655 /* Iteratively hash operands of LOC opcode into HSTATE. */
30656
30657 static void
30658 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30659 {
30660 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30661 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30662
30663 switch (loc->dw_loc_opc)
30664 {
30665 case DW_OP_const4u:
30666 case DW_OP_const8u:
30667 if (loc->dtprel)
30668 goto hash_addr;
30669 /* FALLTHRU */
30670 case DW_OP_const1u:
30671 case DW_OP_const1s:
30672 case DW_OP_const2u:
30673 case DW_OP_const2s:
30674 case DW_OP_const4s:
30675 case DW_OP_const8s:
30676 case DW_OP_constu:
30677 case DW_OP_consts:
30678 case DW_OP_pick:
30679 case DW_OP_plus_uconst:
30680 case DW_OP_breg0:
30681 case DW_OP_breg1:
30682 case DW_OP_breg2:
30683 case DW_OP_breg3:
30684 case DW_OP_breg4:
30685 case DW_OP_breg5:
30686 case DW_OP_breg6:
30687 case DW_OP_breg7:
30688 case DW_OP_breg8:
30689 case DW_OP_breg9:
30690 case DW_OP_breg10:
30691 case DW_OP_breg11:
30692 case DW_OP_breg12:
30693 case DW_OP_breg13:
30694 case DW_OP_breg14:
30695 case DW_OP_breg15:
30696 case DW_OP_breg16:
30697 case DW_OP_breg17:
30698 case DW_OP_breg18:
30699 case DW_OP_breg19:
30700 case DW_OP_breg20:
30701 case DW_OP_breg21:
30702 case DW_OP_breg22:
30703 case DW_OP_breg23:
30704 case DW_OP_breg24:
30705 case DW_OP_breg25:
30706 case DW_OP_breg26:
30707 case DW_OP_breg27:
30708 case DW_OP_breg28:
30709 case DW_OP_breg29:
30710 case DW_OP_breg30:
30711 case DW_OP_breg31:
30712 case DW_OP_regx:
30713 case DW_OP_fbreg:
30714 case DW_OP_piece:
30715 case DW_OP_deref_size:
30716 case DW_OP_xderef_size:
30717 hstate.add_object (val1->v.val_int);
30718 break;
30719 case DW_OP_skip:
30720 case DW_OP_bra:
30721 {
30722 int offset;
30723
30724 gcc_assert (val1->val_class == dw_val_class_loc);
30725 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30726 hstate.add_object (offset);
30727 }
30728 break;
30729 case DW_OP_implicit_value:
30730 hstate.add_object (val1->v.val_unsigned);
30731 switch (val2->val_class)
30732 {
30733 case dw_val_class_const:
30734 hstate.add_object (val2->v.val_int);
30735 break;
30736 case dw_val_class_vec:
30737 {
30738 unsigned int elt_size = val2->v.val_vec.elt_size;
30739 unsigned int len = val2->v.val_vec.length;
30740
30741 hstate.add_int (elt_size);
30742 hstate.add_int (len);
30743 hstate.add (val2->v.val_vec.array, len * elt_size);
30744 }
30745 break;
30746 case dw_val_class_const_double:
30747 hstate.add_object (val2->v.val_double.low);
30748 hstate.add_object (val2->v.val_double.high);
30749 break;
30750 case dw_val_class_wide_int:
30751 hstate.add (val2->v.val_wide->get_val (),
30752 get_full_len (*val2->v.val_wide)
30753 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30754 break;
30755 case dw_val_class_addr:
30756 inchash::add_rtx (val2->v.val_addr, hstate);
30757 break;
30758 default:
30759 gcc_unreachable ();
30760 }
30761 break;
30762 case DW_OP_bregx:
30763 case DW_OP_bit_piece:
30764 hstate.add_object (val1->v.val_int);
30765 hstate.add_object (val2->v.val_int);
30766 break;
30767 case DW_OP_addr:
30768 hash_addr:
30769 if (loc->dtprel)
30770 {
30771 unsigned char dtprel = 0xd1;
30772 hstate.add_object (dtprel);
30773 }
30774 inchash::add_rtx (val1->v.val_addr, hstate);
30775 break;
30776 case DW_OP_GNU_addr_index:
30777 case DW_OP_addrx:
30778 case DW_OP_GNU_const_index:
30779 case DW_OP_constx:
30780 {
30781 if (loc->dtprel)
30782 {
30783 unsigned char dtprel = 0xd1;
30784 hstate.add_object (dtprel);
30785 }
30786 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30787 }
30788 break;
30789 case DW_OP_implicit_pointer:
30790 case DW_OP_GNU_implicit_pointer:
30791 hstate.add_int (val2->v.val_int);
30792 break;
30793 case DW_OP_entry_value:
30794 case DW_OP_GNU_entry_value:
30795 hstate.add_object (val1->v.val_loc);
30796 break;
30797 case DW_OP_regval_type:
30798 case DW_OP_deref_type:
30799 case DW_OP_GNU_regval_type:
30800 case DW_OP_GNU_deref_type:
30801 {
30802 unsigned int byte_size
30803 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30804 unsigned int encoding
30805 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30806 hstate.add_object (val1->v.val_int);
30807 hstate.add_object (byte_size);
30808 hstate.add_object (encoding);
30809 }
30810 break;
30811 case DW_OP_convert:
30812 case DW_OP_reinterpret:
30813 case DW_OP_GNU_convert:
30814 case DW_OP_GNU_reinterpret:
30815 if (val1->val_class == dw_val_class_unsigned_const)
30816 {
30817 hstate.add_object (val1->v.val_unsigned);
30818 break;
30819 }
30820 /* FALLTHRU */
30821 case DW_OP_const_type:
30822 case DW_OP_GNU_const_type:
30823 {
30824 unsigned int byte_size
30825 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30826 unsigned int encoding
30827 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30828 hstate.add_object (byte_size);
30829 hstate.add_object (encoding);
30830 if (loc->dw_loc_opc != DW_OP_const_type
30831 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30832 break;
30833 hstate.add_object (val2->val_class);
30834 switch (val2->val_class)
30835 {
30836 case dw_val_class_const:
30837 hstate.add_object (val2->v.val_int);
30838 break;
30839 case dw_val_class_vec:
30840 {
30841 unsigned int elt_size = val2->v.val_vec.elt_size;
30842 unsigned int len = val2->v.val_vec.length;
30843
30844 hstate.add_object (elt_size);
30845 hstate.add_object (len);
30846 hstate.add (val2->v.val_vec.array, len * elt_size);
30847 }
30848 break;
30849 case dw_val_class_const_double:
30850 hstate.add_object (val2->v.val_double.low);
30851 hstate.add_object (val2->v.val_double.high);
30852 break;
30853 case dw_val_class_wide_int:
30854 hstate.add (val2->v.val_wide->get_val (),
30855 get_full_len (*val2->v.val_wide)
30856 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30857 break;
30858 default:
30859 gcc_unreachable ();
30860 }
30861 }
30862 break;
30863
30864 default:
30865 /* Other codes have no operands. */
30866 break;
30867 }
30868 }
30869
30870 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30871
30872 static inline void
30873 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30874 {
30875 dw_loc_descr_ref l;
30876 bool sizes_computed = false;
30877 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30878 size_of_locs (loc);
30879
30880 for (l = loc; l != NULL; l = l->dw_loc_next)
30881 {
30882 enum dwarf_location_atom opc = l->dw_loc_opc;
30883 hstate.add_object (opc);
30884 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30885 {
30886 size_of_locs (loc);
30887 sizes_computed = true;
30888 }
30889 hash_loc_operands (l, hstate);
30890 }
30891 }
30892
30893 /* Compute hash of the whole location list LIST_HEAD. */
30894
30895 static inline void
30896 hash_loc_list (dw_loc_list_ref list_head)
30897 {
30898 dw_loc_list_ref curr = list_head;
30899 inchash::hash hstate;
30900
30901 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30902 {
30903 hstate.add (curr->begin, strlen (curr->begin) + 1);
30904 hstate.add (curr->end, strlen (curr->end) + 1);
30905 hstate.add_object (curr->vbegin);
30906 hstate.add_object (curr->vend);
30907 if (curr->section)
30908 hstate.add (curr->section, strlen (curr->section) + 1);
30909 hash_locs (curr->expr, hstate);
30910 }
30911 list_head->hash = hstate.end ();
30912 }
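
/* Note that only the head node's hash field is written, yet the value
   covers every node of the chained list (begin/end labels, views,
   section and the location expression of each entry), so it can serve
   as the precomputed hash returned by loc_list_hasher::hash below.  */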
30913
30914 /* Return true if X and Y opcodes have the same operands. */
30915
30916 static inline bool
30917 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30918 {
30919 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30920 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30921 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30922 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30923
30924 switch (x->dw_loc_opc)
30925 {
30926 case DW_OP_const4u:
30927 case DW_OP_const8u:
30928 if (x->dtprel)
30929 goto hash_addr;
30930 /* FALLTHRU */
30931 case DW_OP_const1u:
30932 case DW_OP_const1s:
30933 case DW_OP_const2u:
30934 case DW_OP_const2s:
30935 case DW_OP_const4s:
30936 case DW_OP_const8s:
30937 case DW_OP_constu:
30938 case DW_OP_consts:
30939 case DW_OP_pick:
30940 case DW_OP_plus_uconst:
30941 case DW_OP_breg0:
30942 case DW_OP_breg1:
30943 case DW_OP_breg2:
30944 case DW_OP_breg3:
30945 case DW_OP_breg4:
30946 case DW_OP_breg5:
30947 case DW_OP_breg6:
30948 case DW_OP_breg7:
30949 case DW_OP_breg8:
30950 case DW_OP_breg9:
30951 case DW_OP_breg10:
30952 case DW_OP_breg11:
30953 case DW_OP_breg12:
30954 case DW_OP_breg13:
30955 case DW_OP_breg14:
30956 case DW_OP_breg15:
30957 case DW_OP_breg16:
30958 case DW_OP_breg17:
30959 case DW_OP_breg18:
30960 case DW_OP_breg19:
30961 case DW_OP_breg20:
30962 case DW_OP_breg21:
30963 case DW_OP_breg22:
30964 case DW_OP_breg23:
30965 case DW_OP_breg24:
30966 case DW_OP_breg25:
30967 case DW_OP_breg26:
30968 case DW_OP_breg27:
30969 case DW_OP_breg28:
30970 case DW_OP_breg29:
30971 case DW_OP_breg30:
30972 case DW_OP_breg31:
30973 case DW_OP_regx:
30974 case DW_OP_fbreg:
30975 case DW_OP_piece:
30976 case DW_OP_deref_size:
30977 case DW_OP_xderef_size:
30978 return valx1->v.val_int == valy1->v.val_int;
30979 case DW_OP_skip:
30980 case DW_OP_bra:
30981 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30982 can cause irrelevant differences in dw_loc_addr. */
30983 gcc_assert (valx1->val_class == dw_val_class_loc
30984 && valy1->val_class == dw_val_class_loc
30985 && (dwarf_split_debug_info
30986 || x->dw_loc_addr == y->dw_loc_addr));
30987 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30988 case DW_OP_implicit_value:
30989 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30990 || valx2->val_class != valy2->val_class)
30991 return false;
30992 switch (valx2->val_class)
30993 {
30994 case dw_val_class_const:
30995 return valx2->v.val_int == valy2->v.val_int;
30996 case dw_val_class_vec:
30997 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30998 && valx2->v.val_vec.length == valy2->v.val_vec.length
30999 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31000 valx2->v.val_vec.elt_size
31001 * valx2->v.val_vec.length) == 0;
31002 case dw_val_class_const_double:
31003 return valx2->v.val_double.low == valy2->v.val_double.low
31004 && valx2->v.val_double.high == valy2->v.val_double.high;
31005 case dw_val_class_wide_int:
31006 return *valx2->v.val_wide == *valy2->v.val_wide;
31007 case dw_val_class_addr:
31008 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31009 default:
31010 gcc_unreachable ();
31011 }
31012 case DW_OP_bregx:
31013 case DW_OP_bit_piece:
31014 return valx1->v.val_int == valy1->v.val_int
31015 && valx2->v.val_int == valy2->v.val_int;
31016 case DW_OP_addr:
31017 hash_addr:
31018 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31019 case DW_OP_GNU_addr_index:
31020 case DW_OP_addrx:
31021 case DW_OP_GNU_const_index:
31022 case DW_OP_constx:
31023 {
31024 rtx ax1 = valx1->val_entry->addr.rtl;
31025 rtx ay1 = valy1->val_entry->addr.rtl;
31026 return rtx_equal_p (ax1, ay1);
31027 }
31028 case DW_OP_implicit_pointer:
31029 case DW_OP_GNU_implicit_pointer:
31030 return valx1->val_class == dw_val_class_die_ref
31031 && valx1->val_class == valy1->val_class
31032 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31033 && valx2->v.val_int == valy2->v.val_int;
31034 case DW_OP_entry_value:
31035 case DW_OP_GNU_entry_value:
31036 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31037 case DW_OP_const_type:
31038 case DW_OP_GNU_const_type:
31039 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31040 || valx2->val_class != valy2->val_class)
31041 return false;
31042 switch (valx2->val_class)
31043 {
31044 case dw_val_class_const:
31045 return valx2->v.val_int == valy2->v.val_int;
31046 case dw_val_class_vec:
31047 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31048 && valx2->v.val_vec.length == valy2->v.val_vec.length
31049 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31050 valx2->v.val_vec.elt_size
31051 * valx2->v.val_vec.length) == 0;
31052 case dw_val_class_const_double:
31053 return valx2->v.val_double.low == valy2->v.val_double.low
31054 && valx2->v.val_double.high == valy2->v.val_double.high;
31055 case dw_val_class_wide_int:
31056 return *valx2->v.val_wide == *valy2->v.val_wide;
31057 default:
31058 gcc_unreachable ();
31059 }
31060 case DW_OP_regval_type:
31061 case DW_OP_deref_type:
31062 case DW_OP_GNU_regval_type:
31063 case DW_OP_GNU_deref_type:
31064 return valx1->v.val_int == valy1->v.val_int
31065 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31066 case DW_OP_convert:
31067 case DW_OP_reinterpret:
31068 case DW_OP_GNU_convert:
31069 case DW_OP_GNU_reinterpret:
31070 if (valx1->val_class != valy1->val_class)
31071 return false;
31072 if (valx1->val_class == dw_val_class_unsigned_const)
31073 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31074 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31075 case DW_OP_GNU_parameter_ref:
31076 return valx1->val_class == dw_val_class_die_ref
31077 && valx1->val_class == valy1->val_class
31078 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31079 default:
31080 /* Other codes have no operands. */
31081 return true;
31082 }
31083 }
31084
31085 /* Return true if DWARF location expressions X and Y are the same. */
31086
31087 static inline bool
31088 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31089 {
31090 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31091 if (x->dw_loc_opc != y->dw_loc_opc
31092 || x->dtprel != y->dtprel
31093 || !compare_loc_operands (x, y))
31094 break;
31095 return x == NULL && y == NULL;
31096 }
31097
31098 /* Hashtable helpers. */
31099
31100 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31101 {
31102 static inline hashval_t hash (const dw_loc_list_struct *);
31103 static inline bool equal (const dw_loc_list_struct *,
31104 const dw_loc_list_struct *);
31105 };
31106
31107 /* Return precomputed hash of location list X. */
31108
31109 inline hashval_t
31110 loc_list_hasher::hash (const dw_loc_list_struct *x)
31111 {
31112 return x->hash;
31113 }
31114
31115 /* Return true if location lists A and B are the same. */
31116
31117 inline bool
31118 loc_list_hasher::equal (const dw_loc_list_struct *a,
31119 const dw_loc_list_struct *b)
31120 {
31121 if (a == b)
31122 return 1;
31123 if (a->hash != b->hash)
31124 return 0;
31125 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31126 if (strcmp (a->begin, b->begin) != 0
31127 || strcmp (a->end, b->end) != 0
31128 || (a->section == NULL) != (b->section == NULL)
31129 || (a->section && strcmp (a->section, b->section) != 0)
31130 || a->vbegin != b->vbegin || a->vend != b->vend
31131 || !compare_locs (a->expr, b->expr))
31132 break;
31133 return a == NULL && b == NULL;
31134 }
31135
31136 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31137
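/* A minimal usage sketch of the hasher above; LIST stands for an
   already-built dw_loc_list_ref, and the sequence mirrors what
   optimize_location_lists_1 below actually does while walking the
   DIE tree:

     loc_list_hash_type htab (500);
     hash_loc_list (list);
     dw_loc_list_struct **slot
       = htab.find_slot_with_hash (list, list->hash, INSERT);
     if (*slot == NULL)
       *slot = list;		// first occurrence, keep it
     else
       list = *slot;		// duplicate, refer to the shared copy
*/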
31138
31139 /* Recursively optimize location lists referenced from DIE
31140 children and share them whenever possible. */
31141
31142 static void
31143 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31144 {
31145 dw_die_ref c;
31146 dw_attr_node *a;
31147 unsigned ix;
31148 dw_loc_list_struct **slot;
31149 bool drop_locviews = false;
31150 bool has_locviews = false;
31151
31152 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31153 if (AT_class (a) == dw_val_class_loc_list)
31154 {
31155 dw_loc_list_ref list = AT_loc_list (a);
31156 /* TODO: perform some optimizations here, before hashing
31157 it and storing into the hash table. */
31158 hash_loc_list (list);
31159 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31160 if (*slot == NULL)
31161 {
31162 *slot = list;
31163 if (loc_list_has_views (list))
31164 gcc_assert (list->vl_symbol);
31165 else if (list->vl_symbol)
31166 {
31167 drop_locviews = true;
31168 list->vl_symbol = NULL;
31169 }
31170 }
31171 else
31172 {
31173 if (list->vl_symbol && !(*slot)->vl_symbol)
31174 drop_locviews = true;
31175 a->dw_attr_val.v.val_loc_list = *slot;
31176 }
31177 }
31178 else if (AT_class (a) == dw_val_class_view_list)
31179 {
31180 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31181 has_locviews = true;
31182 }
31183
31184
31185 if (drop_locviews && has_locviews)
31186 remove_AT (die, DW_AT_GNU_locviews);
31187
31188 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31189 }
31190
31191
31192 /* Recursively assign each location list a unique index into the debug_addr
31193 section. */
31194
31195 static void
31196 index_location_lists (dw_die_ref die)
31197 {
31198 dw_die_ref c;
31199 dw_attr_node *a;
31200 unsigned ix;
31201
31202 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31203 if (AT_class (a) == dw_val_class_loc_list)
31204 {
31205 dw_loc_list_ref list = AT_loc_list (a);
31206 dw_loc_list_ref curr;
31207 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31208 {
31209 /* Don't index an entry that has already been indexed
31210 or won't be output. Make sure skip_loc_list_entry doesn't
31211 call size_of_locs, because that might cause a circular dependency:
31212 index_location_lists would then require address table indexes to be
31213 computed, yet it adds new entries through add_addr_table_entry,
31214 and address table index computation requires no new additions
31215 to the hash table. In the rare case of a DWARF[234] >= 64KB
31216 location expression, we'll just waste an unused address table
31217 entry for it. */
31218 if (curr->begin_entry != NULL
31219 || skip_loc_list_entry (curr))
31220 continue;
31221
31222 curr->begin_entry
31223 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31224 }
31225 }
31226
31227 FOR_EACH_CHILD (die, c, index_location_lists (c));
31228 }
31229
31230 /* Optimize location lists referenced from DIE
31231 children and share them whenever possible. */
31232
31233 static void
31234 optimize_location_lists (dw_die_ref die)
31235 {
31236 loc_list_hash_type htab (500);
31237 optimize_location_lists_1 (die, &htab);
31238 }
31239 \f
31240 /* Traverse the limbo die list, and add parent/child links. The only
31241 dies without parents that should be here are concrete instances of
31242 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31243 For concrete instances, we can get the parent die from the abstract
31244 instance. */
31245
31246 static void
31247 flush_limbo_die_list (void)
31248 {
31249 limbo_die_node *node;
31250
31251 /* get_context_die calls force_decl_die, which can put new DIEs on the
31252 limbo list in LTO mode when nested functions are put in a different
31253 partition than that of their parent function. */
31254 while ((node = limbo_die_list))
31255 {
31256 dw_die_ref die = node->die;
31257 limbo_die_list = node->next;
31258
31259 if (die->die_parent == NULL)
31260 {
31261 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31262
31263 if (origin && origin->die_parent)
31264 add_child_die (origin->die_parent, die);
31265 else if (is_cu_die (die))
31266 ;
31267 else if (seen_error ())
31268 /* It's OK to be confused by errors in the input. */
31269 add_child_die (comp_unit_die (), die);
31270 else
31271 {
31272 /* In certain situations, the lexical block containing a
31273 nested function can be optimized away, which results
31274 in the nested function die being orphaned. Likewise
31275 with the return type of that nested function. Force
31276 this to be a child of the containing function.
31277
31278 It may happen that even the containing function got fully
31279 inlined and optimized out. In that case we are lost and
31280 assign the empty child. This should not be a big issue as
31281 the function is likely unreachable too. */
31282 gcc_assert (node->created_for);
31283
31284 if (DECL_P (node->created_for))
31285 origin = get_context_die (DECL_CONTEXT (node->created_for));
31286 else if (TYPE_P (node->created_for))
31287 origin = scope_die_for (node->created_for, comp_unit_die ());
31288 else
31289 origin = comp_unit_die ();
31290
31291 add_child_die (origin, die);
31292 }
31293 }
31294 }
31295 }
31296
31297 /* Reset DIEs so we can output them again. */
31298
31299 static void
31300 reset_dies (dw_die_ref die)
31301 {
31302 dw_die_ref c;
31303
31304 /* Remove stuff we re-generate. */
31305 die->die_mark = 0;
31306 die->die_offset = 0;
31307 die->die_abbrev = 0;
31308 remove_AT (die, DW_AT_sibling);
31309
31310 FOR_EACH_CHILD (die, c, reset_dies (c));
31311 }
31312
31313 /* Output stuff that dwarf requires at the end of every file,
31314 and generate the DWARF-2 debugging info. */
31315
31316 static void
31317 dwarf2out_finish (const char *filename)
31318 {
31319 comdat_type_node *ctnode;
31320 dw_die_ref main_comp_unit_die;
31321 unsigned char checksum[16];
31322 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31323
31324 /* Flush out any latecomers to the limbo party. */
31325 flush_limbo_die_list ();
31326
31327 if (inline_entry_data_table)
31328 gcc_assert (inline_entry_data_table->is_empty ());
31329
31330 if (flag_checking)
31331 {
31332 verify_die (comp_unit_die ());
31333 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31334 verify_die (node->die);
31335 }
31336
31337 /* We shouldn't have any symbols with delayed asm names for
31338 DIEs generated after early finish. */
31339 gcc_assert (deferred_asm_name == NULL);
31340
31341 gen_remaining_tmpl_value_param_die_attribute ();
31342
31343 if (flag_generate_lto || flag_generate_offload)
31344 {
31345 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31346
31347 /* Prune stuff so that dwarf2out_finish runs successfully
31348 for the fat part of the object. */
31349 reset_dies (comp_unit_die ());
31350 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31351 reset_dies (node->die);
31352
31353 hash_table<comdat_type_hasher> comdat_type_table (100);
31354 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31355 {
31356 comdat_type_node **slot
31357 = comdat_type_table.find_slot (ctnode, INSERT);
31358
31359 /* Don't reset types twice. */
31360 if (*slot != HTAB_EMPTY_ENTRY)
31361 continue;
31362
31363 /* Remove the pointer to the line table. */
31364 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31365
31366 if (debug_info_level >= DINFO_LEVEL_TERSE)
31367 reset_dies (ctnode->root_die);
31368
31369 *slot = ctnode;
31370 }
31371
31372 /* Reset die CU symbol so we don't output it twice. */
31373 comp_unit_die ()->die_id.die_symbol = NULL;
31374
31375 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31376 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31377 if (have_macinfo)
31378 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31379
31380 /* Remove indirect string decisions. */
31381 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31382 if (debug_line_str_hash)
31383 {
31384 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31385 debug_line_str_hash = NULL;
31386 }
31387 }
31388
31389 #if ENABLE_ASSERT_CHECKING
31390 {
31391 dw_die_ref die = comp_unit_die (), c;
31392 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31393 }
31394 #endif
31395 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31396 resolve_addr (ctnode->root_die);
31397 resolve_addr (comp_unit_die ());
31398 move_marked_base_types ();
31399
31400 if (dump_file)
31401 {
31402 fprintf (dump_file, "DWARF for %s\n", filename);
31403 print_die (comp_unit_die (), dump_file);
31404 }
31405
31406 /* Initialize sections and labels used for actual assembler output. */
31407 unsigned generation = init_sections_and_labels (false);
31408
31409 /* Traverse the DIE's and add sibling attributes to those DIE's that
31410 have children. */
31411 add_sibling_attributes (comp_unit_die ());
31412 limbo_die_node *node;
31413 for (node = cu_die_list; node; node = node->next)
31414 add_sibling_attributes (node->die);
31415 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31416 add_sibling_attributes (ctnode->root_die);
31417
31418 /* When splitting DWARF info, we put some attributes in the
31419 skeleton compile_unit DIE that remains in the .o, while
31420 most attributes go in the DWO compile_unit_die. */
31421 if (dwarf_split_debug_info)
31422 {
31423 limbo_die_node *cu;
31424 main_comp_unit_die = gen_compile_unit_die (NULL);
31425 if (dwarf_version >= 5)
31426 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31427 cu = limbo_die_list;
31428 gcc_assert (cu->die == main_comp_unit_die);
31429 limbo_die_list = limbo_die_list->next;
31430 cu->next = cu_die_list;
31431 cu_die_list = cu;
31432 }
31433 else
31434 main_comp_unit_die = comp_unit_die ();
31435
31436 /* Output a terminator label for the .text section. */
31437 switch_to_section (text_section);
31438 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31439 if (cold_text_section)
31440 {
31441 switch_to_section (cold_text_section);
31442 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31443 }
31444
31445 /* We can only use the low/high_pc attributes if all of the code was
31446 in .text. */
31447 if (!have_multiple_function_sections
31448 || (dwarf_version < 3 && dwarf_strict))
31449 {
31450 /* Don't add if the CU has no associated code. */
31451 if (text_section_used)
31452 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31453 text_end_label, true);
31454 }
31455 else
31456 {
31457 unsigned fde_idx;
31458 dw_fde_ref fde;
31459 bool range_list_added = false;
31460
31461 if (text_section_used)
31462 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31463 text_end_label, &range_list_added, true);
31464 if (cold_text_section_used)
31465 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31466 cold_end_label, &range_list_added, true);
31467
31468 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31469 {
31470 if (DECL_IGNORED_P (fde->decl))
31471 continue;
31472 if (!fde->in_std_section)
31473 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31474 fde->dw_fde_end, &range_list_added,
31475 true);
31476 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31477 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31478 fde->dw_fde_second_end, &range_list_added,
31479 true);
31480 }
31481
31482 if (range_list_added)
31483 {
31484 /* We need to give .debug_loc and .debug_ranges an appropriate
31485 "base address". Use zero so that these addresses become
31486 absolute. Historically, we've emitted the unexpected
31487 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31488 Emit both to give time for other tools to adapt. */
31489 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31490 if (! dwarf_strict && dwarf_version < 4)
31491 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31492
31493 add_ranges (NULL);
31494 }
31495 }
31496
31497 /* AIX Assembler inserts the length, so adjust the reference to match the
31498 offset expected by debuggers. */
31499 strcpy (dl_section_ref, debug_line_section_label);
31500 if (XCOFF_DEBUGGING_INFO)
31501 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31502
31503 if (debug_info_level >= DINFO_LEVEL_TERSE)
31504 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31505 dl_section_ref);
31506
31507 if (have_macinfo)
31508 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31509 macinfo_section_label);
31510
31511 if (dwarf_split_debug_info)
31512 {
31513 if (have_location_lists)
31514 {
31515 /* Since we generate the loclists in the split DWARF .dwo
31516 file itself, we don't need to generate a loclists_base
31517 attribute for the split compile unit DIE. That attribute
31518 (and using relocatable sec_offset FORMs) isn't allowed
31519 for a split compile unit. Only if the .debug_loclists
31520 section was in the main file, would we need to generate a
31521 loclists_base attribute here (for the full or skeleton
31522 unit DIE). */
31523
31524 /* optimize_location_lists calculates the size of the lists,
31525 so index them first, and assign indices to the entries.
31526 Although optimize_location_lists will remove entries from
31527 the table, it only does so for duplicates, and therefore
31528 only reduces ref_counts to 1. */
31529 index_location_lists (comp_unit_die ());
31530 }
31531
31532 if (addr_index_table != NULL)
31533 {
31534 unsigned int index = 0;
31535 addr_index_table
31536 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31537 (&index);
31538 }
31539 }
31540
31541 loc_list_idx = 0;
31542 if (have_location_lists)
31543 {
31544 optimize_location_lists (comp_unit_die ());
31545 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31546 if (dwarf_version >= 5 && dwarf_split_debug_info)
31547 assign_location_list_indexes (comp_unit_die ());
31548 }
31549
31550 save_macinfo_strings ();
31551
31552 if (dwarf_split_debug_info)
31553 {
31554 unsigned int index = 0;
31555
31556 /* Add attributes common to skeleton compile_units and
31557 type_units. Because these attributes include strings, it
31558 must be done before freezing the string table. Top-level
31559 skeleton die attrs are added when the skeleton type unit is
31560 created, so ensure it is created by this point. */
31561 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31562 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31563 }
31564
31565 /* Output all of the compilation units. We put the main one last so that
31566 the offsets are available to output_pubnames. */
31567 for (node = cu_die_list; node; node = node->next)
31568 output_comp_unit (node->die, 0, NULL);
31569
31570 hash_table<comdat_type_hasher> comdat_type_table (100);
31571 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31572 {
31573 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31574
31575 /* Don't output duplicate types. */
31576 if (*slot != HTAB_EMPTY_ENTRY)
31577 continue;
31578
31579 /* Add a pointer to the line table for the main compilation unit
31580 so that the debugger can make sense of DW_AT_decl_file
31581 attributes. */
31582 if (debug_info_level >= DINFO_LEVEL_TERSE)
31583 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31584 (!dwarf_split_debug_info
31585 ? dl_section_ref
31586 : debug_skeleton_line_section_label));
31587
31588 output_comdat_type_unit (ctnode, false);
31589 *slot = ctnode;
31590 }
31591
31592 if (dwarf_split_debug_info)
31593 {
31594 int mark;
31595 struct md5_ctx ctx;
31596
31597 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31598 index_rnglists ();
31599
31600 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31601 md5_init_ctx (&ctx);
31602 mark = 0;
31603 die_checksum (comp_unit_die (), &ctx, &mark);
31604 unmark_all_dies (comp_unit_die ());
31605 md5_finish_ctx (&ctx, checksum);
31606
31607 if (dwarf_version < 5)
31608 {
31609 /* Use the first 8 bytes of the checksum as the dwo_id,
31610 and add it to both comp-unit DIEs. */
31611 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31612 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31613 }
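
/* For DWARF5 the dwo_id is not an attribute; it is emitted in the
   skeleton/split compile unit headers instead, which is why the
   checksum is passed down to output_comp_unit and
   output_skeleton_debug_sections below.  */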
31614
31615 /* Add the base offset of the ranges table to the skeleton
31616 comp-unit DIE. */
31617 if (!vec_safe_is_empty (ranges_table))
31618 {
31619 if (dwarf_version >= 5)
31620 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31621 ranges_base_label);
31622 else
31623 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31624 ranges_section_label);
31625 }
31626
31627 switch_to_section (debug_addr_section);
31628 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31629 which GCC uses to implement -gsplit-dwarf as a DWARF GNU extension
31630 before DWARF5, didn't have a header for .debug_addr units.
31631 DWARF5 specifies a small header when address tables are used. */
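/* A rough sketch of what the code below emits for 32-bit DWARF on a
   target with 8-byte addresses and N table entries (the exact
   directives depend on the assembler):

       .long   N*8+4	// unit length, not counting this field
       .short  5	// address table version
       .byte   8	// address size
       .byte   0	// segment selector size

   followed by the .debug_addr label and the N address entries.  */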
31632 if (dwarf_version >= 5)
31633 {
31634 unsigned int last_idx = 0;
31635 unsigned long addrs_length;
31636
31637 addr_index_table->traverse_noresize
31638 <unsigned int *, count_index_addrs> (&last_idx);
31639 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31640
31641 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31642 dw2_asm_output_data (4, 0xffffffff,
31643 "Escape value for 64-bit DWARF extension");
31644 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31645 "Length of Address Unit");
31646 dw2_asm_output_data (2, 5, "DWARF addr version");
31647 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31648 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31649 }
31650 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31651 output_addr_table ();
31652 }
31653
31654 /* Output the main compilation unit if non-empty or if .debug_macinfo
31655 or .debug_macro will be emitted. */
31656 output_comp_unit (comp_unit_die (), have_macinfo,
31657 dwarf_split_debug_info ? checksum : NULL);
31658
31659 if (dwarf_split_debug_info && info_section_emitted)
31660 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31661
31662 /* Output the abbreviation table. */
31663 if (vec_safe_length (abbrev_die_table) != 1)
31664 {
31665 switch_to_section (debug_abbrev_section);
31666 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31667 output_abbrev_section ();
31668 }
31669
31670 /* Output location list section if necessary. */
31671 if (have_location_lists)
31672 {
31673 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31674 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31675 /* Output the location lists info. */
31676 switch_to_section (debug_loc_section);
31677 if (dwarf_version >= 5)
31678 {
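/* Emit the DWARF5 .debug_loclists header: the initial length
   (preceded by the 64-bit escape when needed), the section version,
   address size, segment selector size, and the offset entry count
   used by -gsplit-dwarf to index the lists.  */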
31679 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31680 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31681 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31682 dw2_asm_output_data (4, 0xffffffff,
31683 "Initial length escape value indicating "
31684 "64-bit DWARF extension");
31685 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31686 "Length of Location Lists");
31687 ASM_OUTPUT_LABEL (asm_out_file, l1);
31688 output_dwarf_version ();
31689 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31690 dw2_asm_output_data (1, 0, "Segment Size");
31691 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31692 "Offset Entry Count");
31693 }
31694 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31695 if (dwarf_version >= 5 && dwarf_split_debug_info)
31696 {
31697 unsigned int save_loc_list_idx = loc_list_idx;
31698 loc_list_idx = 0;
31699 output_loclists_offsets (comp_unit_die ());
31700 gcc_assert (save_loc_list_idx == loc_list_idx);
31701 }
31702 output_location_lists (comp_unit_die ());
31703 if (dwarf_version >= 5)
31704 ASM_OUTPUT_LABEL (asm_out_file, l2);
31705 }
31706
31707 output_pubtables ();
31708
31709 /* Output the address range information if a CU (.debug_info section)
31710 was emitted. We output an empty table even if we had no functions
31711 to put in it. This is because the consumer has no way to tell the
31712 difference between an empty table that we omitted and failure to
31713 generate a table that would have contained data. */
31714 if (info_section_emitted)
31715 {
31716 switch_to_section (debug_aranges_section);
31717 output_aranges ();
31718 }
31719
31720 /* Output ranges section if necessary. */
31721 if (!vec_safe_is_empty (ranges_table))
31722 {
31723 if (dwarf_version >= 5)
31724 output_rnglists (generation);
31725 else
31726 output_ranges ();
31727 }
31728
31729 /* Have to end the macro section. */
31730 if (have_macinfo)
31731 {
31732 switch_to_section (debug_macinfo_section);
31733 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31734 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31735 : debug_skeleton_line_section_label, false);
31736 dw2_asm_output_data (1, 0, "End compilation unit");
31737 }
31738
31739 /* Output the source line correspondence table. We must do this
31740 even if there is no line information. Otherwise, on an empty
31741 translation unit, we will generate a present, but empty,
31742 .debug_info section. IRIX 6.5 `nm' will then complain when
31743 examining the file. This is done late so that any filenames
31744 used by the debug_info section are marked as 'used'. */
31745 switch_to_section (debug_line_section);
31746 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31747 if (! output_asm_line_debug_info ())
31748 output_line_info (false);
31749
31750 if (dwarf_split_debug_info && info_section_emitted)
31751 {
31752 switch_to_section (debug_skeleton_line_section);
31753 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31754 output_line_info (true);
31755 }
31756
31757 /* If we emitted any indirect strings, output the string table too. */
31758 if (debug_str_hash || skeleton_debug_str_hash)
31759 output_indirect_strings ();
31760 if (debug_line_str_hash)
31761 {
31762 switch_to_section (debug_line_str_section);
31763 const enum dwarf_form form = DW_FORM_line_strp;
31764 debug_line_str_hash->traverse<enum dwarf_form,
31765 output_indirect_string> (form);
31766 }
31767
31768 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31769 symview_upper_bound = 0;
31770 if (zero_view_p)
31771 bitmap_clear (zero_view_p);
31772 }
31773
31774 /* Returns a hash value for X (which really is a variable_value_struct). */
31775
31776 inline hashval_t
31777 variable_value_hasher::hash (variable_value_struct *x)
31778 {
31779 return (hashval_t) x->decl_id;
31780 }
31781
31782 /* Return nonzero if decl_id of variable_value_struct X is the same as
31783 UID of decl Y. */
31784
31785 inline bool
31786 variable_value_hasher::equal (variable_value_struct *x, tree y)
31787 {
31788 return x->decl_id == DECL_UID (y);
31789 }
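
/* variable_value_hash maps the DECL_UID of a function to the DIEs whose
   location expressions still contain a DW_OP_GNU_variable_value
   referring to one of that function's variables.  note_variable_value
   fills it during early finish; resolve_variable_values consumes it
   when the function owning those variables is later processed.  */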
31790
31791 /* Helper function for resolve_variable_value, handle
31792 DW_OP_GNU_variable_value in one location expression.
31793 Return true if exprloc has been changed into loclist. */
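/* For example (a sketch, not a verbatim transcript of GCC output):
   an exprloc of the form

     DW_OP_GNU_variable_value <decl>  DW_OP_plus_uconst 8

   is rewritten either by turning the first operand into a reference
   to the DIE of <decl>, or, when <decl> itself needs a location list,
   by converting the whole attribute into a loclist whose entries each
   get the trailing DW_OP_plus_uconst 8 appended.  */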
31794
31795 static bool
31796 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31797 {
31798 dw_loc_descr_ref next;
31799 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31800 {
31801 next = loc->dw_loc_next;
31802 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31803 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31804 continue;
31805
31806 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31807 if (DECL_CONTEXT (decl) != current_function_decl)
31808 continue;
31809
31810 dw_die_ref ref = lookup_decl_die (decl);
31811 if (ref)
31812 {
31813 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31814 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31815 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31816 continue;
31817 }
31818 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31819 if (l == NULL)
31820 continue;
31821 if (l->dw_loc_next)
31822 {
31823 if (AT_class (a) != dw_val_class_loc)
31824 continue;
31825 switch (a->dw_attr)
31826 {
31827 /* The following attributes allow both exprloc and loclist
31828 classes, so we can change them into a loclist. */
31829 case DW_AT_location:
31830 case DW_AT_string_length:
31831 case DW_AT_return_addr:
31832 case DW_AT_data_member_location:
31833 case DW_AT_frame_base:
31834 case DW_AT_segment:
31835 case DW_AT_static_link:
31836 case DW_AT_use_location:
31837 case DW_AT_vtable_elem_location:
31838 if (prev)
31839 {
31840 prev->dw_loc_next = NULL;
31841 prepend_loc_descr_to_each (l, AT_loc (a));
31842 }
31843 if (next)
31844 add_loc_descr_to_each (l, next);
31845 a->dw_attr_val.val_class = dw_val_class_loc_list;
31846 a->dw_attr_val.val_entry = NULL;
31847 a->dw_attr_val.v.val_loc_list = l;
31848 have_location_lists = true;
31849 return true;
31850 /* The following attributes allow both exprloc and reference
31851 classes, so if the whole expression is DW_OP_GNU_variable_value
31852 alone we could transform it into a reference. */
31853 case DW_AT_byte_size:
31854 case DW_AT_bit_size:
31855 case DW_AT_lower_bound:
31856 case DW_AT_upper_bound:
31857 case DW_AT_bit_stride:
31858 case DW_AT_count:
31859 case DW_AT_allocated:
31860 case DW_AT_associated:
31861 case DW_AT_byte_stride:
31862 if (prev == NULL && next == NULL)
31863 break;
31864 /* FALLTHRU */
31865 default:
31866 if (dwarf_strict)
31867 continue;
31868 break;
31869 }
31870 /* Create DW_TAG_variable that we can refer to. */
31871 gen_decl_die (decl, NULL_TREE, NULL,
31872 lookup_decl_die (current_function_decl));
31873 ref = lookup_decl_die (decl);
31874 if (ref)
31875 {
31876 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31877 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31878 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31879 }
31880 continue;
31881 }
31882 if (prev)
31883 {
31884 prev->dw_loc_next = l->expr;
31885 add_loc_descr (&prev->dw_loc_next, next);
31886 free_loc_descr (loc, NULL);
31887 next = prev->dw_loc_next;
31888 }
31889 else
31890 {
31891 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31892 add_loc_descr (&loc, next);
31893 next = loc;
31894 }
31895 loc = prev;
31896 }
31897 return false;
31898 }
31899
31900 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31901
31902 static void
31903 resolve_variable_value (dw_die_ref die)
31904 {
31905 dw_attr_node *a;
31906 dw_loc_list_ref loc;
31907 unsigned ix;
31908
31909 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31910 switch (AT_class (a))
31911 {
31912 case dw_val_class_loc:
31913 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31914 break;
31915 /* FALLTHRU */
31916 case dw_val_class_loc_list:
31917 loc = AT_loc_list (a);
31918 gcc_assert (loc);
31919 for (; loc; loc = loc->dw_loc_next)
31920 resolve_variable_value_in_expr (a, loc->expr);
31921 break;
31922 default:
31923 break;
31924 }
31925 }
31926
31927 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31928 temporaries in the current function. */
31929
31930 static void
31931 resolve_variable_values (void)
31932 {
31933 if (!variable_value_hash || !current_function_decl)
31934 return;
31935
31936 struct variable_value_struct *node
31937 = variable_value_hash->find_with_hash (current_function_decl,
31938 DECL_UID (current_function_decl));
31939
31940 if (node == NULL)
31941 return;
31942
31943 unsigned int i;
31944 dw_die_ref die;
31945 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31946 resolve_variable_value (die);
31947 }
31948
31949 /* Helper function for note_variable_value, handle one location
31950 expression. */
31951
31952 static void
31953 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31954 {
31955 for (; loc; loc = loc->dw_loc_next)
31956 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31957 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31958 {
31959 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31960 dw_die_ref ref = lookup_decl_die (decl);
31961 if (! ref && (flag_generate_lto || flag_generate_offload))
31962 {
31963 /* ??? This is somewhat of a hack because we do not create DIEs
31964 for variables not in BLOCK trees early, but when generating
31965 early LTO output we need the dw_val_class_decl_ref to be
31966 fully resolved. For fat LTO objects we'd also like to
31967 undo this after LTO dwarf output. */
31968 gcc_assert (DECL_CONTEXT (decl));
31969 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31970 gcc_assert (ctx != NULL);
31971 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31972 ref = lookup_decl_die (decl);
31973 gcc_assert (ref != NULL);
31974 }
31975 if (ref)
31976 {
31977 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31978 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31979 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31980 continue;
31981 }
31982 if (VAR_P (decl)
31983 && DECL_CONTEXT (decl)
31984 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31985 && lookup_decl_die (DECL_CONTEXT (decl)))
31986 {
31987 if (!variable_value_hash)
31988 variable_value_hash
31989 = hash_table<variable_value_hasher>::create_ggc (10);
31990
31991 tree fndecl = DECL_CONTEXT (decl);
31992 struct variable_value_struct *node;
31993 struct variable_value_struct **slot
31994 = variable_value_hash->find_slot_with_hash (fndecl,
31995 DECL_UID (fndecl),
31996 INSERT);
31997 if (*slot == NULL)
31998 {
31999 node = ggc_cleared_alloc<variable_value_struct> ();
32000 node->decl_id = DECL_UID (fndecl);
32001 *slot = node;
32002 }
32003 else
32004 node = *slot;
32005
32006 vec_safe_push (node->dies, die);
32007 }
32008 }
32009 }
32010
32011 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32012 with dw_val_class_decl_ref operand. */
32013
32014 static void
32015 note_variable_value (dw_die_ref die)
32016 {
32017 dw_die_ref c;
32018 dw_attr_node *a;
32019 dw_loc_list_ref loc;
32020 unsigned ix;
32021
32022 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32023 switch (AT_class (a))
32024 {
32025 case dw_val_class_loc_list:
32026 loc = AT_loc_list (a);
32027 gcc_assert (loc);
32028 if (!loc->noted_variable_value)
32029 {
32030 loc->noted_variable_value = 1;
32031 for (; loc; loc = loc->dw_loc_next)
32032 note_variable_value_in_expr (die, loc->expr);
32033 }
32034 break;
32035 case dw_val_class_loc:
32036 note_variable_value_in_expr (die, AT_loc (a));
32037 break;
32038 default:
32039 break;
32040 }
32041
32042 /* Mark children. */
32043 FOR_EACH_CHILD (die, c, note_variable_value (c));
32044 }
32045
32046 /* Perform any cleanups needed after the early debug generation pass
32047 has run. */
32048
32049 static void
32050 dwarf2out_early_finish (const char *filename)
32051 {
32052 set_early_dwarf s;
32053 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32054
32055 /* PCH might result in DW_AT_producer string being restored from the
32056 header compilation, so always fill it with empty string initially
32057 and overwrite only here. */
32058 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32059 producer_string = gen_producer_string ();
32060 producer->dw_attr_val.v.val_str->refcount--;
32061 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32062
32063 /* Add the name for the main input file now. We delayed this from
32064 dwarf2out_init to avoid complications with PCH. */
32065 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32066 add_comp_dir_attribute (comp_unit_die ());
32067
32068 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32069 DW_AT_comp_dir into .debug_line_str section. */
32070 if (!output_asm_line_debug_info ()
32071 && dwarf_version >= 5
32072 && DWARF5_USE_DEBUG_LINE_STR)
32073 {
32074 for (int i = 0; i < 2; i++)
32075 {
32076 dw_attr_node *a = get_AT (comp_unit_die (),
32077 i ? DW_AT_comp_dir : DW_AT_name);
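/* Skip strings no longer than an offset: indirecting them through
   DW_FORM_line_strp would not make .debug_info any smaller (this is
   a reading of the size check below, not a documented rule).  */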
32078 if (a == NULL
32079 || AT_class (a) != dw_val_class_str
32080 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32081 continue;
32082
32083 if (! debug_line_str_hash)
32084 debug_line_str_hash
32085 = hash_table<indirect_string_hasher>::create_ggc (10);
32086
32087 struct indirect_string_node *node
32088 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32089 set_indirect_string (node);
32090 node->form = DW_FORM_line_strp;
32091 a->dw_attr_val.v.val_str->refcount--;
32092 a->dw_attr_val.v.val_str = node;
32093 }
32094 }
32095
32096 /* With LTO, early dwarf was really finished at compile time, so make
32097 sure to adjust the phase after annotating the LTRANS CU DIE. */
32098 if (in_lto_p)
32099 {
32100 /* Force DW_TAG_imported_unit to be created now, otherwise
32101 we might end up without it or ordered after DW_TAG_inlined_subroutine
32102 referencing DIEs from it. */
32103 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32104 {
32105 unsigned i;
32106 tree tu;
32107 if (external_die_map)
32108 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32109 if (sym_off_pair *desc = external_die_map->get (tu))
32110 {
32111 dw_die_ref import = new_die (DW_TAG_imported_unit,
32112 comp_unit_die (), NULL_TREE);
32113 add_AT_external_die_ref (import, DW_AT_import,
32114 desc->sym, desc->off);
32115 }
32116 }
32117
32118 early_dwarf_finished = true;
32119 if (dump_file)
32120 {
32121 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32122 print_die (comp_unit_die (), dump_file);
32123 }
32124 return;
32125 }
32126
32127 /* Walk through the list of incomplete types again, trying once more to
32128 emit full debugging info for them. */
32129 retry_incomplete_types ();
32130
32131 /* The point here is to flush out the limbo list so that it is empty
32132 and we don't need to stream it for LTO. */
32133 flush_limbo_die_list ();
32134
32135 gen_scheduled_generic_parms_dies ();
32136 gen_remaining_tmpl_value_param_die_attribute ();
32137
32138 /* Add DW_AT_linkage_name for all deferred DIEs. */
32139 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32140 {
32141 tree decl = node->created_for;
32142 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32143 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32144 ended up in deferred_asm_name before we knew it was
32145 constant and never written to disk. */
32146 && DECL_ASSEMBLER_NAME (decl))
32147 {
32148 add_linkage_attr (node->die, decl);
32149 move_linkage_attr (node->die);
32150 }
32151 }
32152 deferred_asm_name = NULL;
32153
32154 if (flag_eliminate_unused_debug_types)
32155 prune_unused_types ();
32156
32157 /* Generate separate COMDAT sections for type DIEs. */
32158 if (use_debug_types)
32159 {
32160 break_out_comdat_types (comp_unit_die ());
32161
32162 /* Each new type_unit DIE was added to the limbo die list when created.
32163 Since these have all been added to comdat_type_list, clear the
32164 limbo die list. */
32165 limbo_die_list = NULL;
32166
32167 /* For each new comdat type unit, copy declarations for incomplete
32168 types to make the new unit self-contained (i.e., no direct
32169 references to the main compile unit). */
32170 for (comdat_type_node *ctnode = comdat_type_list;
32171 ctnode != NULL; ctnode = ctnode->next)
32172 copy_decls_for_unworthy_types (ctnode->root_die);
32173 copy_decls_for_unworthy_types (comp_unit_die ());
32174
32175 /* In the process of copying declarations from one unit to another,
32176 we may have left some declarations behind that are no longer
32177 referenced. Prune them. */
32178 prune_unused_types ();
32179 }
32180
32181 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32182 with dw_val_class_decl_ref operand. */
32183 note_variable_value (comp_unit_die ());
32184 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32185 note_variable_value (node->die);
32186 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32187 ctnode = ctnode->next)
32188 note_variable_value (ctnode->root_die);
32189 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32190 note_variable_value (node->die);
32191
32192 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32193 both the main_cu and all skeleton TUs. Making this call unconditional
32194 would end up either adding a second copy of the AT_pubnames attribute, or
32195 requiring a special case in add_top_level_skeleton_die_attrs. */
32196 if (!dwarf_split_debug_info)
32197 add_AT_pubnames (comp_unit_die ());
32198
32199 /* The early debug phase is now finished. */
32200 early_dwarf_finished = true;
32201 if (dump_file)
32202 {
32203 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32204 print_die (comp_unit_die (), dump_file);
32205 }
32206
32207 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32208 if ((!flag_generate_lto && !flag_generate_offload)
32209 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32210 copy_lto_debug_sections operation of the simple object support in
32211 libiberty is not implemented for them yet. */
32212 || TARGET_PECOFF || TARGET_COFF)
32213 return;
32214
32215 /* Now that we are going to output for LTO, initialize sections and
32216 labels to the LTO variants. We don't need a random-seed postfix like
32217 other LTO sections do, since linking the LTO debug sections into one
32218 in a partial link is fine. */
32219 init_sections_and_labels (true);
32220
32221 /* The output below is modeled after dwarf2out_finish with all
32222 location related output removed and some LTO specific changes.
32223 Some refactoring might make both smaller and easier to match up. */
32224
32225 /* Traverse the DIE's and add sibling attributes to those DIE's
32226 that have children. */
32227 add_sibling_attributes (comp_unit_die ());
32228 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32229 add_sibling_attributes (node->die);
32230 for (comdat_type_node *ctnode = comdat_type_list;
32231 ctnode != NULL; ctnode = ctnode->next)
32232 add_sibling_attributes (ctnode->root_die);
32233
32234 /* AIX Assembler inserts the length, so adjust the reference to match the
32235 offset expected by debuggers. */
32236 strcpy (dl_section_ref, debug_line_section_label);
32237 if (XCOFF_DEBUGGING_INFO)
32238 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32239
32240 if (debug_info_level >= DINFO_LEVEL_TERSE)
32241 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32242
32243 if (have_macinfo)
32244 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32245 macinfo_section_label);
32246
32247 save_macinfo_strings ();
32248
32249 if (dwarf_split_debug_info)
32250 {
32251 unsigned int index = 0;
32252 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32253 }
32254
32255 /* Output all of the compilation units. We put the main one last so that
32256 the offsets are available to output_pubnames. */
32257 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32258 output_comp_unit (node->die, 0, NULL);
32259
32260 hash_table<comdat_type_hasher> comdat_type_table (100);
32261 for (comdat_type_node *ctnode = comdat_type_list;
32262 ctnode != NULL; ctnode = ctnode->next)
32263 {
32264 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32265
32266 /* Don't output duplicate types. */
32267 if (*slot != HTAB_EMPTY_ENTRY)
32268 continue;
32269
32270 /* Add a pointer to the line table for the main compilation unit
32271 so that the debugger can make sense of DW_AT_decl_file
32272 attributes. */
32273 if (debug_info_level >= DINFO_LEVEL_TERSE)
32274 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32275 (!dwarf_split_debug_info
32276 ? debug_line_section_label
32277 : debug_skeleton_line_section_label));
32278
32279 output_comdat_type_unit (ctnode, true);
32280 *slot = ctnode;
32281 }
32282
32283 /* Stick a unique symbol to the main debuginfo section. */
32284 compute_comp_unit_symbol (comp_unit_die ());
32285
32286 /* Output the main compilation unit. We always need it if only for
32287 the CU symbol. */
32288 output_comp_unit (comp_unit_die (), true, NULL);
32289
32290 /* Output the abbreviation table. */
32291 if (vec_safe_length (abbrev_die_table) != 1)
32292 {
32293 switch_to_section (debug_abbrev_section);
32294 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32295 output_abbrev_section ();
32296 }
32297
32298 /* Have to end the macro section. */
32299 if (have_macinfo)
32300 {
32301 /* We have to save macinfo state if we need to output it again
32302 for the FAT part of the object. */
32303 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32304 if (flag_fat_lto_objects)
32305 macinfo_table = macinfo_table->copy ();
32306
32307 switch_to_section (debug_macinfo_section);
32308 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32309 output_macinfo (debug_line_section_label, true);
32310 dw2_asm_output_data (1, 0, "End compilation unit");
32311
32312 if (flag_fat_lto_objects)
32313 {
32314 vec_free (macinfo_table);
32315 macinfo_table = saved_macinfo_table;
32316 }
32317 }
32318
32319 /* Emit a skeleton debug_line section. */
32320 switch_to_section (debug_line_section);
32321 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32322 output_line_info (true);
32323
32324 /* If we emitted any indirect strings, output the string table too. */
32325 if (debug_str_hash || skeleton_debug_str_hash)
32326 output_indirect_strings ();
32327 if (debug_line_str_hash)
32328 {
32329 switch_to_section (debug_line_str_section);
32330 const enum dwarf_form form = DW_FORM_line_strp;
32331 debug_line_str_hash->traverse<enum dwarf_form,
32332 output_indirect_string> (form);
32333 }
32334
32335 /* Switch back to the text section. */
32336 switch_to_section (text_section);
32337 }
32338
32339 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32340 within the same process. For use by toplev::finalize. */
32341
32342 void
32343 dwarf2out_c_finalize (void)
32344 {
32345 last_var_location_insn = NULL;
32346 cached_next_real_insn = NULL;
32347 used_rtx_array = NULL;
32348 incomplete_types = NULL;
32349 debug_info_section = NULL;
32350 debug_skeleton_info_section = NULL;
32351 debug_abbrev_section = NULL;
32352 debug_skeleton_abbrev_section = NULL;
32353 debug_aranges_section = NULL;
32354 debug_addr_section = NULL;
32355 debug_macinfo_section = NULL;
32356 debug_line_section = NULL;
32357 debug_skeleton_line_section = NULL;
32358 debug_loc_section = NULL;
32359 debug_pubnames_section = NULL;
32360 debug_pubtypes_section = NULL;
32361 debug_str_section = NULL;
32362 debug_line_str_section = NULL;
32363 debug_str_dwo_section = NULL;
32364 debug_str_offsets_section = NULL;
32365 debug_ranges_section = NULL;
32366 debug_frame_section = NULL;
32367 fde_vec = NULL;
32368 debug_str_hash = NULL;
32369 debug_line_str_hash = NULL;
32370 skeleton_debug_str_hash = NULL;
32371 dw2_string_counter = 0;
32372 have_multiple_function_sections = false;
32373 text_section_used = false;
32374 cold_text_section_used = false;
32375 cold_text_section = NULL;
32376 current_unit_personality = NULL;
32377
32378 early_dwarf = false;
32379 early_dwarf_finished = false;
32380
32381 next_die_offset = 0;
32382 single_comp_unit_die = NULL;
32383 comdat_type_list = NULL;
32384 limbo_die_list = NULL;
32385 file_table = NULL;
32386 decl_die_table = NULL;
32387 common_block_die_table = NULL;
32388 decl_loc_table = NULL;
32389 call_arg_locations = NULL;
32390 call_arg_loc_last = NULL;
32391 call_site_count = -1;
32392 tail_call_site_count = -1;
32393 cached_dw_loc_list_table = NULL;
32394 abbrev_die_table = NULL;
32395 delete dwarf_proc_stack_usage_map;
32396 dwarf_proc_stack_usage_map = NULL;
32397 line_info_label_num = 0;
32398 cur_line_info_table = NULL;
32399 text_section_line_info = NULL;
32400 cold_text_section_line_info = NULL;
32401 separate_line_info = NULL;
32402 info_section_emitted = false;
32403 pubname_table = NULL;
32404 pubtype_table = NULL;
32405 macinfo_table = NULL;
32406 ranges_table = NULL;
32407 ranges_by_label = NULL;
32408 rnglist_idx = 0;
32409 have_location_lists = false;
32410 loclabel_num = 0;
32411 poc_label_num = 0;
32412 last_emitted_file = NULL;
32413 label_num = 0;
32414 tmpl_value_parm_die_table = NULL;
32415 generic_type_instances = NULL;
32416 frame_pointer_fb_offset = 0;
32417 frame_pointer_fb_offset_valid = false;
32418 base_types.release ();
32419 XDELETEVEC (producer_string);
32420 producer_string = NULL;
32421 }
32422
32423 #include "gt-dwarf2out.h"