1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
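/* For illustration only (a sketch of typical x86-64 unwind output, not
   anything emitted by the glossary above):

     DW_CFA_def_cfa: r7 (rsp) ofs 8     <- at the entry point, CFA = RSP + 8
     DW_CFA_offset: r16 (rip) at cfa-8  <- return address saved at CFA - 8

   i.e. the CFA is the value SP had just before the CALL insn (the call
   pushed an 8-byte return address), and later CFIs adjust the rule as the
   prologue pushes registers and allocates the frame.  */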
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
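/* A worked example of the above: with DWARF_OFFSET_SIZE == 4 (32-bit
   DWARF) the initial length is a single 4-byte length field; with
   DWARF_OFFSET_SIZE == 8 (64-bit DWARF) it is the 4-byte escape value
   0xffffffff followed by an 8-byte length, 12 bytes in total, which is
   exactly what the dw2_asm_output_data (4, 0xffffffff, ...) calls further
   below emit.  */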
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
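/* For example, DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8:
   SIZE is rounded up to the next multiple of BOUNDARY, and values that
   are already multiples are left unchanged.  */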
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
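/* For instance, with 64-bit HOST_WIDE_INTs a wide_int holding the value 5
   needs only 3 bits of precision and therefore 1 HOST_WIDE_INT, even if
   its nominal precision is 128 bits, while a value needing 65 bits of
   precision takes 2 HOST_WIDE_INTs.  */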
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
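/* Read together, the two descriptors above say, for example, that
   DW_CFA_def_cfa carries a register number in operand 1 and an offset in
   operand 2, that DW_CFA_def_cfa_offset uses only operand 1 (an offset),
   and that DW_CFA_advance_loc4 uses only operand 1 (an address).  The GTY
   machinery uses this to know which union members to mark.  */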
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
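/* The expression just above, -offset & (PTR_SIZE - 1), is the padding
   needed to round OFFSET up to the next PTR_SIZE boundary; e.g. with
   PTR_SIZE == 8 and offset == 21 the pad is 3 bytes.  */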
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded by
697 link-time garbage collection. We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
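/* (In CIE version 1 the return address column is a single ubyte, so
   register numbers of 256 or more cannot be represented; version 3
   switched the field to a uleb128, matching the RA-column output
   further below.)  */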
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
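/* For example, a CIE built here for a C++ function with an LSDA, a
   personality routine and a non-absolute FDE encoding ends up with the
   augmentation string "zPLR"; a plain C function on the same target
   typically gets just "zR".  */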
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
973 eh unwinders. */
974 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
975 return;
976
977 rtx personality = get_personality_function (current_function_decl);
978
979 if (personality)
980 {
981 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
982 ref = personality;
983
984 /* ??? The GAS support isn't entirely consistent. We have to
985 handle indirect support ourselves, but PC-relative is done
986 in the assembler. Further, the assembler can't handle any
987 of the weirder relocation types. */
988 if (enc & DW_EH_PE_indirect)
989 ref = dw2_force_const_mem (ref, true);
990
991 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
992 output_addr_const (asm_out_file, ref);
993 fputc ('\n', asm_out_file);
994 }
995
996 if (crtl->uses_eh_lsda)
997 {
998 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
999
1000 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1001 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1002 current_function_funcdef_no);
1003 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1004 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1005
1006 if (enc & DW_EH_PE_indirect)
1007 ref = dw2_force_const_mem (ref, true);
1008
1009 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1010 output_addr_const (asm_out_file, ref);
1011 fputc ('\n', asm_out_file);
1012 }
1013 }
1014
1015 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1016 this allocation may be done before pass_final. */
1017
1018 dw_fde_ref
1019 dwarf2out_alloc_current_fde (void)
1020 {
1021 dw_fde_ref fde;
1022
1023 fde = ggc_cleared_alloc<dw_fde_node> ();
1024 fde->decl = current_function_decl;
1025 fde->funcdef_number = current_function_funcdef_no;
1026 fde->fde_index = vec_safe_length (fde_vec);
1027 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1028 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1029 fde->nothrow = crtl->nothrow;
1030 fde->drap_reg = INVALID_REGNUM;
1031 fde->vdrap_reg = INVALID_REGNUM;
1032
1033 /* Record the FDE associated with this function. */
1034 cfun->fde = fde;
1035 vec_safe_push (fde_vec, fde);
1036
1037 return fde;
1038 }
1039
1040 /* Output a marker (i.e. a label) for the beginning of a function, before
1041 the prologue. */
1042
1043 void
1044 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1045 unsigned int column ATTRIBUTE_UNUSED,
1046 const char *file ATTRIBUTE_UNUSED)
1047 {
1048 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1049 char * dup_label;
1050 dw_fde_ref fde;
1051 section *fnsec;
1052 bool do_frame;
1053
1054 current_function_func_begin_label = NULL;
1055
1056 do_frame = dwarf2out_do_frame ();
1057
1058 /* ??? current_function_func_begin_label is also used by except.c for
1059 call-site information. We must emit this label if it might be used. */
1060 if (!do_frame
1061 && (!flag_exceptions
1062 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1063 return;
1064
1065 fnsec = function_section (current_function_decl);
1066 switch_to_section (fnsec);
1067 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1068 current_function_funcdef_no);
1069 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 dup_label = xstrdup (label);
1072 current_function_func_begin_label = dup_label;
1073
1074 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1075 if (!do_frame)
1076 return;
1077
1078 /* Unlike the debug version, the EH version of frame unwind info is a per-
1079 function setting so we need to record whether we need it for the unit. */
1080 do_eh_frame |= dwarf2out_do_eh_frame ();
1081
1082 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1083 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1084 would include pass_dwarf2_frame. If we've not created the FDE yet,
1085 do so now. */
1086 fde = cfun->fde;
1087 if (fde == NULL)
1088 fde = dwarf2out_alloc_current_fde ();
1089
1090 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1091 fde->dw_fde_begin = dup_label;
1092 fde->dw_fde_current_label = dup_label;
1093 fde->in_std_section = (fnsec == text_section
1094 || (cold_text_section && fnsec == cold_text_section));
1095
1096 /* We only want to output line number information for the genuine dwarf2
1097 prologue case, not the eh frame case. */
1098 #ifdef DWARF2_DEBUGGING_INFO
1099 if (file)
1100 dwarf2out_source_line (line, column, file, 0, true);
1101 #endif
1102
1103 if (dwarf2out_do_cfi_asm ())
1104 dwarf2out_do_cfi_startproc (false);
1105 else
1106 {
1107 rtx personality = get_personality_function (current_function_decl);
1108 if (!current_unit_personality)
1109 current_unit_personality = personality;
1110
1111 /* We cannot keep a current personality per function as without CFI
1112 asm, at the point where we emit the CFI data, there is no current
1113 function anymore. */
1114 if (personality && current_unit_personality != personality)
1115 sorry ("multiple EH personalities are supported only with assemblers "
1116 "supporting .cfi_personality directive");
1117 }
1118 }
1119
1120 /* Output a marker (i.e. a label) for the end of the generated code
1121 for a function prologue. This gets called *after* the prologue code has
1122 been generated. */
1123
1124 void
1125 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1126 const char *file ATTRIBUTE_UNUSED)
1127 {
1128 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1129
1130 /* Output a label to mark the end of the prologue code generated for this
1131 function. */
1132 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1133 current_function_funcdef_no);
1134 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1137 }
1138
1139 /* Output a marker (i.e. a label) for the beginning of the generated code
1140 for a function epilogue. This gets called *before* the epilogue code has
1141 been generated. */
1142
1143 void
1144 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1145 const char *file ATTRIBUTE_UNUSED)
1146 {
1147 dw_fde_ref fde = cfun->fde;
1148 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1149
1150 if (fde->dw_fde_vms_begin_epilogue)
1151 return;
1152
1153 /* Output a label to mark the beginning of the epilogue code generated for
1154 this function. */
1155 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1156 current_function_funcdef_no);
1157 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1160 }
1161
1162 /* Output a marker (i.e. a label) for the absolute end of the generated code
1163 for a function definition. This gets called *after* the epilogue code has
1164 been generated. */
1165
1166 void
1167 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1168 const char *file ATTRIBUTE_UNUSED)
1169 {
1170 dw_fde_ref fde;
1171 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1172
1173 last_var_location_insn = NULL;
1174 cached_next_real_insn = NULL;
1175
1176 if (dwarf2out_do_cfi_asm ())
1177 fprintf (asm_out_file, "\t.cfi_endproc\n");
1178
1179 /* Output a label to mark the endpoint of the code generated for this
1180 function. */
1181 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1182 current_function_funcdef_no);
1183 ASM_OUTPUT_LABEL (asm_out_file, label);
1184 fde = cfun->fde;
1185 gcc_assert (fde != NULL);
1186 if (fde->dw_fde_second_begin == NULL)
1187 fde->dw_fde_end = xstrdup (label);
1188 }
1189
1190 void
1191 dwarf2out_frame_finish (void)
1192 {
1193 /* Output call frame information. */
1194 if (targetm.debug_unwind_info () == UI_DWARF2)
1195 output_call_frame_info (0);
1196
1197 /* Output another copy for the unwinder. */
1198 if (do_eh_frame)
1199 output_call_frame_info (1);
1200 }
1201
1202 /* Note that the current function section is being used for code. */
1203
1204 static void
1205 dwarf2out_note_section_used (void)
1206 {
1207 section *sec = current_function_section ();
1208 if (sec == text_section)
1209 text_section_used = true;
1210 else if (sec == cold_text_section)
1211 cold_text_section_used = true;
1212 }
1213
1214 static void var_location_switch_text_section (void);
1215 static void set_cur_line_info_table (section *);
1216
1217 void
1218 dwarf2out_switch_text_section (void)
1219 {
1220 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1227 current_function_funcdef_no);
1228
1229 fde->dw_fde_second_begin = ggc_strdup (label);
1230 if (!in_cold_section_p)
1231 {
1232 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1233 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1234 }
1235 else
1236 {
1237 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1238 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1239 }
1240 have_multiple_function_sections = true;
1241
1242 /* There is no need to mark used sections when not debugging. */
1243 if (cold_text_section != NULL)
1244 dwarf2out_note_section_used ();
1245
1246 if (dwarf2out_do_cfi_asm ())
1247 fprintf (asm_out_file, "\t.cfi_endproc\n");
1248
1249 /* Now do the real section switch. */
1250 sect = current_function_section ();
1251 switch_to_section (sect);
1252
1253 fde->second_in_std_section
1254 = (sect == text_section
1255 || (cold_text_section && sect == cold_text_section));
1256
1257 if (dwarf2out_do_cfi_asm ())
1258 dwarf2out_do_cfi_startproc (true);
1259
1260 var_location_switch_text_section ();
1261
1262 if (cold_text_section != NULL)
1263 set_cur_line_info_table (sect);
1264 }
1265 \f
1266 /* And now, the subset of the debugging information support code necessary
1267 for emitting location expressions. */
1268
1269 /* Data about a single source file. */
1270 struct GTY((for_user)) dwarf_file_data {
1271 const char * filename;
1272 int emitted_number;
1273 };
1274
1275 /* Describe an entry into the .debug_addr section. */
1276
1277 enum ate_kind {
1278 ate_kind_rtx,
1279 ate_kind_rtx_dtprel,
1280 ate_kind_label
1281 };
1282
1283 struct GTY((for_user)) addr_table_entry {
1284 enum ate_kind kind;
1285 unsigned int refcount;
1286 unsigned int index;
1287 union addr_table_entry_struct_union
1288 {
1289 rtx GTY ((tag ("0"))) rtl;
1290 char * GTY ((tag ("1"))) label;
1291 }
1292 GTY ((desc ("%1.kind"))) addr;
1293 };
1294
1295 typedef unsigned int var_loc_view;
1296
1297 /* Location lists are ranges + location descriptions for that range,
1298 so you can track variables that are in different places over
1299 their entire life. */
1300 typedef struct GTY(()) dw_loc_list_struct {
1301 dw_loc_list_ref dw_loc_next;
1302 const char *begin; /* Label and addr_entry for start of range */
1303 addr_table_entry *begin_entry;
1304 const char *end; /* Label for end of range */
1305 char *ll_symbol; /* Label for beginning of location list.
1306 Only on head of list. */
1307 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1308 const char *section; /* Section this loclist is relative to */
1309 dw_loc_descr_ref expr;
1310 var_loc_view vbegin, vend;
1311 hashval_t hash;
1312 /* True if all addresses in this and subsequent lists are known to be
1313 resolved. */
1314 bool resolved_addr;
1315 /* True if this list has been replaced by dw_loc_next. */
1316 bool replaced;
1317 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1318 section. */
1319 unsigned char emitted : 1;
1320 /* True if hash field is index rather than hash value. */
1321 unsigned char num_assigned : 1;
1322 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1323 unsigned char offset_emitted : 1;
1324 /* True if note_variable_value_in_expr has been called on it. */
1325 unsigned char noted_variable_value : 1;
1326 /* True if the range should be emitted even if begin and end
1327 are the same. */
1328 bool force;
1329 } dw_loc_list_node;
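/* A purely illustrative example: a variable that lives in a register early
   in a function and is then spilled to the stack would get a list with two
   nodes, roughly

     [.LVL0, .LVL1): DW_OP_reg3
     [.LVL1, .LVL5): DW_OP_fbreg -24

   where the begin/end labels delimit the address ranges and each node's
   expr gives the location that is valid over that range.  */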
1330
1331 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1332 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1333
1334 /* Convert a DWARF stack opcode into its string name. */
1335
1336 static const char *
1337 dwarf_stack_op_name (unsigned int op)
1338 {
1339 const char *name = get_DW_OP_name (op);
1340
1341 if (name != NULL)
1342 return name;
1343
1344 return "OP_<unknown>";
1345 }
1346
1347 /* Return TRUE iff we're to output location view lists as a separate
1348 attribute next to the location lists, as an extension compatible
1349 with DWARF 2 and above. */
1350
1351 static inline bool
1352 dwarf2out_locviews_in_attribute ()
1353 {
1354 return debug_variable_location_views == 1;
1355 }
1356
1357 /* Return TRUE iff we're to output location view lists as part of the
1358 location lists, as proposed for standardization after DWARF 5. */
1359
1360 static inline bool
1361 dwarf2out_locviews_in_loclist ()
1362 {
1363 #ifndef DW_LLE_view_pair
1364 return false;
1365 #else
1366 return debug_variable_location_views == -1;
1367 #endif
1368 }
1369
1370 /* Return a pointer to a newly allocated location description. Location
1371 descriptions are simple expression terms that can be strung
1372 together to form more complicated location (address) descriptions. */
1373
1374 static inline dw_loc_descr_ref
1375 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1376 unsigned HOST_WIDE_INT oprnd2)
1377 {
1378 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1379
1380 descr->dw_loc_opc = op;
1381 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1382 descr->dw_loc_oprnd1.val_entry = NULL;
1383 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1384 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1385 descr->dw_loc_oprnd2.val_entry = NULL;
1386 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1387
1388 return descr;
1389 }
1390
1391 /* Add a location description term to a location description expression. */
1392
1393 static inline void
1394 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1395 {
1396 dw_loc_descr_ref *d;
1397
1398 /* Find the end of the chain. */
1399 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1400 ;
1401
1402 *d = descr;
1403 }
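/* A minimal usage sketch (hypothetical, not called anywhere): to build the
   two-term expression "contents of memory at register REGNO + 16" one
   could write

     dw_loc_descr_ref expr = new_loc_descr (DW_OP_bregx, regno, 16);
     add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));

   add_loc_descr simply walks to the end of the chain and appends, so terms
   are emitted in the order they were added.  */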
1404
1405 /* Compare two location operands for exact equality. */
1406
1407 static bool
1408 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1409 {
1410 if (a->val_class != b->val_class)
1411 return false;
1412 switch (a->val_class)
1413 {
1414 case dw_val_class_none:
1415 return true;
1416 case dw_val_class_addr:
1417 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1418
1419 case dw_val_class_offset:
1420 case dw_val_class_unsigned_const:
1421 case dw_val_class_const:
1422 case dw_val_class_unsigned_const_implicit:
1423 case dw_val_class_const_implicit:
1424 case dw_val_class_range_list:
1425 /* These are all HOST_WIDE_INT, signed or unsigned. */
1426 return a->v.val_unsigned == b->v.val_unsigned;
1427
1428 case dw_val_class_loc:
1429 return a->v.val_loc == b->v.val_loc;
1430 case dw_val_class_loc_list:
1431 return a->v.val_loc_list == b->v.val_loc_list;
1432 case dw_val_class_view_list:
1433 return a->v.val_view_list == b->v.val_view_list;
1434 case dw_val_class_die_ref:
1435 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1436 case dw_val_class_fde_ref:
1437 return a->v.val_fde_index == b->v.val_fde_index;
1438 case dw_val_class_symview:
1439 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1440 case dw_val_class_lbl_id:
1441 case dw_val_class_lineptr:
1442 case dw_val_class_macptr:
1443 case dw_val_class_loclistsptr:
1444 case dw_val_class_high_pc:
1445 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1446 case dw_val_class_str:
1447 return a->v.val_str == b->v.val_str;
1448 case dw_val_class_flag:
1449 return a->v.val_flag == b->v.val_flag;
1450 case dw_val_class_file:
1451 case dw_val_class_file_implicit:
1452 return a->v.val_file == b->v.val_file;
1453 case dw_val_class_decl_ref:
1454 return a->v.val_decl_ref == b->v.val_decl_ref;
1455
1456 case dw_val_class_const_double:
1457 return (a->v.val_double.high == b->v.val_double.high
1458 && a->v.val_double.low == b->v.val_double.low);
1459
1460 case dw_val_class_wide_int:
1461 return *a->v.val_wide == *b->v.val_wide;
1462
1463 case dw_val_class_vec:
1464 {
1465 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1466 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1467
1468 return (a_len == b_len
1469 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1470 }
1471
1472 case dw_val_class_data8:
1473 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1474
1475 case dw_val_class_vms_delta:
1476 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1477 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1478
1479 case dw_val_class_discr_value:
1480 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1481 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1482 case dw_val_class_discr_list:
1483 /* It makes no sense comparing two discriminant value lists. */
1484 return false;
1485 }
1486 gcc_unreachable ();
1487 }
1488
1489 /* Compare two location atoms for exact equality. */
1490
1491 static bool
1492 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1493 {
1494 if (a->dw_loc_opc != b->dw_loc_opc)
1495 return false;
1496
1497 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1498 address size, but since we always allocate cleared storage it
1499 should be zero for other types of locations. */
1500 if (a->dtprel != b->dtprel)
1501 return false;
1502
1503 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1504 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1505 }
1506
1507 /* Compare two complete location expressions for exact equality. */
1508
1509 bool
1510 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1511 {
1512 while (1)
1513 {
1514 if (a == b)
1515 return true;
1516 if (a == NULL || b == NULL)
1517 return false;
1518 if (!loc_descr_equal_p_1 (a, b))
1519 return false;
1520
1521 a = a->dw_loc_next;
1522 b = b->dw_loc_next;
1523 }
1524 }
1525
1526
1527 /* Add a constant POLY_OFFSET to a location expression. */
1528
1529 static void
1530 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1531 {
1532 dw_loc_descr_ref loc;
1533 HOST_WIDE_INT *p;
1534
1535 gcc_assert (*list_head != NULL);
1536
1537 if (known_eq (poly_offset, 0))
1538 return;
1539
1540 /* Find the end of the chain. */
1541 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1542 ;
1543
1544 HOST_WIDE_INT offset;
1545 if (!poly_offset.is_constant (&offset))
1546 {
1547 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1548 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1549 return;
1550 }
1551
1552 p = NULL;
1553 if (loc->dw_loc_opc == DW_OP_fbreg
1554 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1555 p = &loc->dw_loc_oprnd1.v.val_int;
1556 else if (loc->dw_loc_opc == DW_OP_bregx)
1557 p = &loc->dw_loc_oprnd2.v.val_int;
1558
1559 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1560 offset. Don't optimize if a signed integer overflow would happen. */
1561 if (p != NULL
1562 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1563 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1564 *p += offset;
1565
1566 else if (offset > 0)
1567 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1568
1569 else
1570 {
1571 loc->dw_loc_next
1572 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1573 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1574 }
1575 }
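/* For instance, adding the constant 8 to an expression ending in
   "DW_OP_fbreg -16" just rewrites that term to "DW_OP_fbreg -8" (no new
   term), while adding 8 to one ending in "DW_OP_addr foo" appends a
   separate "DW_OP_plus_uconst 8" term.  */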
1576
1577 /* Return a pointer to a newly allocated location description for
1578 REG and OFFSET. */
1579
1580 static inline dw_loc_descr_ref
1581 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1582 {
1583 HOST_WIDE_INT const_offset;
1584 if (offset.is_constant (&const_offset))
1585 {
1586 if (reg <= 31)
1587 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1588 const_offset, 0);
1589 else
1590 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1591 }
1592 else
1593 {
1594 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1595 loc_descr_plus_const (&ret, offset);
1596 return ret;
1597 }
1598 }
1599
1600 /* Add a constant OFFSET to a location list. */
1601
1602 static void
1603 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1604 {
1605 dw_loc_list_ref d;
1606 for (d = list_head; d != NULL; d = d->dw_loc_next)
1607 loc_descr_plus_const (&d->expr, offset);
1608 }
1609
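/* Size of a reference into .debug_info (DW_FORM_ref_addr and the
   DW_OP_call_ref / DW_OP_implicit_pointer operands sized below):
   address-sized in DWARF 2, offset-sized from DWARF 3 on.  */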
1610 #define DWARF_REF_SIZE \
1611 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1612
1613 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1614 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1615 DW_FORM_data16 with 128 bits. */
1616 #define DWARF_LARGEST_DATA_FORM_BITS \
1617 (dwarf_version >= 5 ? 128 : 64)
1618
1619 /* Utility inline function for construction of ops that were GNU extension
1620 before DWARF 5. */
1621 static inline enum dwarf_location_atom
1622 dwarf_OP (enum dwarf_location_atom op)
1623 {
1624 switch (op)
1625 {
1626 case DW_OP_implicit_pointer:
1627 if (dwarf_version < 5)
1628 return DW_OP_GNU_implicit_pointer;
1629 break;
1630
1631 case DW_OP_entry_value:
1632 if (dwarf_version < 5)
1633 return DW_OP_GNU_entry_value;
1634 break;
1635
1636 case DW_OP_const_type:
1637 if (dwarf_version < 5)
1638 return DW_OP_GNU_const_type;
1639 break;
1640
1641 case DW_OP_regval_type:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_regval_type;
1644 break;
1645
1646 case DW_OP_deref_type:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_deref_type;
1649 break;
1650
1651 case DW_OP_convert:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_convert;
1654 break;
1655
1656 case DW_OP_reinterpret:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_reinterpret;
1659 break;
1660
1661 case DW_OP_addrx:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_addr_index;
1664 break;
1665
1666 case DW_OP_constx:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_const_index;
1669 break;
1670
1671 default:
1672 break;
1673 }
1674 return op;
1675 }
1676
1677 /* Similarly for attributes. */
1678 static inline enum dwarf_attribute
1679 dwarf_AT (enum dwarf_attribute at)
1680 {
1681 switch (at)
1682 {
1683 case DW_AT_call_return_pc:
1684 if (dwarf_version < 5)
1685 return DW_AT_low_pc;
1686 break;
1687
1688 case DW_AT_call_tail_call:
1689 if (dwarf_version < 5)
1690 return DW_AT_GNU_tail_call;
1691 break;
1692
1693 case DW_AT_call_origin:
1694 if (dwarf_version < 5)
1695 return DW_AT_abstract_origin;
1696 break;
1697
1698 case DW_AT_call_target:
1699 if (dwarf_version < 5)
1700 return DW_AT_GNU_call_site_target;
1701 break;
1702
1703 case DW_AT_call_target_clobbered:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_call_site_target_clobbered;
1706 break;
1707
1708 case DW_AT_call_parameter:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_value:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_value;
1716 break;
1717
1718 case DW_AT_call_data_value:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_data_value;
1721 break;
1722
1723 case DW_AT_call_all_calls:
1724 if (dwarf_version < 5)
1725 return DW_AT_GNU_all_call_sites;
1726 break;
1727
1728 case DW_AT_call_all_tail_calls:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_all_tail_call_sites;
1731 break;
1732
1733 case DW_AT_dwo_name:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_dwo_name;
1736 break;
1737
1738 case DW_AT_addr_base:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_addr_base;
1741 break;
1742
1743 default:
1744 break;
1745 }
1746 return at;
1747 }
1748
1749 /* And similarly for tags. */
1750 static inline enum dwarf_tag
1751 dwarf_TAG (enum dwarf_tag tag)
1752 {
1753 switch (tag)
1754 {
1755 case DW_TAG_call_site:
1756 if (dwarf_version < 5)
1757 return DW_TAG_GNU_call_site;
1758 break;
1759
1760 case DW_TAG_call_site_parameter:
1761 if (dwarf_version < 5)
1762 return DW_TAG_GNU_call_site_parameter;
1763 break;
1764
1765 default:
1766 break;
1767 }
1768 return tag;
1769 }
1770
1771 /* And similarly for forms. */
1772 static inline enum dwarf_form
1773 dwarf_FORM (enum dwarf_form form)
1774 {
1775 switch (form)
1776 {
1777 case DW_FORM_addrx:
1778 if (dwarf_version < 5)
1779 return DW_FORM_GNU_addr_index;
1780 break;
1781
1782 case DW_FORM_strx:
1783 if (dwarf_version < 5)
1784 return DW_FORM_GNU_str_index;
1785 break;
1786
1787 default:
1788 break;
1789 }
1790 return form;
1791 }
1792
1793 static unsigned long int get_base_type_offset (dw_die_ref);
1794
1795 /* Return the size of a location descriptor. */
1796
1797 static unsigned long
1798 size_of_loc_descr (dw_loc_descr_ref loc)
1799 {
1800 unsigned long size = 1;
1801
1802 switch (loc->dw_loc_opc)
1803 {
1804 case DW_OP_addr:
1805 size += DWARF2_ADDR_SIZE;
1806 break;
1807 case DW_OP_GNU_addr_index:
1808 case DW_OP_addrx:
1809 case DW_OP_GNU_const_index:
1810 case DW_OP_constx:
1811 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1812 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1813 break;
1814 case DW_OP_const1u:
1815 case DW_OP_const1s:
1816 size += 1;
1817 break;
1818 case DW_OP_const2u:
1819 case DW_OP_const2s:
1820 size += 2;
1821 break;
1822 case DW_OP_const4u:
1823 case DW_OP_const4s:
1824 size += 4;
1825 break;
1826 case DW_OP_const8u:
1827 case DW_OP_const8s:
1828 size += 8;
1829 break;
1830 case DW_OP_constu:
1831 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1832 break;
1833 case DW_OP_consts:
1834 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1835 break;
1836 case DW_OP_pick:
1837 size += 1;
1838 break;
1839 case DW_OP_plus_uconst:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1841 break;
1842 case DW_OP_skip:
1843 case DW_OP_bra:
1844 size += 2;
1845 break;
1846 case DW_OP_breg0:
1847 case DW_OP_breg1:
1848 case DW_OP_breg2:
1849 case DW_OP_breg3:
1850 case DW_OP_breg4:
1851 case DW_OP_breg5:
1852 case DW_OP_breg6:
1853 case DW_OP_breg7:
1854 case DW_OP_breg8:
1855 case DW_OP_breg9:
1856 case DW_OP_breg10:
1857 case DW_OP_breg11:
1858 case DW_OP_breg12:
1859 case DW_OP_breg13:
1860 case DW_OP_breg14:
1861 case DW_OP_breg15:
1862 case DW_OP_breg16:
1863 case DW_OP_breg17:
1864 case DW_OP_breg18:
1865 case DW_OP_breg19:
1866 case DW_OP_breg20:
1867 case DW_OP_breg21:
1868 case DW_OP_breg22:
1869 case DW_OP_breg23:
1870 case DW_OP_breg24:
1871 case DW_OP_breg25:
1872 case DW_OP_breg26:
1873 case DW_OP_breg27:
1874 case DW_OP_breg28:
1875 case DW_OP_breg29:
1876 case DW_OP_breg30:
1877 case DW_OP_breg31:
1878 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1879 break;
1880 case DW_OP_regx:
1881 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1882 break;
1883 case DW_OP_fbreg:
1884 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1885 break;
1886 case DW_OP_bregx:
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1888 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1889 break;
1890 case DW_OP_piece:
1891 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1892 break;
1893 case DW_OP_bit_piece:
1894 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1895 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1896 break;
1897 case DW_OP_deref_size:
1898 case DW_OP_xderef_size:
1899 size += 1;
1900 break;
1901 case DW_OP_call2:
1902 size += 2;
1903 break;
1904 case DW_OP_call4:
1905 size += 4;
1906 break;
1907 case DW_OP_call_ref:
1908 case DW_OP_GNU_variable_value:
1909 size += DWARF_REF_SIZE;
1910 break;
1911 case DW_OP_implicit_value:
1912 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1913 + loc->dw_loc_oprnd1.v.val_unsigned;
1914 break;
1915 case DW_OP_implicit_pointer:
1916 case DW_OP_GNU_implicit_pointer:
1917 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1918 break;
1919 case DW_OP_entry_value:
1920 case DW_OP_GNU_entry_value:
1921 {
1922 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1923 size += size_of_uleb128 (op_size) + op_size;
1924 break;
1925 }
1926 case DW_OP_const_type:
1927 case DW_OP_GNU_const_type:
1928 {
1929 unsigned long o
1930 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1931 size += size_of_uleb128 (o) + 1;
1932 switch (loc->dw_loc_oprnd2.val_class)
1933 {
1934 case dw_val_class_vec:
1935 size += loc->dw_loc_oprnd2.v.val_vec.length
1936 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1937 break;
1938 case dw_val_class_const:
1939 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1940 break;
1941 case dw_val_class_const_double:
1942 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1943 break;
1944 case dw_val_class_wide_int:
1945 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1946 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1947 break;
1948 default:
1949 gcc_unreachable ();
1950 }
1951 break;
1952 }
1953 case DW_OP_regval_type:
1954 case DW_OP_GNU_regval_type:
1955 {
1956 unsigned long o
1957 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1958 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1959 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_deref_type:
1963 case DW_OP_GNU_deref_type:
1964 {
1965 unsigned long o
1966 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1967 size += 1 + size_of_uleb128 (o);
1968 }
1969 break;
1970 case DW_OP_convert:
1971 case DW_OP_reinterpret:
1972 case DW_OP_GNU_convert:
1973 case DW_OP_GNU_reinterpret:
1974 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1975 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1976 else
1977 {
1978 unsigned long o
1979 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1980 size += size_of_uleb128 (o);
1981 }
1982 break;
1983 case DW_OP_GNU_parameter_ref:
1984 size += 4;
1985 break;
1986 default:
1987 break;
1988 }
1989
1990 return size;
1991 }
1992
1993 /* Return the size of a series of location descriptors. */
1994
1995 unsigned long
1996 size_of_locs (dw_loc_descr_ref loc)
1997 {
1998 dw_loc_descr_ref l;
1999 unsigned long size;
2000
2001 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2002 field, to avoid writing to a PCH file. */
2003 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2004 {
2005 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2006 break;
2007 size += size_of_loc_descr (l);
2008 }
2009 if (! l)
2010 return size;
2011
2012 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2013 {
2014 l->dw_loc_addr = size;
2015 size += size_of_loc_descr (l);
2016 }
2017
2018 return size;
2019 }
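/* For example, a location consisting of the single descriptor
   DW_OP_fbreg -16 needs 1 byte for the opcode plus 1 byte for the
   SLEB128-encoded offset, so size_of_locs returns 2 for it.  */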
2020
2021 /* Return the size of the value in a DW_AT_discr_value attribute. */
2022
2023 static int
2024 size_of_discr_value (dw_discr_value *discr_value)
2025 {
2026 if (discr_value->pos)
2027 return size_of_uleb128 (discr_value->v.uval);
2028 else
2029 return size_of_sleb128 (discr_value->v.sval);
2030 }
2031
2032 /* Return the size of the value in a DW_AT_discr_list attribute. */
2033
2034 static int
2035 size_of_discr_list (dw_discr_list_ref discr_list)
2036 {
2037 int size = 0;
2038
2039 for (dw_discr_list_ref list = discr_list;
2040 list != NULL;
2041 list = list->dw_discr_next)
2042 {
2043 /* One byte for the discriminant value descriptor, and then one or two
2044 LEB128 numbers, depending on whether it's a single case label or a
2045 range label. */
2046 size += 1;
2047 size += size_of_discr_value (&list->dw_discr_lower_bound);
2048 if (list->dw_discr_range != 0)
2049 size += size_of_discr_value (&list->dw_discr_upper_bound);
2050 }
2051 return size;
2052 }
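/* For example (assuming unsigned, i.e. ULEB128-encoded, discriminant
   values), a list with the single label 3 followed by the range
   10 .. 200 needs 1 + 1 bytes for the first entry and 1 + 1 + 2 bytes
   for the second (200 requires a two-byte ULEB128), so this returns 6.  */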
2053
2054 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2055 static void get_ref_die_offset_label (char *, dw_die_ref);
2056 static unsigned long int get_ref_die_offset (dw_die_ref);
2057
2058 /* Output location description stack opcode's operands (if any).
2059 The for_eh_or_skip parameter controls whether register numbers are
2060 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2061 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2062 info). This should be suppressed for the cases that have not been converted
2063 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2064
2065 static void
2066 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2067 {
2068 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2069 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2070
2071 switch (loc->dw_loc_opc)
2072 {
2073 #ifdef DWARF2_DEBUGGING_INFO
2074 case DW_OP_const2u:
2075 case DW_OP_const2s:
2076 dw2_asm_output_data (2, val1->v.val_int, NULL);
2077 break;
2078 case DW_OP_const4u:
2079 if (loc->dtprel)
2080 {
2081 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2082 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2083 val1->v.val_addr);
2084 fputc ('\n', asm_out_file);
2085 break;
2086 }
2087 /* FALLTHRU */
2088 case DW_OP_const4s:
2089 dw2_asm_output_data (4, val1->v.val_int, NULL);
2090 break;
2091 case DW_OP_const8u:
2092 if (loc->dtprel)
2093 {
2094 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2095 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2096 val1->v.val_addr);
2097 fputc ('\n', asm_out_file);
2098 break;
2099 }
2100 /* FALLTHRU */
2101 case DW_OP_const8s:
2102 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2103 dw2_asm_output_data (8, val1->v.val_int, NULL);
2104 break;
2105 case DW_OP_skip:
2106 case DW_OP_bra:
2107 {
2108 int offset;
2109
2110 gcc_assert (val1->val_class == dw_val_class_loc);
2111 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2112
2113 dw2_asm_output_data (2, offset, NULL);
2114 }
2115 break;
2116 case DW_OP_implicit_value:
2117 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2118 switch (val2->val_class)
2119 {
2120 case dw_val_class_const:
2121 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2122 break;
2123 case dw_val_class_vec:
2124 {
2125 unsigned int elt_size = val2->v.val_vec.elt_size;
2126 unsigned int len = val2->v.val_vec.length;
2127 unsigned int i;
2128 unsigned char *p;
2129
2130 if (elt_size > sizeof (HOST_WIDE_INT))
2131 {
2132 elt_size /= 2;
2133 len *= 2;
2134 }
2135 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2136 i < len;
2137 i++, p += elt_size)
2138 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2139 "fp or vector constant word %u", i);
2140 }
2141 break;
2142 case dw_val_class_const_double:
2143 {
2144 unsigned HOST_WIDE_INT first, second;
2145
2146 if (WORDS_BIG_ENDIAN)
2147 {
2148 first = val2->v.val_double.high;
2149 second = val2->v.val_double.low;
2150 }
2151 else
2152 {
2153 first = val2->v.val_double.low;
2154 second = val2->v.val_double.high;
2155 }
2156 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2157 first, NULL);
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 second, NULL);
2160 }
2161 break;
2162 case dw_val_class_wide_int:
2163 {
2164 int i;
2165 int len = get_full_len (*val2->v.val_wide);
2166 if (WORDS_BIG_ENDIAN)
2167 for (i = len - 1; i >= 0; --i)
2168 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2169 val2->v.val_wide->elt (i), NULL);
2170 else
2171 for (i = 0; i < len; ++i)
2172 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2173 val2->v.val_wide->elt (i), NULL);
2174 }
2175 break;
2176 case dw_val_class_addr:
2177 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2178 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2179 break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 break;
2184 #else
2185 case DW_OP_const2u:
2186 case DW_OP_const2s:
2187 case DW_OP_const4u:
2188 case DW_OP_const4s:
2189 case DW_OP_const8u:
2190 case DW_OP_const8s:
2191 case DW_OP_skip:
2192 case DW_OP_bra:
2193 case DW_OP_implicit_value:
2194 /* We currently don't make any attempt to make sure these are
2195 aligned properly like we do for the main unwind info, so
2196 don't support emitting things larger than a byte if we're
2197 only doing unwinding. */
2198 gcc_unreachable ();
2199 #endif
2200 case DW_OP_const1u:
2201 case DW_OP_const1s:
2202 dw2_asm_output_data (1, val1->v.val_int, NULL);
2203 break;
2204 case DW_OP_constu:
2205 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2206 break;
2207 case DW_OP_consts:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_pick:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213 case DW_OP_plus_uconst:
2214 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2215 break;
2216 case DW_OP_breg0:
2217 case DW_OP_breg1:
2218 case DW_OP_breg2:
2219 case DW_OP_breg3:
2220 case DW_OP_breg4:
2221 case DW_OP_breg5:
2222 case DW_OP_breg6:
2223 case DW_OP_breg7:
2224 case DW_OP_breg8:
2225 case DW_OP_breg9:
2226 case DW_OP_breg10:
2227 case DW_OP_breg11:
2228 case DW_OP_breg12:
2229 case DW_OP_breg13:
2230 case DW_OP_breg14:
2231 case DW_OP_breg15:
2232 case DW_OP_breg16:
2233 case DW_OP_breg17:
2234 case DW_OP_breg18:
2235 case DW_OP_breg19:
2236 case DW_OP_breg20:
2237 case DW_OP_breg21:
2238 case DW_OP_breg22:
2239 case DW_OP_breg23:
2240 case DW_OP_breg24:
2241 case DW_OP_breg25:
2242 case DW_OP_breg26:
2243 case DW_OP_breg27:
2244 case DW_OP_breg28:
2245 case DW_OP_breg29:
2246 case DW_OP_breg30:
2247 case DW_OP_breg31:
2248 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2249 break;
2250 case DW_OP_regx:
2251 {
2252 unsigned r = val1->v.val_unsigned;
2253 if (for_eh_or_skip >= 0)
2254 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2255 gcc_assert (size_of_uleb128 (r)
2256 == size_of_uleb128 (val1->v.val_unsigned));
2257 dw2_asm_output_data_uleb128 (r, NULL);
2258 }
2259 break;
2260 case DW_OP_fbreg:
2261 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2262 break;
2263 case DW_OP_bregx:
2264 {
2265 unsigned r = val1->v.val_unsigned;
2266 if (for_eh_or_skip >= 0)
2267 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2268 gcc_assert (size_of_uleb128 (r)
2269 == size_of_uleb128 (val1->v.val_unsigned));
2270 dw2_asm_output_data_uleb128 (r, NULL);
2271 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2272 }
2273 break;
2274 case DW_OP_piece:
2275 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2276 break;
2277 case DW_OP_bit_piece:
2278 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2279 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2280 break;
2281 case DW_OP_deref_size:
2282 case DW_OP_xderef_size:
2283 dw2_asm_output_data (1, val1->v.val_int, NULL);
2284 break;
2285
2286 case DW_OP_addr:
2287 if (loc->dtprel)
2288 {
2289 if (targetm.asm_out.output_dwarf_dtprel)
2290 {
2291 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2292 DWARF2_ADDR_SIZE,
2293 val1->v.val_addr);
2294 fputc ('\n', asm_out_file);
2295 }
2296 else
2297 gcc_unreachable ();
2298 }
2299 else
2300 {
2301 #ifdef DWARF2_DEBUGGING_INFO
2302 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2303 #else
2304 gcc_unreachable ();
2305 #endif
2306 }
2307 break;
2308
2309 case DW_OP_GNU_addr_index:
2310 case DW_OP_addrx:
2311 case DW_OP_GNU_const_index:
2312 case DW_OP_constx:
2313 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2314 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2315 "(index into .debug_addr)");
2316 break;
2317
2318 case DW_OP_call2:
2319 case DW_OP_call4:
2320 {
2321 unsigned long die_offset
2322 = get_ref_die_offset (val1->v.val_die_ref.die);
2323 /* Make sure the offset has been computed and that we can encode it as
2324 an operand. */
2325 gcc_assert (die_offset > 0
2326 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2327 ? 0xffff
2328 : 0xffffffff));
2329 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2330 die_offset, NULL);
2331 }
2332 break;
2333
2334 case DW_OP_call_ref:
2335 case DW_OP_GNU_variable_value:
2336 {
2337 char label[MAX_ARTIFICIAL_LABEL_BYTES
2338 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2339 gcc_assert (val1->val_class == dw_val_class_die_ref);
2340 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2341 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2342 }
2343 break;
2344
2345 case DW_OP_implicit_pointer:
2346 case DW_OP_GNU_implicit_pointer:
2347 {
2348 char label[MAX_ARTIFICIAL_LABEL_BYTES
2349 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2350 gcc_assert (val1->val_class == dw_val_class_die_ref);
2351 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2352 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2353 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2354 }
2355 break;
2356
2357 case DW_OP_entry_value:
2358 case DW_OP_GNU_entry_value:
2359 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2360 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2361 break;
2362
2363 case DW_OP_const_type:
2364 case DW_OP_GNU_const_type:
2365 {
2366 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2367 gcc_assert (o);
2368 dw2_asm_output_data_uleb128 (o, NULL);
2369 switch (val2->val_class)
2370 {
2371 case dw_val_class_const:
2372 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2373 dw2_asm_output_data (1, l, NULL);
2374 dw2_asm_output_data (l, val2->v.val_int, NULL);
2375 break;
2376 case dw_val_class_vec:
2377 {
2378 unsigned int elt_size = val2->v.val_vec.elt_size;
2379 unsigned int len = val2->v.val_vec.length;
2380 unsigned int i;
2381 unsigned char *p;
2382
2383 l = len * elt_size;
2384 dw2_asm_output_data (1, l, NULL);
2385 if (elt_size > sizeof (HOST_WIDE_INT))
2386 {
2387 elt_size /= 2;
2388 len *= 2;
2389 }
2390 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2391 i < len;
2392 i++, p += elt_size)
2393 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2394 "fp or vector constant word %u", i);
2395 }
2396 break;
2397 case dw_val_class_const_double:
2398 {
2399 unsigned HOST_WIDE_INT first, second;
2400 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2401
2402 dw2_asm_output_data (1, 2 * l, NULL);
2403 if (WORDS_BIG_ENDIAN)
2404 {
2405 first = val2->v.val_double.high;
2406 second = val2->v.val_double.low;
2407 }
2408 else
2409 {
2410 first = val2->v.val_double.low;
2411 second = val2->v.val_double.high;
2412 }
2413 dw2_asm_output_data (l, first, NULL);
2414 dw2_asm_output_data (l, second, NULL);
2415 }
2416 break;
2417 case dw_val_class_wide_int:
2418 {
2419 int i;
2420 int len = get_full_len (*val2->v.val_wide);
2421 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2422
2423 dw2_asm_output_data (1, len * l, NULL);
2424 if (WORDS_BIG_ENDIAN)
2425 for (i = len - 1; i >= 0; --i)
2426 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2427 else
2428 for (i = 0; i < len; ++i)
2429 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2430 }
2431 break;
2432 default:
2433 gcc_unreachable ();
2434 }
2435 }
2436 break;
2437 case DW_OP_regval_type:
2438 case DW_OP_GNU_regval_type:
2439 {
2440 unsigned r = val1->v.val_unsigned;
2441 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2442 gcc_assert (o);
2443 if (for_eh_or_skip >= 0)
2444 {
2445 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2446 gcc_assert (size_of_uleb128 (r)
2447 == size_of_uleb128 (val1->v.val_unsigned));
2448 }
2449 dw2_asm_output_data_uleb128 (r, NULL);
2450 dw2_asm_output_data_uleb128 (o, NULL);
2451 }
2452 break;
2453 case DW_OP_deref_type:
2454 case DW_OP_GNU_deref_type:
2455 {
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 dw2_asm_output_data (1, val1->v.val_int, NULL);
2459 dw2_asm_output_data_uleb128 (o, NULL);
2460 }
2461 break;
2462 case DW_OP_convert:
2463 case DW_OP_reinterpret:
2464 case DW_OP_GNU_convert:
2465 case DW_OP_GNU_reinterpret:
2466 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2467 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2468 else
2469 {
2470 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2471 gcc_assert (o);
2472 dw2_asm_output_data_uleb128 (o, NULL);
2473 }
2474 break;
2475
2476 case DW_OP_GNU_parameter_ref:
2477 {
2478 unsigned long o;
2479 gcc_assert (val1->val_class == dw_val_class_die_ref);
2480 o = get_ref_die_offset (val1->v.val_die_ref.die);
2481 dw2_asm_output_data (4, o, NULL);
2482 }
2483 break;
2484
2485 default:
2486 /* Other codes have no operands. */
2487 break;
2488 }
2489 }
2490
2491 /* Output a sequence of location operations.
2492 The for_eh_or_skip parameter controls whether register numbers are
2493 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2494 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2495 info). This should be suppressed for the cases that have not been converted
2496 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2497
2498 void
2499 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2500 {
2501 for (; loc != NULL; loc = loc->dw_loc_next)
2502 {
2503 enum dwarf_location_atom opc = loc->dw_loc_opc;
2504 /* Output the opcode. */
2505 if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2507 {
2508 unsigned r = (opc - DW_OP_breg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2512 }
2513 else if (for_eh_or_skip >= 0
2514 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2515 {
2516 unsigned r = (opc - DW_OP_reg0);
2517 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2518 gcc_assert (r <= 31);
2519 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2520 }
2521
2522 dw2_asm_output_data (1, opc,
2523 "%s", dwarf_stack_op_name (opc));
2524
2525 /* Output the operand(s) (if any). */
2526 output_loc_operands (loc, for_eh_or_skip);
2527 }
2528 }
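/* As an illustration, a frame-base-relative variable location given by
   the single descriptor DW_OP_fbreg -20 is emitted as the opcode byte
   0x91 followed by the one-byte SLEB128 encoding of -20 (0x6c).  */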
2529
2530 /* Output location description stack opcode's operands (if any).
2531 The output is single bytes on a line, suitable for .cfi_escape. */
2532
2533 static void
2534 output_loc_operands_raw (dw_loc_descr_ref loc)
2535 {
2536 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2537 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2538
2539 switch (loc->dw_loc_opc)
2540 {
2541 case DW_OP_addr:
2542 case DW_OP_GNU_addr_index:
2543 case DW_OP_addrx:
2544 case DW_OP_GNU_const_index:
2545 case DW_OP_constx:
2546 case DW_OP_implicit_value:
2547 /* We cannot output addresses in .cfi_escape, only bytes. */
2548 gcc_unreachable ();
2549
2550 case DW_OP_const1u:
2551 case DW_OP_const1s:
2552 case DW_OP_pick:
2553 case DW_OP_deref_size:
2554 case DW_OP_xderef_size:
2555 fputc (',', asm_out_file);
2556 dw2_asm_output_data_raw (1, val1->v.val_int);
2557 break;
2558
2559 case DW_OP_const2u:
2560 case DW_OP_const2s:
2561 fputc (',', asm_out_file);
2562 dw2_asm_output_data_raw (2, val1->v.val_int);
2563 break;
2564
2565 case DW_OP_const4u:
2566 case DW_OP_const4s:
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_raw (4, val1->v.val_int);
2569 break;
2570
2571 case DW_OP_const8u:
2572 case DW_OP_const8s:
2573 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2574 fputc (',', asm_out_file);
2575 dw2_asm_output_data_raw (8, val1->v.val_int);
2576 break;
2577
2578 case DW_OP_skip:
2579 case DW_OP_bra:
2580 {
2581 int offset;
2582
2583 gcc_assert (val1->val_class == dw_val_class_loc);
2584 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2585
2586 fputc (',', asm_out_file);
2587 dw2_asm_output_data_raw (2, offset);
2588 }
2589 break;
2590
2591 case DW_OP_regx:
2592 {
2593 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2594 gcc_assert (size_of_uleb128 (r)
2595 == size_of_uleb128 (val1->v.val_unsigned));
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (r);
2598 }
2599 break;
2600
2601 case DW_OP_constu:
2602 case DW_OP_plus_uconst:
2603 case DW_OP_piece:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2606 break;
2607
2608 case DW_OP_bit_piece:
2609 fputc (',', asm_out_file);
2610 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2611 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2612 break;
2613
2614 case DW_OP_consts:
2615 case DW_OP_breg0:
2616 case DW_OP_breg1:
2617 case DW_OP_breg2:
2618 case DW_OP_breg3:
2619 case DW_OP_breg4:
2620 case DW_OP_breg5:
2621 case DW_OP_breg6:
2622 case DW_OP_breg7:
2623 case DW_OP_breg8:
2624 case DW_OP_breg9:
2625 case DW_OP_breg10:
2626 case DW_OP_breg11:
2627 case DW_OP_breg12:
2628 case DW_OP_breg13:
2629 case DW_OP_breg14:
2630 case DW_OP_breg15:
2631 case DW_OP_breg16:
2632 case DW_OP_breg17:
2633 case DW_OP_breg18:
2634 case DW_OP_breg19:
2635 case DW_OP_breg20:
2636 case DW_OP_breg21:
2637 case DW_OP_breg22:
2638 case DW_OP_breg23:
2639 case DW_OP_breg24:
2640 case DW_OP_breg25:
2641 case DW_OP_breg26:
2642 case DW_OP_breg27:
2643 case DW_OP_breg28:
2644 case DW_OP_breg29:
2645 case DW_OP_breg30:
2646 case DW_OP_breg31:
2647 case DW_OP_fbreg:
2648 fputc (',', asm_out_file);
2649 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2650 break;
2651
2652 case DW_OP_bregx:
2653 {
2654 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2655 gcc_assert (size_of_uleb128 (r)
2656 == size_of_uleb128 (val1->v.val_unsigned));
2657 fputc (',', asm_out_file);
2658 dw2_asm_output_data_uleb128_raw (r);
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2661 }
2662 break;
2663
2664 case DW_OP_implicit_pointer:
2665 case DW_OP_entry_value:
2666 case DW_OP_const_type:
2667 case DW_OP_regval_type:
2668 case DW_OP_deref_type:
2669 case DW_OP_convert:
2670 case DW_OP_reinterpret:
2671 case DW_OP_GNU_implicit_pointer:
2672 case DW_OP_GNU_entry_value:
2673 case DW_OP_GNU_const_type:
2674 case DW_OP_GNU_regval_type:
2675 case DW_OP_GNU_deref_type:
2676 case DW_OP_GNU_convert:
2677 case DW_OP_GNU_reinterpret:
2678 case DW_OP_GNU_parameter_ref:
2679 gcc_unreachable ();
2680 break;
2681
2682 default:
2683 /* Other codes have no operands. */
2684 break;
2685 }
2686 }
2687
2688 void
2689 output_loc_sequence_raw (dw_loc_descr_ref loc)
2690 {
2691 while (1)
2692 {
2693 enum dwarf_location_atom opc = loc->dw_loc_opc;
2694 /* Output the opcode. */
2695 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2696 {
2697 unsigned r = (opc - DW_OP_breg0);
2698 r = DWARF2_FRAME_REG_OUT (r, 1);
2699 gcc_assert (r <= 31);
2700 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2701 }
2702 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2703 {
2704 unsigned r = (opc - DW_OP_reg0);
2705 r = DWARF2_FRAME_REG_OUT (r, 1);
2706 gcc_assert (r <= 31);
2707 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2708 }
2709 /* Output the opcode. */
2710 fprintf (asm_out_file, "%#x", opc);
2711 output_loc_operands_raw (loc);
2712
2713 if (!loc->dw_loc_next)
2714 break;
2715 loc = loc->dw_loc_next;
2716
2717 fputc (',', asm_out_file);
2718 }
2719 }
2720
2721 /* This function builds a dwarf location descriptor sequence from a
2722 dw_cfa_location, adding the given OFFSET to the result of the
2723 expression. */
2724
2725 struct dw_loc_descr_node *
2726 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2727 {
2728 struct dw_loc_descr_node *head, *tmp;
2729
2730 offset += cfa->offset;
2731
2732 if (cfa->indirect)
2733 {
2734 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2735 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2736 head->dw_loc_oprnd1.val_entry = NULL;
2737 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2738 add_loc_descr (&head, tmp);
2739 loc_descr_plus_const (&head, offset);
2740 }
2741 else
2742 head = new_reg_loc_descr (cfa->reg, offset);
2743
2744 return head;
2745 }
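/* For instance, given a CFA of { reg = R, offset = 16, indirect = 0 }
   and OFFSET 8, the result is the single descriptor DW_OP_bregR 24
   (assuming R <= 31). In the indirect case the sequence is, roughly,
   DW_OP_bregR <base_offset>; DW_OP_deref; followed by the addition of
   the accumulated offset.  */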
2746
2747 /* This function builds a dwarf location descriptor sequence for
2748 the address at OFFSET from the CFA when the stack is aligned to
2749 ALIGNMENT bytes. */
2750
2751 struct dw_loc_descr_node *
2752 build_cfa_aligned_loc (dw_cfa_location *cfa,
2753 poly_int64 offset, HOST_WIDE_INT alignment)
2754 {
2755 struct dw_loc_descr_node *head;
2756 unsigned int dwarf_fp
2757 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2758
2759 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2760 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2761 {
2762 head = new_reg_loc_descr (dwarf_fp, 0);
2763 add_loc_descr (&head, int_loc_descriptor (alignment));
2764 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2765 loc_descr_plus_const (&head, offset);
2766 }
2767 else
2768 head = new_reg_loc_descr (dwarf_fp, offset);
2769 return head;
2770 }
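/* Schematically, the realigned case produces the expression
   DW_OP_bregN 0; <ALIGNMENT>; DW_OP_and; <plus OFFSET>, i.e. the frame
   pointer value is ANDed with the ALIGNMENT operand before OFFSET is
   applied.  */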
2771 \f
2772 /* And now, the support for symbolic debugging information. */
2773
2774 /* .debug_str support. */
2775
2776 static void dwarf2out_init (const char *);
2777 static void dwarf2out_finish (const char *);
2778 static void dwarf2out_early_finish (const char *);
2779 static void dwarf2out_assembly_start (void);
2780 static void dwarf2out_define (unsigned int, const char *);
2781 static void dwarf2out_undef (unsigned int, const char *);
2782 static void dwarf2out_start_source_file (unsigned, const char *);
2783 static void dwarf2out_end_source_file (unsigned);
2784 static void dwarf2out_function_decl (tree);
2785 static void dwarf2out_begin_block (unsigned, unsigned);
2786 static void dwarf2out_end_block (unsigned, unsigned);
2787 static bool dwarf2out_ignore_block (const_tree);
2788 static void dwarf2out_early_global_decl (tree);
2789 static void dwarf2out_late_global_decl (tree);
2790 static void dwarf2out_type_decl (tree, int);
2791 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2792 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2793 dw_die_ref);
2794 static void dwarf2out_abstract_function (tree);
2795 static void dwarf2out_var_location (rtx_insn *);
2796 static void dwarf2out_inline_entry (tree);
2797 static void dwarf2out_size_function (tree);
2798 static void dwarf2out_begin_function (tree);
2799 static void dwarf2out_end_function (unsigned int);
2800 static void dwarf2out_register_main_translation_unit (tree unit);
2801 static void dwarf2out_set_name (tree, tree);
2802 static void dwarf2out_register_external_die (tree decl, const char *sym,
2803 unsigned HOST_WIDE_INT off);
2804 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2805 unsigned HOST_WIDE_INT *off);
2806
2807 /* The debug hooks structure. */
2808
2809 const struct gcc_debug_hooks dwarf2_debug_hooks =
2810 {
2811 dwarf2out_init,
2812 dwarf2out_finish,
2813 dwarf2out_early_finish,
2814 dwarf2out_assembly_start,
2815 dwarf2out_define,
2816 dwarf2out_undef,
2817 dwarf2out_start_source_file,
2818 dwarf2out_end_source_file,
2819 dwarf2out_begin_block,
2820 dwarf2out_end_block,
2821 dwarf2out_ignore_block,
2822 dwarf2out_source_line,
2823 dwarf2out_begin_prologue,
2824 #if VMS_DEBUGGING_INFO
2825 dwarf2out_vms_end_prologue,
2826 dwarf2out_vms_begin_epilogue,
2827 #else
2828 debug_nothing_int_charstar,
2829 debug_nothing_int_charstar,
2830 #endif
2831 dwarf2out_end_epilogue,
2832 dwarf2out_begin_function,
2833 dwarf2out_end_function, /* end_function */
2834 dwarf2out_register_main_translation_unit,
2835 dwarf2out_function_decl, /* function_decl */
2836 dwarf2out_early_global_decl,
2837 dwarf2out_late_global_decl,
2838 dwarf2out_type_decl, /* type_decl */
2839 dwarf2out_imported_module_or_decl,
2840 dwarf2out_die_ref_for_decl,
2841 dwarf2out_register_external_die,
2842 debug_nothing_tree, /* deferred_inline_function */
2843 /* The DWARF 2 backend tries to reduce debugging bloat by not
2844 emitting the abstract description of inline functions until
2845 something tries to reference them. */
2846 dwarf2out_abstract_function, /* outlining_inline_function */
2847 debug_nothing_rtx_code_label, /* label */
2848 debug_nothing_int, /* handle_pch */
2849 dwarf2out_var_location,
2850 dwarf2out_inline_entry, /* inline_entry */
2851 dwarf2out_size_function, /* size_function */
2852 dwarf2out_switch_text_section,
2853 dwarf2out_set_name,
2854 1, /* start_end_main_source_file */
2855 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2856 };
2857
2858 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2859 {
2860 dwarf2out_init,
2861 debug_nothing_charstar,
2862 debug_nothing_charstar,
2863 dwarf2out_assembly_start,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int,
2868 debug_nothing_int_int, /* begin_block */
2869 debug_nothing_int_int, /* end_block */
2870 debug_true_const_tree, /* ignore_block */
2871 dwarf2out_source_line, /* source_line */
2872 debug_nothing_int_int_charstar, /* begin_prologue */
2873 debug_nothing_int_charstar, /* end_prologue */
2874 debug_nothing_int_charstar, /* begin_epilogue */
2875 debug_nothing_int_charstar, /* end_epilogue */
2876 debug_nothing_tree, /* begin_function */
2877 debug_nothing_int, /* end_function */
2878 debug_nothing_tree, /* register_main_translation_unit */
2879 debug_nothing_tree, /* function_decl */
2880 debug_nothing_tree, /* early_global_decl */
2881 debug_nothing_tree, /* late_global_decl */
2882 debug_nothing_tree_int, /* type_decl */
2883 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2884 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2885 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2886 debug_nothing_tree, /* deferred_inline_function */
2887 debug_nothing_tree, /* outlining_inline_function */
2888 debug_nothing_rtx_code_label, /* label */
2889 debug_nothing_int, /* handle_pch */
2890 debug_nothing_rtx_insn, /* var_location */
2891 debug_nothing_tree, /* inline_entry */
2892 debug_nothing_tree, /* size_function */
2893 debug_nothing_void, /* switch_text_section */
2894 debug_nothing_tree_tree, /* set_name */
2895 0, /* start_end_main_source_file */
2896 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2897 };
2898 \f
2899 /* NOTE: In the comments in this file, many references are made to
2900 "Debugging Information Entries". This term is abbreviated as `DIE'
2901 throughout the remainder of this file. */
2902
2903 /* An internal representation of the DWARF output is built, and then
2904 walked to generate the DWARF debugging info. The walk of the internal
2905 representation is done after the entire program has been compiled.
2906 The types below are used to describe the internal representation. */
2907
2908 /* Whether to put type DIEs into their own section .debug_types instead
2909 of making them part of the .debug_info section. Only supported for
2910 DWARF v4 or higher, and only if the user didn't disable them through
2911 -fno-debug-types-section. It is more efficient to put them in
2912 separate comdat sections since the linker will then be able to
2913 remove duplicates. But not all tools support .debug_types sections
2914 yet. For DWARF v5 or higher .debug_types doesn't exist any more;
2915 type units are emitted as DW_UT_type units in the .debug_info section. */
2916
2917 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2918
2919 /* Various DIE's use offsets relative to the beginning of the
2920 .debug_info section to refer to each other. */
2921
2922 typedef long int dw_offset;
2923
2924 struct comdat_type_node;
2925
2926 /* The entries in the line_info table more-or-less mirror the opcodes
2927 that are used in the real dwarf line table. Arrays of these entries
2928 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2929 supported. */
2930
2931 enum dw_line_info_opcode {
2932 /* Emit DW_LNE_set_address; the operand is the label index. */
2933 LI_set_address,
2934
2935 /* Emit a row to the matrix with the given line. This may be done
2936 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2937 special opcodes. */
2938 LI_set_line,
2939
2940 /* Emit a DW_LNS_set_file. */
2941 LI_set_file,
2942
2943 /* Emit a DW_LNS_set_column. */
2944 LI_set_column,
2945
2946 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2947 LI_negate_stmt,
2948
2949 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2950 LI_set_prologue_end,
2951 LI_set_epilogue_begin,
2952
2953 /* Emit a DW_LNE_set_discriminator. */
2954 LI_set_discriminator,
2955
2956 /* Output a Fixed Advance PC; the target PC is the label index; the
2957 base PC is the previous LI_adv_address or LI_set_address entry.
2958 We only use this when emitting debug views without assembler
2959 support, at explicit user request. Ideally, we should only use
2960 it when the offset might be zero but we can't tell: it's the only
2961 way to maybe change the PC without resetting the view number. */
2962 LI_adv_address
2963 };
2964
2965 typedef struct GTY(()) dw_line_info_struct {
2966 enum dw_line_info_opcode opcode;
2967 unsigned int val;
2968 } dw_line_info_entry;
2969
2970
2971 struct GTY(()) dw_line_info_table {
2972 /* The label that marks the end of this section. */
2973 const char *end_label;
2974
2975 /* The values for the last row of the matrix, as collected in the table.
2976 These are used to minimize the changes to the next row. */
2977 unsigned int file_num;
2978 unsigned int line_num;
2979 unsigned int column_num;
2980 int discrim_num;
2981 bool is_stmt;
2982 bool in_use;
2983
2984 /* This denotes the NEXT view number.
2985
2986 If it is 0, it is known that the NEXT view will be the first view
2987 at the given PC.
2988
2989 If it is -1, we're forcing the view number to be reset, e.g. at a
2990 function entry.
2991
2992 The meaning of other nonzero values depends on whether we're
2993 computing views internally or leaving it for the assembler to do
2994 so. If we're emitting them internally, view denotes the view
2995 number since the last known advance of PC. If we're leaving it
2996 for the assembler, it denotes the LVU label number that we're
2997 going to ask the assembler to assign. */
2998 var_loc_view view;
2999
3000 /* This counts the number of symbolic views emitted in this table
3001 since the latest view reset. Its max value, over all tables,
3002 sets symview_upper_bound. */
3003 var_loc_view symviews_since_reset;
3004
3005 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3006 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3007 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3008 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3009
3010 vec<dw_line_info_entry, va_gc> *entries;
3011 };
3012
3013 /* This is an upper bound for view numbers that the assembler may
3014 assign to symbolic views output in this translation unit. It is used to
3015 decide how big a field to use to represent view numbers in
3016 symview-classed attributes. */
3017
3018 static var_loc_view symview_upper_bound;
3019
3020 /* If we're keeping track of location views and their reset points, and
3021 INSN is a reset point (i.e., it necessarily advances the PC), mark
3022 the next view in TABLE as reset. */
3023
3024 static void
3025 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3026 {
3027 if (!debug_internal_reset_location_views)
3028 return;
3029
3030 /* Maybe turn (part of?) this test into a default target hook. */
3031 int reset = 0;
3032
3033 if (targetm.reset_location_view)
3034 reset = targetm.reset_location_view (insn);
3035
3036 if (reset)
3037 ;
3038 else if (JUMP_TABLE_DATA_P (insn))
3039 reset = 1;
3040 else if (GET_CODE (insn) == USE
3041 || GET_CODE (insn) == CLOBBER
3042 || GET_CODE (insn) == ASM_INPUT
3043 || asm_noperands (insn) >= 0)
3044 ;
3045 else if (get_attr_min_length (insn) > 0)
3046 reset = 1;
3047
3048 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3049 RESET_NEXT_VIEW (table->view);
3050 }
3051
3052 /* Each DIE attribute has a field specifying the attribute kind,
3053 a link to the next attribute in the chain, and an attribute value.
3054 Attributes are typically linked below the DIE they modify. */
3055
3056 typedef struct GTY(()) dw_attr_struct {
3057 enum dwarf_attribute dw_attr;
3058 dw_val_node dw_attr_val;
3059 }
3060 dw_attr_node;
3061
3062
3063 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3064 The children of each node form a circular list linked by
3065 die_sib. die_child points to the node *before* the "first" child node. */
3066
3067 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3068 union die_symbol_or_type_node
3069 {
3070 const char * GTY ((tag ("0"))) die_symbol;
3071 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3072 }
3073 GTY ((desc ("%0.comdat_type_p"))) die_id;
3074 vec<dw_attr_node, va_gc> *die_attr;
3075 dw_die_ref die_parent;
3076 dw_die_ref die_child;
3077 dw_die_ref die_sib;
3078 dw_die_ref die_definition; /* ref from a specification to its definition */
3079 dw_offset die_offset;
3080 unsigned long die_abbrev;
3081 int die_mark;
3082 unsigned int decl_id;
3083 enum dwarf_tag die_tag;
3084 /* Die is used and must not be pruned as unused. */
3085 BOOL_BITFIELD die_perennial_p : 1;
3086 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3087 /* For an external ref to die_symbol if die_offset contains an extra
3088 offset to that symbol. */
3089 BOOL_BITFIELD with_offset : 1;
3090 /* Whether this DIE was removed from the DIE tree, for example via
3091 prune_unused_types. We don't consider those present from the
3092 DIE lookup routines. */
3093 BOOL_BITFIELD removed : 1;
3094 /* Lots of spare bits. */
3095 }
3096 die_node;
3097
3098 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3099 static bool early_dwarf;
3100 static bool early_dwarf_finished;
3101 struct set_early_dwarf {
3102 bool saved;
3103 set_early_dwarf () : saved(early_dwarf)
3104 {
3105 gcc_assert (! early_dwarf_finished);
3106 early_dwarf = true;
3107 }
3108 ~set_early_dwarf () { early_dwarf = saved; }
3109 };
3110
3111 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3112 #define FOR_EACH_CHILD(die, c, expr) do { \
3113 c = die->die_child; \
3114 if (c) do { \
3115 c = c->die_sib; \
3116 expr; \
3117 } while (c != die->die_child); \
3118 } while (0)
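/* A minimal usage sketch: to count the children of DIE one could write
     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);
   where `die', `c' and `n' are purely illustrative names.  */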
3119
3120 /* The pubname structure */
3121
3122 typedef struct GTY(()) pubname_struct {
3123 dw_die_ref die;
3124 const char *name;
3125 }
3126 pubname_entry;
3127
3128
3129 struct GTY(()) dw_ranges {
3130 const char *label;
3131 /* If this is positive, it's a block number, otherwise it's a
3132 bitwise-negated index into dw_ranges_by_label. */
3133 int num;
3134 /* Index for the range list for DW_FORM_rnglistx. */
3135 unsigned int idx : 31;
3136 /* True if this range might possibly be in a different section
3137 from the previous entry. */
3138 unsigned int maybe_new_sec : 1;
3139 };
3140
3141 /* A structure to hold a macinfo entry. */
3142
3143 typedef struct GTY(()) macinfo_struct {
3144 unsigned char code;
3145 unsigned HOST_WIDE_INT lineno;
3146 const char *info;
3147 }
3148 macinfo_entry;
3149
3150
3151 struct GTY(()) dw_ranges_by_label {
3152 const char *begin;
3153 const char *end;
3154 };
3155
3156 /* The comdat type node structure. */
3157 struct GTY(()) comdat_type_node
3158 {
3159 dw_die_ref root_die;
3160 dw_die_ref type_die;
3161 dw_die_ref skeleton_die;
3162 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3163 comdat_type_node *next;
3164 };
3165
3166 /* A list of DIEs for which we can't determine ancestry (parent_die
3167 field) just yet. Later in dwarf2out_finish we will fill in the
3168 missing bits. */
3169 typedef struct GTY(()) limbo_die_struct {
3170 dw_die_ref die;
3171 /* The tree for which this DIE was created. We use this to
3172 determine ancestry later. */
3173 tree created_for;
3174 struct limbo_die_struct *next;
3175 }
3176 limbo_die_node;
3177
3178 typedef struct skeleton_chain_struct
3179 {
3180 dw_die_ref old_die;
3181 dw_die_ref new_die;
3182 struct skeleton_chain_struct *parent;
3183 }
3184 skeleton_chain_node;
3185
3186 /* Define a macro which returns nonzero for a TYPE_DECL which was
3187 implicitly generated for a type.
3188
3189 Note that, unlike the C front-end (which generates a NULL named
3190 TYPE_DECL node for each complete tagged type, each array type,
3191 and each function type node created), the C++ front-end generates
3192 a _named_ TYPE_DECL node for each tagged type node created.
3193 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3194 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3195 front-end, but for each type, tagged or not. */
3196
3197 #define TYPE_DECL_IS_STUB(decl) \
3198 (DECL_NAME (decl) == NULL_TREE \
3199 || (DECL_ARTIFICIAL (decl) \
3200 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3201 /* This is necessary for stub decls that \
3202 appear in nested inline functions. */ \
3203 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3204 && (decl_ultimate_origin (decl) \
3205 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3206
3207 /* Information concerning the compilation unit's programming
3208 language, and compiler version. */
3209
3210 /* Fixed size portion of the DWARF compilation unit header. */
3211 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3212 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3213 + (dwarf_version >= 5 ? 4 : 3))
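/* For example, with 32-bit DWARF (4-byte initial length, 4-byte offsets)
   this is 4 + 4 + 3 = 11 bytes for DWARF 2-4 (length, version, abbrev
   offset, address size) and 4 + 4 + 4 = 12 bytes for DWARF 5, which adds
   a unit_type byte.  */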
3214
3215 /* Fixed size portion of the DWARF comdat type unit header. */
3216 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3217 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3219
3220 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3221 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3222 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3223
3224 /* Fixed size portion of public names info. */
3225 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3226
3227 /* Fixed size portion of the address range info. */
3228 #define DWARF_ARANGES_HEADER_SIZE \
3229 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3230 DWARF2_ADDR_SIZE * 2) \
3231 - DWARF_INITIAL_LENGTH_SIZE)
3232
3233 /* Size of padding portion in the address range info. It must be
3234 aligned to twice the pointer size. */
3235 #define DWARF_ARANGES_PAD_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3239
3240 /* Use assembler line directives if available. */
3241 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3242 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3243 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3244 #else
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3246 #endif
3247 #endif
3248
3249 /* Use assembler views in line directives if available. */
3250 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3251 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3252 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3253 #else
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3255 #endif
3256 #endif
3257
3258 /* Return true if GCC configure detected assembler support for .loc. */
3259
3260 bool
3261 dwarf2out_default_as_loc_support (void)
3262 {
3263 return DWARF2_ASM_LINE_DEBUG_INFO;
3264 #if (GCC_VERSION >= 3000)
3265 # undef DWARF2_ASM_LINE_DEBUG_INFO
3266 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3267 #endif
3268 }
3269
3270 /* Return true if GCC configure detected assembler support for views
3271 in .loc directives. */
3272
3273 bool
3274 dwarf2out_default_as_locview_support (void)
3275 {
3276 return DWARF2_ASM_VIEW_DEBUG_INFO;
3277 #if (GCC_VERSION >= 3000)
3278 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3279 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3280 #endif
3281 }
3282
3283 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3284 view computation, and it refers to a view identifier for which we
3285 will not emit a label because it is known to map to a view number
3286 zero. We won't allocate the bitmap if we're not using assembler
3287 support for location views, but we have to make the variable
3288 visible for GGC and for code that will be optimized out for lack of
3289 support but that's still parsed and compiled. We could abstract it
3290 out with macros, but it's not worth it. */
3291 static GTY(()) bitmap zero_view_p;
3292
3293 /* Evaluate to TRUE iff N is known to identify the first location view
3294 at its PC. When not using assembler location view computation,
3295 that must be view number zero. Otherwise, the zero_view_p bitmap is
3296 allocated, and the view label numbers recorded in it are the ones
3297 known to be zero. */
3298 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3299 || (N) == (var_loc_view)-1 \
3300 || (zero_view_p \
3301 && bitmap_bit_p (zero_view_p, (N))))
3302
3303 /* Return true iff we're to emit .loc directives for the assembler to
3304 generate line number sections.
3305
3306 When we're not emitting views, all we need from the assembler is
3307 support for .loc directives.
3308
3309 If we are emitting views, we can only use the assembler's .loc
3310 support if it also supports views.
3311
3312 When the compiler is emitting the line number programs and
3313 computing view numbers itself, it resets view numbers at known PC
3314 changes and counts from that, and then it emits view numbers as
3315 literal constants in locviewlists. There are cases in which the
3316 compiler is not sure about PC changes, e.g. when extra alignment is
3317 requested for a label. In these cases, the compiler may not reset
3318 the view counter, and the potential PC advance in the line number
3319 program will use an opcode that does not reset the view counter
3320 even if the PC actually changes, so that compiler and debug info
3321 consumer can keep view numbers in sync.
3322
3323 When the compiler defers view computation to the assembler, it
3324 emits symbolic view numbers in locviewlists, with the exception of
3325 views known to be zero (forced resets, or reset after
3326 compiler-visible PC changes): instead of emitting symbols for
3327 these, we emit literal zero and assert the assembler agrees with
3328 the compiler's assessment. We could use symbolic views everywhere,
3329 instead of special-casing zero views, but then we'd be unable to
3330 optimize out locviewlists that contain only zeros. */
3331
3332 static bool
3333 output_asm_line_debug_info (void)
3334 {
3335 return (dwarf2out_as_loc_support
3336 && (dwarf2out_as_locview_support
3337 || !debug_variable_location_views));
3338 }
3339
3340 /* Minimum line offset in a special line info. opcode.
3341 This value was chosen to give a reasonable range of values. */
3342 #define DWARF_LINE_BASE -10
3343
3344 /* First special line opcode - leave room for the standard opcodes. */
3345 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3346
3347 /* Range of line offsets in a special line info. opcode. */
3348 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
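/* With the values above (opcode base DW_LNS_set_isa + 1 == 13, line base
   -10, line range 254 - 13 + 1 == 242), the standard DWARF recipe for a
   special opcode is
     (line_delta - DWARF_LINE_BASE)
     + DWARF_LINE_RANGE * addr_delta + DWARF_LINE_OPCODE_BASE
   so, for example, advancing the line by 2 with no address advance is
   encoded as the special opcode (2 + 10) + 0 + 13 = 25.  */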
3349
3350 /* Flag that indicates the initial value of the is_stmt_start flag.
3351 In the present implementation, we do not mark any lines as
3352 the beginning of a source statement, because that information
3353 is not made available by the GCC front-end. */
3354 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3355
3356 /* Maximum number of operations per instruction bundle. */
3357 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3358 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3359 #endif
3360
3361 /* This location is used by calc_die_sizes() to keep track of
3362 the offset of each DIE within the .debug_info section. */
3363 static unsigned long next_die_offset;
3364
3365 /* Record the root of the DIE's built for the current compilation unit. */
3366 static GTY(()) dw_die_ref single_comp_unit_die;
3367
3368 /* A list of type DIEs that have been separated into comdat sections. */
3369 static GTY(()) comdat_type_node *comdat_type_list;
3370
3371 /* A list of CU DIEs that have been separated. */
3372 static GTY(()) limbo_die_node *cu_die_list;
3373
3374 /* A list of DIEs with a NULL parent waiting to be relocated. */
3375 static GTY(()) limbo_die_node *limbo_die_list;
3376
3377 /* A list of DIEs for which we may have to generate
3378 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3379 static GTY(()) limbo_die_node *deferred_asm_name;
3380
3381 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3382 {
3383 typedef const char *compare_type;
3384
3385 static hashval_t hash (dwarf_file_data *);
3386 static bool equal (dwarf_file_data *, const char *);
3387 };
3388
3389 /* Filenames referenced by this compilation unit. */
3390 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3391
3392 struct decl_die_hasher : ggc_ptr_hash<die_node>
3393 {
3394 typedef tree compare_type;
3395
3396 static hashval_t hash (die_node *);
3397 static bool equal (die_node *, tree);
3398 };
3399 /* A hash table of references to DIE's that describe declarations.
3400 The key is a DECL_UID() which is a unique number identifying each decl. */
3401 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3402
3403 struct GTY ((for_user)) variable_value_struct {
3404 unsigned int decl_id;
3405 vec<dw_die_ref, va_gc> *dies;
3406 };
3407
3408 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3409 {
3410 typedef tree compare_type;
3411
3412 static hashval_t hash (variable_value_struct *);
3413 static bool equal (variable_value_struct *, tree);
3414 };
3415 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3416 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3417 DECL_CONTEXT of the referenced VAR_DECLs. */
3418 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3419
3420 struct block_die_hasher : ggc_ptr_hash<die_struct>
3421 {
3422 static hashval_t hash (die_struct *);
3423 static bool equal (die_struct *, die_struct *);
3424 };
3425
3426 /* A hash table of references to DIE's that describe COMMON blocks.
3427 The key is DECL_UID() ^ die_parent. */
3428 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3429
3430 typedef struct GTY(()) die_arg_entry_struct {
3431 dw_die_ref die;
3432 tree arg;
3433 } die_arg_entry;
3434
3435
3436 /* Node of the variable location list. */
3437 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3438 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3439 an EXPR_LIST chain. For small bitsizes, the bitsize is encoded
3440 in the mode of the EXPR_LIST node and the first EXPR_LIST operand
3441 is either a NOTE_INSN_VAR_LOCATION for a piece with a known
3442 location or NULL for padding. For larger bitsizes, the
3443 mode is 0 and the first operand is a CONCAT with the bitsize
3444 as the first CONCAT operand and NOTE_INSN_VAR_LOCATION (resp.
3445 NULL) as the second operand. */
3446 rtx GTY (()) loc;
3447 const char * GTY (()) label;
3448 struct var_loc_node * GTY (()) next;
3449 var_loc_view view;
3450 };
3451
3452 /* Variable location list. */
3453 struct GTY ((for_user)) var_loc_list_def {
3454 struct var_loc_node * GTY (()) first;
3455
3456 /* Pointer to the last or last-but-one element of the
3457 chained list. If the list is empty, both first and
3458 last are NULL. If the list contains just one node,
3459 or the last node is certainly not redundant, it points
3460 to the last node; otherwise it points to the last but one.
3461 Do not mark it for GC because it is marked through the chain. */
3462 struct var_loc_node * GTY ((skip ("%h"))) last;
3463
3464 /* Pointer to the last element before the section switch;
3465 if NULL, either sections weren't switched or first
3466 is after the section switch. */
3467 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3468
3469 /* DECL_UID of the variable decl. */
3470 unsigned int decl_id;
3471 };
3472 typedef struct var_loc_list_def var_loc_list;
3473
3474 /* Call argument location list. */
3475 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3476 rtx GTY (()) call_arg_loc_note;
3477 const char * GTY (()) label;
3478 tree GTY (()) block;
3479 bool tail_call_p;
3480 rtx GTY (()) symbol_ref;
3481 struct call_arg_loc_node * GTY (()) next;
3482 };
3483
3484
3485 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3486 {
3487 typedef const_tree compare_type;
3488
3489 static hashval_t hash (var_loc_list *);
3490 static bool equal (var_loc_list *, const_tree);
3491 };
3492
3493 /* Table of decl location linked lists. */
3494 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3495
3496 /* Head and tail of call_arg_loc chain. */
3497 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3498 static struct call_arg_loc_node *call_arg_loc_last;
3499
3500 /* Number of call sites in the current function. */
3501 static int call_site_count = -1;
3502 /* Number of tail call sites in the current function. */
3503 static int tail_call_site_count = -1;
3504
3505 /* A cached location list. */
3506 struct GTY ((for_user)) cached_dw_loc_list_def {
3507 /* The DECL_UID of the decl that this entry describes. */
3508 unsigned int decl_id;
3509
3510 /* The cached location list. */
3511 dw_loc_list_ref loc_list;
3512 };
3513 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3514
3515 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3516 {
3517
3518 typedef const_tree compare_type;
3519
3520 static hashval_t hash (cached_dw_loc_list *);
3521 static bool equal (cached_dw_loc_list *, const_tree);
3522 };
3523
3524 /* Table of cached location lists. */
3525 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3526
3527 /* A vector of references to DIE's that are uniquely identified by their tag,
3528 presence/absence of children DIE's, and list of attribute/value pairs. */
3529 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3530
3531 /* A hash map to remember the stack usage for DWARF procedures. The value
3532 stored is the stack size difference between just before the DWARF procedure
3533 invocation and just after it returns. In other words, for a DWARF procedure
3534 that consumes N stack slots and pushes M, this stores M - N. */
3535 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3536
3537 /* A global counter for generating labels for line number data. */
3538 static unsigned int line_info_label_num;
3539
3540 /* The current table to which we should emit line number information
3541 for the current function. This will be set up at the beginning of
3542 assembly for the function. */
3543 static GTY(()) dw_line_info_table *cur_line_info_table;
3544
3545 /* The two default tables of line number info. */
3546 static GTY(()) dw_line_info_table *text_section_line_info;
3547 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3548
3549 /* The set of all non-default tables of line number info. */
3550 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3551
3552 /* A flag to tell the pubnames/pubtypes export code whether there is an
3553 info section to refer to. */
3554 static bool info_section_emitted;
3555
3556 /* A pointer to the base of a table that contains a list of publicly
3557 accessible names. */
3558 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3559
3560 /* A pointer to the base of a table that contains a list of publicly
3561 accessible types. */
3562 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3563
3564 /* A pointer to the base of a table that contains a list of macro
3565 defines/undefines (and file start/end markers). */
3566 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3567
3568 /* True if .debug_macinfo or .debug_macros section is going to be
3569 emitted. */
3570 #define have_macinfo \
3571 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3572 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3573 && !macinfo_table->is_empty ())
3574
3575 /* Vector of dies for which we should generate .debug_ranges info. */
3576 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3577
3578 /* Vector of pairs of labels referenced in ranges_table. */
3579 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3580
3581 /* Whether we have location lists that need outputting. */
3582 static GTY(()) bool have_location_lists;
3583
3584 /* Unique label counter. */
3585 static GTY(()) unsigned int loclabel_num;
3586
3587 /* Unique label counter for point-of-call tables. */
3588 static GTY(()) unsigned int poc_label_num;
3589
3590 /* The last file entry emitted by maybe_emit_file(). */
3591 static GTY(()) struct dwarf_file_data * last_emitted_file;
3592
3593 /* Number of internal labels generated by gen_internal_sym(). */
3594 static GTY(()) int label_num;
3595
3596 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3597
3598 /* Instances of generic types for which we need to generate debug
3599 info describing their generic parameters and arguments. That
3600 generation needs to happen once all types are properly laid out, so
3601 we do it at the end of compilation. */
3602 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3603
3604 /* Offset from the "steady-state frame pointer" to the frame base,
3605 within the current function. */
3606 static poly_int64 frame_pointer_fb_offset;
3607 static bool frame_pointer_fb_offset_valid;
3608
3609 static vec<dw_die_ref> base_types;
3610
3611 /* Flags to represent a set of attribute classes for attributes that represent
3612 a scalar value (bounds, pointers, ...). */
3613 enum dw_scalar_form
3614 {
3615 dw_scalar_form_constant = 0x01,
3616 dw_scalar_form_exprloc = 0x02,
3617 dw_scalar_form_reference = 0x04
3618 };
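/* These values are combined as a bitmask (e.g. dw_scalar_form_constant
   | dw_scalar_form_exprloc), presumably the int argument of
   add_scalar_info declared below, to say which forms are acceptable
   when emitting a given scalar attribute.  */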
3619
3620 /* Forward declarations for functions defined in this file. */
3621
3622 static int is_pseudo_reg (const_rtx);
3623 static tree type_main_variant (tree);
3624 static int is_tagged_type (const_tree);
3625 static const char *dwarf_tag_name (unsigned);
3626 static const char *dwarf_attr_name (unsigned);
3627 static const char *dwarf_form_name (unsigned);
3628 static tree decl_ultimate_origin (const_tree);
3629 static tree decl_class_context (tree);
3630 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3631 static inline enum dw_val_class AT_class (dw_attr_node *);
3632 static inline unsigned int AT_index (dw_attr_node *);
3633 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3634 static inline unsigned AT_flag (dw_attr_node *);
3635 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3636 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3637 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3638 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3639 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3640 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3641 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3642 unsigned int, unsigned char *);
3643 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3644 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3645 static inline const char *AT_string (dw_attr_node *);
3646 static enum dwarf_form AT_string_form (dw_attr_node *);
3647 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3648 static void add_AT_specification (dw_die_ref, dw_die_ref);
3649 static inline dw_die_ref AT_ref (dw_attr_node *);
3650 static inline int AT_ref_external (dw_attr_node *);
3651 static inline void set_AT_ref_external (dw_attr_node *, int);
3652 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3653 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3654 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3655 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3656 dw_loc_list_ref);
3657 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3658 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3659 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3660 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3661 static void remove_addr_table_entry (addr_table_entry *);
3662 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3663 static inline rtx AT_addr (dw_attr_node *);
3664 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3668 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3669 const char *);
3670 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3671 unsigned HOST_WIDE_INT);
3672 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3673 unsigned long, bool);
3674 static inline const char *AT_lbl (dw_attr_node *);
3675 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3676 static const char *get_AT_low_pc (dw_die_ref);
3677 static const char *get_AT_hi_pc (dw_die_ref);
3678 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3679 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3680 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3681 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3682 static bool is_cxx (void);
3683 static bool is_cxx (const_tree);
3684 static bool is_fortran (void);
3685 static bool is_ada (void);
3686 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3687 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3688 static void add_child_die (dw_die_ref, dw_die_ref);
3689 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3690 static dw_die_ref lookup_type_die (tree);
3691 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3692 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3693 static void equate_type_number_to_die (tree, dw_die_ref);
3694 static dw_die_ref lookup_decl_die (tree);
3695 static var_loc_list *lookup_decl_loc (const_tree);
3696 static void equate_decl_number_to_die (tree, dw_die_ref);
3697 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3698 static void print_spaces (FILE *);
3699 static void print_die (dw_die_ref, FILE *);
3700 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3701 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3702 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3704 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3705 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3706 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3707 struct md5_ctx *, int *);
3708 struct checksum_attributes;
3709 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3710 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3711 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3712 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3713 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3714 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3715 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3716 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3717 static int is_type_die (dw_die_ref);
3718 static int is_comdat_die (dw_die_ref);
3719 static inline bool is_template_instantiation (dw_die_ref);
3720 static int is_declaration_die (dw_die_ref);
3721 static int should_move_die_to_comdat (dw_die_ref);
3722 static dw_die_ref clone_as_declaration (dw_die_ref);
3723 static dw_die_ref clone_die (dw_die_ref);
3724 static dw_die_ref clone_tree (dw_die_ref);
3725 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3726 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3727 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3728 static dw_die_ref generate_skeleton (dw_die_ref);
3729 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3730 dw_die_ref,
3731 dw_die_ref);
3732 static void break_out_comdat_types (dw_die_ref);
3733 static void copy_decls_for_unworthy_types (dw_die_ref);
3734
3735 static void add_sibling_attributes (dw_die_ref);
3736 static void output_location_lists (dw_die_ref);
3737 static int constant_size (unsigned HOST_WIDE_INT);
3738 static unsigned long size_of_die (dw_die_ref);
3739 static void calc_die_sizes (dw_die_ref);
3740 static void calc_base_type_die_sizes (void);
3741 static void mark_dies (dw_die_ref);
3742 static void unmark_dies (dw_die_ref);
3743 static void unmark_all_dies (dw_die_ref);
3744 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3745 static unsigned long size_of_aranges (void);
3746 static enum dwarf_form value_format (dw_attr_node *);
3747 static void output_value_format (dw_attr_node *);
3748 static void output_abbrev_section (void);
3749 static void output_die_abbrevs (unsigned long, dw_die_ref);
3750 static void output_die (dw_die_ref);
3751 static void output_compilation_unit_header (enum dwarf_unit_type);
3752 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3753 static void output_comdat_type_unit (comdat_type_node *);
3754 static const char *dwarf2_name (tree, int);
3755 static void add_pubname (tree, dw_die_ref);
3756 static void add_enumerator_pubname (const char *, dw_die_ref);
3757 static void add_pubname_string (const char *, dw_die_ref);
3758 static void add_pubtype (tree, dw_die_ref);
3759 static void output_pubnames (vec<pubname_entry, va_gc> *);
3760 static void output_aranges (void);
3761 static unsigned int add_ranges (const_tree, bool = false);
3762 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3763 bool *, bool);
3764 static void output_ranges (void);
3765 static dw_line_info_table *new_line_info_table (void);
3766 static void output_line_info (bool);
3767 static void output_file_names (void);
3768 static dw_die_ref base_type_die (tree, bool);
3769 static int is_base_type (tree);
3770 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3771 static int decl_quals (const_tree);
3772 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3773 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3774 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3775 static int type_is_enum (const_tree);
3776 static unsigned int dbx_reg_number (const_rtx);
3777 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3778 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3779 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3780 enum var_init_status);
3781 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3782 enum var_init_status);
3783 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3784 enum var_init_status);
3785 static int is_based_loc (const_rtx);
3786 static bool resolve_one_addr (rtx *);
3787 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3788 enum var_init_status);
3789 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3790 enum var_init_status);
3791 struct loc_descr_context;
3792 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3793 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3794 static dw_loc_list_ref loc_list_from_tree (tree, int,
3795 struct loc_descr_context *);
3796 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3797 struct loc_descr_context *);
3798 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3799 static tree field_type (const_tree);
3800 static unsigned int simple_type_align_in_bits (const_tree);
3801 static unsigned int simple_decl_align_in_bits (const_tree);
3802 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3803 struct vlr_context;
3804 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3805 HOST_WIDE_INT *);
3806 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3807 dw_loc_list_ref);
3808 static void add_data_member_location_attribute (dw_die_ref, tree,
3809 struct vlr_context *);
3810 static bool add_const_value_attribute (dw_die_ref, rtx);
3811 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3812 static void insert_wide_int (const wide_int &, unsigned char *, int);
3813 static void insert_float (const_rtx, unsigned char *);
3814 static rtx rtl_for_decl_location (tree);
3815 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3816 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3817 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3818 static void add_name_attribute (dw_die_ref, const char *);
3819 static void add_desc_attribute (dw_die_ref, tree);
3820 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3821 static void add_comp_dir_attribute (dw_die_ref);
3822 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3823 struct loc_descr_context *);
3824 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3825 struct loc_descr_context *);
3826 static void add_subscript_info (dw_die_ref, tree, bool);
3827 static void add_byte_size_attribute (dw_die_ref, tree);
3828 static void add_alignment_attribute (dw_die_ref, tree);
3829 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3830 struct vlr_context *);
3831 static void add_bit_size_attribute (dw_die_ref, tree);
3832 static void add_prototyped_attribute (dw_die_ref, tree);
3833 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3834 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3835 static void add_src_coords_attributes (dw_die_ref, tree);
3836 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3837 static void add_discr_value (dw_die_ref, dw_discr_value *);
3838 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3839 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3840 static dw_die_ref scope_die_for (tree, dw_die_ref);
3841 static inline int local_scope_p (dw_die_ref);
3842 static inline int class_scope_p (dw_die_ref);
3843 static inline int class_or_namespace_scope_p (dw_die_ref);
3844 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3845 static void add_calling_convention_attribute (dw_die_ref, tree);
3846 static const char *type_tag (const_tree);
3847 static tree member_declared_type (const_tree);
3848 #if 0
3849 static const char *decl_start_label (tree);
3850 #endif
3851 static void gen_array_type_die (tree, dw_die_ref);
3852 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3853 #if 0
3854 static void gen_entry_point_die (tree, dw_die_ref);
3855 #endif
3856 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3858 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3859 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3860 static void gen_formal_types_die (tree, dw_die_ref);
3861 static void gen_subprogram_die (tree, dw_die_ref);
3862 static void gen_variable_die (tree, tree, dw_die_ref);
3863 static void gen_const_die (tree, dw_die_ref);
3864 static void gen_label_die (tree, dw_die_ref);
3865 static void gen_lexical_block_die (tree, dw_die_ref);
3866 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3867 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3868 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3869 static dw_die_ref gen_compile_unit_die (const char *);
3870 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3871 static void gen_member_die (tree, dw_die_ref);
3872 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3873 enum debug_info_usage);
3874 static void gen_subroutine_type_die (tree, dw_die_ref);
3875 static void gen_typedef_die (tree, dw_die_ref);
3876 static void gen_type_die (tree, dw_die_ref);
3877 static void gen_block_die (tree, dw_die_ref);
3878 static void decls_for_scope (tree, dw_die_ref);
3879 static bool is_naming_typedef_decl (const_tree);
3880 static inline dw_die_ref get_context_die (tree);
3881 static void gen_namespace_die (tree, dw_die_ref);
3882 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3883 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3884 static dw_die_ref force_decl_die (tree);
3885 static dw_die_ref force_type_die (tree);
3886 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3887 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3888 static struct dwarf_file_data * lookup_filename (const char *);
3889 static void retry_incomplete_types (void);
3890 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3891 static void gen_generic_params_dies (tree);
3892 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3893 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3894 static void splice_child_die (dw_die_ref, dw_die_ref);
3895 static int file_info_cmp (const void *, const void *);
3896 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3897 const char *, var_loc_view, const char *);
3898 static void output_loc_list (dw_loc_list_ref);
3899 static char *gen_internal_sym (const char *);
3900 static bool want_pubnames (void);
3901
3902 static void prune_unmark_dies (dw_die_ref);
3903 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3904 static void prune_unused_types_mark (dw_die_ref, int);
3905 static void prune_unused_types_walk (dw_die_ref);
3906 static void prune_unused_types_walk_attribs (dw_die_ref);
3907 static void prune_unused_types_prune (dw_die_ref);
3908 static void prune_unused_types (void);
3909 static int maybe_emit_file (struct dwarf_file_data *fd);
3910 static inline const char *AT_vms_delta1 (dw_attr_node *);
3911 static inline const char *AT_vms_delta2 (dw_attr_node *);
3912 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3913 const char *, const char *);
3914 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3915 static void gen_remaining_tmpl_value_param_die_attribute (void);
3916 static bool generic_type_p (tree);
3917 static void schedule_generic_params_dies_gen (tree t);
3918 static void gen_scheduled_generic_parms_dies (void);
3919 static void resolve_variable_values (void);
3920
3921 static const char *comp_dir_string (void);
3922
3923 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3924
3925 /* enum for tracking thread-local variables whose address is really an offset
3926 relative to the TLS pointer, which will need link-time relocation, but will
3927 not need relocation by the DWARF consumer. */
3928
3929 enum dtprel_bool
3930 {
3931 dtprel_false = 0,
3932 dtprel_true = 1
3933 };
3934
3935 /* Return the operator to use for an address of a variable. For dtprel_true, we
3936 use DW_OP_const*. For regular variables, which need both link-time
3937 relocation and consumer-level relocation (e.g., to account for shared objects
3938 loaded at a random address), we use DW_OP_addr*. */
3939
3940 static inline enum dwarf_location_atom
3941 dw_addr_op (enum dtprel_bool dtprel)
3942 {
3943 if (dtprel == dtprel_true)
3944 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3945 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3946 else
3947 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3948 }
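/* For example, with split debug info (-gsplit-dwarf) a TLS offset
   (dtprel_true) is emitted via DW_OP_constx and an ordinary address via
   DW_OP_addrx; without split debug info the same cases use
   DW_OP_const4u/DW_OP_const8u (depending on DWARF2_ADDR_SIZE) and
   DW_OP_addr.  */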
3949
3950 /* Return a pointer to a newly allocated address location description. If
3951 dwarf_split_debug_info is true, then record the address with the appropriate
3952 relocation. */
3953 static inline dw_loc_descr_ref
3954 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3955 {
3956 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3957
3958 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3959 ref->dw_loc_oprnd1.v.val_addr = addr;
3960 ref->dtprel = dtprel;
3961 if (dwarf_split_debug_info)
3962 ref->dw_loc_oprnd1.val_entry
3963 = add_addr_table_entry (addr,
3964 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3965 else
3966 ref->dw_loc_oprnd1.val_entry = NULL;
3967
3968 return ref;
3969 }
3970
3971 /* Section names used to hold DWARF debugging information. */
3972
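/* Most of the sections below come in up to four variants: the plain
   section, a ".dwo" variant for split debug info, and
   ".gnu.debuglto_"-prefixed variants for early LTO debug output.
   Targets may override any of these names.  */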
3973 #ifndef DEBUG_INFO_SECTION
3974 #define DEBUG_INFO_SECTION ".debug_info"
3975 #endif
3976 #ifndef DEBUG_DWO_INFO_SECTION
3977 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3978 #endif
3979 #ifndef DEBUG_LTO_INFO_SECTION
3980 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3981 #endif
3982 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3983 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3984 #endif
3985 #ifndef DEBUG_ABBREV_SECTION
3986 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3987 #endif
3988 #ifndef DEBUG_LTO_ABBREV_SECTION
3989 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3990 #endif
3991 #ifndef DEBUG_DWO_ABBREV_SECTION
3992 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3993 #endif
3994 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3995 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3996 #endif
3997 #ifndef DEBUG_ARANGES_SECTION
3998 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3999 #endif
4000 #ifndef DEBUG_ADDR_SECTION
4001 #define DEBUG_ADDR_SECTION ".debug_addr"
4002 #endif
4003 #ifndef DEBUG_MACINFO_SECTION
4004 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4005 #endif
4006 #ifndef DEBUG_LTO_MACINFO_SECTION
4007 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4008 #endif
4009 #ifndef DEBUG_DWO_MACINFO_SECTION
4010 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4011 #endif
4012 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4013 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4014 #endif
4015 #ifndef DEBUG_MACRO_SECTION
4016 #define DEBUG_MACRO_SECTION ".debug_macro"
4017 #endif
4018 #ifndef DEBUG_LTO_MACRO_SECTION
4019 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4020 #endif
4021 #ifndef DEBUG_DWO_MACRO_SECTION
4022 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4025 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4026 #endif
4027 #ifndef DEBUG_LINE_SECTION
4028 #define DEBUG_LINE_SECTION ".debug_line"
4029 #endif
4030 #ifndef DEBUG_LTO_LINE_SECTION
4031 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4032 #endif
4033 #ifndef DEBUG_DWO_LINE_SECTION
4034 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4037 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4038 #endif
4039 #ifndef DEBUG_LOC_SECTION
4040 #define DEBUG_LOC_SECTION ".debug_loc"
4041 #endif
4042 #ifndef DEBUG_DWO_LOC_SECTION
4043 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4044 #endif
4045 #ifndef DEBUG_LOCLISTS_SECTION
4046 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4047 #endif
4048 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4049 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4050 #endif
4051 #ifndef DEBUG_PUBNAMES_SECTION
4052 #define DEBUG_PUBNAMES_SECTION \
4053 ((debug_generate_pub_sections == 2) \
4054 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4055 #endif
4056 #ifndef DEBUG_PUBTYPES_SECTION
4057 #define DEBUG_PUBTYPES_SECTION \
4058 ((debug_generate_pub_sections == 2) \
4059 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4060 #endif
4061 #ifndef DEBUG_STR_OFFSETS_SECTION
4062 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4063 #endif
4064 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4065 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4066 #endif
4067 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4068 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4069 #endif
4070 #ifndef DEBUG_STR_SECTION
4071 #define DEBUG_STR_SECTION ".debug_str"
4072 #endif
4073 #ifndef DEBUG_LTO_STR_SECTION
4074 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4075 #endif
4076 #ifndef DEBUG_STR_DWO_SECTION
4077 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4078 #endif
4079 #ifndef DEBUG_LTO_STR_DWO_SECTION
4080 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4081 #endif
4082 #ifndef DEBUG_RANGES_SECTION
4083 #define DEBUG_RANGES_SECTION ".debug_ranges"
4084 #endif
4085 #ifndef DEBUG_RNGLISTS_SECTION
4086 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4087 #endif
4088 #ifndef DEBUG_LINE_STR_SECTION
4089 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4090 #endif
4091 #ifndef DEBUG_LTO_LINE_STR_SECTION
4092 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4093 #endif
4094
4095 /* Standard ELF section names for compiled code and data. */
4096 #ifndef TEXT_SECTION_NAME
4097 #define TEXT_SECTION_NAME ".text"
4098 #endif
4099
4100 /* Section flags for .debug_str section. */
4101 #define DEBUG_STR_SECTION_FLAGS \
4102 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4103 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4104 : SECTION_DEBUG)
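/* In the SECTION_MERGE case above, the trailing "| 1" records the
   mergeable entity size (1 byte, for strings of 1-byte characters) in
   the low bits of the section flags.  */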
4105
4106 /* Section flags for .debug_str.dwo section. */
4107 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4108
4109 /* Attribute used to refer to the macro section. */
4110 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4111 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4112
4113 /* Labels we insert at the beginning of sections so that we can refer to
4114 them instead of the section names themselves. */
4115
4116 #ifndef TEXT_SECTION_LABEL
4117 #define TEXT_SECTION_LABEL "Ltext"
4118 #endif
4119 #ifndef COLD_TEXT_SECTION_LABEL
4120 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4121 #endif
4122 #ifndef DEBUG_LINE_SECTION_LABEL
4123 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4124 #endif
4125 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4126 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4127 #endif
4128 #ifndef DEBUG_INFO_SECTION_LABEL
4129 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4130 #endif
4131 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4132 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4133 #endif
4134 #ifndef DEBUG_ABBREV_SECTION_LABEL
4135 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4136 #endif
4137 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4138 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4139 #endif
4140 #ifndef DEBUG_ADDR_SECTION_LABEL
4141 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4142 #endif
4143 #ifndef DEBUG_LOC_SECTION_LABEL
4144 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4145 #endif
4146 #ifndef DEBUG_RANGES_SECTION_LABEL
4147 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4148 #endif
4149 #ifndef DEBUG_MACINFO_SECTION_LABEL
4150 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4151 #endif
4152 #ifndef DEBUG_MACRO_SECTION_LABEL
4153 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4154 #endif
4155 #define SKELETON_COMP_DIE_ABBREV 1
4156 #define SKELETON_TYPE_DIE_ABBREV 2
4157
4158 /* Definitions of defaults for formats and names of various special
4159 (artificial) labels which may be generated within this file (when the -g
4160 option is used and DWARF2_DEBUGGING_INFO is in effect).
4161 If necessary, these may be overridden from within the tm.h file, but
4162 typically, overriding these defaults is unnecessary. */
4163
4164 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4179
4180 #ifndef TEXT_END_LABEL
4181 #define TEXT_END_LABEL "Letext"
4182 #endif
4183 #ifndef COLD_END_LABEL
4184 #define COLD_END_LABEL "Letext_cold"
4185 #endif
4186 #ifndef BLOCK_BEGIN_LABEL
4187 #define BLOCK_BEGIN_LABEL "LBB"
4188 #endif
4189 #ifndef BLOCK_INLINE_ENTRY_LABEL
4190 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4191 #endif
4192 #ifndef BLOCK_END_LABEL
4193 #define BLOCK_END_LABEL "LBE"
4194 #endif
4195 #ifndef LINE_CODE_LABEL
4196 #define LINE_CODE_LABEL "LM"
4197 #endif
4198
4199 \f
4200 /* Return the root of the DIEs built for the current compilation unit. */
4201 static dw_die_ref
4202 comp_unit_die (void)
4203 {
4204 if (!single_comp_unit_die)
4205 single_comp_unit_die = gen_compile_unit_die (NULL);
4206 return single_comp_unit_die;
4207 }
4208
4209 /* We allow a language front-end to designate a function that is to be
4210 called to "demangle" any name before it is put into a DIE. */
4211
4212 static const char *(*demangle_name_func) (const char *);
4213
4214 void
4215 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4216 {
4217 demangle_name_func = func;
4218 }
4219
4220 /* Test if rtl node points to a pseudo register. */
4221
4222 static inline int
4223 is_pseudo_reg (const_rtx rtl)
4224 {
4225 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4226 || (GET_CODE (rtl) == SUBREG
4227 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4228 }
4229
4230 /* Return a reference to a type, with its const and volatile qualifiers
4231 removed. */
4232
4233 static inline tree
4234 type_main_variant (tree type)
4235 {
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 /* ??? There really should be only one main variant among any group of
4239 variants of a given type (and all of the MAIN_VARIANT values for all
4240 members of the group should point to that one type) but sometimes the C
4241 front-end messes this up for array types, so we work around that bug
4242 here. */
4243 if (TREE_CODE (type) == ARRAY_TYPE)
4244 while (type != TYPE_MAIN_VARIANT (type))
4245 type = TYPE_MAIN_VARIANT (type);
4246
4247 return type;
4248 }
4249
4250 /* Return nonzero if the given type node represents a tagged type. */
4251
4252 static inline int
4253 is_tagged_type (const_tree type)
4254 {
4255 enum tree_code code = TREE_CODE (type);
4256
4257 return (code == RECORD_TYPE || code == UNION_TYPE
4258 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4259 }
4260
4261 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4262
4263 static void
4264 get_ref_die_offset_label (char *label, dw_die_ref ref)
4265 {
4266 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4267 }
4268
4269 /* Return die_offset of a DIE reference to a base type. */
4270
4271 static unsigned long int
4272 get_base_type_offset (dw_die_ref ref)
4273 {
4274 if (ref->die_offset)
4275 return ref->die_offset;
4276 if (comp_unit_die ()->die_abbrev)
4277 {
4278 calc_base_type_die_sizes ();
4279 gcc_assert (ref->die_offset);
4280 }
4281 return ref->die_offset;
4282 }
4283
4284 /* Return die_offset of a DIE reference other than base type. */
4285
4286 static unsigned long int
4287 get_ref_die_offset (dw_die_ref ref)
4288 {
4289 gcc_assert (ref->die_offset);
4290 return ref->die_offset;
4291 }
4292
4293 /* Convert a DIE tag into its string name. */
4294
4295 static const char *
4296 dwarf_tag_name (unsigned int tag)
4297 {
4298 const char *name = get_DW_TAG_name (tag);
4299
4300 if (name != NULL)
4301 return name;
4302
4303 return "DW_TAG_<unknown>";
4304 }
4305
4306 /* Convert a DWARF attribute code into its string name. */
4307
4308 static const char *
4309 dwarf_attr_name (unsigned int attr)
4310 {
4311 const char *name;
4312
4313 switch (attr)
4314 {
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_prologue:
4317 return "DW_AT_HP_prologue";
4318 #else
4319 case DW_AT_MIPS_loop_unroll_factor:
4320 return "DW_AT_MIPS_loop_unroll_factor";
4321 #endif
4322
4323 #if VMS_DEBUGGING_INFO
4324 case DW_AT_HP_epilogue:
4325 return "DW_AT_HP_epilogue";
4326 #else
4327 case DW_AT_MIPS_stride:
4328 return "DW_AT_MIPS_stride";
4329 #endif
4330 }
4331
4332 name = get_DW_AT_name (attr);
4333
4334 if (name != NULL)
4335 return name;
4336
4337 return "DW_AT_<unknown>";
4338 }
4339
4340 /* Convert a DWARF value form code into its string name. */
4341
4342 static const char *
4343 dwarf_form_name (unsigned int form)
4344 {
4345 const char *name = get_DW_FORM_name (form);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_FORM_<unknown>";
4351 }
4352 \f
4353 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4354 instance of an inlined instance of a decl which is local to an inline
4355 function, so we have to trace all of the way back through the origin chain
4356 to find out what sort of node actually served as the original seed for the
4357 given block. */
4358
4359 static tree
4360 decl_ultimate_origin (const_tree decl)
4361 {
4362 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4363 return NULL_TREE;
4364
4365 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4366 we're trying to output the abstract instance of this function. */
4367 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4368 return NULL_TREE;
4369
4370 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4371 most distant ancestor, this should never happen. */
4372 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4373
4374 return DECL_ABSTRACT_ORIGIN (decl);
4375 }
4376
4377 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4378 of a virtual function may refer to a base class, so we check the 'this'
4379 parameter. */
4380
4381 static tree
4382 decl_class_context (tree decl)
4383 {
4384 tree context = NULL_TREE;
4385
4386 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4387 context = DECL_CONTEXT (decl);
4388 else
4389 context = TYPE_MAIN_VARIANT
4390 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4391
4392 if (context && !TYPE_P (context))
4393 context = NULL_TREE;
4394
4395 return context;
4396 }
4397 \f
4398 /* Add an attribute/value pair to a DIE. */
4399
4400 static inline void
4401 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4402 {
4403 /* Maybe this should be an assert? */
4404 if (die == NULL)
4405 return;
4406
4407 if (flag_checking)
4408 {
4409 /* Check we do not add duplicate attrs. Can't use get_AT here
4410 because that recurses to the specification/abstract origin DIE. */
4411 dw_attr_node *a;
4412 unsigned ix;
4413 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4414 gcc_assert (a->dw_attr != attr->dw_attr);
4415 }
4416
4417 vec_safe_reserve (die->die_attr, 1);
4418 vec_safe_push (die->die_attr, *attr);
4419 }
4420
4421 static inline enum dw_val_class
4422 AT_class (dw_attr_node *a)
4423 {
4424 return a->dw_attr_val.val_class;
4425 }
4426
4427 /* Return the index for any attribute that will be referenced with a
4428 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4429 indices are stored in dw_attr_val.v.val_str for reference counting
4430 pruning. */
4431
4432 static inline unsigned int
4433 AT_index (dw_attr_node *a)
4434 {
4435 if (AT_class (a) == dw_val_class_str)
4436 return a->dw_attr_val.v.val_str->index;
4437 else if (a->dw_attr_val.val_entry != NULL)
4438 return a->dw_attr_val.val_entry->index;
4439 return NOT_INDEXED;
4440 }
4441
4442 /* Add a flag value attribute to a DIE. */
4443
4444 static inline void
4445 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4446 {
4447 dw_attr_node attr;
4448
4449 attr.dw_attr = attr_kind;
4450 attr.dw_attr_val.val_class = dw_val_class_flag;
4451 attr.dw_attr_val.val_entry = NULL;
4452 attr.dw_attr_val.v.val_flag = flag;
4453 add_dwarf_attr (die, &attr);
4454 }
4455
4456 static inline unsigned
4457 AT_flag (dw_attr_node *a)
4458 {
4459 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4460 return a->dw_attr_val.v.val_flag;
4461 }
4462
4463 /* Add a signed integer attribute value to a DIE. */
4464
4465 static inline void
4466 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4467 {
4468 dw_attr_node attr;
4469
4470 attr.dw_attr = attr_kind;
4471 attr.dw_attr_val.val_class = dw_val_class_const;
4472 attr.dw_attr_val.val_entry = NULL;
4473 attr.dw_attr_val.v.val_int = int_val;
4474 add_dwarf_attr (die, &attr);
4475 }
4476
4477 static inline HOST_WIDE_INT
4478 AT_int (dw_attr_node *a)
4479 {
4480 gcc_assert (a && (AT_class (a) == dw_val_class_const
4481 || AT_class (a) == dw_val_class_const_implicit));
4482 return a->dw_attr_val.v.val_int;
4483 }
4484
4485 /* Add an unsigned integer attribute value to a DIE. */
4486
4487 static inline void
4488 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4489 unsigned HOST_WIDE_INT unsigned_val)
4490 {
4491 dw_attr_node attr;
4492
4493 attr.dw_attr = attr_kind;
4494 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4495 attr.dw_attr_val.val_entry = NULL;
4496 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4497 add_dwarf_attr (die, &attr);
4498 }
4499
4500 static inline unsigned HOST_WIDE_INT
4501 AT_unsigned (dw_attr_node *a)
4502 {
4503 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4504 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4505 return a->dw_attr_val.v.val_unsigned;
4506 }
4507
4508 /* Add an unsigned wide integer attribute value to a DIE. */
4509
4510 static inline void
4511 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4512 const wide_int& w)
4513 {
4514 dw_attr_node attr;
4515
4516 attr.dw_attr = attr_kind;
4517 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4518 attr.dw_attr_val.val_entry = NULL;
4519 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4520 *attr.dw_attr_val.v.val_wide = w;
4521 add_dwarf_attr (die, &attr);
4522 }
4523
4524 /* Add an unsigned double integer attribute value to a DIE. */
4525
4526 static inline void
4527 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4528 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4529 {
4530 dw_attr_node attr;
4531
4532 attr.dw_attr = attr_kind;
4533 attr.dw_attr_val.val_class = dw_val_class_const_double;
4534 attr.dw_attr_val.val_entry = NULL;
4535 attr.dw_attr_val.v.val_double.high = high;
4536 attr.dw_attr_val.v.val_double.low = low;
4537 add_dwarf_attr (die, &attr);
4538 }
4539
4540 /* Add a vector (an array of LENGTH elements of ELT_SIZE bytes each) attribute value to a DIE. */
4541
4542 static inline void
4543 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4544 unsigned int length, unsigned int elt_size, unsigned char *array)
4545 {
4546 dw_attr_node attr;
4547
4548 attr.dw_attr = attr_kind;
4549 attr.dw_attr_val.val_class = dw_val_class_vec;
4550 attr.dw_attr_val.val_entry = NULL;
4551 attr.dw_attr_val.v.val_vec.length = length;
4552 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4553 attr.dw_attr_val.v.val_vec.array = array;
4554 add_dwarf_attr (die, &attr);
4555 }
4556
4557 /* Add an 8-byte data attribute value to a DIE. */
4558
4559 static inline void
4560 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4561 unsigned char data8[8])
4562 {
4563 dw_attr_node attr;
4564
4565 attr.dw_attr = attr_kind;
4566 attr.dw_attr_val.val_class = dw_val_class_data8;
4567 attr.dw_attr_val.val_entry = NULL;
4568 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4569 add_dwarf_attr (die, &attr);
4570 }
4571
4572 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4573 dwarf_split_debug_info, address attributes in dies destined for the
4574 final executable have force_direct set to avoid using indexed
4575 references. */
4576
4577 static inline void
4578 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4579 bool force_direct)
4580 {
4581 dw_attr_node attr;
4582 char * lbl_id;
4583
4584 lbl_id = xstrdup (lbl_low);
4585 attr.dw_attr = DW_AT_low_pc;
4586 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4587 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4588 if (dwarf_split_debug_info && !force_direct)
4589 attr.dw_attr_val.val_entry
4590 = add_addr_table_entry (lbl_id, ate_kind_label);
4591 else
4592 attr.dw_attr_val.val_entry = NULL;
4593 add_dwarf_attr (die, &attr);
4594
4595 attr.dw_attr = DW_AT_high_pc;
4596 if (dwarf_version < 4)
4597 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4598 else
4599 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4600 lbl_id = xstrdup (lbl_high);
4601 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4602 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4603 && dwarf_split_debug_info && !force_direct)
4604 attr.dw_attr_val.val_entry
4605 = add_addr_table_entry (lbl_id, ate_kind_label);
4606 else
4607 attr.dw_attr_val.val_entry = NULL;
4608 add_dwarf_attr (die, &attr);
4609 }
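/* Note that for DWARF 4 and later the DW_AT_high_pc attribute added above
   uses dw_val_class_high_pc and is ultimately emitted as an offset from
   DW_AT_low_pc (DWARF 4 allows DW_AT_high_pc to be a constant offset)
   rather than as a second address.  */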
4610
4611 /* Hash and equality functions for debug_str_hash. */
4612
4613 hashval_t
4614 indirect_string_hasher::hash (indirect_string_node *x)
4615 {
4616 return htab_hash_string (x->str);
4617 }
4618
4619 bool
4620 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4621 {
4622 return strcmp (x1->str, x2) == 0;
4623 }
4624
4625 /* Add STR to the given string hash table. */
4626
4627 static struct indirect_string_node *
4628 find_AT_string_in_table (const char *str,
4629 hash_table<indirect_string_hasher> *table)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* Already indirect is a no op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is shorter than or equal in size to a reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in .debug_str
4751 section, only put it into .debug_str if it is worth even in this
4752 single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
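/* As a concrete example of the inline heuristic above: with a 4-byte
   DWARF offset size, any string of three characters or fewer (four
   bytes including the terminating NUL) is always emitted inline as
   DW_FORM_string.  */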
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add an FDE reference attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_fde_index = targ_fde;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Add a location description attribute value to a DIE. */
4853
4854 static inline void
4855 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4856 {
4857 dw_attr_node attr;
4858
4859 attr.dw_attr = attr_kind;
4860 attr.dw_attr_val.val_class = dw_val_class_loc;
4861 attr.dw_attr_val.val_entry = NULL;
4862 attr.dw_attr_val.v.val_loc = loc;
4863 add_dwarf_attr (die, &attr);
4864 }
4865
4866 static inline dw_loc_descr_ref
4867 AT_loc (dw_attr_node *a)
4868 {
4869 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4870 return a->dw_attr_val.v.val_loc;
4871 }
4872
4873 static inline void
4874 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4875 {
4876 dw_attr_node attr;
4877
4878 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4879 return;
4880
4881 attr.dw_attr = attr_kind;
4882 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4883 attr.dw_attr_val.val_entry = NULL;
4884 attr.dw_attr_val.v.val_loc_list = loc_list;
4885 add_dwarf_attr (die, &attr);
4886 have_location_lists = true;
4887 }
4888
4889 static inline dw_loc_list_ref
4890 AT_loc_list (dw_attr_node *a)
4891 {
4892 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4893 return a->dw_attr_val.v.val_loc_list;
4894 }
4895
4896 /* Add a view list attribute to DIE. It must have a DW_AT_location
4897 attribute, because the view list complements the location list. */
4898
4899 static inline void
4900 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4901 {
4902 dw_attr_node attr;
4903
4904 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4905 return;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_view_list;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_view_list = die;
4911 add_dwarf_attr (die, &attr);
4912 gcc_checking_assert (get_AT (die, DW_AT_location));
4913 gcc_assert (have_location_lists);
4914 }
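/* The view-list attribute added above is expected to sit immediately
   after the DW_AT_location attribute of the same DIE; the l + 1 == a
   and (loc + 1)->dw_attr_val checks below depend on that adjacency.  */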
4915
4916 /* Return a pointer to the location list referenced by the attribute.
4917 If the named attribute is a view list, look up the corresponding
4918 DW_AT_location attribute and return its location list. */
4919
4920 static inline dw_loc_list_ref *
4921 AT_loc_list_ptr (dw_attr_node *a)
4922 {
4923 gcc_assert (a);
4924 switch (AT_class (a))
4925 {
4926 case dw_val_class_loc_list:
4927 return &a->dw_attr_val.v.val_loc_list;
4928 case dw_val_class_view_list:
4929 {
4930 dw_attr_node *l;
4931 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4932 if (!l)
4933 return NULL;
4934 gcc_checking_assert (l + 1 == a);
4935 return AT_loc_list_ptr (l);
4936 }
4937 default:
4938 gcc_unreachable ();
4939 }
4940 }
4941
4942 /* Return the location attribute value associated with a view list
4943 attribute value. */
4944
4945 static inline dw_val_node *
4946 view_list_to_loc_list_val_node (dw_val_node *val)
4947 {
4948 gcc_assert (val->val_class == dw_val_class_view_list);
4949 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4950 if (!loc)
4951 return NULL;
4952 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4953 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4954 return &loc->dw_attr_val;
4955 }
4956
4957 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4958 {
4959 static hashval_t hash (addr_table_entry *);
4960 static bool equal (addr_table_entry *, addr_table_entry *);
4961 };
4962
4963 /* Table of entries into the .debug_addr section. */
4964
4965 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4966
4967 /* Hash an address_table_entry. */
4968
4969 hashval_t
4970 addr_hasher::hash (addr_table_entry *a)
4971 {
4972 inchash::hash hstate;
4973 switch (a->kind)
4974 {
4975 case ate_kind_rtx:
4976 hstate.add_int (0);
4977 break;
4978 case ate_kind_rtx_dtprel:
4979 hstate.add_int (1);
4980 break;
4981 case ate_kind_label:
4982 return htab_hash_string (a->addr.label);
4983 default:
4984 gcc_unreachable ();
4985 }
4986 inchash::add_rtx (a->addr.rtl, hstate);
4987 return hstate.end ();
4988 }
4989
4990 /* Determine equality for two address_table_entries. */
4991
4992 bool
4993 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4994 {
4995 if (a1->kind != a2->kind)
4996 return 0;
4997 switch (a1->kind)
4998 {
4999 case ate_kind_rtx:
5000 case ate_kind_rtx_dtprel:
5001 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5002 case ate_kind_label:
5003 return strcmp (a1->addr.label, a2->addr.label) == 0;
5004 default:
5005 gcc_unreachable ();
5006 }
5007 }
5008
5009 /* Initialize an addr_table_entry. */
5010
5011 void
5012 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5013 {
5014 e->kind = kind;
5015 switch (kind)
5016 {
5017 case ate_kind_rtx:
5018 case ate_kind_rtx_dtprel:
5019 e->addr.rtl = (rtx) addr;
5020 break;
5021 case ate_kind_label:
5022 e->addr.label = (char *) addr;
5023 break;
5024 }
5025 e->refcount = 0;
5026 e->index = NO_INDEX_ASSIGNED;
5027 }
5028
5029 /* Add an address table entry to the table, creating it if necessary.
5030 Defer setting an index until output time. */
5031
5032 static addr_table_entry *
5033 add_addr_table_entry (void *addr, enum ate_kind kind)
5034 {
5035 addr_table_entry *node;
5036 addr_table_entry finder;
5037
5038 gcc_assert (dwarf_split_debug_info);
5039 if (! addr_index_table)
5040 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5041 init_addr_table_entry (&finder, kind, addr);
5042 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5043
5044 if (*slot == HTAB_EMPTY_ENTRY)
5045 {
5046 node = ggc_cleared_alloc<addr_table_entry> ();
5047 init_addr_table_entry (node, kind, addr);
5048 *slot = node;
5049 }
5050 else
5051 node = *slot;
5052
5053 node->refcount++;
5054 return node;
5055 }
5056
5057 /* Remove an entry from the addr table by decrementing its refcount.
5058 Strictly, decrementing the refcount would be enough, but the
5059 assertion that the entry is actually in the table has found
5060 bugs. */
5061
5062 static void
5063 remove_addr_table_entry (addr_table_entry *entry)
5064 {
5065 gcc_assert (dwarf_split_debug_info && addr_index_table);
5066 /* After an index is assigned, the table is frozen. */
5067 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5068 entry->refcount--;
5069 }
5070
5071 /* Given a location list, remove all addresses it refers to from the
5072 address_table. */
5073
5074 static void
5075 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5076 {
5077 for (; descr; descr = descr->dw_loc_next)
5078 if (descr->dw_loc_oprnd1.val_entry != NULL)
5079 {
5080 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5081 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5082 }
5083 }
5084
5085 /* A helper function for dwarf2out_finish called through
5086 htab_traverse. Assign an addr_table_entry its index. All entries
5087 must be collected into the table when this function is called,
5088 because the indexing code relies on htab_traverse to traverse nodes
5089 in the same order for each run. */
5090
5091 int
5092 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5093 {
5094 addr_table_entry *node = *h;
5095
5096 /* Don't index unreferenced nodes. */
5097 if (node->refcount == 0)
5098 return 1;
5099
5100 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5101 node->index = *index;
5102 *index += 1;
5103
5104 return 1;
5105 }
5106
5107 /* Add an address constant attribute value to a DIE. When using
5108 dwarf_split_debug_info, address attributes in dies destined for the
5109 final executable should be direct references--setting the parameter
5110 force_direct ensures this behavior. */
5111
5112 static inline void
5113 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5114 bool force_direct)
5115 {
5116 dw_attr_node attr;
5117
5118 attr.dw_attr = attr_kind;
5119 attr.dw_attr_val.val_class = dw_val_class_addr;
5120 attr.dw_attr_val.v.val_addr = addr;
5121 if (dwarf_split_debug_info && !force_direct)
5122 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5123 else
5124 attr.dw_attr_val.val_entry = NULL;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the RTX from an address DIE attribute. */
5129
5130 static inline rtx
5131 AT_addr (dw_attr_node *a)
5132 {
5133 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5134 return a->dw_attr_val.v.val_addr;
5135 }
5136
5137 /* Add a file attribute value to a DIE. */
5138
5139 static inline void
5140 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5141 struct dwarf_file_data *fd)
5142 {
5143 dw_attr_node attr;
5144
5145 attr.dw_attr = attr_kind;
5146 attr.dw_attr_val.val_class = dw_val_class_file;
5147 attr.dw_attr_val.val_entry = NULL;
5148 attr.dw_attr_val.v.val_file = fd;
5149 add_dwarf_attr (die, &attr);
5150 }
5151
5152 /* Get the dwarf_file_data from a file DIE attribute. */
5153
5154 static inline struct dwarf_file_data *
5155 AT_file (dw_attr_node *a)
5156 {
5157 gcc_assert (a && (AT_class (a) == dw_val_class_file
5158 || AT_class (a) == dw_val_class_file_implicit));
5159 return a->dw_attr_val.v.val_file;
5160 }
5161
5162 /* Add a vms delta attribute value to a DIE. */
5163
5164 static inline void
5165 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5166 const char *lbl1, const char *lbl2)
5167 {
5168 dw_attr_node attr;
5169
5170 attr.dw_attr = attr_kind;
5171 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5172 attr.dw_attr_val.val_entry = NULL;
5173 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5174 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a symbolic view identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *view_label)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_symview;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5190 add_dwarf_attr (die, &attr);
5191 }
5192
5193 /* Add a label identifier attribute value to a DIE. */
5194
5195 static inline void
5196 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5197 const char *lbl_id)
5198 {
5199 dw_attr_node attr;
5200
5201 attr.dw_attr = attr_kind;
5202 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5203 attr.dw_attr_val.val_entry = NULL;
5204 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5205 if (dwarf_split_debug_info)
5206 attr.dw_attr_val.val_entry
5207 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5208 ate_kind_label);
5209 add_dwarf_attr (die, &attr);
5210 }
5211
5212 /* Add a section offset attribute value to a DIE, an offset into the
5213 debug_line section. */
5214
5215 static inline void
5216 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 const char *label)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5223 attr.dw_attr_val.val_entry = NULL;
5224 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5225 add_dwarf_attr (die, &attr);
5226 }
5227
5228 /* Add a section offset attribute value to a DIE, an offset into the
5229 debug_loclists section. */
5230
5231 static inline void
5232 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5233 const char *label)
5234 {
5235 dw_attr_node attr;
5236
5237 attr.dw_attr = attr_kind;
5238 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5239 attr.dw_attr_val.val_entry = NULL;
5240 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5241 add_dwarf_attr (die, &attr);
5242 }
5243
5244 /* Add a section offset attribute value to a DIE, an offset into the
5245 debug_macinfo section. */
5246
5247 static inline void
5248 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5249 const char *label)
5250 {
5251 dw_attr_node attr;
5252
5253 attr.dw_attr = attr_kind;
5254 attr.dw_attr_val.val_class = dw_val_class_macptr;
5255 attr.dw_attr_val.val_entry = NULL;
5256 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5257 add_dwarf_attr (die, &attr);
5258 }
5259
5260 /* Add an offset attribute value to a DIE. */
5261
5262 static inline void
5263 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5264 unsigned HOST_WIDE_INT offset)
5265 {
5266 dw_attr_node attr;
5267
5268 attr.dw_attr = attr_kind;
5269 attr.dw_attr_val.val_class = dw_val_class_offset;
5270 attr.dw_attr_val.val_entry = NULL;
5271 attr.dw_attr_val.v.val_offset = offset;
5272 add_dwarf_attr (die, &attr);
5273 }
5274
5275 /* Add a range_list attribute value to a DIE. When using
5276 dwarf_split_debug_info, address attributes in dies destined for the
5277 final executable should be direct references--setting the parameter
5278 force_direct ensures this behavior. */
5279
5280 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5281 #define RELOCATED_OFFSET (NULL)
5282
5283 static void
5284 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5285 long unsigned int offset, bool force_direct)
5286 {
5287 dw_attr_node attr;
5288
5289 attr.dw_attr = attr_kind;
5290 attr.dw_attr_val.val_class = dw_val_class_range_list;
5291 /* For the range_list attribute, use val_entry to store whether the
5292 offset should follow split-debug-info or normal semantics. This
5293 value is read in output_range_list_offset. */
5294 if (dwarf_split_debug_info && !force_direct)
5295 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5296 else
5297 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5298 attr.dw_attr_val.v.val_offset = offset;
5299 add_dwarf_attr (die, &attr);
5300 }
5301
5302 /* Return the start label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta1 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl1;
5309 }
5310
5311 /* Return the end label of a delta attribute. */
5312
5313 static inline const char *
5314 AT_vms_delta2 (dw_attr_node *a)
5315 {
5316 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5317 return a->dw_attr_val.v.val_vms_delta.lbl2;
5318 }
5319
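/* Return the label string of an attribute whose value class carries a
   label (lbl_id, lineptr, macptr, loclistsptr or high_pc).  */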
5320 static inline const char *
5321 AT_lbl (dw_attr_node *a)
5322 {
5323 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5324 || AT_class (a) == dw_val_class_lineptr
5325 || AT_class (a) == dw_val_class_macptr
5326 || AT_class (a) == dw_val_class_loclistsptr
5327 || AT_class (a) == dw_val_class_high_pc));
5328 return a->dw_attr_val.v.val_lbl_id;
5329 }
5330
5331 /* Get the attribute of type attr_kind. */
5332
5333 static dw_attr_node *
5334 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5335 {
5336 dw_attr_node *a;
5337 unsigned ix;
5338 dw_die_ref spec = NULL;
5339
5340 if (! die)
5341 return NULL;
5342
5343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5344 if (a->dw_attr == attr_kind)
5345 return a;
5346 else if (a->dw_attr == DW_AT_specification
5347 || a->dw_attr == DW_AT_abstract_origin)
5348 spec = AT_ref (a);
5349
5350 if (spec)
5351 return get_AT (spec, attr_kind);
5352
5353 return NULL;
5354 }
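/* Illustrative note (editor's addition): because of the
   DW_AT_specification / DW_AT_abstract_origin indirection above, asking
   e.g. get_AT_string (die, DW_AT_name) on the out-of-class definition
   DIE of a C++ member function may return the name stored on its
   in-class declaration DIE rather than an attribute of DIE itself.  */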
5355
5356 /* Returns the parent of the declaration of DIE. */
5357
5358 static dw_die_ref
5359 get_die_parent (dw_die_ref die)
5360 {
5361 dw_die_ref t;
5362
5363 if (!die)
5364 return NULL;
5365
5366 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5367 || (t = get_AT_ref (die, DW_AT_specification)))
5368 die = t;
5369
5370 return die->die_parent;
5371 }
5372
5373 /* Return the "low pc" attribute value, typically associated with a subprogram
5374 DIE. Return null if the "low pc" attribute is either not present, or if it
5375 cannot be represented as an assembler label identifier. */
5376
5377 static inline const char *
5378 get_AT_low_pc (dw_die_ref die)
5379 {
5380 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5381
5382 return a ? AT_lbl (a) : NULL;
5383 }
5384
5385 /* Return the "high pc" attribute value, typically associated with a subprogram
5386 DIE. Return null if the "high pc" attribute is either not present, or if it
5387 cannot be represented as an assembler label identifier. */
5388
5389 static inline const char *
5390 get_AT_hi_pc (dw_die_ref die)
5391 {
5392 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5393
5394 return a ? AT_lbl (a) : NULL;
5395 }
5396
5397 /* Return the value of the string attribute designated by ATTR_KIND, or
5398 NULL if it is not present. */
5399
5400 static inline const char *
5401 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5402 {
5403 dw_attr_node *a = get_AT (die, attr_kind);
5404
5405 return a ? AT_string (a) : NULL;
5406 }
5407
5408 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5409 if it is not present. */
5410
5411 static inline int
5412 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5413 {
5414 dw_attr_node *a = get_AT (die, attr_kind);
5415
5416 return a ? AT_flag (a) : 0;
5417 }
5418
5419 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5420 if it is not present. */
5421
5422 static inline unsigned
5423 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5424 {
5425 dw_attr_node *a = get_AT (die, attr_kind);
5426
5427 return a ? AT_unsigned (a) : 0;
5428 }
5429
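/* Return the DIE reference attribute value designated by ATTR_KIND, or
   NULL if it is not present.  */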
5430 static inline dw_die_ref
5431 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5432 {
5433 dw_attr_node *a = get_AT (die, attr_kind);
5434
5435 return a ? AT_ref (a) : NULL;
5436 }
5437
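/* Return the dwarf_file_data of the file attribute designated by
   ATTR_KIND, or NULL if it is not present.  */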
5438 static inline struct dwarf_file_data *
5439 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5440 {
5441 dw_attr_node *a = get_AT (die, attr_kind);
5442
5443 return a ? AT_file (a) : NULL;
5444 }
5445
5446 /* Return TRUE if the language is C++. */
5447
5448 static inline bool
5449 is_cxx (void)
5450 {
5451 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5452
5453 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5454 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5455 }
5456
5457 /* Return TRUE if DECL was created by the C++ frontend. */
5458
5459 static bool
5460 is_cxx (const_tree decl)
5461 {
5462 if (in_lto_p)
5463 {
5464 const_tree context = get_ultimate_context (decl);
5465 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5466 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5467 }
5468 return is_cxx ();
5469 }
5470
5471 /* Return TRUE if the language is Fortran. */
5472
5473 static inline bool
5474 is_fortran (void)
5475 {
5476 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5477
5478 return (lang == DW_LANG_Fortran77
5479 || lang == DW_LANG_Fortran90
5480 || lang == DW_LANG_Fortran95
5481 || lang == DW_LANG_Fortran03
5482 || lang == DW_LANG_Fortran08);
5483 }
5484
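/* Return TRUE if DECL was created by the Fortran frontend.  */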
5485 static inline bool
5486 is_fortran (const_tree decl)
5487 {
5488 if (in_lto_p)
5489 {
5490 const_tree context = get_ultimate_context (decl);
5491 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5492 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5493 "GNU Fortran", 11) == 0
5494 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5495 "GNU F77") == 0);
5496 }
5497 return is_fortran ();
5498 }
5499
5500 /* Return TRUE if the language is Ada. */
5501
5502 static inline bool
5503 is_ada (void)
5504 {
5505 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5506
5507 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5508 }
5509
5510 /* Remove the specified attribute if present. Return TRUE if removal
5511 was successful. */
5512
5513 static bool
5514 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5515 {
5516 dw_attr_node *a;
5517 unsigned ix;
5518
5519 if (! die)
5520 return false;
5521
5522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5523 if (a->dw_attr == attr_kind)
5524 {
5525 if (AT_class (a) == dw_val_class_str)
5526 if (a->dw_attr_val.v.val_str->refcount)
5527 a->dw_attr_val.v.val_str->refcount--;
5528
5529 /* vec::ordered_remove should help reduce the number of abbrevs
5530 that are needed. */
5531 die->die_attr->ordered_remove (ix);
5532 return true;
5533 }
5534 return false;
5535 }
5536
5537 /* Remove CHILD from its parent. PREV must have the property that
5538 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5539
5540 static void
5541 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5542 {
5543 gcc_assert (child->die_parent == prev->die_parent);
5544 gcc_assert (prev->die_sib == child);
5545 if (prev == child)
5546 {
5547 gcc_assert (child->die_parent->die_child == child);
5548 prev = NULL;
5549 }
5550 else
5551 prev->die_sib = child->die_sib;
5552 if (child->die_parent->die_child == child)
5553 child->die_parent->die_child = prev;
5554 child->die_sib = NULL;
5555 }
5556
5557 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5558 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5559
5560 static void
5561 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5562 {
5563 dw_die_ref parent = old_child->die_parent;
5564
5565 gcc_assert (parent == prev->die_parent);
5566 gcc_assert (prev->die_sib == old_child);
5567
5568 new_child->die_parent = parent;
5569 if (prev == old_child)
5570 {
5571 gcc_assert (parent->die_child == old_child);
5572 new_child->die_sib = new_child;
5573 }
5574 else
5575 {
5576 prev->die_sib = new_child;
5577 new_child->die_sib = old_child->die_sib;
5578 }
5579 if (old_child->die_parent->die_child == old_child)
5580 old_child->die_parent->die_child = new_child;
5581 old_child->die_sib = NULL;
5582 }
5583
5584 /* Move all children from OLD_PARENT to NEW_PARENT. */
5585
5586 static void
5587 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5588 {
5589 dw_die_ref c;
5590 new_parent->die_child = old_parent->die_child;
5591 old_parent->die_child = NULL;
5592 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5593 }
5594
5595 /* Remove from DIE all children whose die_tag is TAG. Do nothing if no
5596 child matches TAG. */
5597
5598 static void
5599 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5600 {
5601 dw_die_ref c;
5602
5603 c = die->die_child;
5604 if (c) do {
5605 dw_die_ref prev = c;
5606 c = c->die_sib;
5607 while (c->die_tag == tag)
5608 {
5609 remove_child_with_prev (c, prev);
5610 c->die_parent = NULL;
5611 /* Might have removed every child. */
5612 if (die->die_child == NULL)
5613 return;
5614 c = prev->die_sib;
5615 }
5616 } while (c != die->die_child);
5617 }
5618
5619 /* Add a CHILD_DIE as the last child of DIE. */
5620
5621 static void
5622 add_child_die (dw_die_ref die, dw_die_ref child_die)
5623 {
5624 /* FIXME this should probably be an assert. */
5625 if (! die || ! child_die)
5626 return;
5627 gcc_assert (die != child_die);
5628
5629 child_die->die_parent = die;
5630 if (die->die_child)
5631 {
5632 child_die->die_sib = die->die_child->die_sib;
5633 die->die_child->die_sib = child_die;
5634 }
5635 else
5636 child_die->die_sib = child_die;
5637 die->die_child = child_die;
5638 }
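/* Illustrative note (editor's addition, not from the original sources):
   the children of a DIE form a circular singly-linked list through
   die_sib, and die_parent->die_child points at the most recently added
   (i.e. last) child, so die->die_child->die_sib is the first child.
   For example, after adding A, B and C to parent P in that order:

     P->die_child == C,  C->die_sib == A,  A->die_sib == B,  B->die_sib == C

   which is why walks such as the loop in remove_child_TAG above start
   from die_child and immediately step to die_child->die_sib.  */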
5639
5640 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5641
5642 static void
5643 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5644 dw_die_ref after_die)
5645 {
5646 gcc_assert (die
5647 && child_die
5648 && after_die
5649 && die->die_child
5650 && die != child_die);
5651
5652 child_die->die_parent = die;
5653 child_die->die_sib = after_die->die_sib;
5654 after_die->die_sib = child_die;
5655 if (die->die_child == after_die)
5656 die->die_child = child_die;
5657 }
5658
5659 /* Unassociate CHILD from its parent, and make its parent be
5660 NEW_PARENT. */
5661
5662 static void
5663 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5664 {
5665 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5666 if (p->die_sib == child)
5667 {
5668 remove_child_with_prev (child, p);
5669 break;
5670 }
5671 add_child_die (new_parent, child);
5672 }
5673
5674 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5675 is the specification, to the end of PARENT's list of children.
5676 This is done by removing and re-adding it. */
5677
5678 static void
5679 splice_child_die (dw_die_ref parent, dw_die_ref child)
5680 {
5681 /* We want the declaration DIE from inside the class, not the
5682 specification DIE at toplevel. */
5683 if (child->die_parent != parent)
5684 {
5685 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5686
5687 if (tmp)
5688 child = tmp;
5689 }
5690
5691 gcc_assert (child->die_parent == parent
5692 || (child->die_parent
5693 == get_AT_ref (parent, DW_AT_specification)));
5694
5695 reparent_child (child, parent);
5696 }
5697
5698 /* Create and return a new die with TAG_VALUE as tag. */
5699
5700 static inline dw_die_ref
5701 new_die_raw (enum dwarf_tag tag_value)
5702 {
5703 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5704 die->die_tag = tag_value;
5705 return die;
5706 }
5707
5708 /* Create and return a new die with a parent of PARENT_DIE. If
5709 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5710 associated tree T must be supplied to determine parenthood
5711 later. */
5712
5713 static inline dw_die_ref
5714 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5715 {
5716 dw_die_ref die = new_die_raw (tag_value);
5717
5718 if (parent_die != NULL)
5719 add_child_die (parent_die, die);
5720 else
5721 {
5722 limbo_die_node *limbo_node;
5723
5724 /* No DIEs created after early dwarf should end up in limbo,
5725 because the limbo list should not persist past LTO
5726 streaming. */
5727 if (tag_value != DW_TAG_compile_unit
5728 /* These are allowed because they're generated while
5729 breaking out COMDAT units late. */
5730 && tag_value != DW_TAG_type_unit
5731 && tag_value != DW_TAG_skeleton_unit
5732 && !early_dwarf
5733 /* Allow nested functions to live in limbo because they will
5734 only temporarily live there, as decls_for_scope will fix
5735 them up. */
5736 && (TREE_CODE (t) != FUNCTION_DECL
5737 || !decl_function_context (t))
5738 /* Same as nested functions above but for types. Types that
5739 are local to a function will be fixed in
5740 decls_for_scope. */
5741 && (!RECORD_OR_UNION_TYPE_P (t)
5742 || !TYPE_CONTEXT (t)
5743 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5744 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5745 especially in the ltrans stage, but once we implement LTO
5746 dwarf streaming, we should remove this exception. */
5747 && !in_lto_p)
5748 {
5749 fprintf (stderr, "symbol ended up in limbo too late:");
5750 debug_generic_stmt (t);
5751 gcc_unreachable ();
5752 }
5753
5754 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5755 limbo_node->die = die;
5756 limbo_node->created_for = t;
5757 limbo_node->next = limbo_die_list;
5758 limbo_die_list = limbo_node;
5759 }
5760
5761 return die;
5762 }
5763
5764 /* Return the DIE associated with the given type specifier. */
5765
5766 static inline dw_die_ref
5767 lookup_type_die (tree type)
5768 {
5769 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5770 if (die && die->removed)
5771 {
5772 TYPE_SYMTAB_DIE (type) = NULL;
5773 return NULL;
5774 }
5775 return die;
5776 }
5777
5778 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5779 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5780 anonymous type instead of the one of the naming typedef. */
5781
5782 static inline dw_die_ref
5783 strip_naming_typedef (tree type, dw_die_ref type_die)
5784 {
5785 if (type
5786 && TREE_CODE (type) == RECORD_TYPE
5787 && type_die
5788 && type_die->die_tag == DW_TAG_typedef
5789 && is_naming_typedef_decl (TYPE_NAME (type)))
5790 type_die = get_AT_ref (type_die, DW_AT_type);
5791 return type_die;
5792 }
5793
5794 /* Like lookup_type_die, but if type is an anonymous type named by a
5795 typedef[1], return the DIE of the anonymous type instead of the one
5796 of the naming typedef. This is because in gen_typedef_die we
5797 equated the anonymous struct named by the typedef with the DIE of
5798 the naming typedef. So by default, lookup_type_die on an anonymous
5799 struct yields the DIE of the naming typedef.
5800
5801 [1]: Read the comment of is_naming_typedef_decl to learn about what
5802 a naming typedef is. */
5803
5804 static inline dw_die_ref
5805 lookup_type_die_strip_naming_typedef (tree type)
5806 {
5807 dw_die_ref die = lookup_type_die (type);
5808 return strip_naming_typedef (type, die);
5809 }
5810
5811 /* Equate a DIE to a given type specifier. */
5812
5813 static inline void
5814 equate_type_number_to_die (tree type, dw_die_ref type_die)
5815 {
5816 TYPE_SYMTAB_DIE (type) = type_die;
5817 }
5818
5819 /* Returns a hash value for X (which really is a die_struct). */
5820
5821 inline hashval_t
5822 decl_die_hasher::hash (die_node *x)
5823 {
5824 return (hashval_t) x->decl_id;
5825 }
5826
5827 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5828
5829 inline bool
5830 decl_die_hasher::equal (die_node *x, tree y)
5831 {
5832 return (x->decl_id == DECL_UID (y));
5833 }
5834
5835 /* Return the DIE associated with a given declaration. */
5836
5837 static inline dw_die_ref
5838 lookup_decl_die (tree decl)
5839 {
5840 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5841 NO_INSERT);
5842 if (!die)
5843 return NULL;
5844 if ((*die)->removed)
5845 {
5846 decl_die_table->clear_slot (die);
5847 return NULL;
5848 }
5849 return *die;
5850 }
5851
5852
5853 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5854 style reference. Return true if we found one referring to a DIE for
5855 DECL, otherwise return false. */
5856
5857 static bool
5858 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5859 unsigned HOST_WIDE_INT *off)
5860 {
5861 dw_die_ref die;
5862
5863 if (in_lto_p && !decl_die_table)
5864 return false;
5865
5866 if (TREE_CODE (decl) == BLOCK)
5867 die = BLOCK_DIE (decl);
5868 else
5869 die = lookup_decl_die (decl);
5870 if (!die)
5871 return false;
5872
5873 /* During WPA stage and incremental linking we currently use DIEs
5874 to store the decl <-> label + offset map. That's quite inefficient
5875 but it works for now. */
5876 if (in_lto_p)
5877 {
5878 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5879 if (!ref)
5880 {
5881 gcc_assert (die == comp_unit_die ());
5882 return false;
5883 }
5884 *off = ref->die_offset;
5885 *sym = ref->die_id.die_symbol;
5886 return true;
5887 }
5888
5889 /* Similar to get_ref_die_offset_label, but using the "correct"
5890 label. */
5891 *off = die->die_offset;
5892 while (die->die_parent)
5893 die = die->die_parent;
5894 /* For the containing CU DIE we compute a die_symbol in
5895 compute_comp_unit_symbol. */
5896 gcc_assert (die->die_tag == DW_TAG_compile_unit
5897 && die->die_id.die_symbol != NULL);
5898 *sym = die->die_id.die_symbol;
5899 return true;
5900 }
5901
5902 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5903
5904 static void
5905 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5906 const char *symbol, HOST_WIDE_INT offset)
5907 {
5908 /* Create a fake DIE that contains the reference. Don't use
5909 new_die because we don't want to end up in the limbo list. */
5910 dw_die_ref ref = new_die_raw (die->die_tag);
5911 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5912 ref->die_offset = offset;
5913 ref->with_offset = 1;
5914 add_AT_die_ref (die, attr_kind, ref);
5915 }
5916
5917 /* Create a DIE for DECL if required and add a reference to a DIE
5918 at SYMBOL + OFFSET which contains attributes dumped early. */
5919
5920 static void
5921 dwarf2out_register_external_die (tree decl, const char *sym,
5922 unsigned HOST_WIDE_INT off)
5923 {
5924 if (debug_info_level == DINFO_LEVEL_NONE)
5925 return;
5926
5927 if ((flag_wpa
5928 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5929 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5930
5931 dw_die_ref die
5932 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5933 gcc_assert (!die);
5934
5935 tree ctx;
5936 dw_die_ref parent = NULL;
5937 /* Need to look up a DIE for the decl's context - the containing
5938 function or translation unit. */
5939 if (TREE_CODE (decl) == BLOCK)
5940 {
5941 ctx = BLOCK_SUPERCONTEXT (decl);
5942 /* ??? We do not output DIEs for all scopes thus skip as
5943 many DIEs as needed. */
5944 while (TREE_CODE (ctx) == BLOCK
5945 && !BLOCK_DIE (ctx))
5946 ctx = BLOCK_SUPERCONTEXT (ctx);
5947 }
5948 else
5949 ctx = DECL_CONTEXT (decl);
5950 /* Peel types in the context stack. */
5951 while (ctx && TYPE_P (ctx))
5952 ctx = TYPE_CONTEXT (ctx);
5953 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5954 if (debug_info_level <= DINFO_LEVEL_TERSE)
5955 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5956 ctx = DECL_CONTEXT (ctx);
5957 if (ctx)
5958 {
5959 if (TREE_CODE (ctx) == BLOCK)
5960 parent = BLOCK_DIE (ctx);
5961 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5962 /* Keep the 1:1 association during WPA. */
5963 && !flag_wpa
5964 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5965 /* Otherwise all late annotations go to the main CU which
5966 imports the original CUs. */
5967 parent = comp_unit_die ();
5968 else if (TREE_CODE (ctx) == FUNCTION_DECL
5969 && TREE_CODE (decl) != FUNCTION_DECL
5970 && TREE_CODE (decl) != PARM_DECL
5971 && TREE_CODE (decl) != RESULT_DECL
5972 && TREE_CODE (decl) != BLOCK)
5973 /* Leave function local entities parent determination to when
5974 we process scope vars. */
5975 ;
5976 else
5977 parent = lookup_decl_die (ctx);
5978 }
5979 else
5980 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5981 Handle this case gracefully by globalizing stuff. */
5982 parent = comp_unit_die ();
5983 /* Create a DIE "stub". */
5984 switch (TREE_CODE (decl))
5985 {
5986 case TRANSLATION_UNIT_DECL:
5987 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5988 {
5989 die = comp_unit_die ();
5990 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5991 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5992 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5993 to create a DIE for the original CUs. */
5994 return;
5995 }
5996 /* Keep the 1:1 association during WPA. */
5997 die = new_die (DW_TAG_compile_unit, NULL, decl);
5998 break;
5999 case NAMESPACE_DECL:
6000 if (is_fortran (decl))
6001 die = new_die (DW_TAG_module, parent, decl);
6002 else
6003 die = new_die (DW_TAG_namespace, parent, decl);
6004 break;
6005 case FUNCTION_DECL:
6006 die = new_die (DW_TAG_subprogram, parent, decl);
6007 break;
6008 case VAR_DECL:
6009 die = new_die (DW_TAG_variable, parent, decl);
6010 break;
6011 case RESULT_DECL:
6012 die = new_die (DW_TAG_variable, parent, decl);
6013 break;
6014 case PARM_DECL:
6015 die = new_die (DW_TAG_formal_parameter, parent, decl);
6016 break;
6017 case CONST_DECL:
6018 die = new_die (DW_TAG_constant, parent, decl);
6019 break;
6020 case LABEL_DECL:
6021 die = new_die (DW_TAG_label, parent, decl);
6022 break;
6023 case BLOCK:
6024 die = new_die (DW_TAG_lexical_block, parent, decl);
6025 break;
6026 default:
6027 gcc_unreachable ();
6028 }
6029 if (TREE_CODE (decl) == BLOCK)
6030 BLOCK_DIE (decl) = die;
6031 else
6032 equate_decl_number_to_die (decl, die);
6033
6034 add_desc_attribute (die, decl);
6035
6036 /* Add a reference to the DIE providing early debug at $sym + off. */
6037 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6038 }
6039
6040 /* Returns a hash value for X (which really is a var_loc_list). */
6041
6042 inline hashval_t
6043 decl_loc_hasher::hash (var_loc_list *x)
6044 {
6045 return (hashval_t) x->decl_id;
6046 }
6047
6048 /* Return nonzero if decl_id of var_loc_list X is the same as
6049 UID of decl *Y. */
6050
6051 inline bool
6052 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6053 {
6054 return (x->decl_id == DECL_UID (y));
6055 }
6056
6057 /* Return the var_loc list associated with a given declaration. */
6058
6059 static inline var_loc_list *
6060 lookup_decl_loc (const_tree decl)
6061 {
6062 if (!decl_loc_table)
6063 return NULL;
6064 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6065 }
6066
6067 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6068
6069 inline hashval_t
6070 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6071 {
6072 return (hashval_t) x->decl_id;
6073 }
6074
6075 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6076 UID of decl *Y. */
6077
6078 inline bool
6079 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6080 {
6081 return (x->decl_id == DECL_UID (y));
6082 }
6083
6084 /* Equate a DIE to a particular declaration. */
6085
6086 static void
6087 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6088 {
6089 unsigned int decl_id = DECL_UID (decl);
6090
6091 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6092 decl_die->decl_id = decl_id;
6093 }
6094
6095 /* Return how many bits the PIECE EXPR_LIST covers. */
6096
6097 static HOST_WIDE_INT
6098 decl_piece_bitsize (rtx piece)
6099 {
6100 int ret = (int) GET_MODE (piece);
6101 if (ret)
6102 return ret;
6103 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6104 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6105 return INTVAL (XEXP (XEXP (piece, 0), 0));
6106 }
6107
6108 /* Return a pointer to the location note stored in PIECE EXPR_LIST. */
6109
6110 static rtx *
6111 decl_piece_varloc_ptr (rtx piece)
6112 {
6113 if ((int) GET_MODE (piece))
6114 return &XEXP (piece, 0);
6115 else
6116 return &XEXP (XEXP (piece, 0), 1);
6117 }
6118
6119 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6120 Next is the chain of following piece nodes. */
6121
6122 static rtx_expr_list *
6123 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6124 {
6125 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6126 return alloc_EXPR_LIST (bitsize, loc_note, next);
6127 else
6128 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6129 GEN_INT (bitsize),
6130 loc_note), next);
6131 }
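/* Illustrative note (editor's addition, not from the original sources):
   in this encoding the EXPR_LIST node's mode field is reused to hold the
   piece bitsize whenever 0 < bitsize <= MAX_MACHINE_MODE; otherwise the
   bitsize is stashed as a CONST_INT inside a CONCAT.  A variable tracked
   as two 32-bit halves would therefore be represented, schematically, as
   an EXPR_LIST whose mode field holds 32 and whose XEXP (node, 0) is the
   location note for bits 0..31, chained to a second such node for bits
   32..63.  decl_piece_bitsize and decl_piece_varloc_ptr above decode
   either form transparently.  */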
6132
6133 /* Return rtx that should be stored into loc field for
6134 LOC_NOTE and BITPOS/BITSIZE. */
6135
6136 static rtx
6137 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6138 HOST_WIDE_INT bitsize)
6139 {
6140 if (bitsize != -1)
6141 {
6142 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6143 if (bitpos != 0)
6144 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6145 }
6146 return loc_note;
6147 }
6148
6149 /* This function either modifies the location piece list *DEST in
6150 place (if SRC and INNER are NULL), or copies the location piece list
6151 *SRC to *DEST while modifying it. The piece at location BITPOS is
6152 changed to contain LOC_NOTE; any pieces overlapping it are removed
6153 (or, when copying, not copied) and, if needed, some padding around
6154 it is added. When modifying in place, DEST should point to the
6155 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when copying,
6156 SRC points to the start of the whole list and INNER points to the
6157 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6158
6159 static void
6160 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6161 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6162 HOST_WIDE_INT bitsize, rtx loc_note)
6163 {
6164 HOST_WIDE_INT diff;
6165 bool copy = inner != NULL;
6166
6167 if (copy)
6168 {
6169 /* First copy all nodes preceding the current bitpos. */
6170 while (src != inner)
6171 {
6172 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6173 decl_piece_bitsize (*src), NULL_RTX);
6174 dest = &XEXP (*dest, 1);
6175 src = &XEXP (*src, 1);
6176 }
6177 }
6178 /* Add padding if needed. */
6179 if (bitpos != piece_bitpos)
6180 {
6181 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6182 copy ? NULL_RTX : *dest);
6183 dest = &XEXP (*dest, 1);
6184 }
6185 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6186 {
6187 gcc_assert (!copy);
6188 /* A piece with the correct bitpos and bitsize already exists;
6189 just update the location for it and return. */
6190 *decl_piece_varloc_ptr (*dest) = loc_note;
6191 return;
6192 }
6193 /* Add the piece that changed. */
6194 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6195 dest = &XEXP (*dest, 1);
6196 /* Skip over pieces that overlap it. */
6197 diff = bitpos - piece_bitpos + bitsize;
6198 if (!copy)
6199 src = dest;
6200 while (diff > 0 && *src)
6201 {
6202 rtx piece = *src;
6203 diff -= decl_piece_bitsize (piece);
6204 if (copy)
6205 src = &XEXP (piece, 1);
6206 else
6207 {
6208 *src = XEXP (piece, 1);
6209 free_EXPR_LIST_node (piece);
6210 }
6211 }
6212 /* Add padding if needed. */
6213 if (diff < 0 && *src)
6214 {
6215 if (!copy)
6216 dest = src;
6217 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6218 dest = &XEXP (*dest, 1);
6219 }
6220 if (!copy)
6221 return;
6222 /* Finally copy all nodes following it. */
6223 while (*src)
6224 {
6225 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6226 decl_piece_bitsize (*src), NULL_RTX);
6227 dest = &XEXP (*dest, 1);
6228 src = &XEXP (*src, 1);
6229 }
6230 }
6231
6232 /* Add a variable location node to the linked list for DECL. */
6233
6234 static struct var_loc_node *
6235 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6236 {
6237 unsigned int decl_id;
6238 var_loc_list *temp;
6239 struct var_loc_node *loc = NULL;
6240 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6241
6242 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6243 {
6244 tree realdecl = DECL_DEBUG_EXPR (decl);
6245 if (handled_component_p (realdecl)
6246 || (TREE_CODE (realdecl) == MEM_REF
6247 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6248 {
6249 bool reverse;
6250 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6251 &bitsize, &reverse);
6252 if (!innerdecl
6253 || !DECL_P (innerdecl)
6254 || DECL_IGNORED_P (innerdecl)
6255 || TREE_STATIC (innerdecl)
6256 || bitsize == 0
6257 || bitpos + bitsize > 256)
6258 return NULL;
6259 decl = innerdecl;
6260 }
6261 }
6262
6263 decl_id = DECL_UID (decl);
6264 var_loc_list **slot
6265 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6266 if (*slot == NULL)
6267 {
6268 temp = ggc_cleared_alloc<var_loc_list> ();
6269 temp->decl_id = decl_id;
6270 *slot = temp;
6271 }
6272 else
6273 temp = *slot;
6274
6275 /* For PARM_DECLs try to keep around the original incoming value,
6276 even if that means we'll emit a zero-range .debug_loc entry. */
6277 if (temp->last
6278 && temp->first == temp->last
6279 && TREE_CODE (decl) == PARM_DECL
6280 && NOTE_P (temp->first->loc)
6281 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6282 && DECL_INCOMING_RTL (decl)
6283 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6284 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6285 == GET_CODE (DECL_INCOMING_RTL (decl))
6286 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6287 && (bitsize != -1
6288 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6289 NOTE_VAR_LOCATION_LOC (loc_note))
6290 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6291 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6292 {
6293 loc = ggc_cleared_alloc<var_loc_node> ();
6294 temp->first->next = loc;
6295 temp->last = loc;
6296 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6297 }
6298 else if (temp->last)
6299 {
6300 struct var_loc_node *last = temp->last, *unused = NULL;
6301 rtx *piece_loc = NULL, last_loc_note;
6302 HOST_WIDE_INT piece_bitpos = 0;
6303 if (last->next)
6304 {
6305 last = last->next;
6306 gcc_assert (last->next == NULL);
6307 }
6308 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6309 {
6310 piece_loc = &last->loc;
6311 do
6312 {
6313 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6314 if (piece_bitpos + cur_bitsize > bitpos)
6315 break;
6316 piece_bitpos += cur_bitsize;
6317 piece_loc = &XEXP (*piece_loc, 1);
6318 }
6319 while (*piece_loc);
6320 }
6321 /* TEMP->LAST here is a pointer either to the last-but-one or to the
6322 last element in the chained list; LAST is a pointer to the
6323 last element. */
6324 if (label && strcmp (last->label, label) == 0 && last->view == view)
6325 {
6326 /* For SRA-optimized variables, if there weren't any real
6327 insns since the last note, just modify the last node. */
6328 if (piece_loc != NULL)
6329 {
6330 adjust_piece_list (piece_loc, NULL, NULL,
6331 bitpos, piece_bitpos, bitsize, loc_note);
6332 return NULL;
6333 }
6334 /* If the last note doesn't cover any instructions, remove it. */
6335 if (temp->last != last)
6336 {
6337 temp->last->next = NULL;
6338 unused = last;
6339 last = temp->last;
6340 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6341 }
6342 else
6343 {
6344 gcc_assert (temp->first == temp->last
6345 || (temp->first->next == temp->last
6346 && TREE_CODE (decl) == PARM_DECL));
6347 memset (temp->last, '\0', sizeof (*temp->last));
6348 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6349 return temp->last;
6350 }
6351 }
6352 if (bitsize == -1 && NOTE_P (last->loc))
6353 last_loc_note = last->loc;
6354 else if (piece_loc != NULL
6355 && *piece_loc != NULL_RTX
6356 && piece_bitpos == bitpos
6357 && decl_piece_bitsize (*piece_loc) == bitsize)
6358 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6359 else
6360 last_loc_note = NULL_RTX;
6361 /* If the current location is the same as the end of the list,
6362 and either both or neither of the locations is uninitialized,
6363 we have nothing to do. */
6364 if (last_loc_note == NULL_RTX
6365 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6366 NOTE_VAR_LOCATION_LOC (loc_note)))
6367 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6368 != NOTE_VAR_LOCATION_STATUS (loc_note))
6369 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6370 == VAR_INIT_STATUS_UNINITIALIZED)
6371 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6372 == VAR_INIT_STATUS_UNINITIALIZED))))
6373 {
6374 /* Add LOC to the end of list and update LAST. If the last
6375 element of the list has been removed above, reuse its
6376 memory for the new node, otherwise allocate a new one. */
6377 if (unused)
6378 {
6379 loc = unused;
6380 memset (loc, '\0', sizeof (*loc));
6381 }
6382 else
6383 loc = ggc_cleared_alloc<var_loc_node> ();
6384 if (bitsize == -1 || piece_loc == NULL)
6385 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6386 else
6387 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6388 bitpos, piece_bitpos, bitsize, loc_note);
6389 last->next = loc;
6390 /* Ensure TEMP->LAST will point either to the new last but one
6391 element of the chain, or to the last element in it. */
6392 if (last != temp->last)
6393 temp->last = last;
6394 }
6395 else if (unused)
6396 ggc_free (unused);
6397 }
6398 else
6399 {
6400 loc = ggc_cleared_alloc<var_loc_node> ();
6401 temp->first = loc;
6402 temp->last = loc;
6403 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6404 }
6405 return loc;
6406 }
6407 \f
6408 /* Keep track of the number of spaces used to indent the
6409 output of the debugging routines that print the structure of
6410 the DIE internal representation. */
6411 static int print_indent;
6412
6413 /* Indent the line the number of spaces given by print_indent. */
6414
6415 static inline void
6416 print_spaces (FILE *outfile)
6417 {
6418 fprintf (outfile, "%*s", print_indent, "");
6419 }
6420
6421 /* Print a type signature in hex. */
6422
6423 static inline void
6424 print_signature (FILE *outfile, char *sig)
6425 {
6426 int i;
6427
6428 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6429 fprintf (outfile, "%02x", sig[i] & 0xff);
6430 }
6431
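/* Print the discriminant value DISCR_VALUE to OUTFILE, as an unsigned
   number if its "pos" flag is set and as a signed number otherwise.  */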
6432 static inline void
6433 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6434 {
6435 if (discr_value->pos)
6436 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6437 else
6438 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6439 }
6440
6441 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6442
6443 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6444 RECURSE, output location descriptor operations. */
6445
6446 static void
6447 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6448 {
6449 switch (val->val_class)
6450 {
6451 case dw_val_class_addr:
6452 fprintf (outfile, "address");
6453 break;
6454 case dw_val_class_offset:
6455 fprintf (outfile, "offset");
6456 break;
6457 case dw_val_class_loc:
6458 fprintf (outfile, "location descriptor");
6459 if (val->v.val_loc == NULL)
6460 fprintf (outfile, " -> <null>\n");
6461 else if (recurse)
6462 {
6463 fprintf (outfile, ":\n");
6464 print_indent += 4;
6465 print_loc_descr (val->v.val_loc, outfile);
6466 print_indent -= 4;
6467 }
6468 else
6469 {
6470 if (flag_dump_noaddr || flag_dump_unnumbered)
6471 fprintf (outfile, " #\n");
6472 else
6473 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6474 }
6475 break;
6476 case dw_val_class_loc_list:
6477 fprintf (outfile, "location list -> label:%s",
6478 val->v.val_loc_list->ll_symbol);
6479 break;
6480 case dw_val_class_view_list:
6481 val = view_list_to_loc_list_val_node (val);
6482 fprintf (outfile, "location list with views -> labels:%s and %s",
6483 val->v.val_loc_list->ll_symbol,
6484 val->v.val_loc_list->vl_symbol);
6485 break;
6486 case dw_val_class_range_list:
6487 fprintf (outfile, "range list");
6488 break;
6489 case dw_val_class_const:
6490 case dw_val_class_const_implicit:
6491 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6492 break;
6493 case dw_val_class_unsigned_const:
6494 case dw_val_class_unsigned_const_implicit:
6495 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6496 break;
6497 case dw_val_class_const_double:
6498 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6499 HOST_WIDE_INT_PRINT_UNSIGNED")",
6500 val->v.val_double.high,
6501 val->v.val_double.low);
6502 break;
6503 case dw_val_class_wide_int:
6504 {
6505 int i = val->v.val_wide->get_len ();
6506 fprintf (outfile, "constant (");
6507 gcc_assert (i > 0);
6508 if (val->v.val_wide->elt (i - 1) == 0)
6509 fprintf (outfile, "0x");
6510 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6511 val->v.val_wide->elt (--i));
6512 while (--i >= 0)
6513 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6514 val->v.val_wide->elt (i));
6515 fprintf (outfile, ")");
6516 break;
6517 }
6518 case dw_val_class_vec:
6519 fprintf (outfile, "floating-point or vector constant");
6520 break;
6521 case dw_val_class_flag:
6522 fprintf (outfile, "%u", val->v.val_flag);
6523 break;
6524 case dw_val_class_die_ref:
6525 if (val->v.val_die_ref.die != NULL)
6526 {
6527 dw_die_ref die = val->v.val_die_ref.die;
6528
6529 if (die->comdat_type_p)
6530 {
6531 fprintf (outfile, "die -> signature: ");
6532 print_signature (outfile,
6533 die->die_id.die_type_node->signature);
6534 }
6535 else if (die->die_id.die_symbol)
6536 {
6537 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6538 if (die->with_offset)
6539 fprintf (outfile, " + %ld", die->die_offset);
6540 }
6541 else
6542 fprintf (outfile, "die -> %ld", die->die_offset);
6543 if (flag_dump_noaddr || flag_dump_unnumbered)
6544 fprintf (outfile, " #");
6545 else
6546 fprintf (outfile, " (%p)", (void *) die);
6547 }
6548 else
6549 fprintf (outfile, "die -> <null>");
6550 break;
6551 case dw_val_class_vms_delta:
6552 fprintf (outfile, "delta: @slotcount(%s-%s)",
6553 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6554 break;
6555 case dw_val_class_symview:
6556 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6557 break;
6558 case dw_val_class_lbl_id:
6559 case dw_val_class_lineptr:
6560 case dw_val_class_macptr:
6561 case dw_val_class_loclistsptr:
6562 case dw_val_class_high_pc:
6563 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6564 break;
6565 case dw_val_class_str:
6566 if (val->v.val_str->str != NULL)
6567 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6568 else
6569 fprintf (outfile, "<null>");
6570 break;
6571 case dw_val_class_file:
6572 case dw_val_class_file_implicit:
6573 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6574 val->v.val_file->emitted_number);
6575 break;
6576 case dw_val_class_data8:
6577 {
6578 int i;
6579
6580 for (i = 0; i < 8; i++)
6581 fprintf (outfile, "%02x", val->v.val_data8[i]);
6582 break;
6583 }
6584 case dw_val_class_discr_value:
6585 print_discr_value (outfile, &val->v.val_discr_value);
6586 break;
6587 case dw_val_class_discr_list:
6588 for (dw_discr_list_ref node = val->v.val_discr_list;
6589 node != NULL;
6590 node = node->dw_discr_next)
6591 {
6592 if (node->dw_discr_range)
6593 {
6594 print_discr_value (outfile, &node->dw_discr_lower_bound);
6595 fprintf (outfile, " .. ");
6596 print_discr_value (outfile, &node->dw_discr_upper_bound);
6597 }
6598 else
6599 print_discr_value (outfile, &node->dw_discr_lower_bound);
6600
6601 if (node->dw_discr_next != NULL)
6602 fprintf (outfile, " | ");
6603 }
6604 default:
6605 break;
6606 }
6607 }
6608
6609 /* Likewise, for a DIE attribute. */
6610
6611 static void
6612 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6613 {
6614 print_dw_val (&a->dw_attr_val, recurse, outfile);
6615 }
6616
6617
6618 /* Print the list of operands in the LOC location description to OUTFILE. This
6619 routine is a debugging aid only. */
6620
6621 static void
6622 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6623 {
6624 dw_loc_descr_ref l = loc;
6625
6626 if (loc == NULL)
6627 {
6628 print_spaces (outfile);
6629 fprintf (outfile, "<null>\n");
6630 return;
6631 }
6632
6633 for (l = loc; l != NULL; l = l->dw_loc_next)
6634 {
6635 print_spaces (outfile);
6636 if (flag_dump_noaddr || flag_dump_unnumbered)
6637 fprintf (outfile, "#");
6638 else
6639 fprintf (outfile, "(%p)", (void *) l);
6640 fprintf (outfile, " %s",
6641 dwarf_stack_op_name (l->dw_loc_opc));
6642 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6643 {
6644 fprintf (outfile, " ");
6645 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6646 }
6647 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6648 {
6649 fprintf (outfile, ", ");
6650 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6651 }
6652 fprintf (outfile, "\n");
6653 }
6654 }
6655
6656 /* Print the information associated with a given DIE, and its children.
6657 This routine is a debugging aid only. */
6658
6659 static void
6660 print_die (dw_die_ref die, FILE *outfile)
6661 {
6662 dw_attr_node *a;
6663 dw_die_ref c;
6664 unsigned ix;
6665
6666 print_spaces (outfile);
6667 fprintf (outfile, "DIE %4ld: %s ",
6668 die->die_offset, dwarf_tag_name (die->die_tag));
6669 if (flag_dump_noaddr || flag_dump_unnumbered)
6670 fprintf (outfile, "#\n");
6671 else
6672 fprintf (outfile, "(%p)\n", (void*) die);
6673 print_spaces (outfile);
6674 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6675 fprintf (outfile, " offset: %ld", die->die_offset);
6676 fprintf (outfile, " mark: %d\n", die->die_mark);
6677
6678 if (die->comdat_type_p)
6679 {
6680 print_spaces (outfile);
6681 fprintf (outfile, " signature: ");
6682 print_signature (outfile, die->die_id.die_type_node->signature);
6683 fprintf (outfile, "\n");
6684 }
6685
6686 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6687 {
6688 print_spaces (outfile);
6689 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6690
6691 print_attribute (a, true, outfile);
6692 fprintf (outfile, "\n");
6693 }
6694
6695 if (die->die_child != NULL)
6696 {
6697 print_indent += 4;
6698 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6699 print_indent -= 4;
6700 }
6701 if (print_indent == 0)
6702 fprintf (outfile, "\n");
6703 }
6704
6705 /* Print the list of operations in the LOC location description. */
6706
6707 DEBUG_FUNCTION void
6708 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6709 {
6710 print_loc_descr (loc, stderr);
6711 }
6712
6713 /* Print the information collected for a given DIE. */
6714
6715 DEBUG_FUNCTION void
6716 debug_dwarf_die (dw_die_ref die)
6717 {
6718 print_die (die, stderr);
6719 }
6720
6721 DEBUG_FUNCTION void
6722 debug (die_struct &ref)
6723 {
6724 print_die (&ref, stderr);
6725 }
6726
6727 DEBUG_FUNCTION void
6728 debug (die_struct *ptr)
6729 {
6730 if (ptr)
6731 debug (*ptr);
6732 else
6733 fprintf (stderr, "<nil>\n");
6734 }
6735
6736
6737 /* Print all DWARF information collected for the compilation unit.
6738 This routine is a debugging aid only. */
6739
6740 DEBUG_FUNCTION void
6741 debug_dwarf (void)
6742 {
6743 print_indent = 0;
6744 print_die (comp_unit_die (), stderr);
6745 }
6746
6747 /* Verify the DIE tree structure. */
6748
6749 DEBUG_FUNCTION void
6750 verify_die (dw_die_ref die)
6751 {
6752 gcc_assert (!die->die_mark);
6753 if (die->die_parent == NULL
6754 && die->die_sib == NULL)
6755 return;
6756 /* Verify the die_sib list is cyclic. */
6757 dw_die_ref x = die;
6758 do
6759 {
6760 x->die_mark = 1;
6761 x = x->die_sib;
6762 }
6763 while (x && !x->die_mark);
6764 gcc_assert (x == die);
6765 x = die;
6766 do
6767 {
6768 /* Verify all dies have the same parent. */
6769 gcc_assert (x->die_parent == die->die_parent);
6770 if (x->die_child)
6771 {
6772 /* Verify the child has the proper parent and recurse. */
6773 gcc_assert (x->die_child->die_parent == x);
6774 verify_die (x->die_child);
6775 }
6776 x->die_mark = 0;
6777 x = x->die_sib;
6778 }
6779 while (x && x->die_mark);
6780 }
6781
6782 /* Sanity checks on DIEs. */
6783
6784 static void
6785 check_die (dw_die_ref die)
6786 {
6787 unsigned ix;
6788 dw_attr_node *a;
6789 bool inline_found = false;
6790 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6791 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6792 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6793 {
6794 switch (a->dw_attr)
6795 {
6796 case DW_AT_inline:
6797 if (a->dw_attr_val.v.val_unsigned)
6798 inline_found = true;
6799 break;
6800 case DW_AT_location:
6801 ++n_location;
6802 break;
6803 case DW_AT_low_pc:
6804 ++n_low_pc;
6805 break;
6806 case DW_AT_high_pc:
6807 ++n_high_pc;
6808 break;
6809 case DW_AT_artificial:
6810 ++n_artificial;
6811 break;
6812 case DW_AT_decl_column:
6813 ++n_decl_column;
6814 break;
6815 case DW_AT_decl_line:
6816 ++n_decl_line;
6817 break;
6818 case DW_AT_decl_file:
6819 ++n_decl_file;
6820 break;
6821 default:
6822 break;
6823 }
6824 }
6825 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6826 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6827 {
6828 fprintf (stderr, "Duplicate attributes in DIE:\n");
6829 debug_dwarf_die (die);
6830 gcc_unreachable ();
6831 }
6832 if (inline_found)
6833 {
6834 /* A debugging information entry that is a member of an abstract
6835 instance tree [that has DW_AT_inline] should not contain any
6836 attributes which describe aspects of the subroutine which vary
6837 between distinct inlined expansions or distinct out-of-line
6838 expansions. */
6839 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6840 gcc_assert (a->dw_attr != DW_AT_low_pc
6841 && a->dw_attr != DW_AT_high_pc
6842 && a->dw_attr != DW_AT_location
6843 && a->dw_attr != DW_AT_frame_base
6844 && a->dw_attr != DW_AT_call_all_calls
6845 && a->dw_attr != DW_AT_GNU_all_call_sites);
6846 }
6847 }
6848 \f
6849 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6850 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6851 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6852
6853 /* Calculate the checksum of a location expression. */
6854
6855 static inline void
6856 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6857 {
6858 int tem;
6859 inchash::hash hstate;
6860 hashval_t hash;
6861
6862 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6863 CHECKSUM (tem);
6864 hash_loc_operands (loc, hstate);
6865 hash = hstate.end();
6866 CHECKSUM (hash);
6867 }
6868
6869 /* Calculate the checksum of an attribute. */
6870
6871 static void
6872 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6873 {
6874 dw_loc_descr_ref loc;
6875 rtx r;
6876
6877 CHECKSUM (at->dw_attr);
6878
6879 /* We don't care that this was compiled with a different compiler
6880 snapshot; if the output is the same, that's what matters. */
6881 if (at->dw_attr == DW_AT_producer)
6882 return;
6883
6884 switch (AT_class (at))
6885 {
6886 case dw_val_class_const:
6887 case dw_val_class_const_implicit:
6888 CHECKSUM (at->dw_attr_val.v.val_int);
6889 break;
6890 case dw_val_class_unsigned_const:
6891 case dw_val_class_unsigned_const_implicit:
6892 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6893 break;
6894 case dw_val_class_const_double:
6895 CHECKSUM (at->dw_attr_val.v.val_double);
6896 break;
6897 case dw_val_class_wide_int:
6898 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6899 get_full_len (*at->dw_attr_val.v.val_wide)
6900 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6901 break;
6902 case dw_val_class_vec:
6903 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6904 (at->dw_attr_val.v.val_vec.length
6905 * at->dw_attr_val.v.val_vec.elt_size));
6906 break;
6907 case dw_val_class_flag:
6908 CHECKSUM (at->dw_attr_val.v.val_flag);
6909 break;
6910 case dw_val_class_str:
6911 CHECKSUM_STRING (AT_string (at));
6912 break;
6913
6914 case dw_val_class_addr:
6915 r = AT_addr (at);
6916 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6917 CHECKSUM_STRING (XSTR (r, 0));
6918 break;
6919
6920 case dw_val_class_offset:
6921 CHECKSUM (at->dw_attr_val.v.val_offset);
6922 break;
6923
6924 case dw_val_class_loc:
6925 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6926 loc_checksum (loc, ctx);
6927 break;
6928
6929 case dw_val_class_die_ref:
6930 die_checksum (AT_ref (at), ctx, mark);
6931 break;
6932
6933 case dw_val_class_fde_ref:
6934 case dw_val_class_vms_delta:
6935 case dw_val_class_symview:
6936 case dw_val_class_lbl_id:
6937 case dw_val_class_lineptr:
6938 case dw_val_class_macptr:
6939 case dw_val_class_loclistsptr:
6940 case dw_val_class_high_pc:
6941 break;
6942
6943 case dw_val_class_file:
6944 case dw_val_class_file_implicit:
6945 CHECKSUM_STRING (AT_file (at)->filename);
6946 break;
6947
6948 case dw_val_class_data8:
6949 CHECKSUM (at->dw_attr_val.v.val_data8);
6950 break;
6951
6952 default:
6953 break;
6954 }
6955 }
6956
6957 /* Calculate the checksum of a DIE. */
6958
6959 static void
6960 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6961 {
6962 dw_die_ref c;
6963 dw_attr_node *a;
6964 unsigned ix;
6965
6966 /* To avoid infinite recursion. */
6967 if (die->die_mark)
6968 {
6969 CHECKSUM (die->die_mark);
6970 return;
6971 }
6972 die->die_mark = ++(*mark);
6973
6974 CHECKSUM (die->die_tag);
6975
6976 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6977 attr_checksum (a, ctx, mark);
6978
6979 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6980 }
6981
6982 #undef CHECKSUM
6983 #undef CHECKSUM_BLOCK
6984 #undef CHECKSUM_STRING
6985
6986 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6987 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6988 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6989 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6990 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6991 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6992 #define CHECKSUM_ATTR(FOO) \
6993 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6994
6995 /* Calculate the checksum of a number in signed LEB128 format. */
6996
6997 static void
6998 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6999 {
7000 unsigned char byte;
7001 bool more;
7002
7003 while (1)
7004 {
7005 byte = (value & 0x7f);
7006 value >>= 7;
7007 more = !((value == 0 && (byte & 0x40) == 0)
7008 || (value == -1 && (byte & 0x40) != 0));
7009 if (more)
7010 byte |= 0x80;
7011 CHECKSUM (byte);
7012 if (!more)
7013 break;
7014 }
7015 }
7016
7017 /* Calculate the checksum of a number in unsigned LEB128 format. */
7018
7019 static void
7020 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7021 {
7022 while (1)
7023 {
7024 unsigned char byte = (value & 0x7f);
7025 value >>= 7;
7026 if (value != 0)
7027 /* More bytes to follow. */
7028 byte |= 0x80;
7029 CHECKSUM (byte);
7030 if (value == 0)
7031 break;
7032 }
7033 }
7034
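/* Illustrative example (not actual compiler flow): given a struct md5_ctx
   CTX initialized with md5_init_ctx, the helpers above feed the context the
   raw LEB128 bytes of their argument, e.g.

     checksum_uleb128 (624485, &ctx);   // bytes 0xe5 0x8e 0x26
     checksum_sleb128 (-2, &ctx);       // single byte 0x7e

   so two DIEs whose attribute values encode to the same LEB128 byte
   sequences contribute identically to the checksum.  */
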
7035 /* Checksum the context of the DIE. This adds the names of any
7036 surrounding namespaces or structures to the checksum. */
7037
7038 static void
7039 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7040 {
7041 const char *name;
7042 dw_die_ref spec;
7043 int tag = die->die_tag;
7044
7045 if (tag != DW_TAG_namespace
7046 && tag != DW_TAG_structure_type
7047 && tag != DW_TAG_class_type)
7048 return;
7049
7050 name = get_AT_string (die, DW_AT_name);
7051
7052 spec = get_AT_ref (die, DW_AT_specification);
7053 if (spec != NULL)
7054 die = spec;
7055
7056 if (die->die_parent != NULL)
7057 checksum_die_context (die->die_parent, ctx);
7058
7059 CHECKSUM_ULEB128 ('C');
7060 CHECKSUM_ULEB128 (tag);
7061 if (name != NULL)
7062 CHECKSUM_STRING (name);
7063 }
7064
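/* For example (roughly): for a type declared as

     namespace N { struct Outer { struct Inner { ... }; }; }

   checksumming the context of Inner (i.e. its parent Outer) adds, outermost
   first, 'C' DW_TAG_namespace "N" and then 'C' DW_TAG_structure_type "Outer",
   each name including its terminating NUL, so the signature depends on the
   fully qualified position of the type.  */
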
7065 /* Calculate the checksum of a location expression. */
7066
7067 static inline void
7068 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7069 {
7070 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7071 were emitted as a DW_FORM_sdata instead of a location expression. */
7072 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7073 {
7074 CHECKSUM_ULEB128 (DW_FORM_sdata);
7075 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7076 return;
7077 }
7078
7079 /* Otherwise, just checksum the raw location expression. */
7080 while (loc != NULL)
7081 {
7082 inchash::hash hstate;
7083 hashval_t hash;
7084
7085 CHECKSUM_ULEB128 (loc->dtprel);
7086 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7087 hash_loc_operands (loc, hstate);
7088 hash = hstate.end ();
7089 CHECKSUM (hash);
7090 loc = loc->dw_loc_next;
7091 }
7092 }
7093
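/* Consequence of the special case above (illustrative): a
   DW_AT_data_member_location emitted as the single-op expression
   DW_OP_plus_uconst 8 is checksummed exactly like a constant 8 in
   DW_FORM_sdata, so the type signature does not change if the compiler
   switches between the constant and the location-expression representation
   of a member offset.  */
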
7094 /* Calculate the checksum of an attribute. */
7095
7096 static void
7097 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7098 struct md5_ctx *ctx, int *mark)
7099 {
7100 dw_loc_descr_ref loc;
7101 rtx r;
7102
7103 if (AT_class (at) == dw_val_class_die_ref)
7104 {
7105 dw_die_ref target_die = AT_ref (at);
7106
7107 /* For pointer and reference types, we checksum only the (qualified)
7108 name of the target type (if there is a name). For friend entries,
7109 we checksum only the (qualified) name of the target type or function.
7110 This allows the checksum to remain the same whether the target type
7111 is complete or not. */
7112 if ((at->dw_attr == DW_AT_type
7113 && (tag == DW_TAG_pointer_type
7114 || tag == DW_TAG_reference_type
7115 || tag == DW_TAG_rvalue_reference_type
7116 || tag == DW_TAG_ptr_to_member_type))
7117 || (at->dw_attr == DW_AT_friend
7118 && tag == DW_TAG_friend))
7119 {
7120 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7121
7122 if (name_attr != NULL)
7123 {
7124 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7125
7126 if (decl == NULL)
7127 decl = target_die;
7128 CHECKSUM_ULEB128 ('N');
7129 CHECKSUM_ULEB128 (at->dw_attr);
7130 if (decl->die_parent != NULL)
7131 checksum_die_context (decl->die_parent, ctx);
7132 CHECKSUM_ULEB128 ('E');
7133 CHECKSUM_STRING (AT_string (name_attr));
7134 return;
7135 }
7136 }
7137
7138 /* For all other references to another DIE, we check to see if the
7139 target DIE has already been visited. If it has, we emit a
7140 backward reference; if not, we descend recursively. */
7141 if (target_die->die_mark > 0)
7142 {
7143 CHECKSUM_ULEB128 ('R');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145 CHECKSUM_ULEB128 (target_die->die_mark);
7146 }
7147 else
7148 {
7149 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7150
7151 if (decl == NULL)
7152 decl = target_die;
7153 target_die->die_mark = ++(*mark);
7154 CHECKSUM_ULEB128 ('T');
7155 CHECKSUM_ULEB128 (at->dw_attr);
7156 if (decl->die_parent != NULL)
7157 checksum_die_context (decl->die_parent, ctx);
7158 die_checksum_ordered (target_die, ctx, mark);
7159 }
7160 return;
7161 }
7162
7163 CHECKSUM_ULEB128 ('A');
7164 CHECKSUM_ULEB128 (at->dw_attr);
7165
7166 switch (AT_class (at))
7167 {
7168 case dw_val_class_const:
7169 case dw_val_class_const_implicit:
7170 CHECKSUM_ULEB128 (DW_FORM_sdata);
7171 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7172 break;
7173
7174 case dw_val_class_unsigned_const:
7175 case dw_val_class_unsigned_const_implicit:
7176 CHECKSUM_ULEB128 (DW_FORM_sdata);
7177 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7178 break;
7179
7180 case dw_val_class_const_double:
7181 CHECKSUM_ULEB128 (DW_FORM_block);
7182 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7183 CHECKSUM (at->dw_attr_val.v.val_double);
7184 break;
7185
7186 case dw_val_class_wide_int:
7187 CHECKSUM_ULEB128 (DW_FORM_block);
7188 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7189 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7190 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7191 get_full_len (*at->dw_attr_val.v.val_wide)
7192 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7193 break;
7194
7195 case dw_val_class_vec:
7196 CHECKSUM_ULEB128 (DW_FORM_block);
7197 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7198 * at->dw_attr_val.v.val_vec.elt_size);
7199 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7200 (at->dw_attr_val.v.val_vec.length
7201 * at->dw_attr_val.v.val_vec.elt_size));
7202 break;
7203
7204 case dw_val_class_flag:
7205 CHECKSUM_ULEB128 (DW_FORM_flag);
7206 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7207 break;
7208
7209 case dw_val_class_str:
7210 CHECKSUM_ULEB128 (DW_FORM_string);
7211 CHECKSUM_STRING (AT_string (at));
7212 break;
7213
7214 case dw_val_class_addr:
7215 r = AT_addr (at);
7216 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7217 CHECKSUM_ULEB128 (DW_FORM_string);
7218 CHECKSUM_STRING (XSTR (r, 0));
7219 break;
7220
7221 case dw_val_class_offset:
7222 CHECKSUM_ULEB128 (DW_FORM_sdata);
7223 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7224 break;
7225
7226 case dw_val_class_loc:
7227 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7228 loc_checksum_ordered (loc, ctx);
7229 break;
7230
7231 case dw_val_class_fde_ref:
7232 case dw_val_class_symview:
7233 case dw_val_class_lbl_id:
7234 case dw_val_class_lineptr:
7235 case dw_val_class_macptr:
7236 case dw_val_class_loclistsptr:
7237 case dw_val_class_high_pc:
7238 break;
7239
7240 case dw_val_class_file:
7241 case dw_val_class_file_implicit:
7242 CHECKSUM_ULEB128 (DW_FORM_string);
7243 CHECKSUM_STRING (AT_file (at)->filename);
7244 break;
7245
7246 case dw_val_class_data8:
7247 CHECKSUM (at->dw_attr_val.v.val_data8);
7248 break;
7249
7250 default:
7251 break;
7252 }
7253 }
7254
7255 struct checksum_attributes
7256 {
7257 dw_attr_node *at_name;
7258 dw_attr_node *at_type;
7259 dw_attr_node *at_friend;
7260 dw_attr_node *at_accessibility;
7261 dw_attr_node *at_address_class;
7262 dw_attr_node *at_alignment;
7263 dw_attr_node *at_allocated;
7264 dw_attr_node *at_artificial;
7265 dw_attr_node *at_associated;
7266 dw_attr_node *at_binary_scale;
7267 dw_attr_node *at_bit_offset;
7268 dw_attr_node *at_bit_size;
7269 dw_attr_node *at_bit_stride;
7270 dw_attr_node *at_byte_size;
7271 dw_attr_node *at_byte_stride;
7272 dw_attr_node *at_const_value;
7273 dw_attr_node *at_containing_type;
7274 dw_attr_node *at_count;
7275 dw_attr_node *at_data_location;
7276 dw_attr_node *at_data_member_location;
7277 dw_attr_node *at_decimal_scale;
7278 dw_attr_node *at_decimal_sign;
7279 dw_attr_node *at_default_value;
7280 dw_attr_node *at_digit_count;
7281 dw_attr_node *at_discr;
7282 dw_attr_node *at_discr_list;
7283 dw_attr_node *at_discr_value;
7284 dw_attr_node *at_encoding;
7285 dw_attr_node *at_endianity;
7286 dw_attr_node *at_explicit;
7287 dw_attr_node *at_is_optional;
7288 dw_attr_node *at_location;
7289 dw_attr_node *at_lower_bound;
7290 dw_attr_node *at_mutable;
7291 dw_attr_node *at_ordering;
7292 dw_attr_node *at_picture_string;
7293 dw_attr_node *at_prototyped;
7294 dw_attr_node *at_small;
7295 dw_attr_node *at_segment;
7296 dw_attr_node *at_string_length;
7297 dw_attr_node *at_string_length_bit_size;
7298 dw_attr_node *at_string_length_byte_size;
7299 dw_attr_node *at_threads_scaled;
7300 dw_attr_node *at_upper_bound;
7301 dw_attr_node *at_use_location;
7302 dw_attr_node *at_use_UTF8;
7303 dw_attr_node *at_variable_parameter;
7304 dw_attr_node *at_virtuality;
7305 dw_attr_node *at_visibility;
7306 dw_attr_node *at_vtable_elem_location;
7307 };
7308
7309 /* Collect the attributes that we will want to use for the checksum. */
7310
7311 static void
7312 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7313 {
7314 dw_attr_node *a;
7315 unsigned ix;
7316
7317 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7318 {
7319 switch (a->dw_attr)
7320 {
7321 case DW_AT_name:
7322 attrs->at_name = a;
7323 break;
7324 case DW_AT_type:
7325 attrs->at_type = a;
7326 break;
7327 case DW_AT_friend:
7328 attrs->at_friend = a;
7329 break;
7330 case DW_AT_accessibility:
7331 attrs->at_accessibility = a;
7332 break;
7333 case DW_AT_address_class:
7334 attrs->at_address_class = a;
7335 break;
7336 case DW_AT_alignment:
7337 attrs->at_alignment = a;
7338 break;
7339 case DW_AT_allocated:
7340 attrs->at_allocated = a;
7341 break;
7342 case DW_AT_artificial:
7343 attrs->at_artificial = a;
7344 break;
7345 case DW_AT_associated:
7346 attrs->at_associated = a;
7347 break;
7348 case DW_AT_binary_scale:
7349 attrs->at_binary_scale = a;
7350 break;
7351 case DW_AT_bit_offset:
7352 attrs->at_bit_offset = a;
7353 break;
7354 case DW_AT_bit_size:
7355 attrs->at_bit_size = a;
7356 break;
7357 case DW_AT_bit_stride:
7358 attrs->at_bit_stride = a;
7359 break;
7360 case DW_AT_byte_size:
7361 attrs->at_byte_size = a;
7362 break;
7363 case DW_AT_byte_stride:
7364 attrs->at_byte_stride = a;
7365 break;
7366 case DW_AT_const_value:
7367 attrs->at_const_value = a;
7368 break;
7369 case DW_AT_containing_type:
7370 attrs->at_containing_type = a;
7371 break;
7372 case DW_AT_count:
7373 attrs->at_count = a;
7374 break;
7375 case DW_AT_data_location:
7376 attrs->at_data_location = a;
7377 break;
7378 case DW_AT_data_member_location:
7379 attrs->at_data_member_location = a;
7380 break;
7381 case DW_AT_decimal_scale:
7382 attrs->at_decimal_scale = a;
7383 break;
7384 case DW_AT_decimal_sign:
7385 attrs->at_decimal_sign = a;
7386 break;
7387 case DW_AT_default_value:
7388 attrs->at_default_value = a;
7389 break;
7390 case DW_AT_digit_count:
7391 attrs->at_digit_count = a;
7392 break;
7393 case DW_AT_discr:
7394 attrs->at_discr = a;
7395 break;
7396 case DW_AT_discr_list:
7397 attrs->at_discr_list = a;
7398 break;
7399 case DW_AT_discr_value:
7400 attrs->at_discr_value = a;
7401 break;
7402 case DW_AT_encoding:
7403 attrs->at_encoding = a;
7404 break;
7405 case DW_AT_endianity:
7406 attrs->at_endianity = a;
7407 break;
7408 case DW_AT_explicit:
7409 attrs->at_explicit = a;
7410 break;
7411 case DW_AT_is_optional:
7412 attrs->at_is_optional = a;
7413 break;
7414 case DW_AT_location:
7415 attrs->at_location = a;
7416 break;
7417 case DW_AT_lower_bound:
7418 attrs->at_lower_bound = a;
7419 break;
7420 case DW_AT_mutable:
7421 attrs->at_mutable = a;
7422 break;
7423 case DW_AT_ordering:
7424 attrs->at_ordering = a;
7425 break;
7426 case DW_AT_picture_string:
7427 attrs->at_picture_string = a;
7428 break;
7429 case DW_AT_prototyped:
7430 attrs->at_prototyped = a;
7431 break;
7432 case DW_AT_small:
7433 attrs->at_small = a;
7434 break;
7435 case DW_AT_segment:
7436 attrs->at_segment = a;
7437 break;
7438 case DW_AT_string_length:
7439 attrs->at_string_length = a;
7440 break;
7441 case DW_AT_string_length_bit_size:
7442 attrs->at_string_length_bit_size = a;
7443 break;
7444 case DW_AT_string_length_byte_size:
7445 attrs->at_string_length_byte_size = a;
7446 break;
7447 case DW_AT_threads_scaled:
7448 attrs->at_threads_scaled = a;
7449 break;
7450 case DW_AT_upper_bound:
7451 attrs->at_upper_bound = a;
7452 break;
7453 case DW_AT_use_location:
7454 attrs->at_use_location = a;
7455 break;
7456 case DW_AT_use_UTF8:
7457 attrs->at_use_UTF8 = a;
7458 break;
7459 case DW_AT_variable_parameter:
7460 attrs->at_variable_parameter = a;
7461 break;
7462 case DW_AT_virtuality:
7463 attrs->at_virtuality = a;
7464 break;
7465 case DW_AT_visibility:
7466 attrs->at_visibility = a;
7467 break;
7468 case DW_AT_vtable_elem_location:
7469 attrs->at_vtable_elem_location = a;
7470 break;
7471 default:
7472 break;
7473 }
7474 }
7475 }
7476
7477 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7478
7479 static void
7480 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7481 {
7482 dw_die_ref c;
7483 dw_die_ref decl;
7484 struct checksum_attributes attrs;
7485
7486 CHECKSUM_ULEB128 ('D');
7487 CHECKSUM_ULEB128 (die->die_tag);
7488
7489 memset (&attrs, 0, sizeof (attrs));
7490
7491 decl = get_AT_ref (die, DW_AT_specification);
7492 if (decl != NULL)
7493 collect_checksum_attributes (&attrs, decl);
7494 collect_checksum_attributes (&attrs, die);
7495
7496 CHECKSUM_ATTR (attrs.at_name);
7497 CHECKSUM_ATTR (attrs.at_accessibility);
7498 CHECKSUM_ATTR (attrs.at_address_class);
7499 CHECKSUM_ATTR (attrs.at_allocated);
7500 CHECKSUM_ATTR (attrs.at_artificial);
7501 CHECKSUM_ATTR (attrs.at_associated);
7502 CHECKSUM_ATTR (attrs.at_binary_scale);
7503 CHECKSUM_ATTR (attrs.at_bit_offset);
7504 CHECKSUM_ATTR (attrs.at_bit_size);
7505 CHECKSUM_ATTR (attrs.at_bit_stride);
7506 CHECKSUM_ATTR (attrs.at_byte_size);
7507 CHECKSUM_ATTR (attrs.at_byte_stride);
7508 CHECKSUM_ATTR (attrs.at_const_value);
7509 CHECKSUM_ATTR (attrs.at_containing_type);
7510 CHECKSUM_ATTR (attrs.at_count);
7511 CHECKSUM_ATTR (attrs.at_data_location);
7512 CHECKSUM_ATTR (attrs.at_data_member_location);
7513 CHECKSUM_ATTR (attrs.at_decimal_scale);
7514 CHECKSUM_ATTR (attrs.at_decimal_sign);
7515 CHECKSUM_ATTR (attrs.at_default_value);
7516 CHECKSUM_ATTR (attrs.at_digit_count);
7517 CHECKSUM_ATTR (attrs.at_discr);
7518 CHECKSUM_ATTR (attrs.at_discr_list);
7519 CHECKSUM_ATTR (attrs.at_discr_value);
7520 CHECKSUM_ATTR (attrs.at_encoding);
7521 CHECKSUM_ATTR (attrs.at_endianity);
7522 CHECKSUM_ATTR (attrs.at_explicit);
7523 CHECKSUM_ATTR (attrs.at_is_optional);
7524 CHECKSUM_ATTR (attrs.at_location);
7525 CHECKSUM_ATTR (attrs.at_lower_bound);
7526 CHECKSUM_ATTR (attrs.at_mutable);
7527 CHECKSUM_ATTR (attrs.at_ordering);
7528 CHECKSUM_ATTR (attrs.at_picture_string);
7529 CHECKSUM_ATTR (attrs.at_prototyped);
7530 CHECKSUM_ATTR (attrs.at_small);
7531 CHECKSUM_ATTR (attrs.at_segment);
7532 CHECKSUM_ATTR (attrs.at_string_length);
7533 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7534 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7535 CHECKSUM_ATTR (attrs.at_threads_scaled);
7536 CHECKSUM_ATTR (attrs.at_upper_bound);
7537 CHECKSUM_ATTR (attrs.at_use_location);
7538 CHECKSUM_ATTR (attrs.at_use_UTF8);
7539 CHECKSUM_ATTR (attrs.at_variable_parameter);
7540 CHECKSUM_ATTR (attrs.at_virtuality);
7541 CHECKSUM_ATTR (attrs.at_visibility);
7542 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7543 CHECKSUM_ATTR (attrs.at_type);
7544 CHECKSUM_ATTR (attrs.at_friend);
7545 CHECKSUM_ATTR (attrs.at_alignment);
7546
7547 /* Checksum the child DIEs. */
7548 c = die->die_child;
7549 if (c) do {
7550 dw_attr_node *name_attr;
7551
7552 c = c->die_sib;
7553 name_attr = get_AT (c, DW_AT_name);
7554 if (is_template_instantiation (c))
7555 {
7556 /* Ignore instantiations of member type and function templates. */
7557 }
7558 else if (name_attr != NULL
7559 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7560 {
7561 /* Use a shallow checksum for named nested types and member
7562 functions. */
7563 CHECKSUM_ULEB128 ('S');
7564 CHECKSUM_ULEB128 (c->die_tag);
7565 CHECKSUM_STRING (AT_string (name_attr));
7566 }
7567 else
7568 {
7569 /* Use a deep checksum for other children. */
7570 /* Mark this DIE so it gets processed when unmarking. */
7571 if (c->die_mark == 0)
7572 c->die_mark = -1;
7573 die_checksum_ordered (c, ctx, mark);
7574 }
7575 } while (c != die->die_child);
7576
7577 CHECKSUM_ULEB128 (0);
7578 }
7579
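/* Sketch of the resulting byte stream (illustrative): for a DIE such as a
   struct type, the checksum consists of 'D' and the tag, the selected
   attributes in the fixed order above (each introduced by 'A', or by
   'N'/'R'/'T' for references to other DIEs), then one entry per child --
   'S' plus tag and name for named nested types and member functions, or a
   full recursive checksum otherwise -- and a terminating 0.  */
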
7580 /* Add a type name and tag to a hash. */
7581 static void
7582 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7583 {
7584 CHECKSUM_ULEB128 (tag);
7585 CHECKSUM_STRING (name);
7586 }
7587
7588 #undef CHECKSUM
7589 #undef CHECKSUM_STRING
7590 #undef CHECKSUM_ATTR
7591 #undef CHECKSUM_SLEB128
7592 #undef CHECKSUM_ULEB128
7593
7594 /* Generate the type signature for DIE. This is computed by generating an
7595 MD5 checksum over the DIE's tag, its relevant attributes, and its
7596 children. Attributes that are references to other DIEs are processed
7597 by recursion, using the MARK field to prevent infinite recursion.
7598 If the DIE is nested inside a namespace or another type, we also
7599 need to include that context in the signature. The lower 64 bits
7600 of the resulting MD5 checksum comprise the signature. */
7601
7602 static void
7603 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7604 {
7605 int mark;
7606 const char *name;
7607 unsigned char checksum[16];
7608 struct md5_ctx ctx;
7609 dw_die_ref decl;
7610 dw_die_ref parent;
7611
7612 name = get_AT_string (die, DW_AT_name);
7613 decl = get_AT_ref (die, DW_AT_specification);
7614 parent = get_die_parent (die);
7615
7616 /* First, compute a signature for just the type name (and its surrounding
7617 context, if any). This is stored in the type unit DIE for link-time
7618 ODR (one-definition rule) checking. */
7619
7620 if (is_cxx () && name != NULL)
7621 {
7622 md5_init_ctx (&ctx);
7623
7624 /* Checksum the names of surrounding namespaces and structures. */
7625 if (parent != NULL)
7626 checksum_die_context (parent, &ctx);
7627
7628 /* Checksum the current DIE. */
7629 die_odr_checksum (die->die_tag, name, &ctx);
7630 md5_finish_ctx (&ctx, checksum);
7631
7632 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7633 }
7634
7635 /* Next, compute the complete type signature. */
7636
7637 md5_init_ctx (&ctx);
7638 mark = 1;
7639 die->die_mark = mark;
7640
7641 /* Checksum the names of surrounding namespaces and structures. */
7642 if (parent != NULL)
7643 checksum_die_context (parent, &ctx);
7644
7645 /* Checksum the DIE and its children. */
7646 die_checksum_ordered (die, &ctx, &mark);
7647 unmark_all_dies (die);
7648 md5_finish_ctx (&ctx, checksum);
7649
7650 /* Store the signature in the type node and link the type DIE and the
7651 type node together. */
7652 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7653 DWARF_TYPE_SIGNATURE_SIZE);
7654 die->comdat_type_p = true;
7655 die->die_id.die_type_node = type_node;
7656 type_node->type_die = die;
7657
7658 /* If the DIE is a specification, link its declaration to the type node
7659 as well. */
7660 if (decl != NULL)
7661 {
7662 decl->comdat_type_p = true;
7663 decl->die_id.die_type_node = type_node;
7664 }
7665 }
7666
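/* Illustrative example (assuming DWARF_TYPE_SIGNATURE_SIZE is 8): if the
   MD5 digest of the full checksum is the 16 bytes

     00 11 22 33 44 55 66 77 88 99 aa bb cc dd ee ff

   then the trailing 8 bytes 88 99 aa bb cc dd ee ff become the comdat type
   signature, and the trailing 8 bytes of the name-only digest are stored as
   DW_AT_GNU_odr_signature on the type unit DIE.  */
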
7667 /* Do the location expressions look the same? */
7668 static inline int
7669 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7670 {
7671 return loc1->dw_loc_opc == loc2->dw_loc_opc
7672 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7673 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7674 }
7675
7676 /* Do the values look the same? */
7677 static int
7678 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7679 {
7680 dw_loc_descr_ref loc1, loc2;
7681 rtx r1, r2;
7682
7683 if (v1->val_class != v2->val_class)
7684 return 0;
7685
7686 switch (v1->val_class)
7687 {
7688 case dw_val_class_const:
7689 case dw_val_class_const_implicit:
7690 return v1->v.val_int == v2->v.val_int;
7691 case dw_val_class_unsigned_const:
7692 case dw_val_class_unsigned_const_implicit:
7693 return v1->v.val_unsigned == v2->v.val_unsigned;
7694 case dw_val_class_const_double:
7695 return v1->v.val_double.high == v2->v.val_double.high
7696 && v1->v.val_double.low == v2->v.val_double.low;
7697 case dw_val_class_wide_int:
7698 return *v1->v.val_wide == *v2->v.val_wide;
7699 case dw_val_class_vec:
7700 if (v1->v.val_vec.length != v2->v.val_vec.length
7701 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7702 return 0;
7703 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7704 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7705 return 0;
7706 return 1;
7707 case dw_val_class_flag:
7708 return v1->v.val_flag == v2->v.val_flag;
7709 case dw_val_class_str:
7710 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7711
7712 case dw_val_class_addr:
7713 r1 = v1->v.val_addr;
7714 r2 = v2->v.val_addr;
7715 if (GET_CODE (r1) != GET_CODE (r2))
7716 return 0;
7717 return rtx_equal_p (r1, r2);
7718
7719 case dw_val_class_offset:
7720 return v1->v.val_offset == v2->v.val_offset;
7721
7722 case dw_val_class_loc:
7723 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7724 loc1 && loc2;
7725 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7726 if (!same_loc_p (loc1, loc2, mark))
7727 return 0;
7728 return !loc1 && !loc2;
7729
7730 case dw_val_class_die_ref:
7731 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7732
7733 case dw_val_class_symview:
7734 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7735
7736 case dw_val_class_fde_ref:
7737 case dw_val_class_vms_delta:
7738 case dw_val_class_lbl_id:
7739 case dw_val_class_lineptr:
7740 case dw_val_class_macptr:
7741 case dw_val_class_loclistsptr:
7742 case dw_val_class_high_pc:
7743 return 1;
7744
7745 case dw_val_class_file:
7746 case dw_val_class_file_implicit:
7747 return v1->v.val_file == v2->v.val_file;
7748
7749 case dw_val_class_data8:
7750 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7751
7752 default:
7753 return 1;
7754 }
7755 }
7756
7757 /* Do the attributes look the same? */
7758
7759 static int
7760 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7761 {
7762 if (at1->dw_attr != at2->dw_attr)
7763 return 0;
7764
7765 /* We don't care that this was compiled with a different compiler
7766 snapshot; if the output is the same, that's what matters. */
7767 if (at1->dw_attr == DW_AT_producer)
7768 return 1;
7769
7770 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7771 }
7772
7773 /* Do the DIEs look the same? */
7774
7775 static int
7776 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7777 {
7778 dw_die_ref c1, c2;
7779 dw_attr_node *a1;
7780 unsigned ix;
7781
7782 /* To avoid infinite recursion. */
7783 if (die1->die_mark)
7784 return die1->die_mark == die2->die_mark;
7785 die1->die_mark = die2->die_mark = ++(*mark);
7786
7787 if (die1->die_tag != die2->die_tag)
7788 return 0;
7789
7790 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7791 return 0;
7792
7793 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7794 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7795 return 0;
7796
7797 c1 = die1->die_child;
7798 c2 = die2->die_child;
7799 if (! c1)
7800 {
7801 if (c2)
7802 return 0;
7803 }
7804 else
7805 for (;;)
7806 {
7807 if (!same_die_p (c1, c2, mark))
7808 return 0;
7809 c1 = c1->die_sib;
7810 c2 = c2->die_sib;
7811 if (c1 == die1->die_child)
7812 {
7813 if (c2 == die2->die_child)
7814 break;
7815 else
7816 return 0;
7817 }
7818 }
7819
7820 return 1;
7821 }
7822
7823 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7824 children, and set die_symbol. */
7825
7826 static void
7827 compute_comp_unit_symbol (dw_die_ref unit_die)
7828 {
7829 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7830 const char *base = die_name ? lbasename (die_name) : "anonymous";
7831 char *name = XALLOCAVEC (char, strlen (base) + 64);
7832 char *p;
7833 int i, mark;
7834 unsigned char checksum[16];
7835 struct md5_ctx ctx;
7836
7837 /* Compute the checksum of the DIE, then append part of it as hex digits to
7838 the name of the unit. */
7839
7840 md5_init_ctx (&ctx);
7841 mark = 0;
7842 die_checksum (unit_die, &ctx, &mark);
7843 unmark_all_dies (unit_die);
7844 md5_finish_ctx (&ctx, checksum);
7845
7846 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7847 not start with a letter but with anything valid for filenames and
7848 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7849 character is not a letter. */
7850 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7851 clean_symbol_name (name);
7852
7853 p = name + strlen (name);
7854 for (i = 0; i < 4; i++)
7855 {
7856 sprintf (p, "%.2x", checksum[i]);
7857 p += 2;
7858 }
7859
7860 unit_die->die_id.die_symbol = xstrdup (name);
7861 }
7862
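/* For example (assuming clean_symbol_name rewrites characters that are not
   valid in a symbol to '_'): a unit whose DW_AT_name is "/tmp/foo.c" gets
   base "foo.c", the working buffer starts as "foo.c.", and after cleaning
   and appending the first four checksum bytes as hex digits the symbol
   becomes something like "foo_c_1a2b3c4d".  */
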
7863 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7864
7865 static int
7866 is_type_die (dw_die_ref die)
7867 {
7868 switch (die->die_tag)
7869 {
7870 case DW_TAG_array_type:
7871 case DW_TAG_class_type:
7872 case DW_TAG_interface_type:
7873 case DW_TAG_enumeration_type:
7874 case DW_TAG_pointer_type:
7875 case DW_TAG_reference_type:
7876 case DW_TAG_rvalue_reference_type:
7877 case DW_TAG_string_type:
7878 case DW_TAG_structure_type:
7879 case DW_TAG_subroutine_type:
7880 case DW_TAG_union_type:
7881 case DW_TAG_ptr_to_member_type:
7882 case DW_TAG_set_type:
7883 case DW_TAG_subrange_type:
7884 case DW_TAG_base_type:
7885 case DW_TAG_const_type:
7886 case DW_TAG_file_type:
7887 case DW_TAG_packed_type:
7888 case DW_TAG_volatile_type:
7889 case DW_TAG_typedef:
7890 return 1;
7891 default:
7892 return 0;
7893 }
7894 }
7895
7896 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7897 Basically, we want to choose the bits that are likely to be shared between
7898 compilations (types) and leave out the bits that are specific to individual
7899 compilations (functions). */
7900
7901 static int
7902 is_comdat_die (dw_die_ref c)
7903 {
7904 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7905 we do for stabs. The advantage is a greater likelihood of sharing between
7906 objects that don't include headers in the same order (and therefore would
7907 put the base types in a different comdat). jason 8/28/00 */
7908
7909 if (c->die_tag == DW_TAG_base_type)
7910 return 0;
7911
7912 if (c->die_tag == DW_TAG_pointer_type
7913 || c->die_tag == DW_TAG_reference_type
7914 || c->die_tag == DW_TAG_rvalue_reference_type
7915 || c->die_tag == DW_TAG_const_type
7916 || c->die_tag == DW_TAG_volatile_type)
7917 {
7918 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7919
7920 return t ? is_comdat_die (t) : 0;
7921 }
7922
7923 return is_type_die (c);
7924 }
7925
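/* For instance: a DW_TAG_pointer_type whose DW_AT_type is a struct is
   treated like the struct itself -- the recursion above reaches the struct
   and returns 1 -- whereas a pointer to a base type such as char bottoms
   out at DW_TAG_base_type and returns 0, keeping that chain with the main
   CU.  */
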
7926 /* Returns true iff C is a compile-unit DIE. */
7927
7928 static inline bool
7929 is_cu_die (dw_die_ref c)
7930 {
7931 return c && (c->die_tag == DW_TAG_compile_unit
7932 || c->die_tag == DW_TAG_skeleton_unit);
7933 }
7934
7935 /* Returns true iff C is a unit DIE of some sort. */
7936
7937 static inline bool
7938 is_unit_die (dw_die_ref c)
7939 {
7940 return c && (c->die_tag == DW_TAG_compile_unit
7941 || c->die_tag == DW_TAG_partial_unit
7942 || c->die_tag == DW_TAG_type_unit
7943 || c->die_tag == DW_TAG_skeleton_unit);
7944 }
7945
7946 /* Returns true iff C is a namespace DIE. */
7947
7948 static inline bool
7949 is_namespace_die (dw_die_ref c)
7950 {
7951 return c && c->die_tag == DW_TAG_namespace;
7952 }
7953
7954 /* Returns true iff C is a class or structure DIE. */
7955
7956 static inline bool
7957 is_class_die (dw_die_ref c)
7958 {
7959 return c && (c->die_tag == DW_TAG_class_type
7960 || c->die_tag == DW_TAG_structure_type);
7961 }
7962
7963 /* Return non-zero if this DIE is a template parameter. */
7964
7965 static inline bool
7966 is_template_parameter (dw_die_ref die)
7967 {
7968 switch (die->die_tag)
7969 {
7970 case DW_TAG_template_type_param:
7971 case DW_TAG_template_value_param:
7972 case DW_TAG_GNU_template_template_param:
7973 case DW_TAG_GNU_template_parameter_pack:
7974 return true;
7975 default:
7976 return false;
7977 }
7978 }
7979
7980 /* Return non-zero if this DIE represents a template instantiation. */
7981
7982 static inline bool
7983 is_template_instantiation (dw_die_ref die)
7984 {
7985 dw_die_ref c;
7986
7987 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7988 return false;
7989 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7990 return false;
7991 }
7992
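/* Generate a fresh internal label name starting with PREFIX, using
   label_num to make it unique, and return it as an xstrdup'ed copy.  */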
7993 static char *
7994 gen_internal_sym (const char *prefix)
7995 {
7996 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7997
7998 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7999 return xstrdup (buf);
8000 }
8001
8002 /* Return non-zero if this DIE is a declaration. */
8003
8004 static int
8005 is_declaration_die (dw_die_ref die)
8006 {
8007 dw_attr_node *a;
8008 unsigned ix;
8009
8010 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8011 if (a->dw_attr == DW_AT_declaration)
8012 return 1;
8013
8014 return 0;
8015 }
8016
8017 /* Return non-zero if this DIE is nested inside a subprogram. */
8018
8019 static int
8020 is_nested_in_subprogram (dw_die_ref die)
8021 {
8022 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8023
8024 if (decl == NULL)
8025 decl = die;
8026 return local_scope_p (decl);
8027 }
8028
8029 /* Return non-zero if this DIE contains a defining declaration of a
8030 subprogram. */
8031
8032 static int
8033 contains_subprogram_definition (dw_die_ref die)
8034 {
8035 dw_die_ref c;
8036
8037 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8038 return 1;
8039 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8040 return 0;
8041 }
8042
8043 /* Return non-zero if this is a type DIE that should be moved to a
8044 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8045 unit type. */
8046
8047 static int
8048 should_move_die_to_comdat (dw_die_ref die)
8049 {
8050 switch (die->die_tag)
8051 {
8052 case DW_TAG_class_type:
8053 case DW_TAG_structure_type:
8054 case DW_TAG_enumeration_type:
8055 case DW_TAG_union_type:
8056 /* Don't move declarations, inlined instances, types nested in a
8057 subprogram, or types that contain subprogram definitions. */
8058 if (is_declaration_die (die)
8059 || get_AT (die, DW_AT_abstract_origin)
8060 || is_nested_in_subprogram (die)
8061 || contains_subprogram_definition (die))
8062 return 0;
8063 return 1;
8064 case DW_TAG_array_type:
8065 case DW_TAG_interface_type:
8066 case DW_TAG_pointer_type:
8067 case DW_TAG_reference_type:
8068 case DW_TAG_rvalue_reference_type:
8069 case DW_TAG_string_type:
8070 case DW_TAG_subroutine_type:
8071 case DW_TAG_ptr_to_member_type:
8072 case DW_TAG_set_type:
8073 case DW_TAG_subrange_type:
8074 case DW_TAG_base_type:
8075 case DW_TAG_const_type:
8076 case DW_TAG_file_type:
8077 case DW_TAG_packed_type:
8078 case DW_TAG_volatile_type:
8079 case DW_TAG_typedef:
8080 default:
8081 return 0;
8082 }
8083 }
8084
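/* For instance: "struct point { int x, y; };" at namespace scope is moved
   to its own type unit, whereas the same struct declared inside a function
   body, or one whose DIE subtree contains a defining DW_TAG_subprogram,
   stays in the main CU.  */
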
8085 /* Make a clone of DIE. */
8086
8087 static dw_die_ref
8088 clone_die (dw_die_ref die)
8089 {
8090 dw_die_ref clone = new_die_raw (die->die_tag);
8091 dw_attr_node *a;
8092 unsigned ix;
8093
8094 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8095 add_dwarf_attr (clone, a);
8096
8097 return clone;
8098 }
8099
8100 /* Make a clone of the tree rooted at DIE. */
8101
8102 static dw_die_ref
8103 clone_tree (dw_die_ref die)
8104 {
8105 dw_die_ref c;
8106 dw_die_ref clone = clone_die (die);
8107
8108 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8109
8110 return clone;
8111 }
8112
8113 /* Make a clone of DIE as a declaration. */
8114
8115 static dw_die_ref
8116 clone_as_declaration (dw_die_ref die)
8117 {
8118 dw_die_ref clone;
8119 dw_die_ref decl;
8120 dw_attr_node *a;
8121 unsigned ix;
8122
8123 /* If the DIE is already a declaration, just clone it. */
8124 if (is_declaration_die (die))
8125 return clone_die (die);
8126
8127 /* If the DIE is a specification, just clone its declaration DIE. */
8128 decl = get_AT_ref (die, DW_AT_specification);
8129 if (decl != NULL)
8130 {
8131 clone = clone_die (decl);
8132 if (die->comdat_type_p)
8133 add_AT_die_ref (clone, DW_AT_signature, die);
8134 return clone;
8135 }
8136
8137 clone = new_die_raw (die->die_tag);
8138
8139 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8140 {
8141 /* We don't want to copy over all attributes.
8142 For example we don't want DW_AT_byte_size because otherwise we will no
8143 longer have a declaration and GDB will treat it as a definition. */
8144
8145 switch (a->dw_attr)
8146 {
8147 case DW_AT_abstract_origin:
8148 case DW_AT_artificial:
8149 case DW_AT_containing_type:
8150 case DW_AT_external:
8151 case DW_AT_name:
8152 case DW_AT_type:
8153 case DW_AT_virtuality:
8154 case DW_AT_linkage_name:
8155 case DW_AT_MIPS_linkage_name:
8156 add_dwarf_attr (clone, a);
8157 break;
8158 case DW_AT_byte_size:
8159 case DW_AT_alignment:
8160 default:
8161 break;
8162 }
8163 }
8164
8165 if (die->comdat_type_p)
8166 add_AT_die_ref (clone, DW_AT_signature, die);
8167
8168 add_AT_flag (clone, DW_AT_declaration, 1);
8169 return clone;
8170 }
8171
8172
8173 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8174
8175 struct decl_table_entry
8176 {
8177 dw_die_ref orig;
8178 dw_die_ref copy;
8179 };
8180
8181 /* Helpers to manipulate hash table of copied declarations. */
8182
8183 /* Hashtable helpers. */
8184
8185 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8186 {
8187 typedef die_struct *compare_type;
8188 static inline hashval_t hash (const decl_table_entry *);
8189 static inline bool equal (const decl_table_entry *, const die_struct *);
8190 };
8191
8192 inline hashval_t
8193 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8194 {
8195 return htab_hash_pointer (entry->orig);
8196 }
8197
8198 inline bool
8199 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8200 const die_struct *entry2)
8201 {
8202 return entry1->orig == entry2;
8203 }
8204
8205 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8206
8207 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8208 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8209 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8210 to check if the ancestor has already been copied into UNIT. */
8211
8212 static dw_die_ref
8213 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8214 decl_hash_type *decl_table)
8215 {
8216 dw_die_ref parent = die->die_parent;
8217 dw_die_ref new_parent = unit;
8218 dw_die_ref copy;
8219 decl_table_entry **slot = NULL;
8220 struct decl_table_entry *entry = NULL;
8221
8222 if (decl_table)
8223 {
8224 /* Check if the entry has already been copied to UNIT. */
8225 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8226 INSERT);
8227 if (*slot != HTAB_EMPTY_ENTRY)
8228 {
8229 entry = *slot;
8230 return entry->copy;
8231 }
8232
8233 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8234 entry = XCNEW (struct decl_table_entry);
8235 entry->orig = die;
8236 entry->copy = NULL;
8237 *slot = entry;
8238 }
8239
8240 if (parent != NULL)
8241 {
8242 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8243 if (spec != NULL)
8244 parent = spec;
8245 if (!is_unit_die (parent))
8246 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8247 }
8248
8249 copy = clone_as_declaration (die);
8250 add_child_die (new_parent, copy);
8251
8252 if (decl_table)
8253 {
8254 /* Record the pointer to the copy. */
8255 entry->copy = copy;
8256 }
8257
8258 return copy;
8259 }

8260 /* Copy the declaration context to the new type unit DIE. This includes
8261 any surrounding namespace or type declarations. If the DIE has an
8262 AT_specification attribute, it also includes attributes and children
8263 attached to the specification, and returns a pointer to the original
8264 parent of the declaration DIE. Returns NULL otherwise. */
8265
8266 static dw_die_ref
8267 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8268 {
8269 dw_die_ref decl;
8270 dw_die_ref new_decl;
8271 dw_die_ref orig_parent = NULL;
8272
8273 decl = get_AT_ref (die, DW_AT_specification);
8274 if (decl == NULL)
8275 decl = die;
8276 else
8277 {
8278 unsigned ix;
8279 dw_die_ref c;
8280 dw_attr_node *a;
8281
8282 /* The original DIE will be changed to a declaration, and must
8283 be moved to be a child of the original declaration DIE. */
8284 orig_parent = decl->die_parent;
8285
8286 /* Copy the type node pointer from the new DIE to the original
8287 declaration DIE so we can forward references later. */
8288 decl->comdat_type_p = true;
8289 decl->die_id.die_type_node = die->die_id.die_type_node;
8290
8291 remove_AT (die, DW_AT_specification);
8292
8293 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8294 {
8295 if (a->dw_attr != DW_AT_name
8296 && a->dw_attr != DW_AT_declaration
8297 && a->dw_attr != DW_AT_external)
8298 add_dwarf_attr (die, a);
8299 }
8300
8301 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8302 }
8303
8304 if (decl->die_parent != NULL
8305 && !is_unit_die (decl->die_parent))
8306 {
8307 new_decl = copy_ancestor_tree (unit, decl, NULL);
8308 if (new_decl != NULL)
8309 {
8310 remove_AT (new_decl, DW_AT_signature);
8311 add_AT_specification (die, new_decl);
8312 }
8313 }
8314
8315 return orig_parent;
8316 }
8317
8318 /* Generate the skeleton ancestor tree for the given NODE, then clone
8319 the DIE and add the clone into the tree. */
8320
8321 static void
8322 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8323 {
8324 if (node->new_die != NULL)
8325 return;
8326
8327 node->new_die = clone_as_declaration (node->old_die);
8328
8329 if (node->parent != NULL)
8330 {
8331 generate_skeleton_ancestor_tree (node->parent);
8332 add_child_die (node->parent->new_die, node->new_die);
8333 }
8334 }
8335
8336 /* Generate a skeleton tree of DIEs containing any declarations that are
8337 found in the original tree. We traverse the tree looking for declaration
8338 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8339
8340 static void
8341 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8342 {
8343 skeleton_chain_node node;
8344 dw_die_ref c;
8345 dw_die_ref first;
8346 dw_die_ref prev = NULL;
8347 dw_die_ref next = NULL;
8348
8349 node.parent = parent;
8350
8351 first = c = parent->old_die->die_child;
8352 if (c)
8353 next = c->die_sib;
8354 if (c) do {
8355 if (prev == NULL || prev->die_sib == c)
8356 prev = c;
8357 c = next;
8358 next = (c == first ? NULL : c->die_sib);
8359 node.old_die = c;
8360 node.new_die = NULL;
8361 if (is_declaration_die (c))
8362 {
8363 if (is_template_instantiation (c))
8364 {
8365 /* Instantiated templates do not need to be cloned into the
8366 type unit. Just move the DIE and its children back to
8367 the skeleton tree (in the main CU). */
8368 remove_child_with_prev (c, prev);
8369 add_child_die (parent->new_die, c);
8370 c = prev;
8371 }
8372 else if (c->comdat_type_p)
8373 {
8374 /* This is the skeleton of a type broken out earlier by
8375 break_out_comdat_types. Clone the existing DIE, but keep the children
8376 under the original (which is in the main CU). */
8377 dw_die_ref clone = clone_die (c);
8378
8379 replace_child (c, clone, prev);
8380 generate_skeleton_ancestor_tree (parent);
8381 add_child_die (parent->new_die, c);
8382 c = clone;
8383 continue;
8384 }
8385 else
8386 {
8387 /* Clone the existing DIE, move the original to the skeleton
8388 tree (which is in the main CU), and put the clone, with
8389 all the original's children, where the original came from
8390 (which is about to be moved to the type unit). */
8391 dw_die_ref clone = clone_die (c);
8392 move_all_children (c, clone);
8393
8394 /* If the original has a DW_AT_object_pointer attribute,
8395 it would now point to a child DIE just moved to the
8396 cloned tree, so we need to remove that attribute from
8397 the original. */
8398 remove_AT (c, DW_AT_object_pointer);
8399
8400 replace_child (c, clone, prev);
8401 generate_skeleton_ancestor_tree (parent);
8402 add_child_die (parent->new_die, c);
8403 node.old_die = clone;
8404 node.new_die = c;
8405 c = clone;
8406 }
8407 }
8408 generate_skeleton_bottom_up (&node);
8409 } while (next != NULL);
8410 }
8411
8412 /* Wrapper function for generate_skeleton_bottom_up. */
8413
8414 static dw_die_ref
8415 generate_skeleton (dw_die_ref die)
8416 {
8417 skeleton_chain_node node;
8418
8419 node.old_die = die;
8420 node.new_die = NULL;
8421 node.parent = NULL;
8422
8423 /* If this type definition is nested inside another type,
8424 and is not an instantiation of a template, always leave
8425 at least a declaration in its place. */
8426 if (die->die_parent != NULL
8427 && is_type_die (die->die_parent)
8428 && !is_template_instantiation (die))
8429 node.new_die = clone_as_declaration (die);
8430
8431 generate_skeleton_bottom_up (&node);
8432 return node.new_die;
8433 }
8434
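/* Illustrative scenario: when a class whose children include member-function
   declaration DIEs is moved to a type unit, generate_skeleton leaves those
   declaration DIEs behind in the main CU under a skeleton copy of the class,
   so DIEs elsewhere in the main CU that refer to them (e.g. via
   DW_AT_specification) keep their references within the same CU; clones of
   the declarations travel with the class into the type unit.  */
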
8435 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8436 declaration. The original DIE is moved to a new compile unit so that
8437 existing references to it follow it to the new location. If any of the
8438 original DIE's descendants is a declaration, we need to replace the
8439 original DIE with a skeleton tree and move the declarations back into the
8440 skeleton tree. */
8441
8442 static dw_die_ref
8443 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8444 dw_die_ref prev)
8445 {
8446 dw_die_ref skeleton, orig_parent;
8447
8448 /* Copy the declaration context to the type unit DIE. If the returned
8449 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8450 that DIE. */
8451 orig_parent = copy_declaration_context (unit, child);
8452
8453 skeleton = generate_skeleton (child);
8454 if (skeleton == NULL)
8455 remove_child_with_prev (child, prev);
8456 else
8457 {
8458 skeleton->comdat_type_p = true;
8459 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8460
8461 /* If the original DIE was a specification, we need to put
8462 the skeleton under the parent DIE of the declaration.
8463 This leaves the original declaration in the tree, but
8464 it will be pruned later since there are no longer any
8465 references to it. */
8466 if (orig_parent != NULL)
8467 {
8468 remove_child_with_prev (child, prev);
8469 add_child_die (orig_parent, skeleton);
8470 }
8471 else
8472 replace_child (child, skeleton, prev);
8473 }
8474
8475 return skeleton;
8476 }
8477
8478 static void
8479 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8480 comdat_type_node *type_node,
8481 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8482
8483 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8484 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8485 DWARF procedure references in the DW_AT_location attribute. */
8486
8487 static dw_die_ref
8488 copy_dwarf_procedure (dw_die_ref die,
8489 comdat_type_node *type_node,
8490 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8491 {
8492 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8493
8494 /* DWARF procedures are not supposed to have children... */
8495 gcc_assert (die->die_child == NULL);
8496
8497 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8498 gcc_assert (vec_safe_length (die->die_attr) == 1
8499 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8500
8501 /* Do not copy DWARF procedures more than once. */
8502 bool existed;
8503 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8504 if (existed)
8505 return die_copy;
8506
8507 die_copy = clone_die (die);
8508 add_child_die (type_node->root_die, die_copy);
8509 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8510 return die_copy;
8511 }
8512
8513 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8514 procedures in DIE's attributes. */
8515
8516 static void
8517 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8518 comdat_type_node *type_node,
8519 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8520 {
8521 dw_attr_node *a;
8522 unsigned i;
8523
8524 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8525 {
8526 dw_loc_descr_ref loc;
8527
8528 if (a->dw_attr_val.val_class != dw_val_class_loc)
8529 continue;
8530
8531 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8532 {
8533 switch (loc->dw_loc_opc)
8534 {
8535 case DW_OP_call2:
8536 case DW_OP_call4:
8537 case DW_OP_call_ref:
8538 gcc_assert (loc->dw_loc_oprnd1.val_class
8539 == dw_val_class_die_ref);
8540 loc->dw_loc_oprnd1.v.val_die_ref.die
8541 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8542 type_node,
8543 copied_dwarf_procs);
8544 break;
8545 default:
8546 break;
8547 }
8548 }
8549 }
8550 }
8551
8552 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8553 rewrite references to point to the copies.
8554
8555 References are looked for in DIE's attributes and recursively in all its
8556 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8557 mapping from old DWARF procedures to their copy. It is used not to copy
8558 twice the same DWARF procedure under TYPE_NODE. */
8559
8560 static void
8561 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8562 comdat_type_node *type_node,
8563 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8564 {
8565 dw_die_ref c;
8566
8567 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8568 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8569 type_node,
8570 copied_dwarf_procs));
8571 }
8572
8573 /* Traverse the DIE and set up additional .debug_types or .debug_info
8574 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8575 section. */
8576
8577 static void
8578 break_out_comdat_types (dw_die_ref die)
8579 {
8580 dw_die_ref c;
8581 dw_die_ref first;
8582 dw_die_ref prev = NULL;
8583 dw_die_ref next = NULL;
8584 dw_die_ref unit = NULL;
8585
8586 first = c = die->die_child;
8587 if (c)
8588 next = c->die_sib;
8589 if (c) do {
8590 if (prev == NULL || prev->die_sib == c)
8591 prev = c;
8592 c = next;
8593 next = (c == first ? NULL : c->die_sib);
8594 if (should_move_die_to_comdat (c))
8595 {
8596 dw_die_ref replacement;
8597 comdat_type_node *type_node;
8598
8599 /* Break out nested types into their own type units. */
8600 break_out_comdat_types (c);
8601
8602 /* Create a new type unit DIE as the root for the new tree, and
8603 add it to the list of comdat types. */
8604 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8605 add_AT_unsigned (unit, DW_AT_language,
8606 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8607 type_node = ggc_cleared_alloc<comdat_type_node> ();
8608 type_node->root_die = unit;
8609 type_node->next = comdat_type_list;
8610 comdat_type_list = type_node;
8611
8612 /* Generate the type signature. */
8613 generate_type_signature (c, type_node);
8614
8615 /* Copy the declaration context, attributes, and children of the
8616 declaration into the new type unit DIE, then remove this DIE
8617 from the main CU (or replace it with a skeleton if necessary). */
8618 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8619 type_node->skeleton_die = replacement;
8620
8621 /* Add the DIE to the new compunit. */
8622 add_child_die (unit, c);
8623
8624 /* Types can reference DWARF procedures for type size or data location
8625 expressions. Calls in DWARF expressions cannot target procedures
8626 that are not in the same section. So we must copy DWARF procedures
8627 along with this type and then rewrite references to them. */
8628 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8629 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8630
8631 if (replacement != NULL)
8632 c = replacement;
8633 }
8634 else if (c->die_tag == DW_TAG_namespace
8635 || c->die_tag == DW_TAG_class_type
8636 || c->die_tag == DW_TAG_structure_type
8637 || c->die_tag == DW_TAG_union_type)
8638 {
8639 /* Look for nested types that can be broken out. */
8640 break_out_comdat_types (c);
8641 }
8642 } while (next != NULL);
8643 }
8644
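/* End-to-end sketch: for each type accepted by should_move_die_to_comdat,
   a fresh DW_TAG_type_unit DIE inherits the CU's DW_AT_language, a type
   signature is computed, the type (plus any DWARF procedures it refers to)
   is reparented under the new unit, and at most a skeleton carrying the
   same comdat type node is left behind in the main CU.  Another CU that
   sees the same type produces a type unit with the same signature, so the
   linker can keep just one copy.  */
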
8645 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8646 Enter all the cloned children into the hash table decl_table. */
8647
8648 static dw_die_ref
8649 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8650 {
8651 dw_die_ref c;
8652 dw_die_ref clone;
8653 struct decl_table_entry *entry;
8654 decl_table_entry **slot;
8655
8656 if (die->die_tag == DW_TAG_subprogram)
8657 clone = clone_as_declaration (die);
8658 else
8659 clone = clone_die (die);
8660
8661 slot = decl_table->find_slot_with_hash (die,
8662 htab_hash_pointer (die), INSERT);
8663
8664 /* Assert that DIE isn't in the hash table yet. If it were already there,
8665 its ancestors would necessarily be there as well, and therefore
8666 clone_tree_partial wouldn't have been called. */
8667 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8668
8669 entry = XCNEW (struct decl_table_entry);
8670 entry->orig = die;
8671 entry->copy = clone;
8672 *slot = entry;
8673
8674 if (die->die_tag != DW_TAG_subprogram)
8675 FOR_EACH_CHILD (die, c,
8676 add_child_die (clone, clone_tree_partial (c, decl_table)));
8677
8678 return clone;
8679 }
8680
8681 /* Walk the DIE and its children, looking for references to incomplete
8682 or trivial types that are unmarked (i.e., that are not in the current
8683 type_unit). */
8684
8685 static void
8686 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8687 {
8688 dw_die_ref c;
8689 dw_attr_node *a;
8690 unsigned ix;
8691
8692 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8693 {
8694 if (AT_class (a) == dw_val_class_die_ref)
8695 {
8696 dw_die_ref targ = AT_ref (a);
8697 decl_table_entry **slot;
8698 struct decl_table_entry *entry;
8699
8700 if (targ->die_mark != 0 || targ->comdat_type_p)
8701 continue;
8702
8703 slot = decl_table->find_slot_with_hash (targ,
8704 htab_hash_pointer (targ),
8705 INSERT);
8706
8707 if (*slot != HTAB_EMPTY_ENTRY)
8708 {
8709 /* TARG has already been copied, so we just need to
8710 modify the reference to point to the copy. */
8711 entry = *slot;
8712 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8713 }
8714 else
8715 {
8716 dw_die_ref parent = unit;
8717 dw_die_ref copy = clone_die (targ);
8718
8719 /* Record in DECL_TABLE that TARG has been copied.
8720 Need to do this now, before the recursive call,
8721 because DECL_TABLE may be expanded and SLOT
8722 would no longer be a valid pointer. */
8723 entry = XCNEW (struct decl_table_entry);
8724 entry->orig = targ;
8725 entry->copy = copy;
8726 *slot = entry;
8727
8728 /* If TARG is not a declaration DIE, we need to copy its
8729 children. */
8730 if (!is_declaration_die (targ))
8731 {
8732 FOR_EACH_CHILD (
8733 targ, c,
8734 add_child_die (copy,
8735 clone_tree_partial (c, decl_table)));
8736 }
8737
8738 /* Make sure the cloned tree is marked as part of the
8739 type unit. */
8740 mark_dies (copy);
8741
8742 /* If TARG has surrounding context, copy its ancestor tree
8743 into the new type unit. */
8744 if (targ->die_parent != NULL
8745 && !is_unit_die (targ->die_parent))
8746 parent = copy_ancestor_tree (unit, targ->die_parent,
8747 decl_table);
8748
8749 add_child_die (parent, copy);
8750 a->dw_attr_val.v.val_die_ref.die = copy;
8751
8752 /* Make sure the newly-copied DIE is walked. If it was
8753 installed in a previously-added context, it won't
8754 get visited otherwise. */
8755 if (parent != unit)
8756 {
8757 /* Find the highest point of the newly-added tree,
8758 mark each node along the way, and walk from there. */
8759 parent->die_mark = 1;
8760 while (parent->die_parent
8761 && parent->die_parent->die_mark == 0)
8762 {
8763 parent = parent->die_parent;
8764 parent->die_mark = 1;
8765 }
8766 copy_decls_walk (unit, parent, decl_table);
8767 }
8768 }
8769 }
8770 }
8771
8772 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8773 }
8774
8775 /* Copy declarations for "unworthy" types into the new comdat section.
8776 Incomplete types, modified types, and certain other types aren't broken
8777 out into comdat sections of their own, so they don't have a signature,
8778 and we need to copy the declaration into the same section so that we
8779 don't have an external reference. */
8780
8781 static void
8782 copy_decls_for_unworthy_types (dw_die_ref unit)
8783 {
8784 mark_dies (unit);
8785 decl_hash_type decl_table (10);
8786 copy_decls_walk (unit, unit, &decl_table);
8787 unmark_dies (unit);
8788 }
8789
8790 /* Traverse the DIE and add a sibling attribute if it may have the
8791 effect of speeding up access to siblings. To save some space,
8792 avoid generating sibling attributes for DIEs without children. */
8793
8794 static void
8795 add_sibling_attributes (dw_die_ref die)
8796 {
8797 dw_die_ref c;
8798
8799 if (! die->die_child)
8800 return;
8801
8802 if (die->die_parent && die != die->die_parent->die_child)
8803 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8804
8805 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8806 }
8807
8808 /* Output all location lists for the DIE and its children. */
8809
8810 static void
8811 output_location_lists (dw_die_ref die)
8812 {
8813 dw_die_ref c;
8814 dw_attr_node *a;
8815 unsigned ix;
8816
8817 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8818 if (AT_class (a) == dw_val_class_loc_list)
8819 output_loc_list (AT_loc_list (a));
8820
8821 FOR_EACH_CHILD (die, c, output_location_lists (c));
8822 }
8823
8824 /* During assign_location_list_indexes and output_loclists_offsets this is
8825 the current index; afterwards it holds the number of assigned indexes (i.e.
8826 how large the .debug_loclists* offset table should be). */
8827 static unsigned int loc_list_idx;
8828
8829 /* Output all location list offsets for the DIE and its children. */
8830
8831 static void
8832 output_loclists_offsets (dw_die_ref die)
8833 {
8834 dw_die_ref c;
8835 dw_attr_node *a;
8836 unsigned ix;
8837
8838 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8839 if (AT_class (a) == dw_val_class_loc_list)
8840 {
8841 dw_loc_list_ref l = AT_loc_list (a);
8842 if (l->offset_emitted)
8843 continue;
8844 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8845 loc_section_label, NULL);
8846 gcc_assert (l->hash == loc_list_idx);
8847 loc_list_idx++;
8848 l->offset_emitted = true;
8849 }
8850
8851 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8852 }
8853
8854 /* Recursively set indexes of location lists. */
8855
8856 static void
8857 assign_location_list_indexes (dw_die_ref die)
8858 {
8859 dw_die_ref c;
8860 dw_attr_node *a;
8861 unsigned ix;
8862
8863 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8864 if (AT_class (a) == dw_val_class_loc_list)
8865 {
8866 dw_loc_list_ref list = AT_loc_list (a);
8867 if (!list->num_assigned)
8868 {
8869 list->num_assigned = true;
8870 list->hash = loc_list_idx++;
8871 }
8872 }
8873
8874 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8875 }
8876
8877 /* We want to limit the number of external references, because they are
8878 larger than local references: a relocation takes multiple words, and
8879 even a sig8 reference is always eight bytes, whereas a local reference
8880 can be as small as one byte (though a local DIE reference is usually 4 bytes in GCC).
8881 So if we encounter multiple external references to the same type DIE, we
8882 make a local typedef stub for it and redirect all references there.
8883
8884 This is the element of the hash table for keeping track of these
8885 references. */
8886
8887 struct external_ref
8888 {
8889 dw_die_ref type;
8890 dw_die_ref stub;
8891 unsigned n_refs;
8892 };
8893
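/* Illustrative example: if a CU contains many DW_AT_type references to a
   type that lives in a comdat type unit, each reference would otherwise
   need an 8-byte signature.  optimize_external_refs (below) counts those
   references here and, when it has seen more than one, no skeleton stub
   exists yet, and strict DWARF is not requested, builds a single local
   typedef stub carrying the signature and redirects the references to that
   stub, so each reference becomes a small CU-local one.  */
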
8894 /* Hashtable helpers. */
8895
8896 struct external_ref_hasher : free_ptr_hash <external_ref>
8897 {
8898 static inline hashval_t hash (const external_ref *);
8899 static inline bool equal (const external_ref *, const external_ref *);
8900 };
8901
8902 inline hashval_t
8903 external_ref_hasher::hash (const external_ref *r)
8904 {
8905 dw_die_ref die = r->type;
8906 hashval_t h = 0;
8907
8908 /* We can't use the address of the DIE for hashing, because
8909 that will make the order of the stub DIEs non-deterministic. */
8910 if (! die->comdat_type_p)
8911 /* We have a symbol; use it to compute a hash. */
8912 h = htab_hash_string (die->die_id.die_symbol);
8913 else
8914 {
8915 /* We have a type signature; use a subset of the bits as the hash.
8916 The 8-byte signature is at least as large as hashval_t. */
8917 comdat_type_node *type_node = die->die_id.die_type_node;
8918 memcpy (&h, type_node->signature, sizeof (h));
8919 }
8920 return h;
8921 }
8922
8923 inline bool
8924 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8925 {
8926 return r1->type == r2->type;
8927 }
8928
8929 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8930
8931 /* Return a pointer to the external_ref for references to DIE. */
8932
8933 static struct external_ref *
8934 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8935 {
8936 struct external_ref ref, *ref_p;
8937 external_ref **slot;
8938
8939 ref.type = die;
8940 slot = map->find_slot (&ref, INSERT);
8941 if (*slot != HTAB_EMPTY_ENTRY)
8942 return *slot;
8943
8944 ref_p = XCNEW (struct external_ref);
8945 ref_p->type = die;
8946 *slot = ref_p;
8947 return ref_p;
8948 }
8949
8950 /* Subroutine of optimize_external_refs, below.
8951
8952 If we see a type skeleton, record it as our stub. If we see external
8953 references, remember how many we've seen. */
8954
8955 static void
8956 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8957 {
8958 dw_die_ref c;
8959 dw_attr_node *a;
8960 unsigned ix;
8961 struct external_ref *ref_p;
8962
8963 if (is_type_die (die)
8964 && (c = get_AT_ref (die, DW_AT_signature)))
8965 {
8966 /* This is a local skeleton; use it for local references. */
8967 ref_p = lookup_external_ref (map, c);
8968 ref_p->stub = die;
8969 }
8970
8971 /* Scan the DIE references, and remember any that refer to DIEs from
8972 other CUs (i.e. those which are not marked). */
8973 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8974 if (AT_class (a) == dw_val_class_die_ref
8975 && (c = AT_ref (a))->die_mark == 0
8976 && is_type_die (c))
8977 {
8978 ref_p = lookup_external_ref (map, c);
8979 ref_p->n_refs++;
8980 }
8981
8982 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8983 }
8984
8985 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8986 points to an external_ref, DATA is the CU we're processing. If we don't
8987 already have a local stub, and we have multiple refs, build a stub. */
8988
8989 int
8990 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8991 {
8992 struct external_ref *ref_p = *slot;
8993
8994 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8995 {
8996 /* We have multiple references to this type, so build a small stub.
8997 Both of these forms are a bit dodgy from the perspective of the
8998 DWARF standard, since technically they should have names. */
8999 dw_die_ref cu = data;
9000 dw_die_ref type = ref_p->type;
9001 dw_die_ref stub = NULL;
9002
9003 if (type->comdat_type_p)
9004 {
9005 /* If we refer to this type via sig8, use AT_signature. */
9006 stub = new_die (type->die_tag, cu, NULL_TREE);
9007 add_AT_die_ref (stub, DW_AT_signature, type);
9008 }
9009 else
9010 {
9011 /* Otherwise, use a typedef with no name. */
9012 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9013 add_AT_die_ref (stub, DW_AT_type, type);
9014 }
9015
9016 stub->die_mark++;
9017 ref_p->stub = stub;
9018 }
9019 return 1;
9020 }
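
/* Illustration only (simplified, not a verbatim DWARF dump) of the two stub
   flavors built above:

	<same tag as the type, e.g. DW_TAG_structure_type>
	    DW_AT_signature : <8-byte type signature>

	DW_TAG_typedef   (unnamed)
	    DW_AT_type : <reference to the external type DIE>

   Local references then point at the stub with a short DW_FORM_ref*
   encoding instead of paying for a relocation or a sig8 per use.  */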
9021
9022 /* DIE is a unit; look through all the DIE references to see if there are
9023 any external references to types, and if so, create local stubs for
9024 them which will be applied in build_abbrev_table. This is useful because
9025 references to local DIEs are smaller. */
9026
9027 static external_ref_hash_type *
9028 optimize_external_refs (dw_die_ref die)
9029 {
9030 external_ref_hash_type *map = new external_ref_hash_type (10);
9031 optimize_external_refs_1 (die, map);
9032 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9033 return map;
9034 }
9035
9036 /* The following 4 variables are temporaries that are computed only during the
9037 build_abbrev_table call and used and released during the following
9038 optimize_abbrev_table call. */
9039
9040 /* First abbrev_id that can be optimized based on usage. */
9041 static unsigned int abbrev_opt_start;
9042
9043 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9044 abbrev_id smaller than this, because they must be already sized
9045 during build_abbrev_table). */
9046 static unsigned int abbrev_opt_base_type_end;
9047
9048 /* Vector of usage counts during build_abbrev_table. Indexed by
9049 abbrev_id - abbrev_opt_start. */
9050 static vec<unsigned int> abbrev_usage_count;
9051
9052 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9053 static vec<dw_die_ref> sorted_abbrev_dies;
9054
9055 /* The format of each DIE (and its attribute value pairs) is encoded in an
9056 abbreviation table. This routine builds the abbreviation table and assigns
9057 a unique abbreviation id for each abbreviation entry. The children of each
9058 die are visited recursively. */
9059
9060 static void
9061 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9062 {
9063 unsigned int abbrev_id = 0;
9064 dw_die_ref c;
9065 dw_attr_node *a;
9066 unsigned ix;
9067 dw_die_ref abbrev;
9068
9069 /* Scan the DIE references, and replace any that refer to
9070 DIEs from other CUs (i.e. those which are not marked) with
9071 the local stubs we built in optimize_external_refs. */
9072 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9073 if (AT_class (a) == dw_val_class_die_ref
9074 && (c = AT_ref (a))->die_mark == 0)
9075 {
9076 struct external_ref *ref_p;
9077 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9078
9079 ref_p = lookup_external_ref (extern_map, c);
9080 if (ref_p->stub && ref_p->stub != die)
9081 change_AT_die_ref (a, ref_p->stub);
9082 else
9083 /* We aren't changing this reference, so mark it external. */
9084 set_AT_ref_external (a, 1);
9085 }
9086
9087 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9088 {
9089 dw_attr_node *die_a, *abbrev_a;
9090 unsigned ix;
9091 bool ok = true;
9092
9093 if (abbrev_id == 0)
9094 continue;
9095 if (abbrev->die_tag != die->die_tag)
9096 continue;
9097 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9098 continue;
9099
9100 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9101 continue;
9102
9103 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9104 {
9105 abbrev_a = &(*abbrev->die_attr)[ix];
9106 if ((abbrev_a->dw_attr != die_a->dw_attr)
9107 || (value_format (abbrev_a) != value_format (die_a)))
9108 {
9109 ok = false;
9110 break;
9111 }
9112 }
9113 if (ok)
9114 break;
9115 }
9116
9117 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9118 {
9119 vec_safe_push (abbrev_die_table, die);
9120 if (abbrev_opt_start)
9121 abbrev_usage_count.safe_push (0);
9122 }
9123 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9124 {
9125 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9126 sorted_abbrev_dies.safe_push (die);
9127 }
9128
9129 die->die_abbrev = abbrev_id;
9130 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9131 }
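
/* Small worked example (illustrative only): two DIEs such as

	DW_TAG_variable { DW_AT_name/strp, DW_AT_type/ref, DW_AT_location/exprloc }
	DW_TAG_variable { DW_AT_name/strp, DW_AT_type/ref, DW_AT_location/exprloc }

   share the tag, the has-children flag and every attribute/form pair, so
   the matching loop above gives both the same die_abbrev and only one
   entry is pushed onto abbrev_die_table; .debug_abbrev grows once, not
   once per DIE.  */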
9132
9133 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9134 by die_abbrev's usage count, from the most commonly used
9135 abbreviation to the least. */
9136
9137 static int
9138 die_abbrev_cmp (const void *p1, const void *p2)
9139 {
9140 dw_die_ref die1 = *(const dw_die_ref *) p1;
9141 dw_die_ref die2 = *(const dw_die_ref *) p2;
9142
9143 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9144 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9145
9146 if (die1->die_abbrev >= abbrev_opt_base_type_end
9147 && die2->die_abbrev >= abbrev_opt_base_type_end)
9148 {
9149 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9150 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9151 return -1;
9152 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9153 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9154 return 1;
9155 }
9156
9157 /* Stabilize the sort. */
9158 if (die1->die_abbrev < die2->die_abbrev)
9159 return -1;
9160 if (die1->die_abbrev > die2->die_abbrev)
9161 return 1;
9162
9163 return 0;
9164 }
9165
9166 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9167 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9168 into dw_val_class_const_implicit or
9169 dw_val_class_unsigned_const_implicit. */
9170
9171 static void
9172 optimize_implicit_const (unsigned int first_id, unsigned int end,
9173 vec<bool> &implicit_consts)
9174 {
9175 /* It never makes sense if there is just one DIE using the abbreviation. */
9176 if (end < first_id + 2)
9177 return;
9178
9179 dw_attr_node *a;
9180 unsigned ix, i;
9181 dw_die_ref die = sorted_abbrev_dies[first_id];
9182 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9183 if (implicit_consts[ix])
9184 {
9185 enum dw_val_class new_class = dw_val_class_none;
9186 switch (AT_class (a))
9187 {
9188 case dw_val_class_unsigned_const:
9189 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9190 continue;
9191
9192 /* The .debug_abbrev section will grow by
9193 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9194 in all the DIEs using that abbreviation. */
9195 if (constant_size (AT_unsigned (a)) * (end - first_id)
9196 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9197 continue;
9198
9199 new_class = dw_val_class_unsigned_const_implicit;
9200 break;
9201
9202 case dw_val_class_const:
9203 new_class = dw_val_class_const_implicit;
9204 break;
9205
9206 case dw_val_class_file:
9207 new_class = dw_val_class_file_implicit;
9208 break;
9209
9210 default:
9211 continue;
9212 }
9213 for (i = first_id; i < end; i++)
9214 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9215 = new_class;
9216 }
9217 }
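
/* Worked example of the trade-off above (hypothetical numbers): if 100 DIEs
   share one abbreviation and each carries DW_AT_decl_line = 42 as a 1-byte
   unsigned constant, then constant_size (42) * 100 = 100 bytes of
   .debug_info are saved for a single extra sleb128 byte in .debug_abbrev,
   so the guard passes and the attribute becomes
   dw_val_class_unsigned_const_implicit.  */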
9218
9219 /* Attempt to optimize the abbreviation table, from the abbrev_opt_start
9220 abbreviation onwards. */
9221
9222 static void
9223 optimize_abbrev_table (void)
9224 {
9225 if (abbrev_opt_start
9226 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9227 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9228 {
9229 auto_vec<bool, 32> implicit_consts;
9230 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9231
9232 unsigned int abbrev_id = abbrev_opt_start - 1;
9233 unsigned int first_id = ~0U;
9234 unsigned int last_abbrev_id = 0;
9235 unsigned int i;
9236 dw_die_ref die;
9237 if (abbrev_opt_base_type_end > abbrev_opt_start)
9238 abbrev_id = abbrev_opt_base_type_end - 1;
9239 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9240 most commonly used abbreviations come first. */
9241 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9242 {
9243 dw_attr_node *a;
9244 unsigned ix;
9245
9246 /* If calc_base_type_die_sizes has been called, the CU and
9247 base types after it can't be optimized, because we've already
9248 calculated their DIE offsets. We've sorted them first. */
9249 if (die->die_abbrev < abbrev_opt_base_type_end)
9250 continue;
9251 if (die->die_abbrev != last_abbrev_id)
9252 {
9253 last_abbrev_id = die->die_abbrev;
9254 if (dwarf_version >= 5 && first_id != ~0U)
9255 optimize_implicit_const (first_id, i, implicit_consts);
9256 abbrev_id++;
9257 (*abbrev_die_table)[abbrev_id] = die;
9258 if (dwarf_version >= 5)
9259 {
9260 first_id = i;
9261 implicit_consts.truncate (0);
9262
9263 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9264 switch (AT_class (a))
9265 {
9266 case dw_val_class_const:
9267 case dw_val_class_unsigned_const:
9268 case dw_val_class_file:
9269 implicit_consts.safe_push (true);
9270 break;
9271 default:
9272 implicit_consts.safe_push (false);
9273 break;
9274 }
9275 }
9276 }
9277 else if (dwarf_version >= 5)
9278 {
9279 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9280 if (!implicit_consts[ix])
9281 continue;
9282 else
9283 {
9284 dw_attr_node *other_a
9285 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9286 if (!dw_val_equal_p (&a->dw_attr_val,
9287 &other_a->dw_attr_val))
9288 implicit_consts[ix] = false;
9289 }
9290 }
9291 die->die_abbrev = abbrev_id;
9292 }
9293 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9294 if (dwarf_version >= 5 && first_id != ~0U)
9295 optimize_implicit_const (first_id, i, implicit_consts);
9296 }
9297
9298 abbrev_opt_start = 0;
9299 abbrev_opt_base_type_end = 0;
9300 abbrev_usage_count.release ();
9301 sorted_abbrev_dies.release ();
9302 }
9303 \f
9304 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9305
9306 static int
9307 constant_size (unsigned HOST_WIDE_INT value)
9308 {
9309 int log;
9310
9311 if (value == 0)
9312 log = 0;
9313 else
9314 log = floor_log2 (value);
9315
9316 log = log / 8;
9317 log = 1 << (floor_log2 (log) + 1);
9318
9319 return log;
9320 }
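
/* Sanity-check values for the function above, assuming GCC's
   floor_log2 (0) == -1; the result is always 1, 2, 4 or 8:

	constant_size (0)           == 1
	constant_size (0xff)        == 1
	constant_size (0x100)       == 2
	constant_size (0x12345678)  == 4
	constant_size (1ULL << 32)  == 8  */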
9321
9322 /* Return the size of a DIE as it is represented in the
9323 .debug_info section. */
9324
9325 static unsigned long
9326 size_of_die (dw_die_ref die)
9327 {
9328 unsigned long size = 0;
9329 dw_attr_node *a;
9330 unsigned ix;
9331 enum dwarf_form form;
9332
9333 size += size_of_uleb128 (die->die_abbrev);
9334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9335 {
9336 switch (AT_class (a))
9337 {
9338 case dw_val_class_addr:
9339 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9340 {
9341 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9342 size += size_of_uleb128 (AT_index (a));
9343 }
9344 else
9345 size += DWARF2_ADDR_SIZE;
9346 break;
9347 case dw_val_class_offset:
9348 size += DWARF_OFFSET_SIZE;
9349 break;
9350 case dw_val_class_loc:
9351 {
9352 unsigned long lsize = size_of_locs (AT_loc (a));
9353
9354 /* Block length. */
9355 if (dwarf_version >= 4)
9356 size += size_of_uleb128 (lsize);
9357 else
9358 size += constant_size (lsize);
9359 size += lsize;
9360 }
9361 break;
9362 case dw_val_class_loc_list:
9363 case dw_val_class_view_list:
9364 if (dwarf_split_debug_info && dwarf_version >= 5)
9365 {
9366 gcc_assert (AT_loc_list (a)->num_assigned);
9367 size += size_of_uleb128 (AT_loc_list (a)->hash);
9368 }
9369 else
9370 size += DWARF_OFFSET_SIZE;
9371 break;
9372 case dw_val_class_range_list:
9373 if (value_format (a) == DW_FORM_rnglistx)
9374 {
9375 gcc_assert (rnglist_idx);
9376 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9377 size += size_of_uleb128 (r->idx);
9378 }
9379 else
9380 size += DWARF_OFFSET_SIZE;
9381 break;
9382 case dw_val_class_const:
9383 size += size_of_sleb128 (AT_int (a));
9384 break;
9385 case dw_val_class_unsigned_const:
9386 {
9387 int csize = constant_size (AT_unsigned (a));
9388 if (dwarf_version == 3
9389 && a->dw_attr == DW_AT_data_member_location
9390 && csize >= 4)
9391 size += size_of_uleb128 (AT_unsigned (a));
9392 else
9393 size += csize;
9394 }
9395 break;
9396 case dw_val_class_symview:
9397 if (symview_upper_bound <= 0xff)
9398 size += 1;
9399 else if (symview_upper_bound <= 0xffff)
9400 size += 2;
9401 else if (symview_upper_bound <= 0xffffffff)
9402 size += 4;
9403 else
9404 size += 8;
9405 break;
9406 case dw_val_class_const_implicit:
9407 case dw_val_class_unsigned_const_implicit:
9408 case dw_val_class_file_implicit:
9409 /* These occupy no size in the DIE, just an extra sleb128 in
9410 .debug_abbrev. */
9411 break;
9412 case dw_val_class_const_double:
9413 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9414 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9415 size++; /* block */
9416 break;
9417 case dw_val_class_wide_int:
9418 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9419 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9420 if (get_full_len (*a->dw_attr_val.v.val_wide)
9421 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9422 size++; /* block */
9423 break;
9424 case dw_val_class_vec:
9425 size += constant_size (a->dw_attr_val.v.val_vec.length
9426 * a->dw_attr_val.v.val_vec.elt_size)
9427 + a->dw_attr_val.v.val_vec.length
9428 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9429 break;
9430 case dw_val_class_flag:
9431 if (dwarf_version >= 4)
9432 /* Currently all add_AT_flag calls pass in 1 as last argument,
9433 so DW_FORM_flag_present can be used. If that ever changes,
9434 we'll need to use DW_FORM_flag and have some optimization
9435 in build_abbrev_table that will change those to
9436 DW_FORM_flag_present if it is set to 1 in all DIEs using
9437 the same abbrev entry. */
9438 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9439 else
9440 size += 1;
9441 break;
9442 case dw_val_class_die_ref:
9443 if (AT_ref_external (a))
9444 {
9445 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9446 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9447 is sized by target address length, whereas in DWARF3
9448 it's always sized as an offset. */
9449 if (use_debug_types)
9450 size += DWARF_TYPE_SIGNATURE_SIZE;
9451 else if (dwarf_version == 2)
9452 size += DWARF2_ADDR_SIZE;
9453 else
9454 size += DWARF_OFFSET_SIZE;
9455 }
9456 else
9457 size += DWARF_OFFSET_SIZE;
9458 break;
9459 case dw_val_class_fde_ref:
9460 size += DWARF_OFFSET_SIZE;
9461 break;
9462 case dw_val_class_lbl_id:
9463 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9464 {
9465 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9466 size += size_of_uleb128 (AT_index (a));
9467 }
9468 else
9469 size += DWARF2_ADDR_SIZE;
9470 break;
9471 case dw_val_class_lineptr:
9472 case dw_val_class_macptr:
9473 case dw_val_class_loclistsptr:
9474 size += DWARF_OFFSET_SIZE;
9475 break;
9476 case dw_val_class_str:
9477 form = AT_string_form (a);
9478 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9479 size += DWARF_OFFSET_SIZE;
9480 else if (form == dwarf_FORM (DW_FORM_strx))
9481 size += size_of_uleb128 (AT_index (a));
9482 else
9483 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9484 break;
9485 case dw_val_class_file:
9486 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9487 break;
9488 case dw_val_class_data8:
9489 size += 8;
9490 break;
9491 case dw_val_class_vms_delta:
9492 size += DWARF_OFFSET_SIZE;
9493 break;
9494 case dw_val_class_high_pc:
9495 size += DWARF2_ADDR_SIZE;
9496 break;
9497 case dw_val_class_discr_value:
9498 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9499 break;
9500 case dw_val_class_discr_list:
9501 {
9502 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9503
9504 /* This is a block, so we have the block length and then its
9505 data. */
9506 size += constant_size (block_size) + block_size;
9507 }
9508 break;
9509 default:
9510 gcc_unreachable ();
9511 }
9512 }
9513
9514 return size;
9515 }
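
/* Rough example (assuming 4-byte DWARF offsets, DWARF 4+, no split DWARF):
   a DW_TAG_variable DIE with DW_AT_name as DW_FORM_strp (4 bytes),
   DW_AT_decl_file and DW_AT_decl_line as 1-byte constants, a local
   DW_AT_type reference (4 bytes) and a 2-byte DW_AT_location expression
   (uleb128 length 1 byte + 2 bytes) sizes to
   1 (abbrev code) + 4 + 1 + 1 + 4 + 3 = 14 bytes.  */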
9516
9517 /* Size the debugging information associated with a given DIE. Visits the
9518 DIE's children recursively. Updates the global variable next_die_offset on
9519 each call. Uses the current value of next_die_offset to update the
9520 die_offset field in each DIE. */
9521
9522 static void
9523 calc_die_sizes (dw_die_ref die)
9524 {
9525 dw_die_ref c;
9526
9527 gcc_assert (die->die_offset == 0
9528 || (unsigned long int) die->die_offset == next_die_offset);
9529 die->die_offset = next_die_offset;
9530 next_die_offset += size_of_die (die);
9531
9532 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9533
9534 if (die->die_child != NULL)
9535 /* Count the null byte used to terminate sibling lists. */
9536 next_die_offset += 1;
9537 }
9538
9539 /* Size just the base type children at the start of the CU.
9540 This is needed because build_abbrev_table needs to size location
9541 expressions, and sizing of type-based stack ops needs to know die_offset
9542 values for the base types. */
9543
9544 static void
9545 calc_base_type_die_sizes (void)
9546 {
9547 unsigned long die_offset = (dwarf_split_debug_info
9548 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9549 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9550 unsigned int i;
9551 dw_die_ref base_type;
9552 #if ENABLE_ASSERT_CHECKING
9553 dw_die_ref prev = comp_unit_die ()->die_child;
9554 #endif
9555
9556 die_offset += size_of_die (comp_unit_die ());
9557 for (i = 0; base_types.iterate (i, &base_type); i++)
9558 {
9559 #if ENABLE_ASSERT_CHECKING
9560 gcc_assert (base_type->die_offset == 0
9561 && prev->die_sib == base_type
9562 && base_type->die_child == NULL
9563 && base_type->die_abbrev);
9564 prev = base_type;
9565 #endif
9566 if (abbrev_opt_start
9567 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9568 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9569 base_type->die_offset = die_offset;
9570 die_offset += size_of_die (base_type);
9571 }
9572 }
9573
9574 /* Set the marks for a die and its children. We do this so
9575 that we know whether or not a reference needs to use FORM_ref_addr; only
9576 DIEs in the same CU will be marked. We used to clear out the offset
9577 and use that as the flag, but ran into ordering problems. */
9578
9579 static void
9580 mark_dies (dw_die_ref die)
9581 {
9582 dw_die_ref c;
9583
9584 gcc_assert (!die->die_mark);
9585
9586 die->die_mark = 1;
9587 FOR_EACH_CHILD (die, c, mark_dies (c));
9588 }
9589
9590 /* Clear the marks for a die and its children. */
9591
9592 static void
9593 unmark_dies (dw_die_ref die)
9594 {
9595 dw_die_ref c;
9596
9597 if (! use_debug_types)
9598 gcc_assert (die->die_mark);
9599
9600 die->die_mark = 0;
9601 FOR_EACH_CHILD (die, c, unmark_dies (c));
9602 }
9603
9604 /* Clear the marks for a die, its children and referred dies. */
9605
9606 static void
9607 unmark_all_dies (dw_die_ref die)
9608 {
9609 dw_die_ref c;
9610 dw_attr_node *a;
9611 unsigned ix;
9612
9613 if (!die->die_mark)
9614 return;
9615 die->die_mark = 0;
9616
9617 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9618
9619 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9620 if (AT_class (a) == dw_val_class_die_ref)
9621 unmark_all_dies (AT_ref (a));
9622 }
9623
9624 /* Calculate if the entry should appear in the final output file. It may be
9625 from a pruned type. */
9626
9627 static bool
9628 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9629 {
9630 /* By limiting gnu pubnames to definitions only, gold can generate a
9631 gdb index without entries for declarations, which don't include
9632 enough information to be useful. */
9633 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9634 return false;
9635
9636 if (table == pubname_table)
9637 {
9638 /* Enumerator names are part of the pubname table, but the
9639 parent DW_TAG_enumeration_type die may have been pruned.
9640 Don't output them if that is the case. */
9641 if (p->die->die_tag == DW_TAG_enumerator &&
9642 (p->die->die_parent == NULL
9643 || !p->die->die_parent->die_perennial_p))
9644 return false;
9645
9646 /* Everything else in the pubname table is included. */
9647 return true;
9648 }
9649
9650 /* The pubtypes table shouldn't include types that have been
9651 pruned. */
9652 return (p->die->die_offset != 0
9653 || !flag_eliminate_unused_debug_types);
9654 }
9655
9656 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9657 generated for the compilation unit. */
9658
9659 static unsigned long
9660 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9661 {
9662 unsigned long size;
9663 unsigned i;
9664 pubname_entry *p;
9665 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9666
9667 size = DWARF_PUBNAMES_HEADER_SIZE;
9668 FOR_EACH_VEC_ELT (*names, i, p)
9669 if (include_pubname_in_output (names, p))
9670 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9671
9672 size += DWARF_OFFSET_SIZE;
9673 return size;
9674 }
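
/* Example entry size (hypothetical, 4-byte offsets): the pubname "main"
   costs 4 bytes for the DIE offset plus strlen ("main") + 1 = 5 bytes for
   the NUL-terminated string, i.e. 9 bytes, plus one flag byte when
   debug_generate_pub_sections == 2 (GNU pubnames).  */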
9675
9676 /* Return the size of the information in the .debug_aranges section. */
9677
9678 static unsigned long
9679 size_of_aranges (void)
9680 {
9681 unsigned long size;
9682
9683 size = DWARF_ARANGES_HEADER_SIZE;
9684
9685 /* Count the address/length pair for this compilation unit. */
9686 if (text_section_used)
9687 size += 2 * DWARF2_ADDR_SIZE;
9688 if (cold_text_section_used)
9689 size += 2 * DWARF2_ADDR_SIZE;
9690 if (have_multiple_function_sections)
9691 {
9692 unsigned fde_idx;
9693 dw_fde_ref fde;
9694
9695 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9696 {
9697 if (DECL_IGNORED_P (fde->decl))
9698 continue;
9699 if (!fde->in_std_section)
9700 size += 2 * DWARF2_ADDR_SIZE;
9701 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9702 size += 2 * DWARF2_ADDR_SIZE;
9703 }
9704 }
9705
9706 /* Count the two zero words used to terminate the address range table. */
9707 size += 2 * DWARF2_ADDR_SIZE;
9708 return size;
9709 }
9710 \f
9711 /* Select the encoding of an attribute value. */
9712
9713 static enum dwarf_form
9714 value_format (dw_attr_node *a)
9715 {
9716 switch (AT_class (a))
9717 {
9718 case dw_val_class_addr:
9719 /* Only very few attributes allow DW_FORM_addr. */
9720 switch (a->dw_attr)
9721 {
9722 case DW_AT_low_pc:
9723 case DW_AT_high_pc:
9724 case DW_AT_entry_pc:
9725 case DW_AT_trampoline:
9726 return (AT_index (a) == NOT_INDEXED
9727 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9728 default:
9729 break;
9730 }
9731 switch (DWARF2_ADDR_SIZE)
9732 {
9733 case 1:
9734 return DW_FORM_data1;
9735 case 2:
9736 return DW_FORM_data2;
9737 case 4:
9738 return DW_FORM_data4;
9739 case 8:
9740 return DW_FORM_data8;
9741 default:
9742 gcc_unreachable ();
9743 }
9744 case dw_val_class_loc_list:
9745 case dw_val_class_view_list:
9746 if (dwarf_split_debug_info
9747 && dwarf_version >= 5
9748 && AT_loc_list (a)->num_assigned)
9749 return DW_FORM_loclistx;
9750 /* FALLTHRU */
9751 case dw_val_class_range_list:
9752 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9753 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9754 care about sizes of .debug* sections in shared libraries and
9755 executables and don't take into account relocations that affect just
9756 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9757 table in the .debug_rnglists section. */
9758 if (dwarf_split_debug_info
9759 && dwarf_version >= 5
9760 && AT_class (a) == dw_val_class_range_list
9761 && rnglist_idx
9762 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9763 return DW_FORM_rnglistx;
9764 if (dwarf_version >= 4)
9765 return DW_FORM_sec_offset;
9766 /* FALLTHRU */
9767 case dw_val_class_vms_delta:
9768 case dw_val_class_offset:
9769 switch (DWARF_OFFSET_SIZE)
9770 {
9771 case 4:
9772 return DW_FORM_data4;
9773 case 8:
9774 return DW_FORM_data8;
9775 default:
9776 gcc_unreachable ();
9777 }
9778 case dw_val_class_loc:
9779 if (dwarf_version >= 4)
9780 return DW_FORM_exprloc;
9781 switch (constant_size (size_of_locs (AT_loc (a))))
9782 {
9783 case 1:
9784 return DW_FORM_block1;
9785 case 2:
9786 return DW_FORM_block2;
9787 case 4:
9788 return DW_FORM_block4;
9789 default:
9790 gcc_unreachable ();
9791 }
9792 case dw_val_class_const:
9793 return DW_FORM_sdata;
9794 case dw_val_class_unsigned_const:
9795 switch (constant_size (AT_unsigned (a)))
9796 {
9797 case 1:
9798 return DW_FORM_data1;
9799 case 2:
9800 return DW_FORM_data2;
9801 case 4:
9802 /* In DWARF3 DW_AT_data_member_location with
9803 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9804 constant, so we need to use DW_FORM_udata if we need
9805 a large constant. */
9806 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9807 return DW_FORM_udata;
9808 return DW_FORM_data4;
9809 case 8:
9810 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9811 return DW_FORM_udata;
9812 return DW_FORM_data8;
9813 default:
9814 gcc_unreachable ();
9815 }
9816 case dw_val_class_const_implicit:
9817 case dw_val_class_unsigned_const_implicit:
9818 case dw_val_class_file_implicit:
9819 return DW_FORM_implicit_const;
9820 case dw_val_class_const_double:
9821 switch (HOST_BITS_PER_WIDE_INT)
9822 {
9823 case 8:
9824 return DW_FORM_data2;
9825 case 16:
9826 return DW_FORM_data4;
9827 case 32:
9828 return DW_FORM_data8;
9829 case 64:
9830 if (dwarf_version >= 5)
9831 return DW_FORM_data16;
9832 /* FALLTHRU */
9833 default:
9834 return DW_FORM_block1;
9835 }
9836 case dw_val_class_wide_int:
9837 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9838 {
9839 case 8:
9840 return DW_FORM_data1;
9841 case 16:
9842 return DW_FORM_data2;
9843 case 32:
9844 return DW_FORM_data4;
9845 case 64:
9846 return DW_FORM_data8;
9847 case 128:
9848 if (dwarf_version >= 5)
9849 return DW_FORM_data16;
9850 /* FALLTHRU */
9851 default:
9852 return DW_FORM_block1;
9853 }
9854 case dw_val_class_symview:
9855 /* ??? We might use uleb128, but then we'd have to compute
9856 .debug_info offsets in the assembler. */
9857 if (symview_upper_bound <= 0xff)
9858 return DW_FORM_data1;
9859 else if (symview_upper_bound <= 0xffff)
9860 return DW_FORM_data2;
9861 else if (symview_upper_bound <= 0xffffffff)
9862 return DW_FORM_data4;
9863 else
9864 return DW_FORM_data8;
9865 case dw_val_class_vec:
9866 switch (constant_size (a->dw_attr_val.v.val_vec.length
9867 * a->dw_attr_val.v.val_vec.elt_size))
9868 {
9869 case 1:
9870 return DW_FORM_block1;
9871 case 2:
9872 return DW_FORM_block2;
9873 case 4:
9874 return DW_FORM_block4;
9875 default:
9876 gcc_unreachable ();
9877 }
9878 case dw_val_class_flag:
9879 if (dwarf_version >= 4)
9880 {
9881 /* Currently all add_AT_flag calls pass in 1 as last argument,
9882 so DW_FORM_flag_present can be used. If that ever changes,
9883 we'll need to use DW_FORM_flag and have some optimization
9884 in build_abbrev_table that will change those to
9885 DW_FORM_flag_present if it is set to 1 in all DIEs using
9886 the same abbrev entry. */
9887 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9888 return DW_FORM_flag_present;
9889 }
9890 return DW_FORM_flag;
9891 case dw_val_class_die_ref:
9892 if (AT_ref_external (a))
9893 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9894 else
9895 return DW_FORM_ref;
9896 case dw_val_class_fde_ref:
9897 return DW_FORM_data;
9898 case dw_val_class_lbl_id:
9899 return (AT_index (a) == NOT_INDEXED
9900 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9901 case dw_val_class_lineptr:
9902 case dw_val_class_macptr:
9903 case dw_val_class_loclistsptr:
9904 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9905 case dw_val_class_str:
9906 return AT_string_form (a);
9907 case dw_val_class_file:
9908 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9909 {
9910 case 1:
9911 return DW_FORM_data1;
9912 case 2:
9913 return DW_FORM_data2;
9914 case 4:
9915 return DW_FORM_data4;
9916 default:
9917 gcc_unreachable ();
9918 }
9919
9920 case dw_val_class_data8:
9921 return DW_FORM_data8;
9922
9923 case dw_val_class_high_pc:
9924 switch (DWARF2_ADDR_SIZE)
9925 {
9926 case 1:
9927 return DW_FORM_data1;
9928 case 2:
9929 return DW_FORM_data2;
9930 case 4:
9931 return DW_FORM_data4;
9932 case 8:
9933 return DW_FORM_data8;
9934 default:
9935 gcc_unreachable ();
9936 }
9937
9938 case dw_val_class_discr_value:
9939 return (a->dw_attr_val.v.val_discr_value.pos
9940 ? DW_FORM_udata
9941 : DW_FORM_sdata);
9942 case dw_val_class_discr_list:
9943 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9944 {
9945 case 1:
9946 return DW_FORM_block1;
9947 case 2:
9948 return DW_FORM_block2;
9949 case 4:
9950 return DW_FORM_block4;
9951 default:
9952 gcc_unreachable ();
9953 }
9954
9955 default:
9956 gcc_unreachable ();
9957 }
9958 }
9959
9960 /* Output the encoding of an attribute value. */
9961
9962 static void
9963 output_value_format (dw_attr_node *a)
9964 {
9965 enum dwarf_form form = value_format (a);
9966
9967 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9968 }
9969
9970 /* Given a die and id, produce the appropriate abbreviations. */
9971
9972 static void
9973 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9974 {
9975 unsigned ix;
9976 dw_attr_node *a_attr;
9977
9978 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9979 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9980 dwarf_tag_name (abbrev->die_tag));
9981
9982 if (abbrev->die_child != NULL)
9983 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9984 else
9985 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9986
9987 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9988 {
9989 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9990 dwarf_attr_name (a_attr->dw_attr));
9991 output_value_format (a_attr);
9992 if (value_format (a_attr) == DW_FORM_implicit_const)
9993 {
9994 if (AT_class (a_attr) == dw_val_class_file_implicit)
9995 {
9996 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9997 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9998 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9999 }
10000 else
10001 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10002 }
10003 }
10004
10005 dw2_asm_output_data (1, 0, NULL);
10006 dw2_asm_output_data (1, 0, NULL);
10007 }
10008
10009
10010 /* Output the .debug_abbrev section which defines the DIE abbreviation
10011 table. */
10012
10013 static void
10014 output_abbrev_section (void)
10015 {
10016 unsigned int abbrev_id;
10017 dw_die_ref abbrev;
10018
10019 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10020 if (abbrev_id != 0)
10021 output_die_abbrevs (abbrev_id, abbrev);
10022
10023 /* Terminate the table. */
10024 dw2_asm_output_data (1, 0, NULL);
10025 }
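
/* Illustration only (not verbatim output; the comment character depends on
   ASM_COMMENT_START): a minimal abbreviation might be emitted as

	.uleb128 0x1	# (abbrev code)
	.uleb128 0x34	# (TAG: DW_TAG_variable)
	.byte	0	# DW_children_no
	.uleb128 0x3	# (DW_AT_name)
	.uleb128 0xe	# (DW_FORM_strp)
	.byte	0	# terminate the attribute list
	.byte	0
	.byte	0	# terminate the abbrev table  */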
10026
10027 /* Return a new location list, given the begin and end range, and the
10028 expression. */
10029
10030 static inline dw_loc_list_ref
10031 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10032 const char *end, var_loc_view vend,
10033 const char *section)
10034 {
10035 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10036
10037 retlist->begin = begin;
10038 retlist->begin_entry = NULL;
10039 retlist->end = end;
10040 retlist->expr = expr;
10041 retlist->section = section;
10042 retlist->vbegin = vbegin;
10043 retlist->vend = vend;
10044
10045 return retlist;
10046 }
10047
10048 /* Return true iff there's any nonzero view number in the loc list.
10049
10050 ??? When views are not enabled, we'll often extend a single range
10051 to the entire function, so that we emit a single location
10052 expression rather than a location list. With views, even with a
10053 single range, we'll output a list if start or end have a nonzero
10054 view. If we change this, we may want to stop splitting a single
10055 range in dw_loc_list just because of a nonzero view, even if it
10056 straddles across hot/cold partitions. */
10057
10058 static bool
10059 loc_list_has_views (dw_loc_list_ref list)
10060 {
10061 if (!debug_variable_location_views)
10062 return false;
10063
10064 for (dw_loc_list_ref loc = list;
10065 loc != NULL; loc = loc->dw_loc_next)
10066 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10067 return true;
10068
10069 return false;
10070 }
10071
10072 /* Generate a new internal symbol for this location list node, if it
10073 hasn't got one yet. */
10074
10075 static inline void
10076 gen_llsym (dw_loc_list_ref list)
10077 {
10078 gcc_assert (!list->ll_symbol);
10079 list->ll_symbol = gen_internal_sym ("LLST");
10080
10081 if (!loc_list_has_views (list))
10082 return;
10083
10084 if (dwarf2out_locviews_in_attribute ())
10085 {
10086 /* Use the same label_num for the view list. */
10087 label_num--;
10088 list->vl_symbol = gen_internal_sym ("LVUS");
10089 }
10090 else
10091 list->vl_symbol = list->ll_symbol;
10092 }
10093
10094 /* Generate a symbol for the list, but only if we really want to emit
10095 it as a list. */
10096
10097 static inline void
10098 maybe_gen_llsym (dw_loc_list_ref list)
10099 {
10100 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10101 return;
10102
10103 gen_llsym (list);
10104 }
10105
10106 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10107 NULL, don't consider size of the location expression. If we're not
10108 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10109 representation in *SIZEP. */
10110
10111 static bool
10112 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10113 {
10114 /* Don't output an entry that starts and ends at the same address. */
10115 if (strcmp (curr->begin, curr->end) == 0
10116 && curr->vbegin == curr->vend && !curr->force)
10117 return true;
10118
10119 if (!sizep)
10120 return false;
10121
10122 unsigned long size = size_of_locs (curr->expr);
10123
10124 /* If the expression is too large, drop it on the floor. We could
10125 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10126 in the expression, but >= 64KB expressions for a single value
10127 in a single range are unlikely to be very useful. */
10128 if (dwarf_version < 5 && size > 0xffff)
10129 return true;
10130
10131 *sizep = size;
10132
10133 return false;
10134 }
10135
10136 /* Output a view pair loclist entry for CURR, if it requires one. */
10137
10138 static void
10139 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10140 {
10141 if (!dwarf2out_locviews_in_loclist ())
10142 return;
10143
10144 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10145 return;
10146
10147 #ifdef DW_LLE_view_pair
10148 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10149
10150 if (dwarf2out_as_locview_support)
10151 {
10152 if (ZERO_VIEW_P (curr->vbegin))
10153 dw2_asm_output_data_uleb128 (0, "Location view begin");
10154 else
10155 {
10156 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10157 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10158 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10159 }
10160
10161 if (ZERO_VIEW_P (curr->vend))
10162 dw2_asm_output_data_uleb128 (0, "Location view end");
10163 else
10164 {
10165 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10166 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10167 dw2_asm_output_symname_uleb128 (label, "Location view end");
10168 }
10169 }
10170 else
10171 {
10172 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10173 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10174 }
10175 #endif /* DW_LLE_view_pair */
10176
10177 return;
10178 }
10179
10180 /* Output the location list given to us. */
10181
10182 static void
10183 output_loc_list (dw_loc_list_ref list_head)
10184 {
10185 int vcount = 0, lcount = 0;
10186
10187 if (list_head->emitted)
10188 return;
10189 list_head->emitted = true;
10190
10191 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10192 {
10193 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10194
10195 for (dw_loc_list_ref curr = list_head; curr != NULL;
10196 curr = curr->dw_loc_next)
10197 {
10198 unsigned long size;
10199
10200 if (skip_loc_list_entry (curr, &size))
10201 continue;
10202
10203 vcount++;
10204
10205 /* ?? dwarf_split_debug_info? */
10206 if (dwarf2out_as_locview_support)
10207 {
10208 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10209
10210 if (!ZERO_VIEW_P (curr->vbegin))
10211 {
10212 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10213 dw2_asm_output_symname_uleb128 (label,
10214 "View list begin (%s)",
10215 list_head->vl_symbol);
10216 }
10217 else
10218 dw2_asm_output_data_uleb128 (0,
10219 "View list begin (%s)",
10220 list_head->vl_symbol);
10221
10222 if (!ZERO_VIEW_P (curr->vend))
10223 {
10224 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10225 dw2_asm_output_symname_uleb128 (label,
10226 "View list end (%s)",
10227 list_head->vl_symbol);
10228 }
10229 else
10230 dw2_asm_output_data_uleb128 (0,
10231 "View list end (%s)",
10232 list_head->vl_symbol);
10233 }
10234 else
10235 {
10236 dw2_asm_output_data_uleb128 (curr->vbegin,
10237 "View list begin (%s)",
10238 list_head->vl_symbol);
10239 dw2_asm_output_data_uleb128 (curr->vend,
10240 "View list end (%s)",
10241 list_head->vl_symbol);
10242 }
10243 }
10244 }
10245
10246 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10247
10248 const char *last_section = NULL;
10249 const char *base_label = NULL;
10250
10251 /* Walk the location list, and output each range + expression. */
10252 for (dw_loc_list_ref curr = list_head; curr != NULL;
10253 curr = curr->dw_loc_next)
10254 {
10255 unsigned long size;
10256
10257 /* Skip this entry? If we skip it here, we must skip it in the
10258 view list above as well. */
10259 if (skip_loc_list_entry (curr, &size))
10260 continue;
10261
10262 lcount++;
10263
10264 if (dwarf_version >= 5)
10265 {
10266 if (dwarf_split_debug_info)
10267 {
10268 dwarf2out_maybe_output_loclist_view_pair (curr);
10269 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10270 uleb128 index into .debug_addr and uleb128 length. */
10271 dw2_asm_output_data (1, DW_LLE_startx_length,
10272 "DW_LLE_startx_length (%s)",
10273 list_head->ll_symbol);
10274 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10275 "Location list range start index "
10276 "(%s)", curr->begin);
10277 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10278 For that case we probably need to emit DW_LLE_startx_endx,
10279 but we'd need 2 .debug_addr entries rather than just one. */
10280 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10281 "Location list length (%s)",
10282 list_head->ll_symbol);
10283 }
10284 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10285 {
10286 dwarf2out_maybe_output_loclist_view_pair (curr);
10287 /* If all code is in .text section, the base address is
10288 already provided by the CU attributes. Use
10289 DW_LLE_offset_pair where both addresses are uleb128 encoded
10290 offsets against that base. */
10291 dw2_asm_output_data (1, DW_LLE_offset_pair,
10292 "DW_LLE_offset_pair (%s)",
10293 list_head->ll_symbol);
10294 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10295 "Location list begin address (%s)",
10296 list_head->ll_symbol);
10297 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10298 "Location list end address (%s)",
10299 list_head->ll_symbol);
10300 }
10301 else if (HAVE_AS_LEB128)
10302 {
10303 /* Otherwise, find out how many consecutive entries could share
10304 the same base entry. If just one, emit DW_LLE_start_length,
10305 otherwise emit DW_LLE_base_address for the base address
10306 followed by a series of DW_LLE_offset_pair. */
10307 if (last_section == NULL || curr->section != last_section)
10308 {
10309 dw_loc_list_ref curr2;
10310 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10311 curr2 = curr2->dw_loc_next)
10312 {
10313 if (strcmp (curr2->begin, curr2->end) == 0
10314 && !curr2->force)
10315 continue;
10316 break;
10317 }
10318 if (curr2 == NULL || curr->section != curr2->section)
10319 last_section = NULL;
10320 else
10321 {
10322 last_section = curr->section;
10323 base_label = curr->begin;
10324 dw2_asm_output_data (1, DW_LLE_base_address,
10325 "DW_LLE_base_address (%s)",
10326 list_head->ll_symbol);
10327 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10328 "Base address (%s)",
10329 list_head->ll_symbol);
10330 }
10331 }
10332 /* Only one entry with the same base address. Use
10333 DW_LLE_start_length with absolute address and uleb128
10334 length. */
10335 if (last_section == NULL)
10336 {
10337 dwarf2out_maybe_output_loclist_view_pair (curr);
10338 dw2_asm_output_data (1, DW_LLE_start_length,
10339 "DW_LLE_start_length (%s)",
10340 list_head->ll_symbol);
10341 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10342 "Location list begin address (%s)",
10343 list_head->ll_symbol);
10344 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10345 "Location list length "
10346 "(%s)", list_head->ll_symbol);
10347 }
10348 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10349 DW_LLE_base_address. */
10350 else
10351 {
10352 dwarf2out_maybe_output_loclist_view_pair (curr);
10353 dw2_asm_output_data (1, DW_LLE_offset_pair,
10354 "DW_LLE_offset_pair (%s)",
10355 list_head->ll_symbol);
10356 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10357 "Location list begin address "
10358 "(%s)", list_head->ll_symbol);
10359 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10360 "Location list end address "
10361 "(%s)", list_head->ll_symbol);
10362 }
10363 }
10364 /* The assembler does not support the .uleb128 directive. Emit
10365 DW_LLE_start_end with a pair of absolute addresses. */
10366 else
10367 {
10368 dwarf2out_maybe_output_loclist_view_pair (curr);
10369 dw2_asm_output_data (1, DW_LLE_start_end,
10370 "DW_LLE_start_end (%s)",
10371 list_head->ll_symbol);
10372 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10373 "Location list begin address (%s)",
10374 list_head->ll_symbol);
10375 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10376 "Location list end address (%s)",
10377 list_head->ll_symbol);
10378 }
10379 }
10380 else if (dwarf_split_debug_info)
10381 {
10382 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10383 and 4 byte length. */
10384 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10385 "Location list start/length entry (%s)",
10386 list_head->ll_symbol);
10387 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10388 "Location list range start index (%s)",
10389 curr->begin);
10390 /* The length field is 4 bytes. If we ever need to support
10391 an 8-byte length, we can add a new DW_LLE code or fall back
10392 to DW_LLE_GNU_start_end_entry. */
10393 dw2_asm_output_delta (4, curr->end, curr->begin,
10394 "Location list range length (%s)",
10395 list_head->ll_symbol);
10396 }
10397 else if (!have_multiple_function_sections)
10398 {
10399 /* Pair of relative addresses against start of text section. */
10400 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10401 "Location list begin address (%s)",
10402 list_head->ll_symbol);
10403 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10404 "Location list end address (%s)",
10405 list_head->ll_symbol);
10406 }
10407 else
10408 {
10409 /* Pair of absolute addresses. */
10410 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10411 "Location list begin address (%s)",
10412 list_head->ll_symbol);
10413 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10414 "Location list end address (%s)",
10415 list_head->ll_symbol);
10416 }
10417
10418 /* Output the block length for this list of location operations. */
10419 if (dwarf_version >= 5)
10420 dw2_asm_output_data_uleb128 (size, "Location expression size");
10421 else
10422 {
10423 gcc_assert (size <= 0xffff);
10424 dw2_asm_output_data (2, size, "Location expression size");
10425 }
10426
10427 output_loc_sequence (curr->expr, -1);
10428 }
10429
10430 /* And finally list termination. */
10431 if (dwarf_version >= 5)
10432 dw2_asm_output_data (1, DW_LLE_end_of_list,
10433 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10434 else if (dwarf_split_debug_info)
10435 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10436 "Location list terminator (%s)",
10437 list_head->ll_symbol);
10438 else
10439 {
10440 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10441 "Location list terminator begin (%s)",
10442 list_head->ll_symbol);
10443 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10444 "Location list terminator end (%s)",
10445 list_head->ll_symbol);
10446 }
10447
10448 gcc_assert (!list_head->vl_symbol
10449 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10450 }
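
/* Illustration only (DWARF 5, single text section, leb128-capable
   assembler; not verbatim output): a two-range list comes out roughly as

	DW_LLE_offset_pair, uleb(begin1 - base), uleb(end1 - base),
	  uleb(expr size), <location expression bytes>
	DW_LLE_offset_pair, uleb(begin2 - base), uleb(end2 - base),
	  uleb(expr size), <location expression bytes>
	DW_LLE_end_of_list

   where the base address is the one already given by the CU's low_pc.  */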
10451
10452 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10453 section. Emit a relocated reference if val_entry is NULL; otherwise,
10454 emit an indirect reference. */
10455
10456 static void
10457 output_range_list_offset (dw_attr_node *a)
10458 {
10459 const char *name = dwarf_attr_name (a->dw_attr);
10460
10461 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10462 {
10463 if (dwarf_version >= 5)
10464 {
10465 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10466 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10467 debug_ranges_section, "%s", name);
10468 }
10469 else
10470 {
10471 char *p = strchr (ranges_section_label, '\0');
10472 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10473 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10474 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10475 debug_ranges_section, "%s", name);
10476 *p = '\0';
10477 }
10478 }
10479 else if (dwarf_version >= 5)
10480 {
10481 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10482 gcc_assert (rnglist_idx);
10483 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10484 }
10485 else
10486 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10487 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10488 "%s (offset from %s)", name, ranges_section_label);
10489 }
10490
10491 /* Output the offset into the debug_loc section. */
10492
10493 static void
10494 output_loc_list_offset (dw_attr_node *a)
10495 {
10496 char *sym = AT_loc_list (a)->ll_symbol;
10497
10498 gcc_assert (sym);
10499 if (!dwarf_split_debug_info)
10500 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10501 "%s", dwarf_attr_name (a->dw_attr));
10502 else if (dwarf_version >= 5)
10503 {
10504 gcc_assert (AT_loc_list (a)->num_assigned);
10505 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10506 dwarf_attr_name (a->dw_attr),
10507 sym);
10508 }
10509 else
10510 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10511 "%s", dwarf_attr_name (a->dw_attr));
10512 }
10513
10514 /* Output the view list's offset into the debug_loc section. */
10515
10516 static void
10517 output_view_list_offset (dw_attr_node *a)
10518 {
10519 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10520
10521 gcc_assert (sym);
10522 if (dwarf_split_debug_info)
10523 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10524 "%s", dwarf_attr_name (a->dw_attr));
10525 else
10526 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10527 "%s", dwarf_attr_name (a->dw_attr));
10528 }
10529
10530 /* Output an attribute's index or value appropriately. */
10531
10532 static void
10533 output_attr_index_or_value (dw_attr_node *a)
10534 {
10535 const char *name = dwarf_attr_name (a->dw_attr);
10536
10537 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10538 {
10539 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10540 return;
10541 }
10542 switch (AT_class (a))
10543 {
10544 case dw_val_class_addr:
10545 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10546 break;
10547 case dw_val_class_high_pc:
10548 case dw_val_class_lbl_id:
10549 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10550 break;
10551 default:
10552 gcc_unreachable ();
10553 }
10554 }
10555
10556 /* Output a type signature. */
10557
10558 static inline void
10559 output_signature (const char *sig, const char *name)
10560 {
10561 int i;
10562
10563 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10564 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10565 }
10566
10567 /* Output a discriminant value. */
10568
10569 static inline void
10570 output_discr_value (dw_discr_value *discr_value, const char *name)
10571 {
10572 if (discr_value->pos)
10573 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10574 else
10575 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10576 }
10577
10578 /* Output the DIE and its attributes. Called recursively to generate
10579 the definitions of each child DIE. */
10580
10581 static void
10582 output_die (dw_die_ref die)
10583 {
10584 dw_attr_node *a;
10585 dw_die_ref c;
10586 unsigned long size;
10587 unsigned ix;
10588
10589 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10590 (unsigned long)die->die_offset,
10591 dwarf_tag_name (die->die_tag));
10592
10593 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10594 {
10595 const char *name = dwarf_attr_name (a->dw_attr);
10596
10597 switch (AT_class (a))
10598 {
10599 case dw_val_class_addr:
10600 output_attr_index_or_value (a);
10601 break;
10602
10603 case dw_val_class_offset:
10604 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10605 "%s", name);
10606 break;
10607
10608 case dw_val_class_range_list:
10609 output_range_list_offset (a);
10610 break;
10611
10612 case dw_val_class_loc:
10613 size = size_of_locs (AT_loc (a));
10614
10615 /* Output the block length for this list of location operations. */
10616 if (dwarf_version >= 4)
10617 dw2_asm_output_data_uleb128 (size, "%s", name);
10618 else
10619 dw2_asm_output_data (constant_size (size), size, "%s", name);
10620
10621 output_loc_sequence (AT_loc (a), -1);
10622 break;
10623
10624 case dw_val_class_const:
10625 /* ??? It would be slightly more efficient to use a scheme like is
10626 used for unsigned constants below, but gdb 4.x does not sign
10627 extend. Gdb 5.x does sign extend. */
10628 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10629 break;
10630
10631 case dw_val_class_unsigned_const:
10632 {
10633 int csize = constant_size (AT_unsigned (a));
10634 if (dwarf_version == 3
10635 && a->dw_attr == DW_AT_data_member_location
10636 && csize >= 4)
10637 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10638 else
10639 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10640 }
10641 break;
10642
10643 case dw_val_class_symview:
10644 {
10645 int vsize;
10646 if (symview_upper_bound <= 0xff)
10647 vsize = 1;
10648 else if (symview_upper_bound <= 0xffff)
10649 vsize = 2;
10650 else if (symview_upper_bound <= 0xffffffff)
10651 vsize = 4;
10652 else
10653 vsize = 8;
10654 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10655 "%s", name);
10656 }
10657 break;
10658
10659 case dw_val_class_const_implicit:
10660 if (flag_debug_asm)
10661 fprintf (asm_out_file, "\t\t\t%s %s ("
10662 HOST_WIDE_INT_PRINT_DEC ")\n",
10663 ASM_COMMENT_START, name, AT_int (a));
10664 break;
10665
10666 case dw_val_class_unsigned_const_implicit:
10667 if (flag_debug_asm)
10668 fprintf (asm_out_file, "\t\t\t%s %s ("
10669 HOST_WIDE_INT_PRINT_HEX ")\n",
10670 ASM_COMMENT_START, name, AT_unsigned (a));
10671 break;
10672
10673 case dw_val_class_const_double:
10674 {
10675 unsigned HOST_WIDE_INT first, second;
10676
10677 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10678 dw2_asm_output_data (1,
10679 HOST_BITS_PER_DOUBLE_INT
10680 / HOST_BITS_PER_CHAR,
10681 NULL);
10682
10683 if (WORDS_BIG_ENDIAN)
10684 {
10685 first = a->dw_attr_val.v.val_double.high;
10686 second = a->dw_attr_val.v.val_double.low;
10687 }
10688 else
10689 {
10690 first = a->dw_attr_val.v.val_double.low;
10691 second = a->dw_attr_val.v.val_double.high;
10692 }
10693
10694 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10695 first, "%s", name);
10696 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10697 second, NULL);
10698 }
10699 break;
10700
10701 case dw_val_class_wide_int:
10702 {
10703 int i;
10704 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10705 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10706 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10707 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10708 * l, NULL);
10709
10710 if (WORDS_BIG_ENDIAN)
10711 for (i = len - 1; i >= 0; --i)
10712 {
10713 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10714 "%s", name);
10715 name = "";
10716 }
10717 else
10718 for (i = 0; i < len; ++i)
10719 {
10720 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10721 "%s", name);
10722 name = "";
10723 }
10724 }
10725 break;
10726
10727 case dw_val_class_vec:
10728 {
10729 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10730 unsigned int len = a->dw_attr_val.v.val_vec.length;
10731 unsigned int i;
10732 unsigned char *p;
10733
10734 dw2_asm_output_data (constant_size (len * elt_size),
10735 len * elt_size, "%s", name);
10736 if (elt_size > sizeof (HOST_WIDE_INT))
10737 {
10738 elt_size /= 2;
10739 len *= 2;
10740 }
10741 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10742 i < len;
10743 i++, p += elt_size)
10744 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10745 "fp or vector constant word %u", i);
10746 break;
10747 }
10748
10749 case dw_val_class_flag:
10750 if (dwarf_version >= 4)
10751 {
10752 /* Currently all add_AT_flag calls pass in 1 as last argument,
10753 so DW_FORM_flag_present can be used. If that ever changes,
10754 we'll need to use DW_FORM_flag and have some optimization
10755 in build_abbrev_table that will change those to
10756 DW_FORM_flag_present if it is set to 1 in all DIEs using
10757 the same abbrev entry. */
10758 gcc_assert (AT_flag (a) == 1);
10759 if (flag_debug_asm)
10760 fprintf (asm_out_file, "\t\t\t%s %s\n",
10761 ASM_COMMENT_START, name);
10762 break;
10763 }
10764 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10765 break;
10766
10767 case dw_val_class_loc_list:
10768 output_loc_list_offset (a);
10769 break;
10770
10771 case dw_val_class_view_list:
10772 output_view_list_offset (a);
10773 break;
10774
10775 case dw_val_class_die_ref:
10776 if (AT_ref_external (a))
10777 {
10778 if (AT_ref (a)->comdat_type_p)
10779 {
10780 comdat_type_node *type_node
10781 = AT_ref (a)->die_id.die_type_node;
10782
10783 gcc_assert (type_node);
10784 output_signature (type_node->signature, name);
10785 }
10786 else
10787 {
10788 const char *sym = AT_ref (a)->die_id.die_symbol;
10789 int size;
10790
10791 gcc_assert (sym);
10792 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10793 length, whereas in DWARF3 it's always sized as an
10794 offset. */
10795 if (dwarf_version == 2)
10796 size = DWARF2_ADDR_SIZE;
10797 else
10798 size = DWARF_OFFSET_SIZE;
10799 /* ??? We cannot unconditionally output die_offset if
10800 non-zero - others might create references to those
10801 DIEs via symbols.
10802 And we do not clear its DIE offset after outputting it
10803 (and the label refers to the actual DIEs, not to the
10804 DWARF CU unit header, in which case label + offset
10805 would be the correct thing to use).
10806 ??? This is the reason for the with_offset flag. */
10807 if (AT_ref (a)->with_offset)
10808 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10809 debug_info_section, "%s", name);
10810 else
10811 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10812 name);
10813 }
10814 }
10815 else
10816 {
10817 gcc_assert (AT_ref (a)->die_offset);
10818 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10819 "%s", name);
10820 }
10821 break;
10822
10823 case dw_val_class_fde_ref:
10824 {
10825 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10826
10827 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10828 a->dw_attr_val.v.val_fde_index * 2);
10829 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10830 "%s", name);
10831 }
10832 break;
10833
10834 case dw_val_class_vms_delta:
10835 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10836 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10837 AT_vms_delta2 (a), AT_vms_delta1 (a),
10838 "%s", name);
10839 #else
10840 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10841 AT_vms_delta2 (a), AT_vms_delta1 (a),
10842 "%s", name);
10843 #endif
10844 break;
10845
10846 case dw_val_class_lbl_id:
10847 output_attr_index_or_value (a);
10848 break;
10849
10850 case dw_val_class_lineptr:
10851 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10852 debug_line_section, "%s", name);
10853 break;
10854
10855 case dw_val_class_macptr:
10856 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10857 debug_macinfo_section, "%s", name);
10858 break;
10859
10860 case dw_val_class_loclistsptr:
10861 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10862 debug_loc_section, "%s", name);
10863 break;
10864
10865 case dw_val_class_str:
10866 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10867 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10868 a->dw_attr_val.v.val_str->label,
10869 debug_str_section,
10870 "%s: \"%s\"", name, AT_string (a));
10871 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10872 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10873 a->dw_attr_val.v.val_str->label,
10874 debug_line_str_section,
10875 "%s: \"%s\"", name, AT_string (a));
10876 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10877 dw2_asm_output_data_uleb128 (AT_index (a),
10878 "%s: \"%s\"", name, AT_string (a));
10879 else
10880 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10881 break;
10882
10883 case dw_val_class_file:
10884 {
10885 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10886
10887 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10888 a->dw_attr_val.v.val_file->filename);
10889 break;
10890 }
10891
10892 case dw_val_class_file_implicit:
10893 if (flag_debug_asm)
10894 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10895 ASM_COMMENT_START, name,
10896 maybe_emit_file (a->dw_attr_val.v.val_file),
10897 a->dw_attr_val.v.val_file->filename);
10898 break;
10899
10900 case dw_val_class_data8:
10901 {
10902 int i;
10903
10904 for (i = 0; i < 8; i++)
10905 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10906 i == 0 ? "%s" : NULL, name);
10907 break;
10908 }
10909
10910 case dw_val_class_high_pc:
10911 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10912 get_AT_low_pc (die), "DW_AT_high_pc");
10913 break;
10914
10915 case dw_val_class_discr_value:
10916 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10917 break;
10918
10919 case dw_val_class_discr_list:
10920 {
10921 dw_discr_list_ref list = AT_discr_list (a);
10922 const int size = size_of_discr_list (list);
10923
10924 /* This is a block, so output its length first. */
10925 dw2_asm_output_data (constant_size (size), size,
10926 "%s: block size", name);
10927
10928 for (; list != NULL; list = list->dw_discr_next)
10929 {
10930 /* One byte for the discriminant value descriptor, and then as
10931 many LEB128 numbers as required. */
10932 if (list->dw_discr_range)
10933 dw2_asm_output_data (1, DW_DSC_range,
10934 "%s: DW_DSC_range", name);
10935 else
10936 dw2_asm_output_data (1, DW_DSC_label,
10937 "%s: DW_DSC_label", name);
10938
10939 output_discr_value (&list->dw_discr_lower_bound, name);
10940 if (list->dw_discr_range)
10941 output_discr_value (&list->dw_discr_upper_bound, name);
10942 }
10943 break;
10944 }
10945
10946 default:
10947 gcc_unreachable ();
10948 }
10949 }
10950
10951 FOR_EACH_CHILD (die, c, output_die (c));
10952
10953 /* Add null byte to terminate sibling list. */
10954 if (die->die_child != NULL)
10955 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10956 (unsigned long) die->die_offset);
10957 }
10958
10959 /* Output the dwarf version number. */
10960
10961 static void
10962 output_dwarf_version ()
10963 {
10964 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10965 views in loclist. That will change eventually. */
10966 if (dwarf_version == 6)
10967 {
10968 static bool once;
10969 if (!once)
10970 {
10971 warning (0,
10972 "-gdwarf-6 is output as version 5 with incompatibilities");
10973 once = true;
10974 }
10975 dw2_asm_output_data (2, 5, "DWARF version number");
10976 }
10977 else
10978 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10979 }
10980
10981 /* Output the compilation unit that appears at the beginning of the
10982 .debug_info section, and precedes the DIE descriptions. */
10983
10984 static void
10985 output_compilation_unit_header (enum dwarf_unit_type ut)
10986 {
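  /* For reference, the header emitted below is laid out as: unit_length,
     version (2 bytes), then for DWARF 5 unit_type (1 byte), address_size
     (1 byte) and debug_abbrev_offset, whereas DWARF 2-4 emit
     debug_abbrev_offset followed by address_size.  */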
10987 if (!XCOFF_DEBUGGING_INFO)
10988 {
10989 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10990 dw2_asm_output_data (4, 0xffffffff,
10991 "Initial length escape value indicating 64-bit DWARF extension");
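      /* For reference: in 64-bit DWARF the unit length is this 4-byte escape
	 value followed by an 8-byte length, so DWARF_OFFSET_SIZE is 8 and
	 DWARF_INITIAL_LENGTH_SIZE is 12; in 32-bit DWARF the difference
	 tested above is 0 and no escape is emitted.  */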
10992 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10993 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10994 "Length of Compilation Unit Info");
10995 }
10996
10997 output_dwarf_version ();
10998 if (dwarf_version >= 5)
10999 {
11000 const char *name;
11001 switch (ut)
11002 {
11003 case DW_UT_compile: name = "DW_UT_compile"; break;
11004 case DW_UT_type: name = "DW_UT_type"; break;
11005 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11006 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11007 default: gcc_unreachable ();
11008 }
11009 dw2_asm_output_data (1, ut, "%s", name);
11010 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11011 }
11012 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11013 debug_abbrev_section,
11014 "Offset Into Abbrev. Section");
11015 if (dwarf_version < 5)
11016 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11017 }
11018
11019 /* Output the compilation unit DIE and its children. */
11020
11021 static void
11022 output_comp_unit (dw_die_ref die, int output_if_empty,
11023 const unsigned char *dwo_id)
11024 {
11025 const char *secname, *oldsym;
11026 char *tmp;
11027
11028 /* Unless we are outputting the main CU, we may throw away empty ones. */
11029 if (!output_if_empty && die->die_child == NULL)
11030 return;
11031
11032 /* Even if there are no children of this DIE, we must output the information
11033 about the compilation unit. Otherwise, on an empty translation unit, we
11034 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11035 will then complain when examining the file. First mark all the DIEs in
11036 this CU so we know which get local refs. */
11037 mark_dies (die);
11038
11039 external_ref_hash_type *extern_map = optimize_external_refs (die);
11040
11041 /* For now, optimize only the main CU; in order to optimize the rest
11042 we'd need to see all of them earlier. Leave the rest for post-linking
11043 tools like DWZ. */
11044 if (die == comp_unit_die ())
11045 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11046
11047 build_abbrev_table (die, extern_map);
11048
11049 optimize_abbrev_table ();
11050
11051 delete extern_map;
11052
11053 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11054 next_die_offset = (dwo_id
11055 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11056 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11057 calc_die_sizes (die);
11058
11059 oldsym = die->die_id.die_symbol;
11060 if (oldsym && die->comdat_type_p)
11061 {
11062 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11063
11064 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11065 secname = tmp;
11066 die->die_id.die_symbol = NULL;
11067 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11068 }
11069 else
11070 {
11071 switch_to_section (debug_info_section);
11072 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11073 info_section_emitted = true;
11074 }
11075
11076 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11077 debug info section, not on the CU DIE. */
11078 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11079 {
11080 /* ??? No way to get visibility assembled without a decl. */
11081 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11082 get_identifier (oldsym), char_type_node);
11083 TREE_PUBLIC (decl) = true;
11084 TREE_STATIC (decl) = true;
11085 DECL_ARTIFICIAL (decl) = true;
11086 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11087 DECL_VISIBILITY_SPECIFIED (decl) = true;
11088 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11089 #ifdef ASM_WEAKEN_LABEL
11090 /* We prefer a .weak because that handles duplicates from duplicate
11091 archive members in a graceful way. */
11092 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11093 #else
11094 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11095 #endif
11096 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11097 }
11098
11099 /* Output debugging information. */
11100 output_compilation_unit_header (dwo_id
11101 ? DW_UT_split_compile : DW_UT_compile);
11102 if (dwarf_version >= 5)
11103 {
11104 if (dwo_id != NULL)
11105 for (int i = 0; i < 8; i++)
11106 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11107 }
11108 output_die (die);
11109
11110 /* Leave the marks on the main CU, so we can check them in
11111 output_pubnames. */
11112 if (oldsym)
11113 {
11114 unmark_dies (die);
11115 die->die_id.die_symbol = oldsym;
11116 }
11117 }
11118
11119 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11120 and .debug_pubtypes. This is configured per-target, but can be
11121 overridden by the -gpubnames or -gno-pubnames options. */
11122
11123 static inline bool
11124 want_pubnames (void)
11125 {
11126 if (debug_info_level <= DINFO_LEVEL_TERSE)
11127 return false;
11128 if (debug_generate_pub_sections != -1)
11129 return debug_generate_pub_sections;
11130 return targetm.want_debug_pub_sections;
11131 }
11132
11133 /* Add the DW_AT_GNU_pubnames attribute if pubnames are wanted. */
11134
11135 static void
11136 add_AT_pubnames (dw_die_ref die)
11137 {
11138 if (want_pubnames ())
11139 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11140 }
11141
11142 /* Add a string attribute value to a skeleton DIE. */
11143
11144 static inline void
11145 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11146 const char *str)
11147 {
11148 dw_attr_node attr;
11149 struct indirect_string_node *node;
11150
11151 if (! skeleton_debug_str_hash)
11152 skeleton_debug_str_hash
11153 = hash_table<indirect_string_hasher>::create_ggc (10);
11154
11155 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11156 find_string_form (node);
11157 if (node->form == dwarf_FORM (DW_FORM_strx))
11158 node->form = DW_FORM_strp;
11159
11160 attr.dw_attr = attr_kind;
11161 attr.dw_attr_val.val_class = dw_val_class_str;
11162 attr.dw_attr_val.val_entry = NULL;
11163 attr.dw_attr_val.v.val_str = node;
11164 add_dwarf_attr (die, &attr);
11165 }
11166
11167 /* Helper function to generate top-level dies for skeleton debug_info and
11168 debug_types. */
11169
11170 static void
11171 add_top_level_skeleton_die_attrs (dw_die_ref die)
11172 {
11173 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11174 const char *comp_dir = comp_dir_string ();
11175
11176 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11177 if (comp_dir != NULL)
11178 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11179 add_AT_pubnames (die);
11180 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11181 }
11182
11183 /* Output skeleton debug sections that point to the dwo file. */
11184
11185 static void
11186 output_skeleton_debug_sections (dw_die_ref comp_unit,
11187 const unsigned char *dwo_id)
11188 {
11189 /* These attributes will be found in the full debug_info section. */
11190 remove_AT (comp_unit, DW_AT_producer);
11191 remove_AT (comp_unit, DW_AT_language);
11192
11193 switch_to_section (debug_skeleton_info_section);
11194 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11195
11196 /* Produce the skeleton compilation-unit header. This one differs enough from
11197 a normal CU header that it's better not to call
11198 output_compilation_unit_header. */
11199 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11200 dw2_asm_output_data (4, 0xffffffff,
11201 "Initial length escape value indicating 64-bit "
11202 "DWARF extension");
11203
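  /* Note that the unit_length value excludes the length field itself; the
     skeleton unit is just this header plus the single skeleton CU DIE
     emitted below.  */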
11204 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11205 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11206 - DWARF_INITIAL_LENGTH_SIZE
11207 + size_of_die (comp_unit),
11208 "Length of Compilation Unit Info");
11209 output_dwarf_version ();
11210 if (dwarf_version >= 5)
11211 {
11212 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11213 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11214 }
11215 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11216 debug_skeleton_abbrev_section,
11217 "Offset Into Abbrev. Section");
11218 if (dwarf_version < 5)
11219 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11220 else
11221 for (int i = 0; i < 8; i++)
11222 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11223
11224 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11225 output_die (comp_unit);
11226
11227 /* Build the skeleton debug_abbrev section. */
11228 switch_to_section (debug_skeleton_abbrev_section);
11229 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11230
11231 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11232
11233 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11234 }
11235
11236 /* Output a comdat type unit DIE and its children. */
11237
11238 static void
11239 output_comdat_type_unit (comdat_type_node *node)
11240 {
11241 const char *secname;
11242 char *tmp;
11243 int i;
11244 #if defined (OBJECT_FORMAT_ELF)
11245 tree comdat_key;
11246 #endif
11247
11248 /* First mark all the DIEs in this CU so we know which get local refs. */
11249 mark_dies (node->root_die);
11250
11251 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11252
11253 build_abbrev_table (node->root_die, extern_map);
11254
11255 delete extern_map;
11256 extern_map = NULL;
11257
11258 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11259 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11260 calc_die_sizes (node->root_die);
11261
11262 #if defined (OBJECT_FORMAT_ELF)
11263 if (dwarf_version >= 5)
11264 {
11265 if (!dwarf_split_debug_info)
11266 secname = ".debug_info";
11267 else
11268 secname = ".debug_info.dwo";
11269 }
11270 else if (!dwarf_split_debug_info)
11271 secname = ".debug_types";
11272 else
11273 secname = ".debug_types.dwo";
11274
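  /* The comdat group key built below is the type signature rendered in hex
     after a two-letter prefix, e.g. "wi." followed by 16 hex digits for the
     8-byte signature.  */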
11275 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11276 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11277 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11278 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11279 comdat_key = get_identifier (tmp);
11280 targetm.asm_out.named_section (secname,
11281 SECTION_DEBUG | SECTION_LINKONCE,
11282 comdat_key);
11283 #else
11284 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11285 sprintf (tmp, (dwarf_version >= 5
11286 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11287 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11288 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11289 secname = tmp;
11290 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11291 #endif
11292
11293 /* Output debugging information. */
11294 output_compilation_unit_header (dwarf_split_debug_info
11295 ? DW_UT_split_type : DW_UT_type);
11296 output_signature (node->signature, "Type Signature");
11297 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11298 "Offset to Type DIE");
11299 output_die (node->root_die);
11300
11301 unmark_dies (node->root_die);
11302 }
11303
11304 /* Return the DWARF2/3 pubname associated with a decl. */
11305
11306 static const char *
11307 dwarf2_name (tree decl, int scope)
11308 {
11309 if (DECL_NAMELESS (decl))
11310 return NULL;
11311 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11312 }
11313
11314 /* Add a new entry to .debug_pubnames if appropriate. */
11315
11316 static void
11317 add_pubname_string (const char *str, dw_die_ref die)
11318 {
11319 pubname_entry e;
11320
11321 e.die = die;
11322 e.name = xstrdup (str);
11323 vec_safe_push (pubname_table, e);
11324 }
11325
11326 static void
11327 add_pubname (tree decl, dw_die_ref die)
11328 {
11329 if (!want_pubnames ())
11330 return;
11331
11332 /* Don't add items to the table when we expect that the consumer will have
11333 just read the enclosing die. For example, if the consumer is looking at a
11334 class_member, it will either be inside the class already, or will have just
11335 looked up the class to find the member. Either way, searching the class is
11336 faster than searching the index. */
11337 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11338 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11339 {
11340 const char *name = dwarf2_name (decl, 1);
11341
11342 if (name)
11343 add_pubname_string (name, die);
11344 }
11345 }
11346
11347 /* Add an enumerator to the pubnames section. */
11348
11349 static void
11350 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11351 {
11352 pubname_entry e;
11353
11354 gcc_assert (scope_name);
11355 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11356 e.die = die;
11357 vec_safe_push (pubname_table, e);
11358 }
11359
11360 /* Add a new entry to .debug_pubtypes if appropriate. */
11361
11362 static void
11363 add_pubtype (tree decl, dw_die_ref die)
11364 {
11365 pubname_entry e;
11366
11367 if (!want_pubnames ())
11368 return;
11369
11370 if ((TREE_PUBLIC (decl)
11371 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11372 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11373 {
11374 tree scope = NULL;
11375 const char *scope_name = "";
11376 const char *sep = is_cxx () ? "::" : ".";
11377 const char *name;
11378
11379 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11380 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11381 {
11382 scope_name = lang_hooks.dwarf_name (scope, 1);
11383 if (scope_name != NULL && scope_name[0] != '\0')
11384 scope_name = concat (scope_name, sep, NULL);
11385 else
11386 scope_name = "";
11387 }
11388
11389 if (TYPE_P (decl))
11390 name = type_tag (decl);
11391 else
11392 name = lang_hooks.dwarf_name (decl, 1);
11393
11394 /* If we don't have a name for the type, there's no point in adding
11395 it to the table. */
11396 if (name != NULL && name[0] != '\0')
11397 {
11398 e.die = die;
11399 e.name = concat (scope_name, name, NULL);
11400 vec_safe_push (pubtype_table, e);
11401 }
11402
11403 /* Although it might be more consistent to add the pubinfo for the
11404 enumerators as their dies are created, they should only be added if the
11405 enum type meets the criteria above. So rather than re-check the parent
11406 enum type whenever an enumerator die is created, just output them all
11407 here. This isn't protected by the name conditional because anonymous
11408 enums don't have names. */
11409 if (die->die_tag == DW_TAG_enumeration_type)
11410 {
11411 dw_die_ref c;
11412
11413 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11414 }
11415 }
11416 }
11417
11418 /* Output a single entry in the pubnames table. */
11419
11420 static void
11421 output_pubname (dw_offset die_offset, pubname_entry *entry)
11422 {
11423 dw_die_ref die = entry->die;
11424 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11425
11426 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11427
11428 if (debug_generate_pub_sections == 2)
11429 {
11430 /* This logic follows gdb's method for determining the value of the flag
11431 byte. */
11432 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11433 switch (die->die_tag)
11434 {
11435 case DW_TAG_typedef:
11436 case DW_TAG_base_type:
11437 case DW_TAG_subrange_type:
11438 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11439 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11440 break;
11441 case DW_TAG_enumerator:
11442 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11443 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11444 if (!is_cxx ())
11445 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11446 break;
11447 case DW_TAG_subprogram:
11448 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11449 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11450 if (!is_ada ())
11451 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11452 break;
11453 case DW_TAG_constant:
11454 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11455 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11456 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11457 break;
11458 case DW_TAG_variable:
11459 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11460 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11461 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11462 break;
11463 case DW_TAG_namespace:
11464 case DW_TAG_imported_declaration:
11465 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11466 break;
11467 case DW_TAG_class_type:
11468 case DW_TAG_interface_type:
11469 case DW_TAG_structure_type:
11470 case DW_TAG_union_type:
11471 case DW_TAG_enumeration_type:
11472 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11473 if (!is_cxx ())
11474 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11475 break;
11476 default:
11477 /* An unusual tag. Leave the flag-byte empty. */
11478 break;
11479 }
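      /* The symbol kind and static bits sit above the CU-index bits, so
	 shifting right by GDB_INDEX_CU_BITSIZE leaves just the single flag
	 byte emitted here.  */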
11480 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11481 "GDB-index flags");
11482 }
11483
11484 dw2_asm_output_nstring (entry->name, -1, "external name");
11485 }
11486
11487
11488 /* Output the public names table used to speed up access to externally
11489 visible names; or the public types table used to find type definitions. */
11490
11491 static void
11492 output_pubnames (vec<pubname_entry, va_gc> *names)
11493 {
11494 unsigned i;
11495 unsigned long pubnames_length = size_of_pubnames (names);
11496 pubname_entry *pub;
11497
11498 if (!XCOFF_DEBUGGING_INFO)
11499 {
11500 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11501 dw2_asm_output_data (4, 0xffffffff,
11502 "Initial length escape value indicating 64-bit DWARF extension");
11503 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11504 "Pub Info Length");
11505 }
11506
11507 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11508 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11509
11510 if (dwarf_split_debug_info)
11511 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11512 debug_skeleton_info_section,
11513 "Offset of Compilation Unit Info");
11514 else
11515 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11516 debug_info_section,
11517 "Offset of Compilation Unit Info");
11518 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11519 "Compilation Unit Length");
11520
11521 FOR_EACH_VEC_ELT (*names, i, pub)
11522 {
11523 if (include_pubname_in_output (names, pub))
11524 {
11525 dw_offset die_offset = pub->die->die_offset;
11526
11527 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11528 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11529 gcc_assert (pub->die->die_mark);
11530
11531 /* If we're putting types in their own .debug_types sections,
11532 the .debug_pubtypes table will still point to the compile
11533 unit (not the type unit), so we want to use the offset of
11534 the skeleton DIE (if there is one). */
11535 if (pub->die->comdat_type_p && names == pubtype_table)
11536 {
11537 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11538
11539 if (type_node != NULL)
11540 die_offset = (type_node->skeleton_die != NULL
11541 ? type_node->skeleton_die->die_offset
11542 : comp_unit_die ()->die_offset);
11543 }
11544
11545 output_pubname (die_offset, pub);
11546 }
11547 }
11548
11549 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11550 }
11551
11552 /* Output public names and types tables if necessary. */
11553
11554 static void
11555 output_pubtables (void)
11556 {
11557 if (!want_pubnames () || !info_section_emitted)
11558 return;
11559
11560 switch_to_section (debug_pubnames_section);
11561 output_pubnames (pubname_table);
11562 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11563 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11564 simply won't look for the section. */
11565 switch_to_section (debug_pubtypes_section);
11566 output_pubnames (pubtype_table);
11567 }
11568
11569
11570 /* Output the information that goes into the .debug_aranges table.
11571 Namely, define the beginning and ending address range of the
11572 text section generated for this compilation unit. */
11573
11574 static void
11575 output_aranges (void)
11576 {
11577 unsigned i;
11578 unsigned long aranges_length = size_of_aranges ();
11579
11580 if (!XCOFF_DEBUGGING_INFO)
11581 {
11582 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11583 dw2_asm_output_data (4, 0xffffffff,
11584 "Initial length escape value indicating 64-bit DWARF extension");
11585 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11586 "Length of Address Ranges Info");
11587 }
11588
11589 /* Version number for aranges is still 2, even up to DWARF5. */
11590 dw2_asm_output_data (2, 2, "DWARF aranges version");
11591 if (dwarf_split_debug_info)
11592 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11593 debug_skeleton_info_section,
11594 "Offset of Compilation Unit Info");
11595 else
11596 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11597 debug_info_section,
11598 "Offset of Compilation Unit Info");
11599 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11600 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11601
11602 /* We need to align to twice the pointer size here. */
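  /* For example, with 8-byte addresses the header is padded to a 16-byte
     boundary so that each (address, length) tuple below starts at a
     multiple of twice the address size, as the .debug_aranges format
     expects.  */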
11603 if (DWARF_ARANGES_PAD_SIZE)
11604 {
11605 /* Pad using 2-byte words so that padding is correct for any
11606 pointer size. */
11607 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11608 2 * DWARF2_ADDR_SIZE);
11609 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11610 dw2_asm_output_data (2, 0, NULL);
11611 }
11612
11613 /* It is necessary not to output these entries if the sections were
11614 not used; if the sections were not used, the length will be 0 and
11615 the address may end up as 0 if the section is discarded by ld
11616 --gc-sections, leaving an invalid (0, 0) entry that can be
11617 confused with the terminator. */
11618 if (text_section_used)
11619 {
11620 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11621 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11622 text_section_label, "Length");
11623 }
11624 if (cold_text_section_used)
11625 {
11626 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11627 "Address");
11628 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11629 cold_text_section_label, "Length");
11630 }
11631
11632 if (have_multiple_function_sections)
11633 {
11634 unsigned fde_idx;
11635 dw_fde_ref fde;
11636
11637 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11638 {
11639 if (DECL_IGNORED_P (fde->decl))
11640 continue;
11641 if (!fde->in_std_section)
11642 {
11643 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11644 "Address");
11645 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11646 fde->dw_fde_begin, "Length");
11647 }
11648 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11649 {
11650 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11651 "Address");
11652 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11653 fde->dw_fde_second_begin, "Length");
11654 }
11655 }
11656 }
11657
11658 /* Output the terminator words. */
11659 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11660 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11661 }
11662
11663 /* Add a new entry to .debug_ranges. Return its index into
11664 ranges_table vector. */
11665
11666 static unsigned int
11667 add_ranges_num (int num, bool maybe_new_sec)
11668 {
11669 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11670 vec_safe_push (ranges_table, r);
11671 return vec_safe_length (ranges_table) - 1;
11672 }
11673
11674 /* Add a new entry to .debug_ranges corresponding to a block, or a
11675 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11676 this entry might be in a different section from previous range. */
11677
11678 static unsigned int
11679 add_ranges (const_tree block, bool maybe_new_sec)
11680 {
11681 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11682 }
11683
11684 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11685 chain, or a middle entry of a chain that will be directly referred to. */
11686
11687 static void
11688 note_rnglist_head (unsigned int offset)
11689 {
11690 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11691 return;
11692 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11693 }
11694
11695 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11696 When using dwarf_split_debug_info, address attributes in dies destined
11697 for the final executable should be direct references--setting the
11698 parameter force_direct ensures this behavior. */
11699
11700 static void
11701 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11702 bool *added, bool force_direct)
11703 {
11704 unsigned int in_use = vec_safe_length (ranges_by_label);
11705 unsigned int offset;
11706 dw_ranges_by_label rbl = { begin, end };
11707 vec_safe_push (ranges_by_label, rbl);
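  /* Label-pair entries are encoded with a negative num: pair index I is
     stored as -I - 1, leaving 0 for range-list terminators and positive
     values for BLOCK numbers.  */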
11708 offset = add_ranges_num (-(int)in_use - 1, true);
11709 if (!*added)
11710 {
11711 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11712 *added = true;
11713 note_rnglist_head (offset);
11714 }
11715 }
11716
11717 /* Emit .debug_ranges section. */
11718
11719 static void
11720 output_ranges (void)
11721 {
11722 unsigned i;
11723 static const char *const start_fmt = "Offset %#x";
11724 const char *fmt = start_fmt;
11725 dw_ranges *r;
11726
11727 switch_to_section (debug_ranges_section);
11728 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11729 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11730 {
11731 int block_num = r->num;
11732
11733 if (block_num > 0)
11734 {
11735 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11736 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11737
11738 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11739 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11740
11741 /* If all code is in the text section, then the compilation
11742 unit base address defaults to DW_AT_low_pc, which is the
11743 base of the text section. */
11744 if (!have_multiple_function_sections)
11745 {
11746 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11747 text_section_label,
11748 fmt, i * 2 * DWARF2_ADDR_SIZE);
11749 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11750 text_section_label, NULL);
11751 }
11752
11753 /* Otherwise, the compilation unit base address is zero,
11754 which allows us to use absolute addresses, and not worry
11755 about whether the target supports cross-section
11756 arithmetic. */
11757 else
11758 {
11759 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11760 fmt, i * 2 * DWARF2_ADDR_SIZE);
11761 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11762 }
11763
11764 fmt = NULL;
11765 }
11766
11767 /* Negative block_num stands for an index into ranges_by_label. */
11768 else if (block_num < 0)
11769 {
11770 int lab_idx = - block_num - 1;
11771
11772 if (!have_multiple_function_sections)
11773 {
11774 gcc_unreachable ();
11775 #if 0
11776 /* If we ever use add_ranges_by_labels () for a single
11777 function section, all we have to do is to take out
11778 the #if 0 above. */
11779 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11780 (*ranges_by_label)[lab_idx].begin,
11781 text_section_label,
11782 fmt, i * 2 * DWARF2_ADDR_SIZE);
11783 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11784 (*ranges_by_label)[lab_idx].end,
11785 text_section_label, NULL);
11786 #endif
11787 }
11788 else
11789 {
11790 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11791 (*ranges_by_label)[lab_idx].begin,
11792 fmt, i * 2 * DWARF2_ADDR_SIZE);
11793 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11794 (*ranges_by_label)[lab_idx].end,
11795 NULL);
11796 }
11797 }
11798 else
11799 {
11800 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11801 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11802 fmt = start_fmt;
11803 }
11804 }
11805 }
11806
11807 /* Non-zero if .debug_line_str should be used for .debug_line section
11808 strings or strings that are likely shareable with those. */
11809 #define DWARF5_USE_DEBUG_LINE_STR \
11810 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11811 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11812 /* FIXME: there is no .debug_line_str.dwo section, \
11813 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11814 && !dwarf_split_debug_info)
11815
11816 /* Assign .debug_rnglists indexes. */
11817
11818 static void
11819 index_rnglists (void)
11820 {
11821 unsigned i;
11822 dw_ranges *r;
11823
11824 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11825 if (r->label)
11826 r->idx = rnglist_idx++;
11827 }
11828
11829 /* Emit .debug_rnglists section. */
11830
11831 static void
11832 output_rnglists (unsigned generation)
11833 {
11834 unsigned i;
11835 dw_ranges *r;
11836 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11837 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11838 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11839
11840 switch_to_section (debug_ranges_section);
11841 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11842 /* There are up to 4 unique ranges labels per generation.
11843 See also init_sections_and_labels. */
11844 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11845 2 + generation * 4);
11846 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11847 3 + generation * 4);
11848 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11849 dw2_asm_output_data (4, 0xffffffff,
11850 "Initial length escape value indicating "
11851 "64-bit DWARF extension");
11852 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11853 "Length of Range Lists");
11854 ASM_OUTPUT_LABEL (asm_out_file, l1);
11855 output_dwarf_version ();
11856 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11857 dw2_asm_output_data (1, 0, "Segment Size");
11858 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11859 about relocation sizes and primarily care about the size of .debug*
11860 sections in linked shared libraries and executables, then
11861 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11862 into it are usually larger than just DW_FORM_sec_offset offsets
11863 into the .debug_rnglists section. */
11864 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11865 "Offset Entry Count");
11866 if (dwarf_split_debug_info)
11867 {
11868 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11869 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11870 if (r->label)
11871 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11872 ranges_base_label, NULL);
11873 }
11874
11875 const char *lab = "";
11876 unsigned int len = vec_safe_length (ranges_table);
11877 const char *base = NULL;
11878 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11879 {
11880 int block_num = r->num;
11881
11882 if (r->label)
11883 {
11884 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11885 lab = r->label;
11886 }
11887 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11888 base = NULL;
11889 if (block_num > 0)
11890 {
11891 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11892 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11893
11894 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11895 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11896
11897 if (HAVE_AS_LEB128)
11898 {
11899 /* If all code is in the text section, then the compilation
11900 unit base address defaults to DW_AT_low_pc, which is the
11901 base of the text section. */
11902 if (!have_multiple_function_sections)
11903 {
11904 dw2_asm_output_data (1, DW_RLE_offset_pair,
11905 "DW_RLE_offset_pair (%s)", lab);
11906 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11907 "Range begin address (%s)", lab);
11908 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11909 "Range end address (%s)", lab);
11910 continue;
11911 }
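	      /* When the following entries stay within the same section,
		 emitting DW_RLE_base_address once lets them use
		 DW_RLE_offset_pair, i.e. two short uleb128 deltas instead of
		 further address-sized (and typically relocated) values.  */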
11912 if (base == NULL)
11913 {
11914 dw_ranges *r2 = NULL;
11915 if (i < len - 1)
11916 r2 = &(*ranges_table)[i + 1];
11917 if (r2
11918 && r2->num != 0
11919 && r2->label == NULL
11920 && !r2->maybe_new_sec)
11921 {
11922 dw2_asm_output_data (1, DW_RLE_base_address,
11923 "DW_RLE_base_address (%s)", lab);
11924 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11925 "Base address (%s)", lab);
11926 strcpy (basebuf, blabel);
11927 base = basebuf;
11928 }
11929 }
11930 if (base)
11931 {
11932 dw2_asm_output_data (1, DW_RLE_offset_pair,
11933 "DW_RLE_offset_pair (%s)", lab);
11934 dw2_asm_output_delta_uleb128 (blabel, base,
11935 "Range begin address (%s)", lab);
11936 dw2_asm_output_delta_uleb128 (elabel, base,
11937 "Range end address (%s)", lab);
11938 continue;
11939 }
11940 dw2_asm_output_data (1, DW_RLE_start_length,
11941 "DW_RLE_start_length (%s)", lab);
11942 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11943 "Range begin address (%s)", lab);
11944 dw2_asm_output_delta_uleb128 (elabel, blabel,
11945 "Range length (%s)", lab);
11946 }
11947 else
11948 {
11949 dw2_asm_output_data (1, DW_RLE_start_end,
11950 "DW_RLE_start_end (%s)", lab);
11951 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11952 "Range begin address (%s)", lab);
11953 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11954 "Range end address (%s)", lab);
11955 }
11956 }
11957
11958 /* Negative block_num stands for an index into ranges_by_label. */
11959 else if (block_num < 0)
11960 {
11961 int lab_idx = - block_num - 1;
11962 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11963 const char *elabel = (*ranges_by_label)[lab_idx].end;
11964
11965 if (!have_multiple_function_sections)
11966 gcc_unreachable ();
11967 if (HAVE_AS_LEB128)
11968 {
11969 dw2_asm_output_data (1, DW_RLE_start_length,
11970 "DW_RLE_start_length (%s)", lab);
11971 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11972 "Range begin address (%s)", lab);
11973 dw2_asm_output_delta_uleb128 (elabel, blabel,
11974 "Range length (%s)", lab);
11975 }
11976 else
11977 {
11978 dw2_asm_output_data (1, DW_RLE_start_end,
11979 "DW_RLE_start_end (%s)", lab);
11980 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11981 "Range begin address (%s)", lab);
11982 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11983 "Range end address (%s)", lab);
11984 }
11985 }
11986 else
11987 dw2_asm_output_data (1, DW_RLE_end_of_list,
11988 "DW_RLE_end_of_list (%s)", lab);
11989 }
11990 ASM_OUTPUT_LABEL (asm_out_file, l2);
11991 }
11992
11993 /* Data structure containing information about input files. */
11994 struct file_info
11995 {
11996 const char *path; /* Complete file name. */
11997 const char *fname; /* File name part. */
11998 int length; /* Length of entire string. */
11999 struct dwarf_file_data * file_idx; /* Index in input file table. */
12000 int dir_idx; /* Index in directory table. */
12001 };
12002
12003 /* Data structure containing information about directories with source
12004 files. */
12005 struct dir_info
12006 {
12007 const char *path; /* Path including directory name. */
12008 int length; /* Path length. */
12009 int prefix; /* Index of directory entry which is a prefix. */
12010 int count; /* Number of files in this directory. */
12011 int dir_idx; /* Index of directory used as base. */
12012 };
12013
12014 /* Callback function for file_info comparison. We sort by looking at
12015 the directories in the path. */
12016
12017 static int
12018 file_info_cmp (const void *p1, const void *p2)
12019 {
12020 const struct file_info *const s1 = (const struct file_info *) p1;
12021 const struct file_info *const s2 = (const struct file_info *) p2;
12022 const unsigned char *cp1;
12023 const unsigned char *cp2;
12024
12025 /* Take care of file names without directories. We need to make sure that
12026 we return consistent values to qsort since some implementations will get
12027 confused if we return the same value when identical operands are passed
12028 in opposite orders. So if neither has a directory, return 0 and otherwise
12029 return 1 or -1 depending on which one has the directory. We want the one
12030 with the directory to sort after the one without, so all no-directory
12031 files are at the start (normally only the compilation unit file). */
12032 if ((s1->path == s1->fname || s2->path == s2->fname))
12033 return (s2->path == s2->fname) - (s1->path == s1->fname);
12034
12035 cp1 = (const unsigned char *) s1->path;
12036 cp2 = (const unsigned char *) s2->path;
12037
12038 while (1)
12039 {
12040 ++cp1;
12041 ++cp2;
12042 /* Reached the end of the first path? If so, handle like above,
12043 but now we want longer directory prefixes before shorter ones. */
12044 if ((cp1 == (const unsigned char *) s1->fname)
12045 || (cp2 == (const unsigned char *) s2->fname))
12046 return ((cp1 == (const unsigned char *) s1->fname)
12047 - (cp2 == (const unsigned char *) s2->fname));
12048
12049 /* Characters of the current path component differ? If so, sort by them. */
12050 else if (*cp1 != *cp2)
12051 return *cp1 - *cp2;
12052 }
12053 }
12054
12055 struct file_name_acquire_data
12056 {
12057 struct file_info *files;
12058 int used_files;
12059 int max_files;
12060 };
12061
12062 /* Traversal function for the hash table. */
12063
12064 int
12065 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12066 {
12067 struct dwarf_file_data *d = *slot;
12068 struct file_info *fi;
12069 const char *f;
12070
12071 gcc_assert (fnad->max_files >= d->emitted_number);
12072
12073 if (! d->emitted_number)
12074 return 1;
12075
12076 gcc_assert (fnad->max_files != fnad->used_files);
12077
12078 fi = fnad->files + fnad->used_files++;
12079
12080 /* Skip all leading "./". */
12081 f = d->filename;
12082 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12083 f += 2;
12084
12085 /* Create a new array entry. */
12086 fi->path = f;
12087 fi->length = strlen (f);
12088 fi->file_idx = d;
12089
12090 /* Search for the file name part. */
12091 f = strrchr (f, DIR_SEPARATOR);
12092 #if defined (DIR_SEPARATOR_2)
12093 {
12094 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12095
12096 if (g != NULL)
12097 {
12098 if (f == NULL || f < g)
12099 f = g;
12100 }
12101 }
12102 #endif
12103
12104 fi->fname = f == NULL ? fi->path : f + 1;
12105 return 1;
12106 }
12107
12108 /* Helper function for output_file_names. Emit a FORM-encoded
12109 string STR, with assembly comment prefix ENTRY_KIND and
12110 index IDX. */
12111
12112 static void
12113 output_line_string (enum dwarf_form form, const char *str,
12114 const char *entry_kind, unsigned int idx)
12115 {
12116 switch (form)
12117 {
12118 case DW_FORM_string:
12119 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12120 break;
12121 case DW_FORM_line_strp:
12122 if (!debug_line_str_hash)
12123 debug_line_str_hash
12124 = hash_table<indirect_string_hasher>::create_ggc (10);
12125
12126 struct indirect_string_node *node;
12127 node = find_AT_string_in_table (str, debug_line_str_hash);
12128 set_indirect_string (node);
12129 node->form = form;
12130 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12131 debug_line_str_section, "%s: %#x: \"%s\"",
12132 entry_kind, 0, node->str);
12133 break;
12134 default:
12135 gcc_unreachable ();
12136 }
12137 }
12138
12139 /* Output the directory table and the file name table. We try to minimize
12140 the total amount of memory needed. A heuristic is used to avoid large
12141 slowdowns with many input files. */
12142
12143 static void
12144 output_file_names (void)
12145 {
12146 struct file_name_acquire_data fnad;
12147 int numfiles;
12148 struct file_info *files;
12149 struct dir_info *dirs;
12150 int *saved;
12151 int *savehere;
12152 int *backmap;
12153 int ndirs;
12154 int idx_offset;
12155 int i;
12156
12157 if (!last_emitted_file)
12158 {
12159 if (dwarf_version >= 5)
12160 {
12161 dw2_asm_output_data (1, 0, "Directory entry format count");
12162 dw2_asm_output_data_uleb128 (0, "Directories count");
12163 dw2_asm_output_data (1, 0, "File name entry format count");
12164 dw2_asm_output_data_uleb128 (0, "File names count");
12165 }
12166 else
12167 {
12168 dw2_asm_output_data (1, 0, "End directory table");
12169 dw2_asm_output_data (1, 0, "End file name table");
12170 }
12171 return;
12172 }
12173
12174 numfiles = last_emitted_file->emitted_number;
12175
12176 /* Allocate the various arrays we need. */
12177 files = XALLOCAVEC (struct file_info, numfiles);
12178 dirs = XALLOCAVEC (struct dir_info, numfiles);
12179
12180 fnad.files = files;
12181 fnad.used_files = 0;
12182 fnad.max_files = numfiles;
12183 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12184 gcc_assert (fnad.used_files == fnad.max_files);
12185
12186 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12187
12188 /* Find all the different directories used. */
12189 dirs[0].path = files[0].path;
12190 dirs[0].length = files[0].fname - files[0].path;
12191 dirs[0].prefix = -1;
12192 dirs[0].count = 1;
12193 dirs[0].dir_idx = 0;
12194 files[0].dir_idx = 0;
12195 ndirs = 1;
12196
12197 for (i = 1; i < numfiles; i++)
12198 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12199 && memcmp (dirs[ndirs - 1].path, files[i].path,
12200 dirs[ndirs - 1].length) == 0)
12201 {
12202 /* Same directory as last entry. */
12203 files[i].dir_idx = ndirs - 1;
12204 ++dirs[ndirs - 1].count;
12205 }
12206 else
12207 {
12208 int j;
12209
12210 /* This is a new directory. */
12211 dirs[ndirs].path = files[i].path;
12212 dirs[ndirs].length = files[i].fname - files[i].path;
12213 dirs[ndirs].count = 1;
12214 dirs[ndirs].dir_idx = ndirs;
12215 files[i].dir_idx = ndirs;
12216
12217 /* Search for a prefix. */
12218 dirs[ndirs].prefix = -1;
12219 for (j = 0; j < ndirs; j++)
12220 if (dirs[j].length < dirs[ndirs].length
12221 && dirs[j].length > 1
12222 && (dirs[ndirs].prefix == -1
12223 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12224 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12225 dirs[ndirs].prefix = j;
12226
12227 ++ndirs;
12228 }
12229
12230 /* Now to the actual work. We have to find a subset of the directories which
12231 allows expressing the file names using references to the directory table
12232 with the fewest characters. We do not do an exhaustive search
12233 where we would have to check out every combination of every single
12234 possible prefix. Instead we use a heuristic which provides nearly optimal
12235 results in most cases and is never far off. */
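  /* For example, a directory path of 13 characters shared by 20 file names
     saves about 13 * 20 characters of file-name text, while emitting that
     directory entry costs only its 14 bytes (path plus terminator), so the
     heuristic below would keep it.  */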
12236 saved = XALLOCAVEC (int, ndirs);
12237 savehere = XALLOCAVEC (int, ndirs);
12238
12239 memset (saved, '\0', ndirs * sizeof (saved[0]));
12240 for (i = 0; i < ndirs; i++)
12241 {
12242 int j;
12243 int total;
12244
12245 /* We can always save some space for the current directory. But this
12246 does not mean it will be enough to justify adding the directory. */
12247 savehere[i] = dirs[i].length;
12248 total = (savehere[i] - saved[i]) * dirs[i].count;
12249
12250 for (j = i + 1; j < ndirs; j++)
12251 {
12252 savehere[j] = 0;
12253 if (saved[j] < dirs[i].length)
12254 {
12255 /* Determine whether the dirs[i] path is a prefix of the
12256 dirs[j] path. */
12257 int k;
12258
12259 k = dirs[j].prefix;
12260 while (k != -1 && k != (int) i)
12261 k = dirs[k].prefix;
12262
12263 if (k == (int) i)
12264 {
12265 /* Yes it is. We can possibly save some memory by
12266 writing the filenames in dirs[j] relative to
12267 dirs[i]. */
12268 savehere[j] = dirs[i].length;
12269 total += (savehere[j] - saved[j]) * dirs[j].count;
12270 }
12271 }
12272 }
12273
12274 /* Check whether we can save enough to justify adding the dirs[i]
12275 directory. */
12276 if (total > dirs[i].length + 1)
12277 {
12278 /* It's worthwhile adding. */
12279 for (j = i; j < ndirs; j++)
12280 if (savehere[j] > 0)
12281 {
12282 /* Remember how much we saved for this directory so far. */
12283 saved[j] = savehere[j];
12284
12285 /* Remember the prefix directory. */
12286 dirs[j].dir_idx = i;
12287 }
12288 }
12289 }
12290
12291 /* Emit the directory name table. */
12292 idx_offset = dirs[0].length > 0 ? 1 : 0;
12293 enum dwarf_form str_form = DW_FORM_string;
12294 enum dwarf_form idx_form = DW_FORM_udata;
12295 if (dwarf_version >= 5)
12296 {
12297 const char *comp_dir = comp_dir_string ();
12298 if (comp_dir == NULL)
12299 comp_dir = "";
12300 dw2_asm_output_data (1, 1, "Directory entry format count");
12301 if (DWARF5_USE_DEBUG_LINE_STR)
12302 str_form = DW_FORM_line_strp;
12303 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12304 dw2_asm_output_data_uleb128 (str_form, "%s",
12305 get_DW_FORM_name (str_form));
12306 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12307 if (str_form == DW_FORM_string)
12308 {
12309 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12310 for (i = 1 - idx_offset; i < ndirs; i++)
12311 dw2_asm_output_nstring (dirs[i].path,
12312 dirs[i].length
12313 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12314 "Directory Entry: %#x", i + idx_offset);
12315 }
12316 else
12317 {
12318 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12319 for (i = 1 - idx_offset; i < ndirs; i++)
12320 {
12321 const char *str
12322 = ggc_alloc_string (dirs[i].path,
12323 dirs[i].length
12324 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12325 output_line_string (str_form, str, "Directory Entry",
12326 (unsigned) i + idx_offset);
12327 }
12328 }
12329 }
12330 else
12331 {
12332 for (i = 1 - idx_offset; i < ndirs; i++)
12333 dw2_asm_output_nstring (dirs[i].path,
12334 dirs[i].length
12335 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12336 "Directory Entry: %#x", i + idx_offset);
12337
12338 dw2_asm_output_data (1, 0, "End directory table");
12339 }
12340
12341 /* We have to emit them in the order of emitted_number since that's
12342 used in the debug info generation. To do this efficiently we
12343 generate a back-mapping of the indices first. */
12344 backmap = XALLOCAVEC (int, numfiles);
12345 for (i = 0; i < numfiles; i++)
12346 backmap[files[i].file_idx->emitted_number - 1] = i;
12347
12348 if (dwarf_version >= 5)
12349 {
12350 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12351 if (filename0 == NULL)
12352 filename0 = "";
12353 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12354 DW_FORM_data2. Choose one based on the number of directories
12355 and how much space they would occupy in each encoding.
12356 If we have at most 256 directories, all indexes fit into
12357 a single byte, so DW_FORM_data1 is most compact (if there
12358 are at most 128 directories, DW_FORM_udata would be just as
12359 compact, but no shorter and slower to decode). */
12360 if (ndirs + idx_offset <= 256)
12361 idx_form = DW_FORM_data1;
12362 /* If there are more than 65536 directories, we have to use
12363 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12364 Otherwise, compute what space the indexes would occupy if they all
12365 used DW_FORM_udata - sum - and compare that to how large the
12366 DW_FORM_data2 encoding would be, and pick the more efficient one. */
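	 /* For example, with 1000 directories a file in directory 600 needs a
	    2-byte uleb128 for DW_FORM_udata (the same as DW_FORM_data2),
	    while a file in directory 60 needs only 1 byte; the sum over all
	    file entries decides which form is smaller.  */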
12367 else if (ndirs + idx_offset <= 65536)
12368 {
12369 unsigned HOST_WIDE_INT sum = 1;
12370 for (i = 0; i < numfiles; i++)
12371 {
12372 int file_idx = backmap[i];
12373 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12374 sum += size_of_uleb128 (dir_idx);
12375 }
12376 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12377 idx_form = DW_FORM_data2;
12378 }
12379 #ifdef VMS_DEBUGGING_INFO
12380 dw2_asm_output_data (1, 4, "File name entry format count");
12381 #else
12382 dw2_asm_output_data (1, 2, "File name entry format count");
12383 #endif
12384 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12385 dw2_asm_output_data_uleb128 (str_form, "%s",
12386 get_DW_FORM_name (str_form));
12387 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12388 "DW_LNCT_directory_index");
12389 dw2_asm_output_data_uleb128 (idx_form, "%s",
12390 get_DW_FORM_name (idx_form));
12391 #ifdef VMS_DEBUGGING_INFO
12392 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12393 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12394 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12395 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12396 #endif
12397 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12398
12399 output_line_string (str_form, filename0, "File Entry", 0);
12400
12401 /* Include directory index. */
12402 if (idx_form != DW_FORM_udata)
12403 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12404 0, NULL);
12405 else
12406 dw2_asm_output_data_uleb128 (0, NULL);
12407
12408 #ifdef VMS_DEBUGGING_INFO
12409 dw2_asm_output_data_uleb128 (0, NULL);
12410 dw2_asm_output_data_uleb128 (0, NULL);
12411 #endif
12412 }
12413
12414 /* Now write all the file names. */
12415 for (i = 0; i < numfiles; i++)
12416 {
12417 int file_idx = backmap[i];
12418 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12419
12420 #ifdef VMS_DEBUGGING_INFO
12421 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12422
12423 /* Setting these fields can lead to debugger miscomparisons,
12424 but VMS Debug requires them to be set correctly. */
12425
12426 int ver;
12427 long long cdt;
12428 long siz;
12429 int maxfilelen = (strlen (files[file_idx].path)
12430 + dirs[dir_idx].length
12431 + MAX_VMS_VERSION_LEN + 1);
12432 char *filebuf = XALLOCAVEC (char, maxfilelen);
12433
12434 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12435 snprintf (filebuf, maxfilelen, "%s;%d",
12436 files[file_idx].path + dirs[dir_idx].length, ver);
12437
12438 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12439
12440 /* Include directory index. */
12441 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12442 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12443 dir_idx + idx_offset, NULL);
12444 else
12445 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12446
12447 /* Modification time. */
12448 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12449 &cdt, 0, 0, 0) == 0)
12450 ? cdt : 0, NULL);
12451
12452 /* File length in bytes. */
12453 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12454 0, &siz, 0, 0) == 0)
12455 ? siz : 0, NULL);
12456 #else
12457 output_line_string (str_form,
12458 files[file_idx].path + dirs[dir_idx].length,
12459 "File Entry", (unsigned) i + 1);
12460
12461 /* Include directory index. */
12462 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12463 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12464 dir_idx + idx_offset, NULL);
12465 else
12466 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12467
12468 if (dwarf_version >= 5)
12469 continue;
12470
12471 /* Modification time. */
12472 dw2_asm_output_data_uleb128 (0, NULL);
12473
12474 /* File length in bytes. */
12475 dw2_asm_output_data_uleb128 (0, NULL);
12476 #endif /* VMS_DEBUGGING_INFO */
12477 }
12478
12479 if (dwarf_version < 5)
12480 dw2_asm_output_data (1, 0, "End file name table");
12481 }
12482
12483
12484 /* Output one line number table into the .debug_line section. */
12485
12486 static void
12487 output_one_line_info_table (dw_line_info_table *table)
12488 {
12489 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12490 unsigned int current_line = 1;
12491 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12492 dw_line_info_entry *ent, *prev_addr;
12493 size_t i;
12494 unsigned int view;
12495
12496 view = 0;
12497
12498 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12499 {
12500 switch (ent->opcode)
12501 {
12502 case LI_set_address:
12503 /* ??? Unfortunately, we have little choice here currently, and
12504 must always use the most general form. GCC does not know the
12505 address delta itself, so we can't use DW_LNS_advance_pc. Many
12506 ports do have length attributes which will give an upper bound
12507 on the address range. We could perhaps use length attributes
12508 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12509 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12510
12511 view = 0;
12512
12513 /* This can handle any delta. This takes
12514 4+DWARF2_ADDR_SIZE bytes. */
12515 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12516 debug_variable_location_views
12517 ? ", reset view to 0" : "");
12518 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12519 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12520 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12521
12522 prev_addr = ent;
12523 break;
12524
12525 case LI_adv_address:
12526 {
12527 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12528 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12529 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12530
12531 view++;
12532
12533 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12534 dw2_asm_output_delta (2, line_label, prev_label,
12535 "from %s to %s", prev_label, line_label);
12536
12537 prev_addr = ent;
12538 break;
12539 }
12540
12541 case LI_set_line:
12542 if (ent->val == current_line)
12543 {
12544 /* We still need to start a new row, so output a copy insn. */
12545 dw2_asm_output_data (1, DW_LNS_copy,
12546 "copy line %u", current_line);
12547 }
12548 else
12549 {
12550 int line_offset = ent->val - current_line;
12551 int line_delta = line_offset - DWARF_LINE_BASE;
12552
12553 current_line = ent->val;
12554 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12555 {
12556 /* This can handle deltas from -10 to 234, using the current
12557 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12558 This takes 1 byte. */
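		  /* For example, with DWARF_LINE_BASE == -10, advancing from
		     line N to line N + 5 gives line_delta == 15, so the single
		     byte DWARF_LINE_OPCODE_BASE + 15 both advances the line
		     register and appends the new row.  */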
12559 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12560 "line %u", current_line);
12561 }
12562 else
12563 {
12564 /* This can handle any delta. This takes at least 4 bytes,
12565 depending on the value being encoded. */
12566 dw2_asm_output_data (1, DW_LNS_advance_line,
12567 "advance to line %u", current_line);
12568 dw2_asm_output_data_sleb128 (line_offset, NULL);
12569 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12570 }
12571 }
12572 break;
12573
12574 case LI_set_file:
12575 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12576 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12577 break;
12578
12579 case LI_set_column:
12580 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12581 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12582 break;
12583
12584 case LI_negate_stmt:
12585 current_is_stmt = !current_is_stmt;
12586 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12587 "is_stmt %d", current_is_stmt);
12588 break;
12589
12590 case LI_set_prologue_end:
12591 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12592 "set prologue end");
12593 break;
12594
12595 case LI_set_epilogue_begin:
12596 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12597 "set epilogue begin");
12598 break;
12599
12600 case LI_set_discriminator:
12601 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12602 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12603 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12604 dw2_asm_output_data_uleb128 (ent->val, NULL);
12605 break;
12606 }
12607 }
12608
12609 /* Emit debug info for the address of the end of the table. */
12610 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12611 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12612 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12613 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12614
12615 dw2_asm_output_data (1, 0, "end sequence");
12616 dw2_asm_output_data_uleb128 (1, NULL);
12617 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12618 }
12619
12620 /* Output the source line number correspondence information. This
12621 information goes into the .debug_line section. */
12622
12623 static void
12624 output_line_info (bool prologue_only)
12625 {
12626 static unsigned int generation;
12627 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12628 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12629 bool saw_one = false;
12630 int opc;
12631
12632 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12633 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12634 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12635 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12636
12637 if (!XCOFF_DEBUGGING_INFO)
12638 {
12639 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12640 dw2_asm_output_data (4, 0xffffffff,
12641 "Initial length escape value indicating 64-bit DWARF extension");
12642 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12643 "Length of Source Line Info");
12644 }
12645
12646 ASM_OUTPUT_LABEL (asm_out_file, l1);
12647
12648 output_dwarf_version ();
12649 if (dwarf_version >= 5)
12650 {
12651 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12652 dw2_asm_output_data (1, 0, "Segment Size");
12653 }
12654 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12655 ASM_OUTPUT_LABEL (asm_out_file, p1);
12656
12657 /* Define the architecture-dependent minimum instruction length (in bytes).
12658 In this implementation of DWARF, this field is used for information
12659 purposes only. Since GCC generates assembly language, we have no
12660 a priori knowledge of how many instruction bytes are generated for each
12661 source line, and therefore can use only the DW_LNE_set_address and
12662 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12663 this as '1', which is "correct enough" for all architectures,
12664 and don't let the target override. */
12665 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12666
12667 if (dwarf_version >= 4)
12668 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12669 "Maximum Operations Per Instruction");
12670 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12671 "Default is_stmt_start flag");
12672 dw2_asm_output_data (1, DWARF_LINE_BASE,
12673 "Line Base Value (Special Opcodes)");
12674 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12675 "Line Range Value (Special Opcodes)");
12676 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12677 "Special Opcode Base");
12678
12679 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12680 {
12681 int n_op_args;
12682 switch (opc)
12683 {
12684 case DW_LNS_advance_pc:
12685 case DW_LNS_advance_line:
12686 case DW_LNS_set_file:
12687 case DW_LNS_set_column:
12688 case DW_LNS_fixed_advance_pc:
12689 case DW_LNS_set_isa:
12690 n_op_args = 1;
12691 break;
12692 default:
12693 n_op_args = 0;
12694 break;
12695 }
12696
12697 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12698 opc, n_op_args);
12699 }
12700
12701 /* Write out the information about the files we use. */
12702 output_file_names ();
12703 ASM_OUTPUT_LABEL (asm_out_file, p2);
12704 if (prologue_only)
12705 {
12706 /* Output the marker for the end of the line number info. */
12707 ASM_OUTPUT_LABEL (asm_out_file, l2);
12708 return;
12709 }
12710
12711 if (separate_line_info)
12712 {
12713 dw_line_info_table *table;
12714 size_t i;
12715
12716 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12717 if (table->in_use)
12718 {
12719 output_one_line_info_table (table);
12720 saw_one = true;
12721 }
12722 }
12723 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12724 {
12725 output_one_line_info_table (cold_text_section_line_info);
12726 saw_one = true;
12727 }
12728
12729 /* ??? Some Darwin linkers crash on a .debug_line section with no
12730 sequences. Further, merely a DW_LNE_end_sequence entry is not
12731 sufficient -- the address column must also be initialized.
12732 Make sure to output at least one set_address/end_sequence pair,
12733 choosing .text since that section is always present. */
12734 if (text_section_line_info->in_use || !saw_one)
12735 output_one_line_info_table (text_section_line_info);
12736
12737 /* Output the marker for the end of the line number info. */
12738 ASM_OUTPUT_LABEL (asm_out_file, l2);
12739 }
12740 \f
12741 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12742
12743 static inline bool
12744 need_endianity_attribute_p (bool reverse)
12745 {
12746 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12747 }
12748
12749 /* Given a pointer to a tree node for some base type, return a pointer to
12750 a DIE that describes the given type. REVERSE is true if the type is
12751 to be interpreted in the reverse storage order wrt the target order.
12752
12753 This routine must only be called for GCC type nodes that correspond to
12754 Dwarf base (fundamental) types. */
12755
12756 static dw_die_ref
12757 base_type_die (tree type, bool reverse)
12758 {
12759 dw_die_ref base_type_result;
12760 enum dwarf_type encoding;
12761 bool fpt_used = false;
12762 struct fixed_point_type_info fpt_info;
12763 tree type_bias = NULL_TREE;
12764
12765 /* If this is a subtype that should not be emitted as a subrange type,
12766 use the base type. See subrange_type_for_debug_p. */
12767 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12768 type = TREE_TYPE (type);
12769
12770 switch (TREE_CODE (type))
12771 {
12772 case INTEGER_TYPE:
12773 if ((dwarf_version >= 4 || !dwarf_strict)
12774 && TYPE_NAME (type)
12775 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12776 && DECL_IS_BUILTIN (TYPE_NAME (type))
12777 && DECL_NAME (TYPE_NAME (type)))
12778 {
12779 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12780 if (strcmp (name, "char16_t") == 0
12781 || strcmp (name, "char32_t") == 0)
12782 {
12783 encoding = DW_ATE_UTF;
12784 break;
12785 }
12786 }
12787 if ((dwarf_version >= 3 || !dwarf_strict)
12788 && lang_hooks.types.get_fixed_point_type_info)
12789 {
12790 memset (&fpt_info, 0, sizeof (fpt_info));
12791 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12792 {
12793 fpt_used = true;
12794 encoding = ((TYPE_UNSIGNED (type))
12795 ? DW_ATE_unsigned_fixed
12796 : DW_ATE_signed_fixed);
12797 break;
12798 }
12799 }
12800 if (TYPE_STRING_FLAG (type))
12801 {
12802 if (TYPE_UNSIGNED (type))
12803 encoding = DW_ATE_unsigned_char;
12804 else
12805 encoding = DW_ATE_signed_char;
12806 }
12807 else if (TYPE_UNSIGNED (type))
12808 encoding = DW_ATE_unsigned;
12809 else
12810 encoding = DW_ATE_signed;
12811
12812 if (!dwarf_strict
12813 && lang_hooks.types.get_type_bias)
12814 type_bias = lang_hooks.types.get_type_bias (type);
12815 break;
12816
12817 case REAL_TYPE:
12818 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12819 {
12820 if (dwarf_version >= 3 || !dwarf_strict)
12821 encoding = DW_ATE_decimal_float;
12822 else
12823 encoding = DW_ATE_lo_user;
12824 }
12825 else
12826 encoding = DW_ATE_float;
12827 break;
12828
12829 case FIXED_POINT_TYPE:
12830 if (!(dwarf_version >= 3 || !dwarf_strict))
12831 encoding = DW_ATE_lo_user;
12832 else if (TYPE_UNSIGNED (type))
12833 encoding = DW_ATE_unsigned_fixed;
12834 else
12835 encoding = DW_ATE_signed_fixed;
12836 break;
12837
12838 /* Dwarf2 doesn't know anything about complex ints, so use
12839        a user-defined type for them.  */
12840 case COMPLEX_TYPE:
12841 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12842 encoding = DW_ATE_complex_float;
12843 else
12844 encoding = DW_ATE_lo_user;
12845 break;
12846
12847 case BOOLEAN_TYPE:
12848 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12849 encoding = DW_ATE_boolean;
12850 break;
12851
12852 default:
12853 /* No other TREE_CODEs are Dwarf fundamental types. */
12854 gcc_unreachable ();
12855 }
12856
12857 base_type_result = new_die_raw (DW_TAG_base_type);
12858
12859 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12860 int_size_in_bytes (type));
12861 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12862
12863 if (need_endianity_attribute_p (reverse))
12864 add_AT_unsigned (base_type_result, DW_AT_endianity,
12865 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12866
12867 add_alignment_attribute (base_type_result, type);
12868
12869 if (fpt_used)
12870 {
12871 switch (fpt_info.scale_factor_kind)
12872 {
12873 case fixed_point_scale_factor_binary:
12874 add_AT_int (base_type_result, DW_AT_binary_scale,
12875 fpt_info.scale_factor.binary);
12876 break;
12877
12878 case fixed_point_scale_factor_decimal:
12879 add_AT_int (base_type_result, DW_AT_decimal_scale,
12880 fpt_info.scale_factor.decimal);
12881 break;
12882
12883 case fixed_point_scale_factor_arbitrary:
12884 /* Arbitrary scale factors cannot be described in standard DWARF,
12885 yet. */
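	  /* E.g. a hypothetical scale factor of 3/1000 would be described
	     by the extension below as a DW_TAG_constant child of the CU
	     carrying DW_AT_GNU_numerator 3 and DW_AT_GNU_denominator 1000,
	     referenced from the base type through DW_AT_small.  */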
12886 if (!dwarf_strict)
12887 {
12888 /* Describe the scale factor as a rational constant. */
12889 const dw_die_ref scale_factor
12890 = new_die (DW_TAG_constant, comp_unit_die (), type);
12891
12892 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12893 fpt_info.scale_factor.arbitrary.numerator);
12894 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12895 fpt_info.scale_factor.arbitrary.denominator);
12896
12897 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12898 }
12899 break;
12900
12901 default:
12902 gcc_unreachable ();
12903 }
12904 }
12905
12906 if (type_bias)
12907 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12908 dw_scalar_form_constant
12909 | dw_scalar_form_exprloc
12910 | dw_scalar_form_reference,
12911 NULL);
12912
12913 return base_type_result;
12914 }
12915
12916 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12917 named 'auto' in its type: return true for it, false otherwise. */
12918
12919 static inline bool
12920 is_cxx_auto (tree type)
12921 {
12922 if (is_cxx ())
12923 {
12924 tree name = TYPE_IDENTIFIER (type);
12925 if (name == get_identifier ("auto")
12926 || name == get_identifier ("decltype(auto)"))
12927 return true;
12928 }
12929 return false;
12930 }
12931
12932 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12933    given input type is a Dwarf "fundamental" type.  Otherwise return zero.  */
12934
12935 static inline int
12936 is_base_type (tree type)
12937 {
12938 switch (TREE_CODE (type))
12939 {
12940 case INTEGER_TYPE:
12941 case REAL_TYPE:
12942 case FIXED_POINT_TYPE:
12943 case COMPLEX_TYPE:
12944 case BOOLEAN_TYPE:
12945 return 1;
12946
12947 case VOID_TYPE:
12948 case ARRAY_TYPE:
12949 case RECORD_TYPE:
12950 case UNION_TYPE:
12951 case QUAL_UNION_TYPE:
12952 case ENUMERAL_TYPE:
12953 case FUNCTION_TYPE:
12954 case METHOD_TYPE:
12955 case POINTER_TYPE:
12956 case REFERENCE_TYPE:
12957 case NULLPTR_TYPE:
12958 case OFFSET_TYPE:
12959 case LANG_TYPE:
12960 case VECTOR_TYPE:
12961 return 0;
12962
12963 default:
12964 if (is_cxx_auto (type))
12965 return 0;
12966 gcc_unreachable ();
12967 }
12968
12969 return 0;
12970 }
12971
12972 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12973 node, return the size in bits for the type if it is a constant, or else
12974 return the alignment for the type if the type's size is not constant, or
12975 else return BITS_PER_WORD if the type actually turns out to be an
12976 ERROR_MARK node. */
12977
12978 static inline unsigned HOST_WIDE_INT
12979 simple_type_size_in_bits (const_tree type)
12980 {
12981 if (TREE_CODE (type) == ERROR_MARK)
12982 return BITS_PER_WORD;
12983 else if (TYPE_SIZE (type) == NULL_TREE)
12984 return 0;
12985 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12986 return tree_to_uhwi (TYPE_SIZE (type));
12987 else
12988 return TYPE_ALIGN (type);
12989 }
12990
12991 /* Similarly, but return an offset_int instead of UHWI. */
12992
12993 static inline offset_int
12994 offset_int_type_size_in_bits (const_tree type)
12995 {
12996 if (TREE_CODE (type) == ERROR_MARK)
12997 return BITS_PER_WORD;
12998 else if (TYPE_SIZE (type) == NULL_TREE)
12999 return 0;
13000 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13001 return wi::to_offset (TYPE_SIZE (type));
13002 else
13003 return TYPE_ALIGN (type);
13004 }
13005
13006 /* Given a pointer to a tree node for a subrange type, return a pointer
13007 to a DIE that describes the given type. */
13008
13009 static dw_die_ref
13010 subrange_type_die (tree type, tree low, tree high, tree bias,
13011 dw_die_ref context_die)
13012 {
13013 dw_die_ref subrange_die;
13014 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13015
13016 if (context_die == NULL)
13017 context_die = comp_unit_die ();
13018
13019 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13020
13021 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13022 {
13023 /* The size of the subrange type and its base type do not match,
13024 so we need to generate a size attribute for the subrange type. */
13025 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13026 }
13027
13028 add_alignment_attribute (subrange_die, type);
13029
13030 if (low)
13031 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13032 if (high)
13033 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13034 if (bias && !dwarf_strict)
13035 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13036 dw_scalar_form_constant
13037 | dw_scalar_form_exprloc
13038 | dw_scalar_form_reference,
13039 NULL);
13040
13041 return subrange_die;
13042 }
13043
13044 /* Returns the (const and/or volatile) cv_qualifiers associated with
13045 the decl node. This will normally be augmented with the
13046 cv_qualifiers of the underlying type in add_type_attribute. */
13047
13048 static int
13049 decl_quals (const_tree decl)
13050 {
13051 return ((TREE_READONLY (decl)
13052 /* The C++ front-end correctly marks reference-typed
13053 variables as readonly, but from a language (and debug
13054 info) standpoint they are not const-qualified. */
13055 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13056 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13057 | (TREE_THIS_VOLATILE (decl)
13058 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13059 }
13060
13061 /* Determine the TYPE whose qualifiers match the largest strict subset
13062 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13063 qualifiers outside QUAL_MASK. */
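/* For instance (purely illustrative): if TYPE_QUALS is const|volatile and
   TYPE has registered variants for both "const T" and "volatile T", either
   single-qualifier variant is a largest strict subset (rank 1), and the
   qualifiers of whichever such variant is found first are returned.  */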
13064
13065 static int
13066 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13067 {
13068 tree t;
13069 int best_rank = 0, best_qual = 0, max_rank;
13070
13071 type_quals &= qual_mask;
13072 max_rank = popcount_hwi (type_quals) - 1;
13073
13074 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13075 t = TYPE_NEXT_VARIANT (t))
13076 {
13077 int q = TYPE_QUALS (t) & qual_mask;
13078
13079 if ((q & type_quals) == q && q != type_quals
13080 && check_base_type (t, type))
13081 {
13082 int rank = popcount_hwi (q);
13083
13084 if (rank > best_rank)
13085 {
13086 best_rank = rank;
13087 best_qual = q;
13088 }
13089 }
13090 }
13091
13092 return best_qual;
13093 }
13094
13095 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13096 static const dwarf_qual_info_t dwarf_qual_info[] =
13097 {
13098 { TYPE_QUAL_CONST, DW_TAG_const_type },
13099 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13100 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13101 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13102 };
13103 static const unsigned int dwarf_qual_info_size
13104 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13105
13106 /* If DIE is a qualified DIE of some base DIE with the same parent,
13107 return the base DIE, otherwise return NULL. Set MASK to the
13108 qualifiers added compared to the returned DIE. */
13109
13110 static dw_die_ref
13111 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13112 {
13113 unsigned int i;
13114 for (i = 0; i < dwarf_qual_info_size; i++)
13115 if (die->die_tag == dwarf_qual_info[i].t)
13116 break;
13117 if (i == dwarf_qual_info_size)
13118 return NULL;
13119 if (vec_safe_length (die->die_attr) != 1)
13120 return NULL;
13121 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13122 if (type == NULL || type->die_parent != die->die_parent)
13123 return NULL;
13124 *mask |= dwarf_qual_info[i].q;
13125 if (depth)
13126 {
13127 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13128 if (ret)
13129 return ret;
13130 }
13131 return type;
13132 }
13133
13134 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13135 entry that chains the modifiers specified by CV_QUALS in front of the
13136 given type. REVERSE is true if the type is to be interpreted in the
13137 reverse storage order wrt the target order. */
13138
13139 static dw_die_ref
13140 modified_type_die (tree type, int cv_quals, bool reverse,
13141 dw_die_ref context_die)
13142 {
13143 enum tree_code code = TREE_CODE (type);
13144 dw_die_ref mod_type_die;
13145 dw_die_ref sub_die = NULL;
13146 tree item_type = NULL;
13147 tree qualified_type;
13148 tree name, low, high;
13149 dw_die_ref mod_scope;
13150 /* Only these cv-qualifiers are currently handled. */
13151 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13152 			    | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC
13153 			    | ENCODE_QUAL_ADDR_SPACE (~0U));
13154 const bool reverse_base_type
13155 = need_endianity_attribute_p (reverse) && is_base_type (type);
13156
13157 if (code == ERROR_MARK)
13158 return NULL;
13159
13160 if (lang_hooks.types.get_debug_type)
13161 {
13162 tree debug_type = lang_hooks.types.get_debug_type (type);
13163
13164 if (debug_type != NULL_TREE && debug_type != type)
13165 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13166 }
13167
13168 cv_quals &= cv_qual_mask;
13169
13170 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13171      tag modifier (and not an attribute) that old consumers won't be
13172      able to handle.  */
13173 if (dwarf_version < 3)
13174 cv_quals &= ~TYPE_QUAL_RESTRICT;
13175
13176   /* Likewise for DW_TAG_atomic_type, which is new in DWARFv5.  */
13177 if (dwarf_version < 5)
13178 cv_quals &= ~TYPE_QUAL_ATOMIC;
13179
13180 /* See if we already have the appropriately qualified variant of
13181 this type. */
13182 qualified_type = get_qualified_type (type, cv_quals);
13183
13184 if (qualified_type == sizetype)
13185 {
13186 /* Try not to expose the internal sizetype type's name. */
13187 if (TYPE_NAME (qualified_type)
13188 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13189 {
13190 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13191
13192 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13193 && (TYPE_PRECISION (t)
13194 == TYPE_PRECISION (qualified_type))
13195 && (TYPE_UNSIGNED (t)
13196 == TYPE_UNSIGNED (qualified_type)));
13197 qualified_type = t;
13198 }
13199 else if (qualified_type == sizetype
13200 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13201 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13202 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13203 qualified_type = size_type_node;
13204 }
13205
13206 /* If we do, then we can just use its DIE, if it exists. */
13207 if (qualified_type)
13208 {
13209 mod_type_die = lookup_type_die (qualified_type);
13210
13211 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13212 dealt with specially: the DIE with the attribute, if it exists, is
13213 placed immediately after the regular DIE for the same base type. */
13214 if (mod_type_die
13215 && (!reverse_base_type
13216 || ((mod_type_die = mod_type_die->die_sib) != NULL
13217 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13218 return mod_type_die;
13219 }
13220
13221 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13222
13223 /* Handle C typedef types. */
13224 if (name
13225 && TREE_CODE (name) == TYPE_DECL
13226 && DECL_ORIGINAL_TYPE (name)
13227 && !DECL_ARTIFICIAL (name))
13228 {
13229 tree dtype = TREE_TYPE (name);
13230
13231 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13232 if (qualified_type == dtype && !reverse_base_type)
13233 {
13234 tree origin = decl_ultimate_origin (name);
13235
13236 /* Typedef variants that have an abstract origin don't get their own
13237 type DIE (see gen_typedef_die), so fall back on the ultimate
13238 abstract origin instead. */
13239 if (origin != NULL && origin != name)
13240 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13241 context_die);
13242
13243 /* For a named type, use the typedef. */
13244 gen_type_die (qualified_type, context_die);
13245 return lookup_type_die (qualified_type);
13246 }
13247 else
13248 {
13249 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13250 dquals &= cv_qual_mask;
13251 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13252 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13253 /* cv-unqualified version of named type. Just use
13254 the unnamed type to which it refers. */
13255 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13256 reverse, context_die);
13257 /* Else cv-qualified version of named type; fall through. */
13258 }
13259 }
13260
13261 mod_scope = scope_die_for (type, context_die);
13262
13263 if (cv_quals)
13264 {
13265 int sub_quals = 0, first_quals = 0;
13266 unsigned i;
13267 dw_die_ref first = NULL, last = NULL;
13268
13269 /* Determine a lesser qualified type that most closely matches
13270 this one. Then generate DW_TAG_* entries for the remaining
13271 qualifiers. */
13272 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13273 cv_qual_mask);
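      /* For example (sketch only): for a "const volatile int" where a DIE
	 for "const int" already exists in this scope, sub_quals comes back
	 as TYPE_QUAL_CONST, so only a DW_TAG_volatile_type DIE pointing at
	 the existing const DIE needs to be created below.  */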
13274 if (sub_quals && use_debug_types)
13275 {
13276 bool needed = false;
13277 /* If emitting type units, make sure the order of qualifiers
13278 is canonical. Thus, start from unqualified type if
13279 an earlier qualifier is missing in sub_quals, but some later
13280 one is present there. */
13281 for (i = 0; i < dwarf_qual_info_size; i++)
13282 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13283 needed = true;
13284 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13285 {
13286 sub_quals = 0;
13287 break;
13288 }
13289 }
13290 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13291 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13292 {
13293 /* As not all intermediate qualified DIEs have corresponding
13294 tree types, ensure that qualified DIEs in the same scope
13295 as their DW_AT_type are emitted after their DW_AT_type,
13296 only with other qualified DIEs for the same type possibly
13297 in between them. Determine the range of such qualified
13298 DIEs now (first being the base type, last being corresponding
13299 last qualified DIE for it). */
13300 unsigned int count = 0;
13301 first = qualified_die_p (mod_type_die, &first_quals,
13302 dwarf_qual_info_size);
13303 if (first == NULL)
13304 first = mod_type_die;
13305 gcc_assert ((first_quals & ~sub_quals) == 0);
13306 for (count = 0, last = first;
13307 count < (1U << dwarf_qual_info_size);
13308 count++, last = last->die_sib)
13309 {
13310 int quals = 0;
13311 if (last == mod_scope->die_child)
13312 break;
13313 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13314 != first)
13315 break;
13316 }
13317 }
13318
13319 for (i = 0; i < dwarf_qual_info_size; i++)
13320 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13321 {
13322 dw_die_ref d;
13323 if (first && first != last)
13324 {
13325 for (d = first->die_sib; ; d = d->die_sib)
13326 {
13327 int quals = 0;
13328 qualified_die_p (d, &quals, dwarf_qual_info_size);
13329 if (quals == (first_quals | dwarf_qual_info[i].q))
13330 break;
13331 if (d == last)
13332 {
13333 d = NULL;
13334 break;
13335 }
13336 }
13337 if (d)
13338 {
13339 mod_type_die = d;
13340 continue;
13341 }
13342 }
13343 if (first)
13344 {
13345 d = new_die_raw (dwarf_qual_info[i].t);
13346 add_child_die_after (mod_scope, d, last);
13347 last = d;
13348 }
13349 else
13350 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13351 if (mod_type_die)
13352 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13353 mod_type_die = d;
13354 first_quals |= dwarf_qual_info[i].q;
13355 }
13356 }
13357 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13358 {
13359 dwarf_tag tag = DW_TAG_pointer_type;
13360 if (code == REFERENCE_TYPE)
13361 {
13362 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13363 tag = DW_TAG_rvalue_reference_type;
13364 else
13365 tag = DW_TAG_reference_type;
13366 }
13367 mod_type_die = new_die (tag, mod_scope, type);
13368
13369 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13370 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13371 add_alignment_attribute (mod_type_die, type);
13372 item_type = TREE_TYPE (type);
13373
13374 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13375 if (!ADDR_SPACE_GENERIC_P (as))
13376 {
13377 int action = targetm.addr_space.debug (as);
13378 if (action >= 0)
13379 {
13380 /* Positive values indicate an address_class. */
13381 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13382 }
13383 else
13384 {
13385 /* Negative values indicate an (inverted) segment base reg. */
13386 dw_loc_descr_ref d
13387 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13388 add_AT_loc (mod_type_die, DW_AT_segment, d);
13389 }
13390 }
13391 }
13392 else if (code == INTEGER_TYPE
13393 && TREE_TYPE (type) != NULL_TREE
13394 && subrange_type_for_debug_p (type, &low, &high))
13395 {
13396 tree bias = NULL_TREE;
13397 if (lang_hooks.types.get_type_bias)
13398 bias = lang_hooks.types.get_type_bias (type);
13399 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13400 item_type = TREE_TYPE (type);
13401 }
13402 else if (is_base_type (type))
13403 {
13404 mod_type_die = base_type_die (type, reverse);
13405
13406 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13407 if (reverse_base_type)
13408 {
13409 dw_die_ref after_die
13410 = modified_type_die (type, cv_quals, false, context_die);
13411 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13412 }
13413 else
13414 add_child_die (comp_unit_die (), mod_type_die);
13415
13416 add_pubtype (type, mod_type_die);
13417 }
13418 else
13419 {
13420 gen_type_die (type, context_die);
13421
13422 /* We have to get the type_main_variant here (and pass that to the
13423 `lookup_type_die' routine) because the ..._TYPE node we have
13424 might simply be a *copy* of some original type node (where the
13425 copy was created to help us keep track of typedef names) and
13426 that copy might have a different TYPE_UID from the original
13427 ..._TYPE node. */
13428 if (TREE_CODE (type) == FUNCTION_TYPE
13429 || TREE_CODE (type) == METHOD_TYPE)
13430 {
13431 /* For function/method types, can't just use type_main_variant here,
13432 because that can have different ref-qualifiers for C++,
13433 but try to canonicalize. */
13434 tree main = TYPE_MAIN_VARIANT (type);
13435 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13436 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13437 && check_base_type (t, main)
13438 && check_lang_type (t, type))
13439 return lookup_type_die (t);
13440 return lookup_type_die (type);
13441 }
13442 else if (TREE_CODE (type) != VECTOR_TYPE
13443 && TREE_CODE (type) != ARRAY_TYPE)
13444 return lookup_type_die (type_main_variant (type));
13445 else
13446 /* Vectors have the debugging information in the type,
13447 not the main variant. */
13448 return lookup_type_die (type);
13449 }
13450
13451 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13452 don't output a DW_TAG_typedef, since there isn't one in the
13453 user's program; just attach a DW_AT_name to the type.
13454 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13455 if the base type already has the same name. */
13456 if (name
13457 && ((TREE_CODE (name) != TYPE_DECL
13458 && (qualified_type == TYPE_MAIN_VARIANT (type)
13459 || (cv_quals == TYPE_UNQUALIFIED)))
13460 || (TREE_CODE (name) == TYPE_DECL
13461 && TREE_TYPE (name) == qualified_type
13462 && DECL_NAME (name))))
13463 {
13464 if (TREE_CODE (name) == TYPE_DECL)
13465 /* Could just call add_name_and_src_coords_attributes here,
13466 but since this is a builtin type it doesn't have any
13467 useful source coordinates anyway. */
13468 name = DECL_NAME (name);
13469 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13470 }
13471 /* This probably indicates a bug. */
13472 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13473 {
13474 name = TYPE_IDENTIFIER (type);
13475 add_name_attribute (mod_type_die,
13476 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13477 }
13478
13479 if (qualified_type && !reverse_base_type)
13480 equate_type_number_to_die (qualified_type, mod_type_die);
13481
13482 if (item_type)
13483 /* We must do this after the equate_type_number_to_die call, in case
13484 this is a recursive type. This ensures that the modified_type_die
13485 recursion will terminate even if the type is recursive. Recursive
13486 types are possible in Ada. */
13487 sub_die = modified_type_die (item_type,
13488 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13489 reverse,
13490 context_die);
13491
13492 if (sub_die != NULL)
13493 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13494
13495 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13496 if (TYPE_ARTIFICIAL (type))
13497 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13498
13499 return mod_type_die;
13500 }
13501
13502 /* Generate DIEs for the generic parameters of T.
13503 T must be either a generic type or a generic function.
13504 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13505
13506 static void
13507 gen_generic_params_dies (tree t)
13508 {
13509 tree parms, args;
13510 int parms_num, i;
13511 dw_die_ref die = NULL;
13512 int non_default;
13513
13514 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13515 return;
13516
13517 if (TYPE_P (t))
13518 die = lookup_type_die (t);
13519 else if (DECL_P (t))
13520 die = lookup_decl_die (t);
13521
13522 gcc_assert (die);
13523
13524 parms = lang_hooks.get_innermost_generic_parms (t);
13525 if (!parms)
13526 /* T has no generic parameter. It means T is neither a generic type
13527        nor a generic function.  End of story.  */
13528 return;
13529
13530 parms_num = TREE_VEC_LENGTH (parms);
13531 args = lang_hooks.get_innermost_generic_args (t);
13532 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13533 non_default = int_cst_value (TREE_CHAIN (args));
13534 else
13535 non_default = TREE_VEC_LENGTH (args);
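  /* As an illustration (hypothetical front-end data): for
     template <typename T, typename U = int> struct S; instantiated as
     S<char>, the front end records one non-default argument, so the DIE
     generated below for U is flagged with DW_AT_default_value.  */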
13536 for (i = 0; i < parms_num; i++)
13537 {
13538 tree parm, arg, arg_pack_elems;
13539 dw_die_ref parm_die;
13540
13541 parm = TREE_VEC_ELT (parms, i);
13542 arg = TREE_VEC_ELT (args, i);
13543 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13544 gcc_assert (parm && TREE_VALUE (parm) && arg);
13545
13546 if (parm && TREE_VALUE (parm) && arg)
13547 {
13548 /* If PARM represents a template parameter pack,
13549 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13550 by DW_TAG_template_*_parameter DIEs for the argument
13551 pack elements of ARG. Note that ARG would then be
13552 an argument pack. */
13553 if (arg_pack_elems)
13554 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13555 arg_pack_elems,
13556 die);
13557 else
13558 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13559 true /* emit name */, die);
13560 if (i >= non_default)
13561 add_AT_flag (parm_die, DW_AT_default_value, 1);
13562 }
13563 }
13564 }
13565
13566 /* Create and return a DIE for PARM which should be
13567 the representation of a generic type parameter.
13568 For instance, in the C++ front end, PARM would be a template parameter.
13569 ARG is the argument to PARM.
13570    EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13571    the name of PARM.
13572 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13573 as a child node. */
13574
13575 static dw_die_ref
13576 generic_parameter_die (tree parm, tree arg,
13577 bool emit_name_p,
13578 dw_die_ref parent_die)
13579 {
13580 dw_die_ref tmpl_die = NULL;
13581 const char *name = NULL;
13582
13583 if (!parm || !DECL_NAME (parm) || !arg)
13584 return NULL;
13585
13586 /* We support non-type generic parameters and arguments,
13587 type generic parameters and arguments, as well as
13588 generic generic parameters (a.k.a. template template parameters in C++)
13589 and arguments. */
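  /* For instance (illustrative, C++ only): in template <typename T, int N>
     struct A, T yields a DW_TAG_template_type_param and N yields a
     DW_TAG_template_value_param, while a template template parameter such
     as template <template <typename> class C> yields the GNU extension
     DW_TAG_GNU_template_template_param.  */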
13590 if (TREE_CODE (parm) == PARM_DECL)
13591 /* PARM is a nontype generic parameter */
13592 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13593 else if (TREE_CODE (parm) == TYPE_DECL)
13594 /* PARM is a type generic parameter. */
13595 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13596 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13597 /* PARM is a generic generic parameter.
13598 Its DIE is a GNU extension. It shall have a
13599 DW_AT_name attribute to represent the name of the template template
13600 parameter, and a DW_AT_GNU_template_name attribute to represent the
13601 name of the template template argument. */
13602 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13603 parent_die, parm);
13604 else
13605 gcc_unreachable ();
13606
13607 if (tmpl_die)
13608 {
13609 tree tmpl_type;
13610
13611 /* If PARM is a generic parameter pack, it means we are
13612 emitting debug info for a template argument pack element.
13613 In other terms, ARG is a template argument pack element.
13614 In that case, we don't emit any DW_AT_name attribute for
13615 the die. */
13616 if (emit_name_p)
13617 {
13618 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13619 gcc_assert (name);
13620 add_AT_string (tmpl_die, DW_AT_name, name);
13621 }
13622
13623 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13624 {
13625 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13626 TMPL_DIE should have a child DW_AT_type attribute that is set
13627 to the type of the argument to PARM, which is ARG.
13628 If PARM is a type generic parameter, TMPL_DIE should have a
13629 child DW_AT_type that is set to ARG. */
13630 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13631 add_type_attribute (tmpl_die, tmpl_type,
13632 (TREE_THIS_VOLATILE (tmpl_type)
13633 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13634 false, parent_die);
13635 }
13636 else
13637 {
13638 	  /* So TMPL_DIE is a DIE representing a generic generic template
13639 	     parameter, a.k.a. a template template parameter in C++, and
13640 	     ARG is a template.  */
13641
13642 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13643 to the name of the argument. */
13644 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13645 if (name)
13646 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13647 }
13648
13649 if (TREE_CODE (parm) == PARM_DECL)
13650       /* So PARM is a non-type generic parameter.
13651 	 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13652 	 attribute of TMPL_DIE whose value represents the value
13653 	 of ARG.
13654 	 We must be careful here:
13655 	 the value of ARG might reference some function decls.
13656 	 We might currently be emitting debug info for a generic
13657 	 type, and since types are emitted before function decls, we
13658 	 don't know whether the function decls referenced by ARG will
13659 	 actually be emitted after the cgraph computations.
13660 	 So we must defer the generation of the DW_AT_const_value to
13661 	 after cgraph is ready.  */
13662 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13663 }
13664
13665 return tmpl_die;
13666 }
13667
13668 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13669    PARM_PACK, which must be a template parameter pack.  The returned DIE
13670 will be child DIE of PARENT_DIE. */
13671
13672 static dw_die_ref
13673 template_parameter_pack_die (tree parm_pack,
13674 tree parm_pack_args,
13675 dw_die_ref parent_die)
13676 {
13677 dw_die_ref die;
13678 int j;
13679
13680 gcc_assert (parent_die && parm_pack);
13681
13682 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13683 add_name_and_src_coords_attributes (die, parm_pack);
13684 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13685 generic_parameter_die (parm_pack,
13686 TREE_VEC_ELT (parm_pack_args, j),
13687 false /* Don't emit DW_AT_name */,
13688 die);
13689 return die;
13690 }
13691
13692 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13693 an enumerated type. */
13694
13695 static inline int
13696 type_is_enum (const_tree type)
13697 {
13698 return TREE_CODE (type) == ENUMERAL_TYPE;
13699 }
13700
13701 /* Return the DBX register number described by a given RTL node. */
13702
13703 static unsigned int
13704 dbx_reg_number (const_rtx rtl)
13705 {
13706 unsigned regno = REGNO (rtl);
13707
13708 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13709
13710 #ifdef LEAF_REG_REMAP
13711 if (crtl->uses_only_leaf_regs)
13712 {
13713 int leaf_reg = LEAF_REG_REMAP (regno);
13714 if (leaf_reg != -1)
13715 regno = (unsigned) leaf_reg;
13716 }
13717 #endif
13718
13719 regno = DBX_REGISTER_NUMBER (regno);
13720 gcc_assert (regno != INVALID_REGNUM);
13721 return regno;
13722 }
13723
13724 /* Optionally add a DW_OP_piece term to a location description expression.
13725    DW_OP_piece is only added if the location description expression doesn't
13726    already end with DW_OP_piece.  */
13727
13728 static void
13729 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13730 {
13731 dw_loc_descr_ref loc;
13732
13733 if (*list_head != NULL)
13734 {
13735 /* Find the end of the chain. */
13736 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13737 ;
13738
13739 if (loc->dw_loc_opc != DW_OP_piece)
13740 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13741 }
13742 }
13743
13744 /* Return a location descriptor that designates a machine register or
13745 zero if there is none. */
13746
13747 static dw_loc_descr_ref
13748 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13749 {
13750 rtx regs;
13751
13752 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13753 return 0;
13754
13755 /* We only use "frame base" when we're sure we're talking about the
13756 post-prologue local stack frame. We do this by *not* running
13757 register elimination until this point, and recognizing the special
13758 argument pointer and soft frame pointer rtx's.
13759 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13760 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13761 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13762 {
13763 dw_loc_descr_ref result = NULL;
13764
13765 if (dwarf_version >= 4 || !dwarf_strict)
13766 {
13767 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13768 initialized);
13769 if (result)
13770 add_loc_descr (&result,
13771 new_loc_descr (DW_OP_stack_value, 0, 0));
13772 }
13773 return result;
13774 }
13775
13776 regs = targetm.dwarf_register_span (rtl);
13777
13778 if (REG_NREGS (rtl) > 1 || regs)
13779 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13780 else
13781 {
13782 unsigned int dbx_regnum = dbx_reg_number (rtl);
13783 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13784 return 0;
13785 return one_reg_loc_descriptor (dbx_regnum, initialized);
13786 }
13787 }
13788
13789 /* Return a location descriptor that designates a machine register for
13790 a given hard register number. */
13791
13792 static dw_loc_descr_ref
13793 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13794 {
13795 dw_loc_descr_ref reg_loc_descr;
13796
13797 if (regno <= 31)
13798 reg_loc_descr
13799 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13800 else
13801 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13802
13803 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13804 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13805
13806 return reg_loc_descr;
13807 }
13808
13809 /* Given an RTL of a register, return a location descriptor that
13810 designates a value that spans more than one register. */
13811
13812 static dw_loc_descr_ref
13813 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13814 enum var_init_status initialized)
13815 {
13816 int size, i;
13817 dw_loc_descr_ref loc_result = NULL;
13818
13819 /* Simple, contiguous registers. */
13820 if (regs == NULL_RTX)
13821 {
13822 unsigned reg = REGNO (rtl);
13823 int nregs;
13824
13825 #ifdef LEAF_REG_REMAP
13826 if (crtl->uses_only_leaf_regs)
13827 {
13828 int leaf_reg = LEAF_REG_REMAP (reg);
13829 if (leaf_reg != -1)
13830 reg = (unsigned) leaf_reg;
13831 }
13832 #endif
13833
13834 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13835 nregs = REG_NREGS (rtl);
13836
13837 /* At present we only track constant-sized pieces. */
13838 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13839 return NULL;
13840 size /= nregs;
13841
13842 loc_result = NULL;
13843 while (nregs--)
13844 {
13845 dw_loc_descr_ref t;
13846
13847 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13848 VAR_INIT_STATUS_INITIALIZED);
13849 add_loc_descr (&loc_result, t);
13850 add_loc_descr_op_piece (&loc_result, size);
13851 ++reg;
13852 }
13853 return loc_result;
13854 }
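  /* E.g. (assuming DBX numbers equal hard register numbers): an 8-byte
     value living in two consecutive 4-byte registers 10 and 11 comes out
     of the loop above as DW_OP_reg10 DW_OP_piece 4 DW_OP_reg11
     DW_OP_piece 4.  */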
13855
13856   /* Now onto stupid register sets in non-contiguous locations.  */
13857
13858 gcc_assert (GET_CODE (regs) == PARALLEL);
13859
13860 /* At present we only track constant-sized pieces. */
13861 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13862 return NULL;
13863 loc_result = NULL;
13864
13865 for (i = 0; i < XVECLEN (regs, 0); ++i)
13866 {
13867 dw_loc_descr_ref t;
13868
13869 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13870 VAR_INIT_STATUS_INITIALIZED);
13871 add_loc_descr (&loc_result, t);
13872 add_loc_descr_op_piece (&loc_result, size);
13873 }
13874
13875 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13876 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13877 return loc_result;
13878 }
13879
13880 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13881
13882 /* Return a location descriptor that designates a constant i,
13883 as a compound operation from constant (i >> shift), constant shift
13884 and DW_OP_shl. */
13885
13886 static dw_loc_descr_ref
13887 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13888 {
13889 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13890 add_loc_descr (&ret, int_loc_descriptor (shift));
13891 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13892 return ret;
13893 }
13894
13895 /* Return a location descriptor that designates constant POLY_I. */
13896
13897 static dw_loc_descr_ref
13898 int_loc_descriptor (poly_int64 poly_i)
13899 {
13900 enum dwarf_location_atom op;
13901
13902 HOST_WIDE_INT i;
13903 if (!poly_i.is_constant (&i))
13904 {
13905 /* Create location descriptions for the non-constant part and
13906 add any constant offset at the end. */
13907 dw_loc_descr_ref ret = NULL;
13908 HOST_WIDE_INT constant = poly_i.coeffs[0];
13909 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13910 {
13911 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13912 if (coeff != 0)
13913 {
13914 dw_loc_descr_ref start = ret;
13915 unsigned int factor;
13916 int bias;
13917 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13918 (j, &factor, &bias);
13919
13920 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13921 add COEFF * (REGNO / FACTOR) now and subtract
13922 COEFF * BIAS from the final constant part. */
13923 constant -= coeff * bias;
13924 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13925 if (coeff % factor == 0)
13926 coeff /= factor;
13927 else
13928 {
13929 int amount = exact_log2 (factor);
13930 gcc_assert (amount >= 0);
13931 add_loc_descr (&ret, int_loc_descriptor (amount));
13932 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13933 }
13934 if (coeff != 1)
13935 {
13936 add_loc_descr (&ret, int_loc_descriptor (coeff));
13937 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13938 }
13939 if (start)
13940 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13941 }
13942 }
13943 loc_descr_plus_const (&ret, constant);
13944 return ret;
13945 }
13946
13947 /* Pick the smallest representation of a constant, rather than just
13948 defaulting to the LEB encoding. */
13949 if (i >= 0)
13950 {
13951 int clz = clz_hwi (i);
13952 int ctz = ctz_hwi (i);
13953 if (i <= 31)
13954 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13955 else if (i <= 0xff)
13956 op = DW_OP_const1u;
13957 else if (i <= 0xffff)
13958 op = DW_OP_const2u;
13959 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13960 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13961 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13962 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13963 while DW_OP_const4u is 5 bytes. */
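	/* Sketch of the saving (64-bit HOST_WIDE_INT assumed): for
	   i == 0x12000000, clz == 35 and ctz == 25, so the shift below is
	   24 and the result is DW_OP_lit18 DW_OP_lit24 DW_OP_shl, 3 bytes
	   instead of the 5 bytes of DW_OP_const4u.  */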
13964 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13965 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13966 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13967 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13968 while DW_OP_const4u is 5 bytes. */
13969 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13970
13971 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13972 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13973 <= 4)
13974 {
13975 /* As i >= 2**31, the double cast above will yield a negative number.
13976 Since wrapping is defined in DWARF expressions we can output big
13977 positive integers as small negative ones, regardless of the size
13978 of host wide ints.
13979
13980 Here, since the evaluator will handle 32-bit values and since i >=
13981 2**31, we know it's going to be interpreted as a negative literal:
13982 	     store it this way if it beats the 5 bytes DW_OP_const4u would take.  */
13983 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13984 }
13985 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13986 op = DW_OP_const4u;
13987
13988 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13989 least 6 bytes: see if we can do better before falling back to it. */
13990 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13991 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13992 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13993 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13994 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13995 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13996 >= HOST_BITS_PER_WIDE_INT)
13997 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13998 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13999 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14000 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14001 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14002 && size_of_uleb128 (i) > 6)
14003 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14004 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14005 else
14006 op = DW_OP_constu;
14007 }
14008 else
14009 {
14010 if (i >= -0x80)
14011 op = DW_OP_const1s;
14012 else if (i >= -0x8000)
14013 op = DW_OP_const2s;
14014 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14015 {
14016 if (size_of_int_loc_descriptor (i) < 5)
14017 {
14018 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14019 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14020 return ret;
14021 }
14022 op = DW_OP_const4s;
14023 }
14024 else
14025 {
14026 if (size_of_int_loc_descriptor (i)
14027 < (unsigned long) 1 + size_of_sleb128 (i))
14028 {
14029 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14030 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14031 return ret;
14032 }
14033 op = DW_OP_consts;
14034 }
14035 }
14036
14037 return new_loc_descr (op, i, 0);
14038 }
14039
14040 /* Likewise, for unsigned constants. */
14041
14042 static dw_loc_descr_ref
14043 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14044 {
14045 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14046 const unsigned HOST_WIDE_INT max_uint
14047 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14048
14049 /* If possible, use the clever signed constants handling. */
14050 if (i <= max_int)
14051 return int_loc_descriptor ((HOST_WIDE_INT) i);
14052
14053 /* Here, we are left with positive numbers that cannot be represented as
14054 HOST_WIDE_INT, i.e.:
14055 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14056
14057      Using a DW_OP_const4u/const8u operation to encode them consumes a lot
14058      of bytes, whereas it may be better to output a negative integer: thanks
14059      to integer wrapping, we know that:
14060 	x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14061 	  = x - 2 * (max (HOST_WIDE_INT) + 1)
14062      So numbers close to max (unsigned HOST_WIDE_INT) could be represented
14063      as small negative integers.  Let's try that in cases where it will
14064      clearly improve the encoding: there is no gain turning DW_OP_const4u
14065      into DW_OP_const4s.  */
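  /* Worked instance (assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
     HOST_WIDE_INT): i == 0xfffffffffffffff0 wraps to second_shift == -16
     below, which encodes as the 2-byte DW_OP_const1s -16 instead of the
     9-byte DW_OP_const8u.  */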
14066 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14067 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14068 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14069 {
14070 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14071
14072 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14073 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14074 const HOST_WIDE_INT second_shift
14075 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14076
14077 /* So we finally have:
14078 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14079 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14080 return int_loc_descriptor (second_shift);
14081 }
14082
14083 /* Last chance: fallback to a simple constant operation. */
14084 return new_loc_descr
14085 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14086 ? DW_OP_const4u
14087 : DW_OP_const8u,
14088 i, 0);
14089 }
14090
14091 /* Generate and return a location description that computes the unsigned
14092 comparison of the two stack top entries (a OP b where b is the top-most
14093 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14094 LE_EXPR, GT_EXPR or GE_EXPR. */
14095
14096 static dw_loc_descr_ref
14097 uint_comparison_loc_list (enum tree_code kind)
14098 {
14099 enum dwarf_location_atom op, flip_op;
14100 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14101
14102 switch (kind)
14103 {
14104 case LT_EXPR:
14105 op = DW_OP_lt;
14106 break;
14107 case LE_EXPR:
14108 op = DW_OP_le;
14109 break;
14110 case GT_EXPR:
14111 op = DW_OP_gt;
14112 break;
14113 case GE_EXPR:
14114 op = DW_OP_ge;
14115 break;
14116 default:
14117 gcc_unreachable ();
14118 }
14119
14120 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14121 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14122
14123 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14124 possible to perform unsigned comparisons: we just have to distinguish
14125      two cases:
14126
14127 1. when a and b have the same sign (as signed integers); then we should
14128 return: a OP(signed) b;
14129
14130 2. when a is a negative signed integer while b is a positive one, then a
14131 is a greater unsigned integer than b; likewise when a and b's roles
14132 are flipped.
14133
14134 So first, compare the sign of the two operands. */
14135 ret = new_loc_descr (DW_OP_over, 0, 0);
14136 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14137 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14138 /* If they have different signs (i.e. they have different sign bits), then
14139 the stack top value has now the sign bit set and thus it's smaller than
14140 zero. */
14141 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14142 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14143 add_loc_descr (&ret, bra_node);
14144
14145 /* We are in case 1. At this point, we know both operands have the same
14146      sign, so it is safe to use the built-in signed comparison. */
14147 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14148 add_loc_descr (&ret, jmp_node);
14149
14150 /* We are in case 2. Here, we know both operands do not have the same sign,
14151 so we have to flip the signed comparison. */
14152 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14153 tmp = new_loc_descr (flip_op, 0, 0);
14154 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14155 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14156 add_loc_descr (&ret, tmp);
14157
14158 /* This dummy operation is necessary to make the two branches join. */
14159 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14160 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14161 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14162 add_loc_descr (&ret, tmp);
14163
14164 return ret;
14165 }
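
/* Added illustration (not in the original sources): for KIND == LT_EXPR the
   routine above emits

       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
       DW_OP_lt DW_OP_skip <L2>
   L1: DW_OP_gt
   L2: DW_OP_nop

   i.e. when the sign bits of a and b differ, branch to the flipped signed
   comparison, otherwise use the signed comparison directly.  */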
14166
14167 /* Likewise, but takes the location description lists (might be destructive on
14168 them). Return NULL if either is NULL or if concatenation fails. */
14169
14170 static dw_loc_list_ref
14171 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14172 enum tree_code kind)
14173 {
14174 if (left == NULL || right == NULL)
14175 return NULL;
14176
14177 add_loc_list (&left, right);
14178 if (left == NULL)
14179 return NULL;
14180
14181 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14182 return left;
14183 }
14184
14185 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14186 without actually allocating it. */
14187
14188 static unsigned long
14189 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14190 {
14191 return size_of_int_loc_descriptor (i >> shift)
14192 + size_of_int_loc_descriptor (shift)
14193 + 1;
14194 }
14195
14196 /* Return size_of_locs (int_loc_descriptor (i)) without
14197 actually allocating it. */
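/* Added examples: size_of_int_loc_descriptor (5) == 1 (DW_OP_lit5),
   size_of_int_loc_descriptor (300) == 3 (DW_OP_const2u <300>) and
   size_of_int_loc_descriptor (-70000) == 5 (DW_OP_const4s <-70000>, since
   negating and appending DW_OP_neg would cost 6 bytes).  */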
14198
14199 static unsigned long
14200 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14201 {
14202 unsigned long s;
14203
14204 if (i >= 0)
14205 {
14206 int clz, ctz;
14207 if (i <= 31)
14208 return 1;
14209 else if (i <= 0xff)
14210 return 2;
14211 else if (i <= 0xffff)
14212 return 3;
14213 clz = clz_hwi (i);
14214 ctz = ctz_hwi (i);
14215 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14216 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14217 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14218 - clz - 5);
14219 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14220 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14221 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14222 - clz - 8);
14223 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14224 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14225 <= 4)
14226 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14227 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14228 return 5;
14229 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14230 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14231 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14232 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14233 - clz - 8);
14234 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14235 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14236 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14237 - clz - 16);
14238 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14239 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14240 && s > 6)
14241 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14242 - clz - 32);
14243 else
14244 return 1 + s;
14245 }
14246 else
14247 {
14248 if (i >= -0x80)
14249 return 2;
14250 else if (i >= -0x8000)
14251 return 3;
14252 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14253 {
14254 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14255 {
14256 s = size_of_int_loc_descriptor (-i) + 1;
14257 if (s < 5)
14258 return s;
14259 }
14260 return 5;
14261 }
14262 else
14263 {
14264 unsigned long r = 1 + size_of_sleb128 (i);
14265 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14266 {
14267 s = size_of_int_loc_descriptor (-i) + 1;
14268 if (s < r)
14269 return s;
14270 }
14271 return r;
14272 }
14273 }
14274 }
14275
14276 /* Return loc description representing "address" of integer value.
14277    This can appear only as a toplevel expression. */
14278
14279 static dw_loc_descr_ref
14280 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14281 {
14282 int litsize;
14283 dw_loc_descr_ref loc_result = NULL;
14284
14285 if (!(dwarf_version >= 4 || !dwarf_strict))
14286 return NULL;
14287
14288 litsize = size_of_int_loc_descriptor (i);
14289 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14290 is more compact. For DW_OP_stack_value we need:
14291 litsize + 1 (DW_OP_stack_value)
14292 and for DW_OP_implicit_value:
14293 1 (DW_OP_implicit_value) + 1 (length) + size. */
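  /* Added example (assuming DWARF2_ADDR_SIZE >= 4): for size == 4 and i == 5,
     litsize is 1 (DW_OP_lit5), so the DW_OP_stack_value form costs 2 bytes
     while DW_OP_implicit_value would cost 1 + 1 + 4 == 6 bytes, and the
     stack-value form is chosen below.  */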
14294 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14295 {
14296 loc_result = int_loc_descriptor (i);
14297 add_loc_descr (&loc_result,
14298 new_loc_descr (DW_OP_stack_value, 0, 0));
14299 return loc_result;
14300 }
14301
14302 loc_result = new_loc_descr (DW_OP_implicit_value,
14303 size, 0);
14304 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14305 loc_result->dw_loc_oprnd2.v.val_int = i;
14306 return loc_result;
14307 }
14308
14309 /* Return a location descriptor that designates a base+offset location. */
14310
14311 static dw_loc_descr_ref
14312 based_loc_descr (rtx reg, poly_int64 offset,
14313 enum var_init_status initialized)
14314 {
14315 unsigned int regno;
14316 dw_loc_descr_ref result;
14317 dw_fde_ref fde = cfun->fde;
14318
14319 /* We only use "frame base" when we're sure we're talking about the
14320 post-prologue local stack frame. We do this by *not* running
14321 register elimination until this point, and recognizing the special
14322 argument pointer and soft frame pointer rtx's. */
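  /* Illustrative note (added): e.g. a local addressed as the soft frame
     pointer plus 8 is typically emitted below as
     DW_OP_fbreg <8 + frame_pointer_fb_offset>, i.e. relative to whatever
     DW_AT_frame_base evaluates to.  */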
14323 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14324 {
14325 rtx elim = (ira_use_lra_p
14326 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14327 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14328
14329 if (elim != reg)
14330 {
14331 /* Allow hard frame pointer here even if frame pointer
14332 isn't used since hard frame pointer is encoded with
14333 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14334 not hard frame pointer directly. */
14335 elim = strip_offset_and_add (elim, &offset);
14336 gcc_assert (elim == hard_frame_pointer_rtx
14337 || elim == stack_pointer_rtx);
14338
14339 	  /* If the drap register is used to align the stack, use frame
14340 	     pointer + offset to access stack variables.  If the stack
14341 	     is aligned without drap, use stack pointer + offset to
14342 	     access stack variables. */
14343 if (crtl->stack_realign_tried
14344 && reg == frame_pointer_rtx)
14345 {
14346 int base_reg
14347 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14348 ? HARD_FRAME_POINTER_REGNUM
14349 : REGNO (elim));
14350 return new_reg_loc_descr (base_reg, offset);
14351 }
14352
14353 gcc_assert (frame_pointer_fb_offset_valid);
14354 offset += frame_pointer_fb_offset;
14355 HOST_WIDE_INT const_offset;
14356 if (offset.is_constant (&const_offset))
14357 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14358 else
14359 {
14360 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14361 loc_descr_plus_const (&ret, offset);
14362 return ret;
14363 }
14364 }
14365 }
14366
14367 regno = REGNO (reg);
14368 #ifdef LEAF_REG_REMAP
14369 if (crtl->uses_only_leaf_regs)
14370 {
14371 int leaf_reg = LEAF_REG_REMAP (regno);
14372 if (leaf_reg != -1)
14373 regno = (unsigned) leaf_reg;
14374 }
14375 #endif
14376 regno = DWARF_FRAME_REGNUM (regno);
14377
14378 HOST_WIDE_INT const_offset;
14379 if (!optimize && fde
14380 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14381 && offset.is_constant (&const_offset))
14382 {
14383       /* Use cfa+offset to represent the location of arguments passed
14384 	 on the stack when drap is used to align the stack.
14385 	 Only do this when not optimizing; for optimized code var-tracking
14386 	 is supposed to track where the arguments live, and the register
14387 	 used as vdrap or drap in some spot might be used for something
14388 	 else in other parts of the routine. */
14389 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14390 }
14391
14392 result = new_reg_loc_descr (regno, offset);
14393
14394 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14395 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14396
14397 return result;
14398 }
14399
14400 /* Return true if this RTL expression describes a base+offset calculation. */
14401
14402 static inline int
14403 is_based_loc (const_rtx rtl)
14404 {
14405 return (GET_CODE (rtl) == PLUS
14406 && ((REG_P (XEXP (rtl, 0))
14407 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14408 && CONST_INT_P (XEXP (rtl, 1)))));
14409 }
14410
14411 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14412 failed. */
14413
14414 static dw_loc_descr_ref
14415 tls_mem_loc_descriptor (rtx mem)
14416 {
14417 tree base;
14418 dw_loc_descr_ref loc_result;
14419
14420 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14421 return NULL;
14422
14423 base = get_base_address (MEM_EXPR (mem));
14424 if (base == NULL
14425 || !VAR_P (base)
14426 || !DECL_THREAD_LOCAL_P (base))
14427 return NULL;
14428
14429 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14430 if (loc_result == NULL)
14431 return NULL;
14432
14433 if (maybe_ne (MEM_OFFSET (mem), 0))
14434 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14435
14436 return loc_result;
14437 }
14438
14439 /* Output debug info about the reason why we failed to expand an expression
14440    as a DWARF expression. */
14441
14442 static void
14443 expansion_failed (tree expr, rtx rtl, char const *reason)
14444 {
14445 if (dump_file && (dump_flags & TDF_DETAILS))
14446 {
14447 fprintf (dump_file, "Failed to expand as dwarf: ");
14448 if (expr)
14449 print_generic_expr (dump_file, expr, dump_flags);
14450 if (rtl)
14451 {
14452 fprintf (dump_file, "\n");
14453 print_rtl (dump_file, rtl);
14454 }
14455 fprintf (dump_file, "\nReason: %s\n", reason);
14456 }
14457 }
14458
14459 /* Helper function for const_ok_for_output. */
14460
14461 static bool
14462 const_ok_for_output_1 (rtx rtl)
14463 {
14464 if (targetm.const_not_ok_for_debug_p (rtl))
14465 {
14466 if (GET_CODE (rtl) != UNSPEC)
14467 {
14468 expansion_failed (NULL_TREE, rtl,
14469 "Expression rejected for debug by the backend.\n");
14470 return false;
14471 }
14472
14473 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14474 the target hook doesn't explicitly allow it in debug info, assume
14475 we can't express it in the debug info. */
14476 /* Don't complain about TLS UNSPECs, those are just too hard to
14477 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14478 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14479 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14480 if (flag_checking
14481 && (XVECLEN (rtl, 0) == 0
14482 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14483 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14484 inform (current_function_decl
14485 ? DECL_SOURCE_LOCATION (current_function_decl)
14486 : UNKNOWN_LOCATION,
14487 #if NUM_UNSPEC_VALUES > 0
14488 "non-delegitimized UNSPEC %s (%d) found in variable location",
14489 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14490 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14491 XINT (rtl, 1));
14492 #else
14493 "non-delegitimized UNSPEC %d found in variable location",
14494 XINT (rtl, 1));
14495 #endif
14496 expansion_failed (NULL_TREE, rtl,
14497 "UNSPEC hasn't been delegitimized.\n");
14498 return false;
14499 }
14500
14501 if (CONST_POLY_INT_P (rtl))
14502 return false;
14503
14504 if (targetm.const_not_ok_for_debug_p (rtl))
14505 {
14506 expansion_failed (NULL_TREE, rtl,
14507 "Expression rejected for debug by the backend.\n");
14508 return false;
14509 }
14510
14511 /* FIXME: Refer to PR60655. It is possible for simplification
14512 of rtl expressions in var tracking to produce such expressions.
14513 We should really identify / validate expressions
14514 enclosed in CONST that can be handled by assemblers on various
14515 targets and only handle legitimate cases here. */
14516 switch (GET_CODE (rtl))
14517 {
14518 case SYMBOL_REF:
14519 break;
14520 case NOT:
14521 case NEG:
14522 return false;
14523 default:
14524 return true;
14525 }
14526
14527 if (CONSTANT_POOL_ADDRESS_P (rtl))
14528 {
14529 bool marked;
14530 get_pool_constant_mark (rtl, &marked);
14531 /* If all references to this pool constant were optimized away,
14532 it was not output and thus we can't represent it. */
14533 if (!marked)
14534 {
14535 expansion_failed (NULL_TREE, rtl,
14536 "Constant was removed from constant pool.\n");
14537 return false;
14538 }
14539 }
14540
14541 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14542 return false;
14543
14544   /* Avoid references to external symbols in debug info: on several targets
14545      the linker might even refuse to link when linking a shared library, and
14546      in many other cases the relocations for .debug_info/.debug_loc are
14547      dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
14548      to be defined within the same shared library or executable, are fine. */
14549 if (SYMBOL_REF_EXTERNAL_P (rtl))
14550 {
14551 tree decl = SYMBOL_REF_DECL (rtl);
14552
14553 if (decl == NULL || !targetm.binds_local_p (decl))
14554 {
14555 expansion_failed (NULL_TREE, rtl,
14556 "Symbol not defined in current TU.\n");
14557 return false;
14558 }
14559 }
14560
14561 return true;
14562 }
14563
14564 /* Return true if constant RTL can be emitted in DW_OP_addr or
14565 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14566 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14567
14568 static bool
14569 const_ok_for_output (rtx rtl)
14570 {
14571 if (GET_CODE (rtl) == SYMBOL_REF)
14572 return const_ok_for_output_1 (rtl);
14573
14574 if (GET_CODE (rtl) == CONST)
14575 {
14576 subrtx_var_iterator::array_type array;
14577 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14578 if (!const_ok_for_output_1 (*iter))
14579 return false;
14580 return true;
14581 }
14582
14583 return true;
14584 }
14585
14586 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14587 if possible, NULL otherwise. */
14588
14589 static dw_die_ref
14590 base_type_for_mode (machine_mode mode, bool unsignedp)
14591 {
14592 dw_die_ref type_die;
14593 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14594
14595 if (type == NULL)
14596 return NULL;
14597 switch (TREE_CODE (type))
14598 {
14599 case INTEGER_TYPE:
14600 case REAL_TYPE:
14601 break;
14602 default:
14603 return NULL;
14604 }
14605 type_die = lookup_type_die (type);
14606 if (!type_die)
14607 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14608 comp_unit_die ());
14609 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14610 return NULL;
14611 return type_die;
14612 }
14613
14614 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14615 type matching MODE, or, if MODE is narrower than or as wide as
14616 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14617 possible. */
14618
14619 static dw_loc_descr_ref
14620 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14621 {
14622 machine_mode outer_mode = mode;
14623 dw_die_ref type_die;
14624 dw_loc_descr_ref cvt;
14625
14626 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14627 {
14628 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14629 return op;
14630 }
14631 type_die = base_type_for_mode (outer_mode, 1);
14632 if (type_die == NULL)
14633 return NULL;
14634 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14635 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14636 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14637 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14638 add_loc_descr (&op, cvt);
14639 return op;
14640 }
14641
14642 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14643
14644 static dw_loc_descr_ref
14645 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14646 dw_loc_descr_ref op1)
14647 {
14648 dw_loc_descr_ref ret = op0;
14649 add_loc_descr (&ret, op1);
14650 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14651 if (STORE_FLAG_VALUE != 1)
14652 {
14653 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14654 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14655 }
14656 return ret;
14657 }
14658
14659 /* Subroutine of scompare_loc_descriptor for the case in which we're
14660 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14661 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14662
14663 static dw_loc_descr_ref
14664 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14665 scalar_int_mode op_mode,
14666 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14667 {
14668 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14669 dw_loc_descr_ref cvt;
14670
14671 if (type_die == NULL)
14672 return NULL;
14673 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14674 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14675 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14676 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14677 add_loc_descr (&op0, cvt);
14678 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14679 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14680 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14681 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14682 add_loc_descr (&op1, cvt);
14683 return compare_loc_descriptor (op, op0, op1);
14684 }
14685
14686 /* Subroutine of scompare_loc_descriptor for the case in which we're
14687 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14688 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14689
14690 static dw_loc_descr_ref
14691 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14692 scalar_int_mode op_mode,
14693 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14694 {
14695 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14696 /* For eq/ne, if the operands are known to be zero-extended,
14697 there is no need to do the fancy shifting up. */
14698 if (op == DW_OP_eq || op == DW_OP_ne)
14699 {
14700 dw_loc_descr_ref last0, last1;
14701 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14702 ;
14703 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14704 ;
14705 /* deref_size zero extends, and for constants we can check
14706 whether they are zero extended or not. */
14707 if (((last0->dw_loc_opc == DW_OP_deref_size
14708 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14709 || (CONST_INT_P (XEXP (rtl, 0))
14710 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14711 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14712 && ((last1->dw_loc_opc == DW_OP_deref_size
14713 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14714 || (CONST_INT_P (XEXP (rtl, 1))
14715 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14716 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14717 return compare_loc_descriptor (op, op0, op1);
14718
14719 /* EQ/NE comparison against constant in narrower type than
14720 DWARF2_ADDR_SIZE can be performed either as
14721 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14722 DW_OP_{eq,ne}
14723 or
14724 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14725 DW_OP_{eq,ne}. Pick whatever is shorter. */
14726 if (CONST_INT_P (XEXP (rtl, 1))
14727 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14728 && (size_of_int_loc_descriptor (shift) + 1
14729 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14730 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14731 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14732 & GET_MODE_MASK (op_mode))))
14733 {
14734 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14735 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14736 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14737 & GET_MODE_MASK (op_mode));
14738 return compare_loc_descriptor (op, op0, op1);
14739 }
14740 }
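  /* Added explanatory note: shifting both operands left by SHIFT places the
     sign bit of OP_MODE into the sign bit of the DWARF2_ADDR_SIZE-wide stack
     slot, so the signed comparison below gives the same ordering as the
     narrow-mode signed comparison would.  */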
14741 add_loc_descr (&op0, int_loc_descriptor (shift));
14742 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14743 if (CONST_INT_P (XEXP (rtl, 1)))
14744 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14745 else
14746 {
14747 add_loc_descr (&op1, int_loc_descriptor (shift));
14748 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14749 }
14750 return compare_loc_descriptor (op, op0, op1);
14751 }
14752
14753 /* Return location descriptor for signed comparison OP RTL. */
14754
14755 static dw_loc_descr_ref
14756 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14757 machine_mode mem_mode)
14758 {
14759 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14760 dw_loc_descr_ref op0, op1;
14761
14762 if (op_mode == VOIDmode)
14763 op_mode = GET_MODE (XEXP (rtl, 1));
14764 if (op_mode == VOIDmode)
14765 return NULL;
14766
14767 scalar_int_mode int_op_mode;
14768 if (dwarf_strict
14769 && dwarf_version < 5
14770 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14771 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14772 return NULL;
14773
14774 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14775 VAR_INIT_STATUS_INITIALIZED);
14776 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14777 VAR_INIT_STATUS_INITIALIZED);
14778
14779 if (op0 == NULL || op1 == NULL)
14780 return NULL;
14781
14782 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14783 {
14784 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14785 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14786
14787 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14788 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14789 }
14790 return compare_loc_descriptor (op, op0, op1);
14791 }
14792
14793 /* Return location descriptor for unsigned comparison OP RTL. */
14794
14795 static dw_loc_descr_ref
14796 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14797 machine_mode mem_mode)
14798 {
14799 dw_loc_descr_ref op0, op1;
14800
14801 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14802 if (test_op_mode == VOIDmode)
14803 test_op_mode = GET_MODE (XEXP (rtl, 1));
14804
14805 scalar_int_mode op_mode;
14806 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14807 return NULL;
14808
14809 if (dwarf_strict
14810 && dwarf_version < 5
14811 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14812 return NULL;
14813
14814 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14815 VAR_INIT_STATUS_INITIALIZED);
14816 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14817 VAR_INIT_STATUS_INITIALIZED);
14818
14819 if (op0 == NULL || op1 == NULL)
14820 return NULL;
14821
14822 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14823 {
14824 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14825 dw_loc_descr_ref last0, last1;
14826 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14827 ;
14828 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14829 ;
14830 if (CONST_INT_P (XEXP (rtl, 0)))
14831 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14832 /* deref_size zero extends, so no need to mask it again. */
14833 else if (last0->dw_loc_opc != DW_OP_deref_size
14834 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14835 {
14836 add_loc_descr (&op0, int_loc_descriptor (mask));
14837 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14838 }
14839 if (CONST_INT_P (XEXP (rtl, 1)))
14840 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14841 /* deref_size zero extends, so no need to mask it again. */
14842 else if (last1->dw_loc_opc != DW_OP_deref_size
14843 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14844 {
14845 add_loc_descr (&op1, int_loc_descriptor (mask));
14846 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14847 }
14848 }
14849 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14850 {
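      /* Added explanatory note: adding the bias 2 ** (DWARF2_ADDR_SIZE * 8 - 1)
	 to both operands flips their sign bits, so the signed comparison
	 emitted by compare_loc_descriptor orders them as unsigned values.  */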
14851 HOST_WIDE_INT bias = 1;
14852 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14853 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14854 if (CONST_INT_P (XEXP (rtl, 1)))
14855 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14856 + INTVAL (XEXP (rtl, 1)));
14857 else
14858 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14859 bias, 0));
14860 }
14861 return compare_loc_descriptor (op, op0, op1);
14862 }
14863
14864 /* Return location descriptor for {U,S}{MIN,MAX}. */
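/* An added sketch of the emitted sequence (mode adjustments omitted): for MIN

       <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
       DW_OP_swap
   L1: DW_OP_drop

   which leaves the smaller of the two values on the stack; MAX uses DW_OP_gt
   instead of DW_OP_lt.  */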
14865
14866 static dw_loc_descr_ref
14867 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14868 machine_mode mem_mode)
14869 {
14870 enum dwarf_location_atom op;
14871 dw_loc_descr_ref op0, op1, ret;
14872 dw_loc_descr_ref bra_node, drop_node;
14873
14874 scalar_int_mode int_mode;
14875 if (dwarf_strict
14876 && dwarf_version < 5
14877 && (!is_a <scalar_int_mode> (mode, &int_mode)
14878 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14879 return NULL;
14880
14881 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14882 VAR_INIT_STATUS_INITIALIZED);
14883 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14884 VAR_INIT_STATUS_INITIALIZED);
14885
14886 if (op0 == NULL || op1 == NULL)
14887 return NULL;
14888
14889 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14890 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14891 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14892 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14893 {
14894 /* Checked by the caller. */
14895 int_mode = as_a <scalar_int_mode> (mode);
14896 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14897 {
14898 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14899 add_loc_descr (&op0, int_loc_descriptor (mask));
14900 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14901 add_loc_descr (&op1, int_loc_descriptor (mask));
14902 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14903 }
14904 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14905 {
14906 HOST_WIDE_INT bias = 1;
14907 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14908 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14909 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14910 }
14911 }
14912 else if (is_a <scalar_int_mode> (mode, &int_mode)
14913 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14914 {
14915 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14916 add_loc_descr (&op0, int_loc_descriptor (shift));
14917 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14918 add_loc_descr (&op1, int_loc_descriptor (shift));
14919 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14920 }
14921 else if (is_a <scalar_int_mode> (mode, &int_mode)
14922 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14923 {
14924 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14925 dw_loc_descr_ref cvt;
14926 if (type_die == NULL)
14927 return NULL;
14928 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14929 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14930 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14931 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14932 add_loc_descr (&op0, cvt);
14933 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14934 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14935 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14936 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14937 add_loc_descr (&op1, cvt);
14938 }
14939
14940 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14941 op = DW_OP_lt;
14942 else
14943 op = DW_OP_gt;
14944 ret = op0;
14945 add_loc_descr (&ret, op1);
14946 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14947 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14948 add_loc_descr (&ret, bra_node);
14949 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14950 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14951 add_loc_descr (&ret, drop_node);
14952 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14953 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14954 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14955 && is_a <scalar_int_mode> (mode, &int_mode)
14956 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14957 ret = convert_descriptor_to_mode (int_mode, ret);
14958 return ret;
14959 }
14960
14961 /* Helper function for mem_loc_descriptor.  Perform binary operation OP on
14962    the two operands of RTL, converting both to TYPE_DIE first, and convert
14963    the result back to MODE (untyped if MODE fits in DWARF2_ADDR_SIZE). */
14964
14965 static dw_loc_descr_ref
14966 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14967 scalar_int_mode mode, machine_mode mem_mode)
14968 {
14969 dw_loc_descr_ref cvt, op0, op1;
14970
14971 if (type_die == NULL)
14972 return NULL;
14973 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14974 VAR_INIT_STATUS_INITIALIZED);
14975 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14976 VAR_INIT_STATUS_INITIALIZED);
14977 if (op0 == NULL || op1 == NULL)
14978 return NULL;
14979 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14980 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14981 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14982 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14983 add_loc_descr (&op0, cvt);
14984 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14985 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14986 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14987 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14988 add_loc_descr (&op1, cvt);
14989 add_loc_descr (&op0, op1);
14990 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14991 return convert_descriptor_to_mode (mode, op0);
14992 }
14993
14994 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14995 const0 is DW_OP_lit0 or corresponding typed constant,
14996 const1 is DW_OP_lit1 or corresponding typed constant
14997 and constMSB is constant with just the MSB bit set
14998 for the mode):
14999 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15000 L1: const0 DW_OP_swap
15001 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15002 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15003 L3: DW_OP_drop
15004 L4: DW_OP_nop
15005
15006 CTZ is similar:
15007 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15008 L1: const0 DW_OP_swap
15009 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15010 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15011 L3: DW_OP_drop
15012 L4: DW_OP_nop
15013
15014 FFS is similar:
15015 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15016 L1: const1 DW_OP_swap
15017 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15018 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15019 L3: DW_OP_drop
15020 L4: DW_OP_nop */
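
/* Added worked example: for a 32-bit CLZ operand with value 0x00f00000 the L2
   loop above shifts the value left eight times before the constMSB test
   succeeds, so the counter left on the stack is 8, the number of leading
   zeros.  */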
15021
15022 static dw_loc_descr_ref
15023 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15024 machine_mode mem_mode)
15025 {
15026 dw_loc_descr_ref op0, ret, tmp;
15027 HOST_WIDE_INT valv;
15028 dw_loc_descr_ref l1jump, l1label;
15029 dw_loc_descr_ref l2jump, l2label;
15030 dw_loc_descr_ref l3jump, l3label;
15031 dw_loc_descr_ref l4jump, l4label;
15032 rtx msb;
15033
15034 if (GET_MODE (XEXP (rtl, 0)) != mode)
15035 return NULL;
15036
15037 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15038 VAR_INIT_STATUS_INITIALIZED);
15039 if (op0 == NULL)
15040 return NULL;
15041 ret = op0;
15042 if (GET_CODE (rtl) == CLZ)
15043 {
15044 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15045 valv = GET_MODE_BITSIZE (mode);
15046 }
15047 else if (GET_CODE (rtl) == FFS)
15048 valv = 0;
15049 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15050 valv = GET_MODE_BITSIZE (mode);
15051 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15052 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15053 add_loc_descr (&ret, l1jump);
15054 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15055 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15056 VAR_INIT_STATUS_INITIALIZED);
15057 if (tmp == NULL)
15058 return NULL;
15059 add_loc_descr (&ret, tmp);
15060 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15061 add_loc_descr (&ret, l4jump);
15062 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15063 ? const1_rtx : const0_rtx,
15064 mode, mem_mode,
15065 VAR_INIT_STATUS_INITIALIZED);
15066 if (l1label == NULL)
15067 return NULL;
15068 add_loc_descr (&ret, l1label);
15069 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15070 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15071 add_loc_descr (&ret, l2label);
15072 if (GET_CODE (rtl) != CLZ)
15073 msb = const1_rtx;
15074 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15075 msb = GEN_INT (HOST_WIDE_INT_1U
15076 << (GET_MODE_BITSIZE (mode) - 1));
15077 else
15078 msb = immed_wide_int_const
15079 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15080 GET_MODE_PRECISION (mode)), mode);
15081 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15082 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15083 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15084 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15085 else
15086 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15087 VAR_INIT_STATUS_INITIALIZED);
15088 if (tmp == NULL)
15089 return NULL;
15090 add_loc_descr (&ret, tmp);
15091 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15092 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15093 add_loc_descr (&ret, l3jump);
15094 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15095 VAR_INIT_STATUS_INITIALIZED);
15096 if (tmp == NULL)
15097 return NULL;
15098 add_loc_descr (&ret, tmp);
15099 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15100 ? DW_OP_shl : DW_OP_shr, 0, 0));
15101 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15102 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15104 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15105 add_loc_descr (&ret, l2jump);
15106 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15107 add_loc_descr (&ret, l3label);
15108 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15109 add_loc_descr (&ret, l4label);
15110 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15111 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15112 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15113 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15114 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15115 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15116 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15117 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15118 return ret;
15119 }
15120
15121 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15122 const1 is DW_OP_lit1 or corresponding typed constant):
15123 const0 DW_OP_swap
15124 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15125 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15126 L2: DW_OP_drop
15127
15128 PARITY is similar:
15129 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15130 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15131 L2: DW_OP_drop */
15132
15133 static dw_loc_descr_ref
15134 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15135 machine_mode mem_mode)
15136 {
15137 dw_loc_descr_ref op0, ret, tmp;
15138 dw_loc_descr_ref l1jump, l1label;
15139 dw_loc_descr_ref l2jump, l2label;
15140
15141 if (GET_MODE (XEXP (rtl, 0)) != mode)
15142 return NULL;
15143
15144 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15145 VAR_INIT_STATUS_INITIALIZED);
15146 if (op0 == NULL)
15147 return NULL;
15148 ret = op0;
15149 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15150 VAR_INIT_STATUS_INITIALIZED);
15151 if (tmp == NULL)
15152 return NULL;
15153 add_loc_descr (&ret, tmp);
15154 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15155 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15156 add_loc_descr (&ret, l1label);
15157 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15158 add_loc_descr (&ret, l2jump);
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15160 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15161 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15162 VAR_INIT_STATUS_INITIALIZED);
15163 if (tmp == NULL)
15164 return NULL;
15165 add_loc_descr (&ret, tmp);
15166 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15167 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15168 ? DW_OP_plus : DW_OP_xor, 0, 0));
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15170 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15171 VAR_INIT_STATUS_INITIALIZED);
15172 add_loc_descr (&ret, tmp);
15173 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15174 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15175 add_loc_descr (&ret, l1jump);
15176 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15177 add_loc_descr (&ret, l2label);
15178 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15179 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15180 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15181 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15182 return ret;
15183 }
15184
15185 /* BSWAP (constS is initial shift count, either 56 or 24):
15186 constS const0
15187 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15188 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15189 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15190 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15191 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15192
15193 static dw_loc_descr_ref
15194 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15195 machine_mode mem_mode)
15196 {
15197 dw_loc_descr_ref op0, ret, tmp;
15198 dw_loc_descr_ref l1jump, l1label;
15199 dw_loc_descr_ref l2jump, l2label;
15200
15201 if (BITS_PER_UNIT != 8
15202 || (GET_MODE_BITSIZE (mode) != 32
15203 && GET_MODE_BITSIZE (mode) != 64))
15204 return NULL;
15205
15206 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15207 VAR_INIT_STATUS_INITIALIZED);
15208 if (op0 == NULL)
15209 return NULL;
15210
15211 ret = op0;
15212 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15213 mode, mem_mode,
15214 VAR_INIT_STATUS_INITIALIZED);
15215 if (tmp == NULL)
15216 return NULL;
15217 add_loc_descr (&ret, tmp);
15218 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15219 VAR_INIT_STATUS_INITIALIZED);
15220 if (tmp == NULL)
15221 return NULL;
15222 add_loc_descr (&ret, tmp);
15223 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15224 add_loc_descr (&ret, l1label);
15225 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15226 mode, mem_mode,
15227 VAR_INIT_STATUS_INITIALIZED);
15228 add_loc_descr (&ret, tmp);
15229 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15232 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15233 VAR_INIT_STATUS_INITIALIZED);
15234 if (tmp == NULL)
15235 return NULL;
15236 add_loc_descr (&ret, tmp);
15237 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15238 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15239 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15240 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15241 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15243 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15244 VAR_INIT_STATUS_INITIALIZED);
15245 add_loc_descr (&ret, tmp);
15246 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15247 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15248 add_loc_descr (&ret, l2jump);
15249 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15250 VAR_INIT_STATUS_INITIALIZED);
15251 add_loc_descr (&ret, tmp);
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15253 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15254 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15255 add_loc_descr (&ret, l1jump);
15256 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15257 add_loc_descr (&ret, l2label);
15258 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15259 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15260 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15261 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15262 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15263 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15264 return ret;
15265 }
15266
15267 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15268 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15269 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15270 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15271
15272 ROTATERT is similar:
15273 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15274 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15275 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15276
15277 static dw_loc_descr_ref
15278 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15279 machine_mode mem_mode)
15280 {
15281 rtx rtlop1 = XEXP (rtl, 1);
15282 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15283 int i;
15284
15285 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15286 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15287 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15288 VAR_INIT_STATUS_INITIALIZED);
15289 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15290 VAR_INIT_STATUS_INITIALIZED);
15291 if (op0 == NULL || op1 == NULL)
15292 return NULL;
15293 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15294 for (i = 0; i < 2; i++)
15295 {
15296 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15297 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15298 mode, mem_mode,
15299 VAR_INIT_STATUS_INITIALIZED);
15300 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15301 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15302 ? DW_OP_const4u
15303 : HOST_BITS_PER_WIDE_INT == 64
15304 ? DW_OP_const8u : DW_OP_constu,
15305 GET_MODE_MASK (mode), 0);
15306 else
15307 mask[i] = NULL;
15308 if (mask[i] == NULL)
15309 return NULL;
15310 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15311 }
15312 ret = op0;
15313 add_loc_descr (&ret, op1);
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15315 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15316 if (GET_CODE (rtl) == ROTATERT)
15317 {
15318 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15319 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15320 GET_MODE_BITSIZE (mode), 0));
15321 }
15322 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15323 if (mask[0] != NULL)
15324 add_loc_descr (&ret, mask[0]);
15325 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15326 if (mask[1] != NULL)
15327 {
15328 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15329 add_loc_descr (&ret, mask[1]);
15330 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15331 }
15332 if (GET_CODE (rtl) == ROTATE)
15333 {
15334 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15335 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15336 GET_MODE_BITSIZE (mode), 0));
15337 }
15338 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15339 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15340 return ret;
15341 }
15342
15343 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15344 for DEBUG_PARAMETER_REF RTL. */
15345
15346 static dw_loc_descr_ref
15347 parameter_ref_descriptor (rtx rtl)
15348 {
15349 dw_loc_descr_ref ret;
15350 dw_die_ref ref;
15351
15352 if (dwarf_strict)
15353 return NULL;
15354 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15355 /* With LTO during LTRANS we get the late DIE that refers to the early
15356 DIE, thus we add another indirection here. This seems to confuse
15357 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15358 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15359 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15360 if (ref)
15361 {
15362 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15363 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15364 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15365 }
15366 else
15367 {
15368 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15369 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15370 }
15371 return ret;
15372 }
15373
15374 /* The following routine converts the RTL for a variable or parameter
15375 (resident in memory) into an equivalent Dwarf representation of a
15376 mechanism for getting the address of that same variable onto the top of a
15377 hypothetical "address evaluation" stack.
15378
15379 When creating memory location descriptors, we are effectively transforming
15380 the RTL for a memory-resident object into its Dwarf postfix expression
15381 equivalent. This routine recursively descends an RTL tree, turning
15382 it into Dwarf postfix code as it goes.
15383
15384 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15385
15386 MEM_MODE is the mode of the memory reference, needed to handle some
15387 autoincrement addressing modes.
15388
15389 Return 0 if we can't represent the location. */
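
/* A hedged illustration (added): for a MEM whose address is the frame pointer
   plus a constant, the recursion typically reaches based_loc_descr and yields
   something like DW_OP_fbreg <offset>; the MEM case below then appends
   DW_OP_deref (or DW_OP_deref_size) to load the value itself.  */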
15390
15391 dw_loc_descr_ref
15392 mem_loc_descriptor (rtx rtl, machine_mode mode,
15393 machine_mode mem_mode,
15394 enum var_init_status initialized)
15395 {
15396 dw_loc_descr_ref mem_loc_result = NULL;
15397 enum dwarf_location_atom op;
15398 dw_loc_descr_ref op0, op1;
15399 rtx inner = NULL_RTX;
15400 poly_int64 offset;
15401
15402 if (mode == VOIDmode)
15403 mode = GET_MODE (rtl);
15404
15405 /* Note that for a dynamically sized array, the location we will generate a
15406 description of here will be the lowest numbered location which is
15407 actually within the array. That's *not* necessarily the same as the
15408 zeroth element of the array. */
15409
15410 rtl = targetm.delegitimize_address (rtl);
15411
15412 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15413 return NULL;
15414
15415 scalar_int_mode int_mode, inner_mode, op1_mode;
15416 switch (GET_CODE (rtl))
15417 {
15418 case POST_INC:
15419 case POST_DEC:
15420 case POST_MODIFY:
15421 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15422
15423 case SUBREG:
15424 /* The case of a subreg may arise when we have a local (register)
15425 variable or a formal (register) parameter which doesn't quite fill
15426 up an entire register. For now, just assume that it is
15427 legitimate to make the Dwarf info refer to the whole register which
15428 contains the given subreg. */
15429 if (!subreg_lowpart_p (rtl))
15430 break;
15431 inner = SUBREG_REG (rtl);
15432 /* FALLTHRU */
15433 case TRUNCATE:
15434 if (inner == NULL_RTX)
15435 inner = XEXP (rtl, 0);
15436 if (is_a <scalar_int_mode> (mode, &int_mode)
15437 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15438 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15439 #ifdef POINTERS_EXTEND_UNSIGNED
15440 || (int_mode == Pmode && mem_mode != VOIDmode)
15441 #endif
15442 )
15443 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15444 {
15445 mem_loc_result = mem_loc_descriptor (inner,
15446 inner_mode,
15447 mem_mode, initialized);
15448 break;
15449 }
15450 if (dwarf_strict && dwarf_version < 5)
15451 break;
15452 if (is_a <scalar_int_mode> (mode, &int_mode)
15453 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15454 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15455 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15456 {
15457 dw_die_ref type_die;
15458 dw_loc_descr_ref cvt;
15459
15460 mem_loc_result = mem_loc_descriptor (inner,
15461 GET_MODE (inner),
15462 mem_mode, initialized);
15463 if (mem_loc_result == NULL)
15464 break;
15465 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15466 if (type_die == NULL)
15467 {
15468 mem_loc_result = NULL;
15469 break;
15470 }
15471 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15472 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15473 else
15474 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15475 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15476 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15477 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15478 add_loc_descr (&mem_loc_result, cvt);
15479 if (is_a <scalar_int_mode> (mode, &int_mode)
15480 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15481 {
15482 /* Convert it to untyped afterwards. */
15483 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15484 add_loc_descr (&mem_loc_result, cvt);
15485 }
15486 }
15487 break;
15488
15489 case REG:
15490 if (!is_a <scalar_int_mode> (mode, &int_mode)
15491 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15492 && rtl != arg_pointer_rtx
15493 && rtl != frame_pointer_rtx
15494 #ifdef POINTERS_EXTEND_UNSIGNED
15495 && (int_mode != Pmode || mem_mode == VOIDmode)
15496 #endif
15497 ))
15498 {
15499 dw_die_ref type_die;
15500 unsigned int dbx_regnum;
15501
15502 if (dwarf_strict && dwarf_version < 5)
15503 break;
15504 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15505 break;
15506 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15507 if (type_die == NULL)
15508 break;
15509
15510 dbx_regnum = dbx_reg_number (rtl);
15511 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15512 break;
15513 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15514 dbx_regnum, 0);
15515 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15516 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15517 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15518 break;
15519 }
15520 /* Whenever a register number forms a part of the description of the
15521 method for calculating the (dynamic) address of a memory resident
15522 object, DWARF rules require the register number be referred to as
15523 a "base register". This distinction is not based in any way upon
15524 what category of register the hardware believes the given register
15525 belongs to. This is strictly DWARF terminology we're dealing with
15526 here. Note that in cases where the location of a memory-resident
15527 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15528 OP_CONST (0)) the actual DWARF location descriptor that we generate
15529 may just be OP_BASEREG (basereg). This may look deceptively like
15530 the object in question was allocated to a register (rather than in
15531 memory) so DWARF consumers need to be aware of the subtle
15532 distinction between OP_REG and OP_BASEREG. */
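      /* Added illustration (register number hypothetical): if the frame
	 pointer is DWARF register 6, an address computed as that register
	 plus 16 is described with the base-register form DW_OP_breg6 <16>,
	 while a variable actually allocated to the register would use
	 DW_OP_reg6.  */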
15533 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15534 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15535 else if (stack_realign_drap
15536 && crtl->drap_reg
15537 && crtl->args.internal_arg_pointer == rtl
15538 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15539 {
15540 /* If RTL is internal_arg_pointer, which has been optimized
15541 out, use DRAP instead. */
15542 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15543 VAR_INIT_STATUS_INITIALIZED);
15544 }
15545 break;
15546
15547 case SIGN_EXTEND:
15548 case ZERO_EXTEND:
15549 if (!is_a <scalar_int_mode> (mode, &int_mode)
15550 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15551 break;
15552 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15553 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15554 if (op0 == 0)
15555 break;
15556 else if (GET_CODE (rtl) == ZERO_EXTEND
15557 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15558 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15559 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15560 to expand zero extend as two shifts instead of
15561 masking. */
15562 && GET_MODE_SIZE (inner_mode) <= 4)
15563 {
15564 mem_loc_result = op0;
15565 add_loc_descr (&mem_loc_result,
15566 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15567 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15568 }
15569 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15570 {
15571 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15572 shift *= BITS_PER_UNIT;
15573 if (GET_CODE (rtl) == SIGN_EXTEND)
15574 op = DW_OP_shra;
15575 else
15576 op = DW_OP_shr;
15577 mem_loc_result = op0;
15578 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15579 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15580 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15581 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15582 }
15583 else if (!dwarf_strict || dwarf_version >= 5)
15584 {
15585 dw_die_ref type_die1, type_die2;
15586 dw_loc_descr_ref cvt;
15587
15588 type_die1 = base_type_for_mode (inner_mode,
15589 GET_CODE (rtl) == ZERO_EXTEND);
15590 if (type_die1 == NULL)
15591 break;
15592 type_die2 = base_type_for_mode (int_mode, 1);
15593 if (type_die2 == NULL)
15594 break;
15595 mem_loc_result = op0;
15596 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15597 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15598 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15599 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15600 add_loc_descr (&mem_loc_result, cvt);
15601 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15602 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15603 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15604 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15605 add_loc_descr (&mem_loc_result, cvt);
15606 }
15607 break;
15608
15609 case MEM:
15610 {
15611 rtx new_rtl = avoid_constant_pool_reference (rtl);
15612 if (new_rtl != rtl)
15613 {
15614 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15615 initialized);
15616 if (mem_loc_result != NULL)
15617 return mem_loc_result;
15618 }
15619 }
15620 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15621 get_address_mode (rtl), mode,
15622 VAR_INIT_STATUS_INITIALIZED);
15623 if (mem_loc_result == NULL)
15624 mem_loc_result = tls_mem_loc_descriptor (rtl);
15625 if (mem_loc_result != NULL)
15626 {
15627 if (!is_a <scalar_int_mode> (mode, &int_mode)
15628 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15629 {
15630 dw_die_ref type_die;
15631 dw_loc_descr_ref deref;
15632 HOST_WIDE_INT size;
15633
15634 if (dwarf_strict && dwarf_version < 5)
15635 return NULL;
15636 if (!GET_MODE_SIZE (mode).is_constant (&size))
15637 return NULL;
15638 type_die
15639 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15640 if (type_die == NULL)
15641 return NULL;
15642 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15643 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15644 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15645 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15646 add_loc_descr (&mem_loc_result, deref);
15647 }
15648 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15649 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15650 else
15651 add_loc_descr (&mem_loc_result,
15652 new_loc_descr (DW_OP_deref_size,
15653 GET_MODE_SIZE (int_mode), 0));
15654 }
15655 break;
15656
15657 case LO_SUM:
15658 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15659
15660 case LABEL_REF:
15661 /* Some ports can transform a symbol ref into a label ref, because
15662 the symbol ref is too far away and has to be dumped into a constant
15663 pool. */
15664 case CONST:
15665 case SYMBOL_REF:
15666 if (!is_a <scalar_int_mode> (mode, &int_mode)
15667 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15668 #ifdef POINTERS_EXTEND_UNSIGNED
15669 && (int_mode != Pmode || mem_mode == VOIDmode)
15670 #endif
15671 ))
15672 break;
15673 if (GET_CODE (rtl) == SYMBOL_REF
15674 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15675 {
15676 dw_loc_descr_ref temp;
15677
15678 /* If this is not defined, we have no way to emit the data. */
15679 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15680 break;
15681
15682 temp = new_addr_loc_descr (rtl, dtprel_true);
15683
15684 /* We check for DWARF 5 here because gdb did not implement
15685 DW_OP_form_tls_address until after 7.12. */
15686 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15687 ? DW_OP_form_tls_address
15688 : DW_OP_GNU_push_tls_address),
15689 0, 0);
15690 add_loc_descr (&mem_loc_result, temp);
15691
15692 break;
15693 }
15694
15695 if (!const_ok_for_output (rtl))
15696 {
15697 if (GET_CODE (rtl) == CONST)
15698 switch (GET_CODE (XEXP (rtl, 0)))
15699 {
15700 case NOT:
15701 op = DW_OP_not;
15702 goto try_const_unop;
15703 case NEG:
15704 op = DW_OP_neg;
15705 goto try_const_unop;
15706 try_const_unop:
15707 rtx arg;
15708 arg = XEXP (XEXP (rtl, 0), 0);
15709 if (!CONSTANT_P (arg))
15710 arg = gen_rtx_CONST (int_mode, arg);
15711 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15712 initialized);
15713 if (op0)
15714 {
15715 mem_loc_result = op0;
15716 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15717 }
15718 break;
15719 default:
15720 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15721 mem_mode, initialized);
15722 break;
15723 }
15724 break;
15725 }
15726
15727 symref:
15728 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15729 vec_safe_push (used_rtx_array, rtl);
15730 break;
15731
15732 case CONCAT:
15733 case CONCATN:
15734 case VAR_LOCATION:
15735 case DEBUG_IMPLICIT_PTR:
15736 expansion_failed (NULL_TREE, rtl,
15737 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15738 return 0;
15739
15740 case ENTRY_VALUE:
15741 if (dwarf_strict && dwarf_version < 5)
15742 return NULL;
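/* DW_OP_entry_value wraps a sub-expression that the consumer evaluates as of
   the moment the containing function was entered, typically by virtually
   unwinding to the call site.  Only the simple operands handled below -- a
   register, or a memory reference whose address is a register -- are
   expected here; anything else indicates a bug in the caller
   (gcc_unreachable).  */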
15743 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15744 {
15745 if (!is_a <scalar_int_mode> (mode, &int_mode)
15746 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15747 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15748 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15749 else
15750 {
15751 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15752 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15753 return NULL;
15754 op0 = one_reg_loc_descriptor (dbx_regnum,
15755 VAR_INIT_STATUS_INITIALIZED);
15756 }
15757 }
15758 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15759 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15760 {
15761 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15762 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15763 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15764 return NULL;
15765 }
15766 else
15767 gcc_unreachable ();
15768 if (op0 == NULL)
15769 return NULL;
15770 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15771 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15772 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15773 break;
15774
15775 case DEBUG_PARAMETER_REF:
15776 mem_loc_result = parameter_ref_descriptor (rtl);
15777 break;
15778
15779 case PRE_MODIFY:
15780 /* Extract the PLUS expression nested inside and fall into
15781 PLUS code below. */
15782 rtl = XEXP (rtl, 1);
15783 goto plus;
15784
15785 case PRE_INC:
15786 case PRE_DEC:
15787 /* Turn these into a PLUS expression and fall into the PLUS code
15788 below. */
15789 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15790 gen_int_mode (GET_CODE (rtl) == PRE_INC
15791 ? GET_MODE_UNIT_SIZE (mem_mode)
15792 : -GET_MODE_UNIT_SIZE (mem_mode),
15793 mode));
15794
15795 /* fall through */
15796
15797 case PLUS:
15798 plus:
15799 if (is_based_loc (rtl)
15800 && is_a <scalar_int_mode> (mode, &int_mode)
15801 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15802 || XEXP (rtl, 0) == arg_pointer_rtx
15803 || XEXP (rtl, 0) == frame_pointer_rtx))
15804 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15805 INTVAL (XEXP (rtl, 1)),
15806 VAR_INIT_STATUS_INITIALIZED);
15807 else
15808 {
15809 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15810 VAR_INIT_STATUS_INITIALIZED);
15811 if (mem_loc_result == 0)
15812 break;
15813
15814 if (CONST_INT_P (XEXP (rtl, 1))
15815 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15816 <= DWARF2_ADDR_SIZE))
15817 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15818 else
15819 {
15820 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15821 VAR_INIT_STATUS_INITIALIZED);
15822 if (op1 == 0)
15823 return NULL;
15824 add_loc_descr (&mem_loc_result, op1);
15825 add_loc_descr (&mem_loc_result,
15826 new_loc_descr (DW_OP_plus, 0, 0));
15827 }
15828 }
15829 break;
15830
15831 /* If a pseudo-reg is optimized away, it is possible for it to
15832 be replaced with a MEM containing a multiply or shift. */
15833 case MINUS:
15834 op = DW_OP_minus;
15835 goto do_binop;
15836
15837 case MULT:
15838 op = DW_OP_mul;
15839 goto do_binop;
15840
15841 case DIV:
15842 if ((!dwarf_strict || dwarf_version >= 5)
15843 && is_a <scalar_int_mode> (mode, &int_mode)
15844 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15845 {
15846 mem_loc_result = typed_binop (DW_OP_div, rtl,
15847 base_type_for_mode (mode, 0),
15848 int_mode, mem_mode);
15849 break;
15850 }
15851 op = DW_OP_div;
15852 goto do_binop;
15853
15854 case UMOD:
15855 op = DW_OP_mod;
15856 goto do_binop;
15857
15858 case ASHIFT:
15859 op = DW_OP_shl;
15860 goto do_shift;
15861
15862 case ASHIFTRT:
15863 op = DW_OP_shra;
15864 goto do_shift;
15865
15866 case LSHIFTRT:
15867 op = DW_OP_shr;
15868 goto do_shift;
15869
15870 do_shift:
15871 if (!is_a <scalar_int_mode> (mode, &int_mode))
15872 break;
15873 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15874 VAR_INIT_STATUS_INITIALIZED);
15875 {
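/* A shift count narrower than INT_MODE is wrapped in a ZERO_EXTEND so that
   its descriptor is also computed in INT_MODE; zero extension is safe
   because shift counts are never negative.  */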
15876 rtx rtlop1 = XEXP (rtl, 1);
15877 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15878 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15879 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15880 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882 }
15883
15884 if (op0 == 0 || op1 == 0)
15885 break;
15886
15887 mem_loc_result = op0;
15888 add_loc_descr (&mem_loc_result, op1);
15889 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15890 break;
15891
15892 case AND:
15893 op = DW_OP_and;
15894 goto do_binop;
15895
15896 case IOR:
15897 op = DW_OP_or;
15898 goto do_binop;
15899
15900 case XOR:
15901 op = DW_OP_xor;
15902 goto do_binop;
15903
15904 do_binop:
15905 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15906 VAR_INIT_STATUS_INITIALIZED);
15907 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15908 VAR_INIT_STATUS_INITIALIZED);
15909
15910 if (op0 == 0 || op1 == 0)
15911 break;
15912
15913 mem_loc_result = op0;
15914 add_loc_descr (&mem_loc_result, op1);
15915 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15916 break;
15917
15918 case MOD:
15919 if ((!dwarf_strict || dwarf_version >= 5)
15920 && is_a <scalar_int_mode> (mode, &int_mode)
15921 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15922 {
15923 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15924 base_type_for_mode (mode, 0),
15925 int_mode, mem_mode);
15926 break;
15927 }
15928
15929 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15930 VAR_INIT_STATUS_INITIALIZED);
15931 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15932 VAR_INIT_STATUS_INITIALIZED);
15933
15934 if (op0 == 0 || op1 == 0)
15935 break;
15936
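/* DWARF's DW_OP_mod is an unsigned modulus, so a signed MOD is open-coded
   below.  Sketch of the stack program for operands a and b already pushed:
     DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus
   which leaves a - (a / b) * b, the signed remainder, on the stack.  */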
15937 mem_loc_result = op0;
15938 add_loc_descr (&mem_loc_result, op1);
15939 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15940 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15941 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15942 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15943 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15944 break;
15945
15946 case UDIV:
15947 if ((!dwarf_strict || dwarf_version >= 5)
15948 && is_a <scalar_int_mode> (mode, &int_mode))
15949 {
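/* DW_OP_div divides as signed values on the generic stack, so an unsigned
   division of address-sized (or narrower) operands is expressed through
   typed_binop with an unsigned base type.  For wider-than-address operands
   the operand descriptors are already typed, so plain DW_OP_div (reached
   through do_binop) presumably operates on those unsigned typed values.  */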
15950 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15951 {
15952 op = DW_OP_div;
15953 goto do_binop;
15954 }
15955 mem_loc_result = typed_binop (DW_OP_div, rtl,
15956 base_type_for_mode (int_mode, 1),
15957 int_mode, mem_mode);
15958 }
15959 break;
15960
15961 case NOT:
15962 op = DW_OP_not;
15963 goto do_unop;
15964
15965 case ABS:
15966 op = DW_OP_abs;
15967 goto do_unop;
15968
15969 case NEG:
15970 op = DW_OP_neg;
15971 goto do_unop;
15972
15973 do_unop:
15974 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15975 VAR_INIT_STATUS_INITIALIZED);
15976
15977 if (op0 == 0)
15978 break;
15979
15980 mem_loc_result = op0;
15981 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15982 break;
15983
15984 case CONST_INT:
15985 if (!is_a <scalar_int_mode> (mode, &int_mode)
15986 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15987 #ifdef POINTERS_EXTEND_UNSIGNED
15988 || (int_mode == Pmode
15989 && mem_mode != VOIDmode
15990 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15991 #endif
15992 )
15993 {
15994 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15995 break;
15996 }
15997 if ((!dwarf_strict || dwarf_version >= 5)
15998 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15999 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16000 {
16001 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16002 scalar_int_mode amode;
16003 if (type_die == NULL)
16004 return NULL;
16005 if (INTVAL (rtl) >= 0
16006 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16007 .exists (&amode))
16008 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16009 /* const DW_OP_convert <XXX> vs.
16010 DW_OP_const_type <XXX, 1, const>. */
16011 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16012 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16013 {
16014 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16015 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16016 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16017 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16018 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16019 add_loc_descr (&mem_loc_result, op0);
16020 return mem_loc_result;
16021 }
16022 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16023 INTVAL (rtl));
16024 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16025 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16026 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16027 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16028 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16029 else
16030 {
16031 mem_loc_result->dw_loc_oprnd2.val_class
16032 = dw_val_class_const_double;
16033 mem_loc_result->dw_loc_oprnd2.v.val_double
16034 = double_int::from_shwi (INTVAL (rtl));
16035 }
16036 }
16037 break;
16038
16039 case CONST_DOUBLE:
16040 if (!dwarf_strict || dwarf_version >= 5)
16041 {
16042 dw_die_ref type_die;
16043
16044 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16045 CONST_DOUBLE rtx could represent either a large integer
16046 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16047 the value is always a floating point constant.
16048
16049 When it is an integer, a CONST_DOUBLE is used whenever
16050 the constant requires 2 HWIs to be adequately represented.
16051 We output CONST_DOUBLEs as blocks. */
16052 if (mode == VOIDmode
16053 || (GET_MODE (rtl) == VOIDmode
16054 && maybe_ne (GET_MODE_BITSIZE (mode),
16055 HOST_BITS_PER_DOUBLE_INT)))
16056 break;
16057 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16058 if (type_die == NULL)
16059 return NULL;
16060 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16061 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16062 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16063 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16064 #if TARGET_SUPPORTS_WIDE_INT == 0
16065 if (!SCALAR_FLOAT_MODE_P (mode))
16066 {
16067 mem_loc_result->dw_loc_oprnd2.val_class
16068 = dw_val_class_const_double;
16069 mem_loc_result->dw_loc_oprnd2.v.val_double
16070 = rtx_to_double_int (rtl);
16071 }
16072 else
16073 #endif
16074 {
16075 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16076 unsigned int length = GET_MODE_SIZE (float_mode);
16077 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16078
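/* insert_float stores the value as a sequence of 32-bit target words, which
   is why the block is described as length / 4 elements of 4 bytes each.  */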
16079 insert_float (rtl, array);
16080 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16081 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16082 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16083 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16084 }
16085 }
16086 break;
16087
16088 case CONST_WIDE_INT:
16089 if (!dwarf_strict || dwarf_version >= 5)
16090 {
16091 dw_die_ref type_die;
16092
16093 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16094 if (type_die == NULL)
16095 return NULL;
16096 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16097 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16098 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16099 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16100 mem_loc_result->dw_loc_oprnd2.val_class
16101 = dw_val_class_wide_int;
16102 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16103 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16104 }
16105 break;
16106
16107 case CONST_POLY_INT:
16108 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16109 break;
16110
16111 case EQ:
16112 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16113 break;
16114
16115 case GE:
16116 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16117 break;
16118
16119 case GT:
16120 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16121 break;
16122
16123 case LE:
16124 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16125 break;
16126
16127 case LT:
16128 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16129 break;
16130
16131 case NE:
16132 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16133 break;
16134
16135 case GEU:
16136 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16137 break;
16138
16139 case GTU:
16140 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16141 break;
16142
16143 case LEU:
16144 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16145 break;
16146
16147 case LTU:
16148 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16149 break;
16150
16151 case UMIN:
16152 case UMAX:
16153 if (!SCALAR_INT_MODE_P (mode))
16154 break;
16155 /* FALLTHRU */
16156 case SMIN:
16157 case SMAX:
16158 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16159 break;
16160
16161 case ZERO_EXTRACT:
16162 case SIGN_EXTRACT:
16163 if (CONST_INT_P (XEXP (rtl, 1))
16164 && CONST_INT_P (XEXP (rtl, 2))
16165 && is_a <scalar_int_mode> (mode, &int_mode)
16166 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16167 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16168 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16169 && ((unsigned) INTVAL (XEXP (rtl, 1))
16170 + (unsigned) INTVAL (XEXP (rtl, 2))
16171 <= GET_MODE_BITSIZE (int_mode)))
16172 {
16173 int shift, size;
16174 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16175 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16176 if (op0 == 0)
16177 break;
16178 if (GET_CODE (rtl) == SIGN_EXTRACT)
16179 op = DW_OP_shra;
16180 else
16181 op = DW_OP_shr;
16182 mem_loc_result = op0;
16183 size = INTVAL (XEXP (rtl, 1));
16184 shift = INTVAL (XEXP (rtl, 2));
16185 if (BITS_BIG_ENDIAN)
16186 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
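/* A hedged sketch (little-endian bit numbering, 4-byte DWARF2_ADDR_SIZE):
   extracting 8 bits at bit position 4 emits
     DW_OP_lit20 DW_OP_shl DW_OP_lit24 DW_OP_shr
   (DW_OP_shra for SIGN_EXTRACT): the field is moved to the top of the
   address-sized word and then shifted back down.  */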
16187 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16188 {
16189 add_loc_descr (&mem_loc_result,
16190 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16191 - shift - size));
16192 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16193 }
16194 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16195 {
16196 add_loc_descr (&mem_loc_result,
16197 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16198 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16199 }
16200 }
16201 break;
16202
16203 case IF_THEN_ELSE:
16204 {
16205 dw_loc_descr_ref op2, bra_node, drop_node;
16206 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16207 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16208 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16209 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16210 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16211 VAR_INIT_STATUS_INITIALIZED);
16212 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16213 VAR_INIT_STATUS_INITIALIZED);
16214 if (op0 == NULL || op1 == NULL || op2 == NULL)
16215 break;
16216
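/* Sketch of the expression built below: push the "then" value (op1), the
   "else" value (op2) and the condition (op0).  DW_OP_bra pops the condition
   and, when it is nonzero, branches past DW_OP_swap to DW_OP_drop, which
   discards op2 and leaves op1 on the stack; otherwise DW_OP_swap;
   DW_OP_drop discards op1, leaving op2.  */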
16217 mem_loc_result = op1;
16218 add_loc_descr (&mem_loc_result, op2);
16219 add_loc_descr (&mem_loc_result, op0);
16220 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16221 add_loc_descr (&mem_loc_result, bra_node);
16222 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16223 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16224 add_loc_descr (&mem_loc_result, drop_node);
16225 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16226 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16227 }
16228 break;
16229
16230 case FLOAT_EXTEND:
16231 case FLOAT_TRUNCATE:
16232 case FLOAT:
16233 case UNSIGNED_FLOAT:
16234 case FIX:
16235 case UNSIGNED_FIX:
16236 if (!dwarf_strict || dwarf_version >= 5)
16237 {
16238 dw_die_ref type_die;
16239 dw_loc_descr_ref cvt;
16240
16241 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16242 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16243 if (op0 == NULL)
16244 break;
16245 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16246 && (GET_CODE (rtl) == FLOAT
16247 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16248 {
16249 type_die = base_type_for_mode (int_mode,
16250 GET_CODE (rtl) == UNSIGNED_FLOAT);
16251 if (type_die == NULL)
16252 break;
16253 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16254 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16255 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16256 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16257 add_loc_descr (&op0, cvt);
16258 }
16259 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16260 if (type_die == NULL)
16261 break;
16262 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16263 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16264 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16265 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16266 add_loc_descr (&op0, cvt);
16267 if (is_a <scalar_int_mode> (mode, &int_mode)
16268 && (GET_CODE (rtl) == FIX
16269 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16270 {
16271 op0 = convert_descriptor_to_mode (int_mode, op0);
16272 if (op0 == NULL)
16273 break;
16274 }
16275 mem_loc_result = op0;
16276 }
16277 break;
16278
16279 case CLZ:
16280 case CTZ:
16281 case FFS:
16282 if (is_a <scalar_int_mode> (mode, &int_mode))
16283 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16284 break;
16285
16286 case POPCOUNT:
16287 case PARITY:
16288 if (is_a <scalar_int_mode> (mode, &int_mode))
16289 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16290 break;
16291
16292 case BSWAP:
16293 if (is_a <scalar_int_mode> (mode, &int_mode))
16294 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16295 break;
16296
16297 case ROTATE:
16298 case ROTATERT:
16299 if (is_a <scalar_int_mode> (mode, &int_mode))
16300 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16301 break;
16302
16303 case COMPARE:
16304 /* In theory, we could implement the above. */
16305 /* DWARF cannot represent the unsigned compare operations
16306 natively. */
16307 case SS_MULT:
16308 case US_MULT:
16309 case SS_DIV:
16310 case US_DIV:
16311 case SS_PLUS:
16312 case US_PLUS:
16313 case SS_MINUS:
16314 case US_MINUS:
16315 case SS_NEG:
16316 case US_NEG:
16317 case SS_ABS:
16318 case SS_ASHIFT:
16319 case US_ASHIFT:
16320 case SS_TRUNCATE:
16321 case US_TRUNCATE:
16322 case UNORDERED:
16323 case ORDERED:
16324 case UNEQ:
16325 case UNGE:
16326 case UNGT:
16327 case UNLE:
16328 case UNLT:
16329 case LTGT:
16330 case FRACT_CONVERT:
16331 case UNSIGNED_FRACT_CONVERT:
16332 case SAT_FRACT:
16333 case UNSIGNED_SAT_FRACT:
16334 case SQRT:
16335 case ASM_OPERANDS:
16336 case VEC_MERGE:
16337 case VEC_SELECT:
16338 case VEC_CONCAT:
16339 case VEC_DUPLICATE:
16340 case VEC_SERIES:
16341 case UNSPEC:
16342 case HIGH:
16343 case FMA:
16344 case STRICT_LOW_PART:
16345 case CONST_VECTOR:
16346 case CONST_FIXED:
16347 case CLRSB:
16348 case CLOBBER:
16349 case CLOBBER_HIGH:
16350 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16351 can't express it in the debug info. This can happen e.g. with some
16352 TLS UNSPECs. */
16353 break;
16354
16355 case CONST_STRING:
16356 resolve_one_addr (&rtl);
16357 goto symref;
16358
16359 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16360 the expression. An UNSPEC rtx represents a raw DWARF operation;
16361 new_loc_descr is called for it to build the operation directly.
16362 Otherwise mem_loc_descriptor is called recursively. */
16363 case PARALLEL:
16364 {
16365 int index = 0;
16366 dw_loc_descr_ref exp_result = NULL;
16367
16368 for (; index < XVECLEN (rtl, 0); index++)
16369 {
16370 rtx elem = XVECEXP (rtl, 0, index);
16371 if (GET_CODE (elem) == UNSPEC)
16372 {
16373 /* Each DWARF operation UNSPEC contains two operands; if
16374 one operand is not used for the operation, const0_rtx is
16375 passed. */
16376 gcc_assert (XVECLEN (elem, 0) == 2);
16377
16378 HOST_WIDE_INT dw_op = XINT (elem, 1);
16379 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16380 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16381 exp_result
16382 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16383 oprnd2);
16384 }
16385 else
16386 exp_result
16387 = mem_loc_descriptor (elem, mode, mem_mode,
16388 VAR_INIT_STATUS_INITIALIZED);
16389
16390 if (!mem_loc_result)
16391 mem_loc_result = exp_result;
16392 else
16393 add_loc_descr (&mem_loc_result, exp_result);
16394 }
16395
16396 break;
16397 }
16398
16399 default:
16400 if (flag_checking)
16401 {
16402 print_rtl (stderr, rtl);
16403 gcc_unreachable ();
16404 }
16405 break;
16406 }
16407
16408 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16409 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16410
16411 return mem_loc_result;
16412 }
16413
16414 /* Return a descriptor that describes the concatenation of two locations.
16415 This is typically a complex variable. */
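/* Illustrative example only: a complex value whose real and imaginary parts
   live in two 8-byte registers would be described roughly as
     DW_OP_regX DW_OP_piece 8 DW_OP_regY DW_OP_piece 8  */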
16416
16417 static dw_loc_descr_ref
16418 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16419 {
16420 /* At present we only track constant-sized pieces. */
16421 unsigned int size0, size1;
16422 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16423 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16424 return 0;
16425
16426 dw_loc_descr_ref cc_loc_result = NULL;
16427 dw_loc_descr_ref x0_ref
16428 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16429 dw_loc_descr_ref x1_ref
16430 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16431
16432 if (x0_ref == 0 || x1_ref == 0)
16433 return 0;
16434
16435 cc_loc_result = x0_ref;
16436 add_loc_descr_op_piece (&cc_loc_result, size0);
16437
16438 add_loc_descr (&cc_loc_result, x1_ref);
16439 add_loc_descr_op_piece (&cc_loc_result, size1);
16440
16441 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16442 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16443
16444 return cc_loc_result;
16445 }
16446
16447 /* Return a descriptor that describes the concatenation of N
16448 locations. */
16449
16450 static dw_loc_descr_ref
16451 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16452 {
16453 unsigned int i;
16454 dw_loc_descr_ref cc_loc_result = NULL;
16455 unsigned int n = XVECLEN (concatn, 0);
16456 unsigned int size;
16457
16458 for (i = 0; i < n; ++i)
16459 {
16460 dw_loc_descr_ref ref;
16461 rtx x = XVECEXP (concatn, 0, i);
16462
16463 /* At present we only track constant-sized pieces. */
16464 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16465 return NULL;
16466
16467 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16468 if (ref == NULL)
16469 return NULL;
16470
16471 add_loc_descr (&cc_loc_result, ref);
16472 add_loc_descr_op_piece (&cc_loc_result, size);
16473 }
16474
16475 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16476 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16477
16478 return cc_loc_result;
16479 }
16480
16481 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16482 for DEBUG_IMPLICIT_PTR RTL. */
16483
16484 static dw_loc_descr_ref
16485 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16486 {
16487 dw_loc_descr_ref ret;
16488 dw_die_ref ref;
16489
16490 if (dwarf_strict && dwarf_version < 5)
16491 return NULL;
16492 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16493 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16494 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16495 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16496 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16497 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16498 if (ref)
16499 {
16500 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16501 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16502 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16503 }
16504 else
16505 {
16506 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16507 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16508 }
16509 return ret;
16510 }
16511
16512 /* Output a proper Dwarf location descriptor for a variable or parameter
16513 which is either allocated in a register or in a memory location. For a
16514 register, we just generate an OP_REG and the register number. For a
16515 memory location we provide a Dwarf postfix expression describing how to
16516 generate the (dynamic) address of the object onto the address stack.
16517
16518 MODE is mode of the decl if this loc_descriptor is going to be used in
16519 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16520 allowed, VOIDmode otherwise.
16521
16522 If we don't know how to describe it, return 0. */
16523
16524 static dw_loc_descr_ref
16525 loc_descriptor (rtx rtl, machine_mode mode,
16526 enum var_init_status initialized)
16527 {
16528 dw_loc_descr_ref loc_result = NULL;
16529 scalar_int_mode int_mode;
16530
16531 switch (GET_CODE (rtl))
16532 {
16533 case SUBREG:
16534 /* The case of a subreg may arise when we have a local (register)
16535 variable or a formal (register) parameter which doesn't quite fill
16536 up an entire register. For now, just assume that it is
16537 legitimate to make the Dwarf info refer to the whole register which
16538 contains the given subreg. */
16539 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16540 loc_result = loc_descriptor (SUBREG_REG (rtl),
16541 GET_MODE (SUBREG_REG (rtl)), initialized);
16542 else
16543 goto do_default;
16544 break;
16545
16546 case REG:
16547 loc_result = reg_loc_descriptor (rtl, initialized);
16548 break;
16549
16550 case MEM:
16551 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16552 GET_MODE (rtl), initialized);
16553 if (loc_result == NULL)
16554 loc_result = tls_mem_loc_descriptor (rtl);
16555 if (loc_result == NULL)
16556 {
16557 rtx new_rtl = avoid_constant_pool_reference (rtl);
16558 if (new_rtl != rtl)
16559 loc_result = loc_descriptor (new_rtl, mode, initialized);
16560 }
16561 break;
16562
16563 case CONCAT:
16564 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16565 initialized);
16566 break;
16567
16568 case CONCATN:
16569 loc_result = concatn_loc_descriptor (rtl, initialized);
16570 break;
16571
16572 case VAR_LOCATION:
16573 /* Single part. */
16574 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16575 {
16576 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16577 if (GET_CODE (loc) == EXPR_LIST)
16578 loc = XEXP (loc, 0);
16579 loc_result = loc_descriptor (loc, mode, initialized);
16580 break;
16581 }
16582
16583 rtl = XEXP (rtl, 1);
16584 /* FALLTHRU */
16585
16586 case PARALLEL:
16587 {
16588 rtvec par_elems = XVEC (rtl, 0);
16589 int num_elem = GET_NUM_ELEM (par_elems);
16590 machine_mode mode;
16591 int i, size;
16592
16593 /* Create the first one, so we have something to add to. */
16594 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16595 VOIDmode, initialized);
16596 if (loc_result == NULL)
16597 return NULL;
16598 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16599 /* At present we only track constant-sized pieces. */
16600 if (!GET_MODE_SIZE (mode).is_constant (&size))
16601 return NULL;
16602 add_loc_descr_op_piece (&loc_result, size);
16603 for (i = 1; i < num_elem; i++)
16604 {
16605 dw_loc_descr_ref temp;
16606
16607 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16608 VOIDmode, initialized);
16609 if (temp == NULL)
16610 return NULL;
16611 add_loc_descr (&loc_result, temp);
16612 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16613 /* At present we only track constant-sized pieces. */
16614 if (!GET_MODE_SIZE (mode).is_constant (&size))
16615 return NULL;
16616 add_loc_descr_op_piece (&loc_result, size);
16617 }
16618 }
16619 break;
16620
16621 case CONST_INT:
16622 if (mode != VOIDmode && mode != BLKmode)
16623 {
16624 int_mode = as_a <scalar_int_mode> (mode);
16625 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16626 INTVAL (rtl));
16627 }
16628 break;
16629
16630 case CONST_DOUBLE:
16631 if (mode == VOIDmode)
16632 mode = GET_MODE (rtl);
16633
16634 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16635 {
16636 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16637
16638 /* Note that a CONST_DOUBLE rtx could represent either an integer
16639 or a floating-point constant. A CONST_DOUBLE is used whenever
16640 the constant requires more than one word in order to be
16641 adequately represented. We output CONST_DOUBLEs as blocks. */
16642 scalar_mode smode = as_a <scalar_mode> (mode);
16643 loc_result = new_loc_descr (DW_OP_implicit_value,
16644 GET_MODE_SIZE (smode), 0);
16645 #if TARGET_SUPPORTS_WIDE_INT == 0
16646 if (!SCALAR_FLOAT_MODE_P (smode))
16647 {
16648 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16649 loc_result->dw_loc_oprnd2.v.val_double
16650 = rtx_to_double_int (rtl);
16651 }
16652 else
16653 #endif
16654 {
16655 unsigned int length = GET_MODE_SIZE (smode);
16656 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16657
16658 insert_float (rtl, array);
16659 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16660 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16661 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16662 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16663 }
16664 }
16665 break;
16666
16667 case CONST_WIDE_INT:
16668 if (mode == VOIDmode)
16669 mode = GET_MODE (rtl);
16670
16671 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16672 {
16673 int_mode = as_a <scalar_int_mode> (mode);
16674 loc_result = new_loc_descr (DW_OP_implicit_value,
16675 GET_MODE_SIZE (int_mode), 0);
16676 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16677 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16678 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16679 }
16680 break;
16681
16682 case CONST_VECTOR:
16683 if (mode == VOIDmode)
16684 mode = GET_MODE (rtl);
16685
16686 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16687 {
16688 unsigned int length;
16689 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16690 return NULL;
16691
16692 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16693 unsigned char *array
16694 = ggc_vec_alloc<unsigned char> (length * elt_size);
16695 unsigned int i;
16696 unsigned char *p;
16697 machine_mode imode = GET_MODE_INNER (mode);
16698
16699 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16700 switch (GET_MODE_CLASS (mode))
16701 {
16702 case MODE_VECTOR_INT:
16703 for (i = 0, p = array; i < length; i++, p += elt_size)
16704 {
16705 rtx elt = CONST_VECTOR_ELT (rtl, i);
16706 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16707 }
16708 break;
16709
16710 case MODE_VECTOR_FLOAT:
16711 for (i = 0, p = array; i < length; i++, p += elt_size)
16712 {
16713 rtx elt = CONST_VECTOR_ELT (rtl, i);
16714 insert_float (elt, p);
16715 }
16716 break;
16717
16718 default:
16719 gcc_unreachable ();
16720 }
16721
16722 loc_result = new_loc_descr (DW_OP_implicit_value,
16723 length * elt_size, 0);
16724 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16725 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16726 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16727 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16728 }
16729 break;
16730
16731 case CONST:
16732 if (mode == VOIDmode
16733 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16734 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16735 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16736 {
16737 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16738 break;
16739 }
16740 /* FALLTHROUGH */
16741 case SYMBOL_REF:
16742 if (!const_ok_for_output (rtl))
16743 break;
16744 /* FALLTHROUGH */
16745 case LABEL_REF:
16746 if (is_a <scalar_int_mode> (mode, &int_mode)
16747 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16748 && (dwarf_version >= 4 || !dwarf_strict))
16749 {
16750 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16751 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16752 vec_safe_push (used_rtx_array, rtl);
16753 }
16754 break;
16755
16756 case DEBUG_IMPLICIT_PTR:
16757 loc_result = implicit_ptr_descriptor (rtl, 0);
16758 break;
16759
16760 case PLUS:
16761 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16762 && CONST_INT_P (XEXP (rtl, 1)))
16763 {
16764 loc_result
16765 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16766 break;
16767 }
16768 /* FALLTHRU */
16769 do_default:
16770 default:
16771 if ((is_a <scalar_int_mode> (mode, &int_mode)
16772 && GET_MODE (rtl) == int_mode
16773 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16774 && dwarf_version >= 4)
16775 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16776 {
16777 /* Value expression. */
16778 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16779 if (loc_result)
16780 add_loc_descr (&loc_result,
16781 new_loc_descr (DW_OP_stack_value, 0, 0));
16782 }
16783 break;
16784 }
16785
16786 return loc_result;
16787 }
16788
16789 /* We need to figure out what section we should use as the base for the
16790 address ranges where a given location is valid.
16791 1. If this particular DECL has a section associated with it, use that.
16792 2. If this function has a section associated with it, use that.
16793 3. Otherwise, use the text section.
16794 XXX: If you split a variable across multiple sections, we won't notice. */
16795
16796 static const char *
16797 secname_for_decl (const_tree decl)
16798 {
16799 const char *secname;
16800
16801 if (VAR_OR_FUNCTION_DECL_P (decl)
16802 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16803 && DECL_SECTION_NAME (decl))
16804 secname = DECL_SECTION_NAME (decl);
16805 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16806 secname = DECL_SECTION_NAME (current_function_decl);
16807 else if (cfun && in_cold_section_p)
16808 secname = crtl->subsections.cold_section_label;
16809 else
16810 secname = text_section_label;
16811
16812 return secname;
16813 }
16814
16815 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16816
16817 static bool
16818 decl_by_reference_p (tree decl)
16819 {
16820 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16821 || VAR_P (decl))
16822 && DECL_BY_REFERENCE (decl));
16823 }
16824
16825 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16826 for VARLOC. */
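/* WANT_ADDRESS is interpreted as in loc_list_from_tree: 0 means the value of
   the object is wanted, nonzero means its address, and 2 additionally allows
   falling back to an implicit value terminated by DW_OP_stack_value when no
   address can be produced.  */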
16827
16828 static dw_loc_descr_ref
16829 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16830 enum var_init_status initialized)
16831 {
16832 int have_address = 0;
16833 dw_loc_descr_ref descr;
16834 machine_mode mode;
16835
16836 if (want_address != 2)
16837 {
16838 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16839 /* Single part. */
16840 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16841 {
16842 varloc = PAT_VAR_LOCATION_LOC (varloc);
16843 if (GET_CODE (varloc) == EXPR_LIST)
16844 varloc = XEXP (varloc, 0);
16845 mode = GET_MODE (varloc);
16846 if (MEM_P (varloc))
16847 {
16848 rtx addr = XEXP (varloc, 0);
16849 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16850 mode, initialized);
16851 if (descr)
16852 have_address = 1;
16853 else
16854 {
16855 rtx x = avoid_constant_pool_reference (varloc);
16856 if (x != varloc)
16857 descr = mem_loc_descriptor (x, mode, VOIDmode,
16858 initialized);
16859 }
16860 }
16861 else
16862 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16863 }
16864 else
16865 return 0;
16866 }
16867 else
16868 {
16869 if (GET_CODE (varloc) == VAR_LOCATION)
16870 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16871 else
16872 mode = DECL_MODE (loc);
16873 descr = loc_descriptor (varloc, mode, initialized);
16874 have_address = 1;
16875 }
16876
16877 if (!descr)
16878 return 0;
16879
16880 if (want_address == 2 && !have_address
16881 && (dwarf_version >= 4 || !dwarf_strict))
16882 {
16883 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16884 {
16885 expansion_failed (loc, NULL_RTX,
16886 "DWARF address size mismatch");
16887 return 0;
16888 }
16889 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16890 have_address = 1;
16891 }
16892 /* Show if we can't fill the request for an address. */
16893 if (want_address && !have_address)
16894 {
16895 expansion_failed (loc, NULL_RTX,
16896 "Want address and only have value");
16897 return 0;
16898 }
16899
16900 /* If we've got an address and don't want one, dereference. */
16901 if (!want_address && have_address)
16902 {
16903 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16904 enum dwarf_location_atom op;
16905
16906 if (size > DWARF2_ADDR_SIZE || size == -1)
16907 {
16908 expansion_failed (loc, NULL_RTX,
16909 "DWARF address size mismatch");
16910 return 0;
16911 }
16912 else if (size == DWARF2_ADDR_SIZE)
16913 op = DW_OP_deref;
16914 else
16915 op = DW_OP_deref_size;
16916
16917 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16918 }
16919
16920 return descr;
16921 }
16922
16923 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16924 if it is not possible. */
16925
16926 static dw_loc_descr_ref
16927 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16928 {
16929 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16930 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16931 else if (dwarf_version >= 3 || !dwarf_strict)
16932 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16933 else
16934 return NULL;
16935 }
16936
16937 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16938 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
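/* Illustrative sketch: if SRA keeps the first four bytes of an eight-byte
   variable in a register and the remaining bytes are optimized away, the
   resulting expression is roughly
     DW_OP_regN DW_OP_piece 4 DW_OP_piece 4
   where a DW_OP_piece with no preceding location marks bits that are
   unavailable.  */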
16939
16940 static dw_loc_descr_ref
16941 dw_sra_loc_expr (tree decl, rtx loc)
16942 {
16943 rtx p;
16944 unsigned HOST_WIDE_INT padsize = 0;
16945 dw_loc_descr_ref descr, *descr_tail;
16946 unsigned HOST_WIDE_INT decl_size;
16947 rtx varloc;
16948 enum var_init_status initialized;
16949
16950 if (DECL_SIZE (decl) == NULL
16951 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16952 return NULL;
16953
16954 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16955 descr = NULL;
16956 descr_tail = &descr;
16957
16958 for (p = loc; p; p = XEXP (p, 1))
16959 {
16960 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16961 rtx loc_note = *decl_piece_varloc_ptr (p);
16962 dw_loc_descr_ref cur_descr;
16963 dw_loc_descr_ref *tail, last = NULL;
16964 unsigned HOST_WIDE_INT opsize = 0;
16965
16966 if (loc_note == NULL_RTX
16967 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16968 {
16969 padsize += bitsize;
16970 continue;
16971 }
16972 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16973 varloc = NOTE_VAR_LOCATION (loc_note);
16974 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16975 if (cur_descr == NULL)
16976 {
16977 padsize += bitsize;
16978 continue;
16979 }
16980
16981 /* Check that cur_descr either doesn't use
16982 DW_OP_*piece operations, or their sum is equal
16983 to bitsize. Otherwise we can't embed it. */
16984 for (tail = &cur_descr; *tail != NULL;
16985 tail = &(*tail)->dw_loc_next)
16986 if ((*tail)->dw_loc_opc == DW_OP_piece)
16987 {
16988 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16989 * BITS_PER_UNIT;
16990 last = *tail;
16991 }
16992 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16993 {
16994 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16995 last = *tail;
16996 }
16997
16998 if (last != NULL && opsize != bitsize)
16999 {
17000 padsize += bitsize;
17001 /* Discard the current piece of the descriptor and release any
17002 addr_table entries it uses. */
17003 remove_loc_list_addr_table_entries (cur_descr);
17004 continue;
17005 }
17006
17007 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17008 expression, which means that those bits are optimized out. */
17009 if (padsize)
17010 {
17011 if (padsize > decl_size)
17012 {
17013 remove_loc_list_addr_table_entries (cur_descr);
17014 goto discard_descr;
17015 }
17016 decl_size -= padsize;
17017 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17018 if (*descr_tail == NULL)
17019 {
17020 remove_loc_list_addr_table_entries (cur_descr);
17021 goto discard_descr;
17022 }
17023 descr_tail = &(*descr_tail)->dw_loc_next;
17024 padsize = 0;
17025 }
17026 *descr_tail = cur_descr;
17027 descr_tail = tail;
17028 if (bitsize > decl_size)
17029 goto discard_descr;
17030 decl_size -= bitsize;
17031 if (last == NULL)
17032 {
17033 HOST_WIDE_INT offset = 0;
17034 if (GET_CODE (varloc) == VAR_LOCATION
17035 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17036 {
17037 varloc = PAT_VAR_LOCATION_LOC (varloc);
17038 if (GET_CODE (varloc) == EXPR_LIST)
17039 varloc = XEXP (varloc, 0);
17040 }
17041 do
17042 {
17043 if (GET_CODE (varloc) == CONST
17044 || GET_CODE (varloc) == SIGN_EXTEND
17045 || GET_CODE (varloc) == ZERO_EXTEND)
17046 varloc = XEXP (varloc, 0);
17047 else if (GET_CODE (varloc) == SUBREG)
17048 varloc = SUBREG_REG (varloc);
17049 else
17050 break;
17051 }
17052 while (1);
17053 /* The DW_OP_bit_piece offset should be zero for register
17054 or implicit location descriptions and for empty location
17055 descriptions, but for memory addresses it needs a big-endian
17056 adjustment. */
17057 if (MEM_P (varloc))
17058 {
17059 unsigned HOST_WIDE_INT memsize;
17060 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17061 goto discard_descr;
17062 memsize *= BITS_PER_UNIT;
17063 if (memsize != bitsize)
17064 {
17065 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17066 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17067 goto discard_descr;
17068 if (memsize < bitsize)
17069 goto discard_descr;
17070 if (BITS_BIG_ENDIAN)
17071 offset = memsize - bitsize;
17072 }
17073 }
17074
17075 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17076 if (*descr_tail == NULL)
17077 goto discard_descr;
17078 descr_tail = &(*descr_tail)->dw_loc_next;
17079 }
17080 }
17081
17082 /* If there were any non-empty expressions, add padding till the end of
17083 the decl. */
17084 if (descr != NULL && decl_size != 0)
17085 {
17086 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17087 if (*descr_tail == NULL)
17088 goto discard_descr;
17089 }
17090 return descr;
17091
17092 discard_descr:
17093 /* Discard the descriptor and release any addr_table entries it uses. */
17094 remove_loc_list_addr_table_entries (descr);
17095 return NULL;
17096 }
17097
17098 /* Return the dwarf representation of the location list LOC_LIST of
17099 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17100 function. */
17101
17102 static dw_loc_list_ref
17103 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17104 {
17105 const char *endname, *secname;
17106 var_loc_view endview;
17107 rtx varloc;
17108 enum var_init_status initialized;
17109 struct var_loc_node *node;
17110 dw_loc_descr_ref descr;
17111 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17112 dw_loc_list_ref list = NULL;
17113 dw_loc_list_ref *listp = &list;
17114
17115 /* Now that we know what section we are using for a base,
17116 actually construct the list of locations.
17117 The first location information is what is passed to the
17118 function that creates the location list, and the remaining
17119 locations just get added on to that list.
17120 Note that we only know the start address for a location
17121 (i.e. where the location changes), so to build the range, we use
17122 the range [current location start, next location start].
17123 This means we have to special case the last node, and generate
17124 a range of [last location start, end of function label]. */
17125
17126 if (cfun && crtl->has_bb_partition)
17127 {
17128 bool save_in_cold_section_p = in_cold_section_p;
17129 in_cold_section_p = first_function_block_is_cold;
17130 if (loc_list->last_before_switch == NULL)
17131 in_cold_section_p = !in_cold_section_p;
17132 secname = secname_for_decl (decl);
17133 in_cold_section_p = save_in_cold_section_p;
17134 }
17135 else
17136 secname = secname_for_decl (decl);
17137
17138 for (node = loc_list->first; node; node = node->next)
17139 {
17140 bool range_across_switch = false;
17141 if (GET_CODE (node->loc) == EXPR_LIST
17142 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17143 {
17144 if (GET_CODE (node->loc) == EXPR_LIST)
17145 {
17146 descr = NULL;
17147 /* This requires DW_OP_{,bit_}piece, which is not usable
17148 inside DWARF expressions. */
17149 if (want_address == 2)
17150 descr = dw_sra_loc_expr (decl, node->loc);
17151 }
17152 else
17153 {
17154 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17155 varloc = NOTE_VAR_LOCATION (node->loc);
17156 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17157 }
17158 if (descr)
17159 {
17160 /* If section switch happens in between node->label
17161 and node->next->label (or end of function) and
17162 we can't emit it as a single entry list,
17163 emit two ranges, first one ending at the end
17164 of first partition and second one starting at the
17165 beginning of second partition. */
17166 if (node == loc_list->last_before_switch
17167 && (node != loc_list->first || loc_list->first->next
17168 /* If we are to emit a view number, we will emit
17169 a loclist rather than a single location
17170 expression for the entire function (see
17171 loc_list_has_views), so we have to split the
17172 range that straddles across partitions. */
17173 || !ZERO_VIEW_P (node->view))
17174 && current_function_decl)
17175 {
17176 endname = cfun->fde->dw_fde_end;
17177 endview = 0;
17178 range_across_switch = true;
17179 }
17180 /* The variable has a location between NODE->LABEL and
17181 NODE->NEXT->LABEL. */
17182 else if (node->next)
17183 endname = node->next->label, endview = node->next->view;
17184 /* If the variable has a location at the last label
17185 it keeps its location until the end of function. */
17186 else if (!current_function_decl)
17187 endname = text_end_label, endview = 0;
17188 else
17189 {
17190 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17191 current_function_funcdef_no);
17192 endname = ggc_strdup (label_id);
17193 endview = 0;
17194 }
17195
17196 *listp = new_loc_list (descr, node->label, node->view,
17197 endname, endview, secname);
17198 if (TREE_CODE (decl) == PARM_DECL
17199 && node == loc_list->first
17200 && NOTE_P (node->loc)
17201 && strcmp (node->label, endname) == 0)
17202 (*listp)->force = true;
17203 listp = &(*listp)->dw_loc_next;
17204 }
17205 }
17206
17207 if (cfun
17208 && crtl->has_bb_partition
17209 && node == loc_list->last_before_switch)
17210 {
17211 bool save_in_cold_section_p = in_cold_section_p;
17212 in_cold_section_p = !first_function_block_is_cold;
17213 secname = secname_for_decl (decl);
17214 in_cold_section_p = save_in_cold_section_p;
17215 }
17216
17217 if (range_across_switch)
17218 {
17219 if (GET_CODE (node->loc) == EXPR_LIST)
17220 descr = dw_sra_loc_expr (decl, node->loc);
17221 else
17222 {
17223 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17224 varloc = NOTE_VAR_LOCATION (node->loc);
17225 descr = dw_loc_list_1 (decl, varloc, want_address,
17226 initialized);
17227 }
17228 gcc_assert (descr);
17229 /* The variable has a location between NODE->LABEL and
17230 NODE->NEXT->LABEL. */
17231 if (node->next)
17232 endname = node->next->label, endview = node->next->view;
17233 else
17234 endname = cfun->fde->dw_fde_second_end, endview = 0;
17235 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17236 endname, endview, secname);
17237 listp = &(*listp)->dw_loc_next;
17238 }
17239 }
17240
17241 /* Try to avoid the overhead of a location list by emitting a
17242 location expression instead, but only if there was just one
17243 location entry in the first place. If some entries were not
17244 representable, we don't want to pretend that a single entry that
17245 was representable applies to the entire scope in which the
17246 variable is available. */
17247 if (list && loc_list->first->next)
17248 gen_llsym (list);
17249 else
17250 maybe_gen_llsym (list);
17251
17252 return list;
17253 }
17254
17255 /* Return true if the loc_list has only a single element and thus can be
17256 represented as a location description. */
17257
17258 static bool
17259 single_element_loc_list_p (dw_loc_list_ref list)
17260 {
17261 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17262 return !list->ll_symbol;
17263 }
17264
17265 /* Duplicate a single element of location list. */
17266
17267 static inline dw_loc_descr_ref
17268 copy_loc_descr (dw_loc_descr_ref ref)
17269 {
17270 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17271 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17272 return copy;
17273 }
17274
17275 /* To each location in list LIST append loc descr REF. */
17276
17277 static void
17278 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17279 {
17280 dw_loc_descr_ref copy;
17281 add_loc_descr (&list->expr, ref);
17282 list = list->dw_loc_next;
17283 while (list)
17284 {
17285 copy = copy_loc_descr (ref);
17286 add_loc_descr (&list->expr, copy);
17287 while (copy->dw_loc_next)
17288 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17289 list = list->dw_loc_next;
17290 }
17291 }
17292
17293 /* To each location in list LIST prepend loc descr REF. */
17294
17295 static void
17296 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17297 {
17298 dw_loc_descr_ref copy;
17299 dw_loc_descr_ref ref_end = list->expr;
17300 add_loc_descr (&ref, list->expr);
17301 list->expr = ref;
17302 list = list->dw_loc_next;
17303 while (list)
17304 {
17305 dw_loc_descr_ref end = list->expr;
17306 list->expr = copy = copy_loc_descr (ref);
17307 while (copy->dw_loc_next != ref_end)
17308 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17309 copy->dw_loc_next = end;
17310 list = list->dw_loc_next;
17311 }
17312 }
17313
17314 /* Given two lists RET and LIST,
17315 produce a location list that is the result of adding the expression in
17316 LIST to the expression in RET at each position in the program.
17317 Might be destructive on both RET and LIST.
17318
17319 TODO: We handle only the simple cases of RET or LIST having at most one
17320 element. The general case would involve sorting the lists in program order
17321 and merging them, which will need some additional work.
17322 Adding that will improve the quality of debug info, especially for SRA-ed
17323 structures. */
17324
17325 static void
17326 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17327 {
17328 if (!list)
17329 return;
17330 if (!*ret)
17331 {
17332 *ret = list;
17333 return;
17334 }
17335 if (!list->dw_loc_next)
17336 {
17337 add_loc_descr_to_each (*ret, list->expr);
17338 return;
17339 }
17340 if (!(*ret)->dw_loc_next)
17341 {
17342 prepend_loc_descr_to_each (list, (*ret)->expr);
17343 *ret = list;
17344 return;
17345 }
17346 expansion_failed (NULL_TREE, NULL_RTX,
17347 "Don't know how to merge two non-trivial"
17348 " location lists.\n");
17349 *ret = NULL;
17350 return;
17351 }
17352
17353 /* LOC is a constant expression. Try our luck: look it up in the constant
17354 pool and return the loc_descr of its address. */
17355
17356 static dw_loc_descr_ref
17357 cst_pool_loc_descr (tree loc)
17358 {
17359 /* Get an RTL for this, if something has been emitted. */
17360 rtx rtl = lookup_constant_def (loc);
17361
17362 if (!rtl || !MEM_P (rtl))
17363 {
17364 gcc_assert (!rtl);
17365 return 0;
17366 }
17367 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17368
17369 /* TODO: We might get more coverage if we were actually delaying expansion
17370 of all expressions until the end of compilation, when constant pools are
17371 fully populated. */
17372 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17373 {
17374 expansion_failed (loc, NULL_RTX,
17375 "CST value in contant pool but not marked.");
17376 return 0;
17377 }
17378 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17379 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17380 }
17381
17382 /* Return a dw_loc_list representing the address of the addr_expr LOC
17383 by looking for an inner INDIRECT_REF expression and turning
17384 it into simple arithmetic.
17385
17386 See loc_list_from_tree for the meaning of CONTEXT. */
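/* Illustrative example: for LOC == &(ptr->fld) the inner reference is *ptr
   at some byte offset, so in the top-level non-strict case the result is
   roughly
     <location of ptr> DW_OP_plus_uconst <byte offset> DW_OP_stack_value  */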
17387
17388 static dw_loc_list_ref
17389 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17390 loc_descr_context *context)
17391 {
17392 tree obj, offset;
17393 poly_int64 bitsize, bitpos, bytepos;
17394 machine_mode mode;
17395 int unsignedp, reversep, volatilep = 0;
17396 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17397
17398 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17399 &bitsize, &bitpos, &offset, &mode,
17400 &unsignedp, &reversep, &volatilep);
17401 STRIP_NOPS (obj);
17402 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17403 {
17404 expansion_failed (loc, NULL_RTX, "bitfield access");
17405 return 0;
17406 }
17407 if (!INDIRECT_REF_P (obj))
17408 {
17409 expansion_failed (obj,
17410 NULL_RTX, "no indirect ref in inner reference");
17411 return 0;
17412 }
17413 if (!offset && known_eq (bitpos, 0))
17414 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17415 context);
17416 else if (toplev
17417 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17418 && (dwarf_version >= 4 || !dwarf_strict))
17419 {
17420 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17421 if (!list_ret)
17422 return 0;
17423 if (offset)
17424 {
17425 /* Variable offset. */
17426 list_ret1 = loc_list_from_tree (offset, 0, context);
17427 if (list_ret1 == 0)
17428 return 0;
17429 add_loc_list (&list_ret, list_ret1);
17430 if (!list_ret)
17431 return 0;
17432 add_loc_descr_to_each (list_ret,
17433 new_loc_descr (DW_OP_plus, 0, 0));
17434 }
17435 HOST_WIDE_INT value;
17436 if (bytepos.is_constant (&value) && value > 0)
17437 add_loc_descr_to_each (list_ret,
17438 new_loc_descr (DW_OP_plus_uconst, value, 0));
17439 else if (maybe_ne (bytepos, 0))
17440 loc_list_plus_const (list_ret, bytepos);
17441 add_loc_descr_to_each (list_ret,
17442 new_loc_descr (DW_OP_stack_value, 0, 0));
17443 }
17444 return list_ret;
17445 }
17446
17447 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17448 operations from LOC are nops, move to the last one. Insert into NOPS all
17449 operations that are skipped. */
17450
17451 static void
17452 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17453 hash_set<dw_loc_descr_ref> &nops)
17454 {
17455 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17456 {
17457 nops.add (loc);
17458 loc = loc->dw_loc_next;
17459 }
17460 }
17461
17462 /* Helper for loc_descr_without_nops: free the location description operation
17463 LOC.  */
17464
17465 bool
17466 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17467 {
17468 ggc_free (loc);
17469 return true;
17470 }
17471
17472 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17473 finishes LOC. */
17474
17475 static void
17476 loc_descr_without_nops (dw_loc_descr_ref &loc)
17477 {
17478 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17479 return;
17480
17481 /* Set of all DW_OP_nop operations we remove. */
17482 hash_set<dw_loc_descr_ref> nops;
17483
17484 /* First, strip all prefix NOP operations in order to keep the head of the
17485 operations list. */
17486 loc_descr_to_next_no_nop (loc, nops);
17487
17488 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17489 {
17490 /* For control flow operations: strip "prefix" nops in destination
17491 labels. */
17492 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17493 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17494 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17495 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17496
17497 /* Do the same for the operations that follow, then move to the next
17498 iteration. */
17499 if (cur->dw_loc_next != NULL)
17500 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17501 cur = cur->dw_loc_next;
17502 }
17503
17504 nops.traverse<void *, free_loc_descr> (NULL);
17505 }
17506
17507
17508 struct dwarf_procedure_info;
17509
17510 /* Helper structure for location descriptions generation. */
17511 struct loc_descr_context
17512 {
17513 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17514 NULL_TREE if DW_OP_push_object_address is invalid for this location
17515 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17516 tree context_type;
17517 /* The ..._DECL node that should be translated as a
17518 DW_OP_push_object_address operation. */
17519 tree base_decl;
17520 /* Information about the DWARF procedure we are currently generating. NULL if
17521 we are not generating a DWARF procedure. */
17522 struct dwarf_procedure_info *dpi;
17523 /* True if an integral PLACEHOLDER_EXPR stands for the first argument passed
17524 by the consumer.  Used for DW_TAG_generic_subrange attributes.  */
17525 bool placeholder_arg;
17526 /* True if PLACEHOLDER_EXPR has been seen. */
17527 bool placeholder_seen;
17528 };
17529
17530 /* DWARF procedures generation
17531
17532 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17533 quantities such as sizes or offsets.  Such computations can have redundant parts
17534 that can be factorized in order to reduce the size of the output debug
17535 information. This is the whole point of DWARF procedures.
17536
17537 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17538 already factorized into functions ("size functions") in order to handle very
17539 big and complex types. Such functions are quite simple: they have integral
17540 arguments, they return an integral result and their body contains only a
17541 return statement with arithmetic expressions. This is the only kind of
17542 function we are interested in translating into DWARF procedures, here.
17543
17544 DWARF expressions and DWARF procedures are executed using a stack, so we have
17545 to define some calling convention for them to interact. Let's say that:
17546
17547 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17548 all arguments in reverse order (right-to-left) so that when the DWARF
17549 procedure execution starts, the first argument is the top of the stack.
17550
17551 - Then, when returning, the DWARF procedure must have consumed all arguments
17552 on the stack, must have pushed the result and touched nothing else.
17553
17554 - Each integral argument and the result are of integral types and can be
17555 held in a single stack slot.
17556
17557 - We call "frame offset" the number of stack slots that are "under DWARF
17558 procedure control": it includes the arguments slots, the temporaries and
17559 the result slot. Thus, it is equal to the number of arguments when the
17560 procedure execution starts and must be equal to one (the result) when it
17561 returns. */
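/* For illustration only (a sketch, not necessarily the exact operations GCC
   emits): a size function such as

     unsigned long size_fn (unsigned long n) { return n * 4 + 8; }

   could be translated into a DW_TAG_dwarf_procedure whose DW_AT_location is
   roughly:

     DW_OP_dup, DW_OP_lit4, DW_OP_mul, DW_OP_plus_uconst 8,
     DW_OP_swap, DW_OP_drop

   With the argument N on top of the stack at entry (frame offset 1), this
   leaves only N * 4 + 8 on the stack at exit (frame offset 1 again).  A call
   site pushes N and then emits a DW_OP_call4 that references the procedure
   DIE, per the convention above.  */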
17562
17563 /* Helper structure used when generating operations for a DWARF procedure. */
17564 struct dwarf_procedure_info
17565 {
17566 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17567 currently translated. */
17568 tree fndecl;
17569 /* The number of arguments FNDECL takes. */
17570 unsigned args_count;
17571 };
17572
17573 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17574 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17575 equate it to this DIE. */
17576
17577 static dw_die_ref
17578 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17579 dw_die_ref parent_die)
17580 {
17581 dw_die_ref dwarf_proc_die;
17582
17583 if ((dwarf_version < 3 && dwarf_strict)
17584 || location == NULL)
17585 return NULL;
17586
17587 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17588 if (fndecl)
17589 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17590 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17591 return dwarf_proc_die;
17592 }
17593
17594 /* Return whether TYPE is a supported type as a DWARF procedure argument
17595 type or return type (we handle only scalar types and pointer types that
17596 aren't wider than the DWARF expression evaluation stack).  */
17597
17598 static bool
17599 is_handled_procedure_type (tree type)
17600 {
17601 return ((INTEGRAL_TYPE_P (type)
17602 || TREE_CODE (type) == OFFSET_TYPE
17603 || TREE_CODE (type) == POINTER_TYPE)
17604 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17605 }
17606
17607 /* Helper for resolve_args_picking: do the same but stop when coming across
17608 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17609 offset *before* evaluating the corresponding operation. */
17610
17611 static bool
17612 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17613 struct dwarf_procedure_info *dpi,
17614 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17615 {
17616 /* The "frame_offset" identifier is already used to name a macro... */
17617 unsigned frame_offset_ = initial_frame_offset;
17618 dw_loc_descr_ref l;
17619
17620 for (l = loc; l != NULL;)
17621 {
17622 bool existed;
17623 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17624
17625 /* If we already met this node, there is nothing to compute anymore. */
17626 if (existed)
17627 {
17628 /* Make sure that the stack size is consistent wherever the execution
17629 flow comes from. */
17630 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17631 break;
17632 }
17633 l_frame_offset = frame_offset_;
17634
17635 /* If needed, relocate the picking offset with respect to the frame
17636 offset. */
17637 if (l->frame_offset_rel)
17638 {
17639 unsigned HOST_WIDE_INT off;
17640 switch (l->dw_loc_opc)
17641 {
17642 case DW_OP_pick:
17643 off = l->dw_loc_oprnd1.v.val_unsigned;
17644 break;
17645 case DW_OP_dup:
17646 off = 0;
17647 break;
17648 case DW_OP_over:
17649 off = 1;
17650 break;
17651 default:
17652 gcc_unreachable ();
17653 }
17654 /* frame_offset_ is the size of the current stack frame, including
17655 incoming arguments. Besides, the arguments are pushed
17656 right-to-left. Thus, in order to access the Nth argument from
17657 this operation node, the picking has to skip temporaries *plus*
17658 one stack slot per argument (0 for the first one, 1 for the second
17659 one, etc.).
17660
17661 The targeted argument number (N) is already set as the operand,
17662 and the number of temporaries can be computed with:
17663 frame_offset_ - dpi->args_count */
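/* Worked example (illustrative): with dpi->args_count == 2 and one
   temporary already pushed (frame_offset_ == 3), accessing the first
   argument (operand 0) gives off == 0 + 3 - 2 == 1, which is
   canonicalized to DW_OP_over below.  */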
17664 off += frame_offset_ - dpi->args_count;
17665
17666 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17667 if (off > 255)
17668 return false;
17669
17670 if (off == 0)
17671 {
17672 l->dw_loc_opc = DW_OP_dup;
17673 l->dw_loc_oprnd1.v.val_unsigned = 0;
17674 }
17675 else if (off == 1)
17676 {
17677 l->dw_loc_opc = DW_OP_over;
17678 l->dw_loc_oprnd1.v.val_unsigned = 0;
17679 }
17680 else
17681 {
17682 l->dw_loc_opc = DW_OP_pick;
17683 l->dw_loc_oprnd1.v.val_unsigned = off;
17684 }
17685 }
17686
17687 /* Update frame_offset according to the effect the current operation has
17688 on the stack. */
17689 switch (l->dw_loc_opc)
17690 {
17691 case DW_OP_deref:
17692 case DW_OP_swap:
17693 case DW_OP_rot:
17694 case DW_OP_abs:
17695 case DW_OP_neg:
17696 case DW_OP_not:
17697 case DW_OP_plus_uconst:
17698 case DW_OP_skip:
17699 case DW_OP_reg0:
17700 case DW_OP_reg1:
17701 case DW_OP_reg2:
17702 case DW_OP_reg3:
17703 case DW_OP_reg4:
17704 case DW_OP_reg5:
17705 case DW_OP_reg6:
17706 case DW_OP_reg7:
17707 case DW_OP_reg8:
17708 case DW_OP_reg9:
17709 case DW_OP_reg10:
17710 case DW_OP_reg11:
17711 case DW_OP_reg12:
17712 case DW_OP_reg13:
17713 case DW_OP_reg14:
17714 case DW_OP_reg15:
17715 case DW_OP_reg16:
17716 case DW_OP_reg17:
17717 case DW_OP_reg18:
17718 case DW_OP_reg19:
17719 case DW_OP_reg20:
17720 case DW_OP_reg21:
17721 case DW_OP_reg22:
17722 case DW_OP_reg23:
17723 case DW_OP_reg24:
17724 case DW_OP_reg25:
17725 case DW_OP_reg26:
17726 case DW_OP_reg27:
17727 case DW_OP_reg28:
17728 case DW_OP_reg29:
17729 case DW_OP_reg30:
17730 case DW_OP_reg31:
17731 case DW_OP_bregx:
17732 case DW_OP_piece:
17733 case DW_OP_deref_size:
17734 case DW_OP_nop:
17735 case DW_OP_bit_piece:
17736 case DW_OP_implicit_value:
17737 case DW_OP_stack_value:
17738 break;
17739
17740 case DW_OP_addr:
17741 case DW_OP_const1u:
17742 case DW_OP_const1s:
17743 case DW_OP_const2u:
17744 case DW_OP_const2s:
17745 case DW_OP_const4u:
17746 case DW_OP_const4s:
17747 case DW_OP_const8u:
17748 case DW_OP_const8s:
17749 case DW_OP_constu:
17750 case DW_OP_consts:
17751 case DW_OP_dup:
17752 case DW_OP_over:
17753 case DW_OP_pick:
17754 case DW_OP_lit0:
17755 case DW_OP_lit1:
17756 case DW_OP_lit2:
17757 case DW_OP_lit3:
17758 case DW_OP_lit4:
17759 case DW_OP_lit5:
17760 case DW_OP_lit6:
17761 case DW_OP_lit7:
17762 case DW_OP_lit8:
17763 case DW_OP_lit9:
17764 case DW_OP_lit10:
17765 case DW_OP_lit11:
17766 case DW_OP_lit12:
17767 case DW_OP_lit13:
17768 case DW_OP_lit14:
17769 case DW_OP_lit15:
17770 case DW_OP_lit16:
17771 case DW_OP_lit17:
17772 case DW_OP_lit18:
17773 case DW_OP_lit19:
17774 case DW_OP_lit20:
17775 case DW_OP_lit21:
17776 case DW_OP_lit22:
17777 case DW_OP_lit23:
17778 case DW_OP_lit24:
17779 case DW_OP_lit25:
17780 case DW_OP_lit26:
17781 case DW_OP_lit27:
17782 case DW_OP_lit28:
17783 case DW_OP_lit29:
17784 case DW_OP_lit30:
17785 case DW_OP_lit31:
17786 case DW_OP_breg0:
17787 case DW_OP_breg1:
17788 case DW_OP_breg2:
17789 case DW_OP_breg3:
17790 case DW_OP_breg4:
17791 case DW_OP_breg5:
17792 case DW_OP_breg6:
17793 case DW_OP_breg7:
17794 case DW_OP_breg8:
17795 case DW_OP_breg9:
17796 case DW_OP_breg10:
17797 case DW_OP_breg11:
17798 case DW_OP_breg12:
17799 case DW_OP_breg13:
17800 case DW_OP_breg14:
17801 case DW_OP_breg15:
17802 case DW_OP_breg16:
17803 case DW_OP_breg17:
17804 case DW_OP_breg18:
17805 case DW_OP_breg19:
17806 case DW_OP_breg20:
17807 case DW_OP_breg21:
17808 case DW_OP_breg22:
17809 case DW_OP_breg23:
17810 case DW_OP_breg24:
17811 case DW_OP_breg25:
17812 case DW_OP_breg26:
17813 case DW_OP_breg27:
17814 case DW_OP_breg28:
17815 case DW_OP_breg29:
17816 case DW_OP_breg30:
17817 case DW_OP_breg31:
17818 case DW_OP_fbreg:
17819 case DW_OP_push_object_address:
17820 case DW_OP_call_frame_cfa:
17821 case DW_OP_GNU_variable_value:
17822 ++frame_offset_;
17823 break;
17824
17825 case DW_OP_drop:
17826 case DW_OP_xderef:
17827 case DW_OP_and:
17828 case DW_OP_div:
17829 case DW_OP_minus:
17830 case DW_OP_mod:
17831 case DW_OP_mul:
17832 case DW_OP_or:
17833 case DW_OP_plus:
17834 case DW_OP_shl:
17835 case DW_OP_shr:
17836 case DW_OP_shra:
17837 case DW_OP_xor:
17838 case DW_OP_bra:
17839 case DW_OP_eq:
17840 case DW_OP_ge:
17841 case DW_OP_gt:
17842 case DW_OP_le:
17843 case DW_OP_lt:
17844 case DW_OP_ne:
17845 case DW_OP_regx:
17846 case DW_OP_xderef_size:
17847 --frame_offset_;
17848 break;
17849
17850 case DW_OP_call2:
17851 case DW_OP_call4:
17852 case DW_OP_call_ref:
17853 {
17854 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17855 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17856
17857 if (stack_usage == NULL)
17858 return false;
17859 frame_offset_ += *stack_usage;
17860 break;
17861 }
17862
17863 case DW_OP_implicit_pointer:
17864 case DW_OP_entry_value:
17865 case DW_OP_const_type:
17866 case DW_OP_regval_type:
17867 case DW_OP_deref_type:
17868 case DW_OP_convert:
17869 case DW_OP_reinterpret:
17870 case DW_OP_form_tls_address:
17871 case DW_OP_GNU_push_tls_address:
17872 case DW_OP_GNU_uninit:
17873 case DW_OP_GNU_encoded_addr:
17874 case DW_OP_GNU_implicit_pointer:
17875 case DW_OP_GNU_entry_value:
17876 case DW_OP_GNU_const_type:
17877 case DW_OP_GNU_regval_type:
17878 case DW_OP_GNU_deref_type:
17879 case DW_OP_GNU_convert:
17880 case DW_OP_GNU_reinterpret:
17881 case DW_OP_GNU_parameter_ref:
17882 /* loc_list_from_tree will probably not output these operations for
17883 size functions, so assume they will not appear here. */
17884 /* Fall through... */
17885
17886 default:
17887 gcc_unreachable ();
17888 }
17889
17890 /* Now, follow the control flow (except subroutine calls). */
17891 switch (l->dw_loc_opc)
17892 {
17893 case DW_OP_bra:
17894 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17895 frame_offsets))
17896 return false;
17897 /* Fall through. */
17898
17899 case DW_OP_skip:
17900 l = l->dw_loc_oprnd1.v.val_loc;
17901 break;
17902
17903 case DW_OP_stack_value:
17904 return true;
17905
17906 default:
17907 l = l->dw_loc_next;
17908 break;
17909 }
17910 }
17911
17912 return true;
17913 }
17914
17915 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17916 operations) in order to resolve the operand of DW_OP_pick operations that
17917 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17918 offset *before* LOC is executed.  Return whether all relocations were
17919 successful. */
17920
17921 static bool
17922 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17923 struct dwarf_procedure_info *dpi)
17924 {
17925 /* Associate to all visited operations the frame offset *before* evaluating
17926 this operation. */
17927 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17928
17929 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17930 frame_offsets);
17931 }
17932
17933 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17934 Return NULL if it is not possible. */
17935
17936 static dw_die_ref
17937 function_to_dwarf_procedure (tree fndecl)
17938 {
17939 struct loc_descr_context ctx;
17940 struct dwarf_procedure_info dpi;
17941 dw_die_ref dwarf_proc_die;
17942 tree tree_body = DECL_SAVED_TREE (fndecl);
17943 dw_loc_descr_ref loc_body, epilogue;
17944
17945 tree cursor;
17946 unsigned i;
17947
17948 /* Do not generate multiple DWARF procedures for the same function
17949 declaration. */
17950 dwarf_proc_die = lookup_decl_die (fndecl);
17951 if (dwarf_proc_die != NULL)
17952 return dwarf_proc_die;
17953
17954 /* DWARF procedures are available starting with the DWARFv3 standard. */
17955 if (dwarf_version < 3 && dwarf_strict)
17956 return NULL;
17957
17958 /* We handle only functions for which we still have a body, that return a
17959 supported type and that take arguments with supported types.  Note that
17960 there is no point translating functions that return nothing. */
17961 if (tree_body == NULL_TREE
17962 || DECL_RESULT (fndecl) == NULL_TREE
17963 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17964 return NULL;
17965
17966 for (cursor = DECL_ARGUMENTS (fndecl);
17967 cursor != NULL_TREE;
17968 cursor = TREE_CHAIN (cursor))
17969 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17970 return NULL;
17971
17972 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17973 if (TREE_CODE (tree_body) != RETURN_EXPR)
17974 return NULL;
17975 tree_body = TREE_OPERAND (tree_body, 0);
17976 if (TREE_CODE (tree_body) != MODIFY_EXPR
17977 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17978 return NULL;
17979 tree_body = TREE_OPERAND (tree_body, 1);
17980
17981 /* Try to translate the body expression itself. Note that this will probably
17982 cause an infinite recursion if its call graph has a cycle. This is very
17983 unlikely for size functions, however, so don't bother with such things at
17984 the moment. */
17985 ctx.context_type = NULL_TREE;
17986 ctx.base_decl = NULL_TREE;
17987 ctx.dpi = &dpi;
17988 ctx.placeholder_arg = false;
17989 ctx.placeholder_seen = false;
17990 dpi.fndecl = fndecl;
17991 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17992 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17993 if (!loc_body)
17994 return NULL;
17995
17996 /* After evaluating all operations in "loc_body", we should still have on the
17997 stack all arguments plus the desired function result (top of the stack).
17998 Generate code in order to keep only the result in our stack frame. */
17999 epilogue = NULL;
18000 for (i = 0; i < dpi.args_count; ++i)
18001 {
18002 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18003 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18004 op_couple->dw_loc_next->dw_loc_next = epilogue;
18005 epilogue = op_couple;
18006 }
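/* Illustrative example: for a procedure with two arguments, the epilogue
   built above is DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop, turning a
   stack of [arg1 arg0 result] (result on top) into just [result].  */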
18007 add_loc_descr (&loc_body, epilogue);
18008 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18009 return NULL;
18010
18011 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18012 earlier because they were considered useful.  Now that there is an epilogue,
18013 they no longer are, so give it another try.  */
18014 loc_descr_without_nops (loc_body);
18015
18016 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18017 a DW_TAG_dwarf_procedure, so we may have a conflict here.  It's unlikely,
18018 though, given that size functions do not come from source, so they should
18019 not have a dedicated DW_TAG_subprogram DIE. */
18020 dwarf_proc_die
18021 = new_dwarf_proc_die (loc_body, fndecl,
18022 get_context_die (DECL_CONTEXT (fndecl)));
18023
18024 /* The called DWARF procedure consumes one stack slot per argument and
18025 returns one stack slot. */
18026 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18027
18028 return dwarf_proc_die;
18029 }
18030
18031
18032 /* Generate a DWARF location list representing LOC.
18033 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18034 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18035 If WANT_ADDRESS is 2, an expression computing an address usable in a
18036 location description is returned (i.e. DW_OP_reg can be used
18037 to refer to register values).
18038
18039 CONTEXT provides information to customize the location descriptions
18040 generation. Its context_type field specifies what type is implicitly
18041 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18042 will not be generated.
18043
18044 Its DPI field determines whether we are generating a DWARF expression for a
18045 DWARF procedure, in which case PARM_DECL references are processed specially.
18046
18047 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18048 and dpi fields were null. */
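/* Rough illustration (for a hypothetical local variable V stored at frame
   offset -8; the exact operations depend on where V actually lives):

     want_address == 0:  DW_OP_fbreg -8, DW_OP_deref   (the value of V)
     want_address == 1:  DW_OP_fbreg -8                (the address of V)
     want_address == 2:  DW_OP_reg<N>                  (a register location,
                                                        if V lives in a register)  */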
18049
18050 static dw_loc_list_ref
18051 loc_list_from_tree_1 (tree loc, int want_address,
18052 struct loc_descr_context *context)
18053 {
18054 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18055 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18056 int have_address = 0;
18057 enum dwarf_location_atom op;
18058
18059 /* ??? Most of the time we do not take proper care to sign/zero
18060 extend the values.  Hopefully this won't be a real
18061 problem... */
18062
18063 if (context != NULL
18064 && context->base_decl == loc
18065 && want_address == 0)
18066 {
18067 if (dwarf_version >= 3 || !dwarf_strict)
18068 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18069 NULL, 0, NULL, 0, NULL);
18070 else
18071 return NULL;
18072 }
18073
18074 switch (TREE_CODE (loc))
18075 {
18076 case ERROR_MARK:
18077 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18078 return 0;
18079
18080 case PLACEHOLDER_EXPR:
18081 /* This case involves extracting fields from an object to determine the
18082 position of other fields. It is supposed to appear only as the first
18083 operand of COMPONENT_REF nodes and to reference precisely the type
18084 that the context allows. */
18085 if (context != NULL
18086 && TREE_TYPE (loc) == context->context_type
18087 && want_address >= 1)
18088 {
18089 if (dwarf_version >= 3 || !dwarf_strict)
18090 {
18091 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18092 have_address = 1;
18093 break;
18094 }
18095 else
18096 return NULL;
18097 }
18098 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18099 the single argument passed by the consumer.  */
18100 else if (context != NULL
18101 && context->placeholder_arg
18102 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18103 && want_address == 0)
18104 {
18105 ret = new_loc_descr (DW_OP_pick, 0, 0);
18106 ret->frame_offset_rel = 1;
18107 context->placeholder_seen = true;
18108 break;
18109 }
18110 else
18111 expansion_failed (loc, NULL_RTX,
18112 "PLACEHOLDER_EXPR for an unexpected type");
18113 break;
18114
18115 case CALL_EXPR:
18116 {
18117 const int nargs = call_expr_nargs (loc);
18118 tree callee = get_callee_fndecl (loc);
18119 int i;
18120 dw_die_ref dwarf_proc;
18121
18122 if (callee == NULL_TREE)
18123 goto call_expansion_failed;
18124
18125 /* We handle only functions that return a supported (scalar) type.  */
18126 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18127 goto call_expansion_failed;
18128
18129 dwarf_proc = function_to_dwarf_procedure (callee);
18130 if (dwarf_proc == NULL)
18131 goto call_expansion_failed;
18132
18133 /* Evaluate arguments right-to-left so that the first argument will
18134 be the top-most one on the stack. */
18135 for (i = nargs - 1; i >= 0; --i)
18136 {
18137 dw_loc_descr_ref loc_descr
18138 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18139 context);
18140
18141 if (loc_descr == NULL)
18142 goto call_expansion_failed;
18143
18144 add_loc_descr (&ret, loc_descr);
18145 }
18146
18147 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18148 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18149 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18150 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18151 add_loc_descr (&ret, ret1);
18152 break;
18153
18154 call_expansion_failed:
18155 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18156 /* There are no opcodes for these operations. */
18157 return 0;
18158 }
18159
18160 case PREINCREMENT_EXPR:
18161 case PREDECREMENT_EXPR:
18162 case POSTINCREMENT_EXPR:
18163 case POSTDECREMENT_EXPR:
18164 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18165 /* There are no opcodes for these operations. */
18166 return 0;
18167
18168 case ADDR_EXPR:
18169 /* If we already want an address, see if there is an INDIRECT_REF inside,
18170 e.g. for &this->field.  */
18171 if (want_address)
18172 {
18173 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18174 (loc, want_address == 2, context);
18175 if (list_ret)
18176 have_address = 1;
18177 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18178 && (ret = cst_pool_loc_descr (loc)))
18179 have_address = 1;
18180 }
18181 /* Otherwise, process the argument and look for the address. */
18182 if (!list_ret && !ret)
18183 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18184 else
18185 {
18186 if (want_address)
18187 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18188 return NULL;
18189 }
18190 break;
18191
18192 case VAR_DECL:
18193 if (DECL_THREAD_LOCAL_P (loc))
18194 {
18195 rtx rtl;
18196 enum dwarf_location_atom tls_op;
18197 enum dtprel_bool dtprel = dtprel_false;
18198
18199 if (targetm.have_tls)
18200 {
18201 /* If this is not defined, we have no way to emit the
18202 data. */
18203 if (!targetm.asm_out.output_dwarf_dtprel)
18204 return 0;
18205
18206 /* The way DW_OP_GNU_push_tls_address is specified, we
18207 can only look up addresses of objects in the current
18208 module. We used DW_OP_addr as first op, but that's
18209 wrong, because DW_OP_addr is relocated by the debug
18210 info consumer, while DW_OP_GNU_push_tls_address
18211 operand shouldn't be. */
18212 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18213 return 0;
18214 dtprel = dtprel_true;
18215 /* We check for DWARF 5 here because gdb did not implement
18216 DW_OP_form_tls_address until after 7.12. */
18217 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18218 : DW_OP_GNU_push_tls_address);
18219 }
18220 else
18221 {
18222 if (!targetm.emutls.debug_form_tls_address
18223 || !(dwarf_version >= 3 || !dwarf_strict))
18224 return 0;
18225 /* We stuffed the control variable into the DECL_VALUE_EXPR
18226 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18227 no longer appear in gimple code.  We used the control
18228 variable specifically so that we could pick it up here.  */
18229 loc = DECL_VALUE_EXPR (loc);
18230 tls_op = DW_OP_form_tls_address;
18231 }
18232
18233 rtl = rtl_for_decl_location (loc);
18234 if (rtl == NULL_RTX)
18235 return 0;
18236
18237 if (!MEM_P (rtl))
18238 return 0;
18239 rtl = XEXP (rtl, 0);
18240 if (! CONSTANT_P (rtl))
18241 return 0;
18242
18243 ret = new_addr_loc_descr (rtl, dtprel);
18244 ret1 = new_loc_descr (tls_op, 0, 0);
18245 add_loc_descr (&ret, ret1);
18246
18247 have_address = 1;
18248 break;
18249 }
18250 /* FALLTHRU */
18251
18252 case PARM_DECL:
18253 if (context != NULL && context->dpi != NULL
18254 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18255 {
18256 /* We are generating code for a DWARF procedure and we want to access
18257 one of its arguments: find the appropriate argument offset and let
18258 the resolve_args_picking pass compute the offset that complies
18259 with the stack frame size. */
18260 unsigned i = 0;
18261 tree cursor;
18262
18263 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18264 cursor != NULL_TREE && cursor != loc;
18265 cursor = TREE_CHAIN (cursor), ++i)
18266 ;
18267 /* If we are translating a DWARF procedure, all referenced parameters
18268 must belong to the current function. */
18269 gcc_assert (cursor != NULL_TREE);
18270
18271 ret = new_loc_descr (DW_OP_pick, i, 0);
18272 ret->frame_offset_rel = 1;
18273 break;
18274 }
18275 /* FALLTHRU */
18276
18277 case RESULT_DECL:
18278 if (DECL_HAS_VALUE_EXPR_P (loc))
18279 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18280 want_address, context);
18281 /* FALLTHRU */
18282
18283 case FUNCTION_DECL:
18284 {
18285 rtx rtl;
18286 var_loc_list *loc_list = lookup_decl_loc (loc);
18287
18288 if (loc_list && loc_list->first)
18289 {
18290 list_ret = dw_loc_list (loc_list, loc, want_address);
18291 have_address = want_address != 0;
18292 break;
18293 }
18294 rtl = rtl_for_decl_location (loc);
18295 if (rtl == NULL_RTX)
18296 {
18297 if (TREE_CODE (loc) != FUNCTION_DECL
18298 && early_dwarf
18299 && current_function_decl
18300 && want_address != 1
18301 && ! DECL_IGNORED_P (loc)
18302 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18303 || POINTER_TYPE_P (TREE_TYPE (loc)))
18304 && DECL_CONTEXT (loc) == current_function_decl
18305 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18306 <= DWARF2_ADDR_SIZE))
18307 {
18308 dw_die_ref ref = lookup_decl_die (loc);
18309 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18310 if (ref)
18311 {
18312 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18313 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18314 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18315 }
18316 else
18317 {
18318 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18319 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18320 }
18321 break;
18322 }
18323 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18324 return 0;
18325 }
18326 else if (CONST_INT_P (rtl))
18327 {
18328 HOST_WIDE_INT val = INTVAL (rtl);
18329 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18330 val &= GET_MODE_MASK (DECL_MODE (loc));
18331 ret = int_loc_descriptor (val);
18332 }
18333 else if (GET_CODE (rtl) == CONST_STRING)
18334 {
18335 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18336 return 0;
18337 }
18338 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18339 ret = new_addr_loc_descr (rtl, dtprel_false);
18340 else
18341 {
18342 machine_mode mode, mem_mode;
18343
18344 /* Certain constructs can only be represented at top-level. */
18345 if (want_address == 2)
18346 {
18347 ret = loc_descriptor (rtl, VOIDmode,
18348 VAR_INIT_STATUS_INITIALIZED);
18349 have_address = 1;
18350 }
18351 else
18352 {
18353 mode = GET_MODE (rtl);
18354 mem_mode = VOIDmode;
18355 if (MEM_P (rtl))
18356 {
18357 mem_mode = mode;
18358 mode = get_address_mode (rtl);
18359 rtl = XEXP (rtl, 0);
18360 have_address = 1;
18361 }
18362 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18363 VAR_INIT_STATUS_INITIALIZED);
18364 }
18365 if (!ret)
18366 expansion_failed (loc, rtl,
18367 "failed to produce loc descriptor for rtl");
18368 }
18369 }
18370 break;
18371
18372 case MEM_REF:
18373 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18374 {
18375 have_address = 1;
18376 goto do_plus;
18377 }
18378 /* Fallthru. */
18379 case INDIRECT_REF:
18380 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18381 have_address = 1;
18382 break;
18383
18384 case TARGET_MEM_REF:
18385 case SSA_NAME:
18386 case DEBUG_EXPR_DECL:
18387 return NULL;
18388
18389 case COMPOUND_EXPR:
18390 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18391 context);
18392
18393 CASE_CONVERT:
18394 case VIEW_CONVERT_EXPR:
18395 case SAVE_EXPR:
18396 case MODIFY_EXPR:
18397 case NON_LVALUE_EXPR:
18398 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18399 context);
18400
18401 case COMPONENT_REF:
18402 case BIT_FIELD_REF:
18403 case ARRAY_REF:
18404 case ARRAY_RANGE_REF:
18405 case REALPART_EXPR:
18406 case IMAGPART_EXPR:
18407 {
18408 tree obj, offset;
18409 poly_int64 bitsize, bitpos, bytepos;
18410 machine_mode mode;
18411 int unsignedp, reversep, volatilep = 0;
18412
18413 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18414 &unsignedp, &reversep, &volatilep);
18415
18416 gcc_assert (obj != loc);
18417
18418 list_ret = loc_list_from_tree_1 (obj,
18419 want_address == 2
18420 && known_eq (bitpos, 0)
18421 && !offset ? 2 : 1,
18422 context);
18423 /* TODO: We can extract the value of a small expression via shifting even
18424 for a nonzero bitpos.  */
18425 if (list_ret == 0)
18426 return 0;
18427 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18428 || !multiple_p (bitsize, BITS_PER_UNIT))
18429 {
18430 expansion_failed (loc, NULL_RTX,
18431 "bitfield access");
18432 return 0;
18433 }
18434
18435 if (offset != NULL_TREE)
18436 {
18437 /* Variable offset. */
18438 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18439 if (list_ret1 == 0)
18440 return 0;
18441 add_loc_list (&list_ret, list_ret1);
18442 if (!list_ret)
18443 return 0;
18444 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18445 }
18446
18447 HOST_WIDE_INT value;
18448 if (bytepos.is_constant (&value) && value > 0)
18449 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18450 value, 0));
18451 else if (maybe_ne (bytepos, 0))
18452 loc_list_plus_const (list_ret, bytepos);
18453
18454 have_address = 1;
18455 break;
18456 }
18457
18458 case INTEGER_CST:
18459 if ((want_address || !tree_fits_shwi_p (loc))
18460 && (ret = cst_pool_loc_descr (loc)))
18461 have_address = 1;
18462 else if (want_address == 2
18463 && tree_fits_shwi_p (loc)
18464 && (ret = address_of_int_loc_descriptor
18465 (int_size_in_bytes (TREE_TYPE (loc)),
18466 tree_to_shwi (loc))))
18467 have_address = 1;
18468 else if (tree_fits_shwi_p (loc))
18469 ret = int_loc_descriptor (tree_to_shwi (loc));
18470 else if (tree_fits_uhwi_p (loc))
18471 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18472 else
18473 {
18474 expansion_failed (loc, NULL_RTX,
18475 "Integer operand is not host integer");
18476 return 0;
18477 }
18478 break;
18479
18480 case CONSTRUCTOR:
18481 case REAL_CST:
18482 case STRING_CST:
18483 case COMPLEX_CST:
18484 if ((ret = cst_pool_loc_descr (loc)))
18485 have_address = 1;
18486 else if (TREE_CODE (loc) == CONSTRUCTOR)
18487 {
18488 tree type = TREE_TYPE (loc);
18489 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18490 unsigned HOST_WIDE_INT offset = 0;
18491 unsigned HOST_WIDE_INT cnt;
18492 constructor_elt *ce;
18493
18494 if (TREE_CODE (type) == RECORD_TYPE)
18495 {
18496 /* This is very limited, but it's enough to output
18497 pointers to member functions, as long as the
18498 referenced function is defined in the current
18499 translation unit. */
18500 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18501 {
18502 tree val = ce->value;
18503
18504 tree field = ce->index;
18505
18506 if (val)
18507 STRIP_NOPS (val);
18508
18509 if (!field || DECL_BIT_FIELD (field))
18510 {
18511 expansion_failed (loc, NULL_RTX,
18512 "bitfield in record type constructor");
18513 size = offset = (unsigned HOST_WIDE_INT)-1;
18514 ret = NULL;
18515 break;
18516 }
18517
18518 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18519 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18520 gcc_assert (pos + fieldsize <= size);
18521 if (pos < offset)
18522 {
18523 expansion_failed (loc, NULL_RTX,
18524 "out-of-order fields in record constructor");
18525 size = offset = (unsigned HOST_WIDE_INT)-1;
18526 ret = NULL;
18527 break;
18528 }
18529 if (pos > offset)
18530 {
18531 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18532 add_loc_descr (&ret, ret1);
18533 offset = pos;
18534 }
18535 if (val && fieldsize != 0)
18536 {
18537 ret1 = loc_descriptor_from_tree (val, want_address, context);
18538 if (!ret1)
18539 {
18540 expansion_failed (loc, NULL_RTX,
18541 "unsupported expression in field");
18542 size = offset = (unsigned HOST_WIDE_INT)-1;
18543 ret = NULL;
18544 break;
18545 }
18546 add_loc_descr (&ret, ret1);
18547 }
18548 if (fieldsize)
18549 {
18550 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18551 add_loc_descr (&ret, ret1);
18552 offset = pos + fieldsize;
18553 }
18554 }
18555
18556 if (offset != size)
18557 {
18558 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18559 add_loc_descr (&ret, ret1);
18560 offset = size;
18561 }
18562
18563 have_address = !!want_address;
18564 }
18565 else
18566 expansion_failed (loc, NULL_RTX,
18567 "constructor of non-record type");
18568 }
18569 else
18570 /* We can construct small constants here using int_loc_descriptor. */
18571 expansion_failed (loc, NULL_RTX,
18572 "constructor or constant not in constant pool");
18573 break;
18574
18575 case TRUTH_AND_EXPR:
18576 case TRUTH_ANDIF_EXPR:
18577 case BIT_AND_EXPR:
18578 op = DW_OP_and;
18579 goto do_binop;
18580
18581 case TRUTH_XOR_EXPR:
18582 case BIT_XOR_EXPR:
18583 op = DW_OP_xor;
18584 goto do_binop;
18585
18586 case TRUTH_OR_EXPR:
18587 case TRUTH_ORIF_EXPR:
18588 case BIT_IOR_EXPR:
18589 op = DW_OP_or;
18590 goto do_binop;
18591
18592 case FLOOR_DIV_EXPR:
18593 case CEIL_DIV_EXPR:
18594 case ROUND_DIV_EXPR:
18595 case TRUNC_DIV_EXPR:
18596 case EXACT_DIV_EXPR:
18597 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18598 return 0;
18599 op = DW_OP_div;
18600 goto do_binop;
18601
18602 case MINUS_EXPR:
18603 op = DW_OP_minus;
18604 goto do_binop;
18605
18606 case FLOOR_MOD_EXPR:
18607 case CEIL_MOD_EXPR:
18608 case ROUND_MOD_EXPR:
18609 case TRUNC_MOD_EXPR:
18610 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18611 {
18612 op = DW_OP_mod;
18613 goto do_binop;
18614 }
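/* Signed modulus: the sequence below computes op0 - (op0 / op1) * op1,
   duplicating both operands with two DW_OP_over operations first
   (DW_OP_div performs a signed division).  */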
18615 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18616 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18617 if (list_ret == 0 || list_ret1 == 0)
18618 return 0;
18619
18620 add_loc_list (&list_ret, list_ret1);
18621 if (list_ret == 0)
18622 return 0;
18623 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18624 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18625 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18626 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18627 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18628 break;
18629
18630 case MULT_EXPR:
18631 op = DW_OP_mul;
18632 goto do_binop;
18633
18634 case LSHIFT_EXPR:
18635 op = DW_OP_shl;
18636 goto do_binop;
18637
18638 case RSHIFT_EXPR:
18639 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18640 goto do_binop;
18641
18642 case POINTER_PLUS_EXPR:
18643 case PLUS_EXPR:
18644 do_plus:
18645 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18646 {
18647 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18648 smarter to encode their opposite. The DW_OP_plus_uconst operation
18649 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18650 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18651 bytes, Y being the size of the operation that pushes the opposite
18652 of the addend. So let's choose the smallest representation. */
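/* Worked example (illustrative, 64-bit target): for an addend of -1,
   DW_OP_plus_uconst would need a 10-byte ULEB128 operand (11 bytes in
   total), whereas DW_OP_lit1 followed by DW_OP_minus takes 2 bytes, so
   the latter is chosen.  */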
18653 const tree tree_addend = TREE_OPERAND (loc, 1);
18654 offset_int wi_addend;
18655 HOST_WIDE_INT shwi_addend;
18656 dw_loc_descr_ref loc_naddend;
18657
18658 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18659 if (list_ret == 0)
18660 return 0;
18661
18662 /* Try to get the literal to push.  It is the opposite of the addend,
18663 and since we rely on wrapping during DWARF evaluation, first decode
18664 the literal as a "DWARF-sized" signed number.  */
18665 wi_addend = wi::to_offset (tree_addend);
18666 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18667 shwi_addend = wi_addend.to_shwi ();
18668 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18669 ? int_loc_descriptor (-shwi_addend)
18670 : NULL;
18671
18672 if (loc_naddend != NULL
18673 && ((unsigned) size_of_uleb128 (shwi_addend)
18674 > size_of_loc_descr (loc_naddend)))
18675 {
18676 add_loc_descr_to_each (list_ret, loc_naddend);
18677 add_loc_descr_to_each (list_ret,
18678 new_loc_descr (DW_OP_minus, 0, 0));
18679 }
18680 else
18681 {
18682 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18683 {
18684 loc_naddend = loc_cur;
18685 loc_cur = loc_cur->dw_loc_next;
18686 ggc_free (loc_naddend);
18687 }
18688 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18689 }
18690 break;
18691 }
18692
18693 op = DW_OP_plus;
18694 goto do_binop;
18695
18696 case LE_EXPR:
18697 op = DW_OP_le;
18698 goto do_comp_binop;
18699
18700 case GE_EXPR:
18701 op = DW_OP_ge;
18702 goto do_comp_binop;
18703
18704 case LT_EXPR:
18705 op = DW_OP_lt;
18706 goto do_comp_binop;
18707
18708 case GT_EXPR:
18709 op = DW_OP_gt;
18710 goto do_comp_binop;
18711
18712 do_comp_binop:
18713 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18714 {
18715 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18716 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18717 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18718 TREE_CODE (loc));
18719 break;
18720 }
18721 else
18722 goto do_binop;
18723
18724 case EQ_EXPR:
18725 op = DW_OP_eq;
18726 goto do_binop;
18727
18728 case NE_EXPR:
18729 op = DW_OP_ne;
18730 goto do_binop;
18731
18732 do_binop:
18733 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18734 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18735 if (list_ret == 0 || list_ret1 == 0)
18736 return 0;
18737
18738 add_loc_list (&list_ret, list_ret1);
18739 if (list_ret == 0)
18740 return 0;
18741 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18742 break;
18743
18744 case TRUTH_NOT_EXPR:
18745 case BIT_NOT_EXPR:
18746 op = DW_OP_not;
18747 goto do_unop;
18748
18749 case ABS_EXPR:
18750 op = DW_OP_abs;
18751 goto do_unop;
18752
18753 case NEGATE_EXPR:
18754 op = DW_OP_neg;
18755 goto do_unop;
18756
18757 do_unop:
18758 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18759 if (list_ret == 0)
18760 return 0;
18761
18762 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18763 break;
18764
18765 case MIN_EXPR:
18766 case MAX_EXPR:
18767 {
18768 const enum tree_code code =
18769 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18770
18771 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18772 build2 (code, integer_type_node,
18773 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18774 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18775 }
18776
18777 /* fall through */
18778
18779 case COND_EXPR:
18780 {
18781 dw_loc_descr_ref lhs
18782 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18783 dw_loc_list_ref rhs
18784 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18785 dw_loc_descr_ref bra_node, jump_node, tmp;
18786
18787 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18788 if (list_ret == 0 || lhs == 0 || rhs == 0)
18789 return 0;
18790
18791 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18792 add_loc_descr_to_each (list_ret, bra_node);
18793
18794 add_loc_list (&list_ret, rhs);
18795 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18796 add_loc_descr_to_each (list_ret, jump_node);
18797
18798 add_loc_descr_to_each (list_ret, lhs);
18799 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18800 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18801
18802 /* ??? Need a node to point the skip at. Use a nop. */
18803 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18804 add_loc_descr_to_each (list_ret, tmp);
18805 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18806 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
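/* Illustrative shape of the expression built above (labels are only for
   exposition):
     <condition>  DW_OP_bra L1  <else-value>  DW_OP_skip L2
     L1: <then-value>  L2: DW_OP_nop  */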
18807 }
18808 break;
18809
18810 case FIX_TRUNC_EXPR:
18811 return 0;
18812
18813 default:
18814 /* Leave front-end specific codes as simply unknown. This comes
18815 up, for instance, with the C STMT_EXPR. */
18816 if ((unsigned int) TREE_CODE (loc)
18817 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18818 {
18819 expansion_failed (loc, NULL_RTX,
18820 "language specific tree node");
18821 return 0;
18822 }
18823
18824 /* Otherwise this is a generic code; we should have listed all of
18825 these explicitly, but we forgot one.  */
18826 if (flag_checking)
18827 gcc_unreachable ();
18828
18829 /* In a release build, we want to degrade gracefully: better to
18830 generate incomplete debugging information than to crash. */
18831 return NULL;
18832 }
18833
18834 if (!ret && !list_ret)
18835 return 0;
18836
18837 if (want_address == 2 && !have_address
18838 && (dwarf_version >= 4 || !dwarf_strict))
18839 {
18840 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18841 {
18842 expansion_failed (loc, NULL_RTX,
18843 "DWARF address size mismatch");
18844 return 0;
18845 }
18846 if (ret)
18847 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18848 else
18849 add_loc_descr_to_each (list_ret,
18850 new_loc_descr (DW_OP_stack_value, 0, 0));
18851 have_address = 1;
18852 }
18853 /* Report it if we can't fulfill the request for an address.  */
18854 if (want_address && !have_address)
18855 {
18856 expansion_failed (loc, NULL_RTX,
18857 "Want address and only have value");
18858 return 0;
18859 }
18860
18861 gcc_assert (!ret || !list_ret);
18862
18863 /* If we've got an address and don't want one, dereference. */
18864 if (!want_address && have_address)
18865 {
18866 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18867
18868 if (size > DWARF2_ADDR_SIZE || size == -1)
18869 {
18870 expansion_failed (loc, NULL_RTX,
18871 "DWARF address size mismatch");
18872 return 0;
18873 }
18874 else if (size == DWARF2_ADDR_SIZE)
18875 op = DW_OP_deref;
18876 else
18877 op = DW_OP_deref_size;
18878
18879 if (ret)
18880 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18881 else
18882 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18883 }
18884 if (ret)
18885 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18886
18887 return list_ret;
18888 }
18889
18890 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18891 expressions. */
18892
18893 static dw_loc_list_ref
18894 loc_list_from_tree (tree loc, int want_address,
18895 struct loc_descr_context *context)
18896 {
18897 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18898
18899 for (dw_loc_list_ref loc_cur = result;
18900 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18901 loc_descr_without_nops (loc_cur->expr);
18902 return result;
18903 }
18904
18905 /* Same as above, but return only a single location expression.  */
18906 static dw_loc_descr_ref
18907 loc_descriptor_from_tree (tree loc, int want_address,
18908 struct loc_descr_context *context)
18909 {
18910 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18911 if (!ret)
18912 return NULL;
18913 if (ret->dw_loc_next)
18914 {
18915 expansion_failed (loc, NULL_RTX,
18916 "Location list where only loc descriptor needed");
18917 return NULL;
18918 }
18919 return ret->expr;
18920 }
18921
18922 /* Given a value, round it up to the lowest multiple of `boundary'
18923 which is not less than the value itself. */
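/* For instance (illustrative): ceiling (37, 8) == 40.  */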
18924
18925 static inline HOST_WIDE_INT
18926 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18927 {
18928 return (((value + boundary - 1) / boundary) * boundary);
18929 }
18930
18931 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18932 pointer to the declared type for the relevant field variable, or return
18933 `integer_type_node' if the given node turns out to be an
18934 ERROR_MARK node. */
18935
18936 static inline tree
18937 field_type (const_tree decl)
18938 {
18939 tree type;
18940
18941 if (TREE_CODE (decl) == ERROR_MARK)
18942 return integer_type_node;
18943
18944 type = DECL_BIT_FIELD_TYPE (decl);
18945 if (type == NULL_TREE)
18946 type = TREE_TYPE (decl);
18947
18948 return type;
18949 }
18950
18951 /* Given a pointer to a tree node, return the alignment in bits for
18952 it, or else return BITS_PER_WORD if the node actually turns out to
18953 be an ERROR_MARK node. */
18954
18955 static inline unsigned
18956 simple_type_align_in_bits (const_tree type)
18957 {
18958 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18959 }
18960
18961 static inline unsigned
18962 simple_decl_align_in_bits (const_tree decl)
18963 {
18964 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18965 }
18966
18967 /* Return the result of rounding T up to ALIGN. */
18968
18969 static inline offset_int
18970 round_up_to_align (const offset_int &t, unsigned int align)
18971 {
18972 return wi::udiv_trunc (t + align - 1, align) * align;
18973 }
18974
18975 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18976 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18977 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18978 if we fail to return the size in one of these two forms. */
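/* Hypothetical usage sketch (not quoted from actual callers): a caller would
   typically do

     HOST_WIDE_INT cst_size;
     dw_loc_descr_ref size_expr = type_byte_size (type, &cst_size);
     if (size_expr != NULL)
       add_AT_loc (die, DW_AT_byte_size, size_expr);
     else if (cst_size != -1)
       add_AT_unsigned (die, DW_AT_byte_size, cst_size);

   i.e. prefer the DWARF expression when the size is dynamic, fall back to
   the integer constant otherwise, and emit nothing when neither form is
   available.  */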
18979
18980 static dw_loc_descr_ref
18981 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18982 {
18983 tree tree_size;
18984 struct loc_descr_context ctx;
18985
18986 /* Prefer to return a constant integer, if possible.  */
18987 *cst_size = int_size_in_bytes (type);
18988 if (*cst_size != -1)
18989 return NULL;
18990
18991 ctx.context_type = const_cast<tree> (type);
18992 ctx.base_decl = NULL_TREE;
18993 ctx.dpi = NULL;
18994 ctx.placeholder_arg = false;
18995 ctx.placeholder_seen = false;
18996
18997 type = TYPE_MAIN_VARIANT (type);
18998 tree_size = TYPE_SIZE_UNIT (type);
18999 return ((tree_size != NULL_TREE)
19000 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19001 : NULL);
19002 }
19003
19004 /* Helper structure for RECORD_TYPE processing. */
19005 struct vlr_context
19006 {
19007 /* Root RECORD_TYPE. It is needed to generate data member location
19008 descriptions in variable-length records (VLR), but also to cope with
19009 variants, which are composed of nested structures multiplexed with
19010 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19011 function processing a FIELD_DECL, it is required to be non-null.  */
19012 tree struct_type;
19013 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19014 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19015 this variant part as part of the root record (in storage units). For
19016 regular records, it must be NULL_TREE. */
19017 tree variant_part_offset;
19018 };
19019
19020 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19021 addressed byte of the "containing object" for the given FIELD_DECL. If
19022 possible, return a native constant through CST_OFFSET (in which case NULL is
19023 returned); otherwise return a DWARF expression that computes the offset.
19024
19025 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19026 that offset is, either because the argument turns out to be a pointer to an
19027 ERROR_MARK node, or because the offset expression is too complex for us.
19028
19029 CTX is required: see the comment for VLR_CONTEXT. */
19030
19031 static dw_loc_descr_ref
19032 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19033 HOST_WIDE_INT *cst_offset)
19034 {
19035 tree tree_result;
19036 dw_loc_list_ref loc_result;
19037
19038 *cst_offset = 0;
19039
19040 if (TREE_CODE (decl) == ERROR_MARK)
19041 return NULL;
19042 else
19043 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19044
19045 /* We cannot handle variable bit offsets at the moment, so abort if that is
19046 the case.  */
19047 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19048 return NULL;
19049
19050 #ifdef PCC_BITFIELD_TYPE_MATTERS
19051 /* We used to handle only constant offsets in all cases.  Now, we handle
19052 dynamic byte offsets properly only when the PCC bitfield type layout
19053 doesn't matter.  */
19054 if (PCC_BITFIELD_TYPE_MATTERS
19055 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19056 {
19057 offset_int object_offset_in_bits;
19058 offset_int object_offset_in_bytes;
19059 offset_int bitpos_int;
19060 tree type;
19061 tree field_size_tree;
19062 offset_int deepest_bitpos;
19063 offset_int field_size_in_bits;
19064 unsigned int type_align_in_bits;
19065 unsigned int decl_align_in_bits;
19066 offset_int type_size_in_bits;
19067
19068 bitpos_int = wi::to_offset (bit_position (decl));
19069 type = field_type (decl);
19070 type_size_in_bits = offset_int_type_size_in_bits (type);
19071 type_align_in_bits = simple_type_align_in_bits (type);
19072
19073 field_size_tree = DECL_SIZE (decl);
19074
19075 /* The size could be unspecified if there was an error, or for
19076 a flexible array member. */
19077 if (!field_size_tree)
19078 field_size_tree = bitsize_zero_node;
19079
19080 /* If the size of the field is not constant, use the type size. */
19081 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19082 field_size_in_bits = wi::to_offset (field_size_tree);
19083 else
19084 field_size_in_bits = type_size_in_bits;
19085
19086 decl_align_in_bits = simple_decl_align_in_bits (decl);
19087
19088 /* The GCC front-end doesn't make any attempt to keep track of the
19089 starting bit offset (relative to the start of the containing
19090 structure type) of the hypothetical "containing object" for a
19091 bit-field. Thus, when computing the byte offset value for the
19092 start of the "containing object" of a bit-field, we must deduce
19093 this information on our own. This can be rather tricky to do in
19094 some cases. For example, handling the following structure type
19095 definition when compiling for an i386/i486 target (which only
19096 aligns long long's to 32-bit boundaries) can be very tricky:
19097
19098 struct S { int field1; long long field2:31; };
19099
19100 Fortunately, there is a simple rule-of-thumb which can be used
19101 in such cases. When compiling for an i386/i486, GCC will
19102 allocate 8 bytes for the structure shown above. It decides to
19103 do this based upon one simple rule for bit-field allocation.
19104 GCC allocates each "containing object" for each bit-field at
19105 the first (i.e. lowest addressed) legitimate alignment boundary
19106 (based upon the required minimum alignment for the declared
19107 type of the field) which it can possibly use, subject to the
19108 condition that there is still enough available space remaining
19109 in the containing object (when allocated at the selected point)
19110 to fully accommodate all of the bits of the bit-field itself.
19111
19112 This simple rule makes it obvious why GCC allocates 8 bytes for
19113 each object of the structure type shown above. When looking
19114 for a place to allocate the "containing object" for `field2',
19115 the compiler simply tries to allocate a 64-bit "containing
19116 object" at each successive 32-bit boundary (starting at zero)
19117 until it finds a place to allocate that 64-bit field such that
19118 at least 31 contiguous (and previously unallocated) bits remain
19119 within that selected 64 bit field. (As it turns out, for the
19120 example above, the compiler finds it is OK to allocate the
19121 "containing object" 64-bit field at bit-offset zero within the
19122 structure type.)
19123
19124 Here we attempt to work backwards from the limited set of facts
19125 we're given, and we try to deduce from those facts, where GCC
19126 must have believed that the containing object started (within
19127 the structure type). The value we deduce is then used (by the
19128 callers of this routine) to generate DW_AT_location and
19129 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19130 the case of DW_AT_location, regular fields as well). */
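/* Worked example for the struct S shown above (illustrative): field2 starts
   at bit 32 and is 31 bits wide; its declared type is 64 bits wide with
   32-bit alignment, so deepest_bitpos = 32 + 31 = 63, object_offset_in_bits
   = 63 - 64 = -1, and rounding up to the 32-bit type alignment gives 0,
   i.e. the containing object starts at byte offset 0.  */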
19131
19132 /* Figure out the bit-distance from the start of the structure to
19133 the "deepest" bit of the bit-field. */
19134 deepest_bitpos = bitpos_int + field_size_in_bits;
19135
19136 /* This is the tricky part. Use some fancy footwork to deduce
19137 where the lowest addressed bit of the containing object must
19138 be. */
19139 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19140
19141 /* Round up to type_align by default. This works best for
19142 bitfields. */
19143 object_offset_in_bits
19144 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19145
19146 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19147 {
19148 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19149
19150 /* Round up to decl_align instead. */
19151 object_offset_in_bits
19152 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19153 }
19154
19155 object_offset_in_bytes
19156 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19157 if (ctx->variant_part_offset == NULL_TREE)
19158 {
19159 *cst_offset = object_offset_in_bytes.to_shwi ();
19160 return NULL;
19161 }
19162 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19163 }
19164 else
19165 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19166 tree_result = byte_position (decl);
19167
19168 if (ctx->variant_part_offset != NULL_TREE)
19169 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19170 ctx->variant_part_offset, tree_result);
19171
19172 /* If the byte offset is a constant, it's simpler to handle a native
19173 constant rather than a DWARF expression. */
19174 if (TREE_CODE (tree_result) == INTEGER_CST)
19175 {
19176 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19177 return NULL;
19178 }
19179 struct loc_descr_context loc_ctx = {
19180 ctx->struct_type, /* context_type */
19181 NULL_TREE, /* base_decl */
19182 NULL, /* dpi */
19183 false, /* placeholder_arg */
19184 false /* placeholder_seen */
19185 };
19186 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19187
19188 /* We want a DWARF expression: abort if we only have a location list with
19189 multiple elements. */
19190 if (!loc_result || !single_element_loc_list_p (loc_result))
19191 return NULL;
19192 else
19193 return loc_result->expr;
19194 }
19195 \f
19196 /* The following routines define various Dwarf attributes and any data
19197 associated with them. */
19198
19199 /* Add a location description attribute value to a DIE.
19200
19201 This emits location attributes suitable for whole variables and
19202 whole parameters. Note that the location attributes for struct fields are
19203 generated by the routine `add_data_member_location_attribute' below. */
19204
19205 static inline void
19206 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19207 dw_loc_list_ref descr)
19208 {
19209 bool check_no_locviews = true;
19210 if (descr == 0)
19211 return;
19212 if (single_element_loc_list_p (descr))
19213 add_AT_loc (die, attr_kind, descr->expr);
19214 else
19215 {
19216 add_AT_loc_list (die, attr_kind, descr);
19217 gcc_assert (descr->ll_symbol);
19218 if (attr_kind == DW_AT_location && descr->vl_symbol
19219 && dwarf2out_locviews_in_attribute ())
19220 {
19221 add_AT_view_list (die, DW_AT_GNU_locviews);
19222 check_no_locviews = false;
19223 }
19224 }
19225
19226 if (check_no_locviews)
19227 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19228 }
19229
19230 /* Add DW_AT_accessibility attribute to DIE if needed. */
19231
19232 static void
19233 add_accessibility_attribute (dw_die_ref die, tree decl)
19234 {
19235 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19236 children, otherwise the default is DW_ACCESS_public. In DWARF2
19237 the default has always been DW_ACCESS_public. */
19238 if (TREE_PROTECTED (decl))
19239 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19240 else if (TREE_PRIVATE (decl))
19241 {
19242 if (dwarf_version == 2
19243 || die->die_parent == NULL
19244 || die->die_parent->die_tag != DW_TAG_class_type)
19245 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19246 }
19247 else if (dwarf_version > 2
19248 && die->die_parent
19249 && die->die_parent->die_tag == DW_TAG_class_type)
19250 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19251 }
19252
19253 /* Attach the specialized form of location attribute used for data members of
19254 struct and union types. In the special case of a FIELD_DECL node which
19255 represents a bit-field, the "offset" part of this special location
19256 descriptor must indicate the distance in bytes from the lowest-addressed
19257 byte of the containing struct or union type to the lowest-addressed byte of
19258 the "containing object" for the bit-field. (See the `field_byte_offset'
19259 function above).
19260
19261 For any given bit-field, the "containing object" is a hypothetical object
19262 (of some integral or enum type) within which the given bit-field lives. The
19263 type of this hypothetical "containing object" is always the same as the
19264 declared type of the individual bit-field itself (for GCC anyway... the
19265 DWARF spec doesn't actually mandate this). Note that it is the size (in
19266 bytes) of the hypothetical "containing object" which will be given in the
19267 DW_AT_byte_size attribute for this bit-field. (See the
19268 `byte_size_attribute' function below.) It is also used when calculating the
19269 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19270 function below.)
19271
19272 CTX is required: see the comment for VLR_CONTEXT. */
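/* As an illustrative sketch (not tied to a specific ABI): for a plain
   member such as an `int' placed 4 bytes into its structure, the DIE
   gets DW_AT_data_member_location 4 when the DWARF 3 (and later)
   constant form is usable, or the single-operation expression
   DW_OP_plus_uconst 4 for strict DWARF 2.  For a bit-field, the offset
   instead locates the lowest-addressed byte of the hypothetical
   "containing object" described above.  */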
19273
19274 static void
19275 add_data_member_location_attribute (dw_die_ref die,
19276 tree decl,
19277 struct vlr_context *ctx)
19278 {
19279 HOST_WIDE_INT offset;
19280 dw_loc_descr_ref loc_descr = 0;
19281
19282 if (TREE_CODE (decl) == TREE_BINFO)
19283 {
19284 /* We're working on the DW_TAG_inheritance DIE for a base class. */
19285 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19286 {
19287 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19288 aren't at a fixed offset from all (sub)objects of the same
19289 type. We need to extract the appropriate offset from our
19290 vtable. The following dwarf expression means
19291
19292 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19293
19294 This is specific to the V3 ABI, of course. */
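/* Concretely, the expression built below is the sequence
   DW_OP_dup; DW_OP_deref; <push -offset>; DW_OP_minus;
   DW_OP_deref; DW_OP_plus, where the pushed constant comes from
   int_loc_descriptor and is typically a DW_OP_lit* or DW_OP_constu
   operation, depending on its value.  */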
19295
19296 dw_loc_descr_ref tmp;
19297
19298 /* Make a copy of the object address. */
19299 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19300 add_loc_descr (&loc_descr, tmp);
19301
19302 /* Extract the vtable address. */
19303 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19304 add_loc_descr (&loc_descr, tmp);
19305
19306 /* Calculate the address of the offset. */
19307 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19308 gcc_assert (offset < 0);
19309
19310 tmp = int_loc_descriptor (-offset);
19311 add_loc_descr (&loc_descr, tmp);
19312 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19313 add_loc_descr (&loc_descr, tmp);
19314
19315 /* Extract the offset. */
19316 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19317 add_loc_descr (&loc_descr, tmp);
19318
19319 /* Add it to the object address. */
19320 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19321 add_loc_descr (&loc_descr, tmp);
19322 }
19323 else
19324 offset = tree_to_shwi (BINFO_OFFSET (decl));
19325 }
19326 else
19327 {
19328 loc_descr = field_byte_offset (decl, ctx, &offset);
19329
19330 /* If loc_descr is available then we know the field offset is dynamic.
19331 However, GDB does not handle dynamic field offsets very well at the
19332 moment. */
19333 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19334 {
19335 loc_descr = NULL;
19336 offset = 0;
19337 }
19338
19339 /* Data member location evaluation starts with the base address on the
19340 stack. Compute the field offset and add it to this base address. */
19341 else if (loc_descr != NULL)
19342 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19343 }
19344
19345 if (! loc_descr)
19346 {
19347 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB,
19348 for example, only added support for it in November 2016. For
19349 DWARF5 we need newer debug info consumers anyway. We might change
19350 this to dwarf_version >= 4 once most consumers have caught up. */
19351 if (dwarf_version >= 5
19352 && TREE_CODE (decl) == FIELD_DECL
19353 && DECL_BIT_FIELD_TYPE (decl))
19354 {
19355 tree off = bit_position (decl);
19356 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19357 {
19358 remove_AT (die, DW_AT_byte_size);
19359 remove_AT (die, DW_AT_bit_offset);
19360 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19361 return;
19362 }
19363 }
19364 if (dwarf_version > 2)
19365 {
19366 /* Don't need to output a location expression, just the constant. */
19367 if (offset < 0)
19368 add_AT_int (die, DW_AT_data_member_location, offset);
19369 else
19370 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19371 return;
19372 }
19373 else
19374 {
19375 enum dwarf_location_atom op;
19376
19377 /* The DWARF2 standard says that we should assume that the structure
19378 address is already on the stack, so we can specify a structure
19379 field address by using DW_OP_plus_uconst. */
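/* For example (illustrative), a member 8 bytes into its structure is
   then described by the single operation DW_OP_plus_uconst 8.  */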
19380 op = DW_OP_plus_uconst;
19381 loc_descr = new_loc_descr (op, offset, 0);
19382 }
19383 }
19384
19385 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19386 }
19387
19388 /* Writes integer values to dw_vec_const array. */
19389
19390 static void
19391 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19392 {
19393 while (size != 0)
19394 {
19395 *dest++ = val & 0xff;
19396 val >>= 8;
19397 --size;
19398 }
19399 }
19400
19401 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19402
19403 static HOST_WIDE_INT
19404 extract_int (const unsigned char *src, unsigned int size)
19405 {
19406 HOST_WIDE_INT val = 0;
19407
19408 src += size;
19409 while (size != 0)
19410 {
19411 val <<= 8;
19412 val |= *--src & 0xff;
19413 --size;
19414 }
19415 return val;
19416 }
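
/* A small usage sketch (illustrative only): insert_int and extract_int
   are inverses for the low SIZE bytes and always use a little-endian
   byte order, independent of the host.  For instance:

     unsigned char buf[2];
     insert_int (0x1234, 2, buf);		buf[0] == 0x34, buf[1] == 0x12
     HOST_WIDE_INT v = extract_int (buf, 2);	v == 0x1234  */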
19417
19418 /* Writes wide_int values to dw_vec_const array. */
19419
19420 static void
19421 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19422 {
19423 int i;
19424
19425 if (elt_size <= HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
19426 {
19427 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19428 return;
19429 }
19430
19431 /* We'd have to extend this code to support odd sizes. */
19432 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19433
19434 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19435
19436 if (WORDS_BIG_ENDIAN)
19437 for (i = n - 1; i >= 0; i--)
19438 {
19439 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19440 dest += sizeof (HOST_WIDE_INT);
19441 }
19442 else
19443 for (i = 0; i < n; i++)
19444 {
19445 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19446 dest += sizeof (HOST_WIDE_INT);
19447 }
19448 }
19449
19450 /* Writes floating point values to dw_vec_const array. */
19451
19452 static void
19453 insert_float (const_rtx rtl, unsigned char *array)
19454 {
19455 long val[4];
19456 int i;
19457 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19458
19459 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19460
19461 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19462 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19463 {
19464 insert_int (val[i], 4, array);
19465 array += 4;
19466 }
19467 }
19468
19469 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19470 does not have a "location" either in memory or in a register. These
19471 things can arise in GNU C when a constant is passed as an actual parameter
19472 to an inlined function. They can also arise in C++ where declared
19473 constants do not necessarily get memory "homes". */
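/* For instance (purely illustrative): if `static inline int f (int x)'
   is always called as `f (42)' and the call is inlined, the DIE for the
   inlined copy of the parameter may carry DW_AT_const_value 42 instead
   of a DW_AT_location attribute.  */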
19474
19475 static bool
19476 add_const_value_attribute (dw_die_ref die, rtx rtl)
19477 {
19478 switch (GET_CODE (rtl))
19479 {
19480 case CONST_INT:
19481 {
19482 HOST_WIDE_INT val = INTVAL (rtl);
19483
19484 if (val < 0)
19485 add_AT_int (die, DW_AT_const_value, val);
19486 else
19487 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19488 }
19489 return true;
19490
19491 case CONST_WIDE_INT:
19492 {
19493 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19494 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19495 (unsigned int) CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19496 wide_int w = wi::zext (w1, prec);
19497 add_AT_wide (die, DW_AT_const_value, w);
19498 }
19499 return true;
19500
19501 case CONST_DOUBLE:
19502 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19503 floating-point constant. A CONST_DOUBLE is used whenever the
19504 constant requires more than one word in order to be adequately
19505 represented. */
19506 if (TARGET_SUPPORTS_WIDE_INT == 0
19507 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19508 add_AT_double (die, DW_AT_const_value,
19509 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19510 else
19511 {
19512 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19513 unsigned int length = GET_MODE_SIZE (mode);
19514 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19515
19516 insert_float (rtl, array);
19517 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19518 }
19519 return true;
19520
19521 case CONST_VECTOR:
19522 {
19523 unsigned int length;
19524 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19525 return false;
19526
19527 machine_mode mode = GET_MODE (rtl);
19528 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19529 unsigned char *array
19530 = ggc_vec_alloc<unsigned char> (length * elt_size);
19531 unsigned int i;
19532 unsigned char *p;
19533 machine_mode imode = GET_MODE_INNER (mode);
19534
19535 switch (GET_MODE_CLASS (mode))
19536 {
19537 case MODE_VECTOR_INT:
19538 for (i = 0, p = array; i < length; i++, p += elt_size)
19539 {
19540 rtx elt = CONST_VECTOR_ELT (rtl, i);
19541 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19542 }
19543 break;
19544
19545 case MODE_VECTOR_FLOAT:
19546 for (i = 0, p = array; i < length; i++, p += elt_size)
19547 {
19548 rtx elt = CONST_VECTOR_ELT (rtl, i);
19549 insert_float (elt, p);
19550 }
19551 break;
19552
19553 default:
19554 gcc_unreachable ();
19555 }
19556
19557 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19558 }
19559 return true;
19560
19561 case CONST_STRING:
19562 if (dwarf_version >= 4 || !dwarf_strict)
19563 {
19564 dw_loc_descr_ref loc_result;
19565 resolve_one_addr (&rtl);
19566 rtl_addr:
19567 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19568 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19569 add_AT_loc (die, DW_AT_location, loc_result);
19570 vec_safe_push (used_rtx_array, rtl);
19571 return true;
19572 }
19573 return false;
19574
19575 case CONST:
19576 if (CONSTANT_P (XEXP (rtl, 0)))
19577 return add_const_value_attribute (die, XEXP (rtl, 0));
19578 /* FALLTHROUGH */
19579 case SYMBOL_REF:
19580 if (!const_ok_for_output (rtl))
19581 return false;
19582 /* FALLTHROUGH */
19583 case LABEL_REF:
19584 if (dwarf_version >= 4 || !dwarf_strict)
19585 goto rtl_addr;
19586 return false;
19587
19588 case PLUS:
19589 /* In cases where an inlined instance of an inline function is passed
19590 the address of an `auto' variable (which is local to the caller) we
19591 can get a situation where the DECL_RTL of the artificial local
19592 variable (for the inlining) which acts as a stand-in for the
19593 corresponding formal parameter (of the inline function) will look
19594 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19595 exactly a compile-time constant expression, but it isn't the address
19596 of the (artificial) local variable either. Rather, it represents the
19597 *value* which the artificial local variable always has during its
19598 lifetime. We currently have no way to represent such quasi-constant
19599 values in Dwarf, so for now we just punt and generate nothing. */
19600 return false;
19601
19602 case HIGH:
19603 case CONST_FIXED:
19604 return false;
19605
19606 case MEM:
19607 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19608 && MEM_READONLY_P (rtl)
19609 && GET_MODE (rtl) == BLKmode)
19610 {
19611 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19612 return true;
19613 }
19614 return false;
19615
19616 default:
19617 /* No other kinds of rtx should be possible here. */
19618 gcc_unreachable ();
19619 }
19620 return false;
19621 }
19622
19623 /* Determine whether the evaluation of EXPR references any variables
19624 or functions which aren't otherwise used (and therefore may not be
19625 output). */
19626 static tree
19627 reference_to_unused (tree * tp, int * walk_subtrees,
19628 void * data ATTRIBUTE_UNUSED)
19629 {
19630 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19631 *walk_subtrees = 0;
19632
19633 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19634 && ! TREE_ASM_WRITTEN (*tp))
19635 return *tp;
19636 /* ??? The C++ FE emits debug information for using decls, so
19637 putting gcc_unreachable here falls over. See PR31899. For now
19638 be conservative. */
19639 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19640 return *tp;
19641 else if (VAR_P (*tp))
19642 {
19643 varpool_node *node = varpool_node::get (*tp);
19644 if (!node || !node->definition)
19645 return *tp;
19646 }
19647 else if (TREE_CODE (*tp) == FUNCTION_DECL
19648 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19649 {
19650 /* The call graph machinery must have finished analyzing,
19651 optimizing and gimplifying the CU by now.
19652 So if *TP has no call graph node associated
19653 with it, it means *TP will not be emitted. */
19654 if (!cgraph_node::get (*tp))
19655 return *tp;
19656 }
19657 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19658 return *tp;
19659
19660 return NULL_TREE;
19661 }
19662
19663 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19664 for use in a later add_const_value_attribute call. */
19665
19666 static rtx
19667 rtl_for_decl_init (tree init, tree type)
19668 {
19669 rtx rtl = NULL_RTX;
19670
19671 STRIP_NOPS (init);
19672
19673 /* If a variable is initialized with a string constant without embedded
19674 zeros, build CONST_STRING. */
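/* E.g. (hypothetical) `static const char msg[] = "hi";' ends up,
   roughly, as (mem/u:BLK (const_string "hi")) below.  */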
19675 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19676 {
19677 tree enttype = TREE_TYPE (type);
19678 tree domain = TYPE_DOMAIN (type);
19679 scalar_int_mode mode;
19680
19681 if (is_int_mode (TYPE_MODE (enttype), &mode)
19682 && GET_MODE_SIZE (mode) == 1
19683 && domain
19684 && TYPE_MAX_VALUE (domain)
19685 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19686 && integer_zerop (TYPE_MIN_VALUE (domain))
19687 && compare_tree_int (TYPE_MAX_VALUE (domain),
19688 TREE_STRING_LENGTH (init) - 1) == 0
19689 && ((size_t) TREE_STRING_LENGTH (init)
19690 == strlen (TREE_STRING_POINTER (init)) + 1))
19691 {
19692 rtl = gen_rtx_CONST_STRING (VOIDmode,
19693 ggc_strdup (TREE_STRING_POINTER (init)));
19694 rtl = gen_rtx_MEM (BLKmode, rtl);
19695 MEM_READONLY_P (rtl) = 1;
19696 }
19697 }
19698 /* Other aggregates, and complex values, could be represented using
19699 CONCAT: FIXME! */
19700 else if (AGGREGATE_TYPE_P (type)
19701 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19702 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19703 || TREE_CODE (type) == COMPLEX_TYPE)
19704 ;
19705 /* Vectors only work if their mode is supported by the target.
19706 FIXME: generic vectors ought to work too. */
19707 else if (TREE_CODE (type) == VECTOR_TYPE
19708 && !VECTOR_MODE_P (TYPE_MODE (type)))
19709 ;
19710 /* If the initializer is something that we know will expand into an
19711 immediate RTL constant, expand it now. We must be careful not to
19712 reference variables which won't be output. */
19713 else if (initializer_constant_valid_p (init, type)
19714 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19715 {
19716 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19717 possible. */
19718 if (TREE_CODE (type) == VECTOR_TYPE)
19719 switch (TREE_CODE (init))
19720 {
19721 case VECTOR_CST:
19722 break;
19723 case CONSTRUCTOR:
19724 if (TREE_CONSTANT (init))
19725 {
19726 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19727 bool constant_p = true;
19728 tree value;
19729 unsigned HOST_WIDE_INT ix;
19730
19731 /* Even when ctor is constant, it might contain non-*_CST
19732 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19733 belong in VECTOR_CST nodes. */
19734 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19735 if (!CONSTANT_CLASS_P (value))
19736 {
19737 constant_p = false;
19738 break;
19739 }
19740
19741 if (constant_p)
19742 {
19743 init = build_vector_from_ctor (type, elts);
19744 break;
19745 }
19746 }
19747 /* FALLTHRU */
19748
19749 default:
19750 return NULL;
19751 }
19752
19753 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19754
19755 /* If expand_expr returns a MEM, it wasn't immediate. */
19756 gcc_assert (!rtl || !MEM_P (rtl));
19757 }
19758
19759 return rtl;
19760 }
19761
19762 /* Generate RTL for the variable DECL to represent its location. */
19763
19764 static rtx
19765 rtl_for_decl_location (tree decl)
19766 {
19767 rtx rtl;
19768
19769 /* Here we have to decide where we are going to say the parameter "lives"
19770 (as far as the debugger is concerned). We only have a couple of
19771 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19772
19773 DECL_RTL normally indicates where the parameter lives during most of the
19774 activation of the function. If optimization is enabled however, this
19775 could be either NULL or else a pseudo-reg. Both of those cases indicate
19776 that the parameter doesn't really live anywhere (as far as the code
19777 generation parts of GCC are concerned) during most of the function's
19778 activation. That will happen (for example) if the parameter is never
19779 referenced within the function.
19780
19781 We could just generate a location descriptor here for all non-NULL
19782 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19783 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19784 where DECL_RTL is NULL or is a pseudo-reg.
19785
19786 Note however that we can only get away with using DECL_INCOMING_RTL as
19787 a backup substitute for DECL_RTL in certain limited cases. In cases
19788 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19789 we can be sure that the parameter was passed using the same type as it is
19790 declared to have within the function, and that its DECL_INCOMING_RTL
19791 points us to a place where a value of that type is passed.
19792
19793 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19794 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19795 because in these cases DECL_INCOMING_RTL points us to a value of some
19796 type which is *different* from the type of the parameter itself. Thus,
19797 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19798 such cases, the debugger would end up (for example) trying to fetch a
19799 `float' from a place which actually contains the first part of a
19800 `double'. That would lead to really incorrect and confusing
19801 output at debug-time.
19802
19803 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19804 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19805 are a couple of exceptions however. On little-endian machines we can
19806 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19807 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19808 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19809 when (on a little-endian machine) a non-prototyped function has a
19810 parameter declared to be of type `short' or `char'. In such cases,
19811 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19812 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19813 passed `int' value. If the debugger then uses that address to fetch
19814 a `short' or a `char' (on a little-endian machine) the result will be
19815 the correct data, so we allow for such exceptional cases below.
19816
19817 Note that our goal here is to describe the place where the given formal
19818 parameter lives during most of the function's activation (i.e. between the
19819 end of the prologue and the start of the epilogue). We'll do that as best
19820 as we can. Note however that if the given formal parameter is modified
19821 sometime during the execution of the function, then a stack backtrace (at
19822 debug-time) will show the function as having been called with the *new*
19823 value rather than the value which was originally passed in. This happens
19824 rarely enough that it is not a major problem, but it *is* a problem, and
19825 I'd like to fix it.
19826
19827 A future version of dwarf2out.c may generate two additional attributes for
19828 any given DW_TAG_formal_parameter DIE which will describe the "passed
19829 type" and the "passed location" for the given formal parameter in addition
19830 to the attributes we now generate to indicate the "declared type" and the
19831 "active location" for each parameter. This additional set of attributes
19832 could be used by debuggers for stack backtraces. Separately, note that
19833 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19834 This happens (for example) for inlined-instances of inline function formal
19835 parameters which are never referenced. This really shouldn't be
19836 happening. All PARM_DECL nodes should get valid non-NULL
19837 DECL_INCOMING_RTL values. FIXME. */
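
/* As a concrete illustration of the exceptional case above (hypothetical
   code, little-endian target assumed): for an old-style definition such as
   `void f (c) char c; { ... }' the argument is promoted, so TREE_TYPE (decl)
   is `char' while DECL_ARG_TYPE (decl) is `int'; DECL_INCOMING_RTL
   nonetheless addresses the byte holding the correct `char' value, which is
   why such cases are accepted below.  */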
19838
19839 /* Use DECL_RTL as the "location" unless we find something better. */
19840 rtl = DECL_RTL_IF_SET (decl);
19841
19842 /* When generating abstract instances, ignore everything except
19843 constants, symbols living in memory, and symbols living in
19844 fixed registers. */
19845 if (! reload_completed)
19846 {
19847 if (rtl
19848 && (CONSTANT_P (rtl)
19849 || (MEM_P (rtl)
19850 && CONSTANT_P (XEXP (rtl, 0)))
19851 || (REG_P (rtl)
19852 && VAR_P (decl)
19853 && TREE_STATIC (decl))))
19854 {
19855 rtl = targetm.delegitimize_address (rtl);
19856 return rtl;
19857 }
19858 rtl = NULL_RTX;
19859 }
19860 else if (TREE_CODE (decl) == PARM_DECL)
19861 {
19862 if (rtl == NULL_RTX
19863 || is_pseudo_reg (rtl)
19864 || (MEM_P (rtl)
19865 && is_pseudo_reg (XEXP (rtl, 0))
19866 && DECL_INCOMING_RTL (decl)
19867 && MEM_P (DECL_INCOMING_RTL (decl))
19868 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19869 {
19870 tree declared_type = TREE_TYPE (decl);
19871 tree passed_type = DECL_ARG_TYPE (decl);
19872 machine_mode dmode = TYPE_MODE (declared_type);
19873 machine_mode pmode = TYPE_MODE (passed_type);
19874
19875 /* This decl represents a formal parameter which was optimized out.
19876 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19877 all cases where (rtl == NULL_RTX) just below. */
19878 if (dmode == pmode)
19879 rtl = DECL_INCOMING_RTL (decl);
19880 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19881 && SCALAR_INT_MODE_P (dmode)
19882 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19883 && DECL_INCOMING_RTL (decl))
19884 {
19885 rtx inc = DECL_INCOMING_RTL (decl);
19886 if (REG_P (inc))
19887 rtl = inc;
19888 else if (MEM_P (inc))
19889 {
19890 if (BYTES_BIG_ENDIAN)
19891 rtl = adjust_address_nv (inc, dmode,
19892 GET_MODE_SIZE (pmode)
19893 - GET_MODE_SIZE (dmode));
19894 else
19895 rtl = inc;
19896 }
19897 }
19898 }
19899
19900 /* If the parm was passed in registers, but lives on the stack, then
19901 make a big endian correction if the mode of the type of the
19902 parameter is not the same as the mode of the rtl. */
19903 /* ??? This is the same series of checks that are made in dbxout.c before
19904 we reach the big endian correction code there. It isn't clear if all
19905 of these checks are necessary here, but keeping them all is the safe
19906 thing to do. */
19907 else if (MEM_P (rtl)
19908 && XEXP (rtl, 0) != const0_rtx
19909 && ! CONSTANT_P (XEXP (rtl, 0))
19910 /* Not passed in memory. */
19911 && !MEM_P (DECL_INCOMING_RTL (decl))
19912 /* Not passed by invisible reference. */
19913 && (!REG_P (XEXP (rtl, 0))
19914 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19915 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19916 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19917 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19918 #endif
19919 )
19920 /* Big endian correction check. */
19921 && BYTES_BIG_ENDIAN
19922 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19923 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19924 UNITS_PER_WORD))
19925 {
19926 machine_mode addr_mode = get_address_mode (rtl);
19927 poly_int64 offset = (UNITS_PER_WORD
19928 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19929
19930 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19931 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19932 }
19933 }
19934 else if (VAR_P (decl)
19935 && rtl
19936 && MEM_P (rtl)
19937 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19938 {
19939 machine_mode addr_mode = get_address_mode (rtl);
19940 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19941 GET_MODE (rtl));
19942
19943 /* If a variable is declared "register" yet is smaller than
19944 a register, then if we store the variable to memory, it
19945 looks like we're storing a register-sized value, when in
19946 fact we are not. We need to adjust the offset of the
19947 storage location to reflect the actual value's bytes,
19948 else gdb will not be able to display it. */
19949 if (maybe_ne (offset, 0))
19950 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19951 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19952 }
19953
19954 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19955 and will have been substituted directly into all expressions that use it.
19956 C does not have such a concept, but C++ and other languages do. */
19957 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19958 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19959
19960 if (rtl)
19961 rtl = targetm.delegitimize_address (rtl);
19962
19963 /* If we don't look past the constant pool, we risk emitting a
19964 reference to a constant pool entry that isn't referenced from
19965 code, and thus is not emitted. */
19966 if (rtl)
19967 rtl = avoid_constant_pool_reference (rtl);
19968
19969 /* Try harder to get a rtl. If this symbol ends up not being emitted
19970 in the current CU, resolve_addr will remove the expression referencing
19971 it. */
19972 if (rtl == NULL_RTX
19973 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19974 && VAR_P (decl)
19975 && !DECL_EXTERNAL (decl)
19976 && TREE_STATIC (decl)
19977 && DECL_NAME (decl)
19978 && !DECL_HARD_REGISTER (decl)
19979 && DECL_MODE (decl) != VOIDmode)
19980 {
19981 rtl = make_decl_rtl_for_debug (decl);
19982 if (!MEM_P (rtl)
19983 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19984 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19985 rtl = NULL_RTX;
19986 }
19987
19988 return rtl;
19989 }
19990
19991 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19992 returned. If so, the decl for the COMMON block is returned, and the
19993 value is the offset into the common block for the symbol. */
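/* For instance (illustrative): given `REAL A, B' in `COMMON /WORK/ A, B'
   with 4-byte REALs, calling this on the decl for B would return the decl
   representing the WORK common block and set *VALUE to 4.  */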
19994
19995 static tree
19996 fortran_common (tree decl, HOST_WIDE_INT *value)
19997 {
19998 tree val_expr, cvar;
19999 machine_mode mode;
20000 poly_int64 bitsize, bitpos;
20001 tree offset;
20002 HOST_WIDE_INT cbitpos;
20003 int unsignedp, reversep, volatilep = 0;
20004
20005 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20006 it does not have a value (the offset into the common area), or if it
20007 is thread local (as opposed to global) then it isn't common, and shouldn't
20008 be handled as such. */
20009 if (!VAR_P (decl)
20010 || !TREE_STATIC (decl)
20011 || !DECL_HAS_VALUE_EXPR_P (decl)
20012 || !is_fortran ())
20013 return NULL_TREE;
20014
20015 val_expr = DECL_VALUE_EXPR (decl);
20016 if (TREE_CODE (val_expr) != COMPONENT_REF)
20017 return NULL_TREE;
20018
20019 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20020 &unsignedp, &reversep, &volatilep);
20021
20022 if (cvar == NULL_TREE
20023 || !VAR_P (cvar)
20024 || DECL_ARTIFICIAL (cvar)
20025 || !TREE_PUBLIC (cvar)
20026 /* We don't expect to have to cope with variable offsets,
20027 since at present all static data must have a constant size. */
20028 || !bitpos.is_constant (&cbitpos))
20029 return NULL_TREE;
20030
20031 *value = 0;
20032 if (offset != NULL)
20033 {
20034 if (!tree_fits_shwi_p (offset))
20035 return NULL_TREE;
20036 *value = tree_to_shwi (offset);
20037 }
20038 if (cbitpos != 0)
20039 *value += cbitpos / BITS_PER_UNIT;
20040
20041 return cvar;
20042 }
20043
20044 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20045 data attribute for a variable or a parameter. We generate the
20046 DW_AT_const_value attribute only in those cases where the given variable
20047 or parameter does not have a true "location" either in memory or in a
20048 register. This can happen (for example) when a constant is passed as an
20049 actual argument in a call to an inline function. (It's possible that
20050 these things can crop up in other ways also.) Note that one type of
20051 constant value which can be passed into an inlined function is a constant
20052 pointer. This can happen for example if an actual argument in an inlined
20053 function call evaluates to a compile-time constant address.
20054
20055 CACHE_P is true if it is worth caching the location list for DECL,
20056 so that future calls can reuse it rather than regenerate it from scratch.
20057 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20058 since we will need to refer to them each time the function is inlined. */
20059
20060 static bool
20061 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20062 {
20063 rtx rtl;
20064 dw_loc_list_ref list;
20065 var_loc_list *loc_list;
20066 cached_dw_loc_list *cache;
20067
20068 if (early_dwarf)
20069 return false;
20070
20071 if (TREE_CODE (decl) == ERROR_MARK)
20072 return false;
20073
20074 if (get_AT (die, DW_AT_location)
20075 || get_AT (die, DW_AT_const_value))
20076 return true;
20077
20078 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20079 || TREE_CODE (decl) == RESULT_DECL);
20080
20081 /* Try to get some constant RTL for this decl, and use that as the value of
20082 the location. */
20083
20084 rtl = rtl_for_decl_location (decl);
20085 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20086 && add_const_value_attribute (die, rtl))
20087 return true;
20088
20089 /* See if we have a single-element location list that is equivalent to
20090 a constant value. In that case it is better to use add_const_value_attribute
20091 rather than expanding the constant value equivalent. */
20092 loc_list = lookup_decl_loc (decl);
20093 if (loc_list
20094 && loc_list->first
20095 && loc_list->first->next == NULL
20096 && NOTE_P (loc_list->first->loc)
20097 && NOTE_VAR_LOCATION (loc_list->first->loc)
20098 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20099 {
20100 struct var_loc_node *node;
20101
20102 node = loc_list->first;
20103 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20104 if (GET_CODE (rtl) == EXPR_LIST)
20105 rtl = XEXP (rtl, 0);
20106 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20107 && add_const_value_attribute (die, rtl))
20108 return true;
20109 }
20110 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20111 list several times. See if we've already cached the contents. */
20112 list = NULL;
20113 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20114 cache_p = false;
20115 if (cache_p)
20116 {
20117 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20118 if (cache)
20119 list = cache->loc_list;
20120 }
20121 if (list == NULL)
20122 {
20123 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20124 NULL);
20125 /* It is usually worth caching this result if the decl is from
20126 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20127 if (cache_p && list && list->dw_loc_next)
20128 {
20129 cached_dw_loc_list **slot
20130 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20131 DECL_UID (decl),
20132 INSERT);
20133 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20134 cache->decl_id = DECL_UID (decl);
20135 cache->loc_list = list;
20136 *slot = cache;
20137 }
20138 }
20139 if (list)
20140 {
20141 add_AT_location_description (die, DW_AT_location, list);
20142 return true;
20143 }
20144 /* None of that worked, so it must not really have a location;
20145 try adding a constant value attribute from the DECL_INITIAL. */
20146 return tree_add_const_value_attribute_for_decl (die, decl);
20147 }
20148
20149 /* Helper function for tree_add_const_value_attribute. Natively encode
20150 initializer INIT into an array. Return true if successful. */
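/* A usage sketch (illustrative; assumes a 32-bit little-endian target):
   encoding `int a[3] = { 1, 2 };' into a 12-byte array yields the bytes
   01 00 00 00  02 00 00 00  00 00 00 00, the trailing element coming
   from the memset performed for the array case below.  */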
20151
20152 static bool
20153 native_encode_initializer (tree init, unsigned char *array, int size)
20154 {
20155 tree type;
20156
20157 if (init == NULL_TREE)
20158 return false;
20159
20160 STRIP_NOPS (init);
20161 switch (TREE_CODE (init))
20162 {
20163 case STRING_CST:
20164 type = TREE_TYPE (init);
20165 if (TREE_CODE (type) == ARRAY_TYPE)
20166 {
20167 tree enttype = TREE_TYPE (type);
20168 scalar_int_mode mode;
20169
20170 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20171 || GET_MODE_SIZE (mode) != 1)
20172 return false;
20173 if (int_size_in_bytes (type) != size)
20174 return false;
20175 if (size > TREE_STRING_LENGTH (init))
20176 {
20177 memcpy (array, TREE_STRING_POINTER (init),
20178 TREE_STRING_LENGTH (init));
20179 memset (array + TREE_STRING_LENGTH (init),
20180 '\0', size - TREE_STRING_LENGTH (init));
20181 }
20182 else
20183 memcpy (array, TREE_STRING_POINTER (init), size);
20184 return true;
20185 }
20186 return false;
20187 case CONSTRUCTOR:
20188 type = TREE_TYPE (init);
20189 if (int_size_in_bytes (type) != size)
20190 return false;
20191 if (TREE_CODE (type) == ARRAY_TYPE)
20192 {
20193 HOST_WIDE_INT min_index;
20194 unsigned HOST_WIDE_INT cnt;
20195 int curpos = 0, fieldsize;
20196 constructor_elt *ce;
20197
20198 if (TYPE_DOMAIN (type) == NULL_TREE
20199 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20200 return false;
20201
20202 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20203 if (fieldsize <= 0)
20204 return false;
20205
20206 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20207 memset (array, '\0', size);
20208 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20209 {
20210 tree val = ce->value;
20211 tree index = ce->index;
20212 int pos = curpos;
20213 if (index && TREE_CODE (index) == RANGE_EXPR)
20214 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20215 * fieldsize;
20216 else if (index)
20217 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20218
20219 if (val)
20220 {
20221 STRIP_NOPS (val);
20222 if (!native_encode_initializer (val, array + pos, fieldsize))
20223 return false;
20224 }
20225 curpos = pos + fieldsize;
20226 if (index && TREE_CODE (index) == RANGE_EXPR)
20227 {
20228 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20229 - tree_to_shwi (TREE_OPERAND (index, 0));
20230 while (count-- > 0)
20231 {
20232 if (val)
20233 memcpy (array + curpos, array + pos, fieldsize);
20234 curpos += fieldsize;
20235 }
20236 }
20237 gcc_assert (curpos <= size);
20238 }
20239 return true;
20240 }
20241 else if (TREE_CODE (type) == RECORD_TYPE
20242 || TREE_CODE (type) == UNION_TYPE)
20243 {
20244 tree field = NULL_TREE;
20245 unsigned HOST_WIDE_INT cnt;
20246 constructor_elt *ce;
20247
20248 if (int_size_in_bytes (type) != size)
20249 return false;
20250
20251 if (TREE_CODE (type) == RECORD_TYPE)
20252 field = TYPE_FIELDS (type);
20253
20254 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20255 {
20256 tree val = ce->value;
20257 int pos, fieldsize;
20258
20259 if (ce->index != 0)
20260 field = ce->index;
20261
20262 if (val)
20263 STRIP_NOPS (val);
20264
20265 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20266 return false;
20267
20268 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20269 && TYPE_DOMAIN (TREE_TYPE (field))
20270 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20271 return false;
20272 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20273 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20274 return false;
20275 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20276 pos = int_byte_position (field);
20277 gcc_assert (pos + fieldsize <= size);
20278 if (val && fieldsize != 0
20279 && !native_encode_initializer (val, array + pos, fieldsize))
20280 return false;
20281 }
20282 return true;
20283 }
20284 return false;
20285 case VIEW_CONVERT_EXPR:
20286 case NON_LVALUE_EXPR:
20287 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20288 default:
20289 return native_encode_expr (init, array, size) == size;
20290 }
20291 }
20292
20293 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20294 attribute is the const value T. */
20295
20296 static bool
20297 tree_add_const_value_attribute (dw_die_ref die, tree t)
20298 {
20299 tree init;
20300 tree type = TREE_TYPE (t);
20301 rtx rtl;
20302
20303 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20304 return false;
20305
20306 init = t;
20307 gcc_assert (!DECL_P (init));
20308
20309 if (TREE_CODE (init) == INTEGER_CST)
20310 {
20311 if (tree_fits_uhwi_p (init))
20312 {
20313 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20314 return true;
20315 }
20316 if (tree_fits_shwi_p (init))
20317 {
20318 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20319 return true;
20320 }
20321 }
20322 if (! early_dwarf)
20323 {
20324 rtl = rtl_for_decl_init (init, type);
20325 if (rtl)
20326 return add_const_value_attribute (die, rtl);
20327 }
20328 /* If the host and target are sane, try harder. */
20329 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20330 && initializer_constant_valid_p (init, type))
20331 {
20332 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20333 if (size > 0 && (int) size == size)
20334 {
20335 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20336
20337 if (native_encode_initializer (init, array, size))
20338 {
20339 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20340 return true;
20341 }
20342 ggc_free (array);
20343 }
20344 }
20345 return false;
20346 }
20347
20348 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20349 attribute is the const value of T, where T is an integral constant
20350 variable with static storage duration
20351 (so it can't be a PARM_DECL or a RESULT_DECL). */
20352
20353 static bool
20354 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20355 {
20356
20357 if (!decl
20358 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20359 || (VAR_P (decl) && !TREE_STATIC (decl)))
20360 return false;
20361
20362 if (TREE_READONLY (decl)
20363 && ! TREE_THIS_VOLATILE (decl)
20364 && DECL_INITIAL (decl))
20365 /* OK */;
20366 else
20367 return false;
20368
20369 /* Don't add DW_AT_const_value if abstract origin already has one. */
20370 if (get_AT (var_die, DW_AT_const_value))
20371 return false;
20372
20373 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20374 }
20375
20376 /* Convert the CFI instructions for the current function into a
20377 location list. This is used for DW_AT_frame_base when we are targeting
20378 a dwarf2 consumer that does not support the dwarf3
20379 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20380 expressions. */
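/* A rough sketch of the result (illustrative; x86_64 DWARF register
   numbers assumed): for a function whose CFA is SP+8 on entry and FP+16
   after the prologue, and with OFFSET 0, the list would pair the range
   from the function start up to the first advance with the expression
   DW_OP_breg7 8, and the remaining range with DW_OP_breg6 16.  */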
20381
20382 static dw_loc_list_ref
20383 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20384 {
20385 int ix;
20386 dw_fde_ref fde;
20387 dw_loc_list_ref list, *list_tail;
20388 dw_cfi_ref cfi;
20389 dw_cfa_location last_cfa, next_cfa;
20390 const char *start_label, *last_label, *section;
20391 dw_cfa_location remember;
20392
20393 fde = cfun->fde;
20394 gcc_assert (fde != NULL);
20395
20396 section = secname_for_decl (current_function_decl);
20397 list_tail = &list;
20398 list = NULL;
20399
20400 memset (&next_cfa, 0, sizeof (next_cfa));
20401 next_cfa.reg = INVALID_REGNUM;
20402 remember = next_cfa;
20403
20404 start_label = fde->dw_fde_begin;
20405
20406 /* ??? Bald assumption that the CIE opcode list does not contain
20407 advance opcodes. */
20408 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20409 lookup_cfa_1 (cfi, &next_cfa, &remember);
20410
20411 last_cfa = next_cfa;
20412 last_label = start_label;
20413
20414 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20415 {
20416 /* If the first partition contained no CFI adjustments, the
20417 CIE opcodes apply to the whole first partition. */
20418 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20419 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20420 list_tail = &(*list_tail)->dw_loc_next;
20421 start_label = last_label = fde->dw_fde_second_begin;
20422 }
20423
20424 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20425 {
20426 switch (cfi->dw_cfi_opc)
20427 {
20428 case DW_CFA_set_loc:
20429 case DW_CFA_advance_loc1:
20430 case DW_CFA_advance_loc2:
20431 case DW_CFA_advance_loc4:
20432 if (!cfa_equal_p (&last_cfa, &next_cfa))
20433 {
20434 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20435 start_label, 0, last_label, 0, section);
20436
20437 list_tail = &(*list_tail)->dw_loc_next;
20438 last_cfa = next_cfa;
20439 start_label = last_label;
20440 }
20441 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20442 break;
20443
20444 case DW_CFA_advance_loc:
20445 /* The encoding is complex enough that we should never emit this. */
20446 gcc_unreachable ();
20447
20448 default:
20449 lookup_cfa_1 (cfi, &next_cfa, &remember);
20450 break;
20451 }
20452 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20453 {
20454 if (!cfa_equal_p (&last_cfa, &next_cfa))
20455 {
20456 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20457 start_label, 0, last_label, 0, section);
20458
20459 list_tail = &(*list_tail)->dw_loc_next;
20460 last_cfa = next_cfa;
20461 start_label = last_label;
20462 }
20463 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20464 start_label, 0, fde->dw_fde_end, 0, section);
20465 list_tail = &(*list_tail)->dw_loc_next;
20466 start_label = last_label = fde->dw_fde_second_begin;
20467 }
20468 }
20469
20470 if (!cfa_equal_p (&last_cfa, &next_cfa))
20471 {
20472 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20473 start_label, 0, last_label, 0, section);
20474 list_tail = &(*list_tail)->dw_loc_next;
20475 start_label = last_label;
20476 }
20477
20478 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20479 start_label, 0,
20480 fde->dw_fde_second_begin
20481 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20482 section);
20483
20484 maybe_gen_llsym (list);
20485
20486 return list;
20487 }
20488
20489 /* Compute a displacement from the "steady-state frame pointer" to the
20490 frame base (often the same as the CFA), and store it in
20491 frame_pointer_fb_offset. OFFSET is added to the displacement
20492 before the latter is negated. */
20493
20494 static void
20495 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20496 {
20497 rtx reg, elim;
20498
20499 #ifdef FRAME_POINTER_CFA_OFFSET
20500 reg = frame_pointer_rtx;
20501 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20502 #else
20503 reg = arg_pointer_rtx;
20504 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20505 #endif
20506
20507 elim = (ira_use_lra_p
20508 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20509 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20510 elim = strip_offset_and_add (elim, &offset);
20511
20512 frame_pointer_fb_offset = -offset;
20513
20514 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20515 in which to eliminate. This is because its stack pointer isn't
20516 directly accessible as a register within the ISA. To work around
20517 this, assume that while we cannot provide a proper value for
20518 frame_pointer_fb_offset, we won't need one either. We can use
20519 hard frame pointer in debug info even if frame pointer isn't used
20520 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20521 which uses the DW_AT_frame_base attribute, not hard frame pointer
20522 directly. */
20523 frame_pointer_fb_offset_valid
20524 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20525 }
20526
20527 /* Generate a DW_AT_name attribute given some string value to be included as
20528 the value of the attribute. */
20529
20530 static void
20531 add_name_attribute (dw_die_ref die, const char *name_string)
20532 {
20533 if (name_string != NULL && *name_string != 0)
20534 {
20535 if (demangle_name_func)
20536 name_string = (*demangle_name_func) (name_string);
20537
20538 add_AT_string (die, DW_AT_name, name_string);
20539 }
20540 }
20541
20542 /* Generate a DW_AT_description attribute given some string value to be included
20543 as the value of the attribute. */
20544
20545 static void
20546 add_desc_attribute (dw_die_ref die, const char *name_string)
20547 {
20548 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20549 return;
20550
20551 if (name_string == NULL || *name_string == 0)
20552 return;
20553
20554 if (demangle_name_func)
20555 name_string = (*demangle_name_func) (name_string);
20556
20557 add_AT_string (die, DW_AT_description, name_string);
20558 }
20559
20560 /* Generate a DW_AT_description attribute given some decl to be included
20561 as the value of the attribute. */
20562
20563 static void
20564 add_desc_attribute (dw_die_ref die, tree decl)
20565 {
20566 tree decl_name;
20567
20568 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20569 return;
20570
20571 if (decl == NULL_TREE || !DECL_P (decl))
20572 return;
20573 decl_name = DECL_NAME (decl);
20574
20575 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20576 {
20577 const char *name = dwarf2_name (decl, 0);
20578 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20579 }
20580 else
20581 {
20582 char *desc = print_generic_expr_to_str (decl);
20583 add_desc_attribute (die, desc);
20584 free (desc);
20585 }
20586 }
20587
20588 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20589 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20590 of TYPE accordingly.
20591
20592 ??? This is a temporary measure until after we're able to generate
20593 regular DWARF for the complex Ada type system. */
20594
20595 static void
20596 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20597 dw_die_ref context_die)
20598 {
20599 tree dtype;
20600 dw_die_ref dtype_die;
20601
20602 if (!lang_hooks.types.descriptive_type)
20603 return;
20604
20605 dtype = lang_hooks.types.descriptive_type (type);
20606 if (!dtype)
20607 return;
20608
20609 dtype_die = lookup_type_die (dtype);
20610 if (!dtype_die)
20611 {
20612 gen_type_die (dtype, context_die);
20613 dtype_die = lookup_type_die (dtype);
20614 gcc_assert (dtype_die);
20615 }
20616
20617 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20618 }
20619
20620 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20621
20622 static const char *
20623 comp_dir_string (void)
20624 {
20625 const char *wd;
20626 char *wd1;
20627 static const char *cached_wd = NULL;
20628
20629 if (cached_wd != NULL)
20630 return cached_wd;
20631
20632 wd = get_src_pwd ();
20633 if (wd == NULL)
20634 return NULL;
20635
20636 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20637 {
20638 int wdlen;
20639
20640 wdlen = strlen (wd);
20641 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20642 strcpy (wd1, wd);
20643 wd1 [wdlen] = DIR_SEPARATOR;
20644 wd1 [wdlen + 1] = 0;
20645 wd = wd1;
20646 }
20647
20648 cached_wd = remap_debug_filename (wd);
20649 return cached_wd;
20650 }
20651
20652 /* Generate a DW_AT_comp_dir attribute for DIE. */
20653
20654 static void
20655 add_comp_dir_attribute (dw_die_ref die)
20656 {
20657 const char * wd = comp_dir_string ();
20658 if (wd != NULL)
20659 add_AT_string (die, DW_AT_comp_dir, wd);
20660 }
20661
20662 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20663 pointer computation, ...), output a representation for that bound according
20664 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20665 loc_list_from_tree for the meaning of CONTEXT. */
20666
20667 static void
20668 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20669 int forms, struct loc_descr_context *context)
20670 {
20671 dw_die_ref context_die, decl_die = NULL;
20672 dw_loc_list_ref list;
20673 bool strip_conversions = true;
20674 bool placeholder_seen = false;
20675
20676 while (strip_conversions)
20677 switch (TREE_CODE (value))
20678 {
20679 case ERROR_MARK:
20680 case SAVE_EXPR:
20681 return;
20682
20683 CASE_CONVERT:
20684 case VIEW_CONVERT_EXPR:
20685 value = TREE_OPERAND (value, 0);
20686 break;
20687
20688 default:
20689 strip_conversions = false;
20690 break;
20691 }
20692
20693 /* If possible and permitted, output the attribute as a constant. */
20694 if ((forms & dw_scalar_form_constant) != 0
20695 && TREE_CODE (value) == INTEGER_CST)
20696 {
20697 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20698
20699 /* If HOST_WIDE_INT is big enough then represent the bound as
20700 a constant value. We need to choose a form based on
20701 whether the type is signed or unsigned. We cannot just
20702 call add_AT_unsigned if the value itself is positive
20703 (add_AT_unsigned might add the unsigned value encoded as
20704 DW_FORM_data[1248]). Some DWARF consumers will look up the
20705 bounds type and then sign extend any unsigned values found
20706 for signed types. This is needed only for
20707 DW_AT_{lower,upper}_bound, since for most other attributes,
20708 consumers will treat DW_FORM_data[1248] as unsigned values,
20709 regardless of the underlying type. */
20710 if (prec <= HOST_BITS_PER_WIDE_INT
20711 || tree_fits_uhwi_p (value))
20712 {
20713 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20714 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20715 else
20716 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20717 }
20718 else
20719 /* Otherwise represent the bound as an unsigned value with
20720 the precision of its type. The precision and signedness
20721 of the type will be necessary to re-interpret it
20722 unambiguously. */
20723 add_AT_wide (die, attr, wi::to_wide (value));
20724 return;
20725 }
20726
20727 /* Otherwise, if it's possible and permitted too, output a reference to
20728 another DIE. */
20729 if ((forms & dw_scalar_form_reference) != 0)
20730 {
20731 tree decl = NULL_TREE;
20732
20733 /* Some type attributes reference an outer type. For instance, the upper
20734 bound of an array may reference an embedding record (this happens in
20735 Ada). */
20736 if (TREE_CODE (value) == COMPONENT_REF
20737 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20738 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20739 decl = TREE_OPERAND (value, 1);
20740
20741 else if (VAR_P (value)
20742 || TREE_CODE (value) == PARM_DECL
20743 || TREE_CODE (value) == RESULT_DECL)
20744 decl = value;
20745
20746 if (decl != NULL_TREE)
20747 {
20748 decl_die = lookup_decl_die (decl);
20749
20750 /* ??? Can this happen, or should the variable have been bound
20751 first? Probably it can, since I imagine that we try to create
20752 the types of parameters in the order in which they exist in
20753 the list, and won't have created a forward reference to a
20754 later parameter. */
20755 if (decl_die != NULL)
20756 {
20757 if (get_AT (decl_die, DW_AT_location)
20758 || get_AT (decl_die, DW_AT_const_value))
20759 {
20760 add_AT_die_ref (die, attr, decl_die);
20761 return;
20762 }
20763 }
20764 }
20765 }
20766
20767 /* Last chance: try to create a stack operation procedure to evaluate the
20768 value. Do nothing if even that is not possible or permitted. */
20769 if ((forms & dw_scalar_form_exprloc) == 0)
20770 return;
20771
20772 list = loc_list_from_tree (value, 2, context);
20773 if (context && context->placeholder_arg)
20774 {
20775 placeholder_seen = context->placeholder_seen;
20776 context->placeholder_seen = false;
20777 }
20778 if (list == NULL || single_element_loc_list_p (list))
20779 {
20780 /* If this attribute is neither a reference nor a constant, it is
20781 a DWARF expression rather than a location description. For that,
20782 loc_list_from_tree (value, 0, context) is needed. */
20783 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20784 if (list2 && single_element_loc_list_p (list2))
20785 {
20786 if (placeholder_seen)
20787 {
20788 struct dwarf_procedure_info dpi;
20789 dpi.fndecl = NULL_TREE;
20790 dpi.args_count = 1;
20791 if (!resolve_args_picking (list2->expr, 1, &dpi))
20792 return;
20793 }
20794 add_AT_loc (die, attr, list2->expr);
20795 return;
20796 }
20797 }
20798
20799 /* If that failed to give a single element location list, fall back to
20800 outputting this as a reference... still if permitted. */
20801 if (list == NULL
20802 || (forms & dw_scalar_form_reference) == 0
20803 || placeholder_seen)
20804 return;
20805
20806 if (!decl_die)
20807 {
20808 if (current_function_decl == 0)
20809 context_die = comp_unit_die ();
20810 else
20811 context_die = lookup_decl_die (current_function_decl);
20812
20813 decl_die = new_die (DW_TAG_variable, context_die, value);
20814 add_AT_flag (decl_die, DW_AT_artificial, 1);
20815 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20816 context_die);
20817 }
20818
20819 add_AT_location_description (decl_die, DW_AT_location, list);
20820 add_AT_die_ref (die, attr, decl_die);
20821 }
20822
20823 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20824 default. */
20825
20826 static int
20827 lower_bound_default (void)
20828 {
20829 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20830 {
20831 case DW_LANG_C:
20832 case DW_LANG_C89:
20833 case DW_LANG_C99:
20834 case DW_LANG_C11:
20835 case DW_LANG_C_plus_plus:
20836 case DW_LANG_C_plus_plus_11:
20837 case DW_LANG_C_plus_plus_14:
20838 case DW_LANG_ObjC:
20839 case DW_LANG_ObjC_plus_plus:
20840 return 0;
20841 case DW_LANG_Fortran77:
20842 case DW_LANG_Fortran90:
20843 case DW_LANG_Fortran95:
20844 case DW_LANG_Fortran03:
20845 case DW_LANG_Fortran08:
20846 return 1;
20847 case DW_LANG_UPC:
20848 case DW_LANG_D:
20849 case DW_LANG_Python:
20850 return dwarf_version >= 4 ? 0 : -1;
20851 case DW_LANG_Ada95:
20852 case DW_LANG_Ada83:
20853 case DW_LANG_Cobol74:
20854 case DW_LANG_Cobol85:
20855 case DW_LANG_Modula2:
20856 case DW_LANG_PLI:
20857 return dwarf_version >= 4 ? 1 : -1;
20858 default:
20859 return -1;
20860 }
20861 }
20862
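/* Illustrative example (added commentary, not part of the original source;
   the declaration below is hypothetical): for a compilation unit whose
   DW_AT_language is DW_LANG_C99, lower_bound_default () returns 0, so
   add_bound_info below may omit DW_AT_lower_bound entirely in the common
   case and only emit the upper bound.  */
#if 0
int a[10];   /* TYPE_DOMAIN is [0, 9]; 0 equals the language default, so
                only DW_AT_upper_bound (9) needs to be emitted.  */
#endif
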
20863 /* Given a tree node describing an array bound (either lower or upper) output
20864 a representation for that bound. */
20865
20866 static void
20867 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20868 tree bound, struct loc_descr_context *context)
20869 {
20870 int dflt;
20871
20872 while (1)
20873 switch (TREE_CODE (bound))
20874 {
20875 /* Strip all conversions. */
20876 CASE_CONVERT:
20877 case VIEW_CONVERT_EXPR:
20878 bound = TREE_OPERAND (bound, 0);
20879 break;
20880
20881 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20882 are even omitted when they are the default. */
20883 case INTEGER_CST:
20884 /* If the value for this bound is the default one, we can even omit the
20885 attribute. */
20886 if (bound_attr == DW_AT_lower_bound
20887 && tree_fits_shwi_p (bound)
20888 && (dflt = lower_bound_default ()) != -1
20889 && tree_to_shwi (bound) == dflt)
20890 return;
20891
20892 /* FALLTHRU */
20893
20894 default:
20895 /* Because of the complex interactions there can be with other GNAT
20896 encodings, GDB isn't ready yet to handle a proper DWARF description
20897 for self-referential subrange bounds: let GNAT encodings do the
20898 magic in such a case. */
20899 if (is_ada ()
20900 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20901 && contains_placeholder_p (bound))
20902 return;
20903
20904 add_scalar_info (subrange_die, bound_attr, bound,
20905 dw_scalar_form_constant
20906 | dw_scalar_form_exprloc
20907 | dw_scalar_form_reference,
20908 context);
20909 return;
20910 }
20911 }
20912
20913 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20914 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20915 Note that the block of subscript information for an array type also
20916 includes information about the element type of the given array type.
20917
20918 This function reuses previously set type and bound information if
20919 available. */
20920
20921 static void
20922 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20923 {
20924 unsigned dimension_number;
20925 tree lower, upper;
20926 dw_die_ref child = type_die->die_child;
20927
20928 for (dimension_number = 0;
20929 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20930 type = TREE_TYPE (type), dimension_number++)
20931 {
20932 tree domain = TYPE_DOMAIN (type);
20933
20934 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20935 break;
20936
20937 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20938 and (in GNU C only) variable bounds. Handle all three forms
20939 here. */
20940
20941 /* Find and reuse a previously generated DW_TAG_subrange_type if
20942 available.
20943
20944 For multi-dimensional arrays, as we iterate through the
20945 various dimensions in the enclosing for loop above, we also
20946 iterate through the DIE children and pick at each
20947 DW_TAG_subrange_type previously generated (if available).
20948 Each child DW_TAG_subrange_type DIE describes the range of
20949 the current dimension. At this point we should have as many
20950 DW_TAG_subrange_type's as we have dimensions in the
20951 array. */
20952 dw_die_ref subrange_die = NULL;
20953 if (child)
20954 while (1)
20955 {
20956 child = child->die_sib;
20957 if (child->die_tag == DW_TAG_subrange_type)
20958 subrange_die = child;
20959 if (child == type_die->die_child)
20960 {
20961 /* If we wrapped around, stop looking next time. */
20962 child = NULL;
20963 break;
20964 }
20965 if (child->die_tag == DW_TAG_subrange_type)
20966 break;
20967 }
20968 if (!subrange_die)
20969 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20970
20971 if (domain)
20972 {
20973 /* We have an array type with specified bounds. */
20974 lower = TYPE_MIN_VALUE (domain);
20975 upper = TYPE_MAX_VALUE (domain);
20976
20977 /* Define the index type. */
20978 if (TREE_TYPE (domain)
20979 && !get_AT (subrange_die, DW_AT_type))
20980 {
20981 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20982 TREE_TYPE field. We can't emit debug info for this
20983 because it is an unnamed integral type. */
20984 if (TREE_CODE (domain) == INTEGER_TYPE
20985 && TYPE_NAME (domain) == NULL_TREE
20986 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20987 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20988 ;
20989 else
20990 add_type_attribute (subrange_die, TREE_TYPE (domain),
20991 TYPE_UNQUALIFIED, false, type_die);
20992 }
20993
20994 /* ??? If upper is NULL, the array has unspecified length,
20995 but it does have a lower bound. This happens with Fortran
20996 dimension arr(N:*)
20997 Since the debugger is definitely going to need to know N
20998 to produce useful results, go ahead and output the lower
20999 bound solo, and hope the debugger can cope. */
21000
21001 if (!get_AT (subrange_die, DW_AT_lower_bound))
21002 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21003 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
21004 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21005 }
21006
21007 /* Otherwise we have an array type with an unspecified length. The
21008 DWARF-2 spec does not say how to handle this; let's just leave out the
21009 bounds. */
21010 }
21011 }
21012
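/* Illustrative sketch (added commentary; the declaration is hypothetical
   and not part of the original source): with COLLAPSE_P true, a nested C
   array type is described by a single DW_TAG_array_type whose children
   are one DW_TAG_subrange_type per dimension.  */
#if 0
int m[3][5];   /* expected result: one DW_TAG_array_type containing two
                  DW_TAG_subrange_type children with DW_AT_upper_bound 2
                  and 4; DW_AT_lower_bound 0 is omitted because it matches
                  lower_bound_default () for C.  */
#endif
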
21013 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21014
21015 static void
21016 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21017 {
21018 dw_die_ref decl_die;
21019 HOST_WIDE_INT size;
21020 dw_loc_descr_ref size_expr = NULL;
21021
21022 switch (TREE_CODE (tree_node))
21023 {
21024 case ERROR_MARK:
21025 size = 0;
21026 break;
21027 case ENUMERAL_TYPE:
21028 case RECORD_TYPE:
21029 case UNION_TYPE:
21030 case QUAL_UNION_TYPE:
21031 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21032 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21033 {
21034 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21035 return;
21036 }
21037 size_expr = type_byte_size (tree_node, &size);
21038 break;
21039 case FIELD_DECL:
21040 /* For a data member of a struct or union, the DW_AT_byte_size is
21041 generally given as the number of bytes normally allocated for an
21042 object of the *declared* type of the member itself. This is true
21043 even for bit-fields. */
21044 size = int_size_in_bytes (field_type (tree_node));
21045 break;
21046 default:
21047 gcc_unreachable ();
21048 }
21049
21050 /* Support for dynamically-sized objects was introduced by DWARFv3.
21051 At the moment, GDB does not handle variable byte sizes very well,
21052 though. */
21053 if ((dwarf_version >= 3 || !dwarf_strict)
21054 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21055 && size_expr != NULL)
21056 add_AT_loc (die, DW_AT_byte_size, size_expr);
21057
21058 /* Note that `size' might be -1 when we get to this point. If it is, that
21059 indicates that the byte size of the entity in question is variable and
21060 that we could not generate a DWARF expression that computes it. */
21061 if (size >= 0)
21062 add_AT_unsigned (die, DW_AT_byte_size, size);
21063 }
21064
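/* Illustrative note (added commentary; the struct is hypothetical): for a
   bit-field member, the byte size recorded is that of the *declared* type,
   not of the bits actually used.  */
#if 0
struct s1
{
  unsigned char c : 3;   /* DW_AT_byte_size on the member DIE is 1 (the size
                            of unsigned char), even though only 3 bits are
                            actually used.  */
};
#endif
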
21065 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21066 alignment. */
21067
21068 static void
21069 add_alignment_attribute (dw_die_ref die, tree tree_node)
21070 {
21071 if (dwarf_version < 5 && dwarf_strict)
21072 return;
21073
21074 unsigned align;
21075
21076 if (DECL_P (tree_node))
21077 {
21078 if (!DECL_USER_ALIGN (tree_node))
21079 return;
21080
21081 align = DECL_ALIGN_UNIT (tree_node);
21082 }
21083 else if (TYPE_P (tree_node))
21084 {
21085 if (!TYPE_USER_ALIGN (tree_node))
21086 return;
21087
21088 align = TYPE_ALIGN_UNIT (tree_node);
21089 }
21090 else
21091 gcc_unreachable ();
21092
21093 add_AT_unsigned (die, DW_AT_alignment, align);
21094 }
21095
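/* Illustrative sketch (added commentary; the declarations are hypothetical):
   DW_AT_alignment is only emitted for alignments the user requested
   explicitly, and only when not restricted to strict pre-DWARF 5 output.  */
#if 0
_Alignas (16) int x;                    /* DECL_USER_ALIGN set: DW_AT_alignment 16.  */
int y __attribute__ ((aligned (32)));   /* likewise: DW_AT_alignment 32.  */
int z;                                  /* default alignment: no attribute.  */
#endif
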
21096 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21097 which specifies the distance in bits from the highest order bit of the
21098 "containing object" for the bit-field to the highest order bit of the
21099 bit-field itself.
21100
21101 For any given bit-field, the "containing object" is a hypothetical object
21102 (of some integral or enum type) within which the given bit-field lives. The
21103 type of this hypothetical "containing object" is always the same as the
21104 declared type of the individual bit-field itself. The determination of the
21105 exact location of the "containing object" for a bit-field is rather
21106 complicated. It's handled by the `field_byte_offset' function (above).
21107
21108 CTX is required: see the comment for VLR_CONTEXT.
21109
21110 Note that it is the size (in bytes) of the hypothetical "containing object"
21111 which will be given in the DW_AT_byte_size attribute for this bit-field.
21112 (See `add_byte_size_attribute' above). */
21113
21114 static inline void
21115 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21116 {
21117 HOST_WIDE_INT object_offset_in_bytes;
21118 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21119 HOST_WIDE_INT bitpos_int;
21120 HOST_WIDE_INT highest_order_object_bit_offset;
21121 HOST_WIDE_INT highest_order_field_bit_offset;
21122 HOST_WIDE_INT bit_offset;
21123
21124 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21125
21126 /* Must be a field and a bit field. */
21127 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21128
21129 /* We can't yet handle bit-fields whose offsets are variable, so if we
21130 encounter such things, just return without generating any attribute
21131 whatsoever. Likewise for variable or too large size. */
21132 if (! tree_fits_shwi_p (bit_position (decl))
21133 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21134 return;
21135
21136 bitpos_int = int_bit_position (decl);
21137
21138 /* Note that the bit offset is always the distance (in bits) from the
21139 highest-order bit of the "containing object" to the highest-order bit of
21140 the bit-field itself. Since the "high-order end" of any object or field
21141 is different on big-endian and little-endian machines, the computation
21142 below must take account of these differences. */
21143 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21144 highest_order_field_bit_offset = bitpos_int;
21145
21146 if (! BYTES_BIG_ENDIAN)
21147 {
21148 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21149 highest_order_object_bit_offset +=
21150 simple_type_size_in_bits (original_type);
21151 }
21152
21153 bit_offset
21154 = (! BYTES_BIG_ENDIAN
21155 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21156 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21157
21158 if (bit_offset < 0)
21159 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21160 else
21161 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21162 }
21163
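/* Worked example (added for illustration; the structure and its layout are
   hypothetical and target-dependent).  Suppose the bit-field F below ends
   up at bit position 3 within a 4-byte containing object located at byte
   offset 0:

     big-endian:     bit_offset = 3 - 0              = 3
     little-endian:  bit_offset = (0 + 32) - (3 + 5) = 24

   In both cases DW_AT_bit_offset is the distance from the most significant
   bit of the containing object to the most significant bit of the field.  */
#if 0
struct s2
{
  unsigned int pad : 3;
  unsigned int f : 5;
};
#endif
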
21164 /* For a FIELD_DECL node which represents a bit field, output an attribute
21165 which specifies the length in bits of the given field. */
21166
21167 static inline void
21168 add_bit_size_attribute (dw_die_ref die, tree decl)
21169 {
21170 /* Must be a field and a bit field. */
21171 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21172 && DECL_BIT_FIELD_TYPE (decl));
21173
21174 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21175 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21176 }
21177
21178 /* If the compiled language is ANSI C, then add a 'prototyped'
21179 attribute if argument types are given for the parameters of a function. */
21180
21181 static inline void
21182 add_prototyped_attribute (dw_die_ref die, tree func_type)
21183 {
21184 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21185 {
21186 case DW_LANG_C:
21187 case DW_LANG_C89:
21188 case DW_LANG_C99:
21189 case DW_LANG_C11:
21190 case DW_LANG_ObjC:
21191 if (prototype_p (func_type))
21192 add_AT_flag (die, DW_AT_prototyped, 1);
21193 break;
21194 default:
21195 break;
21196 }
21197 }
21198
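/* Illustrative sketch (added commentary; the declarations are hypothetical):
   only a C-family compilation unit gets DW_AT_prototyped, and only for
   declarations that actually carry a prototype.  */
#if 0
int f (void);   /* prototype_p () is true: DW_AT_prototyped is added.  */
int g ();       /* old-style, unprototyped declaration: no attribute.  */
#endif
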
21199 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21200 by looking in the type declaration, the object declaration equate table or
21201 the block mapping. */
21202
21203 static inline dw_die_ref
21204 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21205 {
21206 dw_die_ref origin_die = NULL;
21207
21208 if (DECL_P (origin))
21209 {
21210 dw_die_ref c;
21211 origin_die = lookup_decl_die (origin);
21212 /* "Unwrap" the decls DIE which we put in the imported unit context.
21213 We are looking for the abstract copy here. */
21214 if (in_lto_p
21215 && origin_die
21216 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21217 /* ??? Identify this better. */
21218 && c->with_offset)
21219 origin_die = c;
21220 }
21221 else if (TYPE_P (origin))
21222 origin_die = lookup_type_die (origin);
21223 else if (TREE_CODE (origin) == BLOCK)
21224 origin_die = BLOCK_DIE (origin);
21225
21226 /* XXX: Functions that are never lowered don't always have correct block
21227 trees (in the case of Java they simply have no block tree; the same can
21228 happen in some other languages). For these functions, there is nothing we
21229 can really do to output correct debug info for inlined functions in all
21230 cases. Rather than die, we'll just produce deficient debug info now, in
21231 that we will have variables without a proper abstract origin. In the
21232 future, when all functions are lowered, we should re-add a
21233 gcc_assert (origin_die) here. */
21234
21235 if (origin_die)
21236 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21237 return origin_die;
21238 }
21239
21240 /* We do not currently support the pure_virtual attribute. */
21241
21242 static inline void
21243 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21244 {
21245 if (DECL_VINDEX (func_decl))
21246 {
21247 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21248
21249 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21250 add_AT_loc (die, DW_AT_vtable_elem_location,
21251 new_loc_descr (DW_OP_constu,
21252 tree_to_shwi (DECL_VINDEX (func_decl)),
21253 0));
21254
21255 /* GNU extension: Record what type this method came from originally. */
21256 if (debug_info_level > DINFO_LEVEL_TERSE
21257 && DECL_CONTEXT (func_decl))
21258 add_AT_die_ref (die, DW_AT_containing_type,
21259 lookup_type_die (DECL_CONTEXT (func_decl)));
21260 }
21261 }
21262 \f
21263 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21264 given decl. This used to be a vendor extension until after DWARF 4
21265 standardized it. */
21266
21267 static void
21268 add_linkage_attr (dw_die_ref die, tree decl)
21269 {
21270 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21271
21272 /* Mimic what assemble_name_raw does with a leading '*'. */
21273 if (name[0] == '*')
21274 name = &name[1];
21275
21276 if (dwarf_version >= 4)
21277 add_AT_string (die, DW_AT_linkage_name, name);
21278 else
21279 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21280 }
21281
21282 /* Add source coordinate attributes for the given decl. */
21283
21284 static void
21285 add_src_coords_attributes (dw_die_ref die, tree decl)
21286 {
21287 expanded_location s;
21288
21289 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21290 return;
21291 s = expand_location (DECL_SOURCE_LOCATION (decl));
21292 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21293 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21294 if (debug_column_info && s.column)
21295 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21296 }
21297
21298 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21299
21300 static void
21301 add_linkage_name_raw (dw_die_ref die, tree decl)
21302 {
21303 /* Defer until we have an assembler name set. */
21304 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21305 {
21306 limbo_die_node *asm_name;
21307
21308 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21309 asm_name->die = die;
21310 asm_name->created_for = decl;
21311 asm_name->next = deferred_asm_name;
21312 deferred_asm_name = asm_name;
21313 }
21314 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21315 add_linkage_attr (die, decl);
21316 }
21317
21318 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21319
21320 static void
21321 add_linkage_name (dw_die_ref die, tree decl)
21322 {
21323 if (debug_info_level > DINFO_LEVEL_NONE
21324 && VAR_OR_FUNCTION_DECL_P (decl)
21325 && TREE_PUBLIC (decl)
21326 && !(VAR_P (decl) && DECL_REGISTER (decl))
21327 && die->die_tag != DW_TAG_member)
21328 add_linkage_name_raw (die, decl);
21329 }
21330
21331 /* Add a DW_AT_name attribute and source coordinate attribute for the
21332 given decl, but only if it actually has a name. */
21333
21334 static void
21335 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21336 bool no_linkage_name)
21337 {
21338 tree decl_name;
21339
21340 decl_name = DECL_NAME (decl);
21341 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21342 {
21343 const char *name = dwarf2_name (decl, 0);
21344 if (name)
21345 add_name_attribute (die, name);
21346 else
21347 add_desc_attribute (die, decl);
21348
21349 if (! DECL_ARTIFICIAL (decl))
21350 add_src_coords_attributes (die, decl);
21351
21352 if (!no_linkage_name)
21353 add_linkage_name (die, decl);
21354 }
21355 else
21356 add_desc_attribute (die, decl);
21357
21358 #ifdef VMS_DEBUGGING_INFO
21359 /* Get the function's name, as described by its RTL. This may be different
21360 from the DECL_NAME name used in the source file. */
21361 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21362 {
21363 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21364 XEXP (DECL_RTL (decl), 0), false);
21365 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21366 }
21367 #endif /* VMS_DEBUGGING_INFO */
21368 }
21369
21370 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21371
21372 static void
21373 add_discr_value (dw_die_ref die, dw_discr_value *value)
21374 {
21375 dw_attr_node attr;
21376
21377 attr.dw_attr = DW_AT_discr_value;
21378 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21379 attr.dw_attr_val.val_entry = NULL;
21380 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21381 if (value->pos)
21382 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21383 else
21384 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21385 add_dwarf_attr (die, &attr);
21386 }
21387
21388 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21389
21390 static void
21391 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21392 {
21393 dw_attr_node attr;
21394
21395 attr.dw_attr = DW_AT_discr_list;
21396 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21397 attr.dw_attr_val.val_entry = NULL;
21398 attr.dw_attr_val.v.val_discr_list = discr_list;
21399 add_dwarf_attr (die, &attr);
21400 }
21401
21402 static inline dw_discr_list_ref
21403 AT_discr_list (dw_attr_node *attr)
21404 {
21405 return attr->dw_attr_val.v.val_discr_list;
21406 }
21407
21408 #ifdef VMS_DEBUGGING_INFO
21409 /* Output the debug main pointer die for VMS */
21410
21411 void
21412 dwarf2out_vms_debug_main_pointer (void)
21413 {
21414 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21415 dw_die_ref die;
21416
21417 /* Allocate the VMS debug main subprogram die. */
21418 die = new_die_raw (DW_TAG_subprogram);
21419 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21420 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21421 current_function_funcdef_no);
21422 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21423
21424 /* Make it the first child of comp_unit_die (). */
21425 die->die_parent = comp_unit_die ();
21426 if (comp_unit_die ()->die_child)
21427 {
21428 die->die_sib = comp_unit_die ()->die_child->die_sib;
21429 comp_unit_die ()->die_child->die_sib = die;
21430 }
21431 else
21432 {
21433 die->die_sib = die;
21434 comp_unit_die ()->die_child = die;
21435 }
21436 }
21437 #endif /* VMS_DEBUGGING_INFO */
21438
21439 /* walk_tree helper function for uses_local_type, below. */
21440
21441 static tree
21442 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21443 {
21444 if (!TYPE_P (*tp))
21445 *walk_subtrees = 0;
21446 else
21447 {
21448 tree name = TYPE_NAME (*tp);
21449 if (name && DECL_P (name) && decl_function_context (name))
21450 return *tp;
21451 }
21452 return NULL_TREE;
21453 }
21454
21455 /* If TYPE involves a function-local type (including a local typedef to a
21456 non-local type), returns that type; otherwise returns NULL_TREE. */
21457
21458 static tree
21459 uses_local_type (tree type)
21460 {
21461 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21462 return used;
21463 }
21464
21465 /* Return the DIE for the scope that immediately contains this type.
21466 Non-named types that do not involve a function-local type get global
21467 scope. Named types nested in namespaces or other types get their
21468 containing scope. All other types (i.e. function-local named types) get
21469 the current active scope. */
21470
21471 static dw_die_ref
21472 scope_die_for (tree t, dw_die_ref context_die)
21473 {
21474 dw_die_ref scope_die = NULL;
21475 tree containing_scope;
21476
21477 /* Non-types always go in the current scope. */
21478 gcc_assert (TYPE_P (t));
21479
21480 /* Use the scope of the typedef, rather than the scope of the type
21481 it refers to. */
21482 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21483 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21484 else
21485 containing_scope = TYPE_CONTEXT (t);
21486
21487 /* Use the containing namespace if there is one. */
21488 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21489 {
21490 if (context_die == lookup_decl_die (containing_scope))
21491 /* OK */;
21492 else if (debug_info_level > DINFO_LEVEL_TERSE)
21493 context_die = get_context_die (containing_scope);
21494 else
21495 containing_scope = NULL_TREE;
21496 }
21497
21498 /* Ignore function type "scopes" from the C frontend. They mean that
21499 a tagged type is local to a parmlist of a function declarator, but
21500 that isn't useful to DWARF. */
21501 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21502 containing_scope = NULL_TREE;
21503
21504 if (SCOPE_FILE_SCOPE_P (containing_scope))
21505 {
21506 /* If T uses a local type keep it local as well, to avoid references
21507 to function-local DIEs from outside the function. */
21508 if (current_function_decl && uses_local_type (t))
21509 scope_die = context_die;
21510 else
21511 scope_die = comp_unit_die ();
21512 }
21513 else if (TYPE_P (containing_scope))
21514 {
21515 /* For types, we can just look up the appropriate DIE. */
21516 if (debug_info_level > DINFO_LEVEL_TERSE)
21517 scope_die = get_context_die (containing_scope);
21518 else
21519 {
21520 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21521 if (scope_die == NULL)
21522 scope_die = comp_unit_die ();
21523 }
21524 }
21525 else
21526 scope_die = context_die;
21527
21528 return scope_die;
21529 }
21530
21531 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21532
21533 static inline int
21534 local_scope_p (dw_die_ref context_die)
21535 {
21536 for (; context_die; context_die = context_die->die_parent)
21537 if (context_die->die_tag == DW_TAG_inlined_subroutine
21538 || context_die->die_tag == DW_TAG_subprogram)
21539 return 1;
21540
21541 return 0;
21542 }
21543
21544 /* Returns nonzero if CONTEXT_DIE is a class. */
21545
21546 static inline int
21547 class_scope_p (dw_die_ref context_die)
21548 {
21549 return (context_die
21550 && (context_die->die_tag == DW_TAG_structure_type
21551 || context_die->die_tag == DW_TAG_class_type
21552 || context_die->die_tag == DW_TAG_interface_type
21553 || context_die->die_tag == DW_TAG_union_type));
21554 }
21555
21556 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21557 whether or not to treat a DIE in this context as a declaration. */
21558
21559 static inline int
21560 class_or_namespace_scope_p (dw_die_ref context_die)
21561 {
21562 return (class_scope_p (context_die)
21563 || (context_die && context_die->die_tag == DW_TAG_namespace));
21564 }
21565
21566 /* Many forms of DIEs require a "type description" attribute. This
21567 routine locates the proper "type descriptor" die for the type given
21568 by 'type' plus any additional qualifiers given by 'cv_quals', and
21569 adds a DW_AT_type attribute below the given die. */
21570
21571 static void
21572 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21573 bool reverse, dw_die_ref context_die)
21574 {
21575 enum tree_code code = TREE_CODE (type);
21576 dw_die_ref type_die = NULL;
21577
21578 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21579 or fixed-point type, use the inner type. This is because we have no
21580 support for unnamed types in base_type_die. This can happen if this is
21581 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21582 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21583 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21584 type = TREE_TYPE (type), code = TREE_CODE (type);
21585
21586 if (code == ERROR_MARK
21587 /* Handle a special case. For functions whose return type is void, we
21588 generate *no* type attribute. (Note that no object may have type
21589 `void', so this only applies to function return types). */
21590 || code == VOID_TYPE)
21591 return;
21592
21593 type_die = modified_type_die (type,
21594 cv_quals | TYPE_QUALS (type),
21595 reverse,
21596 context_die);
21597
21598 if (type_die != NULL)
21599 add_AT_die_ref (object_die, DW_AT_type, type_die);
21600 }
21601
21602 /* Given an object die, add the calling convention attribute for the
21603 function call type. */
21604 static void
21605 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21606 {
21607 enum dwarf_calling_convention value = DW_CC_normal;
21608
21609 value = ((enum dwarf_calling_convention)
21610 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21611
21612 if (is_fortran ()
21613 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21614 {
21615 /* DWARF 2 doesn't provide a way to identify a program's source-level
21616 entry point. DW_AT_calling_convention attributes are only meant
21617 to describe functions' calling conventions. However, lacking a
21618 better way to signal the Fortran main program, we used this for
21619 a long time, following existing custom. Now, DWARF 4 has
21620 DW_AT_main_subprogram, which we add below, but some tools still
21621 rely on the old way, which we thus keep. */
21622 value = DW_CC_program;
21623
21624 if (dwarf_version >= 4 || !dwarf_strict)
21625 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21626 }
21627
21628 /* Only add the attribute if the backend requests it, and the value
21629 is not DW_CC_normal. */
21630 if (value && (value != DW_CC_normal))
21631 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21632 }
21633
21634 /* Given a tree pointer to a struct, class, union, or enum type node, return
21635 a pointer to the (string) tag name for the given type, or zero if the type
21636 was declared without a tag. */
21637
21638 static const char *
21639 type_tag (const_tree type)
21640 {
21641 const char *name = 0;
21642
21643 if (TYPE_NAME (type) != 0)
21644 {
21645 tree t = 0;
21646
21647 /* Find the IDENTIFIER_NODE for the type name. */
21648 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21649 && !TYPE_NAMELESS (type))
21650 t = TYPE_NAME (type);
21651
21652 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21653 a TYPE_DECL node, regardless of whether or not a `typedef' was
21654 involved. */
21655 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21656 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21657 {
21658 /* We want to be extra verbose. Don't call dwarf_name if
21659 DECL_NAME isn't set. The default hook for decl_printable_name
21660 doesn't like that, and in this context it's correct to return
21661 0, instead of "<anonymous>" or the like. */
21662 if (DECL_NAME (TYPE_NAME (type))
21663 && !DECL_NAMELESS (TYPE_NAME (type)))
21664 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21665 }
21666
21667 /* Now get the name as a string, or invent one. */
21668 if (!name && t != 0)
21669 name = IDENTIFIER_POINTER (t);
21670 }
21671
21672 return (name == 0 || *name == '\0') ? 0 : name;
21673 }
21674
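/* Illustrative sketch (added commentary; the declarations are hypothetical):
   type_tag () yields the source-level tag when one exists and 0 otherwise.  */
#if 0
struct point { int x, y; };   /* type_tag () returns "point".  */
struct { int x, y; } p;       /* untagged type: type_tag () returns 0.  */
#endif
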
21675 /* Return the type associated with a data member, make a special check
21676 for bit field types. */
21677
21678 static inline tree
21679 member_declared_type (const_tree member)
21680 {
21681 return (DECL_BIT_FIELD_TYPE (member)
21682 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21683 }
21684
21685 /* Get the decl's label, as described by its RTL. This may be different
21686 from the DECL_NAME name used in the source file. */
21687
21688 #if 0
21689 static const char *
21690 decl_start_label (tree decl)
21691 {
21692 rtx x;
21693 const char *fnname;
21694
21695 x = DECL_RTL (decl);
21696 gcc_assert (MEM_P (x));
21697
21698 x = XEXP (x, 0);
21699 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21700
21701 fnname = XSTR (x, 0);
21702 return fnname;
21703 }
21704 #endif
21705 \f
21706 /* For variable-length arrays that have been previously generated, but
21707 may be incomplete due to missing subscript info, fill the subscript
21708 info. Return TRUE if this is one of those cases. */
21709 static bool
21710 fill_variable_array_bounds (tree type)
21711 {
21712 if (TREE_ASM_WRITTEN (type)
21713 && TREE_CODE (type) == ARRAY_TYPE
21714 && variably_modified_type_p (type, NULL))
21715 {
21716 dw_die_ref array_die = lookup_type_die (type);
21717 if (!array_die)
21718 return false;
21719 add_subscript_info (array_die, type, !is_ada ());
21720 return true;
21721 }
21722 return false;
21723 }
21724
21725 /* These routines generate the internal representation of the DIE's for
21726 the compilation unit. Debugging information is collected by walking
21727 the declaration trees passed in from dwarf2out_decl(). */
21728
21729 static void
21730 gen_array_type_die (tree type, dw_die_ref context_die)
21731 {
21732 dw_die_ref array_die;
21733
21734 /* GNU compilers represent multidimensional array types as sequences of
21735 one-dimensional array types whose element types are themselves array types.
21736 We sometimes squish that down to a single array_type DIE with multiple
21737 subscripts in the Dwarf debugging info. The draft Dwarf specification
21738 says that we are allowed to do this kind of compression in C, because
21739 there is no difference between an array of arrays and a multidimensional
21740 array. We don't do this for Ada, in order to remain as close as possible
21741 to the actual representation, which is especially important given the
21742 language's flexibility with respect to arrays of variable size. */
21743
21744 bool collapse_nested_arrays = !is_ada ();
21745
21746 if (fill_variable_array_bounds (type))
21747 return;
21748
21749 dw_die_ref scope_die = scope_die_for (type, context_die);
21750 tree element_type;
21751
21752 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21753 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21754 if (TYPE_STRING_FLAG (type)
21755 && TREE_CODE (type) == ARRAY_TYPE
21756 && is_fortran ()
21757 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21758 {
21759 HOST_WIDE_INT size;
21760
21761 array_die = new_die (DW_TAG_string_type, scope_die, type);
21762 add_name_attribute (array_die, type_tag (type));
21763 equate_type_number_to_die (type, array_die);
21764 size = int_size_in_bytes (type);
21765 if (size >= 0)
21766 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21767 /* ??? We can't annotate types late, but for LTO we may not
21768 generate a location early either (gfortran.dg/save_6.f90). */
21769 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21770 && TYPE_DOMAIN (type) != NULL_TREE
21771 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21772 {
21773 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21774 tree rszdecl = szdecl;
21775
21776 size = int_size_in_bytes (TREE_TYPE (szdecl));
21777 if (!DECL_P (szdecl))
21778 {
21779 if (TREE_CODE (szdecl) == INDIRECT_REF
21780 && DECL_P (TREE_OPERAND (szdecl, 0)))
21781 {
21782 rszdecl = TREE_OPERAND (szdecl, 0);
21783 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21784 != DWARF2_ADDR_SIZE)
21785 size = 0;
21786 }
21787 else
21788 size = 0;
21789 }
21790 if (size > 0)
21791 {
21792 dw_loc_list_ref loc
21793 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21794 NULL);
21795 if (loc)
21796 {
21797 add_AT_location_description (array_die, DW_AT_string_length,
21798 loc);
21799 if (size != DWARF2_ADDR_SIZE)
21800 add_AT_unsigned (array_die, dwarf_version >= 5
21801 ? DW_AT_string_length_byte_size
21802 : DW_AT_byte_size, size);
21803 }
21804 }
21805 }
21806 return;
21807 }
21808
21809 array_die = new_die (DW_TAG_array_type, scope_die, type);
21810 add_name_attribute (array_die, type_tag (type));
21811 equate_type_number_to_die (type, array_die);
21812
21813 if (TREE_CODE (type) == VECTOR_TYPE)
21814 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21815
21816 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21817 if (is_fortran ()
21818 && TREE_CODE (type) == ARRAY_TYPE
21819 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21820 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21821 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21822
21823 #if 0
21824 /* We default the array ordering. Debuggers will probably do the right
21825 things even if DW_AT_ordering is not present. It's not even an issue
21826 until we start to get into multidimensional arrays anyway. If a debugger
21827 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21828 then we'll have to put the DW_AT_ordering attribute back in. (But if
21829 and when we find out that we need to put these in, we will only do so
21830 for multidimensional arrays. */
21831 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21832 #endif
21833
21834 if (TREE_CODE (type) == VECTOR_TYPE)
21835 {
21836 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21837 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21838 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21839 add_bound_info (subrange_die, DW_AT_upper_bound,
21840 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21841 }
21842 else
21843 add_subscript_info (array_die, type, collapse_nested_arrays);
21844
21845 /* Add representation of the type of the elements of this array type and
21846 emit the corresponding DIE if we haven't done it already. */
21847 element_type = TREE_TYPE (type);
21848 if (collapse_nested_arrays)
21849 while (TREE_CODE (element_type) == ARRAY_TYPE)
21850 {
21851 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21852 break;
21853 element_type = TREE_TYPE (element_type);
21854 }
21855
21856 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21857 TREE_CODE (type) == ARRAY_TYPE
21858 && TYPE_REVERSE_STORAGE_ORDER (type),
21859 context_die);
21860
21861 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21862 if (TYPE_ARTIFICIAL (type))
21863 add_AT_flag (array_die, DW_AT_artificial, 1);
21864
21865 if (get_AT (array_die, DW_AT_name))
21866 add_pubtype (type, array_die);
21867
21868 add_alignment_attribute (array_die, type);
21869 }
21870
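/* Illustrative sketch (added commentary; the typedef is hypothetical):
   vector types are emitted by the routine above as array DIEs flagged as
   GNU vectors, with a single subrange covering the lanes.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
/* expected result: a DW_TAG_array_type with DW_AT_GNU_vector and one
   DW_TAG_subrange_type whose DW_AT_upper_bound is 3 (lanes 0 .. 3).  */
#endif
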
21871 /* This routine generates a DIE for an array with a hidden descriptor;
21872 details are filled into *info by a langhook. */
21873
21874 static void
21875 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21876 dw_die_ref context_die)
21877 {
21878 const dw_die_ref scope_die = scope_die_for (type, context_die);
21879 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21880 struct loc_descr_context context = { type, info->base_decl, NULL,
21881 false, false };
21882 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21883 int dim;
21884
21885 add_name_attribute (array_die, type_tag (type));
21886 equate_type_number_to_die (type, array_die);
21887
21888 if (info->ndimensions > 1)
21889 switch (info->ordering)
21890 {
21891 case array_descr_ordering_row_major:
21892 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21893 break;
21894 case array_descr_ordering_column_major:
21895 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21896 break;
21897 default:
21898 break;
21899 }
21900
21901 if (dwarf_version >= 3 || !dwarf_strict)
21902 {
21903 if (info->data_location)
21904 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21905 dw_scalar_form_exprloc, &context);
21906 if (info->associated)
21907 add_scalar_info (array_die, DW_AT_associated, info->associated,
21908 dw_scalar_form_constant
21909 | dw_scalar_form_exprloc
21910 | dw_scalar_form_reference, &context);
21911 if (info->allocated)
21912 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21913 dw_scalar_form_constant
21914 | dw_scalar_form_exprloc
21915 | dw_scalar_form_reference, &context);
21916 if (info->stride)
21917 {
21918 const enum dwarf_attribute attr
21919 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21920 const int forms
21921 = (info->stride_in_bits)
21922 ? dw_scalar_form_constant
21923 : (dw_scalar_form_constant
21924 | dw_scalar_form_exprloc
21925 | dw_scalar_form_reference);
21926
21927 add_scalar_info (array_die, attr, info->stride, forms, &context);
21928 }
21929 }
21930 if (dwarf_version >= 5)
21931 {
21932 if (info->rank)
21933 {
21934 add_scalar_info (array_die, DW_AT_rank, info->rank,
21935 dw_scalar_form_constant
21936 | dw_scalar_form_exprloc, &context);
21937 subrange_tag = DW_TAG_generic_subrange;
21938 context.placeholder_arg = true;
21939 }
21940 }
21941
21942 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21943
21944 for (dim = 0; dim < info->ndimensions; dim++)
21945 {
21946 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21947
21948 if (info->dimen[dim].bounds_type)
21949 add_type_attribute (subrange_die,
21950 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21951 false, context_die);
21952 if (info->dimen[dim].lower_bound)
21953 add_bound_info (subrange_die, DW_AT_lower_bound,
21954 info->dimen[dim].lower_bound, &context);
21955 if (info->dimen[dim].upper_bound)
21956 add_bound_info (subrange_die, DW_AT_upper_bound,
21957 info->dimen[dim].upper_bound, &context);
21958 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21959 add_scalar_info (subrange_die, DW_AT_byte_stride,
21960 info->dimen[dim].stride,
21961 dw_scalar_form_constant
21962 | dw_scalar_form_exprloc
21963 | dw_scalar_form_reference,
21964 &context);
21965 }
21966
21967 gen_type_die (info->element_type, context_die);
21968 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21969 TREE_CODE (type) == ARRAY_TYPE
21970 && TYPE_REVERSE_STORAGE_ORDER (type),
21971 context_die);
21972
21973 if (get_AT (array_die, DW_AT_name))
21974 add_pubtype (type, array_die);
21975
21976 add_alignment_attribute (array_die, type);
21977 }
21978
21979 #if 0
21980 static void
21981 gen_entry_point_die (tree decl, dw_die_ref context_die)
21982 {
21983 tree origin = decl_ultimate_origin (decl);
21984 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21985
21986 if (origin != NULL)
21987 add_abstract_origin_attribute (decl_die, origin);
21988 else
21989 {
21990 add_name_and_src_coords_attributes (decl_die, decl);
21991 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21992 TYPE_UNQUALIFIED, false, context_die);
21993 }
21994
21995 if (DECL_ABSTRACT_P (decl))
21996 equate_decl_number_to_die (decl, decl_die);
21997 else
21998 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21999 }
22000 #endif
22001
22002 /* Walk through the list of incomplete types again, trying once more to
22003 emit full debugging info for them. */
22004
22005 static void
22006 retry_incomplete_types (void)
22007 {
22008 set_early_dwarf s;
22009 int i;
22010
22011 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22012 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22013 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22014 vec_safe_truncate (incomplete_types, 0);
22015 }
22016
22017 /* Determine what tag to use for a record type. */
22018
22019 static enum dwarf_tag
22020 record_type_tag (tree type)
22021 {
22022 if (! lang_hooks.types.classify_record)
22023 return DW_TAG_structure_type;
22024
22025 switch (lang_hooks.types.classify_record (type))
22026 {
22027 case RECORD_IS_STRUCT:
22028 return DW_TAG_structure_type;
22029
22030 case RECORD_IS_CLASS:
22031 return DW_TAG_class_type;
22032
22033 case RECORD_IS_INTERFACE:
22034 if (dwarf_version >= 3 || !dwarf_strict)
22035 return DW_TAG_interface_type;
22036 return DW_TAG_structure_type;
22037
22038 default:
22039 gcc_unreachable ();
22040 }
22041 }
22042
22043 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22044 include all of the information about the enumeration values also. Each
22045 enumerated type name/value is listed as a child of the enumerated type
22046 DIE. */
22047
22048 static dw_die_ref
22049 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22050 {
22051 dw_die_ref type_die = lookup_type_die (type);
22052 dw_die_ref orig_type_die = type_die;
22053
22054 if (type_die == NULL)
22055 {
22056 type_die = new_die (DW_TAG_enumeration_type,
22057 scope_die_for (type, context_die), type);
22058 equate_type_number_to_die (type, type_die);
22059 add_name_attribute (type_die, type_tag (type));
22060 if ((dwarf_version >= 4 || !dwarf_strict)
22061 && ENUM_IS_SCOPED (type))
22062 add_AT_flag (type_die, DW_AT_enum_class, 1);
22063 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22064 add_AT_flag (type_die, DW_AT_declaration, 1);
22065 if (!dwarf_strict)
22066 add_AT_unsigned (type_die, DW_AT_encoding,
22067 TYPE_UNSIGNED (type)
22068 ? DW_ATE_unsigned
22069 : DW_ATE_signed);
22070 }
22071 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22072 return type_die;
22073 else
22074 remove_AT (type_die, DW_AT_declaration);
22075
22076 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22077 given enum type is incomplete, do not generate the DW_AT_byte_size
22078 attribute or the DW_AT_element_list attribute. */
22079 if (TYPE_SIZE (type))
22080 {
22081 tree link;
22082
22083 if (!ENUM_IS_OPAQUE (type))
22084 TREE_ASM_WRITTEN (type) = 1;
22085 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22086 add_byte_size_attribute (type_die, type);
22087 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22088 add_alignment_attribute (type_die, type);
22089 if ((dwarf_version >= 3 || !dwarf_strict)
22090 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22091 {
22092 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22093 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22094 context_die);
22095 }
22096 if (TYPE_STUB_DECL (type) != NULL_TREE)
22097 {
22098 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22099 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22100 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22101 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22102 }
22103
22104 /* If the first reference to this type was as the return type of an
22105 inline function, then it may not have a parent. Fix this now. */
22106 if (type_die->die_parent == NULL)
22107 add_child_die (scope_die_for (type, context_die), type_die);
22108
22109 for (link = TYPE_VALUES (type);
22110 link != NULL; link = TREE_CHAIN (link))
22111 {
22112 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22113 tree value = TREE_VALUE (link);
22114
22115 gcc_assert (!ENUM_IS_OPAQUE (type));
22116 add_name_attribute (enum_die,
22117 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22118
22119 if (TREE_CODE (value) == CONST_DECL)
22120 value = DECL_INITIAL (value);
22121
22122 if (simple_type_size_in_bits (TREE_TYPE (value))
22123 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22124 {
22125 /* For constant forms created by add_AT_unsigned, DWARF
22126 consumers (GDB, elfutils, etc.) always zero-extend
22127 the value. Only when the actual value is negative
22128 do we need to use add_AT_int to generate a constant
22129 form that can represent negative values. */
22130 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22131 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22132 add_AT_unsigned (enum_die, DW_AT_const_value,
22133 (unsigned HOST_WIDE_INT) val);
22134 else
22135 add_AT_int (enum_die, DW_AT_const_value, val);
22136 }
22137 else
22138 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22139 that here. TODO: This should be re-worked to use correct
22140 signed/unsigned double tags for all cases. */
22141 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22142 }
22143
22144 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22145 if (TYPE_ARTIFICIAL (type)
22146 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22147 add_AT_flag (type_die, DW_AT_artificial, 1);
22148 }
22149 else
22150 add_AT_flag (type_die, DW_AT_declaration, 1);
22151
22152 add_pubtype (type, type_die);
22153
22154 return type_die;
22155 }
22156
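/* Illustrative sketch (added commentary, not part of the original source;
   the enumeration below is hypothetical).  It shows the typical shape of
   the output produced by gen_enumeration_type_die above.  */
#if 0
enum color { RED = 1, GREEN = 2, BLUE = 4 };
/* expected result: a DW_TAG_enumeration_type DIE carrying DW_AT_name
   "color" and DW_AT_byte_size, with three DW_TAG_enumerator children,
   each carrying DW_AT_name and DW_AT_const_value (1, 2 and 4).  */
#endif
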
22157 /* Generate a DIE to represent either a real live formal parameter decl or to
22158 represent just the type of some formal parameter position in some function
22159 type.
22160
22161 Note that this routine is a bit unusual because its argument may be a
22162 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22163 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22164 node. If it's the former then this function is being called to output a
22165 DIE to represent a formal parameter object (or some inlining thereof). If
22166 it's the latter, then this function is only being called to output a
22167 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22168 argument type of some subprogram type.
22169 If EMIT_NAME_P is true, name and source coordinate attributes
22170 are emitted. */
22171
22172 static dw_die_ref
22173 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22174 dw_die_ref context_die)
22175 {
22176 tree node_or_origin = node ? node : origin;
22177 tree ultimate_origin;
22178 dw_die_ref parm_die = NULL;
22179
22180 if (DECL_P (node_or_origin))
22181 {
22182 parm_die = lookup_decl_die (node);
22183
22184 /* If the contexts differ, we may not be talking about the same
22185 thing.
22186 ??? When in LTO the DIE parent is the "abstract" copy and the
22187 context_die is the specification "copy". But this whole block
22188 should eventually no longer be needed. */
22189 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22190 {
22191 if (!DECL_ABSTRACT_P (node))
22192 {
22193 /* This can happen when creating an inlined instance, in
22194 which case we need to create a new DIE that will get
22195 annotated with DW_AT_abstract_origin. */
22196 parm_die = NULL;
22197 }
22198 else
22199 gcc_unreachable ();
22200 }
22201
22202 if (parm_die && parm_die->die_parent == NULL)
22203 {
22204 /* Check that parm_die already has the right attributes that
22205 we would have added below. If any attributes are
22206 missing, fall through to add them. */
22207 if (! DECL_ABSTRACT_P (node_or_origin)
22208 && !get_AT (parm_die, DW_AT_location)
22209 && !get_AT (parm_die, DW_AT_const_value))
22210 /* We are missing location info, and are about to add it. */
22211 ;
22212 else
22213 {
22214 add_child_die (context_die, parm_die);
22215 return parm_die;
22216 }
22217 }
22218 }
22219
22220 /* If we have a previously generated DIE, use it, unless this is a
22221 concrete instance (origin != NULL), in which case we need a new
22222 DIE with a corresponding DW_AT_abstract_origin. */
22223 bool reusing_die;
22224 if (parm_die && origin == NULL)
22225 reusing_die = true;
22226 else
22227 {
22228 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22229 reusing_die = false;
22230 }
22231
22232 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22233 {
22234 case tcc_declaration:
22235 ultimate_origin = decl_ultimate_origin (node_or_origin);
22236 if (node || ultimate_origin)
22237 origin = ultimate_origin;
22238
22239 if (reusing_die)
22240 goto add_location;
22241
22242 if (origin != NULL)
22243 add_abstract_origin_attribute (parm_die, origin);
22244 else if (emit_name_p)
22245 add_name_and_src_coords_attributes (parm_die, node);
22246 if (origin == NULL
22247 || (! DECL_ABSTRACT_P (node_or_origin)
22248 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22249 decl_function_context
22250 (node_or_origin))))
22251 {
22252 tree type = TREE_TYPE (node_or_origin);
22253 if (decl_by_reference_p (node_or_origin))
22254 add_type_attribute (parm_die, TREE_TYPE (type),
22255 TYPE_UNQUALIFIED,
22256 false, context_die);
22257 else
22258 add_type_attribute (parm_die, type,
22259 decl_quals (node_or_origin),
22260 false, context_die);
22261 }
22262 if (origin == NULL && DECL_ARTIFICIAL (node))
22263 add_AT_flag (parm_die, DW_AT_artificial, 1);
22264 add_location:
22265 if (node && node != origin)
22266 equate_decl_number_to_die (node, parm_die);
22267 if (! DECL_ABSTRACT_P (node_or_origin))
22268 add_location_or_const_value_attribute (parm_die, node_or_origin,
22269 node == NULL);
22270
22271 break;
22272
22273 case tcc_type:
22274 /* We were called with some kind of a ..._TYPE node. */
22275 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22276 context_die);
22277 break;
22278
22279 default:
22280 gcc_unreachable ();
22281 }
22282
22283 return parm_die;
22284 }
22285
22286 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22287 children DW_TAG_formal_parameter DIEs representing the arguments of the
22288 parameter pack.
22289
22290 PARM_PACK must be a function parameter pack.
22291 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22292 must point to the subsequent arguments of the function PACK_ARG belongs to.
22293 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22294 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22295 following the last one for which a DIE was generated. */
22296
22297 static dw_die_ref
22298 gen_formal_parameter_pack_die (tree parm_pack,
22299 tree pack_arg,
22300 dw_die_ref subr_die,
22301 tree *next_arg)
22302 {
22303 tree arg;
22304 dw_die_ref parm_pack_die;
22305
22306 gcc_assert (parm_pack
22307 && lang_hooks.function_parameter_pack_p (parm_pack)
22308 && subr_die);
22309
22310 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22311 add_src_coords_attributes (parm_pack_die, parm_pack);
22312
22313 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22314 {
22315 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22316 parm_pack))
22317 break;
22318 gen_formal_parameter_die (arg, NULL,
22319 false /* Don't emit name attribute. */,
22320 parm_pack_die);
22321 }
22322 if (next_arg)
22323 *next_arg = arg;
22324 return parm_pack_die;
22325 }
22326
22327 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22328 at the end of an (ANSI prototyped) formal parameters list. */
22329
22330 static void
22331 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22332 {
22333 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22334 }
22335
22336 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22337 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22338 parameters as specified in some function type specification (except for
22339 those which appear as part of a function *definition*). */
22340
22341 static void
22342 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22343 {
22344 tree link;
22345 tree formal_type = NULL;
22346 tree first_parm_type;
22347 tree arg;
22348
22349 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22350 {
22351 arg = DECL_ARGUMENTS (function_or_method_type);
22352 function_or_method_type = TREE_TYPE (function_or_method_type);
22353 }
22354 else
22355 arg = NULL_TREE;
22356
22357 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22358
22359 /* Make our first pass over the list of formal parameter types and output a
22360 DW_TAG_formal_parameter DIE for each one. */
22361 for (link = first_parm_type; link; )
22362 {
22363 dw_die_ref parm_die;
22364
22365 formal_type = TREE_VALUE (link);
22366 if (formal_type == void_type_node)
22367 break;
22368
22369 /* Output a (nameless) DIE to represent the formal parameter itself. */
22370 parm_die = gen_formal_parameter_die (formal_type, NULL,
22371 true /* Emit name attribute. */,
22372 context_die);
22373 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22374 && link == first_parm_type)
22375 {
22376 add_AT_flag (parm_die, DW_AT_artificial, 1);
22377 if (dwarf_version >= 3 || !dwarf_strict)
22378 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22379 }
22380 else if (arg && DECL_ARTIFICIAL (arg))
22381 add_AT_flag (parm_die, DW_AT_artificial, 1);
22382
22383 link = TREE_CHAIN (link);
22384 if (arg)
22385 arg = DECL_CHAIN (arg);
22386 }
22387
22388 /* If this function type has an ellipsis, add a
22389 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22390 if (formal_type != void_type_node)
22391 gen_unspecified_parameters_die (function_or_method_type, context_die);
22392
22393 /* Make our second (and final) pass over the list of formal parameter types
22394 and output DIEs to represent those types (as necessary). */
22395 for (link = TYPE_ARG_TYPES (function_or_method_type);
22396 link && TREE_VALUE (link);
22397 link = TREE_CHAIN (link))
22398 gen_type_die (TREE_VALUE (link), context_die);
22399 }
22400
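/* Illustrative sketch (added commentary; the declaration is hypothetical):
   for a prototyped, variadic function type, the formal parameter list is
   described by nameless DW_TAG_formal_parameter DIEs followed by a
   DW_TAG_unspecified_parameters DIE for the ellipsis.  */
#if 0
int printf_like (const char *, ...);
/* expected children of the subroutine type DIE: one DW_TAG_formal_parameter
   (type `const char *') and one DW_TAG_unspecified_parameters.  */
#endif
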
22401 /* We want to generate the DIE for TYPE so that we can generate the
22402 die for MEMBER, which has been defined; we will need to refer back
22403 to the member declaration nested within TYPE. If we're trying to
22404 generate minimal debug info for TYPE, processing TYPE won't do the
22405 trick; we need to attach the member declaration by hand. */
22406
22407 static void
22408 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22409 {
22410 gen_type_die (type, context_die);
22411
22412 /* If we're trying to avoid duplicate debug info, we may not have
22413 emitted the member decl for this function. Emit it now. */
22414 if (TYPE_STUB_DECL (type)
22415 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22416 && ! lookup_decl_die (member))
22417 {
22418 dw_die_ref type_die;
22419 gcc_assert (!decl_ultimate_origin (member));
22420
22421 type_die = lookup_type_die_strip_naming_typedef (type);
22422 if (TREE_CODE (member) == FUNCTION_DECL)
22423 gen_subprogram_die (member, type_die);
22424 else if (TREE_CODE (member) == FIELD_DECL)
22425 {
22426 /* Ignore the nameless fields that are used to skip bits but handle
22427 C++ anonymous unions and structs. */
22428 if (DECL_NAME (member) != NULL_TREE
22429 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22430 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22431 {
22432 struct vlr_context vlr_ctx = {
22433 DECL_CONTEXT (member), /* struct_type */
22434 NULL_TREE /* variant_part_offset */
22435 };
22436 gen_type_die (member_declared_type (member), type_die);
22437 gen_field_die (member, &vlr_ctx, type_die);
22438 }
22439 }
22440 else
22441 gen_variable_die (member, NULL_TREE, type_die);
22442 }
22443 }
22444 \f
22445 /* Forward declare these functions, because they are mutually recursive
22446 with their set_block_* pairing functions. */
22447 static void set_decl_origin_self (tree);
22448
22449 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22450 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22451 that it points to the node itself, thus indicating that the node is its
22452 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22453 the given node is NULL, recursively descend the decl/block tree of which
22454 it is the root, and for each other ..._DECL or BLOCK node contained
22455 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22456 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22457 values to point to themselves. */
22458
22459 static void
22460 set_block_origin_self (tree stmt)
22461 {
22462 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22463 {
22464 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22465
22466 {
22467 tree local_decl;
22468
22469 for (local_decl = BLOCK_VARS (stmt);
22470 local_decl != NULL_TREE;
22471 local_decl = DECL_CHAIN (local_decl))
22472 /* Do not recurse on nested functions since the inlining status
22473 of parent and child can be different as per the DWARF spec. */
22474 if (TREE_CODE (local_decl) != FUNCTION_DECL
22475 && !DECL_EXTERNAL (local_decl))
22476 set_decl_origin_self (local_decl);
22477 }
22478
22479 {
22480 tree subblock;
22481
22482 for (subblock = BLOCK_SUBBLOCKS (stmt);
22483 subblock != NULL_TREE;
22484 subblock = BLOCK_CHAIN (subblock))
22485 set_block_origin_self (subblock); /* Recurse. */
22486 }
22487 }
22488 }
22489
22490 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22491 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22492 	 node so that it points to the node itself, thus indicating that the
22493 	 node represents its own (abstract) origin. Additionally, if the
22494 	 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22495 	 the decl/block tree of which the given node is the root, and for
22496 each other ..._DECL or BLOCK node contained therein whose
22497 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22498 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22499 point to themselves. */
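/* A rough sketch of the effect (hedged): after set_decl_origin_self (FNDECL)
   for a function, FNDECL itself, each of its DECL_ARGUMENTS, its DECL_INITIAL
   block and all nested blocks and local decls end up with ABSTRACT_ORIGIN
   pointing to themselves, i.e. this tree is its own abstract instance that
   concrete (inlined) copies can later refer to.  */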
22500
22501 static void
22502 set_decl_origin_self (tree decl)
22503 {
22504 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22505 {
22506 DECL_ABSTRACT_ORIGIN (decl) = decl;
22507 if (TREE_CODE (decl) == FUNCTION_DECL)
22508 {
22509 tree arg;
22510
22511 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22512 DECL_ABSTRACT_ORIGIN (arg) = arg;
22513 if (DECL_INITIAL (decl) != NULL_TREE
22514 && DECL_INITIAL (decl) != error_mark_node)
22515 set_block_origin_self (DECL_INITIAL (decl));
22516 }
22517 }
22518 }
22519 \f
22520 /* Mark the early DIE for DECL as the abstract instance. */
22521
22522 static void
22523 dwarf2out_abstract_function (tree decl)
22524 {
22525 dw_die_ref old_die;
22526
22527 /* Make sure we have the actual abstract inline, not a clone. */
22528 decl = DECL_ORIGIN (decl);
22529
22530 if (DECL_IGNORED_P (decl))
22531 return;
22532
22533 old_die = lookup_decl_die (decl);
22534 	 /* With early debug we always have an old DIE unless we are in LTO
22535 	 and debug info was enabled only at link time, not at compile time. */
22536 if (in_lto_p && ! old_die)
22537 return;
22538 gcc_assert (old_die != NULL);
22539 if (get_AT (old_die, DW_AT_inline)
22540 || get_AT (old_die, DW_AT_abstract_origin))
22541 /* We've already generated the abstract instance. */
22542 return;
22543
22544 /* Go ahead and put DW_AT_inline on the DIE. */
22545 if (DECL_DECLARED_INLINE_P (decl))
22546 {
22547 if (cgraph_function_possibly_inlined_p (decl))
22548 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22549 else
22550 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22551 }
22552 else
22553 {
22554 if (cgraph_function_possibly_inlined_p (decl))
22555 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22556 else
22557 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22558 }
22559
22560 if (DECL_DECLARED_INLINE_P (decl)
22561 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22562 add_AT_flag (old_die, DW_AT_artificial, 1);
22563
22564 set_decl_origin_self (decl);
22565 }
22566
22567 /* Helper function of premark_used_types() which gets called through
22568 htab_traverse.
22569
22570 	 Marks the DIE of the given TYPE as perennial, so it never gets
22571 marked as unused by prune_unused_types. */
22572
22573 bool
22574 premark_used_types_helper (tree const &type, void *)
22575 {
22576 dw_die_ref die;
22577
22578 die = lookup_type_die (type);
22579 if (die != NULL)
22580 die->die_perennial_p = 1;
22581 return true;
22582 }
22583
22584 /* Helper function of premark_types_used_by_global_vars which gets called
22585 through htab_traverse.
22586
22587 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22588 marked as unused by prune_unused_types. The DIE of the type is marked
22589 only if the global variable using the type will actually be emitted. */
22590
22591 int
22592 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22593 void *)
22594 {
22595 struct types_used_by_vars_entry *entry;
22596 dw_die_ref die;
22597
22598 entry = (struct types_used_by_vars_entry *) *slot;
22599 gcc_assert (entry->type != NULL
22600 && entry->var_decl != NULL);
22601 die = lookup_type_die (entry->type);
22602 if (die)
22603 {
22604 /* Ask cgraph if the global variable really is to be emitted.
22605 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22606 varpool_node *node = varpool_node::get (entry->var_decl);
22607 if (node && node->definition)
22608 {
22609 die->die_perennial_p = 1;
22610 /* Keep the parent DIEs as well. */
22611 while ((die = die->die_parent) && die->die_perennial_p == 0)
22612 die->die_perennial_p = 1;
22613 }
22614 }
22615 return 1;
22616 }
22617
22618 /* Mark all members of used_types_hash as perennial. */
22619
22620 static void
22621 premark_used_types (struct function *fun)
22622 {
22623 if (fun && fun->used_types_hash)
22624 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22625 }
22626
22627 /* Mark all members of types_used_by_vars_entry as perennial. */
22628
22629 static void
22630 premark_types_used_by_global_vars (void)
22631 {
22632 if (types_used_by_vars_hash)
22633 types_used_by_vars_hash
22634 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22635 }
22636
22637 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22638 for CA_LOC call arg loc node. */
22639
22640 static dw_die_ref
22641 gen_call_site_die (tree decl, dw_die_ref subr_die,
22642 struct call_arg_loc_node *ca_loc)
22643 {
22644 dw_die_ref stmt_die = NULL, die;
22645 tree block = ca_loc->block;
22646
22647 while (block
22648 && block != DECL_INITIAL (decl)
22649 && TREE_CODE (block) == BLOCK)
22650 {
22651 stmt_die = BLOCK_DIE (block);
22652 if (stmt_die)
22653 break;
22654 block = BLOCK_SUPERCONTEXT (block);
22655 }
22656 if (stmt_die == NULL)
22657 stmt_die = subr_die;
22658 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22659 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22660 if (ca_loc->tail_call_p)
22661 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22662 if (ca_loc->symbol_ref)
22663 {
22664 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22665 if (tdie)
22666 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22667 else
22668 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22669 false);
22670 }
22671 return die;
22672 }
22673
22674 /* Generate a DIE to represent a declared function (either file-scope or
22675 block-local). */
22676
22677 static void
22678 gen_subprogram_die (tree decl, dw_die_ref context_die)
22679 {
22680 tree origin = decl_ultimate_origin (decl);
22681 dw_die_ref subr_die;
22682 dw_die_ref old_die = lookup_decl_die (decl);
22683
22684 /* This function gets called multiple times for different stages of
22685 the debug process. For example, for func() in this code:
22686
22687 namespace S
22688 {
22689 void func() { ... }
22690 }
22691
22692 ...we get called 4 times. Twice in early debug and twice in
22693 late debug:
22694
22695 Early debug
22696 -----------
22697
22698 1. Once while generating func() within the namespace. This is
22699 the declaration. The declaration bit below is set, as the
22700 context is the namespace.
22701
22702 A new DIE will be generated with DW_AT_declaration set.
22703
22704 2. Once for func() itself. This is the specification. The
22705 declaration bit below is clear as the context is the CU.
22706
22707 We will use the cached DIE from (1) to create a new DIE with
22708 DW_AT_specification pointing to the declaration in (1).
22709
22710 Late debug via rest_of_handle_final()
22711 -------------------------------------
22712
22713 	 3. Once while generating func() within the namespace. This is also the
22714 declaration, as in (1), but this time we will early exit below
22715 as we have a cached DIE and a declaration needs no additional
22716 annotations (no locations), as the source declaration line
22717 info is enough.
22718
22719 4. Once for func() itself. As in (2), this is the specification,
22720 but this time we will re-use the cached DIE, and just annotate
22721 it with the location information that should now be available.
22722
22723 For something without namespaces, but with abstract instances, we
22724 	 are also called multiple times:
22725
22726 class Base
22727 {
22728 public:
22729 Base (); // constructor declaration (1)
22730 };
22731
22732 Base::Base () { } // constructor specification (2)
22733
22734 Early debug
22735 -----------
22736
22737 1. Once for the Base() constructor by virtue of it being a
22738 member of the Base class. This is done via
22739 rest_of_type_compilation.
22740
22741 This is a declaration, so a new DIE will be created with
22742 DW_AT_declaration.
22743
22744 2. Once for the Base() constructor definition, but this time
22745 while generating the abstract instance of the base
22746 constructor (__base_ctor) which is being generated via early
22747 debug of reachable functions.
22748
22749 Even though we have a cached version of the declaration (1),
22750 we will create a DW_AT_specification of the declaration DIE
22751 in (1).
22752
22753 3. Once for the __base_ctor itself, but this time, we generate
22754 	 a DW_AT_abstract_origin version of the DW_AT_specification in
22755 (2).
22756
22757 Late debug via rest_of_handle_final
22758 -----------------------------------
22759
22760 	 4. One final time for the __base_ctor (which will have a cached
22761 	 DIE with DW_AT_abstract_origin created in (3)). This time,
22762 we will just annotate the location information now
22763 available.
22764 */
22765 int declaration = (current_function_decl != decl
22766 || class_or_namespace_scope_p (context_die));
22767
22768 /* A declaration that has been previously dumped needs no
22769 additional information. */
22770 if (old_die && declaration)
22771 return;
22772
22773 /* Now that the C++ front end lazily declares artificial member fns, we
22774 might need to retrofit the declaration into its class. */
22775 if (!declaration && !origin && !old_die
22776 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22777 && !class_or_namespace_scope_p (context_die)
22778 && debug_info_level > DINFO_LEVEL_TERSE)
22779 old_die = force_decl_die (decl);
22780
22781 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22782 if (origin != NULL)
22783 {
22784 gcc_assert (!declaration || local_scope_p (context_die));
22785
22786 /* Fixup die_parent for the abstract instance of a nested
22787 inline function. */
22788 if (old_die && old_die->die_parent == NULL)
22789 add_child_die (context_die, old_die);
22790
22791 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22792 {
22793 /* If we have a DW_AT_abstract_origin we have a working
22794 cached version. */
22795 subr_die = old_die;
22796 }
22797 else
22798 {
22799 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22800 add_abstract_origin_attribute (subr_die, origin);
22801 /* This is where the actual code for a cloned function is.
22802 Let's emit linkage name attribute for it. This helps
22803 	 debuggers to, e.g., set breakpoints into
22804 constructors/destructors when the user asks "break
22805 K::K". */
22806 add_linkage_name (subr_die, decl);
22807 }
22808 }
22809 /* A cached copy, possibly from early dwarf generation. Reuse as
22810 much as possible. */
22811 else if (old_die)
22812 {
22813 if (!get_AT_flag (old_die, DW_AT_declaration)
22814 /* We can have a normal definition following an inline one in the
22815 case of redefinition of GNU C extern inlines.
22816 It seems reasonable to use AT_specification in this case. */
22817 && !get_AT (old_die, DW_AT_inline))
22818 {
22819 /* Detect and ignore this case, where we are trying to output
22820 something we have already output. */
22821 if (get_AT (old_die, DW_AT_low_pc)
22822 || get_AT (old_die, DW_AT_ranges))
22823 return;
22824
22825 /* If we have no location information, this must be a
22826 partially generated DIE from early dwarf generation.
22827 Fall through and generate it. */
22828 }
22829
22830 /* If the definition comes from the same place as the declaration,
22831 maybe use the old DIE. We always want the DIE for this function
22832 that has the *_pc attributes to be under comp_unit_die so the
22833 debugger can find it. We also need to do this for abstract
22834 instances of inlines, since the spec requires the out-of-line copy
22835 to have the same parent. For local class methods, this doesn't
22836 apply; we just use the old DIE. */
22837 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22838 struct dwarf_file_data * file_index = lookup_filename (s.file);
22839 if (((is_unit_die (old_die->die_parent)
22840 /* This condition fixes the inconsistency/ICE with the
22841 following Fortran test (or some derivative thereof) while
22842 building libgfortran:
22843
22844 module some_m
22845 contains
22846 logical function funky (FLAG)
22847 funky = .true.
22848 end function
22849 end module
22850 */
22851 || (old_die->die_parent
22852 && old_die->die_parent->die_tag == DW_TAG_module)
22853 || local_scope_p (old_die->die_parent)
22854 || context_die == NULL)
22855 && (DECL_ARTIFICIAL (decl)
22856 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22857 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22858 == (unsigned) s.line)
22859 && (!debug_column_info
22860 || s.column == 0
22861 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22862 == (unsigned) s.column)))))
22863 /* With LTO if there's an abstract instance for
22864 the old DIE, this is a concrete instance and
22865 thus re-use the DIE. */
22866 || get_AT (old_die, DW_AT_abstract_origin))
22867 {
22868 subr_die = old_die;
22869
22870 /* Clear out the declaration attribute, but leave the
22871 parameters so they can be augmented with location
22872 information later. Unless this was a declaration, in
22873 which case, wipe out the nameless parameters and recreate
22874 them further down. */
22875 if (remove_AT (subr_die, DW_AT_declaration))
22876 {
22878 remove_AT (subr_die, DW_AT_object_pointer);
22879 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22880 }
22881 }
22882 /* Make a specification pointing to the previously built
22883 declaration. */
22884 else
22885 {
22886 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22887 add_AT_specification (subr_die, old_die);
22888 add_pubname (decl, subr_die);
22889 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22890 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22891 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22892 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22893 if (debug_column_info
22894 && s.column
22895 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22896 != (unsigned) s.column))
22897 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22898
22899 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22900 emit the real type on the definition die. */
22901 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22902 {
22903 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22904 if (die == auto_die || die == decltype_auto_die)
22905 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22906 TYPE_UNQUALIFIED, false, context_die);
22907 }
22908
22909 /* When we process the method declaration, we haven't seen
22910 the out-of-class defaulted definition yet, so we have to
22911 recheck now. */
22912 if ((dwarf_version >= 5 || ! dwarf_strict)
22913 && !get_AT (subr_die, DW_AT_defaulted))
22914 {
22915 int defaulted
22916 = lang_hooks.decls.decl_dwarf_attribute (decl,
22917 DW_AT_defaulted);
22918 if (defaulted != -1)
22919 {
22920 /* Other values must have been handled before. */
22921 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22922 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22923 }
22924 }
22925 }
22926 }
22927 /* Create a fresh DIE for anything else. */
22928 else
22929 {
22930 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22931
22932 if (TREE_PUBLIC (decl))
22933 add_AT_flag (subr_die, DW_AT_external, 1);
22934
22935 add_name_and_src_coords_attributes (subr_die, decl);
22936 add_pubname (decl, subr_die);
22937 if (debug_info_level > DINFO_LEVEL_TERSE)
22938 {
22939 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22940 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22941 TYPE_UNQUALIFIED, false, context_die);
22942 }
22943
22944 add_pure_or_virtual_attribute (subr_die, decl);
22945 if (DECL_ARTIFICIAL (decl))
22946 add_AT_flag (subr_die, DW_AT_artificial, 1);
22947
22948 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22949 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22950
22951 add_alignment_attribute (subr_die, decl);
22952
22953 add_accessibility_attribute (subr_die, decl);
22954 }
22955
22956 /* Unless we have an existing non-declaration DIE, equate the new
22957 DIE. */
22958 if (!old_die || is_declaration_die (old_die))
22959 equate_decl_number_to_die (decl, subr_die);
22960
22961 if (declaration)
22962 {
22963 if (!old_die || !get_AT (old_die, DW_AT_inline))
22964 {
22965 add_AT_flag (subr_die, DW_AT_declaration, 1);
22966
22967 /* If this is an explicit function declaration then generate
22968 a DW_AT_explicit attribute. */
22969 if ((dwarf_version >= 3 || !dwarf_strict)
22970 && lang_hooks.decls.decl_dwarf_attribute (decl,
22971 DW_AT_explicit) == 1)
22972 add_AT_flag (subr_die, DW_AT_explicit, 1);
22973
22974 /* If this is a C++11 deleted special function member then generate
22975 a DW_AT_deleted attribute. */
22976 if ((dwarf_version >= 5 || !dwarf_strict)
22977 && lang_hooks.decls.decl_dwarf_attribute (decl,
22978 DW_AT_deleted) == 1)
22979 add_AT_flag (subr_die, DW_AT_deleted, 1);
22980
22981 /* If this is a C++11 defaulted special function member then
22982 generate a DW_AT_defaulted attribute. */
22983 if (dwarf_version >= 5 || !dwarf_strict)
22984 {
22985 int defaulted
22986 = lang_hooks.decls.decl_dwarf_attribute (decl,
22987 DW_AT_defaulted);
22988 if (defaulted != -1)
22989 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22990 }
22991
22992 /* If this is a C++11 non-static member function with & ref-qualifier
22993 then generate a DW_AT_reference attribute. */
22994 if ((dwarf_version >= 5 || !dwarf_strict)
22995 && lang_hooks.decls.decl_dwarf_attribute (decl,
22996 DW_AT_reference) == 1)
22997 add_AT_flag (subr_die, DW_AT_reference, 1);
22998
22999 /* If this is a C++11 non-static member function with &&
23000 ref-qualifier then generate a DW_AT_reference attribute. */
23001 if ((dwarf_version >= 5 || !dwarf_strict)
23002 && lang_hooks.decls.decl_dwarf_attribute (decl,
23003 DW_AT_rvalue_reference)
23004 == 1)
23005 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23006 }
23007 }
23008 /* For non DECL_EXTERNALs, if range information is available, fill
23009 the DIE with it. */
23010 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23011 {
23012 HOST_WIDE_INT cfa_fb_offset;
23013
23014 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23015
23016 if (!crtl->has_bb_partition)
23017 {
23018 dw_fde_ref fde = fun->fde;
23019 if (fde->dw_fde_begin)
23020 {
23021 /* We have already generated the labels. */
23022 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23023 fde->dw_fde_end, false);
23024 }
23025 else
23026 {
23027 /* Create start/end labels and add the range. */
23028 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23029 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23030 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23031 current_function_funcdef_no);
23032 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23033 current_function_funcdef_no);
23034 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23035 false);
23036 }
23037
23038 #if VMS_DEBUGGING_INFO
23039 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23040 Section 2.3 Prologue and Epilogue Attributes:
23041 When a breakpoint is set on entry to a function, it is generally
23042 desirable for execution to be suspended, not on the very first
23043 instruction of the function, but rather at a point after the
23044 function's frame has been set up, after any language defined local
23045 declaration processing has been completed, and before execution of
23046 the first statement of the function begins. Debuggers generally
23047 cannot properly determine where this point is. Similarly for a
23048 breakpoint set on exit from a function. The prologue and epilogue
23049 attributes allow a compiler to communicate the location(s) to use. */
23050
23051 {
23052 if (fde->dw_fde_vms_end_prologue)
23053 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23054 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23055
23056 if (fde->dw_fde_vms_begin_epilogue)
23057 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23058 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23059 }
23060 #endif
23061
23062 }
23063 else
23064 {
23065 /* Generate pubnames entries for the split function code ranges. */
23066 dw_fde_ref fde = fun->fde;
23067
23068 if (fde->dw_fde_second_begin)
23069 {
23070 if (dwarf_version >= 3 || !dwarf_strict)
23071 {
23072 /* We should use ranges for non-contiguous code section
23073 addresses. Use the actual code range for the initial
23074 section, since the HOT/COLD labels might precede an
23075 alignment offset. */
23076 bool range_list_added = false;
23077 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23078 fde->dw_fde_end, &range_list_added,
23079 false);
23080 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23081 fde->dw_fde_second_end,
23082 &range_list_added, false);
23083 if (range_list_added)
23084 add_ranges (NULL);
23085 }
23086 else
23087 {
23088 	 /* There is no real support in DWARF 2 for this, so we make
23089 	 a work-around. First, emit the pub name for the segment
23090 	 containing the function label. Then make and emit a
23091 	 simplified subprogram DIE for the second segment with the
23092 	 name prefixed by __second_sect_of_. We use the same
23093 	 linkage name for the second DIE so that gdb will find both
23094 	 sections when given "b foo". */
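/* Sketch of the intended result (hypothetical function `foo' split across
   hot/cold sections): the primary DIE keeps foo's name and the hot range,
   while a second minimal DW_TAG_subprogram named "__second_sect_of_foo",
   carrying the same linkage name, covers the cold range, so gdb's
   "b foo" catches both ranges.  */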
23095 const char *name = NULL;
23096 tree decl_name = DECL_NAME (decl);
23097 dw_die_ref seg_die;
23098
23099 /* Do the 'primary' section. */
23100 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23101 fde->dw_fde_end, false);
23102
23103 /* Build a minimal DIE for the secondary section. */
23104 seg_die = new_die (DW_TAG_subprogram,
23105 subr_die->die_parent, decl);
23106
23107 if (TREE_PUBLIC (decl))
23108 add_AT_flag (seg_die, DW_AT_external, 1);
23109
23110 if (decl_name != NULL
23111 && IDENTIFIER_POINTER (decl_name) != NULL)
23112 {
23113 name = dwarf2_name (decl, 1);
23114 if (! DECL_ARTIFICIAL (decl))
23115 add_src_coords_attributes (seg_die, decl);
23116
23117 add_linkage_name (seg_die, decl);
23118 }
23119 gcc_assert (name != NULL);
23120 add_pure_or_virtual_attribute (seg_die, decl);
23121 if (DECL_ARTIFICIAL (decl))
23122 add_AT_flag (seg_die, DW_AT_artificial, 1);
23123
23124 name = concat ("__second_sect_of_", name, NULL);
23125 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23126 fde->dw_fde_second_end, false);
23127 add_name_attribute (seg_die, name);
23128 if (want_pubnames ())
23129 add_pubname_string (name, seg_die);
23130 }
23131 }
23132 else
23133 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23134 false);
23135 }
23136
23137 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23138
23139 /* We define the "frame base" as the function's CFA. This is more
23140 convenient for several reasons: (1) It's stable across the prologue
23141 and epilogue, which makes it better than just a frame pointer,
23142 (2) With dwarf3, there exists a one-byte encoding that allows us
23143 to reference the .debug_frame data by proxy, but failing that,
23144 (3) We can at least reuse the code inspection and interpretation
23145 code that determines the CFA position at various points in the
23146 function. */
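/* Illustrative sketch of the two alternatives chosen below.  With DWARF 3+
   and DWARF2 unwind info we get the compact one-byte form:

     DW_AT_frame_base : DW_OP_call_frame_cfa

   otherwise a location expression or list derived from the CFA data,
   e.g. (hypothetical x86-64 rendering):

     DW_AT_frame_base : DW_OP_breg7 (rsp) +<offset>

   where the register and offset are target- and function-specific.  */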
23147 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23148 {
23149 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23150 add_AT_loc (subr_die, DW_AT_frame_base, op);
23151 }
23152 else
23153 {
23154 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23155 if (list->dw_loc_next)
23156 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23157 else
23158 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23159 }
23160
23161 /* Compute a displacement from the "steady-state frame pointer" to
23162 the CFA. The former is what all stack slots and argument slots
23163 will reference in the rtl; the latter is what we've told the
23164 debugger about. We'll need to adjust all frame_base references
23165 by this displacement. */
23166 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23167
23168 if (fun->static_chain_decl)
23169 {
23170 /* DWARF requires here a location expression that computes the
23171 address of the enclosing subprogram's frame base. The machinery
23172 in tree-nested.c is supposed to store this specific address in the
23173 last field of the FRAME record. */
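/* Hedged illustration with hypothetical names:

     void outer (void)
     {
       int x;
       void nested (void) { x++; }   // GNU C nested function
       ...
     }

   tree-nested.c materializes a FRAME record for `outer' whose last field
   holds outer's frame base; the expression built below dereferences the
   static chain and reads that field to form DW_AT_static_link.  */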
23174 const tree frame_type
23175 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23176 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23177
23178 tree fb_expr
23179 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23180 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23181 fb_expr, fb_decl, NULL_TREE);
23182
23183 add_AT_location_description (subr_die, DW_AT_static_link,
23184 loc_list_from_tree (fb_expr, 0, NULL));
23185 }
23186
23187 resolve_variable_values ();
23188 }
23189
23190 	 /* Generate child DIEs for template parameters. */
23191 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23192 gen_generic_params_dies (decl);
23193
23194 /* Now output descriptions of the arguments for this function. This gets
23195 	 (unnecessarily?) complex because the DECL_ARGUMENTS list
23196 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23197 `...' at the end of the formal parameter list. In order to find out if
23198 there was a trailing ellipsis or not, we must instead look at the type
23199 associated with the FUNCTION_DECL. This will be a node of type
23200 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23201 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23202 an ellipsis at the end. */
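/* For instance (illustrative only):

     int f (int a, ...);   // TYPE_ARG_TYPES chain: int, end (no void)
     int g (int a);        // TYPE_ARG_TYPES chain: int, void_type_node

   so only f's type chain indicates a trailing ellipsis.  */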
23203
23204 /* In the case where we are describing a mere function declaration, all we
23205 need to do here (and all we *can* do here) is to describe the *types* of
23206 its formal parameters. */
23207 if (debug_info_level <= DINFO_LEVEL_TERSE)
23208 ;
23209 else if (declaration)
23210 gen_formal_types_die (decl, subr_die);
23211 else
23212 {
23213 /* Generate DIEs to represent all known formal parameters. */
23214 tree parm = DECL_ARGUMENTS (decl);
23215 tree generic_decl = early_dwarf
23216 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23217 tree generic_decl_parm = generic_decl
23218 ? DECL_ARGUMENTS (generic_decl)
23219 : NULL;
23220
23221 /* Now we want to walk the list of parameters of the function and
23222 emit their relevant DIEs.
23223
23224 We consider the case of DECL being an instance of a generic function
23225 as well as it being a normal function.
23226
23227 If DECL is an instance of a generic function we walk the
23228 parameters of the generic function declaration _and_ the parameters of
23229 DECL itself. This is useful because we want to emit specific DIEs for
23230 function parameter packs and those are declared as part of the
23231 generic function declaration. In that particular case,
23232 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23233 	 That DIE has child DIEs representing the set of arguments
23234 	 of the pack. Note that the set of pack arguments can be empty.
23235 	 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have
23236 	 any child DIEs.
23237
23238 Otherwise, we just consider the parameters of DECL. */
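/* Hedged C++ sketch:

     template <typename... Args> void f (Args... args);
     // an instance such as f<int, double>

   walking the generic declaration lets us emit a single
   DW_TAG_GNU_formal_parameter_pack DIE for `args', whose children
   describe the instance's actual parameters (here int and double),
   or no children at all for an empty pack.  */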
23239 while (generic_decl_parm || parm)
23240 {
23241 if (generic_decl_parm
23242 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23243 gen_formal_parameter_pack_die (generic_decl_parm,
23244 parm, subr_die,
23245 &parm);
23246 else if (parm)
23247 {
23248 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23249
23250 if (early_dwarf
23251 && parm == DECL_ARGUMENTS (decl)
23252 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23253 && parm_die
23254 && (dwarf_version >= 3 || !dwarf_strict))
23255 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23256
23257 parm = DECL_CHAIN (parm);
23258 }
23259 else if (parm)
23260 parm = DECL_CHAIN (parm);
23261
23262 if (generic_decl_parm)
23263 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23264 }
23265
23266 /* Decide whether we need an unspecified_parameters DIE at the end.
23267 	 There are two cases to do this for: 1) the ANSI `...' declaration -
23268 	 detectable when the end of the arg list is not a
23269 	 void_type_node; 2) an unprototyped function declaration (not a
23270 definition). This just means that we have no info about the
23271 parameters at all. */
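/* E.g. (illustrative): a prototyped `int f (int, ...)' hits the stdarg_p
   check below; an old-style declaration `int g ();' with no definition
   has DECL_INITIAL == NULL and also gets the DIE, since nothing is known
   about its parameters.  */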
23272 if (early_dwarf)
23273 {
23274 if (prototype_p (TREE_TYPE (decl)))
23275 {
23276 	 /* This is the prototyped case; check for a trailing `...'. */
23277 if (stdarg_p (TREE_TYPE (decl)))
23278 gen_unspecified_parameters_die (decl, subr_die);
23279 }
23280 else if (DECL_INITIAL (decl) == NULL_TREE)
23281 gen_unspecified_parameters_die (decl, subr_die);
23282 }
23283 }
23284
23285 if (subr_die != old_die)
23286 /* Add the calling convention attribute if requested. */
23287 add_calling_convention_attribute (subr_die, decl);
23288
23289 /* Output Dwarf info for all of the stuff within the body of the function
23290 (if it has one - it may be just a declaration).
23291
23292 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23293 a function. This BLOCK actually represents the outermost binding contour
23294 for the function, i.e. the contour in which the function's formal
23295 parameters and labels get declared. Curiously, it appears that the front
23296 end doesn't actually put the PARM_DECL nodes for the current function onto
23297 	 the BLOCK_VARS list for this outer scope; instead, they are strung off
23298 	 the DECL_ARGUMENTS list for the function.
23299
23300 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23301 the LABEL_DECL nodes for the function however, and we output DWARF info
23302 for those in decls_for_scope. Just within the `outer_scope' there will be
23303 a BLOCK node representing the function's outermost pair of curly braces,
23304 and any blocks used for the base and member initializers of a C++
23305 constructor function. */
23306 tree outer_scope = DECL_INITIAL (decl);
23307 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23308 {
23309 int call_site_note_count = 0;
23310 int tail_call_site_note_count = 0;
23311
23312 /* Emit a DW_TAG_variable DIE for a named return value. */
23313 if (DECL_NAME (DECL_RESULT (decl)))
23314 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23315
23316 /* The first time through decls_for_scope we will generate the
23317 DIEs for the locals. The second time, we fill in the
23318 location info. */
23319 decls_for_scope (outer_scope, subr_die);
23320
23321 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23322 {
23323 struct call_arg_loc_node *ca_loc;
23324 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23325 {
23326 dw_die_ref die = NULL;
23327 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23328 rtx arg, next_arg;
23329 tree arg_decl = NULL_TREE;
23330
23331 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23332 ? XEXP (ca_loc->call_arg_loc_note, 0)
23333 : NULL_RTX);
23334 arg; arg = next_arg)
23335 {
23336 dw_loc_descr_ref reg, val;
23337 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23338 dw_die_ref cdie, tdie = NULL;
23339
23340 next_arg = XEXP (arg, 1);
23341 if (REG_P (XEXP (XEXP (arg, 0), 0))
23342 && next_arg
23343 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23344 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23345 && REGNO (XEXP (XEXP (arg, 0), 0))
23346 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23347 next_arg = XEXP (next_arg, 1);
23348 if (mode == VOIDmode)
23349 {
23350 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23351 if (mode == VOIDmode)
23352 mode = GET_MODE (XEXP (arg, 0));
23353 }
23354 if (mode == VOIDmode || mode == BLKmode)
23355 continue;
23356 /* Get dynamic information about call target only if we
23357 have no static information: we cannot generate both
23358 DW_AT_call_origin and DW_AT_call_target
23359 attributes. */
23360 if (ca_loc->symbol_ref == NULL_RTX)
23361 {
23362 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23363 {
23364 tloc = XEXP (XEXP (arg, 0), 1);
23365 continue;
23366 }
23367 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23368 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23369 {
23370 tlocc = XEXP (XEXP (arg, 0), 1);
23371 continue;
23372 }
23373 }
23374 reg = NULL;
23375 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23376 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23377 VAR_INIT_STATUS_INITIALIZED);
23378 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23379 {
23380 rtx mem = XEXP (XEXP (arg, 0), 0);
23381 reg = mem_loc_descriptor (XEXP (mem, 0),
23382 get_address_mode (mem),
23383 GET_MODE (mem),
23384 VAR_INIT_STATUS_INITIALIZED);
23385 }
23386 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23387 == DEBUG_PARAMETER_REF)
23388 {
23389 tree tdecl
23390 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23391 tdie = lookup_decl_die (tdecl);
23392 if (tdie == NULL)
23393 continue;
23394 arg_decl = tdecl;
23395 }
23396 else
23397 continue;
23398 if (reg == NULL
23399 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23400 != DEBUG_PARAMETER_REF)
23401 continue;
23402 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23403 VOIDmode,
23404 VAR_INIT_STATUS_INITIALIZED);
23405 if (val == NULL)
23406 continue;
23407 if (die == NULL)
23408 die = gen_call_site_die (decl, subr_die, ca_loc);
23409 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23410 NULL_TREE);
23411 add_desc_attribute (cdie, arg_decl);
23412 if (reg != NULL)
23413 add_AT_loc (cdie, DW_AT_location, reg);
23414 else if (tdie != NULL)
23415 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23416 tdie);
23417 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23418 if (next_arg != XEXP (arg, 1))
23419 {
23420 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23421 if (mode == VOIDmode)
23422 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23423 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23424 0), 1),
23425 mode, VOIDmode,
23426 VAR_INIT_STATUS_INITIALIZED);
23427 if (val != NULL)
23428 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23429 val);
23430 }
23431 }
23432 if (die == NULL
23433 && (ca_loc->symbol_ref || tloc))
23434 die = gen_call_site_die (decl, subr_die, ca_loc);
23435 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23436 {
23437 dw_loc_descr_ref tval = NULL;
23438
23439 if (tloc != NULL_RTX)
23440 tval = mem_loc_descriptor (tloc,
23441 GET_MODE (tloc) == VOIDmode
23442 ? Pmode : GET_MODE (tloc),
23443 VOIDmode,
23444 VAR_INIT_STATUS_INITIALIZED);
23445 if (tval)
23446 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23447 else if (tlocc != NULL_RTX)
23448 {
23449 tval = mem_loc_descriptor (tlocc,
23450 GET_MODE (tlocc) == VOIDmode
23451 ? Pmode : GET_MODE (tlocc),
23452 VOIDmode,
23453 VAR_INIT_STATUS_INITIALIZED);
23454 if (tval)
23455 add_AT_loc (die,
23456 dwarf_AT (DW_AT_call_target_clobbered),
23457 tval);
23458 }
23459 }
23460 if (die != NULL)
23461 {
23462 call_site_note_count++;
23463 if (ca_loc->tail_call_p)
23464 tail_call_site_note_count++;
23465 }
23466 }
23467 }
23468 call_arg_locations = NULL;
23469 call_arg_loc_last = NULL;
23470 if (tail_call_site_count >= 0
23471 && tail_call_site_count == tail_call_site_note_count
23472 && (!dwarf_strict || dwarf_version >= 5))
23473 {
23474 if (call_site_count >= 0
23475 && call_site_count == call_site_note_count)
23476 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23477 else
23478 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23479 }
23480 call_site_count = -1;
23481 tail_call_site_count = -1;
23482 }
23483
23484 /* Mark used types after we have created DIEs for the functions scopes. */
23485 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23486 }
23487
23488 /* Returns a hash value for X (which really is a die_struct). */
23489
23490 hashval_t
23491 block_die_hasher::hash (die_struct *d)
23492 {
23493 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23494 }
23495
23496 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23497 as decl_id and die_parent of die_struct Y. */
23498
23499 bool
23500 block_die_hasher::equal (die_struct *x, die_struct *y)
23501 {
23502 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23503 }
23504
23505 /* Hold information about markers for inlined entry points. */
23506 struct GTY ((for_user)) inline_entry_data
23507 {
23508 /* The block that's the inlined_function_outer_scope for an inlined
23509 function. */
23510 tree block;
23511
23512 /* The label at the inlined entry point. */
23513 const char *label_pfx;
23514 unsigned int label_num;
23515
23516 /* The view number to be used as the inlined entry point. */
23517 var_loc_view view;
23518 };
23519
23520 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23521 {
23522 typedef tree compare_type;
23523 static inline hashval_t hash (const inline_entry_data *);
23524 static inline bool equal (const inline_entry_data *, const_tree);
23525 };
23526
23527 /* Hash table routines for inline_entry_data. */
23528
23529 inline hashval_t
23530 inline_entry_data_hasher::hash (const inline_entry_data *data)
23531 {
23532 return htab_hash_pointer (data->block);
23533 }
23534
23535 inline bool
23536 inline_entry_data_hasher::equal (const inline_entry_data *data,
23537 const_tree block)
23538 {
23539 return data->block == block;
23540 }
23541
23542 /* Inlined entry points pending DIE creation in this compilation unit. */
23543
23544 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23545
23546
23547 /* Return TRUE if DECL, which may have been previously generated as
23548 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23549 true if decl (or its origin) is either an extern declaration or a
23550 class/namespace scoped declaration.
23551
23552 The declare_in_namespace support causes us to get two DIEs for one
23553 variable, both of which are declarations. We want to avoid
23554 considering one to be a specification, so we must test for
23555 DECLARATION and DW_AT_declaration. */
23556 static inline bool
23557 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23558 {
23559 return (old_die && TREE_STATIC (decl) && !declaration
23560 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23561 }
23562
23563 /* Return true if DECL is a local static. */
23564
23565 static inline bool
23566 local_function_static (tree decl)
23567 {
23568 gcc_assert (VAR_P (decl));
23569 return TREE_STATIC (decl)
23570 && DECL_CONTEXT (decl)
23571 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23572 }
23573
23574 /* Generate a DIE to represent a declared data object.
23575 Either DECL or ORIGIN must be non-null. */
23576
23577 static void
23578 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23579 {
23580 HOST_WIDE_INT off = 0;
23581 tree com_decl;
23582 tree decl_or_origin = decl ? decl : origin;
23583 tree ultimate_origin;
23584 dw_die_ref var_die;
23585 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23586 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23587 || class_or_namespace_scope_p (context_die));
23588 bool specialization_p = false;
23589 bool no_linkage_name = false;
23590
23591 	 /* While C++ inline static data members have definitions inside the
23592 class, force the first DIE to be a declaration, then let gen_member_die
23593 reparent it to the class context and call gen_variable_die again
23594 to create the outside of the class DIE for the definition. */
23595 if (!declaration
23596 && old_die == NULL
23597 && decl
23598 && DECL_CONTEXT (decl)
23599 && TYPE_P (DECL_CONTEXT (decl))
23600 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23601 {
23602 declaration = true;
23603 if (dwarf_version < 5)
23604 no_linkage_name = true;
23605 }
23606
23607 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23608 if (decl || ultimate_origin)
23609 origin = ultimate_origin;
23610 com_decl = fortran_common (decl_or_origin, &off);
23611
23612 	 /* A symbol in a Fortran common block gets emitted as a child of the common
23613 	 block DIE, in the form of a data member. */
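/* Hedged Fortran sketch:

     common /blk/ i, r

   yields a DW_TAG_common_block DIE for `blk' whose DW_TAG_variable
   children for `i' and `r' are located at offsets from the block's
   DW_AT_location.  */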
23614 if (com_decl)
23615 {
23616 dw_die_ref com_die;
23617 dw_loc_list_ref loc = NULL;
23618 die_node com_die_arg;
23619
23620 var_die = lookup_decl_die (decl_or_origin);
23621 if (var_die)
23622 {
23623 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23624 {
23625 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23626 if (loc)
23627 {
23628 if (off)
23629 {
23630 /* Optimize the common case. */
23631 if (single_element_loc_list_p (loc)
23632 && loc->expr->dw_loc_opc == DW_OP_addr
23633 && loc->expr->dw_loc_next == NULL
23634 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23635 == SYMBOL_REF)
23636 {
23637 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23638 loc->expr->dw_loc_oprnd1.v.val_addr
23639 = plus_constant (GET_MODE (x), x , off);
23640 }
23641 else
23642 loc_list_plus_const (loc, off);
23643 }
23644 add_AT_location_description (var_die, DW_AT_location, loc);
23645 remove_AT (var_die, DW_AT_declaration);
23646 }
23647 }
23648 return;
23649 }
23650
23651 if (common_block_die_table == NULL)
23652 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23653
23654 com_die_arg.decl_id = DECL_UID (com_decl);
23655 com_die_arg.die_parent = context_die;
23656 com_die = common_block_die_table->find (&com_die_arg);
23657 if (! early_dwarf)
23658 loc = loc_list_from_tree (com_decl, 2, NULL);
23659 if (com_die == NULL)
23660 {
23661 const char *cnam
23662 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23663 die_node **slot;
23664
23665 com_die = new_die (DW_TAG_common_block, context_die, decl);
23666 add_name_and_src_coords_attributes (com_die, com_decl);
23667 if (loc)
23668 {
23669 add_AT_location_description (com_die, DW_AT_location, loc);
23670 /* Avoid sharing the same loc descriptor between
23671 DW_TAG_common_block and DW_TAG_variable. */
23672 loc = loc_list_from_tree (com_decl, 2, NULL);
23673 }
23674 else if (DECL_EXTERNAL (decl_or_origin))
23675 add_AT_flag (com_die, DW_AT_declaration, 1);
23676 if (want_pubnames ())
23677 add_pubname_string (cnam, com_die); /* ??? needed? */
23678 com_die->decl_id = DECL_UID (com_decl);
23679 slot = common_block_die_table->find_slot (com_die, INSERT);
23680 *slot = com_die;
23681 }
23682 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23683 {
23684 add_AT_location_description (com_die, DW_AT_location, loc);
23685 loc = loc_list_from_tree (com_decl, 2, NULL);
23686 remove_AT (com_die, DW_AT_declaration);
23687 }
23688 var_die = new_die (DW_TAG_variable, com_die, decl);
23689 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23690 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23691 decl_quals (decl_or_origin), false,
23692 context_die);
23693 add_alignment_attribute (var_die, decl);
23694 add_AT_flag (var_die, DW_AT_external, 1);
23695 if (loc)
23696 {
23697 if (off)
23698 {
23699 /* Optimize the common case. */
23700 if (single_element_loc_list_p (loc)
23701 && loc->expr->dw_loc_opc == DW_OP_addr
23702 && loc->expr->dw_loc_next == NULL
23703 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23704 {
23705 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23706 loc->expr->dw_loc_oprnd1.v.val_addr
23707 = plus_constant (GET_MODE (x), x, off);
23708 }
23709 else
23710 loc_list_plus_const (loc, off);
23711 }
23712 add_AT_location_description (var_die, DW_AT_location, loc);
23713 }
23714 else if (DECL_EXTERNAL (decl_or_origin))
23715 add_AT_flag (var_die, DW_AT_declaration, 1);
23716 if (decl)
23717 equate_decl_number_to_die (decl, var_die);
23718 return;
23719 }
23720
23721 if (old_die)
23722 {
23723 if (declaration)
23724 {
23725 	 /* A declaration that has been previously dumped needs no
23726 further annotations, since it doesn't need location on
23727 the second pass. */
23728 return;
23729 }
23730 else if (decl_will_get_specification_p (old_die, decl, declaration)
23731 && !get_AT (old_die, DW_AT_specification))
23732 {
23733 /* Fall-thru so we can make a new variable die along with a
23734 DW_AT_specification. */
23735 }
23736 else if (origin && old_die->die_parent != context_die)
23737 {
23738 /* If we will be creating an inlined instance, we need a
23739 new DIE that will get annotated with
23740 DW_AT_abstract_origin. */
23741 gcc_assert (!DECL_ABSTRACT_P (decl));
23742 }
23743 else
23744 {
23745 /* If a DIE was dumped early, it still needs location info.
23746 Skip to where we fill the location bits. */
23747 var_die = old_die;
23748
23749 /* ??? In LTRANS we cannot annotate early created variably
23750 modified type DIEs without copying them and adjusting all
23751 references to them. Thus we dumped them again. Also add a
23752 reference to them but beware of -g0 compile and -g link
23753 in which case the reference will be already present. */
23754 tree type = TREE_TYPE (decl_or_origin);
23755 if (in_lto_p
23756 && ! get_AT (var_die, DW_AT_type)
23757 && variably_modified_type_p
23758 (type, decl_function_context (decl_or_origin)))
23759 {
23760 if (decl_by_reference_p (decl_or_origin))
23761 add_type_attribute (var_die, TREE_TYPE (type),
23762 TYPE_UNQUALIFIED, false, context_die);
23763 else
23764 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23765 false, context_die);
23766 }
23767
23768 goto gen_variable_die_location;
23769 }
23770 }
23771
23772 /* For static data members, the declaration in the class is supposed
23773 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23774 also in DWARF2; the specification should still be DW_TAG_variable
23775 referencing the DW_TAG_member DIE. */
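/* Illustrative C++ case:

     struct S { static int x; };   // in-class declaration -> DW_TAG_member
     int S::x;                     // definition -> DW_TAG_variable with
                                   //   DW_AT_specification -> member DIE

   (with DWARF 5 both DIEs use DW_TAG_variable).  */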
23776 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23777 var_die = new_die (DW_TAG_member, context_die, decl);
23778 else
23779 var_die = new_die (DW_TAG_variable, context_die, decl);
23780
23781 if (origin != NULL)
23782 add_abstract_origin_attribute (var_die, origin);
23783
23784 /* Loop unrolling can create multiple blocks that refer to the same
23785 static variable, so we must test for the DW_AT_declaration flag.
23786
23787 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23788 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23789 sharing them.
23790
23791 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23792 else if (decl_will_get_specification_p (old_die, decl, declaration))
23793 {
23794 /* This is a definition of a C++ class level static. */
23795 add_AT_specification (var_die, old_die);
23796 specialization_p = true;
23797 if (DECL_NAME (decl))
23798 {
23799 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23800 struct dwarf_file_data * file_index = lookup_filename (s.file);
23801
23802 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23803 add_AT_file (var_die, DW_AT_decl_file, file_index);
23804
23805 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23806 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23807
23808 if (debug_column_info
23809 && s.column
23810 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23811 != (unsigned) s.column))
23812 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23813
23814 if (old_die->die_tag == DW_TAG_member)
23815 add_linkage_name (var_die, decl);
23816 }
23817 }
23818 else
23819 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23820
23821 if ((origin == NULL && !specialization_p)
23822 || (origin != NULL
23823 && !DECL_ABSTRACT_P (decl_or_origin)
23824 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23825 decl_function_context
23826 (decl_or_origin))))
23827 {
23828 tree type = TREE_TYPE (decl_or_origin);
23829
23830 if (decl_by_reference_p (decl_or_origin))
23831 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23832 context_die);
23833 else
23834 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23835 context_die);
23836 }
23837
23838 if (origin == NULL && !specialization_p)
23839 {
23840 if (TREE_PUBLIC (decl))
23841 add_AT_flag (var_die, DW_AT_external, 1);
23842
23843 if (DECL_ARTIFICIAL (decl))
23844 add_AT_flag (var_die, DW_AT_artificial, 1);
23845
23846 add_alignment_attribute (var_die, decl);
23847
23848 add_accessibility_attribute (var_die, decl);
23849 }
23850
23851 if (declaration)
23852 add_AT_flag (var_die, DW_AT_declaration, 1);
23853
23854 if (decl && (DECL_ABSTRACT_P (decl)
23855 || !old_die || is_declaration_die (old_die)))
23856 equate_decl_number_to_die (decl, var_die);
23857
23858 gen_variable_die_location:
23859 if (! declaration
23860 && (! DECL_ABSTRACT_P (decl_or_origin)
23861 /* Local static vars are shared between all clones/inlines,
23862 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23863 already set. */
23864 || (VAR_P (decl_or_origin)
23865 && TREE_STATIC (decl_or_origin)
23866 && DECL_RTL_SET_P (decl_or_origin))))
23867 {
23868 if (early_dwarf)
23869 add_pubname (decl_or_origin, var_die);
23870 else
23871 add_location_or_const_value_attribute (var_die, decl_or_origin,
23872 decl == NULL);
23873 }
23874 else
23875 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23876
23877 if ((dwarf_version >= 4 || !dwarf_strict)
23878 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23879 DW_AT_const_expr) == 1
23880 && !get_AT (var_die, DW_AT_const_expr)
23881 && !specialization_p)
23882 add_AT_flag (var_die, DW_AT_const_expr, 1);
23883
23884 if (!dwarf_strict)
23885 {
23886 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23887 DW_AT_inline);
23888 if (inl != -1
23889 && !get_AT (var_die, DW_AT_inline)
23890 && !specialization_p)
23891 add_AT_unsigned (var_die, DW_AT_inline, inl);
23892 }
23893 }
23894
23895 /* Generate a DIE to represent a named constant. */
23896
23897 static void
23898 gen_const_die (tree decl, dw_die_ref context_die)
23899 {
23900 dw_die_ref const_die;
23901 tree type = TREE_TYPE (decl);
23902
23903 const_die = lookup_decl_die (decl);
23904 if (const_die)
23905 return;
23906
23907 const_die = new_die (DW_TAG_constant, context_die, decl);
23908 equate_decl_number_to_die (decl, const_die);
23909 add_name_and_src_coords_attributes (const_die, decl);
23910 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23911 if (TREE_PUBLIC (decl))
23912 add_AT_flag (const_die, DW_AT_external, 1);
23913 if (DECL_ARTIFICIAL (decl))
23914 add_AT_flag (const_die, DW_AT_artificial, 1);
23915 tree_add_const_value_attribute_for_decl (const_die, decl);
23916 }
23917
23918 /* Generate a DIE to represent a label identifier. */
23919
23920 static void
23921 gen_label_die (tree decl, dw_die_ref context_die)
23922 {
23923 tree origin = decl_ultimate_origin (decl);
23924 dw_die_ref lbl_die = lookup_decl_die (decl);
23925 rtx insn;
23926 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23927
23928 if (!lbl_die)
23929 {
23930 lbl_die = new_die (DW_TAG_label, context_die, decl);
23931 equate_decl_number_to_die (decl, lbl_die);
23932
23933 if (origin != NULL)
23934 add_abstract_origin_attribute (lbl_die, origin);
23935 else
23936 add_name_and_src_coords_attributes (lbl_die, decl);
23937 }
23938
23939 if (DECL_ABSTRACT_P (decl))
23940 equate_decl_number_to_die (decl, lbl_die);
23941 else if (! early_dwarf)
23942 {
23943 insn = DECL_RTL_IF_SET (decl);
23944
23945 	 /* Deleted labels are programmer-specified labels that have been
23946 eliminated because of various optimizations. We still emit them
23947 here so that it is possible to put breakpoints on them. */
23948 if (insn
23949 && (LABEL_P (insn)
23950 || ((NOTE_P (insn)
23951 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23952 {
23953 /* When optimization is enabled (via -O) some parts of the compiler
23954 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23955 represent source-level labels which were explicitly declared by
23956 the user. This really shouldn't be happening though, so catch
23957 it if it ever does happen. */
23958 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23959
23960 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23961 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23962 }
23963 else if (insn
23964 && NOTE_P (insn)
23965 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23966 && CODE_LABEL_NUMBER (insn) != -1)
23967 {
23968 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23969 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23970 }
23971 }
23972 }
23973
23974 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23975 attributes to the DIE for a block STMT, to describe where the inlined
23976 function was called from. This is similar to add_src_coords_attributes. */
23977
23978 static inline void
23979 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23980 {
23981 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23982
23983 if (dwarf_version >= 3 || !dwarf_strict)
23984 {
23985 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23986 add_AT_unsigned (die, DW_AT_call_line, s.line);
23987 if (debug_column_info && s.column)
23988 add_AT_unsigned (die, DW_AT_call_column, s.column);
23989 }
23990 }
23991
23992
23993 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23994 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23995
23996 static inline void
23997 add_high_low_attributes (tree stmt, dw_die_ref die)
23998 {
23999 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24000
24001 if (inline_entry_data **iedp
24002 = !inline_entry_data_table ? NULL
24003 : inline_entry_data_table->find_slot_with_hash (stmt,
24004 htab_hash_pointer (stmt),
24005 NO_INSERT))
24006 {
24007 inline_entry_data *ied = *iedp;
24008 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24009 gcc_assert (debug_inline_points);
24010 gcc_assert (inlined_function_outer_scope_p (stmt));
24011
24012 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24013 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24014
24015 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24016 && !dwarf_strict)
24017 {
24018 if (!output_asm_line_debug_info ())
24019 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24020 else
24021 {
24022 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24023 /* FIXME: this will resolve to a small number. Could we
24024 possibly emit smaller data? Ideally we'd emit a
24025 uleb128, but that would make the size of DIEs
24026 impossible for the compiler to compute, since it's
24027 the assembler that computes the value of the view
24028 label in this case. Ideally, we'd have a single form
24029 encompassing both the address and the view, and
24030 indirecting them through a table might make things
24031 easier, but even that would be more wasteful,
24032 space-wise, than what we have now. */
24033 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24034 }
24035 }
24036
24037 inline_entry_data_table->clear_slot (iedp);
24038 }
24039
24040 if (BLOCK_FRAGMENT_CHAIN (stmt)
24041 && (dwarf_version >= 3 || !dwarf_strict))
24042 {
24043 tree chain, superblock = NULL_TREE;
24044 dw_die_ref pdie;
24045 dw_attr_node *attr = NULL;
24046
24047 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24048 {
24049 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24050 BLOCK_NUMBER (stmt));
24051 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24052 }
24053
24054 /* Optimize duplicate .debug_ranges lists or even tails of
24055 	 lists.  If this BLOCK has the same ranges as its supercontext,
24056 	 look up the DW_AT_ranges attribute in the supercontext (and
24057 	 recursively so), verify that the ranges_table contains the
24058 	 right values and use it instead of adding a new .debug_ranges entry.  */
24059 for (chain = stmt, pdie = die;
24060 BLOCK_SAME_RANGE (chain);
24061 chain = BLOCK_SUPERCONTEXT (chain))
24062 {
24063 dw_attr_node *new_attr;
24064
24065 pdie = pdie->die_parent;
24066 if (pdie == NULL)
24067 break;
24068 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24069 break;
24070 new_attr = get_AT (pdie, DW_AT_ranges);
24071 if (new_attr == NULL
24072 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24073 break;
24074 attr = new_attr;
24075 superblock = BLOCK_SUPERCONTEXT (chain);
24076 }
24077 if (attr != NULL
24078 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24079 == BLOCK_NUMBER (superblock))
24080 && BLOCK_FRAGMENT_CHAIN (superblock))
24081 {
24082 unsigned long off = attr->dw_attr_val.v.val_offset;
24083 unsigned long supercnt = 0, thiscnt = 0;
24084 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24085 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24086 {
24087 ++supercnt;
24088 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24089 == BLOCK_NUMBER (chain));
24090 }
24091 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24092 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24093 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24094 ++thiscnt;
24095 gcc_assert (supercnt >= thiscnt);
24096 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24097 false);
24098 note_rnglist_head (off + supercnt - thiscnt);
24099 return;
24100 }
24101
24102 unsigned int offset = add_ranges (stmt, true);
24103 add_AT_range_list (die, DW_AT_ranges, offset, false);
24104 note_rnglist_head (offset);
24105
24106 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24107 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24108 do
24109 {
24110 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24111 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24112 chain = BLOCK_FRAGMENT_CHAIN (chain);
24113 }
24114 while (chain);
24115 add_ranges (NULL);
24116 }
24117 else
24118 {
24119 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24120 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24121 BLOCK_NUMBER (stmt));
24122 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24123 BLOCK_NUMBER (stmt));
24124 add_AT_low_high_pc (die, label, label_high, false);
24125 }
24126 }
24127
24128 /* Generate a DIE for a lexical block. */
24129
24130 static void
24131 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24132 {
24133 dw_die_ref old_die = BLOCK_DIE (stmt);
24134 dw_die_ref stmt_die = NULL;
24135 if (!old_die)
24136 {
24137 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24138 BLOCK_DIE (stmt) = stmt_die;
24139 }
24140
24141 if (BLOCK_ABSTRACT (stmt))
24142 {
24143 if (old_die)
24144 {
24145 /* This must have been generated early and it won't even
24146 need location information since it's a DW_AT_inline
24147 function. */
24148 if (flag_checking)
24149 for (dw_die_ref c = context_die; c; c = c->die_parent)
24150 if (c->die_tag == DW_TAG_inlined_subroutine
24151 || c->die_tag == DW_TAG_subprogram)
24152 {
24153 gcc_assert (get_AT (c, DW_AT_inline));
24154 break;
24155 }
24156 return;
24157 }
24158 }
24159 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24160 {
24161 /* If this is an inlined instance, create a new lexical die for
24162 anything below to attach DW_AT_abstract_origin to. */
24163 if (old_die)
24164 {
24165 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24166 BLOCK_DIE (stmt) = stmt_die;
24167 old_die = NULL;
24168 }
24169
24170 tree origin = block_ultimate_origin (stmt);
24171 if (origin != NULL_TREE && origin != stmt)
24172 add_abstract_origin_attribute (stmt_die, origin);
24173 }
24174
24175 if (old_die)
24176 stmt_die = old_die;
24177
24178   /* A non-abstract block whose blocks have already been reordered
24179 should have the instruction range for this block. If so, set the
24180 high/low attributes. */
24181 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24182 {
24183 gcc_assert (stmt_die);
24184 add_high_low_attributes (stmt, stmt_die);
24185 }
24186
24187 decls_for_scope (stmt, stmt_die);
24188 }
24189
24190 /* Generate a DIE for an inlined subprogram. */
24191
24192 static void
24193 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24194 {
24195 tree decl;
24196
24197   /* The instance of the function that is effectively being inlined shall not
24198 be abstract. */
24199 gcc_assert (! BLOCK_ABSTRACT (stmt));
24200
24201 decl = block_ultimate_origin (stmt);
24202
24203 /* Make sure any inlined functions are known to be inlineable. */
24204 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24205 || cgraph_function_possibly_inlined_p (decl));
24206
24207 if (! BLOCK_ABSTRACT (stmt))
24208 {
24209 dw_die_ref subr_die
24210 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24211
24212 if (call_arg_locations || debug_inline_points)
24213 BLOCK_DIE (stmt) = subr_die;
24214 add_abstract_origin_attribute (subr_die, decl);
24215 if (TREE_ASM_WRITTEN (stmt))
24216 add_high_low_attributes (stmt, subr_die);
24217 add_call_src_coords_attributes (stmt, subr_die);
24218
24219 decls_for_scope (stmt, subr_die);
24220 }
24221 }
24222
24223 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24224 the comment for VLR_CONTEXT. */
24225
24226 static void
24227 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24228 {
24229 dw_die_ref decl_die;
24230
24231 if (TREE_TYPE (decl) == error_mark_node)
24232 return;
24233
24234 decl_die = new_die (DW_TAG_member, context_die, decl);
24235 add_name_and_src_coords_attributes (decl_die, decl);
24236 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24237 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24238 context_die);
24239
24240 if (DECL_BIT_FIELD_TYPE (decl))
24241 {
24242 add_byte_size_attribute (decl_die, decl);
24243 add_bit_size_attribute (decl_die, decl);
24244 add_bit_offset_attribute (decl_die, decl, ctx);
24245 }
24246
24247 add_alignment_attribute (decl_die, decl);
24248
24249 /* If we have a variant part offset, then we are supposed to process a member
24250 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24251 trees. */
24252 gcc_assert (ctx->variant_part_offset == NULL_TREE
24253 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24254 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24255 add_data_member_location_attribute (decl_die, decl, ctx);
24256
24257 if (DECL_ARTIFICIAL (decl))
24258 add_AT_flag (decl_die, DW_AT_artificial, 1);
24259
24260 add_accessibility_attribute (decl_die, decl);
24261
24262 /* Equate decl number to die, so that we can look up this decl later on. */
24263 equate_decl_number_to_die (decl, decl_die);
24264 }
24265
24266 /* Generate a DIE for a pointer to a member type. TYPE can be an
24267 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24268 pointer to member function. */
24269
24270 static void
24271 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24272 {
24273 if (lookup_type_die (type))
24274 return;
24275
24276 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24277 scope_die_for (type, context_die), type);
24278
24279 equate_type_number_to_die (type, ptr_die);
24280 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24281 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24282 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24283 context_die);
24284 add_alignment_attribute (ptr_die, type);
24285
24286 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24287 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24288 {
24289 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24290 add_AT_loc (ptr_die, DW_AT_use_location, op);
24291 }
24292 }
24293
24294 static char *producer_string;
24295
24296 /* Return a heap allocated producer string including command line options
24297 if -grecord-gcc-switches. */
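/* Illustrative note, not part of the original source: with
   -grecord-gcc-switches the resulting DW_AT_producer string could look
   like "GNU C11 8.1.0 -mtune=generic -march=x86-64 -g -O2"; without the
   option only the language name and version_string are recorded, e.g.
   "GNU C11 8.1.0".  */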
24298
24299 static char *
24300 gen_producer_string (void)
24301 {
24302 size_t j;
24303 auto_vec<const char *> switches;
24304 const char *language_string = lang_hooks.name;
24305 char *producer, *tail;
24306 const char *p;
24307 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24308 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24309
24310 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24311 switch (save_decoded_options[j].opt_index)
24312 {
24313 case OPT_o:
24314 case OPT_d:
24315 case OPT_dumpbase:
24316 case OPT_dumpdir:
24317 case OPT_auxbase:
24318 case OPT_auxbase_strip:
24319 case OPT_quiet:
24320 case OPT_version:
24321 case OPT_v:
24322 case OPT_w:
24323 case OPT_L:
24324 case OPT_D:
24325 case OPT_I:
24326 case OPT_U:
24327 case OPT_SPECIAL_unknown:
24328 case OPT_SPECIAL_ignore:
24329 case OPT_SPECIAL_deprecated:
24330 case OPT_SPECIAL_program_name:
24331 case OPT_SPECIAL_input_file:
24332 case OPT_grecord_gcc_switches:
24333 case OPT__output_pch_:
24334 case OPT_fdiagnostics_show_location_:
24335 case OPT_fdiagnostics_show_option:
24336 case OPT_fdiagnostics_show_caret:
24337 case OPT_fdiagnostics_show_labels:
24338 case OPT_fdiagnostics_show_line_numbers:
24339 case OPT_fdiagnostics_color_:
24340 case OPT_fverbose_asm:
24341 case OPT____:
24342 case OPT__sysroot_:
24343 case OPT_nostdinc:
24344 case OPT_nostdinc__:
24345 case OPT_fpreprocessed:
24346 case OPT_fltrans_output_list_:
24347 case OPT_fresolution_:
24348 case OPT_fdebug_prefix_map_:
24349 case OPT_fmacro_prefix_map_:
24350 case OPT_ffile_prefix_map_:
24351 case OPT_fcompare_debug:
24352 case OPT_fchecking:
24353 case OPT_fchecking_:
24354 /* Ignore these. */
24355 continue;
24356 default:
24357 if (cl_options[save_decoded_options[j].opt_index].flags
24358 & CL_NO_DWARF_RECORD)
24359 continue;
24360 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24361 == '-');
24362 switch (save_decoded_options[j].canonical_option[0][1])
24363 {
24364 case 'M':
24365 case 'i':
24366 case 'W':
24367 continue;
24368 case 'f':
24369 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24370 "dump", 4) == 0)
24371 continue;
24372 break;
24373 default:
24374 break;
24375 }
24376 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24377 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24378 break;
24379 }
24380
24381 producer = XNEWVEC (char, plen + 1 + len + 1);
24382 tail = producer;
24383 sprintf (tail, "%s %s", language_string, version_string);
24384 tail += plen;
24385
24386 FOR_EACH_VEC_ELT (switches, j, p)
24387 {
24388 len = strlen (p);
24389 *tail = ' ';
24390 memcpy (tail + 1, p, len);
24391 tail += len + 1;
24392 }
24393
24394 *tail = '\0';
24395 return producer;
24396 }
24397
24398 /* Given a C and/or C++ language/version string return the "highest".
24399 C++ is assumed to be "higher" than C in this case. Used for merging
24400 LTO translation unit languages. */
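/* Illustrative example, not part of the original source:
   highest_c_language ("GNU C11", "GNU C++14") returns "GNU C++14",
   since every C++ dialect is ranked above every C dialect, and newer
   standards of the same language rank above older ones.  */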
24401 static const char *
24402 highest_c_language (const char *lang1, const char *lang2)
24403 {
24404 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24405 return "GNU C++17";
24406 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24407 return "GNU C++14";
24408 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24409 return "GNU C++11";
24410 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24411 return "GNU C++98";
24412
24413 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24414 return "GNU C17";
24415 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24416 return "GNU C11";
24417 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24418 return "GNU C99";
24419 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24420 return "GNU C89";
24421
24422 gcc_unreachable ();
24423 }
24424
24425
24426 /* Generate the DIE for the compilation unit. */
24427
24428 static dw_die_ref
24429 gen_compile_unit_die (const char *filename)
24430 {
24431 dw_die_ref die;
24432 const char *language_string = lang_hooks.name;
24433 int language;
24434
24435 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24436
24437 if (filename)
24438 {
24439 add_name_attribute (die, filename);
24440 /* Don't add cwd for <built-in>. */
24441 if (filename[0] != '<')
24442 add_comp_dir_attribute (die);
24443 }
24444
24445 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24446
24447 /* If our producer is LTO try to figure out a common language to use
24448 from the global list of translation units. */
24449 if (strcmp (language_string, "GNU GIMPLE") == 0)
24450 {
24451 unsigned i;
24452 tree t;
24453 const char *common_lang = NULL;
24454
24455 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24456 {
24457 if (!TRANSLATION_UNIT_LANGUAGE (t))
24458 continue;
24459 if (!common_lang)
24460 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24461 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24462 ;
24463 else if (strncmp (common_lang, "GNU C", 5) == 0
24464 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24465 /* Mixing C and C++ is ok, use C++ in that case. */
24466 common_lang = highest_c_language (common_lang,
24467 TRANSLATION_UNIT_LANGUAGE (t));
24468 else
24469 {
24470 /* Fall back to C. */
24471 common_lang = NULL;
24472 break;
24473 }
24474 }
24475
24476 if (common_lang)
24477 language_string = common_lang;
24478 }
24479
24480 language = DW_LANG_C;
24481 if (strncmp (language_string, "GNU C", 5) == 0
24482 && ISDIGIT (language_string[5]))
24483 {
24484 language = DW_LANG_C89;
24485 if (dwarf_version >= 3 || !dwarf_strict)
24486 {
24487 if (strcmp (language_string, "GNU C89") != 0)
24488 language = DW_LANG_C99;
24489
24490 if (dwarf_version >= 5 /* || !dwarf_strict */)
24491 if (strcmp (language_string, "GNU C11") == 0
24492 || strcmp (language_string, "GNU C17") == 0)
24493 language = DW_LANG_C11;
24494 }
24495 }
24496 else if (strncmp (language_string, "GNU C++", 7) == 0)
24497 {
24498 language = DW_LANG_C_plus_plus;
24499 if (dwarf_version >= 5 /* || !dwarf_strict */)
24500 {
24501 if (strcmp (language_string, "GNU C++11") == 0)
24502 language = DW_LANG_C_plus_plus_11;
24503 else if (strcmp (language_string, "GNU C++14") == 0)
24504 language = DW_LANG_C_plus_plus_14;
24505 else if (strcmp (language_string, "GNU C++17") == 0)
24506 /* For now. */
24507 language = DW_LANG_C_plus_plus_14;
24508 }
24509 }
24510 else if (strcmp (language_string, "GNU F77") == 0)
24511 language = DW_LANG_Fortran77;
24512 else if (dwarf_version >= 3 || !dwarf_strict)
24513 {
24514 if (strcmp (language_string, "GNU Ada") == 0)
24515 language = DW_LANG_Ada95;
24516 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24517 {
24518 language = DW_LANG_Fortran95;
24519 if (dwarf_version >= 5 /* || !dwarf_strict */)
24520 {
24521 if (strcmp (language_string, "GNU Fortran2003") == 0)
24522 language = DW_LANG_Fortran03;
24523 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24524 language = DW_LANG_Fortran08;
24525 }
24526 }
24527 else if (strcmp (language_string, "GNU Objective-C") == 0)
24528 language = DW_LANG_ObjC;
24529 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24530 language = DW_LANG_ObjC_plus_plus;
24531 else if (dwarf_version >= 5 || !dwarf_strict)
24532 {
24533 if (strcmp (language_string, "GNU Go") == 0)
24534 language = DW_LANG_Go;
24535 }
24536 }
24537 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24538 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24539 language = DW_LANG_Fortran90;
24540 /* Likewise for Ada. */
24541 else if (strcmp (language_string, "GNU Ada") == 0)
24542 language = DW_LANG_Ada83;
24543
24544 add_AT_unsigned (die, DW_AT_language, language);
24545
24546 switch (language)
24547 {
24548 case DW_LANG_Fortran77:
24549 case DW_LANG_Fortran90:
24550 case DW_LANG_Fortran95:
24551 case DW_LANG_Fortran03:
24552 case DW_LANG_Fortran08:
24553 /* Fortran has case insensitive identifiers and the front-end
24554 lowercases everything. */
24555 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24556 break;
24557 default:
24558 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24559 break;
24560 }
24561 return die;
24562 }
24563
24564 /* Generate the DIE for a base class. */
24565
24566 static void
24567 gen_inheritance_die (tree binfo, tree access, tree type,
24568 dw_die_ref context_die)
24569 {
24570 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24571 struct vlr_context ctx = { type, NULL };
24572
24573 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24574 context_die);
24575 add_data_member_location_attribute (die, binfo, &ctx);
24576
24577 if (BINFO_VIRTUAL_P (binfo))
24578 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24579
24580 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24581 children, otherwise the default is DW_ACCESS_public. In DWARF2
24582 the default has always been DW_ACCESS_private. */
24583 if (access == access_public_node)
24584 {
24585 if (dwarf_version == 2
24586 || context_die->die_tag == DW_TAG_class_type)
24587 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24588 }
24589 else if (access == access_protected_node)
24590 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24591 else if (dwarf_version > 2
24592 && context_die->die_tag != DW_TAG_class_type)
24593 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24594 }
24595
24596 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24597 structure. */
24598 static bool
24599 is_variant_part (tree decl)
24600 {
24601 return (TREE_CODE (decl) == FIELD_DECL
24602 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24603 }
24604
24605 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24606 return the FIELD_DECL. Return NULL_TREE otherwise. */
24607
24608 static tree
24609 analyze_discr_in_predicate (tree operand, tree struct_type)
24610 {
24611 bool continue_stripping = true;
24612 while (continue_stripping)
24613 switch (TREE_CODE (operand))
24614 {
24615 CASE_CONVERT:
24616 operand = TREE_OPERAND (operand, 0);
24617 break;
24618 default:
24619 continue_stripping = false;
24620 break;
24621 }
24622
24623 /* Match field access to members of struct_type only. */
24624 if (TREE_CODE (operand) == COMPONENT_REF
24625 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24626 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24627 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24628 return TREE_OPERAND (operand, 1);
24629 else
24630 return NULL_TREE;
24631 }
24632
24633 /* Check that SRC is a constant integer that can be represented as a native
24634 integer constant (either signed or unsigned). If so, store it into DEST and
24635 return true. Return false otherwise. */
24636
24637 static bool
24638 get_discr_value (tree src, dw_discr_value *dest)
24639 {
24640 tree discr_type = TREE_TYPE (src);
24641
24642 if (lang_hooks.types.get_debug_type)
24643 {
24644 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24645 if (debug_type != NULL)
24646 discr_type = debug_type;
24647 }
24648
24649 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24650 return false;
24651
24652 /* Signedness can vary between the original type and the debug type. This
24653 can happen for character types in Ada for instance: the character type
24654 used for code generation can be signed, to be compatible with the C one,
24655 but from a debugger point of view, it must be unsigned. */
24656 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24657 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24658
24659 if (is_orig_unsigned != is_debug_unsigned)
24660 src = fold_convert (discr_type, src);
24661
24662 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24663 return false;
24664
24665 dest->pos = is_debug_unsigned;
24666 if (is_debug_unsigned)
24667 dest->v.uval = tree_to_uhwi (src);
24668 else
24669 dest->v.sval = tree_to_shwi (src);
24670
24671 return true;
24672 }
24673
24674 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24675 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24676 store NULL_TREE in DISCR_DECL. Otherwise:
24677
24678 - store the discriminant field in STRUCT_TYPE that controls the variant
24679 part to *DISCR_DECL
24680
24681 - put in *DISCR_LISTS_P an array where for each variant, the item
24682 represents the corresponding matching list of discriminant values.
24683
24684 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24685 the above array.
24686
24687 Note that when the array is allocated (i.e. when the analysis is
24688 successful), it is up to the caller to free the array. */
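/* Illustrative sketch, not part of the original source.  For an Ada
   record such as:

     type R (Disc : Integer) is record
        case Disc is
           when 1 .. 4 => A : Integer;
           when 7 | 9  => B : Float;
           when others => null;
        end case;
     end record;

   the variant part is a FIELD_DECL of QUAL_UNION_TYPE whose members
   carry DECL_QUALIFIER predicates along the lines of
   "Disc >= 1 && Disc <= 4", "Disc == 7 || Disc == 9" and
   boolean_true_node for the "others" variant.  The analysis below maps
   these predicates back to *DISCR_DECL (the Disc field) and to one
   discriminant list per variant.  */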
24689
24690 static void
24691 analyze_variants_discr (tree variant_part_decl,
24692 tree struct_type,
24693 tree *discr_decl,
24694 dw_discr_list_ref **discr_lists_p,
24695 unsigned *discr_lists_length)
24696 {
24697 tree variant_part_type = TREE_TYPE (variant_part_decl);
24698 tree variant;
24699 dw_discr_list_ref *discr_lists;
24700 unsigned i;
24701
24702 /* Compute how many variants there are in this variant part. */
24703 *discr_lists_length = 0;
24704 for (variant = TYPE_FIELDS (variant_part_type);
24705 variant != NULL_TREE;
24706 variant = DECL_CHAIN (variant))
24707 ++*discr_lists_length;
24708
24709 *discr_decl = NULL_TREE;
24710 *discr_lists_p
24711 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24712 sizeof (**discr_lists_p));
24713 discr_lists = *discr_lists_p;
24714
24715 /* And then analyze all variants to extract discriminant information for all
24716 of them. This analysis is conservative: as soon as we detect something we
24717 do not support, abort everything and pretend we found nothing. */
24718 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24719 variant != NULL_TREE;
24720 variant = DECL_CHAIN (variant), ++i)
24721 {
24722 tree match_expr = DECL_QUALIFIER (variant);
24723
24724 /* Now, try to analyze the predicate and deduce a discriminant for
24725 it. */
24726 if (match_expr == boolean_true_node)
24727 /* Typically happens for the default variant: it matches all cases that
24728 previous variants rejected. Don't output any matching value for
24729 this one. */
24730 continue;
24731
24732 /* The following loop tries to iterate over each discriminant
24733 possibility: single values or ranges. */
24734 while (match_expr != NULL_TREE)
24735 {
24736 tree next_round_match_expr;
24737 tree candidate_discr = NULL_TREE;
24738 dw_discr_list_ref new_node = NULL;
24739
24740 /* Possibilities are matched one after the other by nested
24741 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24742 continue with the rest at next iteration. */
24743 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24744 {
24745 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24746 match_expr = TREE_OPERAND (match_expr, 1);
24747 }
24748 else
24749 next_round_match_expr = NULL_TREE;
24750
24751 if (match_expr == boolean_false_node)
24752 /* This sub-expression matches nothing: just wait for the next
24753 one. */
24754 ;
24755
24756 else if (TREE_CODE (match_expr) == EQ_EXPR)
24757 {
24758 /* We are matching: <discr_field> == <integer_cst>
24759 This sub-expression matches a single value. */
24760 tree integer_cst = TREE_OPERAND (match_expr, 1);
24761
24762 candidate_discr
24763 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24764 struct_type);
24765
24766 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24767 if (!get_discr_value (integer_cst,
24768 &new_node->dw_discr_lower_bound))
24769 goto abort;
24770 new_node->dw_discr_range = false;
24771 }
24772
24773 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24774 {
24775 /* We are matching:
24776 <discr_field> > <integer_cst>
24777 && <discr_field> < <integer_cst>.
24778 This sub-expression matches the range of values between the
24779 two matched integer constants. Note that comparisons can be
24780 inclusive or exclusive. */
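	      /* Illustrative example, not part of the original source:
		 "Disc >= 1 && Disc <= 4" yields the range [1, 4] directly,
		 while "Disc > 0 && Disc < 5" yields the same range after the
		 +1 / -1 adjustments performed below.  */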
24781 tree candidate_discr_1, candidate_discr_2;
24782 tree lower_cst, upper_cst;
24783 bool lower_cst_included, upper_cst_included;
24784 tree lower_op = TREE_OPERAND (match_expr, 0);
24785 tree upper_op = TREE_OPERAND (match_expr, 1);
24786
24787 /* When the comparison is exclusive, the integer constant is not
24788 the discriminant range bound we are looking for: we will have
24789 to increment or decrement it. */
24790 if (TREE_CODE (lower_op) == GE_EXPR)
24791 lower_cst_included = true;
24792 else if (TREE_CODE (lower_op) == GT_EXPR)
24793 lower_cst_included = false;
24794 else
24795 goto abort;
24796
24797 if (TREE_CODE (upper_op) == LE_EXPR)
24798 upper_cst_included = true;
24799 else if (TREE_CODE (upper_op) == LT_EXPR)
24800 upper_cst_included = false;
24801 else
24802 goto abort;
24803
24804 /* Extract the discriminant from the first operand and check it
24805 		 is consistent with the same analysis in the second
24806 operand. */
24807 candidate_discr_1
24808 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24809 struct_type);
24810 candidate_discr_2
24811 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24812 struct_type);
24813 if (candidate_discr_1 == candidate_discr_2)
24814 candidate_discr = candidate_discr_1;
24815 else
24816 goto abort;
24817
24818 /* Extract bounds from both. */
24819 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24820 lower_cst = TREE_OPERAND (lower_op, 1);
24821 upper_cst = TREE_OPERAND (upper_op, 1);
24822
24823 if (!lower_cst_included)
24824 lower_cst
24825 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24826 build_int_cst (TREE_TYPE (lower_cst), 1));
24827 if (!upper_cst_included)
24828 upper_cst
24829 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24830 build_int_cst (TREE_TYPE (upper_cst), 1));
24831
24832 if (!get_discr_value (lower_cst,
24833 &new_node->dw_discr_lower_bound)
24834 || !get_discr_value (upper_cst,
24835 &new_node->dw_discr_upper_bound))
24836 goto abort;
24837
24838 new_node->dw_discr_range = true;
24839 }
24840
24841 else
24842 /* Unsupported sub-expression: we cannot determine the set of
24843 matching discriminant values. Abort everything. */
24844 goto abort;
24845
24846 	  /* If the discriminant info is not consistent with what we saw so
24847 far, consider the analysis failed and abort everything. */
24848 if (candidate_discr == NULL_TREE
24849 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24850 goto abort;
24851 else
24852 *discr_decl = candidate_discr;
24853
24854 if (new_node != NULL)
24855 {
24856 new_node->dw_discr_next = discr_lists[i];
24857 discr_lists[i] = new_node;
24858 }
24859 match_expr = next_round_match_expr;
24860 }
24861 }
24862
24863 /* If we reach this point, we could match everything we were interested
24864 in. */
24865 return;
24866
24867 abort:
24868 /* Clean all data structure and return no result. */
24869 free (*discr_lists_p);
24870 *discr_lists_p = NULL;
24871 *discr_decl = NULL_TREE;
24872 }
24873
24874 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24875 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24876 under CONTEXT_DIE.
24877
24878 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24879 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24880 this type, which are record types, represent the available variants and each
24881 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24882 values are inferred from these attributes.
24883
24884 In trees, the offsets for the fields inside these sub-records are relative
24885 to the variant part itself, whereas the corresponding DIEs should have
24886 offset attributes that are relative to the embedding record base address.
24887 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24888 must be an expression that computes the offset of the variant part to
24889 describe in DWARF. */
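/* Illustrative sketch, not part of the original source, of the DWARF
   shape this function emits:

     DW_TAG_variant_part
       DW_AT_discr              (reference to the discriminant's DIE)
       DW_TAG_variant
         DW_AT_discr_value / DW_AT_discr_list
         ... member DIEs for this variant ...
       DW_TAG_variant
         ...

   The DW_AT_discr_* attributes are omitted for the default variant.  */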
24890
24891 static void
24892 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24893 dw_die_ref context_die)
24894 {
24895 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24896 tree variant_part_offset = vlr_ctx->variant_part_offset;
24897 struct loc_descr_context ctx = {
24898 vlr_ctx->struct_type, /* context_type */
24899 NULL_TREE, /* base_decl */
24900 NULL, /* dpi */
24901 false, /* placeholder_arg */
24902 false /* placeholder_seen */
24903 };
24904
24905 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24906 NULL_TREE if there is no such field. */
24907 tree discr_decl = NULL_TREE;
24908 dw_discr_list_ref *discr_lists;
24909 unsigned discr_lists_length = 0;
24910 unsigned i;
24911
24912 dw_die_ref dwarf_proc_die = NULL;
24913 dw_die_ref variant_part_die
24914 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24915
24916 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24917
24918 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24919 &discr_decl, &discr_lists, &discr_lists_length);
24920
24921 if (discr_decl != NULL_TREE)
24922 {
24923 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24924
24925 if (discr_die)
24926 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24927 else
24928 /* We have no DIE for the discriminant, so just discard all
24929 	   discriminant information in the output.  */
24930 discr_decl = NULL_TREE;
24931 }
24932
24933 /* If the offset for this variant part is more complex than a constant,
24934 create a DWARF procedure for it so that we will not have to generate DWARF
24935 expressions for it for each member. */
24936 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24937 && (dwarf_version >= 3 || !dwarf_strict))
24938 {
24939 const tree dwarf_proc_fndecl
24940 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24941 build_function_type (TREE_TYPE (variant_part_offset),
24942 NULL_TREE));
24943 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24944 const dw_loc_descr_ref dwarf_proc_body
24945 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24946
24947 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24948 dwarf_proc_fndecl, context_die);
24949 if (dwarf_proc_die != NULL)
24950 variant_part_offset = dwarf_proc_call;
24951 }
24952
24953 /* Output DIEs for all variants. */
24954 i = 0;
24955 for (tree variant = TYPE_FIELDS (variant_part_type);
24956 variant != NULL_TREE;
24957 variant = DECL_CHAIN (variant), ++i)
24958 {
24959 tree variant_type = TREE_TYPE (variant);
24960 dw_die_ref variant_die;
24961
24962 /* All variants (i.e. members of a variant part) are supposed to be
24963 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24964 under these records. */
24965 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24966
24967 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24968 equate_decl_number_to_die (variant, variant_die);
24969
24970 /* Output discriminant values this variant matches, if any. */
24971 if (discr_decl == NULL || discr_lists[i] == NULL)
24972 	/* If we have no discriminant information at all, or no matching
24973 	   values for this variant, this is probably the default variant: as
24974 	   the standard says, don't output any discriminant value/list attribute.  */
24975 ;
24976 else if (discr_lists[i]->dw_discr_next == NULL
24977 && !discr_lists[i]->dw_discr_range)
24978 /* If there is only one accepted value, don't bother outputting a
24979 list. */
24980 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24981 else
24982 add_discr_list (variant_die, discr_lists[i]);
24983
24984 for (tree member = TYPE_FIELDS (variant_type);
24985 member != NULL_TREE;
24986 member = DECL_CHAIN (member))
24987 {
24988 struct vlr_context vlr_sub_ctx = {
24989 vlr_ctx->struct_type, /* struct_type */
24990 NULL /* variant_part_offset */
24991 };
24992 if (is_variant_part (member))
24993 {
24994 /* All offsets for fields inside variant parts are relative to
24995 the top-level embedding RECORD_TYPE's base address. On the
24996 other hand, offsets in GCC's types are relative to the
24997 nested-most variant part. So we have to sum offsets each time
24998 we recurse. */
24999
25000 vlr_sub_ctx.variant_part_offset
25001 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25002 variant_part_offset, byte_position (member));
25003 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25004 }
25005 else
25006 {
25007 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25008 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25009 }
25010 }
25011 }
25012
25013 free (discr_lists);
25014 }
25015
25016 /* Generate a DIE for a class member. */
25017
25018 static void
25019 gen_member_die (tree type, dw_die_ref context_die)
25020 {
25021 tree member;
25022 tree binfo = TYPE_BINFO (type);
25023
25024 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25025
25026 /* If this is not an incomplete type, output descriptions of each of its
25027 members. Note that as we output the DIEs necessary to represent the
25028 members of this record or union type, we will also be trying to output
25029 DIEs to represent the *types* of those members. However the `type'
25030 function (above) will specifically avoid generating type DIEs for member
25031 types *within* the list of member DIEs for this (containing) type except
25032 for those types (of members) which are explicitly marked as also being
25033      members of this (containing) type themselves.  The g++ front end can
25034 force any given type to be treated as a member of some other (containing)
25035 type by setting the TYPE_CONTEXT of the given (member) type to point to
25036 the TREE node representing the appropriate (containing) type. */
25037
25038 /* First output info about the base classes. */
25039 if (binfo)
25040 {
25041 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25042 int i;
25043 tree base;
25044
25045 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25046 gen_inheritance_die (base,
25047 (accesses ? (*accesses)[i] : access_public_node),
25048 type,
25049 context_die);
25050 }
25051
25052 /* Now output info about the data members and type members. */
25053 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25054 {
25055 struct vlr_context vlr_ctx = { type, NULL_TREE };
25056 bool static_inline_p
25057 = (TREE_STATIC (member)
25058 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25059 != -1));
25060
25061 /* Ignore clones. */
25062 if (DECL_ABSTRACT_ORIGIN (member))
25063 continue;
25064
25065 /* If we thought we were generating minimal debug info for TYPE
25066 and then changed our minds, some of the member declarations
25067 may have already been defined. Don't define them again, but
25068 do put them in the right order. */
25069
25070 if (dw_die_ref child = lookup_decl_die (member))
25071 {
25072 /* Handle inline static data members, which only have in-class
25073 declarations. */
25074 dw_die_ref ref = NULL;
25075 if (child->die_tag == DW_TAG_variable
25076 && child->die_parent == comp_unit_die ())
25077 {
25078 ref = get_AT_ref (child, DW_AT_specification);
25079 /* For C++17 inline static data members followed by redundant
25080 out of class redeclaration, we might get here with
25081 child being the DIE created for the out of class
25082 redeclaration and with its DW_AT_specification being
25083 the DIE created for in-class definition. We want to
25084 reparent the latter, and don't want to create another
25085 DIE with DW_AT_specification in that case, because
25086 we already have one. */
25087 if (ref
25088 && static_inline_p
25089 && ref->die_tag == DW_TAG_variable
25090 && ref->die_parent == comp_unit_die ()
25091 && get_AT (ref, DW_AT_specification) == NULL)
25092 {
25093 child = ref;
25094 ref = NULL;
25095 static_inline_p = false;
25096 }
25097 }
25098
25099 if (child->die_tag == DW_TAG_variable
25100 && child->die_parent == comp_unit_die ()
25101 && ref == NULL)
25102 {
25103 reparent_child (child, context_die);
25104 if (dwarf_version < 5)
25105 child->die_tag = DW_TAG_member;
25106 }
25107 else
25108 splice_child_die (context_die, child);
25109 }
25110
25111 /* Do not generate standard DWARF for variant parts if we are generating
25112 the corresponding GNAT encodings: DIEs generated for both would
25113 conflict in our mappings. */
25114 else if (is_variant_part (member)
25115 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25116 {
25117 vlr_ctx.variant_part_offset = byte_position (member);
25118 gen_variant_part (member, &vlr_ctx, context_die);
25119 }
25120 else
25121 {
25122 vlr_ctx.variant_part_offset = NULL_TREE;
25123 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25124 }
25125
25126 /* For C++ inline static data members emit immediately a DW_TAG_variable
25127 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25128 DW_AT_specification. */
25129 if (static_inline_p)
25130 {
25131 int old_extern = DECL_EXTERNAL (member);
25132 DECL_EXTERNAL (member) = 0;
25133 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25134 DECL_EXTERNAL (member) = old_extern;
25135 }
25136 }
25137 }
25138
25139 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25140 is set, we pretend that the type was never defined, so we only get the
25141 member DIEs needed by later specification DIEs. */
25142
25143 static void
25144 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25145 enum debug_info_usage usage)
25146 {
25147 if (TREE_ASM_WRITTEN (type))
25148 {
25149 /* Fill in the bound of variable-length fields in late dwarf if
25150 still incomplete. */
25151 if (!early_dwarf && variably_modified_type_p (type, NULL))
25152 for (tree member = TYPE_FIELDS (type);
25153 member;
25154 member = DECL_CHAIN (member))
25155 fill_variable_array_bounds (TREE_TYPE (member));
25156 return;
25157 }
25158
25159 dw_die_ref type_die = lookup_type_die (type);
25160 dw_die_ref scope_die = 0;
25161 int nested = 0;
25162 int complete = (TYPE_SIZE (type)
25163 && (! TYPE_STUB_DECL (type)
25164 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25165 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25166 complete = complete && should_emit_struct_debug (type, usage);
25167
25168 if (type_die && ! complete)
25169 return;
25170
25171 if (TYPE_CONTEXT (type) != NULL_TREE
25172 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25173 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25174 nested = 1;
25175
25176 scope_die = scope_die_for (type, context_die);
25177
25178   /* Generate child DIEs for template parameters.  */
25179 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25180 schedule_generic_params_dies_gen (type);
25181
25182 if (! type_die || (nested && is_cu_die (scope_die)))
25183 /* First occurrence of type or toplevel definition of nested class. */
25184 {
25185 dw_die_ref old_die = type_die;
25186
25187 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25188 ? record_type_tag (type) : DW_TAG_union_type,
25189 scope_die, type);
25190 equate_type_number_to_die (type, type_die);
25191 if (old_die)
25192 add_AT_specification (type_die, old_die);
25193 else
25194 add_name_attribute (type_die, type_tag (type));
25195 }
25196 else
25197 remove_AT (type_die, DW_AT_declaration);
25198
25199 /* If this type has been completed, then give it a byte_size attribute and
25200 then give a list of members. */
25201 if (complete && !ns_decl)
25202 {
25203 /* Prevent infinite recursion in cases where the type of some member of
25204 this type is expressed in terms of this type itself. */
25205 TREE_ASM_WRITTEN (type) = 1;
25206 add_byte_size_attribute (type_die, type);
25207 add_alignment_attribute (type_die, type);
25208 if (TYPE_STUB_DECL (type) != NULL_TREE)
25209 {
25210 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25211 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25212 }
25213
25214 /* If the first reference to this type was as the return type of an
25215 inline function, then it may not have a parent. Fix this now. */
25216 if (type_die->die_parent == NULL)
25217 add_child_die (scope_die, type_die);
25218
25219 gen_member_die (type, type_die);
25220
25221 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25222 if (TYPE_ARTIFICIAL (type))
25223 add_AT_flag (type_die, DW_AT_artificial, 1);
25224
25225 /* GNU extension: Record what type our vtable lives in. */
25226 if (TYPE_VFIELD (type))
25227 {
25228 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25229
25230 gen_type_die (vtype, context_die);
25231 add_AT_die_ref (type_die, DW_AT_containing_type,
25232 lookup_type_die (vtype));
25233 }
25234 }
25235 else
25236 {
25237 add_AT_flag (type_die, DW_AT_declaration, 1);
25238
25239 /* We don't need to do this for function-local types. */
25240 if (TYPE_STUB_DECL (type)
25241 && ! decl_function_context (TYPE_STUB_DECL (type)))
25242 vec_safe_push (incomplete_types, type);
25243 }
25244
25245 if (get_AT (type_die, DW_AT_name))
25246 add_pubtype (type, type_die);
25247 }
25248
25249 /* Generate a DIE for a subroutine _type_. */
25250
25251 static void
25252 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25253 {
25254 tree return_type = TREE_TYPE (type);
25255 dw_die_ref subr_die
25256 = new_die (DW_TAG_subroutine_type,
25257 scope_die_for (type, context_die), type);
25258
25259 equate_type_number_to_die (type, subr_die);
25260 add_prototyped_attribute (subr_die, type);
25261 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25262 context_die);
25263 add_alignment_attribute (subr_die, type);
25264 gen_formal_types_die (type, subr_die);
25265
25266 if (get_AT (subr_die, DW_AT_name))
25267 add_pubtype (type, subr_die);
25268 if ((dwarf_version >= 5 || !dwarf_strict)
25269 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25270 add_AT_flag (subr_die, DW_AT_reference, 1);
25271 if ((dwarf_version >= 5 || !dwarf_strict)
25272 && lang_hooks.types.type_dwarf_attribute (type,
25273 DW_AT_rvalue_reference) != -1)
25274 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25275 }
25276
25277 /* Generate a DIE for a type definition. */
25278
25279 static void
25280 gen_typedef_die (tree decl, dw_die_ref context_die)
25281 {
25282 dw_die_ref type_die;
25283 tree type;
25284
25285 if (TREE_ASM_WRITTEN (decl))
25286 {
25287 if (DECL_ORIGINAL_TYPE (decl))
25288 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25289 return;
25290 }
25291
25292 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25293 checks in process_scope_var and modified_type_die), this should be called
25294 only for original types. */
25295 gcc_assert (decl_ultimate_origin (decl) == NULL
25296 || decl_ultimate_origin (decl) == decl);
25297
25298 TREE_ASM_WRITTEN (decl) = 1;
25299 type_die = new_die (DW_TAG_typedef, context_die, decl);
25300
25301 add_name_and_src_coords_attributes (type_die, decl);
25302 if (DECL_ORIGINAL_TYPE (decl))
25303 {
25304 type = DECL_ORIGINAL_TYPE (decl);
25305 if (type == error_mark_node)
25306 return;
25307
25308 gcc_assert (type != TREE_TYPE (decl));
25309 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25310 }
25311 else
25312 {
25313 type = TREE_TYPE (decl);
25314 if (type == error_mark_node)
25315 return;
25316
25317 if (is_naming_typedef_decl (TYPE_NAME (type)))
25318 {
25319 /* Here, we are in the case of decl being a typedef naming
25320 an anonymous type, e.g:
25321 typedef struct {...} foo;
25322 In that case TREE_TYPE (decl) is not a typedef variant
25323 type and TYPE_NAME of the anonymous type is set to the
25324 TYPE_DECL of the typedef. This construct is emitted by
25325 the C++ FE.
25326
25327 TYPE is the anonymous struct named by the typedef
25328 DECL. As we need the DW_AT_type attribute of the
25329 DW_TAG_typedef to point to the DIE of TYPE, let's
25330 generate that DIE right away. add_type_attribute
25331 called below will then pick (via lookup_type_die) that
25332 anonymous struct DIE. */
25333 if (!TREE_ASM_WRITTEN (type))
25334 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25335
25336 /* This is a GNU Extension. We are adding a
25337 DW_AT_linkage_name attribute to the DIE of the
25338 anonymous struct TYPE. The value of that attribute
25339 is the name of the typedef decl naming the anonymous
25340 struct. This greatly eases the work of consumers of
25341 this debug info. */
25342 add_linkage_name_raw (lookup_type_die (type), decl);
25343 }
25344 }
25345
25346 add_type_attribute (type_die, type, decl_quals (decl), false,
25347 context_die);
25348
25349 if (is_naming_typedef_decl (decl))
25350 /* We want that all subsequent calls to lookup_type_die with
25351 TYPE in argument yield the DW_TAG_typedef we have just
25352 created. */
25353 equate_type_number_to_die (type, type_die);
25354
25355 add_alignment_attribute (type_die, TREE_TYPE (decl));
25356
25357 add_accessibility_attribute (type_die, decl);
25358
25359 if (DECL_ABSTRACT_P (decl))
25360 equate_decl_number_to_die (decl, type_die);
25361
25362 if (get_AT (type_die, DW_AT_name))
25363 add_pubtype (decl, type_die);
25364 }
25365
25366 /* Generate a DIE for a struct, class, enum or union type. */
25367
25368 static void
25369 gen_tagged_type_die (tree type,
25370 dw_die_ref context_die,
25371 enum debug_info_usage usage)
25372 {
25373 if (type == NULL_TREE
25374 || !is_tagged_type (type))
25375 return;
25376
25377 if (TREE_ASM_WRITTEN (type))
25378 ;
25379 /* If this is a nested type whose containing class hasn't been written
25380 out yet, writing it out will cover this one, too. This does not apply
25381 to instantiations of member class templates; they need to be added to
25382 the containing class as they are generated. FIXME: This hurts the
25383 idea of combining type decls from multiple TUs, since we can't predict
25384 what set of template instantiations we'll get. */
25385 else if (TYPE_CONTEXT (type)
25386 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25387 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25388 {
25389 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25390
25391 if (TREE_ASM_WRITTEN (type))
25392 return;
25393
25394 /* If that failed, attach ourselves to the stub. */
25395 context_die = lookup_type_die (TYPE_CONTEXT (type));
25396 }
25397 else if (TYPE_CONTEXT (type) != NULL_TREE
25398 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25399 {
25400 /* If this type is local to a function that hasn't been written
25401 out yet, use a NULL context for now; it will be fixed up in
25402 decls_for_scope. */
25403 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25404 /* A declaration DIE doesn't count; nested types need to go in the
25405 specification. */
25406 if (context_die && is_declaration_die (context_die))
25407 context_die = NULL;
25408 }
25409 else
25410 context_die = declare_in_namespace (type, context_die);
25411
25412 if (TREE_CODE (type) == ENUMERAL_TYPE)
25413 {
25414 /* This might have been written out by the call to
25415 declare_in_namespace. */
25416 if (!TREE_ASM_WRITTEN (type))
25417 gen_enumeration_type_die (type, context_die);
25418 }
25419 else
25420 gen_struct_or_union_type_die (type, context_die, usage);
25421
25422 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25423 it up if it is ever completed. gen_*_type_die will set it for us
25424 when appropriate. */
25425 }
25426
25427 /* Generate a type description DIE. */
25428
25429 static void
25430 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25431 enum debug_info_usage usage)
25432 {
25433 struct array_descr_info info;
25434
25435 if (type == NULL_TREE || type == error_mark_node)
25436 return;
25437
25438 if (flag_checking && type)
25439 verify_type (type);
25440
25441 if (TYPE_NAME (type) != NULL_TREE
25442 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25443 && is_redundant_typedef (TYPE_NAME (type))
25444 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25445 /* The DECL of this type is a typedef we don't want to emit debug
25446        info for but we want debug info for its underlying type.
25447 This can happen for e.g, the injected-class-name of a C++
25448 type. */
25449 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25450
25451 /* If TYPE is a typedef type variant, let's generate debug info
25452 for the parent typedef which TYPE is a type of. */
25453 if (typedef_variant_p (type))
25454 {
25455 if (TREE_ASM_WRITTEN (type))
25456 return;
25457
25458 tree name = TYPE_NAME (type);
25459 tree origin = decl_ultimate_origin (name);
25460 if (origin != NULL && origin != name)
25461 {
25462 gen_decl_die (origin, NULL, NULL, context_die);
25463 return;
25464 }
25465
25466 /* Prevent broken recursion; we can't hand off to the same type. */
25467 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25468
25469 /* Give typedefs the right scope. */
25470 context_die = scope_die_for (type, context_die);
25471
25472 TREE_ASM_WRITTEN (type) = 1;
25473
25474 gen_decl_die (name, NULL, NULL, context_die);
25475 return;
25476 }
25477
25478 /* If type is an anonymous tagged type named by a typedef, let's
25479 generate debug info for the typedef. */
25480 if (is_naming_typedef_decl (TYPE_NAME (type)))
25481 {
25482 /* Give typedefs the right scope. */
25483 context_die = scope_die_for (type, context_die);
25484
25485 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25486 return;
25487 }
25488
25489 if (lang_hooks.types.get_debug_type)
25490 {
25491 tree debug_type = lang_hooks.types.get_debug_type (type);
25492
25493 if (debug_type != NULL_TREE && debug_type != type)
25494 {
25495 gen_type_die_with_usage (debug_type, context_die, usage);
25496 return;
25497 }
25498 }
25499
25500 /* We are going to output a DIE to represent the unqualified version
25501 of this type (i.e. without any const or volatile qualifiers) so
25502 get the main variant (i.e. the unqualified version) of this type
25503 now. (Vectors and arrays are special because the debugging info is in the
25504 cloned type itself. Similarly function/method types can contain extra
25505 ref-qualification). */
25506 if (TREE_CODE (type) == FUNCTION_TYPE
25507 || TREE_CODE (type) == METHOD_TYPE)
25508 {
25509 /* For function/method types, can't use type_main_variant here,
25510 because that can have different ref-qualifiers for C++,
25511 but try to canonicalize. */
25512 tree main = TYPE_MAIN_VARIANT (type);
25513 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25514 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25515 && check_base_type (t, main)
25516 && check_lang_type (t, type))
25517 {
25518 type = t;
25519 break;
25520 }
25521 }
25522 else if (TREE_CODE (type) != VECTOR_TYPE
25523 && TREE_CODE (type) != ARRAY_TYPE)
25524 type = type_main_variant (type);
25525
25526 /* If this is an array type with hidden descriptor, handle it first. */
25527 if (!TREE_ASM_WRITTEN (type)
25528 && lang_hooks.types.get_array_descr_info)
25529 {
25530 memset (&info, 0, sizeof (info));
25531 if (lang_hooks.types.get_array_descr_info (type, &info))
25532 {
25533 /* Fortran sometimes emits array types with no dimension. */
25534 gcc_assert (info.ndimensions >= 0
25535 && (info.ndimensions
25536 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25537 gen_descr_array_type_die (type, &info, context_die);
25538 TREE_ASM_WRITTEN (type) = 1;
25539 return;
25540 }
25541 }
25542
25543 if (TREE_ASM_WRITTEN (type))
25544 {
25545 /* Variable-length types may be incomplete even if
25546 TREE_ASM_WRITTEN. For such types, fall through to
25547 gen_array_type_die() and possibly fill in
25548 DW_AT_{upper,lower}_bound attributes. */
25549 if ((TREE_CODE (type) != ARRAY_TYPE
25550 && TREE_CODE (type) != RECORD_TYPE
25551 && TREE_CODE (type) != UNION_TYPE
25552 && TREE_CODE (type) != QUAL_UNION_TYPE)
25553 || !variably_modified_type_p (type, NULL))
25554 return;
25555 }
25556
25557 switch (TREE_CODE (type))
25558 {
25559 case ERROR_MARK:
25560 break;
25561
25562 case POINTER_TYPE:
25563 case REFERENCE_TYPE:
25564 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25565 ensures that the gen_type_die recursion will terminate even if the
25566 type is recursive. Recursive types are possible in Ada. */
25567 /* ??? We could perhaps do this for all types before the switch
25568 statement. */
25569 TREE_ASM_WRITTEN (type) = 1;
25570
25571 /* For these types, all that is required is that we output a DIE (or a
25572 set of DIEs) to represent the "basis" type. */
25573 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25574 DINFO_USAGE_IND_USE);
25575 break;
25576
25577 case OFFSET_TYPE:
25578 /* This code is used for C++ pointer-to-data-member types.
25579 Output a description of the relevant class type. */
25580 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25581 DINFO_USAGE_IND_USE);
25582
25583 /* Output a description of the type of the object pointed to. */
25584 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25585 DINFO_USAGE_IND_USE);
25586
25587 /* Now output a DIE to represent this pointer-to-data-member type
25588 itself. */
25589 gen_ptr_to_mbr_type_die (type, context_die);
25590 break;
25591
25592 case FUNCTION_TYPE:
25593 /* Force out return type (in case it wasn't forced out already). */
25594 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25595 DINFO_USAGE_DIR_USE);
25596 gen_subroutine_type_die (type, context_die);
25597 break;
25598
25599 case METHOD_TYPE:
25600 /* Force out return type (in case it wasn't forced out already). */
25601 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25602 DINFO_USAGE_DIR_USE);
25603 gen_subroutine_type_die (type, context_die);
25604 break;
25605
25606 case ARRAY_TYPE:
25607 case VECTOR_TYPE:
25608 gen_array_type_die (type, context_die);
25609 break;
25610
25611 case ENUMERAL_TYPE:
25612 case RECORD_TYPE:
25613 case UNION_TYPE:
25614 case QUAL_UNION_TYPE:
25615 gen_tagged_type_die (type, context_die, usage);
25616 return;
25617
25618 case VOID_TYPE:
25619 case INTEGER_TYPE:
25620 case REAL_TYPE:
25621 case FIXED_POINT_TYPE:
25622 case COMPLEX_TYPE:
25623 case BOOLEAN_TYPE:
25624 /* No DIEs needed for fundamental types. */
25625 break;
25626
25627 case NULLPTR_TYPE:
25628 case LANG_TYPE:
25629 /* Just use DW_TAG_unspecified_type. */
25630 {
25631 dw_die_ref type_die = lookup_type_die (type);
25632 if (type_die == NULL)
25633 {
25634 tree name = TYPE_IDENTIFIER (type);
25635 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25636 type);
25637 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25638 equate_type_number_to_die (type, type_die);
25639 }
25640 }
25641 break;
25642
25643 default:
25644 if (is_cxx_auto (type))
25645 {
25646 tree name = TYPE_IDENTIFIER (type);
25647 dw_die_ref *die = (name == get_identifier ("auto")
25648 ? &auto_die : &decltype_auto_die);
25649 if (!*die)
25650 {
25651 *die = new_die (DW_TAG_unspecified_type,
25652 comp_unit_die (), NULL_TREE);
25653 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25654 }
25655 equate_type_number_to_die (type, *die);
25656 break;
25657 }
25658 gcc_unreachable ();
25659 }
25660
25661 TREE_ASM_WRITTEN (type) = 1;
25662 }
25663
25664 static void
25665 gen_type_die (tree type, dw_die_ref context_die)
25666 {
25667 if (type != error_mark_node)
25668 {
25669 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25670 if (flag_checking)
25671 {
25672 dw_die_ref die = lookup_type_die (type);
25673 if (die)
25674 check_die (die);
25675 }
25676 }
25677 }
25678
25679 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25680 things which are local to the given block. */
25681
25682 static void
25683 gen_block_die (tree stmt, dw_die_ref context_die)
25684 {
25685 int must_output_die = 0;
25686 bool inlined_func;
25687
25688 /* Ignore blocks that are NULL. */
25689 if (stmt == NULL_TREE)
25690 return;
25691
25692 inlined_func = inlined_function_outer_scope_p (stmt);
25693
25694 /* If the block is one fragment of a non-contiguous block, do not
25695 process the variables, since they will have been done by the
25696 origin block. Do process subblocks. */
25697 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25698 {
25699 tree sub;
25700
25701 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25702 gen_block_die (sub, context_die);
25703
25704 return;
25705 }
25706
25707 /* Determine if we need to output any Dwarf DIEs at all to represent this
25708 block. */
25709 if (inlined_func)
25710 /* The outer scopes for inlinings *must* always be represented. We
25711 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25712 must_output_die = 1;
25713 else if (BLOCK_DIE (stmt))
25714 /* If we already have a DIE then it was filled early. Meanwhile
25715 we might have pruned all BLOCK_VARS as optimized out but we
25716 still want to generate high/low PC attributes so output it. */
25717 must_output_die = 1;
25718 else if (TREE_USED (stmt)
25719 || TREE_ASM_WRITTEN (stmt)
25720 || BLOCK_ABSTRACT (stmt))
25721 {
25722 /* Determine if this block directly contains any "significant"
25723 local declarations which we will need to output DIEs for. */
25724 if (debug_info_level > DINFO_LEVEL_TERSE)
25725 {
25726 /* We are not in terse mode so any local declaration that
25727 is not ignored for debug purposes counts as being a
25728 "significant" one. */
25729 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25730 must_output_die = 1;
25731 else
25732 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25733 if (!DECL_IGNORED_P (var))
25734 {
25735 must_output_die = 1;
25736 break;
25737 }
25738 }
25739 else if (!dwarf2out_ignore_block (stmt))
25740 must_output_die = 1;
25741 }
25742
25743 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25744 DIE for any block which contains no significant local declarations at
25745 all. Rather, in such cases we just call `decls_for_scope' so that any
25746 needed Dwarf info for any sub-blocks will get properly generated. Note
25747 that in terse mode, our definition of what constitutes a "significant"
25748 local declaration gets restricted to include only inlined function
25749 instances and local (nested) function definitions. */
25750 if (must_output_die)
25751 {
25752 if (inlined_func)
25753 {
25754 /* If STMT block is abstract, that means we have been called
25755 indirectly from dwarf2out_abstract_function.
25756 That function rightfully marks the descendant blocks (of
25757 the abstract function it is dealing with) as being abstract,
25758 precisely to prevent us from emitting any
25759 DW_TAG_inlined_subroutine DIE as a descendant
25760 of an abstract function instance. So in that case, we should
25761 not call gen_inlined_subroutine_die.
25762
25763 Later though, when cgraph asks dwarf2out to emit info
25764 for the concrete instance of the function decl into which
25765 the concrete instance of STMT got inlined, the latter will lead
25766 to the generation of a DW_TAG_inlined_subroutine DIE. */
25767 if (! BLOCK_ABSTRACT (stmt))
25768 gen_inlined_subroutine_die (stmt, context_die);
25769 }
25770 else
25771 gen_lexical_block_die (stmt, context_die);
25772 }
25773 else
25774 decls_for_scope (stmt, context_die);
25775 }
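/* Editor's note: a minimal sketch of when gen_block_die emits a
   DW_TAG_lexical_block DIE.  The snippet is hypothetical; "use" stands for
   any call that keeps the local alive:

       void f (void)
       {
         {                  // inner BLOCK
           int tmp = 42;    // a significant local at -g2, so this BLOCK
           use (&tmp);      // gets its own DW_TAG_lexical_block DIE
         }
       }

   A block with no significant locals only has decls_for_scope applied to
   its sub-blocks, as the comment above describes.  */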
25776
25777 /* Process variable DECL (or variable with origin ORIGIN) within
25778 block STMT and add it to CONTEXT_DIE. */
25779 static void
25780 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25781 {
25782 dw_die_ref die;
25783 tree decl_or_origin = decl ? decl : origin;
25784
25785 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25786 die = lookup_decl_die (decl_or_origin);
25787 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25788 {
25789 if (TYPE_DECL_IS_STUB (decl_or_origin))
25790 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25791 else
25792 die = lookup_decl_die (decl_or_origin);
25793 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25794 if (! die && ! early_dwarf)
25795 return;
25796 }
25797 else
25798 die = NULL;
25799
25800 /* Avoid creating DIEs for local typedefs and concrete static variables that
25801 will only be pruned later. */
25802 if ((origin || decl_ultimate_origin (decl))
25803 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25804 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25805 {
25806 origin = decl_ultimate_origin (decl_or_origin);
25807 if (decl && VAR_P (decl) && die != NULL)
25808 {
25809 die = lookup_decl_die (origin);
25810 if (die != NULL)
25811 equate_decl_number_to_die (decl, die);
25812 }
25813 return;
25814 }
25815
25816 if (die != NULL && die->die_parent == NULL)
25817 add_child_die (context_die, die);
25818 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25819 {
25820 if (early_dwarf)
25821 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25822 stmt, context_die);
25823 }
25824 else
25825 {
25826 if (decl && DECL_P (decl))
25827 {
25828 die = lookup_decl_die (decl);
25829
25830 /* Early created DIEs do not have a parent as the decls refer
25831 to the function as DECL_CONTEXT rather than the BLOCK. */
25832 if (die && die->die_parent == NULL)
25833 {
25834 gcc_assert (in_lto_p);
25835 add_child_die (context_die, die);
25836 }
25837 }
25838
25839 gen_decl_die (decl, origin, NULL, context_die);
25840 }
25841 }
25842
25843 /* Generate all of the decls declared within a given scope and (recursively)
25844 all of its sub-blocks. */
25845
25846 static void
25847 decls_for_scope (tree stmt, dw_die_ref context_die)
25848 {
25849 tree decl;
25850 unsigned int i;
25851 tree subblocks;
25852
25853 /* Ignore NULL blocks. */
25854 if (stmt == NULL_TREE)
25855 return;
25856
25857 /* Output the DIEs to represent all of the data objects and typedefs
25858 declared directly within this block but not within any nested
25859 sub-blocks. Also, nested function and tag DIEs have been
25860 generated with a parent of NULL; fix that up now. We don't
25861 have to do this if we're at -g1. */
25862 if (debug_info_level > DINFO_LEVEL_TERSE)
25863 {
25864 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25865 process_scope_var (stmt, decl, NULL_TREE, context_die);
25866 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25867 origin - avoid doing this twice as we have no good way to see
25868 if we've done it once already. */
25869 if (! early_dwarf)
25870 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25871 {
25872 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25873 if (decl == current_function_decl)
25874 /* Ignore declarations of the current function: although they
25875 are declarations, gen_subprogram_die would treat them
25876 as definitions again (because they are equal to
25877 current_function_decl) and endlessly recurse. */;
25878 else if (TREE_CODE (decl) == FUNCTION_DECL)
25879 process_scope_var (stmt, decl, NULL_TREE, context_die);
25880 else
25881 process_scope_var (stmt, NULL_TREE, decl, context_die);
25882 }
25883 }
25884
25885 /* Even if we're at -g1, we need to process the subblocks in order to get
25886 inlined call information. */
25887
25888 /* Output the DIEs to represent all sub-blocks (and the items declared
25889 therein) of this block. */
25890 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25891 subblocks != NULL;
25892 subblocks = BLOCK_CHAIN (subblocks))
25893 gen_block_die (subblocks, context_die);
25894 }
25895
25896 /* Is this a typedef we can avoid emitting? */
25897
25898 static bool
25899 is_redundant_typedef (const_tree decl)
25900 {
25901 if (TYPE_DECL_IS_STUB (decl))
25902 return true;
25903
25904 if (DECL_ARTIFICIAL (decl)
25905 && DECL_CONTEXT (decl)
25906 && is_tagged_type (DECL_CONTEXT (decl))
25907 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25908 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25909 /* Also ignore the artificial member typedef for the class name. */
25910 return true;
25911
25912 return false;
25913 }
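/* Editor's note: a hedged C++ illustration of the second test above.  For

       struct S { int i; };

   the C++ front end creates an artificial member TYPE_DECL for the class
   name "S" inside S itself; its DECL_NAME matches the TYPE_NAME of its
   DECL_CONTEXT, so is_redundant_typedef returns true and no separate
   DW_TAG_typedef is emitted for it.  */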
25914
25915 /* Return TRUE if DECL is a typedef that names a type for linkage
25916 purposes. Such typedefs are produced by the C++ FE for
25917 constructs like:
25918
25919 typedef struct {...} foo;
25920
25921 In that case, there is no typedef variant type produced for foo.
25922 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25923 struct type. */
25924
25925 static bool
25926 is_naming_typedef_decl (const_tree decl)
25927 {
25928 if (decl == NULL_TREE
25929 || TREE_CODE (decl) != TYPE_DECL
25930 || DECL_NAMELESS (decl)
25931 || !is_tagged_type (TREE_TYPE (decl))
25932 || DECL_IS_BUILTIN (decl)
25933 || is_redundant_typedef (decl)
25934 /* It looks like Ada produces TYPE_DECLs that are very similar
25935 to C++ naming typedefs but that have different
25936 semantics. Let's be specific to C++ for now. */
25937 || !is_cxx (decl))
25938 return FALSE;
25939
25940 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25941 && TYPE_NAME (TREE_TYPE (decl)) == decl
25942 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25943 != TYPE_NAME (TREE_TYPE (decl))));
25944 }
25945
25946 /* Looks up the DIE for a context. */
25947
25948 static inline dw_die_ref
25949 lookup_context_die (tree context)
25950 {
25951 if (context)
25952 {
25953 /* Find die that represents this context. */
25954 if (TYPE_P (context))
25955 {
25956 context = TYPE_MAIN_VARIANT (context);
25957 dw_die_ref ctx = lookup_type_die (context);
25958 if (!ctx)
25959 return NULL;
25960 return strip_naming_typedef (context, ctx);
25961 }
25962 else
25963 return lookup_decl_die (context);
25964 }
25965 return comp_unit_die ();
25966 }
25967
25968 /* Returns the DIE for a context. */
25969
25970 static inline dw_die_ref
25971 get_context_die (tree context)
25972 {
25973 if (context)
25974 {
25975 /* Find die that represents this context. */
25976 if (TYPE_P (context))
25977 {
25978 context = TYPE_MAIN_VARIANT (context);
25979 return strip_naming_typedef (context, force_type_die (context));
25980 }
25981 else
25982 return force_decl_die (context);
25983 }
25984 return comp_unit_die ();
25985 }
25986
25987 /* Returns the DIE for decl. A DIE will always be returned. */
25988
25989 static dw_die_ref
25990 force_decl_die (tree decl)
25991 {
25992 dw_die_ref decl_die;
25993 unsigned saved_external_flag;
25994 tree save_fn = NULL_TREE;
25995 decl_die = lookup_decl_die (decl);
25996 if (!decl_die)
25997 {
25998 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25999
26000 decl_die = lookup_decl_die (decl);
26001 if (decl_die)
26002 return decl_die;
26003
26004 switch (TREE_CODE (decl))
26005 {
26006 case FUNCTION_DECL:
26007 /* Clear current_function_decl, so that gen_subprogram_die thinks
26008 that this is a declaration. At this point, we just want to force
26009 the declaration DIE. */
26010 save_fn = current_function_decl;
26011 current_function_decl = NULL_TREE;
26012 gen_subprogram_die (decl, context_die);
26013 current_function_decl = save_fn;
26014 break;
26015
26016 case VAR_DECL:
26017 /* Set the external flag to force a declaration DIE. Restore it after
26018 the gen_decl_die() call. */
26019 saved_external_flag = DECL_EXTERNAL (decl);
26020 DECL_EXTERNAL (decl) = 1;
26021 gen_decl_die (decl, NULL, NULL, context_die);
26022 DECL_EXTERNAL (decl) = saved_external_flag;
26023 break;
26024
26025 case NAMESPACE_DECL:
26026 if (dwarf_version >= 3 || !dwarf_strict)
26027 dwarf2out_decl (decl);
26028 else
26029 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26030 decl_die = comp_unit_die ();
26031 break;
26032
26033 case TRANSLATION_UNIT_DECL:
26034 decl_die = comp_unit_die ();
26035 break;
26036
26037 default:
26038 gcc_unreachable ();
26039 }
26040
26041 /* We should be able to find the DIE now. */
26042 if (!decl_die)
26043 decl_die = lookup_decl_die (decl);
26044 gcc_assert (decl_die);
26045 }
26046
26047 return decl_die;
26048 }
26049
26050 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
26051 always returned. */
26052
26053 static dw_die_ref
26054 force_type_die (tree type)
26055 {
26056 dw_die_ref type_die;
26057
26058 type_die = lookup_type_die (type);
26059 if (!type_die)
26060 {
26061 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26062
26063 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26064 false, context_die);
26065 gcc_assert (type_die);
26066 }
26067 return type_die;
26068 }
26069
26070 /* Force out any required namespaces to be able to output DECL,
26071 and return the new context_die for it, if it's changed. */
26072
26073 static dw_die_ref
26074 setup_namespace_context (tree thing, dw_die_ref context_die)
26075 {
26076 tree context = (DECL_P (thing)
26077 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26078 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26079 /* Force out the namespace. */
26080 context_die = force_decl_die (context);
26081
26082 return context_die;
26083 }
26084
26085 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26086 type) within its namespace, if appropriate.
26087
26088 For compatibility with older debuggers, namespace DIEs only contain
26089 declarations; all definitions are emitted at CU scope, with
26090 DW_AT_specification pointing to the declaration (like with class
26091 members). */
26092
26093 static dw_die_ref
26094 declare_in_namespace (tree thing, dw_die_ref context_die)
26095 {
26096 dw_die_ref ns_context;
26097
26098 if (debug_info_level <= DINFO_LEVEL_TERSE)
26099 return context_die;
26100
26101 /* External declarations in the local scope only need to be emitted
26102 once, not once in the namespace and once in the scope.
26103
26104 This avoids declaring the `extern' below in the
26105 namespace DIE as well as in the innermost scope:
26106
26107 namespace S
26108 {
26109 int i=5;
26110 int foo()
26111 {
26112 int i=8;
26113 extern int i;
26114 return i;
26115 }
26116 }
26117 */
26118 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26119 return context_die;
26120
26121 /* If this decl is from an inlined function, then don't try to emit it in its
26122 namespace, as we will get confused. It would have already been emitted
26123 when the abstract instance of the inline function was emitted anyway. */
26124 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26125 return context_die;
26126
26127 ns_context = setup_namespace_context (thing, context_die);
26128
26129 if (ns_context != context_die)
26130 {
26131 if (is_fortran ())
26132 return ns_context;
26133 if (DECL_P (thing))
26134 gen_decl_die (thing, NULL, NULL, ns_context);
26135 else
26136 gen_type_die (thing, ns_context);
26137 }
26138 return context_die;
26139 }
26140
26141 /* Generate a DIE for a namespace or namespace alias. */
26142
26143 static void
26144 gen_namespace_die (tree decl, dw_die_ref context_die)
26145 {
26146 dw_die_ref namespace_die;
26147
26148 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26149 they are an alias of. */
26150 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26151 {
26152 /* Output a real namespace or module. */
26153 context_die = setup_namespace_context (decl, comp_unit_die ());
26154 namespace_die = new_die (is_fortran ()
26155 ? DW_TAG_module : DW_TAG_namespace,
26156 context_die, decl);
26157 /* For Fortran modules defined in a different CU, don't add src coords. */
26158 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26159 {
26160 const char *name = dwarf2_name (decl, 0);
26161 if (name)
26162 add_name_attribute (namespace_die, name);
26163 }
26164 else
26165 add_name_and_src_coords_attributes (namespace_die, decl);
26166 if (DECL_EXTERNAL (decl))
26167 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26168 equate_decl_number_to_die (decl, namespace_die);
26169 }
26170 else
26171 {
26172 /* Output a namespace alias. */
26173
26174 /* Force out the namespace we are an alias of, if necessary. */
26175 dw_die_ref origin_die
26176 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26177
26178 if (DECL_FILE_SCOPE_P (decl)
26179 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26180 context_die = setup_namespace_context (decl, comp_unit_die ());
26181 /* Now create the namespace alias DIE. */
26182 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26183 add_name_and_src_coords_attributes (namespace_die, decl);
26184 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26185 equate_decl_number_to_die (decl, namespace_die);
26186 }
26187 if ((dwarf_version >= 5 || !dwarf_strict)
26188 && lang_hooks.decls.decl_dwarf_attribute (decl,
26189 DW_AT_export_symbols) == 1)
26190 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26191
26192 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26193 if (want_pubnames ())
26194 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26195 }
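/* Editor's note: a minimal C++ sketch of the two branches above; the names
   are hypothetical:

       namespace outer { namespace inner { int v; } }  // real namespaces get
                                                        // DW_TAG_namespace DIEs
       namespace ni = outer::inner;                     // the alias gets a
                                                        // DW_TAG_imported_declaration
                                                        // whose DW_AT_import refers
                                                        // to inner's DIE

   For Fortran, real modules use DW_TAG_module instead of DW_TAG_namespace.  */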
26196
26197 /* Generate Dwarf debug information for a decl described by DECL.
26198 The return value is currently only meaningful for PARM_DECLs;
26199 for all other decls it returns NULL.
26200
26201 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26202 It can be NULL otherwise. */
26203
26204 static dw_die_ref
26205 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26206 dw_die_ref context_die)
26207 {
26208 tree decl_or_origin = decl ? decl : origin;
26209 tree class_origin = NULL, ultimate_origin;
26210
26211 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26212 return NULL;
26213
26214 switch (TREE_CODE (decl_or_origin))
26215 {
26216 case ERROR_MARK:
26217 break;
26218
26219 case CONST_DECL:
26220 if (!is_fortran () && !is_ada ())
26221 {
26222 /* The individual enumerators of an enum type get output when we output
26223 the Dwarf representation of the relevant enum type itself. */
26224 break;
26225 }
26226
26227 /* Emit its type. */
26228 gen_type_die (TREE_TYPE (decl), context_die);
26229
26230 /* And its containing namespace. */
26231 context_die = declare_in_namespace (decl, context_die);
26232
26233 gen_const_die (decl, context_die);
26234 break;
26235
26236 case FUNCTION_DECL:
26237 #if 0
26238 /* FIXME */
26239 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26240 on local redeclarations of global functions. That seems broken. */
26241 if (current_function_decl != decl)
26242 /* This is only a declaration. */;
26243 #endif
26244
26245 /* We should have abstract copies already and should not generate
26246 stray type DIEs in late LTO dumping. */
26247 if (! early_dwarf)
26248 ;
26249
26250 /* If we're emitting a clone, emit info for the abstract instance. */
26251 else if (origin || DECL_ORIGIN (decl) != decl)
26252 dwarf2out_abstract_function (origin
26253 ? DECL_ORIGIN (origin)
26254 : DECL_ABSTRACT_ORIGIN (decl));
26255
26256 /* If we're emitting a possibly inlined function emit it as
26257 abstract instance. */
26258 else if (cgraph_function_possibly_inlined_p (decl)
26259 && ! DECL_ABSTRACT_P (decl)
26260 && ! class_or_namespace_scope_p (context_die)
26261 /* dwarf2out_abstract_function won't emit a die if this is just
26262 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26263 that case, because that works only if we have a die. */
26264 && DECL_INITIAL (decl) != NULL_TREE)
26265 dwarf2out_abstract_function (decl);
26266
26267 /* Otherwise we're emitting the primary DIE for this decl. */
26268 else if (debug_info_level > DINFO_LEVEL_TERSE)
26269 {
26270 /* Before we describe the FUNCTION_DECL itself, make sure that we
26271 have its containing type. */
26272 if (!origin)
26273 origin = decl_class_context (decl);
26274 if (origin != NULL_TREE)
26275 gen_type_die (origin, context_die);
26276
26277 /* And its return type. */
26278 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26279
26280 /* And its virtual context. */
26281 if (DECL_VINDEX (decl) != NULL_TREE)
26282 gen_type_die (DECL_CONTEXT (decl), context_die);
26283
26284 /* Make sure we have a member DIE for decl. */
26285 if (origin != NULL_TREE)
26286 gen_type_die_for_member (origin, decl, context_die);
26287
26288 /* And its containing namespace. */
26289 context_die = declare_in_namespace (decl, context_die);
26290 }
26291
26292 /* Now output a DIE to represent the function itself. */
26293 if (decl)
26294 gen_subprogram_die (decl, context_die);
26295 break;
26296
26297 case TYPE_DECL:
26298 /* If we are in terse mode, don't generate any DIEs to represent any
26299 actual typedefs. */
26300 if (debug_info_level <= DINFO_LEVEL_TERSE)
26301 break;
26302
26303 /* In the special case of a TYPE_DECL node representing the declaration
26304 of some type tag, if the given TYPE_DECL is marked as having been
26305 instantiated from some other (original) TYPE_DECL node (e.g. one which
26306 was generated within the original definition of an inline function) we
26307 used to generate a special (abbreviated) DW_TAG_structure_type,
26308 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26309 should be actually referencing those DIEs, as variable DIEs with that
26310 type would be emitted already in the abstract origin, so it was always
26311 removed during unused type pruning. Don't add anything in this
26312 case. */
26313 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26314 break;
26315
26316 if (is_redundant_typedef (decl))
26317 gen_type_die (TREE_TYPE (decl), context_die);
26318 else
26319 /* Output a DIE to represent the typedef itself. */
26320 gen_typedef_die (decl, context_die);
26321 break;
26322
26323 case LABEL_DECL:
26324 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26325 gen_label_die (decl, context_die);
26326 break;
26327
26328 case VAR_DECL:
26329 case RESULT_DECL:
26330 /* If we are in terse mode, don't generate any DIEs to represent any
26331 variable declarations or definitions. */
26332 if (debug_info_level <= DINFO_LEVEL_TERSE)
26333 break;
26334
26335 /* Avoid generating stray type DIEs during late dwarf dumping.
26336 All types have been dumped early. */
26337 if (early_dwarf
26338 /* ??? But in LTRANS we cannot annotate early created variably
26339 modified type DIEs without copying them and adjusting all
26340 references to them. Dump them again as happens for inlining
26341 which copies both the decl and the types. */
26342 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26343 in VLA bound information for example. */
26344 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26345 current_function_decl)))
26346 {
26347 /* Output any DIEs that are needed to specify the type of this data
26348 object. */
26349 if (decl_by_reference_p (decl_or_origin))
26350 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26351 else
26352 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26353 }
26354
26355 if (early_dwarf)
26356 {
26357 /* And its containing type. */
26358 class_origin = decl_class_context (decl_or_origin);
26359 if (class_origin != NULL_TREE)
26360 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26361
26362 /* And its containing namespace. */
26363 context_die = declare_in_namespace (decl_or_origin, context_die);
26364 }
26365
26366 /* Now output the DIE to represent the data object itself. This gets
26367 complicated because of the possibility that the VAR_DECL really
26368 represents an inlined instance of a formal parameter for an inline
26369 function. */
26370 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26371 if (ultimate_origin != NULL_TREE
26372 && TREE_CODE (ultimate_origin) == PARM_DECL)
26373 gen_formal_parameter_die (decl, origin,
26374 true /* Emit name attribute. */,
26375 context_die);
26376 else
26377 gen_variable_die (decl, origin, context_die);
26378 break;
26379
26380 case FIELD_DECL:
26381 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26382 /* Ignore the nameless fields that are used to skip bits but handle C++
26383 anonymous unions and structs. */
26384 if (DECL_NAME (decl) != NULL_TREE
26385 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26386 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26387 {
26388 gen_type_die (member_declared_type (decl), context_die);
26389 gen_field_die (decl, ctx, context_die);
26390 }
26391 break;
26392
26393 case PARM_DECL:
26394 /* Avoid generating stray type DIEs during late dwarf dumping.
26395 All types have been dumped early. */
26396 if (early_dwarf
26397 /* ??? But in LTRANS we cannot annotate early created variably
26398 modified type DIEs without copying them and adjusting all
26399 references to them. Dump them again as happens for inlining
26400 which copies both the decl and the types. */
26401 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26402 in VLA bound information for example. */
26403 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26404 current_function_decl)))
26405 {
26406 if (DECL_BY_REFERENCE (decl_or_origin))
26407 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26408 else
26409 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26410 }
26411 return gen_formal_parameter_die (decl, origin,
26412 true /* Emit name attribute. */,
26413 context_die);
26414
26415 case NAMESPACE_DECL:
26416 if (dwarf_version >= 3 || !dwarf_strict)
26417 gen_namespace_die (decl, context_die);
26418 break;
26419
26420 case IMPORTED_DECL:
26421 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26422 DECL_CONTEXT (decl), context_die);
26423 break;
26424
26425 case NAMELIST_DECL:
26426 gen_namelist_decl (DECL_NAME (decl), context_die,
26427 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26428 break;
26429
26430 default:
26431 /* Probably some frontend-internal decl. Assume we don't care. */
26432 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26433 break;
26434 }
26435
26436 return NULL;
26437 }
26438 \f
26439 /* Output initial debug information for global DECL. Called at the
26440 end of the parsing process.
26441
26442 This is the initial debug generation process. As such, the DIEs
26443 generated may be incomplete. A later debug generation pass
26444 (dwarf2out_late_global_decl) will augment the information generated
26445 in this pass (e.g., with complete location info). */
26446
26447 static void
26448 dwarf2out_early_global_decl (tree decl)
26449 {
26450 set_early_dwarf s;
26451
26452 /* gen_decl_die() will set DECL_ABSTRACT because
26453 cgraph_function_possibly_inlined_p() returns true. This in
26454 turn will cause DW_AT_inline attributes to be set.
26455
26456 This happens because at early dwarf generation, there is no
26457 cgraph information, causing cgraph_function_possibly_inlined_p()
26458 to return true. Trick cgraph_function_possibly_inlined_p()
26459 while we generate dwarf early. */
26460 bool save = symtab->global_info_ready;
26461 symtab->global_info_ready = true;
26462
26463 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26464 other DECLs and they can point to template types or other things
26465 that dwarf2out can't handle when done via dwarf2out_decl. */
26466 if (TREE_CODE (decl) != TYPE_DECL
26467 && TREE_CODE (decl) != PARM_DECL)
26468 {
26469 if (TREE_CODE (decl) == FUNCTION_DECL)
26470 {
26471 tree save_fndecl = current_function_decl;
26472
26473 /* For nested functions, make sure we have DIEs for the parents first
26474 so that all nested DIEs are generated at the proper scope in the
26475 first shot. */
26476 tree context = decl_function_context (decl);
26477 if (context != NULL)
26478 {
26479 dw_die_ref context_die = lookup_decl_die (context);
26480 current_function_decl = context;
26481
26482 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26483 enough so that it lands in its own context. This avoids type
26484 pruning issues later on. */
26485 if (context_die == NULL || is_declaration_die (context_die))
26486 dwarf2out_decl (context);
26487 }
26488
26489 /* Emit an abstract origin of a function first. This happens
26490 with C++ constructor clones for example and makes
26491 dwarf2out_abstract_function happy which requires the early
26492 DIE of the abstract instance to be present. */
26493 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26494 dw_die_ref origin_die;
26495 if (origin != NULL
26496 /* Do not emit the DIE multiple times but make sure to
26497 process it fully here in case we just saw a declaration. */
26498 && ((origin_die = lookup_decl_die (origin)) == NULL
26499 || is_declaration_die (origin_die)))
26500 {
26501 current_function_decl = origin;
26502 dwarf2out_decl (origin);
26503 }
26504
26505 /* Emit the DIE for decl but avoid doing that multiple times. */
26506 dw_die_ref old_die;
26507 if ((old_die = lookup_decl_die (decl)) == NULL
26508 || is_declaration_die (old_die))
26509 {
26510 current_function_decl = decl;
26511 dwarf2out_decl (decl);
26512 }
26513
26514 current_function_decl = save_fndecl;
26515 }
26516 else
26517 dwarf2out_decl (decl);
26518 }
26519 symtab->global_info_ready = save;
26520 }
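/* Editor's note: a small sketch of the nested-function ordering handled
   above, using the GNU C nested-function extension; the names are
   hypothetical:

       void outer (void)
       {
         void nested (void) { }   // decl_function_context (nested) == outer
         nested ();
       }

   When early debug is generated for "nested", the DIE for "outer" is forced
   first (dwarf2out_decl on the context), so nested's DIE is created in the
   proper scope in the first shot.  */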
26521
26522 /* Return whether EXPR is an expression with the following pattern:
26523 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26524
26525 static bool
26526 is_trivial_indirect_ref (tree expr)
26527 {
26528 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26529 return false;
26530
26531 tree nop = TREE_OPERAND (expr, 0);
26532 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26533 return false;
26534
26535 tree int_cst = TREE_OPERAND (nop, 0);
26536 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26537 }
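/* Editor's note: at the tree level the pattern above roughly corresponds to
   dereferencing a constant address, e.g. the (hypothetical) expression

       *(int *) 0x1234

   which is INDIRECT_REF (NOP_EXPR (INTEGER_CST)).  Such a DECL_VALUE_EXPR is
   safe to describe in .debug_info because it needs no relocation against a
   text symbol.  */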
26538
26539 /* Output debug information for global decl DECL. Called from
26540 toplev.c after compilation proper has finished. */
26541
26542 static void
26543 dwarf2out_late_global_decl (tree decl)
26544 {
26545 /* Fill-in any location information we were unable to determine
26546 on the first pass. */
26547 if (VAR_P (decl))
26548 {
26549 dw_die_ref die = lookup_decl_die (decl);
26550
26551 /* We may have to generate early debug late for LTO in case debug
26552 was not enabled at compile-time or the target doesn't support
26553 the LTO early debug scheme. */
26554 if (! die && in_lto_p)
26555 {
26556 dwarf2out_decl (decl);
26557 die = lookup_decl_die (decl);
26558 }
26559
26560 if (die)
26561 {
26562 /* We get called via the symtab code invoking late_global_decl
26563 for symbols that are optimized out.
26564
26565 Do not add locations for those, except if they have a
26566 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26567 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26568 INDIRECT_REF expression, as this could generate relocations to
26569 text symbols in LTO object files, which is invalid. */
26570 varpool_node *node = varpool_node::get (decl);
26571 if ((! node || ! node->definition)
26572 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26573 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26574 tree_add_const_value_attribute_for_decl (die, decl);
26575 else
26576 add_location_or_const_value_attribute (die, decl, false);
26577 }
26578 }
26579 }
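/* Editor's note: a hedged example of the "optimized out" path above.  For a
   hypothetical

       static const int answer = 42;   // never address-taken, so no
                                        // definition survives in the varpool

   the late pass cannot add a location, but
   tree_add_const_value_attribute_for_decl may still record the value 42 as a
   DW_AT_const_value on the variable's DIE.  */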
26580
26581 /* Output debug information for type decl DECL. Called from toplev.c
26582 and from language front ends (to record built-in types). */
26583 static void
26584 dwarf2out_type_decl (tree decl, int local)
26585 {
26586 if (!local)
26587 {
26588 set_early_dwarf s;
26589 dwarf2out_decl (decl);
26590 }
26591 }
26592
26593 /* Output debug information for imported module or decl DECL.
26594 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26595 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26596 that DECL belongs to.
26597 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26598 static void
26599 dwarf2out_imported_module_or_decl_1 (tree decl,
26600 tree name,
26601 tree lexical_block,
26602 dw_die_ref lexical_block_die)
26603 {
26604 expanded_location xloc;
26605 dw_die_ref imported_die = NULL;
26606 dw_die_ref at_import_die;
26607
26608 if (TREE_CODE (decl) == IMPORTED_DECL)
26609 {
26610 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26611 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26612 gcc_assert (decl);
26613 }
26614 else
26615 xloc = expand_location (input_location);
26616
26617 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26618 {
26619 at_import_die = force_type_die (TREE_TYPE (decl));
26620 /* For namespace N { typedef void T; } using N::T; base_type_die
26621 returns NULL, but DW_TAG_imported_declaration requires
26622 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26623 if (!at_import_die)
26624 {
26625 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26626 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26627 at_import_die = lookup_type_die (TREE_TYPE (decl));
26628 gcc_assert (at_import_die);
26629 }
26630 }
26631 else
26632 {
26633 at_import_die = lookup_decl_die (decl);
26634 if (!at_import_die)
26635 {
26636 /* If we're trying to avoid duplicate debug info, we may not have
26637 emitted the member decl for this field. Emit it now. */
26638 if (TREE_CODE (decl) == FIELD_DECL)
26639 {
26640 tree type = DECL_CONTEXT (decl);
26641
26642 if (TYPE_CONTEXT (type)
26643 && TYPE_P (TYPE_CONTEXT (type))
26644 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26645 DINFO_USAGE_DIR_USE))
26646 return;
26647 gen_type_die_for_member (type, decl,
26648 get_context_die (TYPE_CONTEXT (type)));
26649 }
26650 if (TREE_CODE (decl) == NAMELIST_DECL)
26651 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26652 get_context_die (DECL_CONTEXT (decl)),
26653 NULL_TREE);
26654 else
26655 at_import_die = force_decl_die (decl);
26656 }
26657 }
26658
26659 if (TREE_CODE (decl) == NAMESPACE_DECL)
26660 {
26661 if (dwarf_version >= 3 || !dwarf_strict)
26662 imported_die = new_die (DW_TAG_imported_module,
26663 lexical_block_die,
26664 lexical_block);
26665 else
26666 return;
26667 }
26668 else
26669 imported_die = new_die (DW_TAG_imported_declaration,
26670 lexical_block_die,
26671 lexical_block);
26672
26673 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26674 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26675 if (debug_column_info && xloc.column)
26676 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26677 if (name)
26678 add_AT_string (imported_die, DW_AT_name,
26679 IDENTIFIER_POINTER (name));
26680 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26681 }
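/* Editor's note: a minimal C++ sketch of the two DIE kinds created above;
   the names are hypothetical:

       using namespace mylib;    // NAMESPACE_DECL -> DW_TAG_imported_module
       using mylib::widget;      // anything else  -> DW_TAG_imported_declaration

   In both cases DW_AT_import refers to the DIE of the imported namespace,
   type or declaration, and DW_AT_decl_file/line record where the using
   directive or declaration appeared.  */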
26682
26683 /* Output debug information for imported module or decl DECL.
26684 NAME is the non-NULL name in context if the decl has been renamed.
26685 CHILD is true if decl is one of the renamed decls as part of
26686 importing whole module.
26687 IMPLICIT is set if this hook is called for an implicit import
26688 such as inline namespace. */
26689
26690 static void
26691 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26692 bool child, bool implicit)
26693 {
26694 /* dw_die_ref at_import_die; */
26695 dw_die_ref scope_die;
26696
26697 if (debug_info_level <= DINFO_LEVEL_TERSE)
26698 return;
26699
26700 gcc_assert (decl);
26701
26702 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26703 should be enough; for DWARF4 and older, even if we emit
26704 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26705 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26706 if (implicit
26707 && dwarf_version >= 5
26708 && lang_hooks.decls.decl_dwarf_attribute (decl,
26709 DW_AT_export_symbols) == 1)
26710 return;
26711
26712 set_early_dwarf s;
26713
26714 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26715 two DIEs: the decl DIE (for the reference) and the scope DIE. First, get
26716 the DIE for the decl itself. */
26717
26718 /* Get the scope die for decl context. Use comp_unit_die for global module
26719 or decl. If the die is not found for non-globals, force a new die. */
26720 if (context
26721 && TYPE_P (context)
26722 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26723 return;
26724
26725 scope_die = get_context_die (context);
26726
26727 if (child)
26728 {
26729 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26730 there is nothing we can do here. */
26731 if (dwarf_version < 3 && dwarf_strict)
26732 return;
26733
26734 gcc_assert (scope_die->die_child);
26735 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26736 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26737 scope_die = scope_die->die_child;
26738 }
26739
26740 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26741 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26742 }
26743
26744 /* Output debug information for namelists. */
26745
26746 static dw_die_ref
26747 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26748 {
26749 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26750 tree value;
26751 unsigned i;
26752
26753 if (debug_info_level <= DINFO_LEVEL_TERSE)
26754 return NULL;
26755
26756 gcc_assert (scope_die != NULL);
26757 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26758 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26759
26760 /* If there are no item_decls, we have a nondefining namelist, e.g.
26761 with USE association; hence, set DW_AT_declaration. */
26762 if (item_decls == NULL_TREE)
26763 {
26764 add_AT_flag (nml_die, DW_AT_declaration, 1);
26765 return nml_die;
26766 }
26767
26768 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26769 {
26770 nml_item_ref_die = lookup_decl_die (value);
26771 if (!nml_item_ref_die)
26772 nml_item_ref_die = force_decl_die (value);
26773
26774 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26775 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26776 }
26777 return nml_die;
26778 }
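/* Editor's note: for a Fortran namelist such as (hypothetically)

       NAMELIST /cfg/ a, b

   the DIE produced above is a DW_TAG_namelist named "cfg" with one
   DW_TAG_namelist_item child per member, each carrying a DW_AT_namelist_items
   reference to the member's own DIE; a use-associated namelist with no item
   decls only gets DW_AT_declaration.  */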
26779
26780
26781 /* Write the debugging output for DECL. */
26782
26783 static void
26784 dwarf2out_decl (tree decl)
26785 {
26786 dw_die_ref context_die = comp_unit_die ();
26787
26788 switch (TREE_CODE (decl))
26789 {
26790 case ERROR_MARK:
26791 return;
26792
26793 case FUNCTION_DECL:
26794 /* If we're a nested function, initially use a parent of NULL; if we're
26795 a plain function, this will be fixed up in decls_for_scope. If
26796 we're a method, it will be ignored, since we already have a DIE.
26797 Avoid doing this late though since clones of class methods may
26798 otherwise end up in limbo and create type DIEs late. */
26799 if (early_dwarf
26800 && decl_function_context (decl)
26801 /* But if we're in terse mode, we don't care about scope. */
26802 && debug_info_level > DINFO_LEVEL_TERSE)
26803 context_die = NULL;
26804 break;
26805
26806 case VAR_DECL:
26807 /* For local statics lookup proper context die. */
26808 if (local_function_static (decl))
26809 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26810
26811 /* If we are in terse mode, don't generate any DIEs to represent any
26812 variable declarations or definitions. */
26813 if (debug_info_level <= DINFO_LEVEL_TERSE)
26814 return;
26815 break;
26816
26817 case CONST_DECL:
26818 if (debug_info_level <= DINFO_LEVEL_TERSE)
26819 return;
26820 if (!is_fortran () && !is_ada ())
26821 return;
26822 if (TREE_STATIC (decl) && decl_function_context (decl))
26823 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26824 break;
26825
26826 case NAMESPACE_DECL:
26827 case IMPORTED_DECL:
26828 if (debug_info_level <= DINFO_LEVEL_TERSE)
26829 return;
26830 if (lookup_decl_die (decl) != NULL)
26831 return;
26832 break;
26833
26834 case TYPE_DECL:
26835 /* Don't emit stubs for types unless they are needed by other DIEs. */
26836 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26837 return;
26838
26839 /* Don't bother trying to generate any DIEs to represent any of the
26840 normal built-in types for the language we are compiling. */
26841 if (DECL_IS_BUILTIN (decl))
26842 return;
26843
26844 /* If we are in terse mode, don't generate any DIEs for types. */
26845 if (debug_info_level <= DINFO_LEVEL_TERSE)
26846 return;
26847
26848 /* If we're a function-scope tag, initially use a parent of NULL;
26849 this will be fixed up in decls_for_scope. */
26850 if (decl_function_context (decl))
26851 context_die = NULL;
26852
26853 break;
26854
26855 case NAMELIST_DECL:
26856 break;
26857
26858 default:
26859 return;
26860 }
26861
26862 gen_decl_die (decl, NULL, NULL, context_die);
26863
26864 if (flag_checking)
26865 {
26866 dw_die_ref die = lookup_decl_die (decl);
26867 if (die)
26868 check_die (die);
26869 }
26870 }
26871
26872 /* Write the debugging output for DECL. */
26873
26874 static void
26875 dwarf2out_function_decl (tree decl)
26876 {
26877 dwarf2out_decl (decl);
26878 call_arg_locations = NULL;
26879 call_arg_loc_last = NULL;
26880 call_site_count = -1;
26881 tail_call_site_count = -1;
26882 decl_loc_table->empty ();
26883 cached_dw_loc_list_table->empty ();
26884 }
26885
26886 /* Output a marker (i.e. a label) for the beginning of the generated code for
26887 a lexical block. */
26888
26889 static void
26890 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26891 unsigned int blocknum)
26892 {
26893 switch_to_section (current_function_section ());
26894 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26895 }
26896
26897 /* Output a marker (i.e. a label) for the end of the generated code for a
26898 lexical block. */
26899
26900 static void
26901 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26902 {
26903 switch_to_section (current_function_section ());
26904 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26905 }
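/* Editor's note: the two hooks above only drop labels.  With GCC's default
   definitions of BLOCK_BEGIN_LABEL and BLOCK_END_LABEL these are typically
   of the form ".LBB<n>" and ".LBE<n>", and it is these labels that the
   DW_AT_low_pc/DW_AT_high_pc (or range list) of the corresponding
   DW_TAG_lexical_block DIE later refer to.  */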
26906
26907 /* Returns nonzero if it is appropriate not to emit any debugging
26908 information for BLOCK, because it doesn't contain any instructions.
26909
26910 Don't allow this for blocks with nested functions or local classes
26911 as we would end up with orphans, and in the presence of scheduling
26912 we may end up calling them anyway. */
26913
26914 static bool
26915 dwarf2out_ignore_block (const_tree block)
26916 {
26917 tree decl;
26918 unsigned int i;
26919
26920 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26921 if (TREE_CODE (decl) == FUNCTION_DECL
26922 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26923 return 0;
26924 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26925 {
26926 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26927 if (TREE_CODE (decl) == FUNCTION_DECL
26928 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26929 return 0;
26930 }
26931
26932 return 1;
26933 }
26934
26935 /* Hash table routines for file_hash. */
26936
26937 bool
26938 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26939 {
26940 return filename_cmp (p1->filename, p2) == 0;
26941 }
26942
26943 hashval_t
26944 dwarf_file_hasher::hash (dwarf_file_data *p)
26945 {
26946 return htab_hash_string (p->filename);
26947 }
26948
26949 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26950 dwarf2out.c) and return its "index". The index of each (known) filename is
26951 just a unique number which is associated with only that one filename. We
26952 need such numbers for the sake of generating labels (in the .debug_sfnames
26953 section) and references to those files numbers (in the .debug_srcinfo
26954 and .debug_macinfo sections). If the filename given as an argument is not
26955 found in our current list, add it to the list and assign it the next
26956 available unique index number. */
26957
26958 static struct dwarf_file_data *
26959 lookup_filename (const char *file_name)
26960 {
26961 struct dwarf_file_data * created;
26962
26963 if (!file_name)
26964 return NULL;
26965
26966 dwarf_file_data **slot
26967 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26968 INSERT);
26969 if (*slot)
26970 return *slot;
26971
26972 created = ggc_alloc<dwarf_file_data> ();
26973 created->filename = file_name;
26974 created->emitted_number = 0;
26975 *slot = created;
26976 return created;
26977 }
26978
26979 /* If the assembler will construct the file table, then translate the compiler
26980 internal file table number into the assembler file table number, and emit
26981 a .file directive if we haven't already emitted one yet. The file table
26982 numbers are different because we prune debug info for unused variables and
26983 types, which may include filenames. */
26984
26985 static int
26986 maybe_emit_file (struct dwarf_file_data * fd)
26987 {
26988 if (! fd->emitted_number)
26989 {
26990 if (last_emitted_file)
26991 fd->emitted_number = last_emitted_file->emitted_number + 1;
26992 else
26993 fd->emitted_number = 1;
26994 last_emitted_file = fd;
26995
26996 if (output_asm_line_debug_info ())
26997 {
26998 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26999 output_quoted_string (asm_out_file,
27000 remap_debug_filename (fd->filename));
27001 fputc ('\n', asm_out_file);
27002 }
27003 }
27004
27005 return fd->emitted_number;
27006 }
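/* Editor's note: when output_asm_line_debug_info () is true, the directive
   emitted above looks roughly like

       .file 2 "lib/foo.c"

   where the number is the compiler-assigned emitted_number and the string is
   the remapped source file name; the example file name is hypothetical.  */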
27007
27008 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27009 That generation should happen after function debug info has been
27010 generated. The value of the attribute is the constant value of ARG. */
27011
27012 static void
27013 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27014 {
27015 die_arg_entry entry;
27016
27017 if (!die || !arg)
27018 return;
27019
27020 gcc_assert (early_dwarf);
27021
27022 if (!tmpl_value_parm_die_table)
27023 vec_alloc (tmpl_value_parm_die_table, 32);
27024
27025 entry.die = die;
27026 entry.arg = arg;
27027 vec_safe_push (tmpl_value_parm_die_table, entry);
27028 }
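/* Editor's note: a hedged C++ sketch of the kind of DIE this table defers
   work for; the names are hypothetical:

       template <int N> struct buf { char d[N]; };
       buf<16> b;

   The DW_TAG_template_value_parameter DIE for N in buf<16> is scheduled
   here, and gen_remaining_tmpl_value_param_die_attribute later adds its
   DW_AT_const_value (16), or a DW_AT_location when the constant cannot be
   represented directly, once function debug info exists.  */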
27029
27030 /* Return TRUE if T is an instance of generic type, FALSE
27031 otherwise. */
27032
27033 static bool
27034 generic_type_p (tree t)
27035 {
27036 if (t == NULL_TREE || !TYPE_P (t))
27037 return false;
27038 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27039 }
27040
27041 /* Schedule the generation of the generic parameter dies for the
27042 instance of generic type T. The proper generation itself is later
27043 done by gen_scheduled_generic_parms_dies. */
27044
27045 static void
27046 schedule_generic_params_dies_gen (tree t)
27047 {
27048 if (!generic_type_p (t))
27049 return;
27050
27051 gcc_assert (early_dwarf);
27052
27053 if (!generic_type_instances)
27054 vec_alloc (generic_type_instances, 256);
27055
27056 vec_safe_push (generic_type_instances, t);
27057 }
27058
27059 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27060 by append_entry_to_tmpl_value_parm_die_table. This function must
27061 be called after function DIEs have been generated. */
27062
27063 static void
27064 gen_remaining_tmpl_value_param_die_attribute (void)
27065 {
27066 if (tmpl_value_parm_die_table)
27067 {
27068 unsigned i, j;
27069 die_arg_entry *e;
27070
27071 /* We do this in two phases - first get the cases we can
27072 handle during early-finish, preserving those we cannot
27073 (containing symbolic constants where we don't yet know
27074 whether we are going to output the referenced symbols).
27075 For those we try again at late-finish. */
27076 j = 0;
27077 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27078 {
27079 if (!e->die->removed
27080 && !tree_add_const_value_attribute (e->die, e->arg))
27081 {
27082 dw_loc_descr_ref loc = NULL;
27083 if (! early_dwarf
27084 && (dwarf_version >= 5 || !dwarf_strict))
27085 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27086 if (loc)
27087 add_AT_loc (e->die, DW_AT_location, loc);
27088 else
27089 (*tmpl_value_parm_die_table)[j++] = *e;
27090 }
27091 }
27092 tmpl_value_parm_die_table->truncate (j);
27093 }
27094 }
27095
27096 /* Generate generic parameters DIEs for instances of generic types
27097 that have been previously scheduled by
27098 schedule_generic_params_dies_gen. This function must be called
27099 after all the types of the CU have been laid out. */
27100
27101 static void
27102 gen_scheduled_generic_parms_dies (void)
27103 {
27104 unsigned i;
27105 tree t;
27106
27107 if (!generic_type_instances)
27108 return;
27109
27110 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27111 if (COMPLETE_TYPE_P (t))
27112 gen_generic_params_dies (t);
27113
27114 generic_type_instances = NULL;
27115 }
27116
27117
27118 /* Replace DW_AT_name for the decl with name. */
27119
27120 static void
27121 dwarf2out_set_name (tree decl, tree name)
27122 {
27123 dw_die_ref die;
27124 dw_attr_node *attr;
27125 const char *dname;
27126
27127 die = TYPE_SYMTAB_DIE (decl);
27128 if (!die)
27129 return;
27130
27131 dname = dwarf2_name (name, 0);
27132 if (!dname)
27133 return;
27134
27135 attr = get_AT (die, DW_AT_name);
27136 if (attr)
27137 {
27138 struct indirect_string_node *node;
27139
27140 node = find_AT_string (dname);
27141 /* replace the string. */
27142 attr->dw_attr_val.v.val_str = node;
27143 }
27144
27145 else
27146 add_name_attribute (die, dname);
27147 }
27148
27149 /* True if before or during processing of the first function being emitted. */
27150 static bool in_first_function_p = true;
27151 /* True if loc_note during dwarf2out_var_location call might still be
27152 before first real instruction at address equal to .Ltext0. */
27153 static bool maybe_at_text_label_p = true;
27154 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27155 static unsigned int first_loclabel_num_not_at_text_label;
27156
27157 /* Look ahead for a real insn, or for a begin stmt marker. */
27158
27159 static rtx_insn *
27160 dwarf2out_next_real_insn (rtx_insn *loc_note)
27161 {
27162 rtx_insn *next_real = NEXT_INSN (loc_note);
27163
27164 while (next_real)
27165 if (INSN_P (next_real))
27166 break;
27167 else
27168 next_real = NEXT_INSN (next_real);
27169
27170 return next_real;
27171 }
27172
27173 /* Called by the final INSN scan whenever we see a var location. We
27174 use it to drop labels in the right places, and throw the location in
27175 our lookup table. */
27176
27177 static void
27178 dwarf2out_var_location (rtx_insn *loc_note)
27179 {
27180 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27181 struct var_loc_node *newloc;
27182 rtx_insn *next_real, *next_note;
27183 rtx_insn *call_insn = NULL;
27184 static const char *last_label;
27185 static const char *last_postcall_label;
27186 static bool last_in_cold_section_p;
27187 static rtx_insn *expected_next_loc_note;
27188 tree decl;
27189 bool var_loc_p;
27190 var_loc_view view = 0;
27191
27192 if (!NOTE_P (loc_note))
27193 {
27194 if (CALL_P (loc_note))
27195 {
27196 maybe_reset_location_view (loc_note, cur_line_info_table);
27197 call_site_count++;
27198 if (SIBLING_CALL_P (loc_note))
27199 tail_call_site_count++;
27200 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27201 {
27202 call_insn = loc_note;
27203 loc_note = NULL;
27204 var_loc_p = false;
27205
27206 next_real = dwarf2out_next_real_insn (call_insn);
27207 next_note = NULL;
27208 cached_next_real_insn = NULL;
27209 goto create_label;
27210 }
27211 if (optimize == 0 && !flag_var_tracking)
27212 {
27213 /* When the var-tracking pass is not running, there is no note
27214 for indirect calls whose target is compile-time known. In this
27215 case, process such calls specifically so that we generate call
27216 sites for them anyway. */
27217 rtx x = PATTERN (loc_note);
27218 if (GET_CODE (x) == PARALLEL)
27219 x = XVECEXP (x, 0, 0);
27220 if (GET_CODE (x) == SET)
27221 x = SET_SRC (x);
27222 if (GET_CODE (x) == CALL)
27223 x = XEXP (x, 0);
27224 if (!MEM_P (x)
27225 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27226 || !SYMBOL_REF_DECL (XEXP (x, 0))
27227 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27228 != FUNCTION_DECL))
27229 {
27230 call_insn = loc_note;
27231 loc_note = NULL;
27232 var_loc_p = false;
27233
27234 next_real = dwarf2out_next_real_insn (call_insn);
27235 next_note = NULL;
27236 cached_next_real_insn = NULL;
27237 goto create_label;
27238 }
27239 }
27240 }
27241 else if (!debug_variable_location_views)
27242 gcc_unreachable ();
27243 else
27244 maybe_reset_location_view (loc_note, cur_line_info_table);
27245
27246 return;
27247 }
27248
27249 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27250 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27251 return;
27252
27253 /* Optimize processing a large consecutive sequence of location
27254 notes so we don't spend too much time in next_real_insn. If the
27255 next insn is another location note, remember the next_real_insn
27256 calculation for next time. */
27257 next_real = cached_next_real_insn;
27258 if (next_real)
27259 {
27260 if (expected_next_loc_note != loc_note)
27261 next_real = NULL;
27262 }
27263
27264 next_note = NEXT_INSN (loc_note);
27265 if (! next_note
27266 || next_note->deleted ()
27267 || ! NOTE_P (next_note)
27268 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27269 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27270 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27271 next_note = NULL;
27272
27273 if (! next_real)
27274 next_real = dwarf2out_next_real_insn (loc_note);
27275
27276 if (next_note)
27277 {
27278 expected_next_loc_note = next_note;
27279 cached_next_real_insn = next_real;
27280 }
27281 else
27282 cached_next_real_insn = NULL;
27283
27284 /* If there are no instructions which would be affected by this note,
27285 don't do anything. */
27286 if (var_loc_p
27287 && next_real == NULL_RTX
27288 && !NOTE_DURING_CALL_P (loc_note))
27289 return;
27290
27291 create_label:
27292
27293 if (next_real == NULL_RTX)
27294 next_real = get_last_insn ();
27295
27296 /* If there were any real insns between the note we processed last time
27297 and this note (or if it is the first note), clear
27298 last_{,postcall_}label so that they are not reused this time. */
27299 if (last_var_location_insn == NULL_RTX
27300 || last_var_location_insn != next_real
27301 || last_in_cold_section_p != in_cold_section_p)
27302 {
27303 last_label = NULL;
27304 last_postcall_label = NULL;
27305 }
27306
27307 if (var_loc_p)
27308 {
27309 const char *label
27310 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27311 view = cur_line_info_table->view;
27312 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27313 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27314 if (newloc == NULL)
27315 return;
27316 }
27317 else
27318 {
27319 decl = NULL_TREE;
27320 newloc = NULL;
27321 }
27322
27323 /* If there were no real insns between the note we processed last time
27324 and this note, use the label we emitted last time. Otherwise
27325 create a new label and emit it. */
27326 if (last_label == NULL)
27327 {
27328 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27329 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27330 loclabel_num++;
27331 last_label = ggc_strdup (loclabel);
27332 /* See if loclabel might be equal to .Ltext0. If yes,
27333 bump first_loclabel_num_not_at_text_label. */
27334 if (!have_multiple_function_sections
27335 && in_first_function_p
27336 && maybe_at_text_label_p)
27337 {
27338 static rtx_insn *last_start;
27339 rtx_insn *insn;
27340 for (insn = loc_note; insn; insn = previous_insn (insn))
27341 if (insn == last_start)
27342 break;
27343 else if (!NONDEBUG_INSN_P (insn))
27344 continue;
27345 else
27346 {
27347 rtx body = PATTERN (insn);
27348 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27349 continue;
27350 /* Inline asm could occupy zero bytes. */
27351 else if (GET_CODE (body) == ASM_INPUT
27352 || asm_noperands (body) >= 0)
27353 continue;
27354 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27355 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27356 continue;
27357 #endif
27358 else
27359 {
27360 /* Assume insn has non-zero length. */
27361 maybe_at_text_label_p = false;
27362 break;
27363 }
27364 }
27365 if (maybe_at_text_label_p)
27366 {
27367 last_start = loc_note;
27368 first_loclabel_num_not_at_text_label = loclabel_num;
27369 }
27370 }
27371 }
27372
27373 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27374 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27375
27376 if (!var_loc_p)
27377 {
27378 struct call_arg_loc_node *ca_loc
27379 = ggc_cleared_alloc<call_arg_loc_node> ();
27380 rtx_insn *prev = call_insn;
27381
27382 ca_loc->call_arg_loc_note
27383 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27384 ca_loc->next = NULL;
27385 ca_loc->label = last_label;
27386 gcc_assert (prev
27387 && (CALL_P (prev)
27388 || (NONJUMP_INSN_P (prev)
27389 && GET_CODE (PATTERN (prev)) == SEQUENCE
27390 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27391 if (!CALL_P (prev))
27392 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27393 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27394
27395 /* Look for a SYMBOL_REF in the "prev" instruction. */
27396 rtx x = get_call_rtx_from (PATTERN (prev));
27397 if (x)
27398 {
27399 /* Try to get the call symbol, if any. */
27400 if (MEM_P (XEXP (x, 0)))
27401 x = XEXP (x, 0);
27402 /* First, look for a memory access to a symbol_ref. */
27403 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27404 && SYMBOL_REF_DECL (XEXP (x, 0))
27405 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27406 ca_loc->symbol_ref = XEXP (x, 0);
27407 /* Otherwise, look at a compile-time known user-level function
27408 declaration. */
27409 else if (MEM_P (x)
27410 && MEM_EXPR (x)
27411 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27412 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27413 }
27414
27415 ca_loc->block = insn_scope (prev);
27416 if (call_arg_locations)
27417 call_arg_loc_last->next = ca_loc;
27418 else
27419 call_arg_locations = ca_loc;
27420 call_arg_loc_last = ca_loc;
27421 }
27422 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27423 {
27424 newloc->label = last_label;
27425 newloc->view = view;
27426 }
27427 else
27428 {
27429 if (!last_postcall_label)
27430 {
27431 sprintf (loclabel, "%s-1", last_label);
27432 last_postcall_label = ggc_strdup (loclabel);
27433 }
27434 newloc->label = last_postcall_label;
27435 /* ??? This view is at last_label, not last_label-1, but we
27436 could only assume view at last_label-1 is zero if we could
27437 assume calls always have length greater than one. This is
27438 probably true in general, though there might be a rare
27439 exception to this rule, e.g. if a call insn is optimized out
27440 by target magic. Then, even the -1 in the label will be
27441 wrong, which might invalidate the range. Anyway, using view,
27442 though technically possibly incorrect, will work as far as
27443 ranges go: since L-1 is in the middle of the call insn,
27444 (L-1).0 and (L-1).V shouldn't make any difference, and having
27445 the loclist entry refer to the .loc entry might be useful, so
27446 leave it like this. */
27447 newloc->view = view;
27448 }
27449
27450 if (var_loc_p && flag_debug_asm)
27451 {
27452 const char *name, *sep, *patstr;
27453 if (decl && DECL_NAME (decl))
27454 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27455 else
27456 name = "";
27457 if (NOTE_VAR_LOCATION_LOC (loc_note))
27458 {
27459 sep = " => ";
27460 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27461 }
27462 else
27463 {
27464 sep = " ";
27465 patstr = "RESET";
27466 }
27467 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27468 name, sep, patstr);
27469 }
27470
27471 last_var_location_insn = next_real;
27472 last_in_cold_section_p = in_cold_section_p;
27473 }
27474
27475 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27476 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27477 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27478 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27479 BLOCK_FRAGMENT_ORIGIN links. */
27480 static bool
27481 block_within_block_p (tree block, tree outer, bool bothways)
27482 {
27483 if (block == outer)
27484 return true;
27485
27486 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27487 for (tree context = BLOCK_SUPERCONTEXT (block);
27488 context != outer;
27489 context = BLOCK_SUPERCONTEXT (context))
27490 if (!context || TREE_CODE (context) != BLOCK)
27491 return false;
27492
27493 if (!bothways)
27494 return true;
27495
27496 /* Now check that each block is actually referenced by its
27497 parent. */
27498 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27499 context = BLOCK_SUPERCONTEXT (context))
27500 {
27501 if (BLOCK_FRAGMENT_ORIGIN (context))
27502 {
27503 gcc_assert (!BLOCK_SUBBLOCKS (context));
27504 context = BLOCK_FRAGMENT_ORIGIN (context);
27505 }
27506 for (tree sub = BLOCK_SUBBLOCKS (context);
27507 sub != block;
27508 sub = BLOCK_CHAIN (sub))
27509 if (!sub)
27510 return false;
27511 if (context == outer)
27512 return true;
27513 else
27514 block = context;
27515 }
27516 }
27517
27518 /* Called during final while assembling the marker of the entry point
27519 for an inlined function. */
27520
27521 static void
27522 dwarf2out_inline_entry (tree block)
27523 {
27524 gcc_assert (debug_inline_points);
27525
27526 /* If we can't represent it, don't bother. */
27527 if (!(dwarf_version >= 3 || !dwarf_strict))
27528 return;
27529
27530 gcc_assert (DECL_P (block_ultimate_origin (block)));
27531
27532 /* Sanity check the block tree. This would catch a case in which
27533 BLOCK got removed from the tree reachable from the outermost
27534 lexical block, but got retained in markers. It would still link
27535 back to its parents, but some ancestor would be missing a link
27536 down the path to the sub BLOCK. If the block got removed, its
27537 BLOCK_NUMBER will not be a usable value. */
27538 if (flag_checking)
27539 gcc_assert (block_within_block_p (block,
27540 DECL_INITIAL (current_function_decl),
27541 true));
27542
27543 gcc_assert (inlined_function_outer_scope_p (block));
27544 gcc_assert (!BLOCK_DIE (block));
27545
27546 if (BLOCK_FRAGMENT_ORIGIN (block))
27547 block = BLOCK_FRAGMENT_ORIGIN (block);
27548 /* Can the entry point ever not be at the beginning of an
27549 unfragmented lexical block? */
27550 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27551 || (cur_line_info_table
27552 && !ZERO_VIEW_P (cur_line_info_table->view))))
27553 return;
27554
27555 if (!inline_entry_data_table)
27556 inline_entry_data_table
27557 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27558
27559
27560 inline_entry_data **iedp
27561 = inline_entry_data_table->find_slot_with_hash (block,
27562 htab_hash_pointer (block),
27563 INSERT);
27564 if (*iedp)
27565 /* ??? Ideally, we'd record all entry points for the same inlined
27566 function (some may have been duplicated by e.g. unrolling), but
27567 we have no way to represent that ATM. */
27568 return;
27569
27570 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27571 ied->block = block;
27572 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27573 ied->label_num = BLOCK_NUMBER (block);
27574 if (cur_line_info_table)
27575 ied->view = cur_line_info_table->view;
27576
27577 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27578
27579 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27580 BLOCK_NUMBER (block));
27581 ASM_OUTPUT_LABEL (asm_out_file, label);
27582 }
27583
27584 /* Called from finalize_size_functions for size functions so that their body
27585 can be encoded in the debug info to describe the layout of variable-length
27586 structures. */
27587
27588 static void
27589 dwarf2out_size_function (tree decl)
27590 {
27591 function_to_dwarf_procedure (decl);
27592 }
27593
27594 /* Note in one location list that the text section has changed. */
27595
27596 int
27597 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27598 {
27599 var_loc_list *list = *slot;
27600 if (list->first)
27601 list->last_before_switch
27602 = list->last->next ? list->last->next : list->last;
27603 return 1;
27604 }
27605
27606 /* Note in all location lists that the text section has changed. */
27607
27608 static void
27609 var_location_switch_text_section (void)
27610 {
27611 if (decl_loc_table == NULL)
27612 return;
27613
27614 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27615 }
27616
27617 /* Create a new line number table. */
27618
27619 static dw_line_info_table *
27620 new_line_info_table (void)
27621 {
27622 dw_line_info_table *table;
27623
27624 table = ggc_cleared_alloc<dw_line_info_table> ();
27625 table->file_num = 1;
27626 table->line_num = 1;
27627 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27628 FORCE_RESET_NEXT_VIEW (table->view);
27629 table->symviews_since_reset = 0;
27630
27631 return table;
27632 }
27633
27634 /* Look up and cache the "current" table into which we emit line info,
27635 so that we don't have to do it for every source line. */
27636
27637 static void
27638 set_cur_line_info_table (section *sec)
27639 {
27640 dw_line_info_table *table;
27641
27642 if (sec == text_section)
27643 table = text_section_line_info;
27644 else if (sec == cold_text_section)
27645 {
27646 table = cold_text_section_line_info;
27647 if (!table)
27648 {
27649 cold_text_section_line_info = table = new_line_info_table ();
27650 table->end_label = cold_end_label;
27651 }
27652 }
27653 else
27654 {
27655 const char *end_label;
27656
27657 if (crtl->has_bb_partition)
27658 {
27659 if (in_cold_section_p)
27660 end_label = crtl->subsections.cold_section_end_label;
27661 else
27662 end_label = crtl->subsections.hot_section_end_label;
27663 }
27664 else
27665 {
27666 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27667 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27668 current_function_funcdef_no);
27669 end_label = ggc_strdup (label);
27670 }
27671
27672 table = new_line_info_table ();
27673 table->end_label = end_label;
27674
27675 vec_safe_push (separate_line_info, table);
27676 }
27677
27678 if (output_asm_line_debug_info ())
27679 table->is_stmt = (cur_line_info_table
27680 ? cur_line_info_table->is_stmt
27681 : DWARF_LINE_DEFAULT_IS_STMT_START);
27682 cur_line_info_table = table;
27683 }
27684
27685
27686 /* We need to reset the locations at the beginning of each
27687 function. We can't do this in the end_function hook, because the
27688 declarations that use the locations won't have been output when
27689 that hook is called. Also compute have_multiple_function_sections here. */
27690
27691 static void
27692 dwarf2out_begin_function (tree fun)
27693 {
27694 section *sec = function_section (fun);
27695
27696 if (sec != text_section)
27697 have_multiple_function_sections = true;
27698
27699 if (crtl->has_bb_partition && !cold_text_section)
27700 {
27701 gcc_assert (current_function_decl == fun);
27702 cold_text_section = unlikely_text_section ();
27703 switch_to_section (cold_text_section);
27704 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27705 switch_to_section (sec);
27706 }
27707
27708 dwarf2out_note_section_used ();
27709 call_site_count = 0;
27710 tail_call_site_count = 0;
27711
27712 set_cur_line_info_table (sec);
27713 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27714 }
27715
27716 /* Helper function of dwarf2out_end_function, called only after emitting
27717 the very first function into assembly. Check if some .debug_loc range
27718 might end with a .LVL* label that could be equal to .Ltext0.
27719 In that case we must force using absolute addresses in .debug_loc ranges,
27720 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27721 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27722 list terminator.
27723 Set have_multiple_function_sections to true in that case and
27724 terminate htab traversal. */
27725
27726 int
27727 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27728 {
27729 var_loc_list *entry = *slot;
27730 struct var_loc_node *node;
27731
27732 node = entry->first;
27733 if (node && node->next && node->next->label)
27734 {
27735 unsigned int i;
27736 const char *label = node->next->label;
27737 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27738
27739 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27740 {
27741 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27742 if (strcmp (label, loclabel) == 0)
27743 {
27744 have_multiple_function_sections = true;
27745 return 0;
27746 }
27747 }
27748 }
27749 return 1;
27750 }
27751
27752 /* Hook called after emitting a function into assembly.
27753 This does something only for the very first function emitted. */
27754
27755 static void
27756 dwarf2out_end_function (unsigned int)
27757 {
27758 if (in_first_function_p
27759 && !have_multiple_function_sections
27760 && first_loclabel_num_not_at_text_label
27761 && decl_loc_table)
27762 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27763 in_first_function_p = false;
27764 maybe_at_text_label_p = false;
27765 }
27766
27767 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27768 front-ends register a translation unit even before dwarf2out_init is
27769 called. */
27770 static tree main_translation_unit = NULL_TREE;
27771
27772 /* Hook called by front-ends after they have built their main translation
27773 unit. Associate comp_unit_die with UNIT. */
27774
27775 static void
27776 dwarf2out_register_main_translation_unit (tree unit)
27777 {
27778 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27779 && main_translation_unit == NULL_TREE);
27780 main_translation_unit = unit;
27781 /* If dwarf2out_init has not been called yet, it will perform the association
27782 itself looking at main_translation_unit. */
27783 if (decl_die_table != NULL)
27784 equate_decl_number_to_die (unit, comp_unit_die ());
27785 }
27786
27787 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27788
27789 static void
27790 push_dw_line_info_entry (dw_line_info_table *table,
27791 enum dw_line_info_opcode opcode, unsigned int val)
27792 {
27793 dw_line_info_entry e;
27794 e.opcode = opcode;
27795 e.val = val;
27796 vec_safe_push (table->entries, e);
27797 }
27798
27799 /* Output a label to mark the beginning of a source code line entry
27800 and record information relating to this source line, in
27801 'line_info_table' for later output of the .debug_line section. */
27802 /* ??? The discriminator parameter ought to be unsigned. */
27803
27804 static void
27805 dwarf2out_source_line (unsigned int line, unsigned int column,
27806 const char *filename,
27807 int discriminator, bool is_stmt)
27808 {
27809 unsigned int file_num;
27810 dw_line_info_table *table;
27811 static var_loc_view lvugid;
27812
27813 if (debug_info_level < DINFO_LEVEL_TERSE)
27814 return;
27815
27816 table = cur_line_info_table;
27817
27818 if (line == 0)
27819 {
27820 if (debug_variable_location_views
27821 && output_asm_line_debug_info ()
27822 && table && !RESETTING_VIEW_P (table->view))
27823 {
27824 /* If we're using the assembler to compute view numbers, we
27825 can't issue a .loc directive for line zero, so we can't
27826 get a view number at this point. We might attempt to
27827 compute it from the previous view, or equate it to a
27828 subsequent view (though it might not be there!), but
27829 since we're omitting the line number entry, we might as
27830 well omit the view number as well. That means pretending
27831 it's a view number zero, which might very well turn out
27832 to be correct. ??? Extend the assembler so that the
27833 compiler could emit e.g. ".locview .LVU#", to output a
27834 view without changing line number information. We'd then
27835 have to count it in symviews_since_reset; when it's omitted,
27836 it doesn't count. */
27837 if (!zero_view_p)
27838 zero_view_p = BITMAP_GGC_ALLOC ();
27839 bitmap_set_bit (zero_view_p, table->view);
27840 if (flag_debug_asm)
27841 {
27842 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27843 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27844 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27845 ASM_COMMENT_START);
27846 assemble_name (asm_out_file, label);
27847 putc ('\n', asm_out_file);
27848 }
27849 table->view = ++lvugid;
27850 }
27851 return;
27852 }
27853
27854 /* The discriminator column was added in DWARF 4. Simplify the code
27855 below by clearing it if we're not supposed to output it. */
27856 if (dwarf_version < 4 && dwarf_strict)
27857 discriminator = 0;
27858
27859 if (!debug_column_info)
27860 column = 0;
27861
27862 file_num = maybe_emit_file (lookup_filename (filename));
27863
27864 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27865 the debugger has used the second (possibly duplicate) line number
27866 at the beginning of the function to mark the end of the prologue.
27867 We could eliminate any other duplicates within the function. For
27868 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27869 that second line number entry. */
27870 /* Recall that this end-of-prologue indication is *not* the same thing
27871 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27872 to which the hook corresponds, follows the last insn that was
27873 emitted by gen_prologue. What we need is to precede the first insn
27874 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27875 insn that corresponds to something the user wrote. These may be
27876 very different locations once scheduling is enabled. */
27877
27878 if (0 && file_num == table->file_num
27879 && line == table->line_num
27880 && column == table->column_num
27881 && discriminator == table->discrim_num
27882 && is_stmt == table->is_stmt)
27883 return;
27884
27885 switch_to_section (current_function_section ());
27886
27887 /* If requested, emit something human-readable. */
27888 if (flag_debug_asm)
27889 {
27890 if (debug_column_info)
27891 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27892 filename, line, column);
27893 else
27894 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27895 filename, line);
27896 }
27897
27898 if (output_asm_line_debug_info ())
27899 {
27900 /* Emit the .loc directive understood by GNU as. */
27901 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27902 file_num, line, is_stmt, discriminator */
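/* For illustration only: with made-up operand values, the full directive
   emitted here might read ".loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5".  */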
27903 fputs ("\t.loc ", asm_out_file);
27904 fprint_ul (asm_out_file, file_num);
27905 putc (' ', asm_out_file);
27906 fprint_ul (asm_out_file, line);
27907 putc (' ', asm_out_file);
27908 fprint_ul (asm_out_file, column);
27909
27910 if (is_stmt != table->is_stmt)
27911 {
27912 fputs (" is_stmt ", asm_out_file);
27913 putc (is_stmt ? '1' : '0', asm_out_file);
27914 }
27915 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27916 {
27917 gcc_assert (discriminator > 0);
27918 fputs (" discriminator ", asm_out_file);
27919 fprint_ul (asm_out_file, (unsigned long) discriminator);
27920 }
27921 if (debug_variable_location_views)
27922 {
27923 if (!RESETTING_VIEW_P (table->view))
27924 {
27925 table->symviews_since_reset++;
27926 if (table->symviews_since_reset > symview_upper_bound)
27927 symview_upper_bound = table->symviews_since_reset;
27928 /* When we're using the assembler to compute view
27929 numbers, we output symbolic labels after "view" in
27930 .loc directives, and the assembler will set them for
27931 us, so that we can refer to the view numbers in
27932 location lists. The only exceptions are when we know
27933 a view will be zero: "-0" is a forced reset, used
27934 e.g. in the beginning of functions, whereas "0" tells
27935 the assembler to check that there was a PC change
27936 since the previous view, in a way that implicitly
27937 resets the next view. */
27938 fputs (" view ", asm_out_file);
27939 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27940 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27941 assemble_name (asm_out_file, label);
27942 table->view = ++lvugid;
27943 }
27944 else
27945 {
27946 table->symviews_since_reset = 0;
27947 if (FORCE_RESETTING_VIEW_P (table->view))
27948 fputs (" view -0", asm_out_file);
27949 else
27950 fputs (" view 0", asm_out_file);
27951 /* Mark the present view as a zero view. Earlier debug
27952 binds may have already added its id to loclists to be
27953 emitted later, so we can't reuse the id for something
27954 else. However, it's good to know whether a view is
27955 known to be zero, because then we may be able to
27956 optimize out locviews that are all zeros, so take
27957 note of it in zero_view_p. */
27958 if (!zero_view_p)
27959 zero_view_p = BITMAP_GGC_ALLOC ();
27960 bitmap_set_bit (zero_view_p, lvugid);
27961 table->view = ++lvugid;
27962 }
27963 }
27964 putc ('\n', asm_out_file);
27965 }
27966 else
27967 {
27968 unsigned int label_num = ++line_info_label_num;
27969
27970 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27971
27972 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27973 push_dw_line_info_entry (table, LI_adv_address, label_num);
27974 else
27975 push_dw_line_info_entry (table, LI_set_address, label_num);
27976 if (debug_variable_location_views)
27977 {
27978 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27979 if (resetting)
27980 table->view = 0;
27981
27982 if (flag_debug_asm)
27983 fprintf (asm_out_file, "\t%s view %s%d\n",
27984 ASM_COMMENT_START,
27985 resetting ? "-" : "",
27986 table->view);
27987
27988 table->view++;
27989 }
27990 if (file_num != table->file_num)
27991 push_dw_line_info_entry (table, LI_set_file, file_num);
27992 if (discriminator != table->discrim_num)
27993 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27994 if (is_stmt != table->is_stmt)
27995 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27996 push_dw_line_info_entry (table, LI_set_line, line);
27997 if (debug_column_info)
27998 push_dw_line_info_entry (table, LI_set_column, column);
27999 }
28000
28001 table->file_num = file_num;
28002 table->line_num = line;
28003 table->column_num = column;
28004 table->discrim_num = discriminator;
28005 table->is_stmt = is_stmt;
28006 table->in_use = true;
28007 }
28008
28009 /* Record the beginning of a new source file. */
28010
28011 static void
28012 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28013 {
28014 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28015 {
28016 macinfo_entry e;
28017 e.code = DW_MACINFO_start_file;
28018 e.lineno = lineno;
28019 e.info = ggc_strdup (filename);
28020 vec_safe_push (macinfo_table, e);
28021 }
28022 }
28023
28024 /* Record the end of a source file. */
28025
28026 static void
28027 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28028 {
28029 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28030 {
28031 macinfo_entry e;
28032 e.code = DW_MACINFO_end_file;
28033 e.lineno = lineno;
28034 e.info = NULL;
28035 vec_safe_push (macinfo_table, e);
28036 }
28037 }
28038
28039 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28040 the tail part of the directive line, i.e. the part past the initial
28041 whitespace, '#', whitespace, directive-name and following whitespace. */
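/* For example (illustrative values): for "#define FOO(x) ((x) + 1)" the
   buffer would typically be "FOO(x) ((x) + 1)", i.e. the macro name, any
   parameter list, a space and the replacement text.  */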
28042
28043 static void
28044 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28045 const char *buffer ATTRIBUTE_UNUSED)
28046 {
28047 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28048 {
28049 macinfo_entry e;
28050 /* Insert a dummy first entry to be able to optimize the whole
28051 predefined macro block using DW_MACRO_import. */
28052 if (macinfo_table->is_empty () && lineno <= 1)
28053 {
28054 e.code = 0;
28055 e.lineno = 0;
28056 e.info = NULL;
28057 vec_safe_push (macinfo_table, e);
28058 }
28059 e.code = DW_MACINFO_define;
28060 e.lineno = lineno;
28061 e.info = ggc_strdup (buffer);
28062 vec_safe_push (macinfo_table, e);
28063 }
28064 }
28065
28066 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28067 the tail part of the directive line, i.e. the part past the initial
28068 whitespace, '#', whitespace, directive-name and following whitespace. */
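/* For example (illustrative): for "#undef FOO" the buffer would simply
   be "FOO".  */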
28069
28070 static void
28071 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28072 const char *buffer ATTRIBUTE_UNUSED)
28073 {
28074 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28075 {
28076 macinfo_entry e;
28077 /* Insert a dummy first entry to be able to optimize the whole
28078 predefined macro block using DW_MACRO_import. */
28079 if (macinfo_table->is_empty () && lineno <= 1)
28080 {
28081 e.code = 0;
28082 e.lineno = 0;
28083 e.info = NULL;
28084 vec_safe_push (macinfo_table, e);
28085 }
28086 e.code = DW_MACINFO_undef;
28087 e.lineno = lineno;
28088 e.info = ggc_strdup (buffer);
28089 vec_safe_push (macinfo_table, e);
28090 }
28091 }
28092
28093 /* Helpers to manipulate the hash table of macinfo entries. */
28094
28095 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28096 {
28097 static inline hashval_t hash (const macinfo_entry *);
28098 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28099 };
28100
28101 inline hashval_t
28102 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28103 {
28104 return htab_hash_string (entry->info);
28105 }
28106
28107 inline bool
28108 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28109 const macinfo_entry *entry2)
28110 {
28111 return !strcmp (entry1->info, entry2->info);
28112 }
28113
28114 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28115
28116 /* Output a single .debug_macinfo entry. */
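/* Sketch of the encodings handled below: a DW_MACINFO_define op is written
   as one opcode byte, a uleb128 line number and the NUL-terminated
   "name definition" string, while the _strp/_strx variants emit an offset
   or index into the string table instead of the inline string.  */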
28117
28118 static void
28119 output_macinfo_op (macinfo_entry *ref)
28120 {
28121 int file_num;
28122 size_t len;
28123 struct indirect_string_node *node;
28124 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28125 struct dwarf_file_data *fd;
28126
28127 switch (ref->code)
28128 {
28129 case DW_MACINFO_start_file:
28130 fd = lookup_filename (ref->info);
28131 file_num = maybe_emit_file (fd);
28132 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28133 dw2_asm_output_data_uleb128 (ref->lineno,
28134 "Included from line number %lu",
28135 (unsigned long) ref->lineno);
28136 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28137 break;
28138 case DW_MACINFO_end_file:
28139 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28140 break;
28141 case DW_MACINFO_define:
28142 case DW_MACINFO_undef:
28143 len = strlen (ref->info) + 1;
28144 if (!dwarf_strict
28145 && len > DWARF_OFFSET_SIZE
28146 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28147 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28148 {
28149 ref->code = ref->code == DW_MACINFO_define
28150 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28151 output_macinfo_op (ref);
28152 return;
28153 }
28154 dw2_asm_output_data (1, ref->code,
28155 ref->code == DW_MACINFO_define
28156 ? "Define macro" : "Undefine macro");
28157 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28158 (unsigned long) ref->lineno);
28159 dw2_asm_output_nstring (ref->info, -1, "The macro");
28160 break;
28161 case DW_MACRO_define_strp:
28162 case DW_MACRO_undef_strp:
28163 node = find_AT_string (ref->info);
28164 gcc_assert (node
28165 && (node->form == DW_FORM_strp
28166 || node->form == dwarf_FORM (DW_FORM_strx)));
28167 dw2_asm_output_data (1, ref->code,
28168 ref->code == DW_MACRO_define_strp
28169 ? "Define macro strp"
28170 : "Undefine macro strp");
28171 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28172 (unsigned long) ref->lineno);
28173 if (node->form == DW_FORM_strp)
28174 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28175 debug_str_section, "The macro: \"%s\"",
28176 ref->info);
28177 else
28178 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28179 ref->info);
28180 break;
28181 case DW_MACRO_import:
28182 dw2_asm_output_data (1, ref->code, "Import");
28183 ASM_GENERATE_INTERNAL_LABEL (label,
28184 DEBUG_MACRO_SECTION_LABEL,
28185 ref->lineno + macinfo_label_base);
28186 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28187 break;
28188 default:
28189 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28190 ASM_COMMENT_START, (unsigned long) ref->code);
28191 break;
28192 }
28193 }
28194
28195 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28196 other compilation units' .debug_macinfo sections. IDX is the index of
28197 the first define/undef op; return the number of ops that should be
28198 emitted in a comdat .debug_macinfo section and emit
28199 a DW_MACRO_import entry referencing it.
28200 If the define/undef entries should be emitted normally, return 0. */
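/* Sketch of the intent: the block of predefined macros, which is typically
   identical across the compilation units of a build, can be emitted once in
   a comdat section named after its contents and referenced from each CU via
   DW_MACRO_import, letting the linker deduplicate the copies.  */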
28201
28202 static unsigned
28203 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28204 macinfo_hash_type **macinfo_htab)
28205 {
28206 macinfo_entry *first, *second, *cur, *inc;
28207 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28208 unsigned char checksum[16];
28209 struct md5_ctx ctx;
28210 char *grp_name, *tail;
28211 const char *base;
28212 unsigned int i, count, encoded_filename_len, linebuf_len;
28213 macinfo_entry **slot;
28214
28215 first = &(*macinfo_table)[idx];
28216 second = &(*macinfo_table)[idx + 1];
28217
28218 /* Optimize only if there are at least two consecutive define/undef ops,
28219 and either all of them are before the first DW_MACINFO_start_file
28220 with lineno {0,1} (i.e. the predefined macro block), or all of them are
28221 in some included header file. */
28222 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28223 return 0;
28224 if (vec_safe_is_empty (files))
28225 {
28226 if (first->lineno > 1 || second->lineno > 1)
28227 return 0;
28228 }
28229 else if (first->lineno == 0)
28230 return 0;
28231
28232 /* Find the last define/undef entry that can be grouped together
28233 with first, and at the same time compute the MD5 checksum of their
28234 codes, line numbers and strings. */
28235 md5_init_ctx (&ctx);
28236 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28237 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28238 break;
28239 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28240 break;
28241 else
28242 {
28243 unsigned char code = cur->code;
28244 md5_process_bytes (&code, 1, &ctx);
28245 checksum_uleb128 (cur->lineno, &ctx);
28246 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28247 }
28248 md5_finish_ctx (&ctx, checksum);
28249 count = i - idx;
28250
28251 /* From the containing include filename (if any) pick up just
28252 usable characters from its basename. */
28253 if (vec_safe_is_empty (files))
28254 base = "";
28255 else
28256 base = lbasename (files->last ().info);
28257 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28258 if (ISIDNUM (base[i]) || base[i] == '.')
28259 encoded_filename_len++;
28260 /* Count . at the end. */
28261 if (encoded_filename_len)
28262 encoded_filename_len++;
28263
28264 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28265 linebuf_len = strlen (linebuf);
28266
28267 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
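/* For instance (values invented for illustration), the predefined-macro
   block might be named "wm4.0.0123456789abcdef0123456789abcdef", while a
   run of ops from an included header stdio.h starting at line 3 might be
   named "wm4.stdio.h.3.<32 hex md5 digits>".  */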
28268 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28269 + 16 * 2 + 1);
28270 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28271 tail = grp_name + 4;
28272 if (encoded_filename_len)
28273 {
28274 for (i = 0; base[i]; i++)
28275 if (ISIDNUM (base[i]) || base[i] == '.')
28276 *tail++ = base[i];
28277 *tail++ = '.';
28278 }
28279 memcpy (tail, linebuf, linebuf_len);
28280 tail += linebuf_len;
28281 *tail++ = '.';
28282 for (i = 0; i < 16; i++)
28283 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28284
28285 /* Construct a macinfo_entry for DW_MACRO_import
28286 in the empty vector entry before the first define/undef. */
28287 inc = &(*macinfo_table)[idx - 1];
28288 inc->code = DW_MACRO_import;
28289 inc->lineno = 0;
28290 inc->info = ggc_strdup (grp_name);
28291 if (!*macinfo_htab)
28292 *macinfo_htab = new macinfo_hash_type (10);
28293 /* Avoid emitting duplicates. */
28294 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28295 if (*slot != NULL)
28296 {
28297 inc->code = 0;
28298 inc->info = NULL;
28299 /* If such an entry has been used before, just emit
28300 a DW_MACRO_import op. */
28301 inc = *slot;
28302 output_macinfo_op (inc);
28303 /* And clear all macinfo_entries in the range to avoid emitting them
28304 in the second pass. */
28305 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28306 {
28307 cur->code = 0;
28308 cur->info = NULL;
28309 }
28310 }
28311 else
28312 {
28313 *slot = inc;
28314 inc->lineno = (*macinfo_htab)->elements ();
28315 output_macinfo_op (inc);
28316 }
28317 return count;
28318 }
28319
28320 /* Save any strings needed by the macinfo table in the debug str
28321 table. All strings must be collected into the table by the time
28322 index_string is called. */
28323
28324 static void
28325 save_macinfo_strings (void)
28326 {
28327 unsigned len;
28328 unsigned i;
28329 macinfo_entry *ref;
28330
28331 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28332 {
28333 switch (ref->code)
28334 {
28335 /* Match the logic in output_macinfo_op to decide on
28336 indirect strings. */
28337 case DW_MACINFO_define:
28338 case DW_MACINFO_undef:
28339 len = strlen (ref->info) + 1;
28340 if (!dwarf_strict
28341 && len > DWARF_OFFSET_SIZE
28342 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28343 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28344 set_indirect_string (find_AT_string (ref->info));
28345 break;
28346 case DW_MACINFO_start_file:
28347 /* -gsplit-dwarf -g3 will also output filename as indirect
28348 string. */
28349 if (!dwarf_split_debug_info)
28350 break;
28351 /* Fall through. */
28352 case DW_MACRO_define_strp:
28353 case DW_MACRO_undef_strp:
28354 set_indirect_string (find_AT_string (ref->info));
28355 break;
28356 default:
28357 break;
28358 }
28359 }
28360 }
28361
28362 /* Output macinfo section(s). */
28363
28364 static void
28365 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28366 {
28367 unsigned i;
28368 unsigned long length = vec_safe_length (macinfo_table);
28369 macinfo_entry *ref;
28370 vec<macinfo_entry, va_gc> *files = NULL;
28371 macinfo_hash_type *macinfo_htab = NULL;
28372 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28373
28374 if (! length)
28375 return;
28376
28377 /* output_macinfo* uses these interchangeably. */
28378 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28379 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28380 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28381 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28382
28383 /* AIX Assembler inserts the length, so adjust the reference to match the
28384 offset expected by debuggers. */
28385 strcpy (dl_section_ref, debug_line_label);
28386 if (XCOFF_DEBUGGING_INFO)
28387 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28388
28389 /* For .debug_macro emit the section header. */
28390 if (!dwarf_strict || dwarf_version >= 5)
28391 {
28392 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28393 "DWARF macro version number");
28394 if (DWARF_OFFSET_SIZE == 8)
28395 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28396 else
28397 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28398 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28399 debug_line_section, NULL);
28400 }
28401
28402 /* The first loop emits the primary .debug_macinfo section, clearing
28403 each macinfo_entry after its op has been output.
28404 If a longer range of define/undef ops can be optimized using
28405 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28406 the vector entry before the first define/undef in the range, while
28407 the define/undef ops themselves are kept but not emitted here. */
28408 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28409 {
28410 switch (ref->code)
28411 {
28412 case DW_MACINFO_start_file:
28413 vec_safe_push (files, *ref);
28414 break;
28415 case DW_MACINFO_end_file:
28416 if (!vec_safe_is_empty (files))
28417 files->pop ();
28418 break;
28419 case DW_MACINFO_define:
28420 case DW_MACINFO_undef:
28421 if ((!dwarf_strict || dwarf_version >= 5)
28422 && HAVE_COMDAT_GROUP
28423 && vec_safe_length (files) != 1
28424 && i > 0
28425 && i + 1 < length
28426 && (*macinfo_table)[i - 1].code == 0)
28427 {
28428 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28429 if (count)
28430 {
28431 i += count - 1;
28432 continue;
28433 }
28434 }
28435 break;
28436 case 0:
28437 /* A dummy entry may be inserted at the beginning to be able
28438 to optimize the whole block of predefined macros. */
28439 if (i == 0)
28440 continue;
28441 default:
28442 break;
28443 }
28444 output_macinfo_op (ref);
28445 ref->info = NULL;
28446 ref->code = 0;
28447 }
28448
28449 if (!macinfo_htab)
28450 return;
28451
28452 /* Save the number of transparent includes so we can adjust the
28453 label number for the fat LTO object DWARF. */
28454 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28455
28456 delete macinfo_htab;
28457 macinfo_htab = NULL;
28458
28459 /* If any DW_MACRO_import ops were used, then at each such entry
28460 terminate the current chain, switch to a new comdat .debug_macinfo
28461 section and emit the corresponding define/undef entries within it. */
28462 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28463 switch (ref->code)
28464 {
28465 case 0:
28466 continue;
28467 case DW_MACRO_import:
28468 {
28469 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28470 tree comdat_key = get_identifier (ref->info);
28471 /* Terminate the previous .debug_macinfo section. */
28472 dw2_asm_output_data (1, 0, "End compilation unit");
28473 targetm.asm_out.named_section (debug_macinfo_section_name,
28474 SECTION_DEBUG
28475 | SECTION_LINKONCE
28476 | (early_lto_debug
28477 ? SECTION_EXCLUDE : 0),
28478 comdat_key);
28479 ASM_GENERATE_INTERNAL_LABEL (label,
28480 DEBUG_MACRO_SECTION_LABEL,
28481 ref->lineno + macinfo_label_base);
28482 ASM_OUTPUT_LABEL (asm_out_file, label);
28483 ref->code = 0;
28484 ref->info = NULL;
28485 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28486 "DWARF macro version number");
28487 if (DWARF_OFFSET_SIZE == 8)
28488 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28489 else
28490 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28491 }
28492 break;
28493 case DW_MACINFO_define:
28494 case DW_MACINFO_undef:
28495 output_macinfo_op (ref);
28496 ref->code = 0;
28497 ref->info = NULL;
28498 break;
28499 default:
28500 gcc_unreachable ();
28501 }
28502
28503 macinfo_label_base += macinfo_label_base_adj;
28504 }
28505
28506 /* Initialize the various sections and labels for dwarf output, using the
28507 early LTO debug variants if EARLY_LTO_DEBUG. Returns the generation
28508 (zero-based number of times the function has been called). */
28509
28510 static unsigned
28511 init_sections_and_labels (bool early_lto_debug)
28512 {
28513 /* As we may get called multiple times have a generation count for
28514 labels. */
28515 static unsigned generation = 0;
28516
28517 if (early_lto_debug)
28518 {
28519 if (!dwarf_split_debug_info)
28520 {
28521 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28522 SECTION_DEBUG | SECTION_EXCLUDE,
28523 NULL);
28524 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28525 SECTION_DEBUG | SECTION_EXCLUDE,
28526 NULL);
28527 debug_macinfo_section_name
28528 = ((dwarf_strict && dwarf_version < 5)
28529 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28530 debug_macinfo_section = get_section (debug_macinfo_section_name,
28531 SECTION_DEBUG
28532 | SECTION_EXCLUDE, NULL);
28533 }
28534 else
28535 {
28536 /* ??? Which of the following do we need early? */
28537 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28538 SECTION_DEBUG | SECTION_EXCLUDE,
28539 NULL);
28540 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28541 SECTION_DEBUG | SECTION_EXCLUDE,
28542 NULL);
28543 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28544 SECTION_DEBUG
28545 | SECTION_EXCLUDE, NULL);
28546 debug_skeleton_abbrev_section
28547 = get_section (DEBUG_LTO_ABBREV_SECTION,
28548 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28549 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28550 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28551 generation);
28552
28553 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28554 stay in the main .o, but the skeleton_line goes into the split
28555 off dwo. */
28556 debug_skeleton_line_section
28557 = get_section (DEBUG_LTO_LINE_SECTION,
28558 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28559 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28560 DEBUG_SKELETON_LINE_SECTION_LABEL,
28561 generation);
28562 debug_str_offsets_section
28563 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28564 SECTION_DEBUG | SECTION_EXCLUDE,
28565 NULL);
28566 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28567 DEBUG_SKELETON_INFO_SECTION_LABEL,
28568 generation);
28569 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28570 DEBUG_STR_DWO_SECTION_FLAGS,
28571 NULL);
28572 debug_macinfo_section_name
28573 = ((dwarf_strict && dwarf_version < 5)
28574 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28575 debug_macinfo_section = get_section (debug_macinfo_section_name,
28576 SECTION_DEBUG | SECTION_EXCLUDE,
28577 NULL);
28578 }
28579 /* For macro info and the file table we have to refer to a
28580 debug_line section. */
28581 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28582 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28583 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28584 DEBUG_LINE_SECTION_LABEL, generation);
28585
28586 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28587 DEBUG_STR_SECTION_FLAGS
28588 | SECTION_EXCLUDE, NULL);
28589 if (!dwarf_split_debug_info)
28590 debug_line_str_section
28591 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28592 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28593 }
28594 else
28595 {
28596 if (!dwarf_split_debug_info)
28597 {
28598 debug_info_section = get_section (DEBUG_INFO_SECTION,
28599 SECTION_DEBUG, NULL);
28600 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28601 SECTION_DEBUG, NULL);
28602 debug_loc_section = get_section (dwarf_version >= 5
28603 ? DEBUG_LOCLISTS_SECTION
28604 : DEBUG_LOC_SECTION,
28605 SECTION_DEBUG, NULL);
28606 debug_macinfo_section_name
28607 = ((dwarf_strict && dwarf_version < 5)
28608 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28609 debug_macinfo_section = get_section (debug_macinfo_section_name,
28610 SECTION_DEBUG, NULL);
28611 }
28612 else
28613 {
28614 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28615 SECTION_DEBUG | SECTION_EXCLUDE,
28616 NULL);
28617 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28618 SECTION_DEBUG | SECTION_EXCLUDE,
28619 NULL);
28620 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28621 SECTION_DEBUG, NULL);
28622 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28623 SECTION_DEBUG, NULL);
28624 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28625 SECTION_DEBUG, NULL);
28626 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28627 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28628 generation);
28629
28630 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28631 stay in the main .o, but the skeleton_line goes into the
28632 split off dwo. */
28633 debug_skeleton_line_section
28634 = get_section (DEBUG_DWO_LINE_SECTION,
28635 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28636 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28637 DEBUG_SKELETON_LINE_SECTION_LABEL,
28638 generation);
28639 debug_str_offsets_section
28640 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28641 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28642 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28643 DEBUG_SKELETON_INFO_SECTION_LABEL,
28644 generation);
28645 debug_loc_section = get_section (dwarf_version >= 5
28646 ? DEBUG_DWO_LOCLISTS_SECTION
28647 : DEBUG_DWO_LOC_SECTION,
28648 SECTION_DEBUG | SECTION_EXCLUDE,
28649 NULL);
28650 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28651 DEBUG_STR_DWO_SECTION_FLAGS,
28652 NULL);
28653 debug_macinfo_section_name
28654 = ((dwarf_strict && dwarf_version < 5)
28655 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28656 debug_macinfo_section = get_section (debug_macinfo_section_name,
28657 SECTION_DEBUG | SECTION_EXCLUDE,
28658 NULL);
28659 }
28660 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28661 SECTION_DEBUG, NULL);
28662 debug_line_section = get_section (DEBUG_LINE_SECTION,
28663 SECTION_DEBUG, NULL);
28664 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28665 SECTION_DEBUG, NULL);
28666 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28667 SECTION_DEBUG, NULL);
28668 debug_str_section = get_section (DEBUG_STR_SECTION,
28669 DEBUG_STR_SECTION_FLAGS, NULL);
28670 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28671 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28672 DEBUG_STR_SECTION_FLAGS, NULL);
28673
28674 debug_ranges_section = get_section (dwarf_version >= 5
28675 ? DEBUG_RNGLISTS_SECTION
28676 : DEBUG_RANGES_SECTION,
28677 SECTION_DEBUG, NULL);
28678 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28679 SECTION_DEBUG, NULL);
28680 }
28681
28682 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28683 DEBUG_ABBREV_SECTION_LABEL, generation);
28684 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28685 DEBUG_INFO_SECTION_LABEL, generation);
28686 info_section_emitted = false;
28687 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28688 DEBUG_LINE_SECTION_LABEL, generation);
28689 /* There are up to 4 unique ranges labels per generation.
28690 See also output_rnglists. */
28691 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28692 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28693 if (dwarf_version >= 5 && dwarf_split_debug_info)
28694 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28695 DEBUG_RANGES_SECTION_LABEL,
28696 1 + generation * 4);
28697 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28698 DEBUG_ADDR_SECTION_LABEL, generation);
28699 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28700 (dwarf_strict && dwarf_version < 5)
28701 ? DEBUG_MACINFO_SECTION_LABEL
28702 : DEBUG_MACRO_SECTION_LABEL, generation);
28703 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28704 generation);
28705
28706 ++generation;
28707 return generation - 1;
28708 }
28709
28710 /* Set up for Dwarf output at the start of compilation. */
28711
28712 static void
28713 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28714 {
28715 /* Allocate the file_table. */
28716 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28717
28718 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28719 /* Allocate the decl_die_table. */
28720 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28721
28722 /* Allocate the decl_loc_table. */
28723 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28724
28725 /* Allocate the cached_dw_loc_list_table. */
28726 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28727
28728 /* Allocate the initial hunk of the abbrev_die_table. */
28729 vec_alloc (abbrev_die_table, 256);
28730 /* Zero-th entry is allocated, but unused. */
28731 abbrev_die_table->quick_push (NULL);
28732
28733 /* Allocate the dwarf_proc_stack_usage_map. */
28734 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28735
28736 /* Allocate the pubtypes and pubnames vectors. */
28737 vec_alloc (pubname_table, 32);
28738 vec_alloc (pubtype_table, 32);
28739
28740 vec_alloc (incomplete_types, 64);
28741
28742 vec_alloc (used_rtx_array, 32);
28743
28744 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28745 vec_alloc (macinfo_table, 64);
28746 #endif
28747
28748 /* If front-ends already registered a main translation unit but we were not
28749 ready to perform the association, do this now. */
28750 if (main_translation_unit != NULL_TREE)
28751 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28752 }
28753
28754 /* Called before compile () starts outputting functions, variables
28755 and toplevel asms into assembly. */
28756
28757 static void
28758 dwarf2out_assembly_start (void)
28759 {
28760 if (text_section_line_info)
28761 return;
28762
28763 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28764 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28765 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28766 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28767 COLD_TEXT_SECTION_LABEL, 0);
28768 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28769
28770 switch_to_section (text_section);
28771 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28772 #endif
28773
28774 /* Make sure the line number table for .text always exists. */
28775 text_section_line_info = new_line_info_table ();
28776 text_section_line_info->end_label = text_end_label;
28777
28778 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28779 cur_line_info_table = text_section_line_info;
28780 #endif
28781
28782 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28783 && dwarf2out_do_cfi_asm ()
28784 && !dwarf2out_do_eh_frame ())
28785 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28786 }
28787
28788 /* A helper function for dwarf2out_finish called through
28789 htab_traverse. Assign a string its index. All strings must be
28790 collected into the table by the time index_string is called,
28791 because the indexing code relies on htab_traverse to traverse nodes
28792 in the same order for each run. */
28793
28794 int
28795 index_string (indirect_string_node **h, unsigned int *index)
28796 {
28797 indirect_string_node *node = *h;
28798
28799 find_string_form (node);
28800 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28801 {
28802 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28803 node->index = *index;
28804 *index += 1;
28805 }
28806 return 1;
28807 }
28808
28809 /* A helper function for output_indirect_strings called through
28810 htab_traverse. Output the offset to a string and update the
28811 current offset. */
28812
28813 int
28814 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28815 {
28816 indirect_string_node *node = *h;
28817
28818 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28819 {
28820 /* Assert that this node has been assigned an index. */
28821 gcc_assert (node->index != NO_INDEX_ASSIGNED
28822 && node->index != NOT_INDEXED);
28823 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28824 "indexed string 0x%x: %s", node->index, node->str);
28825 *offset += strlen (node->str) + 1;
28826 }
28827 return 1;
28828 }
28829
28830 /* A helper function for dwarf2out_finish called through
28831 htab_traverse. Output the indexed string. */
28832
28833 int
28834 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28835 {
28836 struct indirect_string_node *node = *h;
28837
28838 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28839 {
28840 /* Assert that the strings are output in the same order as their
28841 indexes were assigned. */
28842 gcc_assert (*cur_idx == node->index);
28843 assemble_string (node->str, strlen (node->str) + 1);
28844 *cur_idx += 1;
28845 }
28846 return 1;
28847 }
28848
28849 /* A helper function for output_indirect_strings. Counts the number
28850 of indexed string offsets. Must match the logic of the functions
28851 output_index_string[_offsets] above. */
28852 int
28853 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28854 {
28855 struct indirect_string_node *node = *h;
28856
28857 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28858 *last_idx += 1;
28859 return 1;
28860 }
28861
28862 /* A helper function for dwarf2out_finish called through
28863 htab_traverse. Emit one queued .debug_str string. */
28864
28865 int
28866 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28867 {
28868 struct indirect_string_node *node = *h;
28869
28870 node->form = find_string_form (node);
28871 if (node->form == form && node->refcount > 0)
28872 {
28873 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28874 assemble_string (node->str, strlen (node->str) + 1);
28875 }
28876
28877 return 1;
28878 }
28879
28880 /* Output the indexed string table. */
28881
28882 static void
28883 output_indirect_strings (void)
28884 {
28885 switch_to_section (debug_str_section);
28886 if (!dwarf_split_debug_info)
28887 debug_str_hash->traverse<enum dwarf_form,
28888 output_indirect_string> (DW_FORM_strp);
28889 else
28890 {
28891 unsigned int offset = 0;
28892 unsigned int cur_idx = 0;
28893
28894 if (skeleton_debug_str_hash)
28895 skeleton_debug_str_hash->traverse<enum dwarf_form,
28896 output_indirect_string> (DW_FORM_strp);
28897
28898 switch_to_section (debug_str_offsets_section);
28899 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28900 header. Note that we don't need to generate a label for the
28901 actual index table following the header here, because this is
28902 for the split dwarf case only. In a .dwo file there is only
28903 one string offsets table (and one debug info section). But
28904 if we were to start using string offset tables for the main (or
28905 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28906 pointing to the actual index after the header. Split dwarf
28907 units will never have a string offsets base attribute. When
28908 a split unit is moved into a .dwp file the string offsets can
28909 be found through the .debug_cu_index section table. */
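/* Schematically, for 32-bit DWARF offsets the unit header emitted below is
   a 4-byte length equal to last_idx * 4 + 4 (covering the version, padding
   and the offsets that follow), a 2-byte version number (5) and 2 bytes of
   padding.  */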
28910 if (dwarf_version >= 5)
28911 {
28912 unsigned int last_idx = 0;
28913 unsigned long str_offsets_length;
28914
28915 debug_str_hash->traverse_noresize
28916 <unsigned int *, count_index_strings> (&last_idx);
28917 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28918 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28919 dw2_asm_output_data (4, 0xffffffff,
28920 "Escape value for 64-bit DWARF extension");
28921 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28922 "Length of string offsets unit");
28923 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28924 dw2_asm_output_data (2, 0, "Header zero padding");
28925 }
28926 debug_str_hash->traverse_noresize
28927 <unsigned int *, output_index_string_offset> (&offset);
28928 switch_to_section (debug_str_dwo_section);
28929 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28930 (&cur_idx);
28931 }
28932 }
28933
28934 /* Callback for htab_traverse to assign an index to an entry in the
28935 table, and to write that entry to the .debug_addr section. */
28936
28937 int
28938 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28939 {
28940 addr_table_entry *entry = *slot;
28941
28942 if (entry->refcount == 0)
28943 {
28944 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28945 || entry->index == NOT_INDEXED);
28946 return 1;
28947 }
28948
28949 gcc_assert (entry->index == *cur_index);
28950 (*cur_index)++;
28951
28952 switch (entry->kind)
28953 {
28954 case ate_kind_rtx:
28955 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28956 "0x%x", entry->index);
28957 break;
28958 case ate_kind_rtx_dtprel:
28959 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28960 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28961 DWARF2_ADDR_SIZE,
28962 entry->addr.rtl);
28963 fputc ('\n', asm_out_file);
28964 break;
28965 case ate_kind_label:
28966 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28967 "0x%x", entry->index);
28968 break;
28969 default:
28970 gcc_unreachable ();
28971 }
28972 return 1;
28973 }
28974
28975 /* A helper function for dwarf2out_finish. Counts the number
28976 of indexed addresses. Must match the logic of the function
28977 output_addr_table_entry above. */
28978 int
28979 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28980 {
28981 addr_table_entry *entry = *slot;
28982
28983 if (entry->refcount > 0)
28984 *last_idx += 1;
28985 return 1;
28986 }
28987
28988 /* Produce the .debug_addr section. */
28989
28990 static void
28991 output_addr_table (void)
28992 {
28993 unsigned int index = 0;
28994 if (addr_index_table == NULL || addr_index_table->size () == 0)
28995 return;
28996
28997 switch_to_section (debug_addr_section);
28998 addr_index_table
28999 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29000 }
29001
29002 #if ENABLE_ASSERT_CHECKING
29003 /* Verify that all marks are clear. */
29004
29005 static void
29006 verify_marks_clear (dw_die_ref die)
29007 {
29008 dw_die_ref c;
29009
29010 gcc_assert (! die->die_mark);
29011 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29012 }
29013 #endif /* ENABLE_ASSERT_CHECKING */
29014
29015 /* Clear the marks for a die and its children.
29016 Be cool if the mark isn't set. */
29017
29018 static void
29019 prune_unmark_dies (dw_die_ref die)
29020 {
29021 dw_die_ref c;
29022
29023 if (die->die_mark)
29024 die->die_mark = 0;
29025 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29026 }
29027
29028 /* Given LOC that is referenced by a DIE we're marking as used, find all
29029 DWARF procedures it references and mark them as used as well. */
29030
29031 static void
29032 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29033 {
29034 for (; loc != NULL; loc = loc->dw_loc_next)
29035 switch (loc->dw_loc_opc)
29036 {
29037 case DW_OP_implicit_pointer:
29038 case DW_OP_convert:
29039 case DW_OP_reinterpret:
29040 case DW_OP_GNU_implicit_pointer:
29041 case DW_OP_GNU_convert:
29042 case DW_OP_GNU_reinterpret:
29043 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29044 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29045 break;
29046 case DW_OP_GNU_variable_value:
29047 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29048 {
29049 dw_die_ref ref
29050 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29051 if (ref == NULL)
29052 break;
29053 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29054 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29055 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29056 }
29057 /* FALLTHRU */
29058 case DW_OP_call2:
29059 case DW_OP_call4:
29060 case DW_OP_call_ref:
29061 case DW_OP_const_type:
29062 case DW_OP_GNU_const_type:
29063 case DW_OP_GNU_parameter_ref:
29064 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29065 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29066 break;
29067 case DW_OP_regval_type:
29068 case DW_OP_deref_type:
29069 case DW_OP_GNU_regval_type:
29070 case DW_OP_GNU_deref_type:
29071 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29072 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29073 break;
29074 case DW_OP_entry_value:
29075 case DW_OP_GNU_entry_value:
29076 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29077 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29078 break;
29079 default:
29080 break;
29081 }
29082 }
29083
29084 /* Given DIE that we're marking as used, find any other dies
29085 it references as attributes and mark them as used. */
29086
29087 static void
29088 prune_unused_types_walk_attribs (dw_die_ref die)
29089 {
29090 dw_attr_node *a;
29091 unsigned ix;
29092
29093 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29094 {
29095 switch (AT_class (a))
29096 {
29097 /* Make sure DWARF procedures referenced by location descriptions will
29098 get emitted. */
29099 case dw_val_class_loc:
29100 prune_unused_types_walk_loc_descr (AT_loc (a));
29101 break;
29102 case dw_val_class_loc_list:
29103 for (dw_loc_list_ref list = AT_loc_list (a);
29104 list != NULL;
29105 list = list->dw_loc_next)
29106 prune_unused_types_walk_loc_descr (list->expr);
29107 break;
29108
29109 case dw_val_class_view_list:
29110 /* This points to a loc_list in another attribute, so it's
29111 already covered. */
29112 break;
29113
29114 case dw_val_class_die_ref:
29115 /* A reference to another DIE.
29116 Make sure that it will get emitted.
29117 If it was broken out into a comdat group, don't follow it. */
29118 if (! AT_ref (a)->comdat_type_p
29119 || a->dw_attr == DW_AT_specification)
29120 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29121 break;
29122
29123 case dw_val_class_str:
29124 /* Set the string's refcount to 0 so that prune_unused_types_mark
29125 accounts properly for it. */
29126 a->dw_attr_val.v.val_str->refcount = 0;
29127 break;
29128
29129 default:
29130 break;
29131 }
29132 }
29133 }
29134
29135 /* Mark the child DIEs of DIE that describe its generic parameters and arguments. */
29136
29137 static void
29138 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29139 {
29140 dw_die_ref c;
29141
29142 if (die == NULL || die->die_child == NULL)
29143 return;
29144 c = die->die_child;
29145 do
29146 {
29147 if (is_template_parameter (c))
29148 prune_unused_types_mark (c, 1);
29149 c = c->die_sib;
29150 } while (c && c != die->die_child);
29151 }
29152
29153 /* Mark DIE as being used. If DOKIDS is true, then walk down
29154 to DIE's children. */
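/* A note derived from the code below: DIE->die_mark acts as a small state
machine here: 0 means the DIE has not been visited yet, 1 means it has been
marked as used, and 2 means its children have been walked as well. */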
29155
29156 static void
29157 prune_unused_types_mark (dw_die_ref die, int dokids)
29158 {
29159 dw_die_ref c;
29160
29161 if (die->die_mark == 0)
29162 {
29163 /* We haven't done this node yet. Mark it as used. */
29164 die->die_mark = 1;
29165 /* If this is the DIE of a generic type instantiation,
29166 mark the children DIEs that describe its generic parms and
29167 args. */
29168 prune_unused_types_mark_generic_parms_dies (die);
29169
29170 /* We also have to mark its parents as used.
29171 (But we don't want to mark our parent's kids due to this,
29172 unless it is a class.) */
29173 if (die->die_parent)
29174 prune_unused_types_mark (die->die_parent,
29175 class_scope_p (die->die_parent));
29176
29177 /* Mark any referenced nodes. */
29178 prune_unused_types_walk_attribs (die);
29179
29180 /* If this node is a specification,
29181 also mark the definition, if it exists. */
29182 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29183 prune_unused_types_mark (die->die_definition, 1);
29184 }
29185
29186 if (dokids && die->die_mark != 2)
29187 {
29188 /* We need to walk the children, but haven't done so yet.
29189 Remember that we've walked the kids. */
29190 die->die_mark = 2;
29191
29192 /* If this is an array type, we need to make sure our
29193 kids get marked, even if they're types. If we're
29194 breaking out types into comdat sections, do this
29195 for all type definitions. */
29196 if (die->die_tag == DW_TAG_array_type
29197 || (use_debug_types
29198 && is_type_die (die) && ! is_declaration_die (die)))
29199 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29200 else
29201 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29202 }
29203 }
29204
29205 /* For local classes, check whether any static member functions were
29206 emitted and, if so, mark them. */
29207
29208 static void
29209 prune_unused_types_walk_local_classes (dw_die_ref die)
29210 {
29211 dw_die_ref c;
29212
29213 if (die->die_mark == 2)
29214 return;
29215
29216 switch (die->die_tag)
29217 {
29218 case DW_TAG_structure_type:
29219 case DW_TAG_union_type:
29220 case DW_TAG_class_type:
29221 break;
29222
29223 case DW_TAG_subprogram:
29224 if (!get_AT_flag (die, DW_AT_declaration)
29225 || die->die_definition != NULL)
29226 prune_unused_types_mark (die, 1);
29227 return;
29228
29229 default:
29230 return;
29231 }
29232
29233 /* Mark children. */
29234 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29235 }
29236
29237 /* Walk the tree DIE and mark types that we actually use. */
29238
29239 static void
29240 prune_unused_types_walk (dw_die_ref die)
29241 {
29242 dw_die_ref c;
29243
29244 /* Don't do anything if this node is already marked and
29245 children have been marked as well. */
29246 if (die->die_mark == 2)
29247 return;
29248
29249 switch (die->die_tag)
29250 {
29251 case DW_TAG_structure_type:
29252 case DW_TAG_union_type:
29253 case DW_TAG_class_type:
29254 if (die->die_perennial_p)
29255 break;
29256
29257 for (c = die->die_parent; c; c = c->die_parent)
29258 if (c->die_tag == DW_TAG_subprogram)
29259 break;
29260
29261 /* Finding used static member functions inside of classes
29262 is needed just for local classes, because for other classes
29263 static member function DIEs with DW_AT_specification
29264 are emitted outside of the DW_TAG_*_type. If we ever change
29265 it, we'd need to call this even for non-local classes. */
29266 if (c)
29267 prune_unused_types_walk_local_classes (die);
29268
29269 /* It's a type node --- don't mark it. */
29270 return;
29271
29272 case DW_TAG_const_type:
29273 case DW_TAG_packed_type:
29274 case DW_TAG_pointer_type:
29275 case DW_TAG_reference_type:
29276 case DW_TAG_rvalue_reference_type:
29277 case DW_TAG_volatile_type:
29278 case DW_TAG_typedef:
29279 case DW_TAG_array_type:
29280 case DW_TAG_interface_type:
29281 case DW_TAG_friend:
29282 case DW_TAG_enumeration_type:
29283 case DW_TAG_subroutine_type:
29284 case DW_TAG_string_type:
29285 case DW_TAG_set_type:
29286 case DW_TAG_subrange_type:
29287 case DW_TAG_ptr_to_member_type:
29288 case DW_TAG_file_type:
29289 /* Type nodes are useful only when other DIEs reference them --- don't
29290 mark them. */
29291 /* FALLTHROUGH */
29292
29293 case DW_TAG_dwarf_procedure:
29294 /* Likewise for DWARF procedures. */
29295
29296 if (die->die_perennial_p)
29297 break;
29298
29299 return;
29300
29301 default:
29302 /* Mark everything else. */
29303 break;
29304 }
29305
29306 if (die->die_mark == 0)
29307 {
29308 die->die_mark = 1;
29309
29310 /* Now, mark any dies referenced from here. */
29311 prune_unused_types_walk_attribs (die);
29312 }
29313
29314 die->die_mark = 2;
29315
29316 /* Mark children. */
29317 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29318 }
29319
29320 /* Increment the string counts on strings referred to from DIE's
29321 attributes. */
29322
29323 static void
29324 prune_unused_types_update_strings (dw_die_ref die)
29325 {
29326 dw_attr_node *a;
29327 unsigned ix;
29328
29329 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29330 if (AT_class (a) == dw_val_class_str)
29331 {
29332 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29333 s->refcount++;
29334 /* Avoid unnecessarily putting strings that are used less than
29335 twice in the hash table. */
29336 if (s->refcount
29337 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29338 {
29339 indirect_string_node **slot
29340 = debug_str_hash->find_slot_with_hash (s->str,
29341 htab_hash_string (s->str),
29342 INSERT);
29343 gcc_assert (*slot == NULL);
29344 *slot = s;
29345 }
29346 }
29347 }
29348
29349 /* Mark DIE and its children as removed. */
29350
29351 static void
29352 mark_removed (dw_die_ref die)
29353 {
29354 dw_die_ref c;
29355 die->removed = true;
29356 FOR_EACH_CHILD (die, c, mark_removed (c));
29357 }
29358
29359 /* Remove from the tree DIE any dies that aren't marked. */
29360
29361 static void
29362 prune_unused_types_prune (dw_die_ref die)
29363 {
29364 dw_die_ref c;
29365
29366 gcc_assert (die->die_mark);
29367 prune_unused_types_update_strings (die);
29368
29369 if (! die->die_child)
29370 return;
29371
29372 c = die->die_child;
29373 do {
29374 dw_die_ref prev = c, next;
29375 for (c = c->die_sib; ! c->die_mark; c = next)
29376 if (c == die->die_child)
29377 {
29378 /* No marked children between 'prev' and the end of the list. */
29379 if (prev == c)
29380 /* No marked children at all. */
29381 die->die_child = NULL;
29382 else
29383 {
29384 prev->die_sib = c->die_sib;
29385 die->die_child = prev;
29386 }
29387 c->die_sib = NULL;
29388 mark_removed (c);
29389 return;
29390 }
29391 else
29392 {
29393 next = c->die_sib;
29394 c->die_sib = NULL;
29395 mark_removed (c);
29396 }
29397
29398 if (c != prev->die_sib)
29399 prev->die_sib = c;
29400 prune_unused_types_prune (c);
29401 } while (c != die->die_child);
29402 }
29403
29404 /* Remove dies representing declarations that we never use. */
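/* Roughly, the pass below proceeds in phases: verify that all marks are
clear, premark types used by global variables, walk the compilation unit,
limbo and comdat type trees marking used DIEs, additionally mark pubnames,
base types and (for -fvar-tracking-assignments) direct call callees, then
prune the unmarked DIEs and finally clear the marks again. */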
29405
29406 static void
29407 prune_unused_types (void)
29408 {
29409 unsigned int i;
29410 limbo_die_node *node;
29411 comdat_type_node *ctnode;
29412 pubname_entry *pub;
29413 dw_die_ref base_type;
29414
29415 #if ENABLE_ASSERT_CHECKING
29416 /* All the marks should already be clear. */
29417 verify_marks_clear (comp_unit_die ());
29418 for (node = limbo_die_list; node; node = node->next)
29419 verify_marks_clear (node->die);
29420 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29421 verify_marks_clear (ctnode->root_die);
29422 #endif /* ENABLE_ASSERT_CHECKING */
29423
29424 /* Mark types that are used in global variables. */
29425 premark_types_used_by_global_vars ();
29426
29427 /* Set the mark on nodes that are actually used. */
29428 prune_unused_types_walk (comp_unit_die ());
29429 for (node = limbo_die_list; node; node = node->next)
29430 prune_unused_types_walk (node->die);
29431 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29432 {
29433 prune_unused_types_walk (ctnode->root_die);
29434 prune_unused_types_mark (ctnode->type_die, 1);
29435 }
29436
29437 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29438 are unusual in that they are pubnames that are the children of pubtypes.
29439 They should only be marked via their parent DW_TAG_enumeration_type die,
29440 not as roots in themselves. */
29441 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29442 if (pub->die->die_tag != DW_TAG_enumerator)
29443 prune_unused_types_mark (pub->die, 1);
29444 for (i = 0; base_types.iterate (i, &base_type); i++)
29445 prune_unused_types_mark (base_type, 1);
29446
29447 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29448 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29449 callees). */
29450 cgraph_node *cnode;
29451 FOR_EACH_FUNCTION (cnode)
29452 if (cnode->referred_to_p (false))
29453 {
29454 dw_die_ref die = lookup_decl_die (cnode->decl);
29455 if (die == NULL || die->die_mark)
29456 continue;
29457 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29458 if (e->caller != cnode
29459 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29460 {
29461 prune_unused_types_mark (die, 1);
29462 break;
29463 }
29464 }
29465
29466 if (debug_str_hash)
29467 debug_str_hash->empty ();
29468 if (skeleton_debug_str_hash)
29469 skeleton_debug_str_hash->empty ();
29470 prune_unused_types_prune (comp_unit_die ());
29471 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29472 {
29473 node = *pnode;
29474 if (!node->die->die_mark)
29475 *pnode = node->next;
29476 else
29477 {
29478 prune_unused_types_prune (node->die);
29479 pnode = &node->next;
29480 }
29481 }
29482 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29483 prune_unused_types_prune (ctnode->root_die);
29484
29485 /* Leave the marks clear. */
29486 prune_unmark_dies (comp_unit_die ());
29487 for (node = limbo_die_list; node; node = node->next)
29488 prune_unmark_dies (node->die);
29489 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29490 prune_unmark_dies (ctnode->root_die);
29491 }
29492
29493 /* Helpers to manipulate hash table of comdat type units. */
29494
29495 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29496 {
29497 static inline hashval_t hash (const comdat_type_node *);
29498 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29499 };
29500
29501 inline hashval_t
29502 comdat_type_hasher::hash (const comdat_type_node *type_node)
29503 {
29504 hashval_t h;
29505 memcpy (&h, type_node->signature, sizeof (h));
29506 return h;
29507 }
29508
29509 inline bool
29510 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29511 const comdat_type_node *type_node_2)
29512 {
29513 return (! memcmp (type_node_1->signature, type_node_2->signature,
29514 DWARF_TYPE_SIGNATURE_SIZE));
29515 }
29516
29517 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29518 to the location where it would have been added had we known the
29519 DECL_ASSEMBLER_NAME when the other attributes were added. This will
29520 probably improve the compactness of debug info by removing equivalent
29521 abbrevs, and hides any differences caused by deferring the
29522 computation of the assembler name, triggered by e.g. PCH. */
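/* Purely illustrative example: if the attributes were added in the order
{ DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
DW_AT_linkage_name }, the linkage name is moved to just after
DW_AT_decl_line, i.e. to where it would have been added together with the
name and source coordinates. */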
29523
29524 static inline void
29525 move_linkage_attr (dw_die_ref die)
29526 {
29527 unsigned ix = vec_safe_length (die->die_attr);
29528 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29529
29530 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29531 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29532
29533 while (--ix > 0)
29534 {
29535 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29536
29537 if (prev->dw_attr == DW_AT_decl_line
29538 || prev->dw_attr == DW_AT_decl_column
29539 || prev->dw_attr == DW_AT_name)
29540 break;
29541 }
29542
29543 if (ix != vec_safe_length (die->die_attr) - 1)
29544 {
29545 die->die_attr->pop ();
29546 die->die_attr->quick_insert (ix, linkage);
29547 }
29548 }
29549
29550 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29551 referenced from typed stack ops and count how often they are used. */
29552
29553 static void
29554 mark_base_types (dw_loc_descr_ref loc)
29555 {
29556 dw_die_ref base_type = NULL;
29557
29558 for (; loc; loc = loc->dw_loc_next)
29559 {
29560 switch (loc->dw_loc_opc)
29561 {
29562 case DW_OP_regval_type:
29563 case DW_OP_deref_type:
29564 case DW_OP_GNU_regval_type:
29565 case DW_OP_GNU_deref_type:
29566 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29567 break;
29568 case DW_OP_convert:
29569 case DW_OP_reinterpret:
29570 case DW_OP_GNU_convert:
29571 case DW_OP_GNU_reinterpret:
29572 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29573 continue;
29574 /* FALLTHRU */
29575 case DW_OP_const_type:
29576 case DW_OP_GNU_const_type:
29577 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29578 break;
29579 case DW_OP_entry_value:
29580 case DW_OP_GNU_entry_value:
29581 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29582 continue;
29583 default:
29584 continue;
29585 }
29586 gcc_assert (base_type->die_parent == comp_unit_die ());
29587 if (base_type->die_mark)
29588 base_type->die_mark++;
29589 else
29590 {
29591 base_types.safe_push (base_type);
29592 base_type->die_mark = 1;
29593 }
29594 }
29595 }
29596
29597 /* Comparison function for sorting marked base types. */
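/* As the code below shows, the primary key is decreasing use count
(die_mark); byte size, encoding and alignment act as further decreasing
tie-breakers so the resulting order is deterministic. */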
29598
29599 static int
29600 base_type_cmp (const void *x, const void *y)
29601 {
29602 dw_die_ref dx = *(const dw_die_ref *) x;
29603 dw_die_ref dy = *(const dw_die_ref *) y;
29604 unsigned int byte_size1, byte_size2;
29605 unsigned int encoding1, encoding2;
29606 unsigned int align1, align2;
29607 if (dx->die_mark > dy->die_mark)
29608 return -1;
29609 if (dx->die_mark < dy->die_mark)
29610 return 1;
29611 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29612 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29613 if (byte_size1 < byte_size2)
29614 return 1;
29615 if (byte_size1 > byte_size2)
29616 return -1;
29617 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29618 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29619 if (encoding1 < encoding2)
29620 return 1;
29621 if (encoding1 > encoding2)
29622 return -1;
29623 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29624 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29625 if (align1 < align2)
29626 return 1;
29627 if (align1 > align2)
29628 return -1;
29629 return 0;
29630 }
29631
29632 /* Move base types marked by mark_base_types as early as possible
29633 in the CU, sorted by decreasing usage count both to make the
29634 uleb128 references as small as possible and to make sure they
29635 will have die_offset already computed by calc_die_sizes when
29636 sizes of typed stack loc ops are computed. */
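/* As the insertion loop below suggests, die_child points at the last child
and its die_sib at the first, so splicing the sorted base types in right
after die_child places them at the front of the child list. */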
29637
29638 static void
29639 move_marked_base_types (void)
29640 {
29641 unsigned int i;
29642 dw_die_ref base_type, die, c;
29643
29644 if (base_types.is_empty ())
29645 return;
29646
29647 /* Sort by decreasing usage count, they will be added again in that
29648 order later on. */
29649 base_types.qsort (base_type_cmp);
29650 die = comp_unit_die ();
29651 c = die->die_child;
29652 do
29653 {
29654 dw_die_ref prev = c;
29655 c = c->die_sib;
29656 while (c->die_mark)
29657 {
29658 remove_child_with_prev (c, prev);
29659 /* As base types got marked, there must be at least
29660 one node other than DW_TAG_base_type. */
29661 gcc_assert (die->die_child != NULL);
29662 c = prev->die_sib;
29663 }
29664 }
29665 while (c != die->die_child);
29666 gcc_assert (die->die_child);
29667 c = die->die_child;
29668 for (i = 0; base_types.iterate (i, &base_type); i++)
29669 {
29670 base_type->die_mark = 0;
29671 base_type->die_sib = c->die_sib;
29672 c->die_sib = base_type;
29673 c = base_type;
29674 }
29675 }
29676
29677 /* Helper function for resolve_addr: attempt to resolve
29678 one CONST_STRING and return true if successful. Similarly, verify that
29679 SYMBOL_REFs refer to variables emitted in the current CU. */
29680
29681 static bool
29682 resolve_one_addr (rtx *addr)
29683 {
29684 rtx rtl = *addr;
29685
29686 if (GET_CODE (rtl) == CONST_STRING)
29687 {
29688 size_t len = strlen (XSTR (rtl, 0)) + 1;
29689 tree t = build_string (len, XSTR (rtl, 0));
29690 tree tlen = size_int (len - 1);
29691 TREE_TYPE (t)
29692 = build_array_type (char_type_node, build_index_type (tlen));
29693 rtl = lookup_constant_def (t);
29694 if (!rtl || !MEM_P (rtl))
29695 return false;
29696 rtl = XEXP (rtl, 0);
29697 if (GET_CODE (rtl) == SYMBOL_REF
29698 && SYMBOL_REF_DECL (rtl)
29699 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29700 return false;
29701 vec_safe_push (used_rtx_array, rtl);
29702 *addr = rtl;
29703 return true;
29704 }
29705
29706 if (GET_CODE (rtl) == SYMBOL_REF
29707 && SYMBOL_REF_DECL (rtl))
29708 {
29709 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29710 {
29711 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29712 return false;
29713 }
29714 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29715 return false;
29716 }
29717
29718 if (GET_CODE (rtl) == CONST)
29719 {
29720 subrtx_ptr_iterator::array_type array;
29721 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29722 if (!resolve_one_addr (*iter))
29723 return false;
29724 }
29725
29726 return true;
29727 }
29728
29729 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29730 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29731 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
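/* The procedure DIE created below carries a DW_AT_location of
DW_OP_implicit_value <len> <string bytes>, so consumers can materialize
the string contents even when no storage address is available. */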
29732
29733 static rtx
29734 string_cst_pool_decl (tree t)
29735 {
29736 rtx rtl = output_constant_def (t, 1);
29737 unsigned char *array;
29738 dw_loc_descr_ref l;
29739 tree decl;
29740 size_t len;
29741 dw_die_ref ref;
29742
29743 if (!rtl || !MEM_P (rtl))
29744 return NULL_RTX;
29745 rtl = XEXP (rtl, 0);
29746 if (GET_CODE (rtl) != SYMBOL_REF
29747 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29748 return NULL_RTX;
29749
29750 decl = SYMBOL_REF_DECL (rtl);
29751 if (!lookup_decl_die (decl))
29752 {
29753 len = TREE_STRING_LENGTH (t);
29754 vec_safe_push (used_rtx_array, rtl);
29755 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29756 array = ggc_vec_alloc<unsigned char> (len);
29757 memcpy (array, TREE_STRING_POINTER (t), len);
29758 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29759 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29760 l->dw_loc_oprnd2.v.val_vec.length = len;
29761 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29762 l->dw_loc_oprnd2.v.val_vec.array = array;
29763 add_AT_loc (ref, DW_AT_location, l);
29764 equate_decl_number_to_die (decl, ref);
29765 }
29766 return rtl;
29767 }
29768
29769 /* Helper function of resolve_addr_in_expr. LOC is
29770 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29771 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29772 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29773 with DW_OP_implicit_pointer if possible
29774 and return true; if unsuccessful, return false. */
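/* Schematically, the rewrite performed below is
DW_OP_addr <sym + off> DW_OP_stack_value
-> DW_OP_implicit_pointer <DIE of sym's decl> <off>
and it is only done when that decl's DIE has a DW_AT_location or
DW_AT_const_value to point at. */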
29775
29776 static bool
29777 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29778 {
29779 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29780 HOST_WIDE_INT offset = 0;
29781 dw_die_ref ref = NULL;
29782 tree decl;
29783
29784 if (GET_CODE (rtl) == CONST
29785 && GET_CODE (XEXP (rtl, 0)) == PLUS
29786 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29787 {
29788 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29789 rtl = XEXP (XEXP (rtl, 0), 0);
29790 }
29791 if (GET_CODE (rtl) == CONST_STRING)
29792 {
29793 size_t len = strlen (XSTR (rtl, 0)) + 1;
29794 tree t = build_string (len, XSTR (rtl, 0));
29795 tree tlen = size_int (len - 1);
29796
29797 TREE_TYPE (t)
29798 = build_array_type (char_type_node, build_index_type (tlen));
29799 rtl = string_cst_pool_decl (t);
29800 if (!rtl)
29801 return false;
29802 }
29803 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29804 {
29805 decl = SYMBOL_REF_DECL (rtl);
29806 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29807 {
29808 ref = lookup_decl_die (decl);
29809 if (ref && (get_AT (ref, DW_AT_location)
29810 || get_AT (ref, DW_AT_const_value)))
29811 {
29812 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29813 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29814 loc->dw_loc_oprnd1.val_entry = NULL;
29815 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29816 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29817 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29818 loc->dw_loc_oprnd2.v.val_int = offset;
29819 return true;
29820 }
29821 }
29822 }
29823 return false;
29824 }
29825
29826 /* Helper function for resolve_addr: handle one location
29827 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29828 the location list couldn't be resolved. */
29829
29830 static bool
29831 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29832 {
29833 dw_loc_descr_ref keep = NULL;
29834 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29835 switch (loc->dw_loc_opc)
29836 {
29837 case DW_OP_addr:
29838 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29839 {
29840 if ((prev == NULL
29841 || prev->dw_loc_opc == DW_OP_piece
29842 || prev->dw_loc_opc == DW_OP_bit_piece)
29843 && loc->dw_loc_next
29844 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29845 && (!dwarf_strict || dwarf_version >= 5)
29846 && optimize_one_addr_into_implicit_ptr (loc))
29847 break;
29848 return false;
29849 }
29850 break;
29851 case DW_OP_GNU_addr_index:
29852 case DW_OP_addrx:
29853 case DW_OP_GNU_const_index:
29854 case DW_OP_constx:
29855 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29856 || loc->dw_loc_opc == DW_OP_addrx)
29857 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29858 || loc->dw_loc_opc == DW_OP_constx)
29859 && loc->dtprel))
29860 {
29861 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29862 if (!resolve_one_addr (&rtl))
29863 return false;
29864 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29865 loc->dw_loc_oprnd1.val_entry
29866 = add_addr_table_entry (rtl, ate_kind_rtx);
29867 }
29868 break;
29869 case DW_OP_const4u:
29870 case DW_OP_const8u:
29871 if (loc->dtprel
29872 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29873 return false;
29874 break;
29875 case DW_OP_plus_uconst:
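/* Rewrite DW_OP_plus_uconst <N> as a constant push of N followed by
DW_OP_plus when that form encodes in fewer bytes. */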
29876 if (size_of_loc_descr (loc)
29877 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29878 + 1
29879 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29880 {
29881 dw_loc_descr_ref repl
29882 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29883 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29884 add_loc_descr (&repl, loc->dw_loc_next);
29885 *loc = *repl;
29886 }
29887 break;
29888 case DW_OP_implicit_value:
29889 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29890 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29891 return false;
29892 break;
29893 case DW_OP_implicit_pointer:
29894 case DW_OP_GNU_implicit_pointer:
29895 case DW_OP_GNU_parameter_ref:
29896 case DW_OP_GNU_variable_value:
29897 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29898 {
29899 dw_die_ref ref
29900 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29901 if (ref == NULL)
29902 return false;
29903 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29904 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29905 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29906 }
29907 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29908 {
29909 if (prev == NULL
29910 && loc->dw_loc_next == NULL
29911 && AT_class (a) == dw_val_class_loc)
29912 switch (a->dw_attr)
29913 {
29914 /* The following attributes allow both exprloc and reference forms,
29915 so if the whole expression is a lone DW_OP_GNU_variable_value
29916 we can transform it into a reference. */
29917 case DW_AT_byte_size:
29918 case DW_AT_bit_size:
29919 case DW_AT_lower_bound:
29920 case DW_AT_upper_bound:
29921 case DW_AT_bit_stride:
29922 case DW_AT_count:
29923 case DW_AT_allocated:
29924 case DW_AT_associated:
29925 case DW_AT_byte_stride:
29926 a->dw_attr_val.val_class = dw_val_class_die_ref;
29927 a->dw_attr_val.val_entry = NULL;
29928 a->dw_attr_val.v.val_die_ref.die
29929 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29930 a->dw_attr_val.v.val_die_ref.external = 0;
29931 return true;
29932 default:
29933 break;
29934 }
29935 if (dwarf_strict)
29936 return false;
29937 }
29938 break;
29939 case DW_OP_const_type:
29940 case DW_OP_regval_type:
29941 case DW_OP_deref_type:
29942 case DW_OP_convert:
29943 case DW_OP_reinterpret:
29944 case DW_OP_GNU_const_type:
29945 case DW_OP_GNU_regval_type:
29946 case DW_OP_GNU_deref_type:
29947 case DW_OP_GNU_convert:
29948 case DW_OP_GNU_reinterpret:
29949 while (loc->dw_loc_next
29950 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29951 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29952 {
29953 dw_die_ref base1, base2;
29954 unsigned enc1, enc2, size1, size2;
29955 if (loc->dw_loc_opc == DW_OP_regval_type
29956 || loc->dw_loc_opc == DW_OP_deref_type
29957 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29958 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29959 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29960 else if (loc->dw_loc_oprnd1.val_class
29961 == dw_val_class_unsigned_const)
29962 break;
29963 else
29964 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29965 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29966 == dw_val_class_unsigned_const)
29967 break;
29968 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29969 gcc_assert (base1->die_tag == DW_TAG_base_type
29970 && base2->die_tag == DW_TAG_base_type);
29971 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29972 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29973 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29974 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29975 if (size1 == size2
29976 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29977 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29978 && loc != keep)
29979 || enc1 == enc2))
29980 {
29981 /* Optimize away next DW_OP_convert after
29982 adjusting LOC's base type die reference. */
29983 if (loc->dw_loc_opc == DW_OP_regval_type
29984 || loc->dw_loc_opc == DW_OP_deref_type
29985 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29986 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29987 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29988 else
29989 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29990 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29991 continue;
29992 }
29993 /* Don't change integer DW_OP_convert after e.g. floating
29994 point typed stack entry. */
29995 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29996 keep = loc->dw_loc_next;
29997 break;
29998 }
29999 break;
30000 default:
30001 break;
30002 }
30003 return true;
30004 }
30005
30006 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
30007 of a single DW_OP_addr whose operand referred to DECL, and that
30008 DW_OP_addr couldn't be resolved. resolve_addr has already removed
30009 the DW_AT_location attribute. This function attempts to add a new
30010 DW_AT_location attribute with DW_OP_implicit_pointer, or a
30011 DW_AT_const_value attribute, if possible. */
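/* Illustrative example: for 'static int x; static int *p = &x;' where p
itself ended up with no storage, p's DIE can still be given a
DW_AT_location of DW_OP_implicit_pointer <DIE of x> <0>, provided x's DIE
has a location or constant value. */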
30012
30013 static void
30014 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30015 {
30016 if (!VAR_P (decl)
30017 || lookup_decl_die (decl) != die
30018 || DECL_EXTERNAL (decl)
30019 || !TREE_STATIC (decl)
30020 || DECL_INITIAL (decl) == NULL_TREE
30021 || DECL_P (DECL_INITIAL (decl))
30022 || get_AT (die, DW_AT_const_value))
30023 return;
30024
30025 tree init = DECL_INITIAL (decl);
30026 HOST_WIDE_INT offset = 0;
30027 /* For variables that have been optimized away and thus
30028 don't have a memory location, see if we can emit
30029 DW_AT_const_value instead. */
30030 if (tree_add_const_value_attribute (die, init))
30031 return;
30032 if (dwarf_strict && dwarf_version < 5)
30033 return;
30034 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30035 and ADDR_EXPR refers to a decl that has DW_AT_location or
30036 DW_AT_const_value (but isn't addressable, otherwise
30037 resolving the original DW_OP_addr wouldn't fail), see if
30038 we can add DW_OP_implicit_pointer. */
30039 STRIP_NOPS (init);
30040 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30041 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30042 {
30043 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30044 init = TREE_OPERAND (init, 0);
30045 STRIP_NOPS (init);
30046 }
30047 if (TREE_CODE (init) != ADDR_EXPR)
30048 return;
30049 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30050 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30051 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30052 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30053 && TREE_OPERAND (init, 0) != decl))
30054 {
30055 dw_die_ref ref;
30056 dw_loc_descr_ref l;
30057
30058 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30059 {
30060 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30061 if (!rtl)
30062 return;
30063 decl = SYMBOL_REF_DECL (rtl);
30064 }
30065 else
30066 decl = TREE_OPERAND (init, 0);
30067 ref = lookup_decl_die (decl);
30068 if (ref == NULL
30069 || (!get_AT (ref, DW_AT_location)
30070 && !get_AT (ref, DW_AT_const_value)))
30071 return;
30072 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30073 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30074 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30075 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30076 add_AT_loc (die, DW_AT_location, l);
30077 }
30078 }
30079
30080 /* Return NULL if L is a valid DWARF expression, or the first op that
30081 is not a valid DWARF expression. */
30082
30083 static dw_loc_descr_ref
30084 non_dwarf_expression (dw_loc_descr_ref l)
30085 {
30086 while (l)
30087 {
30088 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30089 return l;
30090 switch (l->dw_loc_opc)
30091 {
30092 case DW_OP_regx:
30093 case DW_OP_implicit_value:
30094 case DW_OP_stack_value:
30095 case DW_OP_implicit_pointer:
30096 case DW_OP_GNU_implicit_pointer:
30097 case DW_OP_GNU_parameter_ref:
30098 case DW_OP_piece:
30099 case DW_OP_bit_piece:
30100 return l;
30101 default:
30102 break;
30103 }
30104 l = l->dw_loc_next;
30105 }
30106 return NULL;
30107 }
30108
30109 /* Return an adjusted copy of EXPR:
30110 If it is an empty DWARF expression, return it.
30111 If it is a valid non-empty DWARF expression,
30112 return a copy of EXPR with DW_OP_deref appended to it.
30113 If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30114 a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30115 If it is a DWARF expression followed by DW_OP_stack_value, return
30116 a copy of the DWARF expression without anything appended.
30117 Otherwise, return NULL. */
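/* For illustration: DW_OP_fbreg <o> yields DW_OP_fbreg <o> DW_OP_deref,
DW_OP_regx <r> yields DW_OP_bregx <r> 0, and
DW_OP_breg6 <o> DW_OP_stack_value yields just DW_OP_breg6 <o>. */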
30118
30119 static dw_loc_descr_ref
30120 copy_deref_exprloc (dw_loc_descr_ref expr)
30121 {
30122 dw_loc_descr_ref tail = NULL;
30123
30124 if (expr == NULL)
30125 return NULL;
30126
30127 dw_loc_descr_ref l = non_dwarf_expression (expr);
30128 if (l && l->dw_loc_next)
30129 return NULL;
30130
30131 if (l)
30132 {
30133 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30134 tail = new_loc_descr ((enum dwarf_location_atom)
30135 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30136 0, 0);
30137 else
30138 switch (l->dw_loc_opc)
30139 {
30140 case DW_OP_regx:
30141 tail = new_loc_descr (DW_OP_bregx,
30142 l->dw_loc_oprnd1.v.val_unsigned, 0);
30143 break;
30144 case DW_OP_stack_value:
30145 break;
30146 default:
30147 return NULL;
30148 }
30149 }
30150 else
30151 tail = new_loc_descr (DW_OP_deref, 0, 0);
30152
30153 dw_loc_descr_ref ret = NULL, *p = &ret;
30154 while (expr != l)
30155 {
30156 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30157 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30158 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30159 p = &(*p)->dw_loc_next;
30160 expr = expr->dw_loc_next;
30161 }
30162 *p = tail;
30163 return ret;
30164 }
30165
30166 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30167 reference to a variable or argument, adjust it if needed and return:
30168 -1 if the DW_AT_string_length attribute and, if present, the
30169 DW_AT_{string_length_,}byte_size attribute should be removed,
30170 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30171 1 if the attribute has been successfully adjusted. */
30172
30173 static int
30174 optimize_string_length (dw_attr_node *a)
30175 {
30176 dw_loc_descr_ref l = AT_loc (a), lv;
30177 dw_die_ref die;
30178 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30179 {
30180 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30181 die = lookup_decl_die (decl);
30182 if (die)
30183 {
30184 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30185 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30186 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30187 }
30188 else
30189 return -1;
30190 }
30191 else
30192 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30193
30194 /* DWARF5 allows reference class, so we can then reference the DIE.
30195 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30196 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30197 {
30198 a->dw_attr_val.val_class = dw_val_class_die_ref;
30199 a->dw_attr_val.val_entry = NULL;
30200 a->dw_attr_val.v.val_die_ref.die = die;
30201 a->dw_attr_val.v.val_die_ref.external = 0;
30202 return 0;
30203 }
30204
30205 dw_attr_node *av = get_AT (die, DW_AT_location);
30206 dw_loc_list_ref d;
30207 bool non_dwarf_expr = false;
30208
30209 if (av == NULL)
30210 return dwarf_strict ? -1 : 0;
30211 switch (AT_class (av))
30212 {
30213 case dw_val_class_loc_list:
30214 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30215 if (d->expr && non_dwarf_expression (d->expr))
30216 non_dwarf_expr = true;
30217 break;
30218 case dw_val_class_view_list:
30219 gcc_unreachable ();
30220 case dw_val_class_loc:
30221 lv = AT_loc (av);
30222 if (lv == NULL)
30223 return dwarf_strict ? -1 : 0;
30224 if (non_dwarf_expression (lv))
30225 non_dwarf_expr = true;
30226 break;
30227 default:
30228 return dwarf_strict ? -1 : 0;
30229 }
30230
30231 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30232 into DW_OP_call4 or DW_OP_GNU_variable_value into
30233 DW_OP_call4 DW_OP_deref, do so. */
30234 if (!non_dwarf_expr
30235 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30236 {
30237 l->dw_loc_opc = DW_OP_call4;
30238 if (l->dw_loc_next)
30239 l->dw_loc_next = NULL;
30240 else
30241 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30242 return 0;
30243 }
30244
30245 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30246 copy over the DW_AT_location attribute from die to a. */
30247 if (l->dw_loc_next != NULL)
30248 {
30249 a->dw_attr_val = av->dw_attr_val;
30250 return 1;
30251 }
30252
30253 dw_loc_list_ref list, *p;
30254 switch (AT_class (av))
30255 {
30256 case dw_val_class_loc_list:
30257 p = &list;
30258 list = NULL;
30259 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30260 {
30261 lv = copy_deref_exprloc (d->expr);
30262 if (lv)
30263 {
30264 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30265 p = &(*p)->dw_loc_next;
30266 }
30267 else if (!dwarf_strict && d->expr)
30268 return 0;
30269 }
30270 if (list == NULL)
30271 return dwarf_strict ? -1 : 0;
30272 a->dw_attr_val.val_class = dw_val_class_loc_list;
30273 gen_llsym (list);
30274 *AT_loc_list_ptr (a) = list;
30275 return 1;
30276 case dw_val_class_loc:
30277 lv = copy_deref_exprloc (AT_loc (av));
30278 if (lv == NULL)
30279 return dwarf_strict ? -1 : 0;
30280 a->dw_attr_val.v.val_loc = lv;
30281 return 1;
30282 default:
30283 gcc_unreachable ();
30284 }
30285 }
30286
30287 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30288 an address in the .rodata section if the string literal is emitted there;
30289 if it isn't found in .rodata, remove the containing location list, or
30290 replace DW_AT_const_value with a DW_AT_location carrying an empty
30291 location expression. Similarly for SYMBOL_REFs, keep only those that
30292 refer to something that has been emitted in the current CU. */
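/* A summary of the structure below: the walk recurses over the whole DIE
tree (see the FOR_EACH_CHILD call at the end) and dispatches on the value
class of each attribute; location lists, view lists, single location
expressions and plain addresses are each resolved or dropped as described
above. */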
30293
30294 static void
30295 resolve_addr (dw_die_ref die)
30296 {
30297 dw_die_ref c;
30298 dw_attr_node *a;
30299 dw_loc_list_ref *curr, *start, loc;
30300 unsigned ix;
30301 bool remove_AT_byte_size = false;
30302
30303 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30304 switch (AT_class (a))
30305 {
30306 case dw_val_class_loc_list:
30307 start = curr = AT_loc_list_ptr (a);
30308 loc = *curr;
30309 gcc_assert (loc);
30310 /* The same list can be referenced more than once. See if we have
30311 already recorded the result from a previous pass. */
30312 if (loc->replaced)
30313 *curr = loc->dw_loc_next;
30314 else if (!loc->resolved_addr)
30315 {
30316 /* As things stand, we do not expect or allow one die to
30317 reference a suffix of another die's location list chain.
30318 References must be identical or completely separate.
30319 There is therefore no need to cache the result of this
30320 pass on any list other than the first; doing so
30321 would lead to unnecessary writes. */
30322 while (*curr)
30323 {
30324 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30325 if (!resolve_addr_in_expr (a, (*curr)->expr))
30326 {
30327 dw_loc_list_ref next = (*curr)->dw_loc_next;
30328 dw_loc_descr_ref l = (*curr)->expr;
30329
30330 if (next && (*curr)->ll_symbol)
30331 {
30332 gcc_assert (!next->ll_symbol);
30333 next->ll_symbol = (*curr)->ll_symbol;
30334 next->vl_symbol = (*curr)->vl_symbol;
30335 }
30336 if (dwarf_split_debug_info)
30337 remove_loc_list_addr_table_entries (l);
30338 *curr = next;
30339 }
30340 else
30341 {
30342 mark_base_types ((*curr)->expr);
30343 curr = &(*curr)->dw_loc_next;
30344 }
30345 }
30346 if (loc == *start)
30347 loc->resolved_addr = 1;
30348 else
30349 {
30350 loc->replaced = 1;
30351 loc->dw_loc_next = *start;
30352 }
30353 }
30354 if (!*start)
30355 {
30356 remove_AT (die, a->dw_attr);
30357 ix--;
30358 }
30359 break;
30360 case dw_val_class_view_list:
30361 {
30362 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30363 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30364 dw_val_node *llnode
30365 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30366 /* If we no longer have a loclist, or it no longer needs
30367 views, drop this attribute. */
30368 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30369 {
30370 remove_AT (die, a->dw_attr);
30371 ix--;
30372 }
30373 break;
30374 }
30375 case dw_val_class_loc:
30376 {
30377 dw_loc_descr_ref l = AT_loc (a);
30378 /* DW_OP_GNU_variable_value DW_OP_stack_value or a lone
30379 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30380 into DW_OP_call4 or DW_OP_call4 DW_OP_deref respectively, which is
30381 standard DWARF4, unlike DW_OP_GNU_variable_value. Alternatively, for
30382 DWARF5, DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30383 with a DW_FORM_ref referencing the same DIE that
30384 DW_OP_GNU_variable_value used to reference. */
30385 if (a->dw_attr == DW_AT_string_length
30386 && l
30387 && l->dw_loc_opc == DW_OP_GNU_variable_value
30388 && (l->dw_loc_next == NULL
30389 || (l->dw_loc_next->dw_loc_next == NULL
30390 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30391 {
30392 switch (optimize_string_length (a))
30393 {
30394 case -1:
30395 remove_AT (die, a->dw_attr);
30396 ix--;
30397 /* If we drop DW_AT_string_length, we also need to drop
30398 DW_AT_{string_length_,}byte_size. */
30399 remove_AT_byte_size = true;
30400 continue;
30401 default:
30402 break;
30403 case 1:
30404 /* Even if we keep the optimized DW_AT_string_length,
30405 it might have changed AT_class, so process it again. */
30406 ix--;
30407 continue;
30408 }
30409 }
30410 /* For -gdwarf-2 don't attempt to optimize
30411 DW_AT_data_member_location containing
30412 DW_OP_plus_uconst - older consumers might
30413 rely on it being that op instead of a more complex,
30414 but shorter, location description. */
30415 if ((dwarf_version > 2
30416 || a->dw_attr != DW_AT_data_member_location
30417 || l == NULL
30418 || l->dw_loc_opc != DW_OP_plus_uconst
30419 || l->dw_loc_next != NULL)
30420 && !resolve_addr_in_expr (a, l))
30421 {
30422 if (dwarf_split_debug_info)
30423 remove_loc_list_addr_table_entries (l);
30424 if (l != NULL
30425 && l->dw_loc_next == NULL
30426 && l->dw_loc_opc == DW_OP_addr
30427 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30428 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30429 && a->dw_attr == DW_AT_location)
30430 {
30431 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30432 remove_AT (die, a->dw_attr);
30433 ix--;
30434 optimize_location_into_implicit_ptr (die, decl);
30435 break;
30436 }
30437 if (a->dw_attr == DW_AT_string_length)
30438 /* If we drop DW_AT_string_length, we also need to drop
30439 DW_AT_{string_length_,}byte_size. */
30440 remove_AT_byte_size = true;
30441 remove_AT (die, a->dw_attr);
30442 ix--;
30443 }
30444 else
30445 mark_base_types (l);
30446 }
30447 break;
30448 case dw_val_class_addr:
30449 if (a->dw_attr == DW_AT_const_value
30450 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30451 {
30452 if (AT_index (a) != NOT_INDEXED)
30453 remove_addr_table_entry (a->dw_attr_val.val_entry);
30454 remove_AT (die, a->dw_attr);
30455 ix--;
30456 }
30457 if ((die->die_tag == DW_TAG_call_site
30458 && a->dw_attr == DW_AT_call_origin)
30459 || (die->die_tag == DW_TAG_GNU_call_site
30460 && a->dw_attr == DW_AT_abstract_origin))
30461 {
30462 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30463 dw_die_ref tdie = lookup_decl_die (tdecl);
30464 dw_die_ref cdie;
30465 if (tdie == NULL
30466 && DECL_EXTERNAL (tdecl)
30467 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30468 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30469 {
30470 dw_die_ref pdie = cdie;
30471 /* Make sure we don't add these DIEs into type units.
30472 We could emit skeleton DIEs for context (namespaces,
30473 outer structs/classes) and a skeleton DIE for the
30474 innermost context with DW_AT_signature pointing to the
30475 type unit. See PR78835. */
30476 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30477 pdie = pdie->die_parent;
30478 if (pdie == NULL)
30479 {
30480 /* Creating a full DIE for tdecl is overly expensive and,
30481 at this point, even wrong in the LTO phase,
30482 as it can end up generating new type DIEs we didn't
30483 output, and optimize_external_refs would then crash. */
30484 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30485 add_AT_flag (tdie, DW_AT_external, 1);
30486 add_AT_flag (tdie, DW_AT_declaration, 1);
30487 add_linkage_attr (tdie, tdecl);
30488 add_name_and_src_coords_attributes (tdie, tdecl, true);
30489 equate_decl_number_to_die (tdecl, tdie);
30490 }
30491 }
30492 if (tdie)
30493 {
30494 a->dw_attr_val.val_class = dw_val_class_die_ref;
30495 a->dw_attr_val.v.val_die_ref.die = tdie;
30496 a->dw_attr_val.v.val_die_ref.external = 0;
30497 }
30498 else
30499 {
30500 if (AT_index (a) != NOT_INDEXED)
30501 remove_addr_table_entry (a->dw_attr_val.val_entry);
30502 remove_AT (die, a->dw_attr);
30503 ix--;
30504 }
30505 }
30506 break;
30507 default:
30508 break;
30509 }
30510
30511 if (remove_AT_byte_size)
30512 remove_AT (die, dwarf_version >= 5
30513 ? DW_AT_string_length_byte_size
30514 : DW_AT_byte_size);
30515
30516 FOR_EACH_CHILD (die, c, resolve_addr (c));
30517 }
30518 \f
30519 /* Helper routines for optimize_location_lists.
30520 This pass tries to share identical location lists in the .debug_loc
30521 section. */
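/* The general idea: each list is hashed (hash_loc_list below stores the
result in the list head) and lists whose hashes match are then compared
element by element, using compare_loc_operands for individual operands, so
that identical lists can be emitted only once. */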
30522
30523 /* Iteratively hash operands of LOC opcode into HSTATE. */
30524
30525 static void
30526 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30527 {
30528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30530
30531 switch (loc->dw_loc_opc)
30532 {
30533 case DW_OP_const4u:
30534 case DW_OP_const8u:
30535 if (loc->dtprel)
30536 goto hash_addr;
30537 /* FALLTHRU */
30538 case DW_OP_const1u:
30539 case DW_OP_const1s:
30540 case DW_OP_const2u:
30541 case DW_OP_const2s:
30542 case DW_OP_const4s:
30543 case DW_OP_const8s:
30544 case DW_OP_constu:
30545 case DW_OP_consts:
30546 case DW_OP_pick:
30547 case DW_OP_plus_uconst:
30548 case DW_OP_breg0:
30549 case DW_OP_breg1:
30550 case DW_OP_breg2:
30551 case DW_OP_breg3:
30552 case DW_OP_breg4:
30553 case DW_OP_breg5:
30554 case DW_OP_breg6:
30555 case DW_OP_breg7:
30556 case DW_OP_breg8:
30557 case DW_OP_breg9:
30558 case DW_OP_breg10:
30559 case DW_OP_breg11:
30560 case DW_OP_breg12:
30561 case DW_OP_breg13:
30562 case DW_OP_breg14:
30563 case DW_OP_breg15:
30564 case DW_OP_breg16:
30565 case DW_OP_breg17:
30566 case DW_OP_breg18:
30567 case DW_OP_breg19:
30568 case DW_OP_breg20:
30569 case DW_OP_breg21:
30570 case DW_OP_breg22:
30571 case DW_OP_breg23:
30572 case DW_OP_breg24:
30573 case DW_OP_breg25:
30574 case DW_OP_breg26:
30575 case DW_OP_breg27:
30576 case DW_OP_breg28:
30577 case DW_OP_breg29:
30578 case DW_OP_breg30:
30579 case DW_OP_breg31:
30580 case DW_OP_regx:
30581 case DW_OP_fbreg:
30582 case DW_OP_piece:
30583 case DW_OP_deref_size:
30584 case DW_OP_xderef_size:
30585 hstate.add_object (val1->v.val_int);
30586 break;
30587 case DW_OP_skip:
30588 case DW_OP_bra:
30589 {
30590 int offset;
30591
30592 gcc_assert (val1->val_class == dw_val_class_loc);
30593 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30594 hstate.add_object (offset);
30595 }
30596 break;
30597 case DW_OP_implicit_value:
30598 hstate.add_object (val1->v.val_unsigned);
30599 switch (val2->val_class)
30600 {
30601 case dw_val_class_const:
30602 hstate.add_object (val2->v.val_int);
30603 break;
30604 case dw_val_class_vec:
30605 {
30606 unsigned int elt_size = val2->v.val_vec.elt_size;
30607 unsigned int len = val2->v.val_vec.length;
30608
30609 hstate.add_int (elt_size);
30610 hstate.add_int (len);
30611 hstate.add (val2->v.val_vec.array, len * elt_size);
30612 }
30613 break;
30614 case dw_val_class_const_double:
30615 hstate.add_object (val2->v.val_double.low);
30616 hstate.add_object (val2->v.val_double.high);
30617 break;
30618 case dw_val_class_wide_int:
30619 hstate.add (val2->v.val_wide->get_val (),
30620 get_full_len (*val2->v.val_wide)
30621 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30622 break;
30623 case dw_val_class_addr:
30624 inchash::add_rtx (val2->v.val_addr, hstate);
30625 break;
30626 default:
30627 gcc_unreachable ();
30628 }
30629 break;
30630 case DW_OP_bregx:
30631 case DW_OP_bit_piece:
30632 hstate.add_object (val1->v.val_int);
30633 hstate.add_object (val2->v.val_int);
30634 break;
30635 case DW_OP_addr:
30636 hash_addr:
30637 if (loc->dtprel)
30638 {
30639 unsigned char dtprel = 0xd1;
30640 hstate.add_object (dtprel);
30641 }
30642 inchash::add_rtx (val1->v.val_addr, hstate);
30643 break;
30644 case DW_OP_GNU_addr_index:
30645 case DW_OP_addrx:
30646 case DW_OP_GNU_const_index:
30647 case DW_OP_constx:
30648 {
30649 if (loc->dtprel)
30650 {
30651 unsigned char dtprel = 0xd1;
30652 hstate.add_object (dtprel);
30653 }
30654 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30655 }
30656 break;
30657 case DW_OP_implicit_pointer:
30658 case DW_OP_GNU_implicit_pointer:
30659 hstate.add_int (val2->v.val_int);
30660 break;
30661 case DW_OP_entry_value:
30662 case DW_OP_GNU_entry_value:
30663 hstate.add_object (val1->v.val_loc);
30664 break;
30665 case DW_OP_regval_type:
30666 case DW_OP_deref_type:
30667 case DW_OP_GNU_regval_type:
30668 case DW_OP_GNU_deref_type:
30669 {
30670 unsigned int byte_size
30671 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30672 unsigned int encoding
30673 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30674 hstate.add_object (val1->v.val_int);
30675 hstate.add_object (byte_size);
30676 hstate.add_object (encoding);
30677 }
30678 break;
30679 case DW_OP_convert:
30680 case DW_OP_reinterpret:
30681 case DW_OP_GNU_convert:
30682 case DW_OP_GNU_reinterpret:
30683 if (val1->val_class == dw_val_class_unsigned_const)
30684 {
30685 hstate.add_object (val1->v.val_unsigned);
30686 break;
30687 }
30688 /* FALLTHRU */
30689 case DW_OP_const_type:
30690 case DW_OP_GNU_const_type:
30691 {
30692 unsigned int byte_size
30693 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30694 unsigned int encoding
30695 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30696 hstate.add_object (byte_size);
30697 hstate.add_object (encoding);
30698 if (loc->dw_loc_opc != DW_OP_const_type
30699 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30700 break;
30701 hstate.add_object (val2->val_class);
30702 switch (val2->val_class)
30703 {
30704 case dw_val_class_const:
30705 hstate.add_object (val2->v.val_int);
30706 break;
30707 case dw_val_class_vec:
30708 {
30709 unsigned int elt_size = val2->v.val_vec.elt_size;
30710 unsigned int len = val2->v.val_vec.length;
30711
30712 hstate.add_object (elt_size);
30713 hstate.add_object (len);
30714 hstate.add (val2->v.val_vec.array, len * elt_size);
30715 }
30716 break;
30717 case dw_val_class_const_double:
30718 hstate.add_object (val2->v.val_double.low);
30719 hstate.add_object (val2->v.val_double.high);
30720 break;
30721 case dw_val_class_wide_int:
30722 hstate.add (val2->v.val_wide->get_val (),
30723 get_full_len (*val2->v.val_wide)
30724 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30725 break;
30726 default:
30727 gcc_unreachable ();
30728 }
30729 }
30730 break;
30731
30732 default:
30733 /* Other codes have no operands. */
30734 break;
30735 }
30736 }
30737
30738 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30739
30740 static inline void
30741 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30742 {
30743 dw_loc_descr_ref l;
30744 bool sizes_computed = false;
30745 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30746 size_of_locs (loc);
30747
30748 for (l = loc; l != NULL; l = l->dw_loc_next)
30749 {
30750 enum dwarf_location_atom opc = l->dw_loc_opc;
30751 hstate.add_object (opc);
30752 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30753 {
30754 size_of_locs (loc);
30755 sizes_computed = true;
30756 }
30757 hash_loc_operands (l, hstate);
30758 }
30759 }
30760
30761 /* Compute hash of the whole location list LIST_HEAD. */
30762
30763 static inline void
30764 hash_loc_list (dw_loc_list_ref list_head)
30765 {
30766 dw_loc_list_ref curr = list_head;
30767 inchash::hash hstate;
30768
30769 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30770 {
30771 hstate.add (curr->begin, strlen (curr->begin) + 1);
30772 hstate.add (curr->end, strlen (curr->end) + 1);
30773 hstate.add_object (curr->vbegin);
30774 hstate.add_object (curr->vend);
30775 if (curr->section)
30776 hstate.add (curr->section, strlen (curr->section) + 1);
30777 hash_locs (curr->expr, hstate);
30778 }
30779 list_head->hash = hstate.end ();
30780 }
30781
30782 /* Return true if X and Y opcodes have the same operands. */
30783
30784 static inline bool
30785 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30786 {
30787 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30788 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30789 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30790 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30791
30792 switch (x->dw_loc_opc)
30793 {
30794 case DW_OP_const4u:
30795 case DW_OP_const8u:
30796 if (x->dtprel)
30797 goto hash_addr;
30798 /* FALLTHRU */
30799 case DW_OP_const1u:
30800 case DW_OP_const1s:
30801 case DW_OP_const2u:
30802 case DW_OP_const2s:
30803 case DW_OP_const4s:
30804 case DW_OP_const8s:
30805 case DW_OP_constu:
30806 case DW_OP_consts:
30807 case DW_OP_pick:
30808 case DW_OP_plus_uconst:
30809 case DW_OP_breg0:
30810 case DW_OP_breg1:
30811 case DW_OP_breg2:
30812 case DW_OP_breg3:
30813 case DW_OP_breg4:
30814 case DW_OP_breg5:
30815 case DW_OP_breg6:
30816 case DW_OP_breg7:
30817 case DW_OP_breg8:
30818 case DW_OP_breg9:
30819 case DW_OP_breg10:
30820 case DW_OP_breg11:
30821 case DW_OP_breg12:
30822 case DW_OP_breg13:
30823 case DW_OP_breg14:
30824 case DW_OP_breg15:
30825 case DW_OP_breg16:
30826 case DW_OP_breg17:
30827 case DW_OP_breg18:
30828 case DW_OP_breg19:
30829 case DW_OP_breg20:
30830 case DW_OP_breg21:
30831 case DW_OP_breg22:
30832 case DW_OP_breg23:
30833 case DW_OP_breg24:
30834 case DW_OP_breg25:
30835 case DW_OP_breg26:
30836 case DW_OP_breg27:
30837 case DW_OP_breg28:
30838 case DW_OP_breg29:
30839 case DW_OP_breg30:
30840 case DW_OP_breg31:
30841 case DW_OP_regx:
30842 case DW_OP_fbreg:
30843 case DW_OP_piece:
30844 case DW_OP_deref_size:
30845 case DW_OP_xderef_size:
30846 return valx1->v.val_int == valy1->v.val_int;
30847 case DW_OP_skip:
30848 case DW_OP_bra:
30849 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30850 can cause irrelevant differences in dw_loc_addr. */
30851 gcc_assert (valx1->val_class == dw_val_class_loc
30852 && valy1->val_class == dw_val_class_loc
30853 && (dwarf_split_debug_info
30854 || x->dw_loc_addr == y->dw_loc_addr));
30855 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30856 case DW_OP_implicit_value:
30857 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30858 || valx2->val_class != valy2->val_class)
30859 return false;
30860 switch (valx2->val_class)
30861 {
30862 case dw_val_class_const:
30863 return valx2->v.val_int == valy2->v.val_int;
30864 case dw_val_class_vec:
30865 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30866 && valx2->v.val_vec.length == valy2->v.val_vec.length
30867 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30868 valx2->v.val_vec.elt_size
30869 * valx2->v.val_vec.length) == 0;
30870 case dw_val_class_const_double:
30871 return valx2->v.val_double.low == valy2->v.val_double.low
30872 && valx2->v.val_double.high == valy2->v.val_double.high;
30873 case dw_val_class_wide_int:
30874 return *valx2->v.val_wide == *valy2->v.val_wide;
30875 case dw_val_class_addr:
30876 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30877 default:
30878 gcc_unreachable ();
30879 }
30880 case DW_OP_bregx:
30881 case DW_OP_bit_piece:
30882 return valx1->v.val_int == valy1->v.val_int
30883 && valx2->v.val_int == valy2->v.val_int;
30884 case DW_OP_addr:
30885 hash_addr:
30886 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30887 case DW_OP_GNU_addr_index:
30888 case DW_OP_addrx:
30889 case DW_OP_GNU_const_index:
30890 case DW_OP_constx:
30891 {
30892 rtx ax1 = valx1->val_entry->addr.rtl;
30893 rtx ay1 = valy1->val_entry->addr.rtl;
30894 return rtx_equal_p (ax1, ay1);
30895 }
30896 case DW_OP_implicit_pointer:
30897 case DW_OP_GNU_implicit_pointer:
30898 return valx1->val_class == dw_val_class_die_ref
30899 && valx1->val_class == valy1->val_class
30900 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30901 && valx2->v.val_int == valy2->v.val_int;
30902 case DW_OP_entry_value:
30903 case DW_OP_GNU_entry_value:
30904 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30905 case DW_OP_const_type:
30906 case DW_OP_GNU_const_type:
30907 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30908 || valx2->val_class != valy2->val_class)
30909 return false;
30910 switch (valx2->val_class)
30911 {
30912 case dw_val_class_const:
30913 return valx2->v.val_int == valy2->v.val_int;
30914 case dw_val_class_vec:
30915 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30916 && valx2->v.val_vec.length == valy2->v.val_vec.length
30917 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30918 valx2->v.val_vec.elt_size
30919 * valx2->v.val_vec.length) == 0;
30920 case dw_val_class_const_double:
30921 return valx2->v.val_double.low == valy2->v.val_double.low
30922 && valx2->v.val_double.high == valy2->v.val_double.high;
30923 case dw_val_class_wide_int:
30924 return *valx2->v.val_wide == *valy2->v.val_wide;
30925 default:
30926 gcc_unreachable ();
30927 }
30928 case DW_OP_regval_type:
30929 case DW_OP_deref_type:
30930 case DW_OP_GNU_regval_type:
30931 case DW_OP_GNU_deref_type:
30932 return valx1->v.val_int == valy1->v.val_int
30933 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30934 case DW_OP_convert:
30935 case DW_OP_reinterpret:
30936 case DW_OP_GNU_convert:
30937 case DW_OP_GNU_reinterpret:
30938 if (valx1->val_class != valy1->val_class)
30939 return false;
30940 if (valx1->val_class == dw_val_class_unsigned_const)
30941 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30942 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30943 case DW_OP_GNU_parameter_ref:
30944 return valx1->val_class == dw_val_class_die_ref
30945 && valx1->val_class == valy1->val_class
30946 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30947 default:
30948 /* Other codes have no operands. */
30949 return true;
30950 }
30951 }
30952
30953 /* Return true if DWARF location expressions X and Y are the same. */
30954
30955 static inline bool
30956 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30957 {
30958 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30959 if (x->dw_loc_opc != y->dw_loc_opc
30960 || x->dtprel != y->dtprel
30961 || !compare_loc_operands (x, y))
30962 break;
30963 return x == NULL && y == NULL;
30964 }
30965
30966 /* Hashtable helpers. */
30967
30968 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30969 {
30970 static inline hashval_t hash (const dw_loc_list_struct *);
30971 static inline bool equal (const dw_loc_list_struct *,
30972 const dw_loc_list_struct *);
30973 };
30974
30975 /* Return precomputed hash of location list X. */
30976
30977 inline hashval_t
30978 loc_list_hasher::hash (const dw_loc_list_struct *x)
30979 {
30980 return x->hash;
30981 }
30982
30983 /* Return true if location lists A and B are the same. */
30984
30985 inline bool
30986 loc_list_hasher::equal (const dw_loc_list_struct *a,
30987 const dw_loc_list_struct *b)
30988 {
30989 if (a == b)
30990 return true;
30991 if (a->hash != b->hash)
30992 return false;
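/* Equal hashes only indicate a possible match; verify by comparing
   the lists entry by entry. */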
30993 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30994 if (strcmp (a->begin, b->begin) != 0
30995 || strcmp (a->end, b->end) != 0
30996 || (a->section == NULL) != (b->section == NULL)
30997 || (a->section && strcmp (a->section, b->section) != 0)
30998 || a->vbegin != b->vbegin || a->vend != b->vend
30999 || !compare_locs (a->expr, b->expr))
31000 break;
31001 return a == NULL && b == NULL;
31002 }
31003
31004 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31005
31006
31007 /* Recursively optimize location lists referenced from DIE
31008 children and share them whenever possible. */
31009
31010 static void
31011 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31012 {
31013 dw_die_ref c;
31014 dw_attr_node *a;
31015 unsigned ix;
31016 dw_loc_list_struct **slot;
31017 bool drop_locviews = false;
31018 bool has_locviews = false;
31019
31020 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31021 if (AT_class (a) == dw_val_class_loc_list)
31022 {
31023 dw_loc_list_ref list = AT_loc_list (a);
31024 /* TODO: perform some optimizations here, before hashing
31025 it and storing into the hash table. */
31026 hash_loc_list (list);
31027 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31028 if (*slot == NULL)
31029 {
31030 *slot = list;
31031 if (loc_list_has_views (list))
31032 gcc_assert (list->vl_symbol);
31033 else if (list->vl_symbol)
31034 {
31035 drop_locviews = true;
31036 list->vl_symbol = NULL;
31037 }
31038 }
31039 else
31040 {
31041 if (list->vl_symbol && !(*slot)->vl_symbol)
31042 drop_locviews = true;
31043 a->dw_attr_val.v.val_loc_list = *slot;
31044 }
31045 }
31046 else if (AT_class (a) == dw_val_class_view_list)
31047 {
31048 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31049 has_locviews = true;
31050 }
31051
31052
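/* If location views were dropped for any list shared above (because
   the canonical copy carries no view-list symbol), the
   DW_AT_GNU_locviews attribute on this DIE would dangle, so remove it. */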
31053 if (drop_locviews && has_locviews)
31054 remove_AT (die, DW_AT_GNU_locviews);
31055
31056 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31057 }
31058
31059
31060 /* Recursively assign each location list a unique index into the debug_addr
31061 section. */
31062
31063 static void
31064 index_location_lists (dw_die_ref die)
31065 {
31066 dw_die_ref c;
31067 dw_attr_node *a;
31068 unsigned ix;
31069
31070 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31071 if (AT_class (a) == dw_val_class_loc_list)
31072 {
31073 dw_loc_list_ref list = AT_loc_list (a);
31074 dw_loc_list_ref curr;
31075 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31076 {
31077 /* Don't index an entry that has already been indexed
31078 or won't be output. Make sure skip_loc_list_entry doesn't
31079 call size_of_locs, because that might cause a circular dependency:
31080 index_location_lists requires address table indexes to be
31081 computed, yet it adds new indexes through add_addr_table_entry,
31082 while address table index computation requires that no new
31083 entries be added to the hash table. In the rare case of a
31084 DWARF[234] location expression of 64KB or more, we'll just waste
31085 an address table entry for it. */
31086 if (curr->begin_entry != NULL
31087 || skip_loc_list_entry (curr))
31088 continue;
31089
31090 curr->begin_entry
31091 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31092 }
31093 }
31094
31095 FOR_EACH_CHILD (die, c, index_location_lists (c));
31096 }
31097
31098 /* Optimize location lists referenced from DIE
31099 children and share them whenever possible. */
31100
31101 static void
31102 optimize_location_lists (dw_die_ref die)
31103 {
31104 loc_list_hash_type htab (500);
31105 optimize_location_lists_1 (die, &htab);
31106 }
31107 \f
31108 /* Traverse the limbo die list, and add parent/child links. The only
31109 dies without parents that should be here are concrete instances of
31110 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31111 For concrete instances, we can get the parent die from the abstract
31112 instance. */
31113
31114 static void
31115 flush_limbo_die_list (void)
31116 {
31117 limbo_die_node *node;
31118
31119 /* get_context_die calls force_decl_die, which can put new DIEs on the
31120 limbo list in LTO mode when nested functions are put in a different
31121 partition than that of their parent function. */
31122 while ((node = limbo_die_list))
31123 {
31124 dw_die_ref die = node->die;
31125 limbo_die_list = node->next;
31126
31127 if (die->die_parent == NULL)
31128 {
31129 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31130
31131 if (origin && origin->die_parent)
31132 add_child_die (origin->die_parent, die);
31133 else if (is_cu_die (die))
31134 ;
31135 else if (seen_error ())
31136 /* It's OK to be confused by errors in the input. */
31137 add_child_die (comp_unit_die (), die);
31138 else
31139 {
31140 /* In certain situations, the lexical block containing a
31141 nested function can be optimized away, which results
31142 in the nested function die being orphaned. Likewise
31143 with the return type of that nested function. Force
31144 this to be a child of the containing function.
31145
31146 It may happen that even the containing function got fully
31147 inlined and optimized out. In that case we are lost and
31148 attach the DIE to whatever context die we can find. This should
31149 not be a big issue as the function is likely unreachable too. */
31150 gcc_assert (node->created_for);
31151
31152 if (DECL_P (node->created_for))
31153 origin = get_context_die (DECL_CONTEXT (node->created_for));
31154 else if (TYPE_P (node->created_for))
31155 origin = scope_die_for (node->created_for, comp_unit_die ());
31156 else
31157 origin = comp_unit_die ();
31158
31159 add_child_die (origin, die);
31160 }
31161 }
31162 }
31163 }
31164
31165 /* Reset DIEs so we can output them again. */
31166
31167 static void
31168 reset_dies (dw_die_ref die)
31169 {
31170 dw_die_ref c;
31171
31172 /* Remove stuff we re-generate. */
31173 die->die_mark = 0;
31174 die->die_offset = 0;
31175 die->die_abbrev = 0;
31176 remove_AT (die, DW_AT_sibling);
31177
31178 FOR_EACH_CHILD (die, c, reset_dies (c));
31179 }
31180
31181 /* Output stuff that dwarf requires at the end of every file,
31182 and generate the DWARF-2 debugging info. */
31183
31184 static void
31185 dwarf2out_finish (const char *filename)
31186 {
31187 comdat_type_node *ctnode;
31188 dw_die_ref main_comp_unit_die;
31189 unsigned char checksum[16];
31190 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31191
31192 /* Flush out any latecomers to the limbo party. */
31193 flush_limbo_die_list ();
31194
31195 if (inline_entry_data_table)
31196 gcc_assert (inline_entry_data_table->elements () == 0);
31197
31198 if (flag_checking)
31199 {
31200 verify_die (comp_unit_die ());
31201 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31202 verify_die (node->die);
31203 }
31204
31205 /* We shouldn't have any symbols with delayed asm names for
31206 DIEs generated after early finish. */
31207 gcc_assert (deferred_asm_name == NULL);
31208
31209 gen_remaining_tmpl_value_param_die_attribute ();
31210
31211 if (flag_generate_lto || flag_generate_offload)
31212 {
31213 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31214
31215 /* Prune stuff so that dwarf2out_finish runs successfully
31216 for the fat part of the object. */
31217 reset_dies (comp_unit_die ());
31218 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31219 reset_dies (node->die);
31220
31221 hash_table<comdat_type_hasher> comdat_type_table (100);
31222 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31223 {
31224 comdat_type_node **slot
31225 = comdat_type_table.find_slot (ctnode, INSERT);
31226
31227 /* Don't reset types twice. */
31228 if (*slot != HTAB_EMPTY_ENTRY)
31229 continue;
31230
31231 /* Remove the pointer to the line table. */
31232 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31233
31234 if (debug_info_level >= DINFO_LEVEL_TERSE)
31235 reset_dies (ctnode->root_die);
31236
31237 *slot = ctnode;
31238 }
31239
31240 /* Reset die CU symbol so we don't output it twice. */
31241 comp_unit_die ()->die_id.die_symbol = NULL;
31242
31243 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31244 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31245 if (have_macinfo)
31246 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31247
31248 /* Remove indirect string decisions. */
31249 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31250 if (debug_line_str_hash)
31251 {
31252 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31253 debug_line_str_hash = NULL;
31254 }
31255 }
31256
31257 #if ENABLE_ASSERT_CHECKING
31258 {
31259 dw_die_ref die = comp_unit_die (), c;
31260 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31261 }
31262 #endif
31263 resolve_addr (comp_unit_die ());
31264 move_marked_base_types ();
31265
31266 if (dump_file)
31267 {
31268 fprintf (dump_file, "DWARF for %s\n", filename);
31269 print_die (comp_unit_die (), dump_file);
31270 }
31271
31272 /* Initialize sections and labels used for actual assembler output. */
31273 unsigned generation = init_sections_and_labels (false);
31274
31275 /* Traverse the DIE's and add sibling attributes to those DIE's that
31276 have children. */
31277 add_sibling_attributes (comp_unit_die ());
31278 limbo_die_node *node;
31279 for (node = cu_die_list; node; node = node->next)
31280 add_sibling_attributes (node->die);
31281 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31282 add_sibling_attributes (ctnode->root_die);
31283
31284 /* When splitting DWARF info, we put some attributes in the
31285 skeleton compile_unit DIE that remains in the .o, while
31286 most attributes go in the DWO compile_unit_die. */
31287 if (dwarf_split_debug_info)
31288 {
31289 limbo_die_node *cu;
31290 main_comp_unit_die = gen_compile_unit_die (NULL);
31291 if (dwarf_version >= 5)
31292 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31293 cu = limbo_die_list;
31294 gcc_assert (cu->die == main_comp_unit_die);
31295 limbo_die_list = limbo_die_list->next;
31296 cu->next = cu_die_list;
31297 cu_die_list = cu;
31298 }
31299 else
31300 main_comp_unit_die = comp_unit_die ();
31301
31302 /* Output a terminator label for the .text section. */
31303 switch_to_section (text_section);
31304 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31305 if (cold_text_section)
31306 {
31307 switch_to_section (cold_text_section);
31308 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31309 }
31310
31311 /* We can only use the low/high_pc attributes if all of the code was
31312 in .text. */
31313 if (!have_multiple_function_sections
31314 || (dwarf_version < 3 && dwarf_strict))
31315 {
31316 /* Don't add if the CU has no associated code. */
31317 if (text_section_used)
31318 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31319 text_end_label, true);
31320 }
31321 else
31322 {
31323 unsigned fde_idx;
31324 dw_fde_ref fde;
31325 bool range_list_added = false;
31326
31327 if (text_section_used)
31328 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31329 text_end_label, &range_list_added, true);
31330 if (cold_text_section_used)
31331 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31332 cold_end_label, &range_list_added, true);
31333
31334 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31335 {
31336 if (DECL_IGNORED_P (fde->decl))
31337 continue;
31338 if (!fde->in_std_section)
31339 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31340 fde->dw_fde_end, &range_list_added,
31341 true);
31342 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31343 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31344 fde->dw_fde_second_end, &range_list_added,
31345 true);
31346 }
31347
31348 if (range_list_added)
31349 {
31350 /* We need to give .debug_loc and .debug_ranges an appropriate
31351 "base address". Use zero so that these addresses become
31352 absolute. Historically, we've emitted the unexpected
31353 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31354 Emit both to give time for other tools to adapt. */
31355 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31356 if (! dwarf_strict && dwarf_version < 4)
31357 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31358
31359 add_ranges (NULL);
31360 }
31361 }
31362
31363 /* AIX Assembler inserts the length, so adjust the reference to match the
31364 offset expected by debuggers. */
31365 strcpy (dl_section_ref, debug_line_section_label);
31366 if (XCOFF_DEBUGGING_INFO)
31367 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31368
31369 if (debug_info_level >= DINFO_LEVEL_TERSE)
31370 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31371 dl_section_ref);
31372
31373 if (have_macinfo)
31374 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31375 macinfo_section_label);
31376
31377 if (dwarf_split_debug_info)
31378 {
31379 if (have_location_lists)
31380 {
31381 /* Since we generate the loclists in the split DWARF .dwo
31382 file itself, we don't need to generate a loclists_base
31383 attribute for the split compile unit DIE. That attribute
31384 (and using relocatable sec_offset FORMs) isn't allowed
31385 for a split compile unit. Only if the .debug_loclists
31386 section was in the main file, would we need to generate a
31387 loclists_base attribute here (for the full or skeleton
31388 unit DIE). */
31389
31390 /* optimize_location_lists calculates the size of the lists,
31391 so index them first, and assign indices to the entries.
31392 Although optimize_location_lists will remove entries from
31393 the table, it only does so for duplicates, and therefore
31394 only reduces ref_counts to 1. */
31395 index_location_lists (comp_unit_die ());
31396 }
31397
31398 if (addr_index_table != NULL)
31399 {
31400 unsigned int index = 0;
31401 addr_index_table
31402 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31403 (&index);
31404 }
31405 }
31406
31407 loc_list_idx = 0;
31408 if (have_location_lists)
31409 {
31410 optimize_location_lists (comp_unit_die ());
31411 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31412 if (dwarf_version >= 5 && dwarf_split_debug_info)
31413 assign_location_list_indexes (comp_unit_die ());
31414 }
31415
31416 save_macinfo_strings ();
31417
31418 if (dwarf_split_debug_info)
31419 {
31420 unsigned int index = 0;
31421
31422 /* Add attributes common to skeleton compile_units and
31423 type_units. Because these attributes include strings, it
31424 must be done before freezing the string table. Top-level
31425 skeleton die attrs are added when the skeleton type unit is
31426 created, so ensure it is created by this point. */
31427 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31428 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31429 }
31430
31431 /* Output all of the compilation units. We put the main one last so that
31432 the offsets are available to output_pubnames. */
31433 for (node = cu_die_list; node; node = node->next)
31434 output_comp_unit (node->die, 0, NULL);
31435
31436 hash_table<comdat_type_hasher> comdat_type_table (100);
31437 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31438 {
31439 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31440
31441 /* Don't output duplicate types. */
31442 if (*slot != HTAB_EMPTY_ENTRY)
31443 continue;
31444
31445 /* Add a pointer to the line table for the main compilation unit
31446 so that the debugger can make sense of DW_AT_decl_file
31447 attributes. */
31448 if (debug_info_level >= DINFO_LEVEL_TERSE)
31449 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31450 (!dwarf_split_debug_info
31451 ? dl_section_ref
31452 : debug_skeleton_line_section_label));
31453
31454 output_comdat_type_unit (ctnode);
31455 *slot = ctnode;
31456 }
31457
31458 if (dwarf_split_debug_info)
31459 {
31460 int mark;
31461 struct md5_ctx ctx;
31462
31463 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31464 index_rnglists ();
31465
31466 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31467 md5_init_ctx (&ctx);
31468 mark = 0;
31469 die_checksum (comp_unit_die (), &ctx, &mark);
31470 unmark_all_dies (comp_unit_die ());
31471 md5_finish_ctx (&ctx, checksum);
31472
31473 if (dwarf_version < 5)
31474 {
31475 /* Use the first 8 bytes of the checksum as the dwo_id,
31476 and add it to both comp-unit DIEs. */
31477 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31478 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31479 }
31480
31481 /* Add the base offset of the ranges table to the skeleton
31482 comp-unit DIE. */
31483 if (!vec_safe_is_empty (ranges_table))
31484 {
31485 if (dwarf_version >= 5)
31486 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31487 ranges_base_label);
31488 else
31489 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31490 ranges_section_label);
31491 }
31492
31493 switch_to_section (debug_addr_section);
31494 /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission
31495 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31496 before DWARF5, didn't have a header for .debug_addr units.
31497 DWARF5 specifies a small header when address tables are used. */
31498 if (dwarf_version >= 5)
31499 {
31500 unsigned int last_idx = 0;
31501 unsigned long addrs_length;
31502
31503 addr_index_table->traverse_noresize
31504 <unsigned int *, count_index_addrs> (&last_idx);
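/* The unit length excludes the initial length field itself: the 4
   extra bytes cover the 2-byte version plus the address size and
   segment selector size bytes emitted below, followed by the address
   entries written by output_addr_table. */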
31505 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31506
31507 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31508 dw2_asm_output_data (4, 0xffffffff,
31509 "Escape value for 64-bit DWARF extension");
31510 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31511 "Length of Address Unit");
31512 dw2_asm_output_data (2, 5, "DWARF addr version");
31513 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31514 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31515 }
31516 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31517 output_addr_table ();
31518 }
31519
31520 /* Output the main compilation unit if non-empty or if .debug_macinfo
31521 or .debug_macro will be emitted. */
31522 output_comp_unit (comp_unit_die (), have_macinfo,
31523 dwarf_split_debug_info ? checksum : NULL);
31524
31525 if (dwarf_split_debug_info && info_section_emitted)
31526 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31527
31528 /* Output the abbreviation table. */
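/* The zero-th entry of abbrev_die_table is allocated but never used,
   so a length of 1 means no abbreviations need to be output. */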
31529 if (vec_safe_length (abbrev_die_table) != 1)
31530 {
31531 switch_to_section (debug_abbrev_section);
31532 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31533 output_abbrev_section ();
31534 }
31535
31536 /* Output location list section if necessary. */
31537 if (have_location_lists)
31538 {
31539 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31540 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31541 /* Output the location lists info. */
31542 switch_to_section (debug_loc_section);
31543 if (dwarf_version >= 5)
31544 {
31545 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31546 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31547 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31548 dw2_asm_output_data (4, 0xffffffff,
31549 "Initial length escape value indicating "
31550 "64-bit DWARF extension");
31551 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31552 "Length of Location Lists");
31553 ASM_OUTPUT_LABEL (asm_out_file, l1);
31554 output_dwarf_version ();
31555 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31556 dw2_asm_output_data (1, 0, "Segment Size");
31557 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31558 "Offset Entry Count");
31559 }
31560 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
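/* For split DWARF the offset table entries counted above are emitted
   first; the walk must produce exactly loc_list_idx offsets. */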
31561 if (dwarf_version >= 5 && dwarf_split_debug_info)
31562 {
31563 unsigned int save_loc_list_idx = loc_list_idx;
31564 loc_list_idx = 0;
31565 output_loclists_offsets (comp_unit_die ());
31566 gcc_assert (save_loc_list_idx == loc_list_idx);
31567 }
31568 output_location_lists (comp_unit_die ());
31569 if (dwarf_version >= 5)
31570 ASM_OUTPUT_LABEL (asm_out_file, l2);
31571 }
31572
31573 output_pubtables ();
31574
31575 /* Output the address range information if a CU (.debug_info section)
31576 was emitted. We output an empty table even if we had no functions
31577 to put in it. This is because the consumer has no way to tell the
31578 difference between an empty table that we omitted and failure to
31579 generate a table that would have contained data. */
31580 if (info_section_emitted)
31581 {
31582 switch_to_section (debug_aranges_section);
31583 output_aranges ();
31584 }
31585
31586 /* Output ranges section if necessary. */
31587 if (!vec_safe_is_empty (ranges_table))
31588 {
31589 if (dwarf_version >= 5)
31590 output_rnglists (generation);
31591 else
31592 output_ranges ();
31593 }
31594
31595 /* Have to end the macro section. */
31596 if (have_macinfo)
31597 {
31598 switch_to_section (debug_macinfo_section);
31599 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31600 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31601 : debug_skeleton_line_section_label, false);
31602 dw2_asm_output_data (1, 0, "End compilation unit");
31603 }
31604
31605 /* Output the source line correspondence table. We must do this
31606 even if there is no line information. Otherwise, on an empty
31607 translation unit, we will generate a present, but empty,
31608 .debug_info section. IRIX 6.5 `nm' will then complain when
31609 examining the file. This is done late so that any filenames
31610 used by the debug_info section are marked as 'used'. */
31611 switch_to_section (debug_line_section);
31612 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31613 if (! output_asm_line_debug_info ())
31614 output_line_info (false);
31615
31616 if (dwarf_split_debug_info && info_section_emitted)
31617 {
31618 switch_to_section (debug_skeleton_line_section);
31619 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31620 output_line_info (true);
31621 }
31622
31623 /* If we emitted any indirect strings, output the string table too. */
31624 if (debug_str_hash || skeleton_debug_str_hash)
31625 output_indirect_strings ();
31626 if (debug_line_str_hash)
31627 {
31628 switch_to_section (debug_line_str_section);
31629 const enum dwarf_form form = DW_FORM_line_strp;
31630 debug_line_str_hash->traverse<enum dwarf_form,
31631 output_indirect_string> (form);
31632 }
31633
31634 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31635 symview_upper_bound = 0;
31636 if (zero_view_p)
31637 bitmap_clear (zero_view_p);
31638 }
31639
31640 /* Returns a hash value for X (which really is a variable_value_struct). */
31641
31642 inline hashval_t
31643 variable_value_hasher::hash (variable_value_struct *x)
31644 {
31645 return (hashval_t) x->decl_id;
31646 }
31647
31648 /* Return nonzero if decl_id of variable_value_struct X is the same as
31649 UID of decl Y. */
31650
31651 inline bool
31652 variable_value_hasher::equal (variable_value_struct *x, tree y)
31653 {
31654 return x->decl_id == DECL_UID (y);
31655 }
31656
31657 /* Helper function for resolve_variable_value, handle
31658 DW_OP_GNU_variable_value in one location expression.
31659 Return true if exprloc has been changed into loclist. */
31660
31661 static bool
31662 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31663 {
31664 dw_loc_descr_ref next;
31665 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31666 {
31667 next = loc->dw_loc_next;
31668 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31669 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31670 continue;
31671
31672 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31673 if (DECL_CONTEXT (decl) != current_function_decl)
31674 continue;
31675
31676 dw_die_ref ref = lookup_decl_die (decl);
31677 if (ref)
31678 {
31679 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31680 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31681 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31682 continue;
31683 }
31684 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31685 if (l == NULL)
31686 continue;
31687 if (l->dw_loc_next)
31688 {
31689 if (AT_class (a) != dw_val_class_loc)
31690 continue;
31691 switch (a->dw_attr)
31692 {
31693 /* The following attributes allow both exprloc and loclist
31694 classes, so we can change the expression into a loclist. */
31695 case DW_AT_location:
31696 case DW_AT_string_length:
31697 case DW_AT_return_addr:
31698 case DW_AT_data_member_location:
31699 case DW_AT_frame_base:
31700 case DW_AT_segment:
31701 case DW_AT_static_link:
31702 case DW_AT_use_location:
31703 case DW_AT_vtable_elem_location:
31704 if (prev)
31705 {
31706 prev->dw_loc_next = NULL;
31707 prepend_loc_descr_to_each (l, AT_loc (a));
31708 }
31709 if (next)
31710 add_loc_descr_to_each (l, next);
31711 a->dw_attr_val.val_class = dw_val_class_loc_list;
31712 a->dw_attr_val.val_entry = NULL;
31713 a->dw_attr_val.v.val_loc_list = l;
31714 have_location_lists = true;
31715 return true;
31716 /* The following attributes allow both exprloc and reference
31717 classes, so if the whole expression is a single
31718 DW_OP_GNU_variable_value we can transform it into a reference. */
31719 case DW_AT_byte_size:
31720 case DW_AT_bit_size:
31721 case DW_AT_lower_bound:
31722 case DW_AT_upper_bound:
31723 case DW_AT_bit_stride:
31724 case DW_AT_count:
31725 case DW_AT_allocated:
31726 case DW_AT_associated:
31727 case DW_AT_byte_stride:
31728 if (prev == NULL && next == NULL)
31729 break;
31730 /* FALLTHRU */
31731 default:
31732 if (dwarf_strict)
31733 continue;
31734 break;
31735 }
31736 /* Create DW_TAG_variable that we can refer to. */
31737 gen_decl_die (decl, NULL_TREE, NULL,
31738 lookup_decl_die (current_function_decl));
31739 ref = lookup_decl_die (decl);
31740 if (ref)
31741 {
31742 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31743 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31744 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31745 }
31746 continue;
31747 }
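/* Here the location list has a single element, so splice its
   expression in place of the DW_OP_GNU_variable_value operation and
   keep the remainder of the original expression. */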
31748 if (prev)
31749 {
31750 prev->dw_loc_next = l->expr;
31751 add_loc_descr (&prev->dw_loc_next, next);
31752 free_loc_descr (loc, NULL);
31753 next = prev->dw_loc_next;
31754 }
31755 else
31756 {
31757 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31758 add_loc_descr (&loc, next);
31759 next = loc;
31760 }
31761 loc = prev;
31762 }
31763 return false;
31764 }
31765
31766 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31767
31768 static void
31769 resolve_variable_value (dw_die_ref die)
31770 {
31771 dw_attr_node *a;
31772 dw_loc_list_ref loc;
31773 unsigned ix;
31774
31775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31776 switch (AT_class (a))
31777 {
31778 case dw_val_class_loc:
31779 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31780 break;
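/* The exprloc was turned into a location list; fall through and
   process the new list as well. */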
31781 /* FALLTHRU */
31782 case dw_val_class_loc_list:
31783 loc = AT_loc_list (a);
31784 gcc_assert (loc);
31785 for (; loc; loc = loc->dw_loc_next)
31786 resolve_variable_value_in_expr (a, loc->expr);
31787 break;
31788 default:
31789 break;
31790 }
31791 }
31792
31793 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31794 temporaries in the current function. */
31795
31796 static void
31797 resolve_variable_values (void)
31798 {
31799 if (!variable_value_hash || !current_function_decl)
31800 return;
31801
31802 struct variable_value_struct *node
31803 = variable_value_hash->find_with_hash (current_function_decl,
31804 DECL_UID (current_function_decl));
31805
31806 if (node == NULL)
31807 return;
31808
31809 unsigned int i;
31810 dw_die_ref die;
31811 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31812 resolve_variable_value (die);
31813 }
31814
31815 /* Helper function for note_variable_value, handle one location
31816 expression. */
31817
31818 static void
31819 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31820 {
31821 for (; loc; loc = loc->dw_loc_next)
31822 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31823 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31824 {
31825 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31826 dw_die_ref ref = lookup_decl_die (decl);
31827 if (! ref && (flag_generate_lto || flag_generate_offload))
31828 {
31829 /* ??? This is somewhat a hack because we do not create DIEs
31830 for variables not in BLOCK trees early but when generating
31831 early LTO output we need the dw_val_class_decl_ref to be
31832 fully resolved. For fat LTO objects we'd also like to
31833 undo this after LTO dwarf output. */
31834 gcc_assert (DECL_CONTEXT (decl));
31835 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31836 gcc_assert (ctx != NULL);
31837 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31838 ref = lookup_decl_die (decl);
31839 gcc_assert (ref != NULL);
31840 }
31841 if (ref)
31842 {
31843 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31844 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31845 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31846 continue;
31847 }
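/* No DIE yet: remember this DIE keyed by the function containing
   DECL, so resolve_variable_values can retry once that function is
   being compiled. */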
31848 if (VAR_P (decl)
31849 && DECL_CONTEXT (decl)
31850 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31851 && lookup_decl_die (DECL_CONTEXT (decl)))
31852 {
31853 if (!variable_value_hash)
31854 variable_value_hash
31855 = hash_table<variable_value_hasher>::create_ggc (10);
31856
31857 tree fndecl = DECL_CONTEXT (decl);
31858 struct variable_value_struct *node;
31859 struct variable_value_struct **slot
31860 = variable_value_hash->find_slot_with_hash (fndecl,
31861 DECL_UID (fndecl),
31862 INSERT);
31863 if (*slot == NULL)
31864 {
31865 node = ggc_cleared_alloc<variable_value_struct> ();
31866 node->decl_id = DECL_UID (fndecl);
31867 *slot = node;
31868 }
31869 else
31870 node = *slot;
31871
31872 vec_safe_push (node->dies, die);
31873 }
31874 }
31875 }
31876
31877 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31878 with dw_val_class_decl_ref operand. */
31879
31880 static void
31881 note_variable_value (dw_die_ref die)
31882 {
31883 dw_die_ref c;
31884 dw_attr_node *a;
31885 dw_loc_list_ref loc;
31886 unsigned ix;
31887
31888 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31889 switch (AT_class (a))
31890 {
31891 case dw_val_class_loc_list:
31892 loc = AT_loc_list (a);
31893 gcc_assert (loc);
31894 if (!loc->noted_variable_value)
31895 {
31896 loc->noted_variable_value = 1;
31897 for (; loc; loc = loc->dw_loc_next)
31898 note_variable_value_in_expr (die, loc->expr);
31899 }
31900 break;
31901 case dw_val_class_loc:
31902 note_variable_value_in_expr (die, AT_loc (a));
31903 break;
31904 default:
31905 break;
31906 }
31907
31908 /* Mark children. */
31909 FOR_EACH_CHILD (die, c, note_variable_value (c));
31910 }
31911
31912 /* Perform any cleanups needed after the early debug generation pass
31913 has run. */
31914
31915 static void
31916 dwarf2out_early_finish (const char *filename)
31917 {
31918 set_early_dwarf s;
31919 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31920
31921 /* PCH might result in DW_AT_producer string being restored from the
31922 header compilation, so always fill it with empty string initially
31923 and overwrite only here. */
31924 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31925 producer_string = gen_producer_string ();
31926 producer->dw_attr_val.v.val_str->refcount--;
31927 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31928
31929 /* Add the name for the main input file now. We delayed this from
31930 dwarf2out_init to avoid complications with PCH. */
31931 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31932 add_comp_dir_attribute (comp_unit_die ());
31933
31934 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31935 DW_AT_comp_dir into .debug_line_str section. */
31936 if (!output_asm_line_debug_info ()
31937 && dwarf_version >= 5
31938 && DWARF5_USE_DEBUG_LINE_STR)
31939 {
31940 for (int i = 0; i < 2; i++)
31941 {
31942 dw_attr_node *a = get_AT (comp_unit_die (),
31943 i ? DW_AT_comp_dir : DW_AT_name);
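/* A DW_FORM_line_strp reference itself occupies DWARF_OFFSET_SIZE
   bytes, so strings no longer than that would not shrink by going
   indirect. */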
31944 if (a == NULL
31945 || AT_class (a) != dw_val_class_str
31946 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31947 continue;
31948
31949 if (! debug_line_str_hash)
31950 debug_line_str_hash
31951 = hash_table<indirect_string_hasher>::create_ggc (10);
31952
31953 struct indirect_string_node *node
31954 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31955 set_indirect_string (node);
31956 node->form = DW_FORM_line_strp;
31957 a->dw_attr_val.v.val_str->refcount--;
31958 a->dw_attr_val.v.val_str = node;
31959 }
31960 }
31961
31962 /* With LTO early dwarf was really finished at compile-time, so make
31963 sure to adjust the phase after annotating the LTRANS CU DIE. */
31964 if (in_lto_p)
31965 {
31966 early_dwarf_finished = true;
31967 if (dump_file)
31968 {
31969 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31970 print_die (comp_unit_die (), dump_file);
31971 }
31972 return;
31973 }
31974
31975 /* Walk through the list of incomplete types again, trying once more to
31976 emit full debugging info for them. */
31977 retry_incomplete_types ();
31978
31979 /* The point here is to flush out the limbo list so that it is empty
31980 and we don't need to stream it for LTO. */
31981 flush_limbo_die_list ();
31982
31983 gen_scheduled_generic_parms_dies ();
31984 gen_remaining_tmpl_value_param_die_attribute ();
31985
31986 /* Add DW_AT_linkage_name for all deferred DIEs. */
31987 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31988 {
31989 tree decl = node->created_for;
31990 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31991 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31992 ended up in deferred_asm_name before we knew it was
31993 constant and never written to disk. */
31994 && DECL_ASSEMBLER_NAME (decl))
31995 {
31996 add_linkage_attr (node->die, decl);
31997 move_linkage_attr (node->die);
31998 }
31999 }
32000 deferred_asm_name = NULL;
32001
32002 if (flag_eliminate_unused_debug_types)
32003 prune_unused_types ();
32004
32005 /* Generate separate COMDAT sections for type DIEs. */
32006 if (use_debug_types)
32007 {
32008 break_out_comdat_types (comp_unit_die ());
32009
32010 /* Each new type_unit DIE was added to the limbo die list when created.
32011 Since these have all been added to comdat_type_list, clear the
32012 limbo die list. */
32013 limbo_die_list = NULL;
32014
32015 /* For each new comdat type unit, copy declarations for incomplete
32016 types to make the new unit self-contained (i.e., no direct
32017 references to the main compile unit). */
32018 for (comdat_type_node *ctnode = comdat_type_list;
32019 ctnode != NULL; ctnode = ctnode->next)
32020 copy_decls_for_unworthy_types (ctnode->root_die);
32021 copy_decls_for_unworthy_types (comp_unit_die ());
32022
32023 /* In the process of copying declarations from one unit to another,
32024 we may have left some declarations behind that are no longer
32025 referenced. Prune them. */
32026 prune_unused_types ();
32027 }
32028
32029 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32030 with dw_val_class_decl_ref operand. */
32031 note_variable_value (comp_unit_die ());
32032 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32033 note_variable_value (node->die);
32034 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32035 ctnode = ctnode->next)
32036 note_variable_value (ctnode->root_die);
32037 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32038 note_variable_value (node->die);
32039
32040 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32041 both the main_cu and all skeleton TUs. Making this call unconditional
32042 would end up either adding a second copy of the AT_pubnames attribute, or
32043 requiring a special case in add_top_level_skeleton_die_attrs. */
32044 if (!dwarf_split_debug_info)
32045 add_AT_pubnames (comp_unit_die ());
32046
32047 /* The early debug phase is now finished. */
32048 early_dwarf_finished = true;
32049 if (dump_file)
32050 {
32051 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32052 print_die (comp_unit_die (), dump_file);
32053 }
32054
32055 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32056 if ((!flag_generate_lto && !flag_generate_offload)
32057 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32058 copy_lto_debug_sections operation of the simple object support in
32059 libiberty is not implemented for them yet. */
32060 || TARGET_PECOFF || TARGET_COFF)
32061 return;
32062
32063 /* Now that we are going to output for LTO, initialize sections and
32064 labels to the LTO variants. We don't need a random-seed postfix as
32065 other LTO sections do, since linking the LTO debug sections into one
32066 in a partial link is fine. */
32067 init_sections_and_labels (true);
32068
32069 /* The output below is modeled after dwarf2out_finish with all
32070 location related output removed and some LTO specific changes.
32071 Some refactoring might make both smaller and easier to match up. */
32072
32073 /* Traverse the DIE's and add sibling attributes to those DIE's
32074 that have children. */
32075 add_sibling_attributes (comp_unit_die ());
32076 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32077 add_sibling_attributes (node->die);
32078 for (comdat_type_node *ctnode = comdat_type_list;
32079 ctnode != NULL; ctnode = ctnode->next)
32080 add_sibling_attributes (ctnode->root_die);
32081
32082 /* AIX Assembler inserts the length, so adjust the reference to match the
32083 offset expected by debuggers. */
32084 strcpy (dl_section_ref, debug_line_section_label);
32085 if (XCOFF_DEBUGGING_INFO)
32086 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32087
32088 if (debug_info_level >= DINFO_LEVEL_TERSE)
32089 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32090
32091 if (have_macinfo)
32092 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32093 macinfo_section_label);
32094
32095 save_macinfo_strings ();
32096
32097 if (dwarf_split_debug_info)
32098 {
32099 unsigned int index = 0;
32100 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32101 }
32102
32103 /* Output all of the compilation units. We put the main one last so that
32104 the offsets are available to output_pubnames. */
32105 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32106 output_comp_unit (node->die, 0, NULL);
32107
32108 hash_table<comdat_type_hasher> comdat_type_table (100);
32109 for (comdat_type_node *ctnode = comdat_type_list;
32110 ctnode != NULL; ctnode = ctnode->next)
32111 {
32112 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32113
32114 /* Don't output duplicate types. */
32115 if (*slot != HTAB_EMPTY_ENTRY)
32116 continue;
32117
32118 /* Add a pointer to the line table for the main compilation unit
32119 so that the debugger can make sense of DW_AT_decl_file
32120 attributes. */
32121 if (debug_info_level >= DINFO_LEVEL_TERSE)
32122 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32123 (!dwarf_split_debug_info
32124 ? debug_line_section_label
32125 : debug_skeleton_line_section_label));
32126
32127 output_comdat_type_unit (ctnode);
32128 *slot = ctnode;
32129 }
32130
32131 /* Stick a unique symbol to the main debuginfo section. */
32132 compute_comp_unit_symbol (comp_unit_die ());
32133
32134 /* Output the main compilation unit. We always need it if only for
32135 the CU symbol. */
32136 output_comp_unit (comp_unit_die (), true, NULL);
32137
32138 /* Output the abbreviation table. */
32139 if (vec_safe_length (abbrev_die_table) != 1)
32140 {
32141 switch_to_section (debug_abbrev_section);
32142 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32143 output_abbrev_section ();
32144 }
32145
32146 /* Have to end the macro section. */
32147 if (have_macinfo)
32148 {
32149 /* We have to save macinfo state if we need to output it again
32150 for the FAT part of the object. */
32151 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32152 if (flag_fat_lto_objects)
32153 macinfo_table = macinfo_table->copy ();
32154
32155 switch_to_section (debug_macinfo_section);
32156 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32157 output_macinfo (debug_line_section_label, true);
32158 dw2_asm_output_data (1, 0, "End compilation unit");
32159
32160 if (flag_fat_lto_objects)
32161 {
32162 vec_free (macinfo_table);
32163 macinfo_table = saved_macinfo_table;
32164 }
32165 }
32166
32167 /* Emit a skeleton debug_line section. */
32168 switch_to_section (debug_line_section);
32169 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32170 output_line_info (true);
32171
32172 /* If we emitted any indirect strings, output the string table too. */
32173 if (debug_str_hash || skeleton_debug_str_hash)
32174 output_indirect_strings ();
32175 if (debug_line_str_hash)
32176 {
32177 switch_to_section (debug_line_str_section);
32178 const enum dwarf_form form = DW_FORM_line_strp;
32179 debug_line_str_hash->traverse<enum dwarf_form,
32180 output_indirect_string> (form);
32181 }
32182
32183 /* Switch back to the text section. */
32184 switch_to_section (text_section);
32185 }
32186
32187 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32188 within the same process. For use by toplev::finalize. */
32189
32190 void
32191 dwarf2out_c_finalize (void)
32192 {
32193 last_var_location_insn = NULL;
32194 cached_next_real_insn = NULL;
32195 used_rtx_array = NULL;
32196 incomplete_types = NULL;
32197 debug_info_section = NULL;
32198 debug_skeleton_info_section = NULL;
32199 debug_abbrev_section = NULL;
32200 debug_skeleton_abbrev_section = NULL;
32201 debug_aranges_section = NULL;
32202 debug_addr_section = NULL;
32203 debug_macinfo_section = NULL;
32204 debug_line_section = NULL;
32205 debug_skeleton_line_section = NULL;
32206 debug_loc_section = NULL;
32207 debug_pubnames_section = NULL;
32208 debug_pubtypes_section = NULL;
32209 debug_str_section = NULL;
32210 debug_line_str_section = NULL;
32211 debug_str_dwo_section = NULL;
32212 debug_str_offsets_section = NULL;
32213 debug_ranges_section = NULL;
32214 debug_frame_section = NULL;
32215 fde_vec = NULL;
32216 debug_str_hash = NULL;
32217 debug_line_str_hash = NULL;
32218 skeleton_debug_str_hash = NULL;
32219 dw2_string_counter = 0;
32220 have_multiple_function_sections = false;
32221 text_section_used = false;
32222 cold_text_section_used = false;
32223 cold_text_section = NULL;
32224 current_unit_personality = NULL;
32225
32226 early_dwarf = false;
32227 early_dwarf_finished = false;
32228
32229 next_die_offset = 0;
32230 single_comp_unit_die = NULL;
32231 comdat_type_list = NULL;
32232 limbo_die_list = NULL;
32233 file_table = NULL;
32234 decl_die_table = NULL;
32235 common_block_die_table = NULL;
32236 decl_loc_table = NULL;
32237 call_arg_locations = NULL;
32238 call_arg_loc_last = NULL;
32239 call_site_count = -1;
32240 tail_call_site_count = -1;
32241 cached_dw_loc_list_table = NULL;
32242 abbrev_die_table = NULL;
32243 delete dwarf_proc_stack_usage_map;
32244 dwarf_proc_stack_usage_map = NULL;
32245 line_info_label_num = 0;
32246 cur_line_info_table = NULL;
32247 text_section_line_info = NULL;
32248 cold_text_section_line_info = NULL;
32249 separate_line_info = NULL;
32250 info_section_emitted = false;
32251 pubname_table = NULL;
32252 pubtype_table = NULL;
32253 macinfo_table = NULL;
32254 ranges_table = NULL;
32255 ranges_by_label = NULL;
32256 rnglist_idx = 0;
32257 have_location_lists = false;
32258 loclabel_num = 0;
32259 poc_label_num = 0;
32260 last_emitted_file = NULL;
32261 label_num = 0;
32262 tmpl_value_parm_die_table = NULL;
32263 generic_type_instances = NULL;
32264 frame_pointer_fb_offset = 0;
32265 frame_pointer_fb_offset_valid = false;
32266 base_types.release ();
32267 XDELETEVEC (producer_string);
32268 producer_string = NULL;
32269 }
32270
32271 #include "gt-dwarf2out.h"