1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47      information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
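
/* Editorial sketch (added commentary, not part of the original source):
   the frame sections emitted below tie these pieces together roughly as

     .debug_frame / .eh_frame:
       CIE:  length, CIE id, version, augmentation string,
             code/data alignment factors, RA column, initial CFIs
       FDE:  length, CIE pointer, initial location, address range,
             optional augmentation data (e.g. LSDA pointer), CFIs, padding

   output_call_frame_info and output_fde later in this file emit exactly
   this layout when the assembler does not do it via .cfi_* directives.  */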
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time.  incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it.  */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
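
/* Worked example (editorial addition): for 32-bit DWARF the initial length
   is a single 4-byte length; for 64-bit DWARF it is the 4-byte escape value
   0xffffffff followed by an 8-byte length, hence the sizes 4 and 12 above.
   On a little-endian target a length of 0x4c would be emitted as

     32-bit DWARF:  4c 00 00 00
     64-bit DWARF:  ff ff ff ff 4c 00 00 00 00 00 00 00

   matching the "Initial length escape value indicating 64-bit DWARF
   extension" data emitted by output_call_frame_info and output_fde.  */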
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
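
/* Worked example (editorial addition): with truncating integer division,
   DWARF_ROUND (5, 4) == ((5 + 3) / 4) * 4 == 8, DWARF_ROUND (8, 4) == 8 and
   DWARF_ROUND (0, 4) == 0, i.e. SIZE is rounded up to the next multiple of
   BOUNDARY.  */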
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit.  Used only when the assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the value of the
388 number.  Some constants have a large uniform precision, so we use the
389 minimum precision needed for the actual value instead.  */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
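
/* Worked example (editorial addition): with a 64-bit HOST_WIDE_INT, any
   value whose minimum precision is 1..64 bits needs one element
   ((64 + 63) / 64 == 1), while a value needing 65..128 bits needs two
   ((65 + 63) / 64 == 2), and so on.  */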
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
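
/* Editorial example (added): for DW_CFA_offset the first operand is a
   register number and the second an offset, so dw_cfi_oprnd1_desc returns
   dw_cfi_oprnd_reg_num and dw_cfi_oprnd2_desc returns dw_cfi_oprnd_offset;
   the GTY machinery then marks exactly those members of the operand
   union.  */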
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged along with them and not discarded
697 by garbage-collecting linkers.  We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
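
846 /* Editorial example (added): a unit that needs a personality routine,
    an LSDA and a non-absptr FDE encoding ends up with the augmentation
    string "zPLR", i.e. the uleb128 augmentation size plus the personality
    encoding and pointer ('P'), the LSDA encoding ('L') and the FDE pointer
    encoding ('R'), in the order the letters are appended below.  */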
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's little need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDEs. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
973 eh unwinders. */
974 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
975 return;
976
977 rtx personality = get_personality_function (current_function_decl);
978
979 if (personality)
980 {
981 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
982 ref = personality;
983
984 /* ??? The GAS support isn't entirely consistent. We have to
985 handle indirect support ourselves, but PC-relative is done
986 in the assembler. Further, the assembler can't handle any
987 of the weirder relocation types. */
988 if (enc & DW_EH_PE_indirect)
989 ref = dw2_force_const_mem (ref, true);
990
991 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
992 output_addr_const (asm_out_file, ref);
993 fputc ('\n', asm_out_file);
994 }
995
996 if (crtl->uses_eh_lsda)
997 {
998 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
999
1000 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1001 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1002 current_function_funcdef_no);
1003 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1004 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1005
1006 if (enc & DW_EH_PE_indirect)
1007 ref = dw2_force_const_mem (ref, true);
1008
1009 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1010 output_addr_const (asm_out_file, ref);
1011 fputc ('\n', asm_out_file);
1012 }
1013 }
1014
1015 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1016 this allocation may be done before pass_final. */
1017
1018 dw_fde_ref
1019 dwarf2out_alloc_current_fde (void)
1020 {
1021 dw_fde_ref fde;
1022
1023 fde = ggc_cleared_alloc<dw_fde_node> ();
1024 fde->decl = current_function_decl;
1025 fde->funcdef_number = current_function_funcdef_no;
1026 fde->fde_index = vec_safe_length (fde_vec);
1027 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1028 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1029 fde->nothrow = crtl->nothrow;
1030 fde->drap_reg = INVALID_REGNUM;
1031 fde->vdrap_reg = INVALID_REGNUM;
1032
1033 /* Record the FDE associated with this function. */
1034 cfun->fde = fde;
1035 vec_safe_push (fde_vec, fde);
1036
1037 return fde;
1038 }
1039
1040 /* Output a marker (i.e. a label) for the beginning of a function, before
1041 the prologue. */
1042
1043 void
1044 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1045 unsigned int column ATTRIBUTE_UNUSED,
1046 const char *file ATTRIBUTE_UNUSED)
1047 {
1048 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1049 char * dup_label;
1050 dw_fde_ref fde;
1051 section *fnsec;
1052 bool do_frame;
1053
1054 current_function_func_begin_label = NULL;
1055
1056 do_frame = dwarf2out_do_frame ();
1057
1058 /* ??? current_function_func_begin_label is also used by except.c for
1059 call-site information. We must emit this label if it might be used. */
1060 if (!do_frame
1061 && (!flag_exceptions
1062 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1063 return;
1064
1065 fnsec = function_section (current_function_decl);
1066 switch_to_section (fnsec);
1067 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1068 current_function_funcdef_no);
1069 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 dup_label = xstrdup (label);
1072 current_function_func_begin_label = dup_label;
1073
1074 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1075 if (!do_frame)
1076 return;
1077
1078 /* Unlike the debug version, the EH version of frame unwind info is a per-
1079 function setting so we need to record whether we need it for the unit. */
1080 do_eh_frame |= dwarf2out_do_eh_frame ();
1081
1082 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1083 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1084 would include pass_dwarf2_frame. If we've not created the FDE yet,
1085 do so now. */
1086 fde = cfun->fde;
1087 if (fde == NULL)
1088 fde = dwarf2out_alloc_current_fde ();
1089
1090 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1091 fde->dw_fde_begin = dup_label;
1092 fde->dw_fde_current_label = dup_label;
1093 fde->in_std_section = (fnsec == text_section
1094 || (cold_text_section && fnsec == cold_text_section));
1095
1096 /* We only want to output line number information for the genuine dwarf2
1097 prologue case, not the eh frame case. */
1098 #ifdef DWARF2_DEBUGGING_INFO
1099 if (file)
1100 dwarf2out_source_line (line, column, file, 0, true);
1101 #endif
1102
1103 if (dwarf2out_do_cfi_asm ())
1104 dwarf2out_do_cfi_startproc (false);
1105 else
1106 {
1107 rtx personality = get_personality_function (current_function_decl);
1108 if (!current_unit_personality)
1109 current_unit_personality = personality;
1110
1111 /* We cannot keep a current personality per function: without CFI
1112 asm, there is no current function left at the point where we emit
1113 the CFI data.  */
1114 if (personality && current_unit_personality != personality)
1115 sorry ("multiple EH personalities are supported only with assemblers "
1116 "supporting .cfi_personality directive");
1117 }
1118 }
1119
1120 /* Output a marker (i.e. a label) for the end of the generated code
1121 for a function prologue. This gets called *after* the prologue code has
1122 been generated. */
1123
1124 void
1125 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1126 const char *file ATTRIBUTE_UNUSED)
1127 {
1128 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1129
1130 /* Output a label to mark the end of the code generated for the
1131 prologue of this function. */
1132 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1133 current_function_funcdef_no);
1134 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1137 }
1138
1139 /* Output a marker (i.e. a label) for the beginning of the generated code
1140 for a function epilogue. This gets called *before* the epilogue code has
1141 been generated. */
1142
1143 void
1144 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1145 const char *file ATTRIBUTE_UNUSED)
1146 {
1147 dw_fde_ref fde = cfun->fde;
1148 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1149
1150 if (fde->dw_fde_vms_begin_epilogue)
1151 return;
1152
1153 /* Output a label to mark the beginning of the code generated for the
1154 epilogue of this function. */
1155 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1156 current_function_funcdef_no);
1157 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1160 }
1161
1162 /* Output a marker (i.e. a label) for the absolute end of the generated code
1163 for a function definition. This gets called *after* the epilogue code has
1164 been generated. */
1165
1166 void
1167 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1168 const char *file ATTRIBUTE_UNUSED)
1169 {
1170 dw_fde_ref fde;
1171 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1172
1173 last_var_location_insn = NULL;
1174 cached_next_real_insn = NULL;
1175
1176 if (dwarf2out_do_cfi_asm ())
1177 fprintf (asm_out_file, "\t.cfi_endproc\n");
1178
1179 /* Output a label to mark the endpoint of the code generated for this
1180 function. */
1181 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1182 current_function_funcdef_no);
1183 ASM_OUTPUT_LABEL (asm_out_file, label);
1184 fde = cfun->fde;
1185 gcc_assert (fde != NULL);
1186 if (fde->dw_fde_second_begin == NULL)
1187 fde->dw_fde_end = xstrdup (label);
1188 }
1189
1190 void
1191 dwarf2out_frame_finish (void)
1192 {
1193 /* Output call frame information. */
1194 if (targetm.debug_unwind_info () == UI_DWARF2)
1195 output_call_frame_info (0);
1196
1197 /* Output another copy for the unwinder. */
1198 if (do_eh_frame)
1199 output_call_frame_info (1);
1200 }
1201
1202 /* Note that the current function section is being used for code. */
1203
1204 static void
1205 dwarf2out_note_section_used (void)
1206 {
1207 section *sec = current_function_section ();
1208 if (sec == text_section)
1209 text_section_used = true;
1210 else if (sec == cold_text_section)
1211 cold_text_section_used = true;
1212 }
1213
1214 static void var_location_switch_text_section (void);
1215 static void set_cur_line_info_table (section *);
1216
1217 void
1218 dwarf2out_switch_text_section (void)
1219 {
1220 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1227 current_function_funcdef_no);
1228
1229 fde->dw_fde_second_begin = ggc_strdup (label);
1230 if (!in_cold_section_p)
1231 {
1232 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1233 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1234 }
1235 else
1236 {
1237 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1238 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1239 }
1240 have_multiple_function_sections = true;
1241
1242 /* There is no need to mark used sections when not debugging. */
1243 if (cold_text_section != NULL)
1244 dwarf2out_note_section_used ();
1245
1246 if (dwarf2out_do_cfi_asm ())
1247 fprintf (asm_out_file, "\t.cfi_endproc\n");
1248
1249 /* Now do the real section switch. */
1250 sect = current_function_section ();
1251 switch_to_section (sect);
1252
1253 fde->second_in_std_section
1254 = (sect == text_section
1255 || (cold_text_section && sect == cold_text_section));
1256
1257 if (dwarf2out_do_cfi_asm ())
1258 dwarf2out_do_cfi_startproc (true);
1259
1260 var_location_switch_text_section ();
1261
1262 if (cold_text_section != NULL)
1263 set_cur_line_info_table (sect);
1264 }
1265 \f
1266 /* And now, the subset of the debugging information support code necessary
1267 for emitting location expressions. */
1268
1269 /* Data about a single source file. */
1270 struct GTY((for_user)) dwarf_file_data {
1271 const char * filename;
1272 int emitted_number;
1273 };
1274
1275 /* Describe an entry into the .debug_addr section. */
1276
1277 enum ate_kind {
1278 ate_kind_rtx,
1279 ate_kind_rtx_dtprel,
1280 ate_kind_label
1281 };
1282
1283 struct GTY((for_user)) addr_table_entry {
1284 enum ate_kind kind;
1285 unsigned int refcount;
1286 unsigned int index;
1287 union addr_table_entry_struct_union
1288 {
1289 rtx GTY ((tag ("0"))) rtl;
1290 char * GTY ((tag ("1"))) label;
1291 }
1292 GTY ((desc ("%1.kind"))) addr;
1293 };
1294
1295 typedef unsigned int var_loc_view;
1296
1297 /* Location lists are ranges + location descriptions for that range,
1298 so you can track variables that are in different places over
1299 their entire life. */
1300 typedef struct GTY(()) dw_loc_list_struct {
1301 dw_loc_list_ref dw_loc_next;
1302 const char *begin; /* Label and addr_entry for start of range */
1303 addr_table_entry *begin_entry;
1304 const char *end; /* Label for end of range */
1305 char *ll_symbol; /* Label for beginning of location list.
1306 Only on head of list. */
1307 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1308 const char *section; /* Section this loclist is relative to */
1309 dw_loc_descr_ref expr;
1310 var_loc_view vbegin, vend;
1311 hashval_t hash;
1312 /* True if all addresses in this and subsequent lists are known to be
1313 resolved. */
1314 bool resolved_addr;
1315 /* True if this list has been replaced by dw_loc_next. */
1316 bool replaced;
1317 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1318 section. */
1319 unsigned char emitted : 1;
1320 /* True if hash field is index rather than hash value. */
1321 unsigned char num_assigned : 1;
1322 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1323 unsigned char offset_emitted : 1;
1324 /* True if note_variable_value_in_expr has been called on it. */
1325 unsigned char noted_variable_value : 1;
1326 /* True if the range should be emitted even if begin and end
1327 are the same. */
1328 bool force;
1329 } dw_loc_list_node;
1330
1331 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1332 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1333
1334 /* Convert a DWARF stack opcode into its string name. */
1335
1336 static const char *
1337 dwarf_stack_op_name (unsigned int op)
1338 {
1339 const char *name = get_DW_OP_name (op);
1340
1341 if (name != NULL)
1342 return name;
1343
1344 return "OP_<unknown>";
1345 }
1346
1347 /* Return TRUE iff we're to output location view lists as a separate
1348 attribute next to the location lists, as an extension compatible
1349 with DWARF 2 and above. */
1350
1351 static inline bool
1352 dwarf2out_locviews_in_attribute ()
1353 {
1354 return debug_variable_location_views == 1;
1355 }
1356
1357 /* Return TRUE iff we're to output location view lists as part of the
1358 location lists, as proposed for standardization after DWARF 5. */
1359
1360 static inline bool
1361 dwarf2out_locviews_in_loclist ()
1362 {
1363 #ifndef DW_LLE_view_pair
1364 return false;
1365 #else
1366 return debug_variable_location_views == -1;
1367 #endif
1368 }
1369
1370 /* Return a pointer to a newly allocated location description. Location
1371 descriptions are simple expression terms that can be strung
1372 together to form more complicated location (address) descriptions. */
1373
1374 static inline dw_loc_descr_ref
1375 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1376 unsigned HOST_WIDE_INT oprnd2)
1377 {
1378 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1379
1380 descr->dw_loc_opc = op;
1381 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1382 descr->dw_loc_oprnd1.val_entry = NULL;
1383 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1384 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1385 descr->dw_loc_oprnd2.val_entry = NULL;
1386 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1387
1388 return descr;
1389 }
1390
1391 /* Add a location description term to a location description expression. */
1392
1393 static inline void
1394 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1395 {
1396 dw_loc_descr_ref *d;
1397
1398 /* Find the end of the chain. */
1399 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1400 ;
1401
1402 *d = descr;
1403 }
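
/* Editorial usage sketch (added; illustrative only, not used by the rest of
   this file): a location expression is built by chaining terms with the two
   helpers above, e.g. "frame base + OFFSET, then dereference":  */

static inline dw_loc_descr_ref
example_fbreg_deref_loc (HOST_WIDE_INT offset)
{
  /* DW_OP_fbreg's operand is really signed; it shares union storage with
     the unsigned operand slot that new_loc_descr initializes.  */
  dw_loc_descr_ref expr = new_loc_descr (DW_OP_fbreg, offset, 0);
  add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));
  return expr;
}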
1404
1405 /* Compare two location operands for exact equality. */
1406
1407 static bool
1408 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1409 {
1410 if (a->val_class != b->val_class)
1411 return false;
1412 switch (a->val_class)
1413 {
1414 case dw_val_class_none:
1415 return true;
1416 case dw_val_class_addr:
1417 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1418
1419 case dw_val_class_offset:
1420 case dw_val_class_unsigned_const:
1421 case dw_val_class_const:
1422 case dw_val_class_unsigned_const_implicit:
1423 case dw_val_class_const_implicit:
1424 case dw_val_class_range_list:
1425 /* These are all HOST_WIDE_INT, signed or unsigned. */
1426 return a->v.val_unsigned == b->v.val_unsigned;
1427
1428 case dw_val_class_loc:
1429 return a->v.val_loc == b->v.val_loc;
1430 case dw_val_class_loc_list:
1431 return a->v.val_loc_list == b->v.val_loc_list;
1432 case dw_val_class_view_list:
1433 return a->v.val_view_list == b->v.val_view_list;
1434 case dw_val_class_die_ref:
1435 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1436 case dw_val_class_fde_ref:
1437 return a->v.val_fde_index == b->v.val_fde_index;
1438 case dw_val_class_symview:
1439 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1440 case dw_val_class_lbl_id:
1441 case dw_val_class_lineptr:
1442 case dw_val_class_macptr:
1443 case dw_val_class_loclistsptr:
1444 case dw_val_class_high_pc:
1445 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1446 case dw_val_class_str:
1447 return a->v.val_str == b->v.val_str;
1448 case dw_val_class_flag:
1449 return a->v.val_flag == b->v.val_flag;
1450 case dw_val_class_file:
1451 case dw_val_class_file_implicit:
1452 return a->v.val_file == b->v.val_file;
1453 case dw_val_class_decl_ref:
1454 return a->v.val_decl_ref == b->v.val_decl_ref;
1455
1456 case dw_val_class_const_double:
1457 return (a->v.val_double.high == b->v.val_double.high
1458 && a->v.val_double.low == b->v.val_double.low);
1459
1460 case dw_val_class_wide_int:
1461 return *a->v.val_wide == *b->v.val_wide;
1462
1463 case dw_val_class_vec:
1464 {
1465 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1466 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1467
1468 return (a_len == b_len
1469 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1470 }
1471
1472 case dw_val_class_data8:
1473 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1474
1475 case dw_val_class_vms_delta:
1476 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1477 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1478
1479 case dw_val_class_discr_value:
1480 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1481 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1482 case dw_val_class_discr_list:
1483 /* It makes no sense comparing two discriminant value lists. */
1484 return false;
1485 }
1486 gcc_unreachable ();
1487 }
1488
1489 /* Compare two location atoms for exact equality. */
1490
1491 static bool
1492 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1493 {
1494 if (a->dw_loc_opc != b->dw_loc_opc)
1495 return false;
1496
1497 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1498 address size, but since we always allocate cleared storage it
1499 should be zero for other types of locations. */
1500 if (a->dtprel != b->dtprel)
1501 return false;
1502
1503 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1504 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1505 }
1506
1507 /* Compare two complete location expressions for exact equality. */
1508
1509 bool
1510 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1511 {
1512 while (1)
1513 {
1514 if (a == b)
1515 return true;
1516 if (a == NULL || b == NULL)
1517 return false;
1518 if (!loc_descr_equal_p_1 (a, b))
1519 return false;
1520
1521 a = a->dw_loc_next;
1522 b = b->dw_loc_next;
1523 }
1524 }
1525
1526
1527 /* Add a constant POLY_OFFSET to a location expression. */
1528
1529 static void
1530 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1531 {
1532 dw_loc_descr_ref loc;
1533 HOST_WIDE_INT *p;
1534
1535 gcc_assert (*list_head != NULL);
1536
1537 if (known_eq (poly_offset, 0))
1538 return;
1539
1540 /* Find the end of the chain. */
1541 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1542 ;
1543
1544 HOST_WIDE_INT offset;
1545 if (!poly_offset.is_constant (&offset))
1546 {
1547 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1548 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1549 return;
1550 }
1551
1552 p = NULL;
1553 if (loc->dw_loc_opc == DW_OP_fbreg
1554 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1555 p = &loc->dw_loc_oprnd1.v.val_int;
1556 else if (loc->dw_loc_opc == DW_OP_bregx)
1557 p = &loc->dw_loc_oprnd2.v.val_int;
1558
1559 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1560 offset. Don't optimize if a signed integer overflow would happen. */
1561 if (p != NULL
1562 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1563 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1564 *p += offset;
1565
1566 else if (offset > 0)
1567 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1568
1569 else
1570 {
1571 loc->dw_loc_next
1572 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1573 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1574 }
1575 }
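
/* Worked example (editorial addition): if the expression currently ends in
   DW_OP_fbreg 8, loc_descr_plus_const (&loc, 16) folds the constant and
   rewrites it to DW_OP_fbreg 24; if it ends in, say, DW_OP_addr, a positive
   offset appends DW_OP_plus_uconst 16, while a negative one appends the
   constant 16 followed by DW_OP_minus.  */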
1576
1577 /* Return a pointer to a newly allocated location description for
1578 REG and OFFSET. */
1579
1580 static inline dw_loc_descr_ref
1581 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1582 {
1583 HOST_WIDE_INT const_offset;
1584 if (offset.is_constant (&const_offset))
1585 {
1586 if (reg <= 31)
1587 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1588 const_offset, 0);
1589 else
1590 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1591 }
1592 else
1593 {
1594 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1595 loc_descr_plus_const (&ret, offset);
1596 return ret;
1597 }
1598 }
1599
1600 /* Add a constant OFFSET to a location list. */
1601
1602 static void
1603 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1604 {
1605 dw_loc_list_ref d;
1606 for (d = list_head; d != NULL; d = d->dw_loc_next)
1607 loc_descr_plus_const (&d->expr, offset);
1608 }
1609
1610 #define DWARF_REF_SIZE \
1611 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1612
1613 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1614 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1615 DW_FORM_data16 with 128 bits. */
1616 #define DWARF_LARGEST_DATA_FORM_BITS \
1617 (dwarf_version >= 5 ? 128 : 64)
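
/* Editorial note (added): DWARF_REF_SIZE is the size of a DW_FORM_ref_addr
   style reference, as used for DW_OP_call_ref and DW_OP_implicit_pointer in
   size_of_loc_descr below: the full address size for -gdwarf-2, the section
   offset size (4 bytes for 32-bit DWARF) for DWARF 3 and later.  */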
1618
1619 /* Utility inline function for construction of ops that were GNU extensions
1620 before DWARF 5. */
1621 static inline enum dwarf_location_atom
1622 dwarf_OP (enum dwarf_location_atom op)
1623 {
1624 switch (op)
1625 {
1626 case DW_OP_implicit_pointer:
1627 if (dwarf_version < 5)
1628 return DW_OP_GNU_implicit_pointer;
1629 break;
1630
1631 case DW_OP_entry_value:
1632 if (dwarf_version < 5)
1633 return DW_OP_GNU_entry_value;
1634 break;
1635
1636 case DW_OP_const_type:
1637 if (dwarf_version < 5)
1638 return DW_OP_GNU_const_type;
1639 break;
1640
1641 case DW_OP_regval_type:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_regval_type;
1644 break;
1645
1646 case DW_OP_deref_type:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_deref_type;
1649 break;
1650
1651 case DW_OP_convert:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_convert;
1654 break;
1655
1656 case DW_OP_reinterpret:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_reinterpret;
1659 break;
1660
1661 case DW_OP_addrx:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_addr_index;
1664 break;
1665
1666 case DW_OP_constx:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_const_index;
1669 break;
1670
1671 default:
1672 break;
1673 }
1674 return op;
1675 }
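
/* Editorial example (added): callers request the DWARF 5 opcode and let the
   helper above downgrade it, e.g. dwarf_OP (DW_OP_entry_value) yields
   DW_OP_GNU_entry_value when dwarf_version < 5 and DW_OP_entry_value
   otherwise, so a term can be built as
   new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0).  */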
1676
1677 /* Similarly for attributes. */
1678 static inline enum dwarf_attribute
1679 dwarf_AT (enum dwarf_attribute at)
1680 {
1681 switch (at)
1682 {
1683 case DW_AT_call_return_pc:
1684 if (dwarf_version < 5)
1685 return DW_AT_low_pc;
1686 break;
1687
1688 case DW_AT_call_tail_call:
1689 if (dwarf_version < 5)
1690 return DW_AT_GNU_tail_call;
1691 break;
1692
1693 case DW_AT_call_origin:
1694 if (dwarf_version < 5)
1695 return DW_AT_abstract_origin;
1696 break;
1697
1698 case DW_AT_call_target:
1699 if (dwarf_version < 5)
1700 return DW_AT_GNU_call_site_target;
1701 break;
1702
1703 case DW_AT_call_target_clobbered:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_call_site_target_clobbered;
1706 break;
1707
1708 case DW_AT_call_parameter:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_value:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_value;
1716 break;
1717
1718 case DW_AT_call_data_value:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_data_value;
1721 break;
1722
1723 case DW_AT_call_all_calls:
1724 if (dwarf_version < 5)
1725 return DW_AT_GNU_all_call_sites;
1726 break;
1727
1728 case DW_AT_call_all_tail_calls:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_all_tail_call_sites;
1731 break;
1732
1733 case DW_AT_dwo_name:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_dwo_name;
1736 break;
1737
1738 case DW_AT_addr_base:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_addr_base;
1741 break;
1742
1743 default:
1744 break;
1745 }
1746 return at;
1747 }
1748
1749 /* And similarly for tags. */
1750 static inline enum dwarf_tag
1751 dwarf_TAG (enum dwarf_tag tag)
1752 {
1753 switch (tag)
1754 {
1755 case DW_TAG_call_site:
1756 if (dwarf_version < 5)
1757 return DW_TAG_GNU_call_site;
1758 break;
1759
1760 case DW_TAG_call_site_parameter:
1761 if (dwarf_version < 5)
1762 return DW_TAG_GNU_call_site_parameter;
1763 break;
1764
1765 default:
1766 break;
1767 }
1768 return tag;
1769 }
1770
1771 /* And similarly for forms. */
1772 static inline enum dwarf_form
1773 dwarf_FORM (enum dwarf_form form)
1774 {
1775 switch (form)
1776 {
1777 case DW_FORM_addrx:
1778 if (dwarf_version < 5)
1779 return DW_FORM_GNU_addr_index;
1780 break;
1781
1782 case DW_FORM_strx:
1783 if (dwarf_version < 5)
1784 return DW_FORM_GNU_str_index;
1785 break;
1786
1787 default:
1788 break;
1789 }
1790 return form;
1791 }
1792
1793 static unsigned long int get_base_type_offset (dw_die_ref);
1794
1795 /* Return the size of a location descriptor. */
1796
1797 static unsigned long
1798 size_of_loc_descr (dw_loc_descr_ref loc)
1799 {
1800 unsigned long size = 1;
1801
1802 switch (loc->dw_loc_opc)
1803 {
1804 case DW_OP_addr:
1805 size += DWARF2_ADDR_SIZE;
1806 break;
1807 case DW_OP_GNU_addr_index:
1808 case DW_OP_addrx:
1809 case DW_OP_GNU_const_index:
1810 case DW_OP_constx:
1811 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1812 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1813 break;
1814 case DW_OP_const1u:
1815 case DW_OP_const1s:
1816 size += 1;
1817 break;
1818 case DW_OP_const2u:
1819 case DW_OP_const2s:
1820 size += 2;
1821 break;
1822 case DW_OP_const4u:
1823 case DW_OP_const4s:
1824 size += 4;
1825 break;
1826 case DW_OP_const8u:
1827 case DW_OP_const8s:
1828 size += 8;
1829 break;
1830 case DW_OP_constu:
1831 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1832 break;
1833 case DW_OP_consts:
1834 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1835 break;
1836 case DW_OP_pick:
1837 size += 1;
1838 break;
1839 case DW_OP_plus_uconst:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1841 break;
1842 case DW_OP_skip:
1843 case DW_OP_bra:
1844 size += 2;
1845 break;
1846 case DW_OP_breg0:
1847 case DW_OP_breg1:
1848 case DW_OP_breg2:
1849 case DW_OP_breg3:
1850 case DW_OP_breg4:
1851 case DW_OP_breg5:
1852 case DW_OP_breg6:
1853 case DW_OP_breg7:
1854 case DW_OP_breg8:
1855 case DW_OP_breg9:
1856 case DW_OP_breg10:
1857 case DW_OP_breg11:
1858 case DW_OP_breg12:
1859 case DW_OP_breg13:
1860 case DW_OP_breg14:
1861 case DW_OP_breg15:
1862 case DW_OP_breg16:
1863 case DW_OP_breg17:
1864 case DW_OP_breg18:
1865 case DW_OP_breg19:
1866 case DW_OP_breg20:
1867 case DW_OP_breg21:
1868 case DW_OP_breg22:
1869 case DW_OP_breg23:
1870 case DW_OP_breg24:
1871 case DW_OP_breg25:
1872 case DW_OP_breg26:
1873 case DW_OP_breg27:
1874 case DW_OP_breg28:
1875 case DW_OP_breg29:
1876 case DW_OP_breg30:
1877 case DW_OP_breg31:
1878 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1879 break;
1880 case DW_OP_regx:
1881 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1882 break;
1883 case DW_OP_fbreg:
1884 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1885 break;
1886 case DW_OP_bregx:
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1888 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1889 break;
1890 case DW_OP_piece:
1891 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1892 break;
1893 case DW_OP_bit_piece:
1894 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1895 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1896 break;
1897 case DW_OP_deref_size:
1898 case DW_OP_xderef_size:
1899 size += 1;
1900 break;
1901 case DW_OP_call2:
1902 size += 2;
1903 break;
1904 case DW_OP_call4:
1905 size += 4;
1906 break;
1907 case DW_OP_call_ref:
1908 case DW_OP_GNU_variable_value:
1909 size += DWARF_REF_SIZE;
1910 break;
1911 case DW_OP_implicit_value:
1912 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1913 + loc->dw_loc_oprnd1.v.val_unsigned;
1914 break;
1915 case DW_OP_implicit_pointer:
1916 case DW_OP_GNU_implicit_pointer:
1917 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1918 break;
1919 case DW_OP_entry_value:
1920 case DW_OP_GNU_entry_value:
1921 {
1922 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1923 size += size_of_uleb128 (op_size) + op_size;
1924 break;
1925 }
1926 case DW_OP_const_type:
1927 case DW_OP_GNU_const_type:
1928 {
1929 unsigned long o
1930 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1931 size += size_of_uleb128 (o) + 1;
1932 switch (loc->dw_loc_oprnd2.val_class)
1933 {
1934 case dw_val_class_vec:
1935 size += loc->dw_loc_oprnd2.v.val_vec.length
1936 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1937 break;
1938 case dw_val_class_const:
1939 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1940 break;
1941 case dw_val_class_const_double:
1942 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1943 break;
1944 case dw_val_class_wide_int:
1945 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1946 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1947 break;
1948 default:
1949 gcc_unreachable ();
1950 }
1951 break;
1952 }
1953 case DW_OP_regval_type:
1954 case DW_OP_GNU_regval_type:
1955 {
1956 unsigned long o
1957 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1958 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1959 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_deref_type:
1963 case DW_OP_GNU_deref_type:
1964 {
1965 unsigned long o
1966 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1967 size += 1 + size_of_uleb128 (o);
1968 }
1969 break;
1970 case DW_OP_convert:
1971 case DW_OP_reinterpret:
1972 case DW_OP_GNU_convert:
1973 case DW_OP_GNU_reinterpret:
1974 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1975 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1976 else
1977 {
1978 unsigned long o
1979 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1980 size += size_of_uleb128 (o);
1981 }
1982 break;
1983 case DW_OP_GNU_parameter_ref:
1984 size += 4;
1985 break;
1986 default:
1987 break;
1988 }
1989
1990 return size;
1991 }
1992
1993 /* Return the size of a series of location descriptors. */
1994
1995 unsigned long
1996 size_of_locs (dw_loc_descr_ref loc)
1997 {
1998 dw_loc_descr_ref l;
1999 unsigned long size;
2000
2001 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2002 field, to avoid writing to a PCH file. */
2003 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2004 {
2005 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2006 break;
2007 size += size_of_loc_descr (l);
2008 }
2009 if (! l)
2010 return size;
2011
2012 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2013 {
2014 l->dw_loc_addr = size;
2015 size += size_of_loc_descr (l);
2016 }
2017
2018 return size;
2019 }
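/* A worked example (illustrative): a sequence consisting of a single
   DW_OP_plus_uconst with operand 144 is sized as 1 byte for the opcode
   plus size_of_uleb128 (144) == 2 bytes for the operand, so size_of_locs
   returns 3.  A lone DW_OP_breg6 with offset -8 is
   1 + size_of_sleb128 (-8) == 2 bytes.  */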
2020
2021 /* Return the size of the value in a DW_AT_discr_value attribute. */
2022
2023 static int
2024 size_of_discr_value (dw_discr_value *discr_value)
2025 {
2026 if (discr_value->pos)
2027 return size_of_uleb128 (discr_value->v.uval);
2028 else
2029 return size_of_sleb128 (discr_value->v.sval);
2030 }
2031
2032 /* Return the size of the value in a DW_AT_discr_list attribute. */
2033
2034 static int
2035 size_of_discr_list (dw_discr_list_ref discr_list)
2036 {
2037 int size = 0;
2038
2039 for (dw_discr_list_ref list = discr_list;
2040 list != NULL;
2041 list = list->dw_discr_next)
2042 {
2043 /* One byte for the discriminant value descriptor, and then one or two
2044 LEB128 numbers, depending on whether it's a single case label or a
2045 range label. */
2046 size += 1;
2047 size += size_of_discr_value (&list->dw_discr_lower_bound);
2048 if (list->dw_discr_range != 0)
2049 size += size_of_discr_value (&list->dw_discr_upper_bound);
2050 }
2051 return size;
2052 }
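/* For instance (illustrative), a discriminant list with one single-value
   label 3 and one range label 5 .. 10, using unsigned (pos) values, is
   sized as (1 + size_of_uleb128 (3))
   + (1 + size_of_uleb128 (5) + size_of_uleb128 (10)) == 2 + 3 == 5 bytes.  */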
2053
2054 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2055 static void get_ref_die_offset_label (char *, dw_die_ref);
2056 static unsigned long int get_ref_die_offset (dw_die_ref);
2057
2058 /* Output location description stack opcode's operands (if any).
2059 The for_eh_or_skip parameter controls whether register numbers are
2060 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2061 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2062 info). This should be suppressed for the cases that have not been converted
2063 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2064
2065 static void
2066 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2067 {
2068 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2069 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2070
2071 switch (loc->dw_loc_opc)
2072 {
2073 #ifdef DWARF2_DEBUGGING_INFO
2074 case DW_OP_const2u:
2075 case DW_OP_const2s:
2076 dw2_asm_output_data (2, val1->v.val_int, NULL);
2077 break;
2078 case DW_OP_const4u:
2079 if (loc->dtprel)
2080 {
2081 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2082 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2083 val1->v.val_addr);
2084 fputc ('\n', asm_out_file);
2085 break;
2086 }
2087 /* FALLTHRU */
2088 case DW_OP_const4s:
2089 dw2_asm_output_data (4, val1->v.val_int, NULL);
2090 break;
2091 case DW_OP_const8u:
2092 if (loc->dtprel)
2093 {
2094 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2095 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2096 val1->v.val_addr);
2097 fputc ('\n', asm_out_file);
2098 break;
2099 }
2100 /* FALLTHRU */
2101 case DW_OP_const8s:
2102 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2103 dw2_asm_output_data (8, val1->v.val_int, NULL);
2104 break;
2105 case DW_OP_skip:
2106 case DW_OP_bra:
2107 {
2108 int offset;
2109
2110 gcc_assert (val1->val_class == dw_val_class_loc);
2111 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2112
2113 dw2_asm_output_data (2, offset, NULL);
2114 }
2115 break;
2116 case DW_OP_implicit_value:
2117 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2118 switch (val2->val_class)
2119 {
2120 case dw_val_class_const:
2121 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2122 break;
2123 case dw_val_class_vec:
2124 {
2125 unsigned int elt_size = val2->v.val_vec.elt_size;
2126 unsigned int len = val2->v.val_vec.length;
2127 unsigned int i;
2128 unsigned char *p;
2129
2130 if (elt_size > sizeof (HOST_WIDE_INT))
2131 {
2132 elt_size /= 2;
2133 len *= 2;
2134 }
2135 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2136 i < len;
2137 i++, p += elt_size)
2138 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2139 "fp or vector constant word %u", i);
2140 }
2141 break;
2142 case dw_val_class_const_double:
2143 {
2144 unsigned HOST_WIDE_INT first, second;
2145
2146 if (WORDS_BIG_ENDIAN)
2147 {
2148 first = val2->v.val_double.high;
2149 second = val2->v.val_double.low;
2150 }
2151 else
2152 {
2153 first = val2->v.val_double.low;
2154 second = val2->v.val_double.high;
2155 }
2156 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2157 first, NULL);
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 second, NULL);
2160 }
2161 break;
2162 case dw_val_class_wide_int:
2163 {
2164 int i;
2165 int len = get_full_len (*val2->v.val_wide);
2166 if (WORDS_BIG_ENDIAN)
2167 for (i = len - 1; i >= 0; --i)
2168 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2169 val2->v.val_wide->elt (i), NULL);
2170 else
2171 for (i = 0; i < len; ++i)
2172 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2173 val2->v.val_wide->elt (i), NULL);
2174 }
2175 break;
2176 case dw_val_class_addr:
2177 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2178 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2179 break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 break;
2184 #else
2185 case DW_OP_const2u:
2186 case DW_OP_const2s:
2187 case DW_OP_const4u:
2188 case DW_OP_const4s:
2189 case DW_OP_const8u:
2190 case DW_OP_const8s:
2191 case DW_OP_skip:
2192 case DW_OP_bra:
2193 case DW_OP_implicit_value:
2194 	  /* We currently don't make any attempt to make sure these are
2195 	     aligned properly like we do for the main unwind info, so
2196 	     we don't support emitting anything larger than a byte if we're
2197 	     only doing unwinding.  */
2198 gcc_unreachable ();
2199 #endif
2200 case DW_OP_const1u:
2201 case DW_OP_const1s:
2202 dw2_asm_output_data (1, val1->v.val_int, NULL);
2203 break;
2204 case DW_OP_constu:
2205 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2206 break;
2207 case DW_OP_consts:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_pick:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213 case DW_OP_plus_uconst:
2214 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2215 break;
2216 case DW_OP_breg0:
2217 case DW_OP_breg1:
2218 case DW_OP_breg2:
2219 case DW_OP_breg3:
2220 case DW_OP_breg4:
2221 case DW_OP_breg5:
2222 case DW_OP_breg6:
2223 case DW_OP_breg7:
2224 case DW_OP_breg8:
2225 case DW_OP_breg9:
2226 case DW_OP_breg10:
2227 case DW_OP_breg11:
2228 case DW_OP_breg12:
2229 case DW_OP_breg13:
2230 case DW_OP_breg14:
2231 case DW_OP_breg15:
2232 case DW_OP_breg16:
2233 case DW_OP_breg17:
2234 case DW_OP_breg18:
2235 case DW_OP_breg19:
2236 case DW_OP_breg20:
2237 case DW_OP_breg21:
2238 case DW_OP_breg22:
2239 case DW_OP_breg23:
2240 case DW_OP_breg24:
2241 case DW_OP_breg25:
2242 case DW_OP_breg26:
2243 case DW_OP_breg27:
2244 case DW_OP_breg28:
2245 case DW_OP_breg29:
2246 case DW_OP_breg30:
2247 case DW_OP_breg31:
2248 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2249 break;
2250 case DW_OP_regx:
2251 {
2252 unsigned r = val1->v.val_unsigned;
2253 if (for_eh_or_skip >= 0)
2254 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2255 gcc_assert (size_of_uleb128 (r)
2256 == size_of_uleb128 (val1->v.val_unsigned));
2257 dw2_asm_output_data_uleb128 (r, NULL);
2258 }
2259 break;
2260 case DW_OP_fbreg:
2261 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2262 break;
2263 case DW_OP_bregx:
2264 {
2265 unsigned r = val1->v.val_unsigned;
2266 if (for_eh_or_skip >= 0)
2267 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2268 gcc_assert (size_of_uleb128 (r)
2269 == size_of_uleb128 (val1->v.val_unsigned));
2270 dw2_asm_output_data_uleb128 (r, NULL);
2271 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2272 }
2273 break;
2274 case DW_OP_piece:
2275 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2276 break;
2277 case DW_OP_bit_piece:
2278 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2279 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2280 break;
2281 case DW_OP_deref_size:
2282 case DW_OP_xderef_size:
2283 dw2_asm_output_data (1, val1->v.val_int, NULL);
2284 break;
2285
2286 case DW_OP_addr:
2287 if (loc->dtprel)
2288 {
2289 if (targetm.asm_out.output_dwarf_dtprel)
2290 {
2291 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2292 DWARF2_ADDR_SIZE,
2293 val1->v.val_addr);
2294 fputc ('\n', asm_out_file);
2295 }
2296 else
2297 gcc_unreachable ();
2298 }
2299 else
2300 {
2301 #ifdef DWARF2_DEBUGGING_INFO
2302 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2303 #else
2304 gcc_unreachable ();
2305 #endif
2306 }
2307 break;
2308
2309 case DW_OP_GNU_addr_index:
2310 case DW_OP_addrx:
2311 case DW_OP_GNU_const_index:
2312 case DW_OP_constx:
2313 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2314 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2315 "(index into .debug_addr)");
2316 break;
2317
2318 case DW_OP_call2:
2319 case DW_OP_call4:
2320 {
2321 unsigned long die_offset
2322 = get_ref_die_offset (val1->v.val_die_ref.die);
2323 /* Make sure the offset has been computed and that we can encode it as
2324 an operand. */
2325 gcc_assert (die_offset > 0
2326 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2327 ? 0xffff
2328 : 0xffffffff));
2329 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2330 die_offset, NULL);
2331 }
2332 break;
2333
2334 case DW_OP_call_ref:
2335 case DW_OP_GNU_variable_value:
2336 {
2337 char label[MAX_ARTIFICIAL_LABEL_BYTES
2338 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2339 gcc_assert (val1->val_class == dw_val_class_die_ref);
2340 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2341 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2342 }
2343 break;
2344
2345 case DW_OP_implicit_pointer:
2346 case DW_OP_GNU_implicit_pointer:
2347 {
2348 char label[MAX_ARTIFICIAL_LABEL_BYTES
2349 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2350 gcc_assert (val1->val_class == dw_val_class_die_ref);
2351 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2352 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2353 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2354 }
2355 break;
2356
2357 case DW_OP_entry_value:
2358 case DW_OP_GNU_entry_value:
2359 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2360 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2361 break;
2362
2363 case DW_OP_const_type:
2364 case DW_OP_GNU_const_type:
2365 {
2366 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2367 gcc_assert (o);
2368 dw2_asm_output_data_uleb128 (o, NULL);
2369 switch (val2->val_class)
2370 {
2371 case dw_val_class_const:
2372 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2373 dw2_asm_output_data (1, l, NULL);
2374 dw2_asm_output_data (l, val2->v.val_int, NULL);
2375 break;
2376 case dw_val_class_vec:
2377 {
2378 unsigned int elt_size = val2->v.val_vec.elt_size;
2379 unsigned int len = val2->v.val_vec.length;
2380 unsigned int i;
2381 unsigned char *p;
2382
2383 l = len * elt_size;
2384 dw2_asm_output_data (1, l, NULL);
2385 if (elt_size > sizeof (HOST_WIDE_INT))
2386 {
2387 elt_size /= 2;
2388 len *= 2;
2389 }
2390 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2391 i < len;
2392 i++, p += elt_size)
2393 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2394 "fp or vector constant word %u", i);
2395 }
2396 break;
2397 case dw_val_class_const_double:
2398 {
2399 unsigned HOST_WIDE_INT first, second;
2400 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2401
2402 dw2_asm_output_data (1, 2 * l, NULL);
2403 if (WORDS_BIG_ENDIAN)
2404 {
2405 first = val2->v.val_double.high;
2406 second = val2->v.val_double.low;
2407 }
2408 else
2409 {
2410 first = val2->v.val_double.low;
2411 second = val2->v.val_double.high;
2412 }
2413 dw2_asm_output_data (l, first, NULL);
2414 dw2_asm_output_data (l, second, NULL);
2415 }
2416 break;
2417 case dw_val_class_wide_int:
2418 {
2419 int i;
2420 int len = get_full_len (*val2->v.val_wide);
2421 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2422
2423 dw2_asm_output_data (1, len * l, NULL);
2424 if (WORDS_BIG_ENDIAN)
2425 for (i = len - 1; i >= 0; --i)
2426 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2427 else
2428 for (i = 0; i < len; ++i)
2429 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2430 }
2431 break;
2432 default:
2433 gcc_unreachable ();
2434 }
2435 }
2436 break;
2437 case DW_OP_regval_type:
2438 case DW_OP_GNU_regval_type:
2439 {
2440 unsigned r = val1->v.val_unsigned;
2441 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2442 gcc_assert (o);
2443 if (for_eh_or_skip >= 0)
2444 {
2445 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2446 gcc_assert (size_of_uleb128 (r)
2447 == size_of_uleb128 (val1->v.val_unsigned));
2448 }
2449 dw2_asm_output_data_uleb128 (r, NULL);
2450 dw2_asm_output_data_uleb128 (o, NULL);
2451 }
2452 break;
2453 case DW_OP_deref_type:
2454 case DW_OP_GNU_deref_type:
2455 {
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 dw2_asm_output_data (1, val1->v.val_int, NULL);
2459 dw2_asm_output_data_uleb128 (o, NULL);
2460 }
2461 break;
2462 case DW_OP_convert:
2463 case DW_OP_reinterpret:
2464 case DW_OP_GNU_convert:
2465 case DW_OP_GNU_reinterpret:
2466 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2467 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2468 else
2469 {
2470 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2471 gcc_assert (o);
2472 dw2_asm_output_data_uleb128 (o, NULL);
2473 }
2474 break;
2475
2476 case DW_OP_GNU_parameter_ref:
2477 {
2478 unsigned long o;
2479 gcc_assert (val1->val_class == dw_val_class_die_ref);
2480 o = get_ref_die_offset (val1->v.val_die_ref.die);
2481 dw2_asm_output_data (4, o, NULL);
2482 }
2483 break;
2484
2485 default:
2486 /* Other codes have no operands. */
2487 break;
2488 }
2489 }
2490
2491 /* Output a sequence of location operations.
2492 The for_eh_or_skip parameter controls whether register numbers are
2493 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2494 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2495 info). This should be suppressed for the cases that have not been converted
2496 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2497
2498 void
2499 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2500 {
2501 for (; loc != NULL; loc = loc->dw_loc_next)
2502 {
2503 enum dwarf_location_atom opc = loc->dw_loc_opc;
2504 /* Output the opcode. */
2505 if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2507 {
2508 unsigned r = (opc - DW_OP_breg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2512 }
2513 else if (for_eh_or_skip >= 0
2514 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2515 {
2516 unsigned r = (opc - DW_OP_reg0);
2517 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2518 gcc_assert (r <= 31);
2519 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2520 }
2521
2522 dw2_asm_output_data (1, opc,
2523 "%s", dwarf_stack_op_name (opc));
2524
2525 /* Output the operand(s) (if any). */
2526 output_loc_operands (loc, for_eh_or_skip);
2527 }
2528 }
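/* As an example (illustrative), the single-operation expression
   DW_OP_fbreg -20 is emitted here as the opcode byte 0x91 followed by the
   one-byte SLEB128 encoding of -20 (0x6c), i.e. the 2 bytes that
   size_of_locs computes for it.  */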
2529
2530 /* Output location description stack opcode's operands (if any).
2531 The output is single bytes on a line, suitable for .cfi_escape. */
2532
2533 static void
2534 output_loc_operands_raw (dw_loc_descr_ref loc)
2535 {
2536 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2537 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2538
2539 switch (loc->dw_loc_opc)
2540 {
2541 case DW_OP_addr:
2542 case DW_OP_GNU_addr_index:
2543 case DW_OP_addrx:
2544 case DW_OP_GNU_const_index:
2545 case DW_OP_constx:
2546 case DW_OP_implicit_value:
2547 /* We cannot output addresses in .cfi_escape, only bytes. */
2548 gcc_unreachable ();
2549
2550 case DW_OP_const1u:
2551 case DW_OP_const1s:
2552 case DW_OP_pick:
2553 case DW_OP_deref_size:
2554 case DW_OP_xderef_size:
2555 fputc (',', asm_out_file);
2556 dw2_asm_output_data_raw (1, val1->v.val_int);
2557 break;
2558
2559 case DW_OP_const2u:
2560 case DW_OP_const2s:
2561 fputc (',', asm_out_file);
2562 dw2_asm_output_data_raw (2, val1->v.val_int);
2563 break;
2564
2565 case DW_OP_const4u:
2566 case DW_OP_const4s:
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_raw (4, val1->v.val_int);
2569 break;
2570
2571 case DW_OP_const8u:
2572 case DW_OP_const8s:
2573 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2574 fputc (',', asm_out_file);
2575 dw2_asm_output_data_raw (8, val1->v.val_int);
2576 break;
2577
2578 case DW_OP_skip:
2579 case DW_OP_bra:
2580 {
2581 int offset;
2582
2583 gcc_assert (val1->val_class == dw_val_class_loc);
2584 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2585
2586 fputc (',', asm_out_file);
2587 dw2_asm_output_data_raw (2, offset);
2588 }
2589 break;
2590
2591 case DW_OP_regx:
2592 {
2593 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2594 gcc_assert (size_of_uleb128 (r)
2595 == size_of_uleb128 (val1->v.val_unsigned));
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (r);
2598 }
2599 break;
2600
2601 case DW_OP_constu:
2602 case DW_OP_plus_uconst:
2603 case DW_OP_piece:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2606 break;
2607
2608 case DW_OP_bit_piece:
2609 fputc (',', asm_out_file);
2610 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2611 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2612 break;
2613
2614 case DW_OP_consts:
2615 case DW_OP_breg0:
2616 case DW_OP_breg1:
2617 case DW_OP_breg2:
2618 case DW_OP_breg3:
2619 case DW_OP_breg4:
2620 case DW_OP_breg5:
2621 case DW_OP_breg6:
2622 case DW_OP_breg7:
2623 case DW_OP_breg8:
2624 case DW_OP_breg9:
2625 case DW_OP_breg10:
2626 case DW_OP_breg11:
2627 case DW_OP_breg12:
2628 case DW_OP_breg13:
2629 case DW_OP_breg14:
2630 case DW_OP_breg15:
2631 case DW_OP_breg16:
2632 case DW_OP_breg17:
2633 case DW_OP_breg18:
2634 case DW_OP_breg19:
2635 case DW_OP_breg20:
2636 case DW_OP_breg21:
2637 case DW_OP_breg22:
2638 case DW_OP_breg23:
2639 case DW_OP_breg24:
2640 case DW_OP_breg25:
2641 case DW_OP_breg26:
2642 case DW_OP_breg27:
2643 case DW_OP_breg28:
2644 case DW_OP_breg29:
2645 case DW_OP_breg30:
2646 case DW_OP_breg31:
2647 case DW_OP_fbreg:
2648 fputc (',', asm_out_file);
2649 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2650 break;
2651
2652 case DW_OP_bregx:
2653 {
2654 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2655 gcc_assert (size_of_uleb128 (r)
2656 == size_of_uleb128 (val1->v.val_unsigned));
2657 fputc (',', asm_out_file);
2658 dw2_asm_output_data_uleb128_raw (r);
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2661 }
2662 break;
2663
2664 case DW_OP_implicit_pointer:
2665 case DW_OP_entry_value:
2666 case DW_OP_const_type:
2667 case DW_OP_regval_type:
2668 case DW_OP_deref_type:
2669 case DW_OP_convert:
2670 case DW_OP_reinterpret:
2671 case DW_OP_GNU_implicit_pointer:
2672 case DW_OP_GNU_entry_value:
2673 case DW_OP_GNU_const_type:
2674 case DW_OP_GNU_regval_type:
2675 case DW_OP_GNU_deref_type:
2676 case DW_OP_GNU_convert:
2677 case DW_OP_GNU_reinterpret:
2678 case DW_OP_GNU_parameter_ref:
2679 gcc_unreachable ();
2680 break;
2681
2682 default:
2683 /* Other codes have no operands. */
2684 break;
2685 }
2686 }
2687
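/* Output a sequence of location operations in the raw comma-separated
   form produced by output_loc_operands_raw, suitable as the trailing
   operands of a .cfi_escape directive.  */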
2688 void
2689 output_loc_sequence_raw (dw_loc_descr_ref loc)
2690 {
2691 while (1)
2692 {
2693 enum dwarf_location_atom opc = loc->dw_loc_opc;
2694 /* Output the opcode. */
2695 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2696 {
2697 unsigned r = (opc - DW_OP_breg0);
2698 r = DWARF2_FRAME_REG_OUT (r, 1);
2699 gcc_assert (r <= 31);
2700 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2701 }
2702 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2703 {
2704 unsigned r = (opc - DW_OP_reg0);
2705 r = DWARF2_FRAME_REG_OUT (r, 1);
2706 gcc_assert (r <= 31);
2707 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2708 }
2709 /* Output the opcode. */
2710 fprintf (asm_out_file, "%#x", opc);
2711 output_loc_operands_raw (loc);
2712
2713 if (!loc->dw_loc_next)
2714 break;
2715 loc = loc->dw_loc_next;
2716
2717 fputc (',', asm_out_file);
2718 }
2719 }
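/* Illustrative example: for the one-element sequence DW_OP_breg6 (-8),
   typical of a frame-pointer-relative slot on x86-64 (and assuming the
   DWARF2_FRAME_REG_OUT mapping leaves register 6 unchanged, as it does
   there), the raw form is the opcode byte 0x76 (DW_OP_breg0 + 6) followed
   by 0x78, the single SLEB128 byte encoding -8.  */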
2720
2721 /* This function builds a dwarf location descriptor sequence from a
2722 dw_cfa_location, adding the given OFFSET to the result of the
2723 expression. */
2724
2725 struct dw_loc_descr_node *
2726 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2727 {
2728 struct dw_loc_descr_node *head, *tmp;
2729
2730 offset += cfa->offset;
2731
2732 if (cfa->indirect)
2733 {
2734 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2735 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2736 head->dw_loc_oprnd1.val_entry = NULL;
2737 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2738 add_loc_descr (&head, tmp);
2739 loc_descr_plus_const (&head, offset);
2740 }
2741 else
2742 head = new_reg_loc_descr (cfa->reg, offset);
2743
2744 return head;
2745 }
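/* A worked example (illustrative): if the CFA is currently register 7
   (the stack pointer in x86-64 DWARF numbering) plus 16 and OFFSET is 8,
   the non-indirect branch yields the single operation DW_OP_breg7 24.
   In the indirect case the register-relative address is dereferenced
   first: DW_OP_bregN <base_offset>; DW_OP_deref; then the combined offset
   is added (e.g. via DW_OP_plus_uconst).  */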
2746
2747 /* This function builds a dwarf location descriptor sequence for
2748    the address at OFFSET from the CFA when the stack is aligned to
2749    ALIGNMENT bytes.  */
2750
2751 struct dw_loc_descr_node *
2752 build_cfa_aligned_loc (dw_cfa_location *cfa,
2753 poly_int64 offset, HOST_WIDE_INT alignment)
2754 {
2755 struct dw_loc_descr_node *head;
2756 unsigned int dwarf_fp
2757 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2758
2759 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2760 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2761 {
2762 head = new_reg_loc_descr (dwarf_fp, 0);
2763 add_loc_descr (&head, int_loc_descriptor (alignment));
2764 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2765 loc_descr_plus_const (&head, offset);
2766 }
2767 else
2768 head = new_reg_loc_descr (dwarf_fp, offset);
2769 return head;
2770 }
2771 \f
2772 /* And now, the support for symbolic debugging information. */
2773
2774 /* .debug_str support. */
2775
2776 static void dwarf2out_init (const char *);
2777 static void dwarf2out_finish (const char *);
2778 static void dwarf2out_early_finish (const char *);
2779 static void dwarf2out_assembly_start (void);
2780 static void dwarf2out_define (unsigned int, const char *);
2781 static void dwarf2out_undef (unsigned int, const char *);
2782 static void dwarf2out_start_source_file (unsigned, const char *);
2783 static void dwarf2out_end_source_file (unsigned);
2784 static void dwarf2out_function_decl (tree);
2785 static void dwarf2out_begin_block (unsigned, unsigned);
2786 static void dwarf2out_end_block (unsigned, unsigned);
2787 static bool dwarf2out_ignore_block (const_tree);
2788 static void dwarf2out_early_global_decl (tree);
2789 static void dwarf2out_late_global_decl (tree);
2790 static void dwarf2out_type_decl (tree, int);
2791 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2792 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2793 dw_die_ref);
2794 static void dwarf2out_abstract_function (tree);
2795 static void dwarf2out_var_location (rtx_insn *);
2796 static void dwarf2out_inline_entry (tree);
2797 static void dwarf2out_size_function (tree);
2798 static void dwarf2out_begin_function (tree);
2799 static void dwarf2out_end_function (unsigned int);
2800 static void dwarf2out_register_main_translation_unit (tree unit);
2801 static void dwarf2out_set_name (tree, tree);
2802 static void dwarf2out_register_external_die (tree decl, const char *sym,
2803 unsigned HOST_WIDE_INT off);
2804 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2805 unsigned HOST_WIDE_INT *off);
2806
2807 /* The debug hooks structure. */
2808
2809 const struct gcc_debug_hooks dwarf2_debug_hooks =
2810 {
2811 dwarf2out_init,
2812 dwarf2out_finish,
2813 dwarf2out_early_finish,
2814 dwarf2out_assembly_start,
2815 dwarf2out_define,
2816 dwarf2out_undef,
2817 dwarf2out_start_source_file,
2818 dwarf2out_end_source_file,
2819 dwarf2out_begin_block,
2820 dwarf2out_end_block,
2821 dwarf2out_ignore_block,
2822 dwarf2out_source_line,
2823 dwarf2out_begin_prologue,
2824 #if VMS_DEBUGGING_INFO
2825 dwarf2out_vms_end_prologue,
2826 dwarf2out_vms_begin_epilogue,
2827 #else
2828 debug_nothing_int_charstar,
2829 debug_nothing_int_charstar,
2830 #endif
2831 dwarf2out_end_epilogue,
2832 dwarf2out_begin_function,
2833 dwarf2out_end_function, /* end_function */
2834 dwarf2out_register_main_translation_unit,
2835 dwarf2out_function_decl, /* function_decl */
2836 dwarf2out_early_global_decl,
2837 dwarf2out_late_global_decl,
2838 dwarf2out_type_decl, /* type_decl */
2839 dwarf2out_imported_module_or_decl,
2840 dwarf2out_die_ref_for_decl,
2841 dwarf2out_register_external_die,
2842 debug_nothing_tree, /* deferred_inline_function */
2843 /* The DWARF 2 backend tries to reduce debugging bloat by not
2844 emitting the abstract description of inline functions until
2845 something tries to reference them. */
2846 dwarf2out_abstract_function, /* outlining_inline_function */
2847 debug_nothing_rtx_code_label, /* label */
2848 debug_nothing_int, /* handle_pch */
2849 dwarf2out_var_location,
2850 dwarf2out_inline_entry, /* inline_entry */
2851 dwarf2out_size_function, /* size_function */
2852 dwarf2out_switch_text_section,
2853 dwarf2out_set_name,
2854 1, /* start_end_main_source_file */
2855 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2856 };
2857
2858 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2859 {
2860 dwarf2out_init,
2861 debug_nothing_charstar,
2862 debug_nothing_charstar,
2863 dwarf2out_assembly_start,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int,
2868 debug_nothing_int_int, /* begin_block */
2869 debug_nothing_int_int, /* end_block */
2870 debug_true_const_tree, /* ignore_block */
2871 dwarf2out_source_line, /* source_line */
2872 debug_nothing_int_int_charstar, /* begin_prologue */
2873 debug_nothing_int_charstar, /* end_prologue */
2874 debug_nothing_int_charstar, /* begin_epilogue */
2875 debug_nothing_int_charstar, /* end_epilogue */
2876 debug_nothing_tree, /* begin_function */
2877 debug_nothing_int, /* end_function */
2878 debug_nothing_tree, /* register_main_translation_unit */
2879 debug_nothing_tree, /* function_decl */
2880 debug_nothing_tree, /* early_global_decl */
2881 debug_nothing_tree, /* late_global_decl */
2882 debug_nothing_tree_int, /* type_decl */
2883 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2884 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2885 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2886 debug_nothing_tree, /* deferred_inline_function */
2887 debug_nothing_tree, /* outlining_inline_function */
2888 debug_nothing_rtx_code_label, /* label */
2889 debug_nothing_int, /* handle_pch */
2890 debug_nothing_rtx_insn, /* var_location */
2891 debug_nothing_tree, /* inline_entry */
2892 debug_nothing_tree, /* size_function */
2893 debug_nothing_void, /* switch_text_section */
2894 debug_nothing_tree_tree, /* set_name */
2895 0, /* start_end_main_source_file */
2896 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2897 };
2898 \f
2899 /* NOTE: In the comments in this file, many references are made to
2900 "Debugging Information Entries". This term is abbreviated as `DIE'
2901 throughout the remainder of this file. */
2902
2903 /* An internal representation of the DWARF output is built, and then
2904 walked to generate the DWARF debugging info. The walk of the internal
2905 representation is done after the entire program has been compiled.
2906 The types below are used to describe the internal representation. */
2907
2908 /* Whether to put type DIEs into their own section .debug_types instead
2909    of making them part of the .debug_info section.  Only supported for
2910    DWARF 4 or higher, and only if the user didn't disable it through
2911    -fno-debug-types-section.  It is more efficient to put them in
2912    separate comdat sections since the linker will then be able to
2913    remove duplicates.  But not all tools support .debug_types sections
2914    yet.  For DWARF 5 or higher .debug_types doesn't exist any more;
2915    type units use the DW_UT_type unit type in the .debug_info section.  */
2916
2917 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2918
2919 /* Various DIE's use offsets relative to the beginning of the
2920 .debug_info section to refer to each other. */
2921
2922 typedef long int dw_offset;
2923
2924 struct comdat_type_node;
2925
2926 /* The entries in the line_info table more-or-less mirror the opcodes
2927 that are used in the real dwarf line table. Arrays of these entries
2928 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2929 supported. */
2930
2931 enum dw_line_info_opcode {
2932 /* Emit DW_LNE_set_address; the operand is the label index. */
2933 LI_set_address,
2934
2935 /* Emit a row to the matrix with the given line. This may be done
2936 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2937 special opcodes. */
2938 LI_set_line,
2939
2940 /* Emit a DW_LNS_set_file. */
2941 LI_set_file,
2942
2943 /* Emit a DW_LNS_set_column. */
2944 LI_set_column,
2945
2946 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2947 LI_negate_stmt,
2948
2949 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2950 LI_set_prologue_end,
2951 LI_set_epilogue_begin,
2952
2953 /* Emit a DW_LNE_set_discriminator. */
2954 LI_set_discriminator,
2955
2956 /* Output a Fixed Advance PC; the target PC is the label index; the
2957 base PC is the previous LI_adv_address or LI_set_address entry.
2958 We only use this when emitting debug views without assembler
2959 support, at explicit user request. Ideally, we should only use
2960 it when the offset might be zero but we can't tell: it's the only
2961 way to maybe change the PC without resetting the view number. */
2962 LI_adv_address
2963 };
2964
2965 typedef struct GTY(()) dw_line_info_struct {
2966 enum dw_line_info_opcode opcode;
2967 unsigned int val;
2968 } dw_line_info_entry;
2969
2970
2971 struct GTY(()) dw_line_info_table {
2972 /* The label that marks the end of this section. */
2973 const char *end_label;
2974
2975 /* The values for the last row of the matrix, as collected in the table.
2976 These are used to minimize the changes to the next row. */
2977 unsigned int file_num;
2978 unsigned int line_num;
2979 unsigned int column_num;
2980 int discrim_num;
2981 bool is_stmt;
2982 bool in_use;
2983
2984 /* This denotes the NEXT view number.
2985
2986 If it is 0, it is known that the NEXT view will be the first view
2987 at the given PC.
2988
2989 If it is -1, we're forcing the view number to be reset, e.g. at a
2990 function entry.
2991
2992 The meaning of other nonzero values depends on whether we're
2993 computing views internally or leaving it for the assembler to do
2994 so. If we're emitting them internally, view denotes the view
2995 number since the last known advance of PC. If we're leaving it
2996 for the assembler, it denotes the LVU label number that we're
2997 going to ask the assembler to assign. */
2998 var_loc_view view;
2999
3000 /* This counts the number of symbolic views emitted in this table
3001 since the latest view reset. Its max value, over all tables,
3002 sets symview_upper_bound. */
3003 var_loc_view symviews_since_reset;
3004
3005 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3006 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3007 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3008 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3009
3010 vec<dw_line_info_entry, va_gc> *entries;
3011 };
3012
3013 /* This is an upper bound for view numbers that the assembler may
3014    assign to symbolic views output in this translation unit.  It is used to
3015 decide how big a field to use to represent view numbers in
3016 symview-classed attributes. */
3017
3018 static var_loc_view symview_upper_bound;
3019
3020 /* If we're keeping track of location views and their reset points, and
3021 INSN is a reset point (i.e., it necessarily advances the PC), mark
3022 the next view in TABLE as reset. */
3023
3024 static void
3025 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3026 {
3027 if (!debug_internal_reset_location_views)
3028 return;
3029
3030 /* Maybe turn (part of?) this test into a default target hook. */
3031 int reset = 0;
3032
3033 if (targetm.reset_location_view)
3034 reset = targetm.reset_location_view (insn);
3035
3036 if (reset)
3037 ;
3038 else if (JUMP_TABLE_DATA_P (insn))
3039 reset = 1;
3040 else if (GET_CODE (insn) == USE
3041 || GET_CODE (insn) == CLOBBER
3042 || GET_CODE (insn) == ASM_INPUT
3043 || asm_noperands (insn) >= 0)
3044 ;
3045 else if (get_attr_min_length (insn) > 0)
3046 reset = 1;
3047
3048 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3049 RESET_NEXT_VIEW (table->view);
3050 }
3051
3052 /* Each DIE attribute has a field specifying the attribute kind,
3053 a link to the next attribute in the chain, and an attribute value.
3054 Attributes are typically linked below the DIE they modify. */
3055
3056 typedef struct GTY(()) dw_attr_struct {
3057 enum dwarf_attribute dw_attr;
3058 dw_val_node dw_attr_val;
3059 }
3060 dw_attr_node;
3061
3062
3063 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3064 The children of each node form a circular list linked by
3065 die_sib. die_child points to the node *before* the "first" child node. */
3066
3067 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3068 union die_symbol_or_type_node
3069 {
3070 const char * GTY ((tag ("0"))) die_symbol;
3071 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3072 }
3073 GTY ((desc ("%0.comdat_type_p"))) die_id;
3074 vec<dw_attr_node, va_gc> *die_attr;
3075 dw_die_ref die_parent;
3076 dw_die_ref die_child;
3077 dw_die_ref die_sib;
3078 dw_die_ref die_definition; /* ref from a specification to its definition */
3079 dw_offset die_offset;
3080 unsigned long die_abbrev;
3081 int die_mark;
3082 unsigned int decl_id;
3083 enum dwarf_tag die_tag;
3084 /* Die is used and must not be pruned as unused. */
3085 BOOL_BITFIELD die_perennial_p : 1;
3086 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3087 /* For an external ref to die_symbol if die_offset contains an extra
3088 offset to that symbol. */
3089 BOOL_BITFIELD with_offset : 1;
3090 /* Whether this DIE was removed from the DIE tree, for example via
3091 prune_unused_types. We don't consider those present from the
3092 DIE lookup routines. */
3093 BOOL_BITFIELD removed : 1;
3094 /* Lots of spare bits. */
3095 }
3096 die_node;
3097
3098 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3099 static bool early_dwarf;
3100 static bool early_dwarf_finished;
3101 struct set_early_dwarf {
3102 bool saved;
3103 set_early_dwarf () : saved(early_dwarf)
3104 {
3105 gcc_assert (! early_dwarf_finished);
3106 early_dwarf = true;
3107 }
3108 ~set_early_dwarf () { early_dwarf = saved; }
3109 };
3110
3111 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3112 #define FOR_EACH_CHILD(die, c, expr) do { \
3113 c = die->die_child; \
3114 if (c) do { \
3115 c = c->die_sib; \
3116 expr; \
3117 } while (c != die->die_child); \
3118 } while (0)
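/* A typical (illustrative) use, counting the direct children of a DIE:

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);

   Since die_child points to the last child on the circular sibling ring,
   the macro advances to c->die_sib before evaluating EXPR, visiting each
   child exactly once and stopping after EXPR has run for die->die_child
   itself.  */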
3119
3120 /* The pubname structure */
3121
3122 typedef struct GTY(()) pubname_struct {
3123 dw_die_ref die;
3124 const char *name;
3125 }
3126 pubname_entry;
3127
3128
3129 struct GTY(()) dw_ranges {
3130 const char *label;
3131 /* If this is positive, it's a block number, otherwise it's a
3132 bitwise-negated index into dw_ranges_by_label. */
3133 int num;
3134 /* Index for the range list for DW_FORM_rnglistx. */
3135 unsigned int idx : 31;
3136   /* True if this range might possibly be in a different section
3137      from the previous entry.  */
3138 unsigned int maybe_new_sec : 1;
3139 };
3140
3141 /* A structure to hold a macinfo entry. */
3142
3143 typedef struct GTY(()) macinfo_struct {
3144 unsigned char code;
3145 unsigned HOST_WIDE_INT lineno;
3146 const char *info;
3147 }
3148 macinfo_entry;
3149
3150
3151 struct GTY(()) dw_ranges_by_label {
3152 const char *begin;
3153 const char *end;
3154 };
3155
3156 /* The comdat type node structure. */
3157 struct GTY(()) comdat_type_node
3158 {
3159 dw_die_ref root_die;
3160 dw_die_ref type_die;
3161 dw_die_ref skeleton_die;
3162 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3163 comdat_type_node *next;
3164 };
3165
3166 /* A list of DIEs for which we can't determine ancestry (parent_die
3167 field) just yet. Later in dwarf2out_finish we will fill in the
3168 missing bits. */
3169 typedef struct GTY(()) limbo_die_struct {
3170 dw_die_ref die;
3171 /* The tree for which this DIE was created. We use this to
3172 determine ancestry later. */
3173 tree created_for;
3174 struct limbo_die_struct *next;
3175 }
3176 limbo_die_node;
3177
3178 typedef struct skeleton_chain_struct
3179 {
3180 dw_die_ref old_die;
3181 dw_die_ref new_die;
3182 struct skeleton_chain_struct *parent;
3183 }
3184 skeleton_chain_node;
3185
3186 /* Define a macro which returns nonzero for a TYPE_DECL which was
3187 implicitly generated for a type.
3188
3189 Note that, unlike the C front-end (which generates a NULL named
3190 TYPE_DECL node for each complete tagged type, each array type,
3191    and each function type node created), the C++ front-end generates
3192 a _named_ TYPE_DECL node for each tagged type node created.
3193 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3194 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3195 front-end, but for each type, tagged or not. */
3196
3197 #define TYPE_DECL_IS_STUB(decl) \
3198 (DECL_NAME (decl) == NULL_TREE \
3199 || (DECL_ARTIFICIAL (decl) \
3200 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3201 /* This is necessary for stub decls that \
3202 appear in nested inline functions. */ \
3203 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3204 && (decl_ultimate_origin (decl) \
3205 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3206
3207 /* Information concerning the compilation unit's programming
3208 language, and compiler version. */
3209
3210 /* Fixed size portion of the DWARF compilation unit header. */
3211 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3212 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3213 + (dwarf_version >= 5 ? 4 : 3))
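/* As a concrete check (illustrative, assuming 32-bit DWARF where
   DWARF_INITIAL_LENGTH_SIZE == 4 and DWARF_OFFSET_SIZE == 4): a DWARF 4
   compile unit header is unit_length (4) + version (2)
   + debug_abbrev_offset (4) + address_size (1) == 11 bytes, i.e. 4 + 4 + 3,
   while DWARF 5 adds the one-byte unit_type field, giving 12 bytes,
   i.e. 4 + 4 + 4.  */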
3214
3215 /* Fixed size portion of the DWARF comdat type unit header. */
3216 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3217 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3219
3220 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3221 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3222 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3223
3224 /* Fixed size portion of public names info. */
3225 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3226
3227 /* Fixed size portion of the address range info. */
3228 #define DWARF_ARANGES_HEADER_SIZE \
3229 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3230 DWARF2_ADDR_SIZE * 2) \
3231 - DWARF_INITIAL_LENGTH_SIZE)
3232
3233 /* Size of padding portion in the address range info. It must be
3234 aligned to twice the pointer size. */
3235 #define DWARF_ARANGES_PAD_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3239
3240 /* Use assembler line directives if available. */
3241 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3242 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3243 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3244 #else
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3246 #endif
3247 #endif
3248
3249 /* Use assembler views in line directives if available. */
3250 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3251 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3252 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3253 #else
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3255 #endif
3256 #endif
3257
3258 /* Return true if GCC configure detected assembler support for .loc. */
3259
3260 bool
3261 dwarf2out_default_as_loc_support (void)
3262 {
3263 return DWARF2_ASM_LINE_DEBUG_INFO;
3264 #if (GCC_VERSION >= 3000)
3265 # undef DWARF2_ASM_LINE_DEBUG_INFO
3266 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3267 #endif
3268 }
3269
3270 /* Return true if GCC configure detected assembler support for views
3271 in .loc directives. */
3272
3273 bool
3274 dwarf2out_default_as_locview_support (void)
3275 {
3276 return DWARF2_ASM_VIEW_DEBUG_INFO;
3277 #if (GCC_VERSION >= 3000)
3278 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3279 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3280 #endif
3281 }
3282
3283 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3284 view computation, and it refers to a view identifier for which we
3285    will not emit a label because it is known to map to view number
3286 zero. We won't allocate the bitmap if we're not using assembler
3287 support for location views, but we have to make the variable
3288 visible for GGC and for code that will be optimized out for lack of
3289 support but that's still parsed and compiled. We could abstract it
3290 out with macros, but it's not worth it. */
3291 static GTY(()) bitmap zero_view_p;
3292
3293 /* Evaluate to TRUE iff N is known to identify the first location view
3294 at its PC. When not using assembler location view computation,
3295 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3296    and the view label numbers recorded in it are the ones known to be
3297 zero. */
3298 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3299 || (N) == (var_loc_view)-1 \
3300 || (zero_view_p \
3301 && bitmap_bit_p (zero_view_p, (N))))
3302
3303 /* Return true iff we're to emit .loc directives for the assembler to
3304 generate line number sections.
3305
3306 When we're not emitting views, all we need from the assembler is
3307 support for .loc directives.
3308
3309 If we are emitting views, we can only use the assembler's .loc
3310 support if it also supports views.
3311
3312 When the compiler is emitting the line number programs and
3313 computing view numbers itself, it resets view numbers at known PC
3314 changes and counts from that, and then it emits view numbers as
3315 literal constants in locviewlists. There are cases in which the
3316 compiler is not sure about PC changes, e.g. when extra alignment is
3317 requested for a label. In these cases, the compiler may not reset
3318 the view counter, and the potential PC advance in the line number
3319 program will use an opcode that does not reset the view counter
3320 even if the PC actually changes, so that compiler and debug info
3321 consumer can keep view numbers in sync.
3322
3323 When the compiler defers view computation to the assembler, it
3324 emits symbolic view numbers in locviewlists, with the exception of
3325 views known to be zero (forced resets, or reset after
3326 compiler-visible PC changes): instead of emitting symbols for
3327 these, we emit literal zero and assert the assembler agrees with
3328 the compiler's assessment. We could use symbolic views everywhere,
3329 instead of special-casing zero views, but then we'd be unable to
3330 optimize out locviewlists that contain only zeros. */
3331
3332 static bool
3333 output_asm_line_debug_info (void)
3334 {
3335 return (dwarf2out_as_loc_support
3336 && (dwarf2out_as_locview_support
3337 || !debug_variable_location_views));
3338 }
3339
3340 /* Minimum line offset in a special line info. opcode.
3341 This value was chosen to give a reasonable range of values. */
3342 #define DWARF_LINE_BASE -10
3343
3344 /* First special line opcode - leave room for the standard opcodes. */
3345 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3346
3347 /* Range of line offsets in a special line info. opcode. */
3348 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
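/* When GCC emits the line number program itself (rather than relying on
   assembler .loc support), a special opcode encodes a combined line and
   address advance in a single byte, following the standard DWARF recipe
   (an illustrative sketch, not a quote of the emission code):

     opcode = (line_delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * operation_advance
              + DWARF_LINE_OPCODE_BASE;

   which is usable whenever the result fits in 255.  With the values above
   (DWARF_LINE_BASE == -10, DWARF_LINE_OPCODE_BASE == 13), advancing the
   line by 1 without moving the PC gives opcode (1 + 10) + 0 + 13 == 24.  */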
3349
3350 /* Flag that indicates the initial value of the is_stmt_start flag.
3351 In the present implementation, we do not mark any lines as
3352 the beginning of a source statement, because that information
3353 is not made available by the GCC front-end. */
3354 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3355
3356 /* Maximum number of operations per instruction bundle. */
3357 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3358 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3359 #endif
3360
3361 /* This location is used by calc_die_sizes() to keep track of
3362 the offset of each DIE within the .debug_info section. */
3363 static unsigned long next_die_offset;
3364
3365 /* Record the root of the DIE's built for the current compilation unit. */
3366 static GTY(()) dw_die_ref single_comp_unit_die;
3367
3368 /* A list of type DIEs that have been separated into comdat sections. */
3369 static GTY(()) comdat_type_node *comdat_type_list;
3370
3371 /* A list of CU DIEs that have been separated. */
3372 static GTY(()) limbo_die_node *cu_die_list;
3373
3374 /* A list of DIEs with a NULL parent waiting to be relocated. */
3375 static GTY(()) limbo_die_node *limbo_die_list;
3376
3377 /* A list of DIEs for which we may have to generate
3378 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3379 static GTY(()) limbo_die_node *deferred_asm_name;
3380
3381 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3382 {
3383 typedef const char *compare_type;
3384
3385 static hashval_t hash (dwarf_file_data *);
3386 static bool equal (dwarf_file_data *, const char *);
3387 };
3388
3389 /* Filenames referenced by this compilation unit. */
3390 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3391
3392 struct decl_die_hasher : ggc_ptr_hash<die_node>
3393 {
3394 typedef tree compare_type;
3395
3396 static hashval_t hash (die_node *);
3397 static bool equal (die_node *, tree);
3398 };
3399 /* A hash table of references to DIE's that describe declarations.
3400 The key is a DECL_UID() which is a unique number identifying each decl. */
3401 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3402
3403 struct GTY ((for_user)) variable_value_struct {
3404 unsigned int decl_id;
3405 vec<dw_die_ref, va_gc> *dies;
3406 };
3407
3408 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3409 {
3410 typedef tree compare_type;
3411
3412 static hashval_t hash (variable_value_struct *);
3413 static bool equal (variable_value_struct *, tree);
3414 };
3415 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3416    dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3417    the DECL_CONTEXT of the referenced VAR_DECLs.  */
3418 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3419
3420 struct block_die_hasher : ggc_ptr_hash<die_struct>
3421 {
3422 static hashval_t hash (die_struct *);
3423 static bool equal (die_struct *, die_struct *);
3424 };
3425
3426 /* A hash table of references to DIE's that describe COMMON blocks.
3427 The key is DECL_UID() ^ die_parent. */
3428 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3429
3430 typedef struct GTY(()) die_arg_entry_struct {
3431 dw_die_ref die;
3432 tree arg;
3433 } die_arg_entry;
3434
3435
3436 /* Node of the variable location list. */
3437 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3438 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3439 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3440 in mode of the EXPR_LIST node and first EXPR_LIST operand
3441 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3442 location or NULL for padding. For larger bitsizes,
3443 mode is 0 and first operand is a CONCAT with bitsize
3444 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3445 NULL as second operand. */
3446 rtx GTY (()) loc;
3447 const char * GTY (()) label;
3448 struct var_loc_node * GTY (()) next;
3449 var_loc_view view;
3450 };
3451
3452 /* Variable location list. */
3453 struct GTY ((for_user)) var_loc_list_def {
3454 struct var_loc_node * GTY (()) first;
3455
3456   /* Pointer to the last element of the chained list, or to the
3457      last but one.  If the list is empty, both first and last are
3458      NULL.  If the list contains just one node, or if the last node
3459      certainly is not redundant, this points to the last node;
3460      otherwise it points to the last but one.
3461      Do not mark it for GC because it is marked through the chain.  */
3462 struct var_loc_node * GTY ((skip ("%h"))) last;
3463
3464   /* Pointer to the last element before a section switch;
3465      if NULL, either sections weren't switched, or first
3466      comes after the section switch.  */
3467 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3468
3469 /* DECL_UID of the variable decl. */
3470 unsigned int decl_id;
3471 };
3472 typedef struct var_loc_list_def var_loc_list;
3473
3474 /* Call argument location list. */
3475 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3476 rtx GTY (()) call_arg_loc_note;
3477 const char * GTY (()) label;
3478 tree GTY (()) block;
3479 bool tail_call_p;
3480 rtx GTY (()) symbol_ref;
3481 struct call_arg_loc_node * GTY (()) next;
3482 };
3483
3484
3485 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3486 {
3487 typedef const_tree compare_type;
3488
3489 static hashval_t hash (var_loc_list *);
3490 static bool equal (var_loc_list *, const_tree);
3491 };
3492
3493 /* Table of decl location linked lists. */
3494 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3495
3496 /* Head and tail of call_arg_loc chain. */
3497 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3498 static struct call_arg_loc_node *call_arg_loc_last;
3499
3500 /* Number of call sites in the current function. */
3501 static int call_site_count = -1;
3502 /* Number of tail call sites in the current function. */
3503 static int tail_call_site_count = -1;
3504
3505 /* A cached location list. */
3506 struct GTY ((for_user)) cached_dw_loc_list_def {
3507 /* The DECL_UID of the decl that this entry describes. */
3508 unsigned int decl_id;
3509
3510 /* The cached location list. */
3511 dw_loc_list_ref loc_list;
3512 };
3513 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3514
3515 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3516 {
3517
3518 typedef const_tree compare_type;
3519
3520 static hashval_t hash (cached_dw_loc_list *);
3521 static bool equal (cached_dw_loc_list *, const_tree);
3522 };
3523
3524 /* Table of cached location lists. */
3525 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3526
3527 /* A vector of references to DIE's that are uniquely identified by their tag,
3528 presence/absence of children DIE's, and list of attribute/value pairs. */
3529 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3530
3531 /* A hash map to remember the stack usage for DWARF procedures. The value
3532    stored is the difference in stack depth between just before the DWARF
3533    procedure is invoked and just after it returns.  In other words, for a
3534    DWARF procedure that consumes N stack slots and pushes M, this stores M - N.  */
3535 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
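/* Worked example (editorial note, not part of the original source): a DWARF
   procedure that consumes two stack slots and pushes one result back is
   stored in this map with the value 1 - 2 = -1.  */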
3536
3537 /* A global counter for generating labels for line number data. */
3538 static unsigned int line_info_label_num;
3539
3540 /* The current table to which we should emit line number information
3541 for the current function. This will be set up at the beginning of
3542 assembly for the function. */
3543 static GTY(()) dw_line_info_table *cur_line_info_table;
3544
3545 /* The two default tables of line number info. */
3546 static GTY(()) dw_line_info_table *text_section_line_info;
3547 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3548
3549 /* The set of all non-default tables of line number info. */
3550 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3551
3552 /* A flag to tell the pubnames/pubtypes export whether there is an info
3553 section to refer to. */
3554 static bool info_section_emitted;
3555
3556 /* A pointer to the base of a table that contains a list of publicly
3557 accessible names. */
3558 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3559
3560 /* A pointer to the base of a table that contains a list of publicly
3561 accessible types. */
3562 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3563
3564 /* A pointer to the base of a table that contains a list of macro
3565 defines/undefines (and file start/end markers). */
3566 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3567
3568 /* True if .debug_macinfo or .debug_macros section is going to be
3569 emitted. */
3570 #define have_macinfo \
3571 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3572 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3573 && !macinfo_table->is_empty ())
3574
3575 /* Vector of dies for which we should generate .debug_ranges info. */
3576 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3577
3578 /* Vector of pairs of labels referenced in ranges_table. */
3579 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3580
3581 /* Whether we have location lists that need outputting. */
3582 static GTY(()) bool have_location_lists;
3583
3584 /* Unique label counter. */
3585 static GTY(()) unsigned int loclabel_num;
3586
3587 /* Unique label counter for point-of-call tables. */
3588 static GTY(()) unsigned int poc_label_num;
3589
3590 /* The last file entry emitted by maybe_emit_file(). */
3591 static GTY(()) struct dwarf_file_data * last_emitted_file;
3592
3593 /* Number of internal labels generated by gen_internal_sym(). */
3594 static GTY(()) int label_num;
3595
3596 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3597
3598 /* Instances of generic types for which we need to generate debug
3599 info that describes their generic parameters and arguments. That
3600 generation needs to happen once all types are properly laid out so
3601 we do it at the end of compilation. */
3602 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3603
3604 /* Offset from the "steady-state frame pointer" to the frame base,
3605 within the current function. */
3606 static poly_int64 frame_pointer_fb_offset;
3607 static bool frame_pointer_fb_offset_valid;
3608
3609 static vec<dw_die_ref> base_types;
3610
3611 /* Flags to represent a set of attribute classes for attributes that represent
3612 a scalar value (bounds, pointers, ...). */
3613 enum dw_scalar_form
3614 {
3615 dw_scalar_form_constant = 0x01,
3616 dw_scalar_form_exprloc = 0x02,
3617 dw_scalar_form_reference = 0x04
3618 };
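/* Editorial sketch (not part of the original source): callers such as
   add_scalar_info combine these flags into a mask of acceptable encodings,
   e.g. a bound that may be emitted either as a constant or as a DWARF
   expression.  The variable below is purely illustrative.  */
#if 0
  int accepted_forms = dw_scalar_form_constant | dw_scalar_form_exprloc;
#endif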
3619
3620 /* Forward declarations for functions defined in this file. */
3621
3622 static int is_pseudo_reg (const_rtx);
3623 static tree type_main_variant (tree);
3624 static int is_tagged_type (const_tree);
3625 static const char *dwarf_tag_name (unsigned);
3626 static const char *dwarf_attr_name (unsigned);
3627 static const char *dwarf_form_name (unsigned);
3628 static tree decl_ultimate_origin (const_tree);
3629 static tree decl_class_context (tree);
3630 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3631 static inline enum dw_val_class AT_class (dw_attr_node *);
3632 static inline unsigned int AT_index (dw_attr_node *);
3633 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3634 static inline unsigned AT_flag (dw_attr_node *);
3635 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3636 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3637 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3638 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3639 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3640 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3641 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3642 unsigned int, unsigned char *);
3643 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3644 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3645 static inline const char *AT_string (dw_attr_node *);
3646 static enum dwarf_form AT_string_form (dw_attr_node *);
3647 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3648 static void add_AT_specification (dw_die_ref, dw_die_ref);
3649 static inline dw_die_ref AT_ref (dw_attr_node *);
3650 static inline int AT_ref_external (dw_attr_node *);
3651 static inline void set_AT_ref_external (dw_attr_node *, int);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3668 unsigned long, bool);
3669 static inline const char *AT_lbl (dw_attr_node *);
3670 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3671 static const char *get_AT_low_pc (dw_die_ref);
3672 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3673 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3674 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3675 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3676 static bool is_c (void);
3677 static bool is_cxx (void);
3678 static bool is_cxx (const_tree);
3679 static bool is_fortran (void);
3680 static bool is_ada (void);
3681 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3682 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3683 static void add_child_die (dw_die_ref, dw_die_ref);
3684 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3685 static dw_die_ref lookup_type_die (tree);
3686 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3687 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3688 static void equate_type_number_to_die (tree, dw_die_ref);
3689 static dw_die_ref lookup_decl_die (tree);
3690 static var_loc_list *lookup_decl_loc (const_tree);
3691 static void equate_decl_number_to_die (tree, dw_die_ref);
3692 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3693 static void print_spaces (FILE *);
3694 static void print_die (dw_die_ref, FILE *);
3695 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3696 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3697 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3698 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3699 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3700 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3701 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3702 struct md5_ctx *, int *);
3703 struct checksum_attributes;
3704 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3705 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3706 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3707 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3708 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3709 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3710 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3711 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3712 static int is_type_die (dw_die_ref);
3713 static inline bool is_template_instantiation (dw_die_ref);
3714 static int is_declaration_die (dw_die_ref);
3715 static int should_move_die_to_comdat (dw_die_ref);
3716 static dw_die_ref clone_as_declaration (dw_die_ref);
3717 static dw_die_ref clone_die (dw_die_ref);
3718 static dw_die_ref clone_tree (dw_die_ref);
3719 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3720 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3721 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3722 static dw_die_ref generate_skeleton (dw_die_ref);
3723 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3724 dw_die_ref,
3725 dw_die_ref);
3726 static void break_out_comdat_types (dw_die_ref);
3727 static void copy_decls_for_unworthy_types (dw_die_ref);
3728
3729 static void add_sibling_attributes (dw_die_ref);
3730 static void output_location_lists (dw_die_ref);
3731 static int constant_size (unsigned HOST_WIDE_INT);
3732 static unsigned long size_of_die (dw_die_ref);
3733 static void calc_die_sizes (dw_die_ref);
3734 static void calc_base_type_die_sizes (void);
3735 static void mark_dies (dw_die_ref);
3736 static void unmark_dies (dw_die_ref);
3737 static void unmark_all_dies (dw_die_ref);
3738 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3739 static unsigned long size_of_aranges (void);
3740 static enum dwarf_form value_format (dw_attr_node *);
3741 static void output_value_format (dw_attr_node *);
3742 static void output_abbrev_section (void);
3743 static void output_die_abbrevs (unsigned long, dw_die_ref);
3744 static void output_die (dw_die_ref);
3745 static void output_compilation_unit_header (enum dwarf_unit_type);
3746 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3747 static void output_comdat_type_unit (comdat_type_node *);
3748 static const char *dwarf2_name (tree, int);
3749 static void add_pubname (tree, dw_die_ref);
3750 static void add_enumerator_pubname (const char *, dw_die_ref);
3751 static void add_pubname_string (const char *, dw_die_ref);
3752 static void add_pubtype (tree, dw_die_ref);
3753 static void output_pubnames (vec<pubname_entry, va_gc> *);
3754 static void output_aranges (void);
3755 static unsigned int add_ranges (const_tree, bool = false);
3756 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3757 bool *, bool);
3758 static void output_ranges (void);
3759 static dw_line_info_table *new_line_info_table (void);
3760 static void output_line_info (bool);
3761 static void output_file_names (void);
3762 static dw_die_ref base_type_die (tree, bool);
3763 static int is_base_type (tree);
3764 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3765 static int decl_quals (const_tree);
3766 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3767 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3768 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3769 static unsigned int dbx_reg_number (const_rtx);
3770 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3771 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3772 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3773 enum var_init_status);
3774 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3775 enum var_init_status);
3776 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3777 enum var_init_status);
3778 static int is_based_loc (const_rtx);
3779 static bool resolve_one_addr (rtx *);
3780 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3783 enum var_init_status);
3784 struct loc_descr_context;
3785 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3786 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3787 static dw_loc_list_ref loc_list_from_tree (tree, int,
3788 struct loc_descr_context *);
3789 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_desc_attribute (dw_die_ref, tree);
3812 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3813 static void add_comp_dir_attribute (dw_die_ref);
3814 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3815 struct loc_descr_context *);
3816 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3817 struct loc_descr_context *);
3818 static void add_subscript_info (dw_die_ref, tree, bool);
3819 static void add_byte_size_attribute (dw_die_ref, tree);
3820 static void add_alignment_attribute (dw_die_ref, tree);
3821 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3822 struct vlr_context *);
3823 static void add_bit_size_attribute (dw_die_ref, tree);
3824 static void add_prototyped_attribute (dw_die_ref, tree);
3825 static void add_abstract_origin_attribute (dw_die_ref, tree);
3826 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3827 static void add_src_coords_attributes (dw_die_ref, tree);
3828 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3829 static void add_discr_value (dw_die_ref, dw_discr_value *);
3830 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3831 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3832 static dw_die_ref scope_die_for (tree, dw_die_ref);
3833 static inline int local_scope_p (dw_die_ref);
3834 static inline int class_scope_p (dw_die_ref);
3835 static inline int class_or_namespace_scope_p (dw_die_ref);
3836 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3837 static void add_calling_convention_attribute (dw_die_ref, tree);
3838 static const char *type_tag (const_tree);
3839 static tree member_declared_type (const_tree);
3840 #if 0
3841 static const char *decl_start_label (tree);
3842 #endif
3843 static void gen_array_type_die (tree, dw_die_ref);
3844 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3845 #if 0
3846 static void gen_entry_point_die (tree, dw_die_ref);
3847 #endif
3848 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3850 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3851 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3852 static void gen_formal_types_die (tree, dw_die_ref);
3853 static void gen_subprogram_die (tree, dw_die_ref);
3854 static void gen_variable_die (tree, tree, dw_die_ref);
3855 static void gen_const_die (tree, dw_die_ref);
3856 static void gen_label_die (tree, dw_die_ref);
3857 static void gen_lexical_block_die (tree, dw_die_ref);
3858 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3859 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3860 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3861 static dw_die_ref gen_compile_unit_die (const char *);
3862 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3863 static void gen_member_die (tree, dw_die_ref);
3864 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3865 enum debug_info_usage);
3866 static void gen_subroutine_type_die (tree, dw_die_ref);
3867 static void gen_typedef_die (tree, dw_die_ref);
3868 static void gen_type_die (tree, dw_die_ref);
3869 static void gen_block_die (tree, dw_die_ref);
3870 static void decls_for_scope (tree, dw_die_ref);
3871 static bool is_naming_typedef_decl (const_tree);
3872 static inline dw_die_ref get_context_die (tree);
3873 static void gen_namespace_die (tree, dw_die_ref);
3874 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3875 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3876 static dw_die_ref force_decl_die (tree);
3877 static dw_die_ref force_type_die (tree);
3878 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3879 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3880 static struct dwarf_file_data * lookup_filename (const char *);
3881 static void retry_incomplete_types (void);
3882 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3883 static void gen_generic_params_dies (tree);
3884 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3885 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3886 static void splice_child_die (dw_die_ref, dw_die_ref);
3887 static int file_info_cmp (const void *, const void *);
3888 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3889 const char *, var_loc_view, const char *);
3890 static void output_loc_list (dw_loc_list_ref);
3891 static char *gen_internal_sym (const char *);
3892 static bool want_pubnames (void);
3893
3894 static void prune_unmark_dies (dw_die_ref);
3895 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3896 static void prune_unused_types_mark (dw_die_ref, int);
3897 static void prune_unused_types_walk (dw_die_ref);
3898 static void prune_unused_types_walk_attribs (dw_die_ref);
3899 static void prune_unused_types_prune (dw_die_ref);
3900 static void prune_unused_types (void);
3901 static int maybe_emit_file (struct dwarf_file_data *fd);
3902 static inline const char *AT_vms_delta1 (dw_attr_node *);
3903 static inline const char *AT_vms_delta2 (dw_attr_node *);
3904 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3905 static void gen_remaining_tmpl_value_param_die_attribute (void);
3906 static bool generic_type_p (tree);
3907 static void schedule_generic_params_dies_gen (tree t);
3908 static void gen_scheduled_generic_parms_dies (void);
3909 static void resolve_variable_values (void);
3910
3911 static const char *comp_dir_string (void);
3912
3913 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3914
3915 /* enum for tracking thread-local variables whose address is really an offset
3916 relative to the TLS pointer, which will need link-time relocation, but will
3917 not need relocation by the DWARF consumer. */
3918
3919 enum dtprel_bool
3920 {
3921 dtprel_false = 0,
3922 dtprel_true = 1
3923 };
3924
3925 /* Return the operator to use for an address of a variable. For dtprel_true, we
3926 use DW_OP_const*. For regular variables, which need both link-time
3927 relocation and consumer-level relocation (e.g., to account for shared objects
3928 loaded at a random address), we use DW_OP_addr*. */
3929
3930 static inline enum dwarf_location_atom
3931 dw_addr_op (enum dtprel_bool dtprel)
3932 {
3933 if (dtprel == dtprel_true)
3934 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3935 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3936 else
3937 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3938 }
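/* Editorial illustration (not in the original source): the choice above
   works out roughly as follows, where dwarf_OP may substitute the
   corresponding GNU extension opcode for pre-DWARF-5 consumers:
     dtprel_true,  split DWARF  -> DW_OP_constx (or DW_OP_GNU_const_index)
     dtprel_true,  no split     -> DW_OP_const4u or DW_OP_const8u
     dtprel_false, split DWARF  -> DW_OP_addrx (or DW_OP_GNU_addr_index)
     dtprel_false, no split     -> DW_OP_addr  */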
3939
3940 /* Return a pointer to a newly allocated address location description. If
3941 dwarf_split_debug_info is true, then record the address with the appropriate
3942 relocation. */
3943 static inline dw_loc_descr_ref
3944 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3945 {
3946 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3947
3948 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3949 ref->dw_loc_oprnd1.v.val_addr = addr;
3950 ref->dtprel = dtprel;
3951 if (dwarf_split_debug_info)
3952 ref->dw_loc_oprnd1.val_entry
3953 = add_addr_table_entry (addr,
3954 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3955 else
3956 ref->dw_loc_oprnd1.val_entry = NULL;
3957
3958 return ref;
3959 }
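/* A minimal usage sketch (editorial; the SYMBOL_REF name is hypothetical):
   build a location description that evaluates to the address of "var",
   routed through .debug_addr when split debug info is enabled.  */
#if 0
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "var");
  dw_loc_descr_ref loc = new_addr_loc_descr (sym, dtprel_false);
#endif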
3960
3961 /* Section names used to hold DWARF debugging information. */
3962
3963 #ifndef DEBUG_INFO_SECTION
3964 #define DEBUG_INFO_SECTION ".debug_info"
3965 #endif
3966 #ifndef DEBUG_DWO_INFO_SECTION
3967 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_INFO_SECTION
3970 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3971 #endif
3972 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3973 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3974 #endif
3975 #ifndef DEBUG_ABBREV_SECTION
3976 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3977 #endif
3978 #ifndef DEBUG_LTO_ABBREV_SECTION
3979 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3980 #endif
3981 #ifndef DEBUG_DWO_ABBREV_SECTION
3982 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3983 #endif
3984 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3985 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3986 #endif
3987 #ifndef DEBUG_ARANGES_SECTION
3988 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3989 #endif
3990 #ifndef DEBUG_ADDR_SECTION
3991 #define DEBUG_ADDR_SECTION ".debug_addr"
3992 #endif
3993 #ifndef DEBUG_MACINFO_SECTION
3994 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3995 #endif
3996 #ifndef DEBUG_LTO_MACINFO_SECTION
3997 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3998 #endif
3999 #ifndef DEBUG_DWO_MACINFO_SECTION
4000 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4001 #endif
4002 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4003 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4004 #endif
4005 #ifndef DEBUG_MACRO_SECTION
4006 #define DEBUG_MACRO_SECTION ".debug_macro"
4007 #endif
4008 #ifndef DEBUG_LTO_MACRO_SECTION
4009 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4010 #endif
4011 #ifndef DEBUG_DWO_MACRO_SECTION
4012 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4013 #endif
4014 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4015 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4016 #endif
4017 #ifndef DEBUG_LINE_SECTION
4018 #define DEBUG_LINE_SECTION ".debug_line"
4019 #endif
4020 #ifndef DEBUG_LTO_LINE_SECTION
4021 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4022 #endif
4023 #ifndef DEBUG_DWO_LINE_SECTION
4024 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4025 #endif
4026 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4027 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4028 #endif
4029 #ifndef DEBUG_LOC_SECTION
4030 #define DEBUG_LOC_SECTION ".debug_loc"
4031 #endif
4032 #ifndef DEBUG_DWO_LOC_SECTION
4033 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4034 #endif
4035 #ifndef DEBUG_LOCLISTS_SECTION
4036 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4037 #endif
4038 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4039 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4040 #endif
4041 #ifndef DEBUG_PUBNAMES_SECTION
4042 #define DEBUG_PUBNAMES_SECTION \
4043 ((debug_generate_pub_sections == 2) \
4044 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4045 #endif
4046 #ifndef DEBUG_PUBTYPES_SECTION
4047 #define DEBUG_PUBTYPES_SECTION \
4048 ((debug_generate_pub_sections == 2) \
4049 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4050 #endif
4051 #ifndef DEBUG_STR_OFFSETS_SECTION
4052 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4053 #endif
4054 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4055 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4056 #endif
4057 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4058 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4059 #endif
4060 #ifndef DEBUG_STR_SECTION
4061 #define DEBUG_STR_SECTION ".debug_str"
4062 #endif
4063 #ifndef DEBUG_LTO_STR_SECTION
4064 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4065 #endif
4066 #ifndef DEBUG_STR_DWO_SECTION
4067 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4068 #endif
4069 #ifndef DEBUG_LTO_STR_DWO_SECTION
4070 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4071 #endif
4072 #ifndef DEBUG_RANGES_SECTION
4073 #define DEBUG_RANGES_SECTION ".debug_ranges"
4074 #endif
4075 #ifndef DEBUG_RNGLISTS_SECTION
4076 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4077 #endif
4078 #ifndef DEBUG_LINE_STR_SECTION
4079 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4080 #endif
4081 #ifndef DEBUG_LTO_LINE_STR_SECTION
4082 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4083 #endif
4084
4085 /* Standard ELF section names for compiled code and data. */
4086 #ifndef TEXT_SECTION_NAME
4087 #define TEXT_SECTION_NAME ".text"
4088 #endif
4089
4090 /* Section flags for .debug_str section. */
4091 #define DEBUG_STR_SECTION_FLAGS \
4092 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4093 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4094 : SECTION_DEBUG)
4095
4096 /* Section flags for .debug_str.dwo section. */
4097 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4098
4099 /* Attribute used to refer to the macro section. */
4100 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4101 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4102
4103 /* Labels we insert at the beginning of sections so we can reference
4104 them instead of the section names themselves. */
4105
4106 #ifndef TEXT_SECTION_LABEL
4107 #define TEXT_SECTION_LABEL "Ltext"
4108 #endif
4109 #ifndef COLD_TEXT_SECTION_LABEL
4110 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4111 #endif
4112 #ifndef DEBUG_LINE_SECTION_LABEL
4113 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4114 #endif
4115 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4116 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4117 #endif
4118 #ifndef DEBUG_INFO_SECTION_LABEL
4119 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4120 #endif
4121 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4122 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4123 #endif
4124 #ifndef DEBUG_ABBREV_SECTION_LABEL
4125 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4126 #endif
4127 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4128 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4129 #endif
4130 #ifndef DEBUG_ADDR_SECTION_LABEL
4131 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4132 #endif
4133 #ifndef DEBUG_LOC_SECTION_LABEL
4134 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4135 #endif
4136 #ifndef DEBUG_RANGES_SECTION_LABEL
4137 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4138 #endif
4139 #ifndef DEBUG_MACINFO_SECTION_LABEL
4140 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4141 #endif
4142 #ifndef DEBUG_MACRO_SECTION_LABEL
4143 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4144 #endif
4145 #define SKELETON_COMP_DIE_ABBREV 1
4146 #define SKELETON_TYPE_DIE_ABBREV 2
4147
4148 /* Definitions of defaults for formats and names of various special
4149 (artificial) labels which may be generated within this file (when the -g
4150 option is used and DWARF2_DEBUGGING_INFO is in effect).
4151 If necessary, these may be overridden from within the tm.h file, but
4152 typically, overriding these defaults is unnecessary. */
4153
4154 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4155 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169
4170 #ifndef TEXT_END_LABEL
4171 #define TEXT_END_LABEL "Letext"
4172 #endif
4173 #ifndef COLD_END_LABEL
4174 #define COLD_END_LABEL "Letext_cold"
4175 #endif
4176 #ifndef BLOCK_BEGIN_LABEL
4177 #define BLOCK_BEGIN_LABEL "LBB"
4178 #endif
4179 #ifndef BLOCK_INLINE_ENTRY_LABEL
4180 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4181 #endif
4182 #ifndef BLOCK_END_LABEL
4183 #define BLOCK_END_LABEL "LBE"
4184 #endif
4185 #ifndef LINE_CODE_LABEL
4186 #define LINE_CODE_LABEL "LM"
4187 #endif
4188
4189 \f
4190 /* Return the root of the DIEs built for the current compilation unit. */
4191 static dw_die_ref
4192 comp_unit_die (void)
4193 {
4194 if (!single_comp_unit_die)
4195 single_comp_unit_die = gen_compile_unit_die (NULL);
4196 return single_comp_unit_die;
4197 }
4198
4199 /* We allow a language front-end to designate a function that is to be
4200 called to "demangle" any name before it is put into a DIE. */
4201
4202 static const char *(*demangle_name_func) (const char *);
4203
4204 void
4205 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4206 {
4207 demangle_name_func = func;
4208 }
4209
4210 /* Test if rtl node points to a pseudo register. */
4211
4212 static inline int
4213 is_pseudo_reg (const_rtx rtl)
4214 {
4215 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4216 || (GET_CODE (rtl) == SUBREG
4217 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4218 }
4219
4220 /* Return a reference to a type, with its const and volatile qualifiers
4221 removed. */
4222
4223 static inline tree
4224 type_main_variant (tree type)
4225 {
4226 type = TYPE_MAIN_VARIANT (type);
4227
4228 /* ??? There really should be only one main variant among any group of
4229 variants of a given type (and all of the MAIN_VARIANT values for all
4230 members of the group should point to that one type) but sometimes the C
4231 front-end messes this up for array types, so we work around that bug
4232 here. */
4233 if (TREE_CODE (type) == ARRAY_TYPE)
4234 while (type != TYPE_MAIN_VARIANT (type))
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 return type;
4238 }
4239
4240 /* Return nonzero if the given type node represents a tagged type. */
4241
4242 static inline int
4243 is_tagged_type (const_tree type)
4244 {
4245 enum tree_code code = TREE_CODE (type);
4246
4247 return (code == RECORD_TYPE || code == UNION_TYPE
4248 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4249 }
4250
4251 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4252
4253 static void
4254 get_ref_die_offset_label (char *label, dw_die_ref ref)
4255 {
4256 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4257 }
4258
4259 /* Return die_offset of a DIE reference to a base type. */
4260
4261 static unsigned long int
4262 get_base_type_offset (dw_die_ref ref)
4263 {
4264 if (ref->die_offset)
4265 return ref->die_offset;
4266 if (comp_unit_die ()->die_abbrev)
4267 {
4268 calc_base_type_die_sizes ();
4269 gcc_assert (ref->die_offset);
4270 }
4271 return ref->die_offset;
4272 }
4273
4274 /* Return die_offset of a DIE reference other than base type. */
4275
4276 static unsigned long int
4277 get_ref_die_offset (dw_die_ref ref)
4278 {
4279 gcc_assert (ref->die_offset);
4280 return ref->die_offset;
4281 }
4282
4283 /* Convert a DIE tag into its string name. */
4284
4285 static const char *
4286 dwarf_tag_name (unsigned int tag)
4287 {
4288 const char *name = get_DW_TAG_name (tag);
4289
4290 if (name != NULL)
4291 return name;
4292
4293 return "DW_TAG_<unknown>";
4294 }
4295
4296 /* Convert a DWARF attribute code into its string name. */
4297
4298 static const char *
4299 dwarf_attr_name (unsigned int attr)
4300 {
4301 const char *name;
4302
4303 switch (attr)
4304 {
4305 #if VMS_DEBUGGING_INFO
4306 case DW_AT_HP_prologue:
4307 return "DW_AT_HP_prologue";
4308 #else
4309 case DW_AT_MIPS_loop_unroll_factor:
4310 return "DW_AT_MIPS_loop_unroll_factor";
4311 #endif
4312
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_epilogue:
4315 return "DW_AT_HP_epilogue";
4316 #else
4317 case DW_AT_MIPS_stride:
4318 return "DW_AT_MIPS_stride";
4319 #endif
4320 }
4321
4322 name = get_DW_AT_name (attr);
4323
4324 if (name != NULL)
4325 return name;
4326
4327 return "DW_AT_<unknown>";
4328 }
4329
4330 /* Convert a DWARF value form code into its string name. */
4331
4332 static const char *
4333 dwarf_form_name (unsigned int form)
4334 {
4335 const char *name = get_DW_FORM_name (form);
4336
4337 if (name != NULL)
4338 return name;
4339
4340 return "DW_FORM_<unknown>";
4341 }
4342 \f
4343 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4344 instance of an inlined instance of a decl which is local to an inline
4345 function, so we have to trace all of the way back through the origin chain
4346 to find out what sort of node actually served as the original seed for the
4347 given block. */
4348
4349 static tree
4350 decl_ultimate_origin (const_tree decl)
4351 {
4352 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4353 return NULL_TREE;
4354
4355 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4356 we're trying to output the abstract instance of this function. */
4357 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4358 return NULL_TREE;
4359
4360 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4361 most distant ancestor, this should never happen. */
4362 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4363
4364 return DECL_ABSTRACT_ORIGIN (decl);
4365 }
4366
4367 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4368 of a virtual function may refer to a base class, so we check the 'this'
4369 parameter. */
4370
4371 static tree
4372 decl_class_context (tree decl)
4373 {
4374 tree context = NULL_TREE;
4375
4376 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4377 context = DECL_CONTEXT (decl);
4378 else
4379 context = TYPE_MAIN_VARIANT
4380 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4381
4382 if (context && !TYPE_P (context))
4383 context = NULL_TREE;
4384
4385 return context;
4386 }
4387 \f
4388 /* Add an attribute/value pair to a DIE. */
4389
4390 static inline void
4391 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4392 {
4393 /* Maybe this should be an assert? */
4394 if (die == NULL)
4395 return;
4396
4397 if (flag_checking)
4398 {
4399 /* Check we do not add duplicate attrs. Can't use get_AT here
4400 because that recurses to the specification/abstract origin DIE. */
4401 dw_attr_node *a;
4402 unsigned ix;
4403 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4404 gcc_assert (a->dw_attr != attr->dw_attr);
4405 }
4406
4407 vec_safe_reserve (die->die_attr, 1);
4408 vec_safe_push (die->die_attr, *attr);
4409 }
4410
4411 static inline enum dw_val_class
4412 AT_class (dw_attr_node *a)
4413 {
4414 return a->dw_attr_val.val_class;
4415 }
4416
4417 /* Return the index for any attribute that will be referenced with a
4418 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4419 indices are stored in dw_attr_val.v.val_str for reference counting
4420 pruning. */
4421
4422 static inline unsigned int
4423 AT_index (dw_attr_node *a)
4424 {
4425 if (AT_class (a) == dw_val_class_str)
4426 return a->dw_attr_val.v.val_str->index;
4427 else if (a->dw_attr_val.val_entry != NULL)
4428 return a->dw_attr_val.val_entry->index;
4429 return NOT_INDEXED;
4430 }
4431
4432 /* Add a flag value attribute to a DIE. */
4433
4434 static inline void
4435 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4436 {
4437 dw_attr_node attr;
4438
4439 attr.dw_attr = attr_kind;
4440 attr.dw_attr_val.val_class = dw_val_class_flag;
4441 attr.dw_attr_val.val_entry = NULL;
4442 attr.dw_attr_val.v.val_flag = flag;
4443 add_dwarf_attr (die, &attr);
4444 }
4445
4446 static inline unsigned
4447 AT_flag (dw_attr_node *a)
4448 {
4449 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4450 return a->dw_attr_val.v.val_flag;
4451 }
4452
4453 /* Add a signed integer attribute value to a DIE. */
4454
4455 static inline void
4456 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4457 {
4458 dw_attr_node attr;
4459
4460 attr.dw_attr = attr_kind;
4461 attr.dw_attr_val.val_class = dw_val_class_const;
4462 attr.dw_attr_val.val_entry = NULL;
4463 attr.dw_attr_val.v.val_int = int_val;
4464 add_dwarf_attr (die, &attr);
4465 }
4466
4467 static inline HOST_WIDE_INT
4468 AT_int (dw_attr_node *a)
4469 {
4470 gcc_assert (a && (AT_class (a) == dw_val_class_const
4471 || AT_class (a) == dw_val_class_const_implicit));
4472 return a->dw_attr_val.v.val_int;
4473 }
4474
4475 /* Add an unsigned integer attribute value to a DIE. */
4476
4477 static inline void
4478 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4479 unsigned HOST_WIDE_INT unsigned_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline unsigned HOST_WIDE_INT
4491 AT_unsigned (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4494 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4495 return a->dw_attr_val.v.val_unsigned;
4496 }
4497
4498 /* Add an unsigned wide integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 const wide_int& w)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4510 *attr.dw_attr_val.v.val_wide = w;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an unsigned double integer attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_const_double;
4524 attr.dw_attr_val.val_entry = NULL;
4525 attr.dw_attr_val.v.val_double.high = high;
4526 attr.dw_attr_val.v.val_double.low = low;
4527 add_dwarf_attr (die, &attr);
4528 }
4529
4530 /* Add a byte vector attribute value (used e.g. for floating point constants) to a DIE. */
4531
4532 static inline void
4533 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4534 unsigned int length, unsigned int elt_size, unsigned char *array)
4535 {
4536 dw_attr_node attr;
4537
4538 attr.dw_attr = attr_kind;
4539 attr.dw_attr_val.val_class = dw_val_class_vec;
4540 attr.dw_attr_val.val_entry = NULL;
4541 attr.dw_attr_val.v.val_vec.length = length;
4542 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4543 attr.dw_attr_val.v.val_vec.array = array;
4544 add_dwarf_attr (die, &attr);
4545 }
4546
4547 /* Add an 8-byte data attribute value to a DIE. */
4548
4549 static inline void
4550 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4551 unsigned char data8[8])
4552 {
4553 dw_attr_node attr;
4554
4555 attr.dw_attr = attr_kind;
4556 attr.dw_attr_val.val_class = dw_val_class_data8;
4557 attr.dw_attr_val.val_entry = NULL;
4558 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4559 add_dwarf_attr (die, &attr);
4560 }
4561
4562 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4563 dwarf_split_debug_info, address attributes in dies destined for the
4564 final executable have force_direct set to avoid using indexed
4565 references. */
4566
4567 static inline void
4568 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4569 bool force_direct)
4570 {
4571 dw_attr_node attr;
4572 char * lbl_id;
4573
4574 lbl_id = xstrdup (lbl_low);
4575 attr.dw_attr = DW_AT_low_pc;
4576 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4577 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4578 if (dwarf_split_debug_info && !force_direct)
4579 attr.dw_attr_val.val_entry
4580 = add_addr_table_entry (lbl_id, ate_kind_label);
4581 else
4582 attr.dw_attr_val.val_entry = NULL;
4583 add_dwarf_attr (die, &attr);
4584
4585 attr.dw_attr = DW_AT_high_pc;
4586 if (dwarf_version < 4)
4587 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4588 else
4589 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4590 lbl_id = xstrdup (lbl_high);
4591 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4592 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4593 && dwarf_split_debug_info && !force_direct)
4594 attr.dw_attr_val.val_entry
4595 = add_addr_table_entry (lbl_id, ate_kind_label);
4596 else
4597 attr.dw_attr_val.val_entry = NULL;
4598 add_dwarf_attr (die, &attr);
4599 }
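/* Usage sketch (editorial): attach the bounds of the main text section to a
   DIE using the labels declared earlier in this file.  Passing false for
   force_direct lets split DWARF route the addresses through .debug_addr.  */
#if 0
  add_AT_low_high_pc (comp_unit_die (), text_section_label, text_end_label,
		      false);
#endif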
4600
4601 /* Hash and equality functions for debug_str_hash. */
4602
4603 hashval_t
4604 indirect_string_hasher::hash (indirect_string_node *x)
4605 {
4606 return htab_hash_string (x->str);
4607 }
4608
4609 bool
4610 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4611 {
4612 return strcmp (x1->str, x2) == 0;
4613 }
4614
4615 /* Add STR to the given string hash table. */
4616
4617 static struct indirect_string_node *
4618 find_AT_string_in_table (const char *str,
4619 hash_table<indirect_string_hasher> *table)
4620 {
4621 struct indirect_string_node *node;
4622
4623 indirect_string_node **slot
4624 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4625 if (*slot == NULL)
4626 {
4627 node = ggc_cleared_alloc<indirect_string_node> ();
4628 node->str = ggc_strdup (str);
4629 *slot = node;
4630 }
4631 else
4632 node = *slot;
4633
4634 node->refcount++;
4635 return node;
4636 }
4637
4638 /* Add STR to the indirect string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string (const char *str)
4642 {
4643 if (! debug_str_hash)
4644 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4645
4646 return find_AT_string_in_table (str, debug_str_hash);
4647 }
4648
4649 /* Add a string attribute value to a DIE. */
4650
4651 static inline void
4652 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4653 {
4654 dw_attr_node attr;
4655 struct indirect_string_node *node;
4656
4657 node = find_AT_string (str);
4658
4659 attr.dw_attr = attr_kind;
4660 attr.dw_attr_val.val_class = dw_val_class_str;
4661 attr.dw_attr_val.val_entry = NULL;
4662 attr.dw_attr_val.v.val_str = node;
4663 add_dwarf_attr (die, &attr);
4664 }
4665
4666 static inline const char *
4667 AT_string (dw_attr_node *a)
4668 {
4669 gcc_assert (a && AT_class (a) == dw_val_class_str);
4670 return a->dw_attr_val.v.val_str->str;
4671 }
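/* Editorial sketch of the attribute helpers above (the DIE and its values
   are hypothetical): create a variable DIE under the compilation unit and
   give it a name, an external flag and a byte size.  */
#if 0
  dw_die_ref var_die = new_die (DW_TAG_variable, comp_unit_die (), NULL_TREE);
  add_AT_string (var_die, DW_AT_name, "answer");
  add_AT_flag (var_die, DW_AT_external, 1);
  add_AT_unsigned (var_die, DW_AT_byte_size, 4);
#endif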
4672
4673 /* Call this function directly to bypass AT_string_form's logic to put
4674 the string inline in the die. */
4675
4676 static void
4677 set_indirect_string (struct indirect_string_node *node)
4678 {
4679 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4680 /* Already indirect is a no op. */
4681 if (node->form == DW_FORM_strp
4682 || node->form == DW_FORM_line_strp
4683 || node->form == dwarf_FORM (DW_FORM_strx))
4684 {
4685 gcc_assert (node->label);
4686 return;
4687 }
4688 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4689 ++dw2_string_counter;
4690 node->label = xstrdup (label);
4691
4692 if (!dwarf_split_debug_info)
4693 {
4694 node->form = DW_FORM_strp;
4695 node->index = NOT_INDEXED;
4696 }
4697 else
4698 {
4699 node->form = dwarf_FORM (DW_FORM_strx);
4700 node->index = NO_INDEX_ASSIGNED;
4701 }
4702 }
4703
4704 /* A helper function for dwarf2out_finish, called to reset indirect
4705 string decisions done for early LTO dwarf output before fat object
4706 dwarf output. */
4707
4708 int
4709 reset_indirect_string (indirect_string_node **h, void *)
4710 {
4711 struct indirect_string_node *node = *h;
4712 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4713 {
4714 free (node->label);
4715 node->label = NULL;
4716 node->form = (dwarf_form) 0;
4717 node->index = 0;
4718 }
4719 return 1;
4720 }
4721
4722 /* Find out whether a string should be output inline in DIE
4723 or out-of-line in .debug_str section. */
4724
4725 static enum dwarf_form
4726 find_string_form (struct indirect_string_node *node)
4727 {
4728 unsigned int len;
4729
4730 if (node->form)
4731 return node->form;
4732
4733 len = strlen (node->str) + 1;
4734
4735 /* If the string is shorter than or equal to the size of the reference, it is
4736 always better to put it inline. */
4737 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4738 return node->form = DW_FORM_string;
4739
4740 /* If we cannot expect the linker to merge strings in the .debug_str
4741 section, only put the string into .debug_str if it is worthwhile
4742 even within this single module. */
4743 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4744 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4745 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4746 return node->form = DW_FORM_string;
4747
4748 set_indirect_string (node);
4749
4750 return node->form;
4751 }
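/* Worked example (editorial): with a 4-byte DWARF_OFFSET_SIZE, the string
   "foo" has len 4 (including the terminating NUL), so len <= DWARF_OFFSET_SIZE
   holds and it is emitted inline as DW_FORM_string; a longer string that is
   referenced several times is routed to .debug_str instead (DW_FORM_strp, or
   an indexed form under split DWARF).  */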
4752
4753 /* Find out whether the string referenced from the attribute should be
4754 output inline in DIE or out-of-line in .debug_str section. */
4755
4756 static enum dwarf_form
4757 AT_string_form (dw_attr_node *a)
4758 {
4759 gcc_assert (a && AT_class (a) == dw_val_class_str);
4760 return find_string_form (a->dw_attr_val.v.val_str);
4761 }
4762
4763 /* Add a DIE reference attribute value to a DIE. */
4764
4765 static inline void
4766 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4767 {
4768 dw_attr_node attr;
4769 gcc_checking_assert (targ_die != NULL);
4770
4771 /* With LTO we can end up trying to reference something we didn't create
4772 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4773 if (targ_die == NULL)
4774 return;
4775
4776 attr.dw_attr = attr_kind;
4777 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4778 attr.dw_attr_val.val_entry = NULL;
4779 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4780 attr.dw_attr_val.v.val_die_ref.external = 0;
4781 add_dwarf_attr (die, &attr);
4782 }
4783
4784 /* Change DIE reference REF to point to NEW_DIE instead. */
4785
4786 static inline void
4787 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4788 {
4789 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4790 ref->dw_attr_val.v.val_die_ref.die = new_die;
4791 ref->dw_attr_val.v.val_die_ref.external = 0;
4792 }
4793
4794 /* Add an AT_specification attribute to a DIE, and also make the back
4795 pointer from the specification to the definition. */
4796
4797 static inline void
4798 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4799 {
4800 add_AT_die_ref (die, DW_AT_specification, targ_die);
4801 gcc_assert (!targ_die->die_definition);
4802 targ_die->die_definition = die;
4803 }
4804
4805 static inline dw_die_ref
4806 AT_ref (dw_attr_node *a)
4807 {
4808 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4809 return a->dw_attr_val.v.val_die_ref.die;
4810 }
4811
4812 static inline int
4813 AT_ref_external (dw_attr_node *a)
4814 {
4815 if (a && AT_class (a) == dw_val_class_die_ref)
4816 return a->dw_attr_val.v.val_die_ref.external;
4817
4818 return 0;
4819 }
4820
4821 static inline void
4822 set_AT_ref_external (dw_attr_node *a, int i)
4823 {
4824 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4825 a->dw_attr_val.v.val_die_ref.external = i;
4826 }
4827
4828 /* Add a location description attribute value to a DIE. */
4829
4830 static inline void
4831 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4832 {
4833 dw_attr_node attr;
4834
4835 attr.dw_attr = attr_kind;
4836 attr.dw_attr_val.val_class = dw_val_class_loc;
4837 attr.dw_attr_val.val_entry = NULL;
4838 attr.dw_attr_val.v.val_loc = loc;
4839 add_dwarf_attr (die, &attr);
4840 }
4841
4842 static inline dw_loc_descr_ref
4843 AT_loc (dw_attr_node *a)
4844 {
4845 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4846 return a->dw_attr_val.v.val_loc;
4847 }
4848
4849 static inline void
4850 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4851 {
4852 dw_attr_node attr;
4853
4854 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4855 return;
4856
4857 attr.dw_attr = attr_kind;
4858 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4859 attr.dw_attr_val.val_entry = NULL;
4860 attr.dw_attr_val.v.val_loc_list = loc_list;
4861 add_dwarf_attr (die, &attr);
4862 have_location_lists = true;
4863 }
4864
4865 static inline dw_loc_list_ref
4866 AT_loc_list (dw_attr_node *a)
4867 {
4868 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4869 return a->dw_attr_val.v.val_loc_list;
4870 }
4871
4872 /* Add a view list attribute to DIE. It must have a DW_AT_location
4873 attribute, because the view list complements the location list. */
4874
4875 static inline void
4876 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4877 {
4878 dw_attr_node attr;
4879
4880 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4881 return;
4882
4883 attr.dw_attr = attr_kind;
4884 attr.dw_attr_val.val_class = dw_val_class_view_list;
4885 attr.dw_attr_val.val_entry = NULL;
4886 attr.dw_attr_val.v.val_view_list = die;
4887 add_dwarf_attr (die, &attr);
4888 gcc_checking_assert (get_AT (die, DW_AT_location));
4889 gcc_assert (have_location_lists);
4890 }
4891
4892 /* Return a pointer to the location list referenced by the attribute.
4893 If the named attribute is a view list, look up the corresponding
4894 DW_AT_location attribute and return its location list. */
4895
4896 static inline dw_loc_list_ref *
4897 AT_loc_list_ptr (dw_attr_node *a)
4898 {
4899 gcc_assert (a);
4900 switch (AT_class (a))
4901 {
4902 case dw_val_class_loc_list:
4903 return &a->dw_attr_val.v.val_loc_list;
4904 case dw_val_class_view_list:
4905 {
4906 dw_attr_node *l;
4907 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4908 if (!l)
4909 return NULL;
4910 gcc_checking_assert (l + 1 == a);
4911 return AT_loc_list_ptr (l);
4912 }
4913 default:
4914 gcc_unreachable ();
4915 }
4916 }
4917
4918 /* Return the location attribute value associated with a view list
4919 attribute value. */
4920
4921 static inline dw_val_node *
4922 view_list_to_loc_list_val_node (dw_val_node *val)
4923 {
4924 gcc_assert (val->val_class == dw_val_class_view_list);
4925 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4926 if (!loc)
4927 return NULL;
4928 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4929 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4930 return &loc->dw_attr_val;
4931 }
4932
4933 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4934 {
4935 static hashval_t hash (addr_table_entry *);
4936 static bool equal (addr_table_entry *, addr_table_entry *);
4937 };
4938
4939 /* Table of entries into the .debug_addr section. */
4940
4941 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4942
4943 /* Hash an address_table_entry. */
4944
4945 hashval_t
4946 addr_hasher::hash (addr_table_entry *a)
4947 {
4948 inchash::hash hstate;
4949 switch (a->kind)
4950 {
4951 case ate_kind_rtx:
4952 hstate.add_int (0);
4953 break;
4954 case ate_kind_rtx_dtprel:
4955 hstate.add_int (1);
4956 break;
4957 case ate_kind_label:
4958 return htab_hash_string (a->addr.label);
4959 default:
4960 gcc_unreachable ();
4961 }
4962 inchash::add_rtx (a->addr.rtl, hstate);
4963 return hstate.end ();
4964 }
4965
4966 /* Determine equality for two address_table_entries. */
4967
4968 bool
4969 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4970 {
4971 if (a1->kind != a2->kind)
4972 return 0;
4973 switch (a1->kind)
4974 {
4975 case ate_kind_rtx:
4976 case ate_kind_rtx_dtprel:
4977 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4978 case ate_kind_label:
4979 return strcmp (a1->addr.label, a2->addr.label) == 0;
4980 default:
4981 gcc_unreachable ();
4982 }
4983 }
4984
4985 /* Initialize an addr_table_entry. */
4986
4987 void
4988 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4989 {
4990 e->kind = kind;
4991 switch (kind)
4992 {
4993 case ate_kind_rtx:
4994 case ate_kind_rtx_dtprel:
4995 e->addr.rtl = (rtx) addr;
4996 break;
4997 case ate_kind_label:
4998 e->addr.label = (char *) addr;
4999 break;
5000 }
5001 e->refcount = 0;
5002 e->index = NO_INDEX_ASSIGNED;
5003 }
5004
5005 /* Add an entry for ADDR (of the given kind) to the address table.
5006 Defer setting an index until output time. */
5007
5008 static addr_table_entry *
5009 add_addr_table_entry (void *addr, enum ate_kind kind)
5010 {
5011 addr_table_entry *node;
5012 addr_table_entry finder;
5013
5014 gcc_assert (dwarf_split_debug_info);
5015 if (! addr_index_table)
5016 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5017 init_addr_table_entry (&finder, kind, addr);
5018 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5019
5020 if (*slot == HTAB_EMPTY_ENTRY)
5021 {
5022 node = ggc_cleared_alloc<addr_table_entry> ();
5023 init_addr_table_entry (node, kind, addr);
5024 *slot = node;
5025 }
5026 else
5027 node = *slot;
5028
5029 node->refcount++;
5030 return node;
5031 }
5032
5033 /* Remove an entry from the addr table by decrementing its refcount.
5034 Strictly, decrementing the refcount would be enough, but the
5035 assertion that the entry is actually in the table has found
5036 bugs. */
5037
5038 static void
5039 remove_addr_table_entry (addr_table_entry *entry)
5040 {
5041 gcc_assert (dwarf_split_debug_info && addr_index_table);
5042 /* After an index is assigned, the table is frozen. */
5043 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5044 entry->refcount--;
5045 }
5046
5047 /* Given a location list, remove all addresses it refers to from the
5048 address_table. */
5049
5050 static void
5051 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5052 {
5053 for (; descr; descr = descr->dw_loc_next)
5054 if (descr->dw_loc_oprnd1.val_entry != NULL)
5055 {
5056 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5057 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5058 }
5059 }
5060
5061 /* A helper function for dwarf2out_finish called through
5062 htab_traverse. Assign an addr_table_entry its index. All entries
5063 must be collected into the table when this function is called,
5064 because the indexing code relies on htab_traverse to traverse nodes
5065 in the same order for each run. */
5066
5067 int
5068 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5069 {
5070 addr_table_entry *node = *h;
5071
5072 /* Don't index unreferenced nodes. */
5073 if (node->refcount == 0)
5074 return 1;
5075
5076 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5077 node->index = *index;
5078 *index += 1;
5079
5080 return 1;
5081 }
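
/* A minimal sketch of how the indexing pass might be driven once the
   table is complete (the starting index here is illustrative):

     unsigned int index = 0;
     if (addr_index_table != NULL)
       addr_index_table->traverse_noresize
	 <unsigned int *, index_addr_table_entry> (&index);

   Afterwards every referenced entry carries a stable index and, per the
   assertion in remove_addr_table_entry, the table is frozen.  */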
5082
5083 /* Add an address constant attribute value to a DIE. When using
5084 dwarf_split_debug_info, address attributes in DIEs destined for the
5085 final executable should be direct references; setting the parameter
5086 force_direct ensures this behavior. */
5087
5088 static inline void
5089 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5090 bool force_direct)
5091 {
5092 dw_attr_node attr;
5093
5094 attr.dw_attr = attr_kind;
5095 attr.dw_attr_val.val_class = dw_val_class_addr;
5096 attr.dw_attr_val.v.val_addr = addr;
5097 if (dwarf_split_debug_info && !force_direct)
5098 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5099 else
5100 attr.dw_attr_val.val_entry = NULL;
5101 add_dwarf_attr (die, &attr);
5102 }
5103
5104 /* Get the RTX from an address DIE attribute. */
5105
5106 static inline rtx
5107 AT_addr (dw_attr_node *a)
5108 {
5109 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5110 return a->dw_attr_val.v.val_addr;
5111 }
5112
5113 /* Add a file attribute value to a DIE. */
5114
5115 static inline void
5116 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5117 struct dwarf_file_data *fd)
5118 {
5119 dw_attr_node attr;
5120
5121 attr.dw_attr = attr_kind;
5122 attr.dw_attr_val.val_class = dw_val_class_file;
5123 attr.dw_attr_val.val_entry = NULL;
5124 attr.dw_attr_val.v.val_file = fd;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the dwarf_file_data from a file DIE attribute. */
5129
5130 static inline struct dwarf_file_data *
5131 AT_file (dw_attr_node *a)
5132 {
5133 gcc_assert (a && (AT_class (a) == dw_val_class_file
5134 || AT_class (a) == dw_val_class_file_implicit));
5135 return a->dw_attr_val.v.val_file;
5136 }
5137
5138 /* Add a symbolic view identifier attribute value to a DIE. */
5139
5140 static inline void
5141 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5142 const char *view_label)
5143 {
5144 dw_attr_node attr;
5145
5146 attr.dw_attr = attr_kind;
5147 attr.dw_attr_val.val_class = dw_val_class_symview;
5148 attr.dw_attr_val.val_entry = NULL;
5149 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5150 add_dwarf_attr (die, &attr);
5151 }
5152
5153 /* Add a label identifier attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl_id)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5165 if (dwarf_split_debug_info)
5166 attr.dw_attr_val.val_entry
5167 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5168 ate_kind_label);
5169 add_dwarf_attr (die, &attr);
5170 }
5171
5172 /* Add a section offset attribute value to a DIE, an offset into the
5173 debug_line section. */
5174
5175 static inline void
5176 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5177 const char *label)
5178 {
5179 dw_attr_node attr;
5180
5181 attr.dw_attr = attr_kind;
5182 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5183 attr.dw_attr_val.val_entry = NULL;
5184 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5185 add_dwarf_attr (die, &attr);
5186 }
5187
5188 /* Add a section offset attribute value to a DIE, an offset into the
5189 debug_macinfo section. */
5190
5191 static inline void
5192 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5193 const char *label)
5194 {
5195 dw_attr_node attr;
5196
5197 attr.dw_attr = attr_kind;
5198 attr.dw_attr_val.val_class = dw_val_class_macptr;
5199 attr.dw_attr_val.val_entry = NULL;
5200 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5201 add_dwarf_attr (die, &attr);
5202 }
5203
5204 /* Add a range_list attribute value to a DIE. When using
5205 dwarf_split_debug_info, address attributes in DIEs destined for the
5206 final executable should be direct references; setting the parameter
5207 force_direct ensures this behavior. */
5208
5209 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5210 #define RELOCATED_OFFSET (NULL)
5211
5212 static void
5213 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5214 long unsigned int offset, bool force_direct)
5215 {
5216 dw_attr_node attr;
5217
5218 attr.dw_attr = attr_kind;
5219 attr.dw_attr_val.val_class = dw_val_class_range_list;
5220 /* For the range_list attribute, use val_entry to store whether the
5221 offset should follow split-debug-info or normal semantics. This
5222 value is read in output_range_list_offset. */
5223 if (dwarf_split_debug_info && !force_direct)
5224 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5225 else
5226 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5227 attr.dw_attr_val.v.val_offset = offset;
5228 add_dwarf_attr (die, &attr);
5229 }
5230
5231 /* Return the start label of a delta attribute. */
5232
5233 static inline const char *
5234 AT_vms_delta1 (dw_attr_node *a)
5235 {
5236 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5237 return a->dw_attr_val.v.val_vms_delta.lbl1;
5238 }
5239
5240 /* Return the end label of a delta attribute. */
5241
5242 static inline const char *
5243 AT_vms_delta2 (dw_attr_node *a)
5244 {
5245 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5246 return a->dw_attr_val.v.val_vms_delta.lbl2;
5247 }
5248
5249 static inline const char *
5250 AT_lbl (dw_attr_node *a)
5251 {
5252 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5253 || AT_class (a) == dw_val_class_lineptr
5254 || AT_class (a) == dw_val_class_macptr
5255 || AT_class (a) == dw_val_class_loclistsptr
5256 || AT_class (a) == dw_val_class_high_pc));
5257 return a->dw_attr_val.v.val_lbl_id;
5258 }
5259
5260 /* Get the attribute of type attr_kind. */
5261
5262 static dw_attr_node *
5263 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5264 {
5265 dw_attr_node *a;
5266 unsigned ix;
5267 dw_die_ref spec = NULL;
5268
5269 if (! die)
5270 return NULL;
5271
5272 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5273 if (a->dw_attr == attr_kind)
5274 return a;
5275 else if (a->dw_attr == DW_AT_specification
5276 || a->dw_attr == DW_AT_abstract_origin)
5277 spec = AT_ref (a);
5278
5279 if (spec)
5280 return get_AT (spec, attr_kind);
5281
5282 return NULL;
5283 }
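
/* For illustration: for a C++ member function whose out-of-class
   definition DIE carries only DW_AT_specification pointing back at the
   in-class declaration,

     const char *name = get_AT_string (def_die, DW_AT_name);

   still finds DW_AT_name, because get_AT follows the specification
   link when the attribute is not present on the DIE itself (DEF_DIE
   being a hypothetical name for the definition DIE).  */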
5284
5285 /* Returns the parent of the declaration of DIE. */
5286
5287 static dw_die_ref
5288 get_die_parent (dw_die_ref die)
5289 {
5290 dw_die_ref t;
5291
5292 if (!die)
5293 return NULL;
5294
5295 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5296 || (t = get_AT_ref (die, DW_AT_specification)))
5297 die = t;
5298
5299 return die->die_parent;
5300 }
5301
5302 /* Return the "low pc" attribute value, typically associated with a subprogram
5303 DIE. Return null if the "low pc" attribute is either not present, or if it
5304 cannot be represented as an assembler label identifier. */
5305
5306 static inline const char *
5307 get_AT_low_pc (dw_die_ref die)
5308 {
5309 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5310
5311 return a ? AT_lbl (a) : NULL;
5312 }
5313
5314 /* Return the value of the string attribute designated by ATTR_KIND, or
5315 NULL if it is not present. */
5316
5317 static inline const char *
5318 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5319 {
5320 dw_attr_node *a = get_AT (die, attr_kind);
5321
5322 return a ? AT_string (a) : NULL;
5323 }
5324
5325 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5326 if it is not present. */
5327
5328 static inline int
5329 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5330 {
5331 dw_attr_node *a = get_AT (die, attr_kind);
5332
5333 return a ? AT_flag (a) : 0;
5334 }
5335
5336 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5337 if it is not present. */
5338
5339 static inline unsigned
5340 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5341 {
5342 dw_attr_node *a = get_AT (die, attr_kind);
5343
5344 return a ? AT_unsigned (a) : 0;
5345 }
5346
5347 static inline dw_die_ref
5348 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5349 {
5350 dw_attr_node *a = get_AT (die, attr_kind);
5351
5352 return a ? AT_ref (a) : NULL;
5353 }
5354
5355 static inline struct dwarf_file_data *
5356 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5357 {
5358 dw_attr_node *a = get_AT (die, attr_kind);
5359
5360 return a ? AT_file (a) : NULL;
5361 }
5362
5363 /* Return TRUE if the language is C. */
5364
5365 static inline bool
5366 is_c (void)
5367 {
5368 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5369
5370 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5371 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5372
5373
5374 }
5375
5376 /* Return TRUE if the language is C++. */
5377
5378 static inline bool
5379 is_cxx (void)
5380 {
5381 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5382
5383 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5384 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5385 }
5386
5387 /* Return TRUE if DECL was created by the C++ frontend. */
5388
5389 static bool
5390 is_cxx (const_tree decl)
5391 {
5392 if (in_lto_p)
5393 {
5394 const_tree context = get_ultimate_context (decl);
5395 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5396 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5397 }
5398 return is_cxx ();
5399 }
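
/* For example, the language string recorded for a C++ translation unit
   begins with "GNU C++", so the 7-character prefix test above
   classifies it as C++ even if the front end appends a standard level
   (e.g. "GNU C++14"), without consulting the front end at LTRANS
   time.  */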
5400
5401 /* Return TRUE if the language is Fortran. */
5402
5403 static inline bool
5404 is_fortran (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_Fortran77
5409 || lang == DW_LANG_Fortran90
5410 || lang == DW_LANG_Fortran95
5411 || lang == DW_LANG_Fortran03
5412 || lang == DW_LANG_Fortran08);
5413 }
5414
5415 static inline bool
5416 is_fortran (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5423 "GNU Fortran", 11) == 0
5424 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5425 "GNU F77") == 0);
5426 }
5427 return is_fortran ();
5428 }
5429
5430 /* Return TRUE if the language is Ada. */
5431
5432 static inline bool
5433 is_ada (void)
5434 {
5435 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5436
5437 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5438 }
5439
5440 /* Remove the specified attribute if present. Return TRUE if removal
5441 was successful. */
5442
5443 static bool
5444 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5445 {
5446 dw_attr_node *a;
5447 unsigned ix;
5448
5449 if (! die)
5450 return false;
5451
5452 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5453 if (a->dw_attr == attr_kind)
5454 {
5455 if (AT_class (a) == dw_val_class_str)
5456 if (a->dw_attr_val.v.val_str->refcount)
5457 a->dw_attr_val.v.val_str->refcount--;
5458
5459 /* vec::ordered_remove should help reduce the number of abbrevs
5460 that are needed. */
5461 die->die_attr->ordered_remove (ix);
5462 return true;
5463 }
5464 return false;
5465 }
5466
5467 /* Remove CHILD from its parent. PREV must have the property that
5468 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but otherwise leaves CHILD intact. */
5469
5470 static void
5471 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5472 {
5473 gcc_assert (child->die_parent == prev->die_parent);
5474 gcc_assert (prev->die_sib == child);
5475 if (prev == child)
5476 {
5477 gcc_assert (child->die_parent->die_child == child);
5478 prev = NULL;
5479 }
5480 else
5481 prev->die_sib = child->die_sib;
5482 if (child->die_parent->die_child == child)
5483 child->die_parent->die_child = prev;
5484 child->die_sib = NULL;
5485 }
5486
5487 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5488 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but otherwise leaves OLD_CHILD intact. */
5489
5490 static void
5491 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5492 {
5493 dw_die_ref parent = old_child->die_parent;
5494
5495 gcc_assert (parent == prev->die_parent);
5496 gcc_assert (prev->die_sib == old_child);
5497
5498 new_child->die_parent = parent;
5499 if (prev == old_child)
5500 {
5501 gcc_assert (parent->die_child == old_child);
5502 new_child->die_sib = new_child;
5503 }
5504 else
5505 {
5506 prev->die_sib = new_child;
5507 new_child->die_sib = old_child->die_sib;
5508 }
5509 if (old_child->die_parent->die_child == old_child)
5510 old_child->die_parent->die_child = new_child;
5511 old_child->die_sib = NULL;
5512 }
5513
5514 /* Move all children from OLD_PARENT to NEW_PARENT. */
5515
5516 static void
5517 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5518 {
5519 dw_die_ref c;
5520 new_parent->die_child = old_parent->die_child;
5521 old_parent->die_child = NULL;
5522 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5523 }
5524
5525 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5526 matches TAG. */
5527
5528 static void
5529 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5530 {
5531 dw_die_ref c;
5532
5533 c = die->die_child;
5534 if (c) do {
5535 dw_die_ref prev = c;
5536 c = c->die_sib;
5537 while (c->die_tag == tag)
5538 {
5539 remove_child_with_prev (c, prev);
5540 c->die_parent = NULL;
5541 /* Might have removed every child. */
5542 if (die->die_child == NULL)
5543 return;
5544 c = prev->die_sib;
5545 }
5546 } while (c != die->die_child);
5547 }
5548
5549 /* Add a CHILD_DIE as the last child of DIE. */
5550
5551 static void
5552 add_child_die (dw_die_ref die, dw_die_ref child_die)
5553 {
5554 /* FIXME this should probably be an assert. */
5555 if (! die || ! child_die)
5556 return;
5557 gcc_assert (die != child_die);
5558
5559 child_die->die_parent = die;
5560 if (die->die_child)
5561 {
5562 child_die->die_sib = die->die_child->die_sib;
5563 die->die_child->die_sib = child_die;
5564 }
5565 else
5566 child_die->die_sib = child_die;
5567 die->die_child = child_die;
5568 }
5569
5570 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5571
5572 static void
5573 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5574 dw_die_ref after_die)
5575 {
5576 gcc_assert (die
5577 && child_die
5578 && after_die
5579 && die->die_child
5580 && die != child_die);
5581
5582 child_die->die_parent = die;
5583 child_die->die_sib = after_die->die_sib;
5584 after_die->die_sib = child_die;
5585 if (die->die_child == after_die)
5586 die->die_child = child_die;
5587 }
5588
5589 /* Unassociate CHILD from its parent, and make its parent be
5590 NEW_PARENT. */
5591
5592 static void
5593 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5594 {
5595 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5596 if (p->die_sib == child)
5597 {
5598 remove_child_with_prev (child, p);
5599 break;
5600 }
5601 add_child_die (new_parent, child);
5602 }
5603
5604 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5605 is the specification, to the end of PARENT's list of children.
5606 This is done by removing and re-adding it. */
5607
5608 static void
5609 splice_child_die (dw_die_ref parent, dw_die_ref child)
5610 {
5611 /* We want the declaration DIE from inside the class, not the
5612 specification DIE at toplevel. */
5613 if (child->die_parent != parent)
5614 {
5615 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5616
5617 if (tmp)
5618 child = tmp;
5619 }
5620
5621 gcc_assert (child->die_parent == parent
5622 || (child->die_parent
5623 == get_AT_ref (parent, DW_AT_specification)));
5624
5625 reparent_child (child, parent);
5626 }
5627
5628 /* Create and return a new die with TAG_VALUE as tag. */
5629
5630 static inline dw_die_ref
5631 new_die_raw (enum dwarf_tag tag_value)
5632 {
5633 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5634 die->die_tag = tag_value;
5635 return die;
5636 }
5637
5638 /* Create and return a new die with a parent of PARENT_DIE. If
5639 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5640 associated tree T must be supplied to determine parenthood
5641 later. */
5642
5643 static inline dw_die_ref
5644 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5645 {
5646 dw_die_ref die = new_die_raw (tag_value);
5647
5648 if (parent_die != NULL)
5649 add_child_die (parent_die, die);
5650 else
5651 {
5652 limbo_die_node *limbo_node;
5653
5654 /* No DIEs created after early dwarf should end up in limbo,
5655 because the limbo list should not persist past LTO
5656 streaming. */
5657 if (tag_value != DW_TAG_compile_unit
5658 /* These are allowed because they're generated while
5659 breaking out COMDAT units late. */
5660 && tag_value != DW_TAG_type_unit
5661 && tag_value != DW_TAG_skeleton_unit
5662 && !early_dwarf
5663 /* Allow nested functions to live in limbo because they will
5664 only temporarily live there, as decls_for_scope will fix
5665 them up. */
5666 && (TREE_CODE (t) != FUNCTION_DECL
5667 || !decl_function_context (t))
5668 /* Same as nested functions above but for types. Types that
5669 are local to a function will be fixed in
5670 decls_for_scope. */
5671 && (!RECORD_OR_UNION_TYPE_P (t)
5672 || !TYPE_CONTEXT (t)
5673 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5674 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5675 especially in the ltrans stage, but once we implement LTO
5676 dwarf streaming, we should remove this exception. */
5677 && !in_lto_p)
5678 {
5679 fprintf (stderr, "symbol ended up in limbo too late:");
5680 debug_generic_stmt (t);
5681 gcc_unreachable ();
5682 }
5683
5684 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5685 limbo_node->die = die;
5686 limbo_node->created_for = t;
5687 limbo_node->next = limbo_die_list;
5688 limbo_die_list = limbo_node;
5689 }
5690
5691 return die;
5692 }
5693
5694 /* Return the DIE associated with the given type specifier. */
5695
5696 static inline dw_die_ref
5697 lookup_type_die (tree type)
5698 {
5699 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5700 if (die && die->removed)
5701 {
5702 TYPE_SYMTAB_DIE (type) = NULL;
5703 return NULL;
5704 }
5705 return die;
5706 }
5707
5708 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5709 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5710 anonymous type instead of the one of the naming typedef. */
5711
5712 static inline dw_die_ref
5713 strip_naming_typedef (tree type, dw_die_ref type_die)
5714 {
5715 if (type
5716 && TREE_CODE (type) == RECORD_TYPE
5717 && type_die
5718 && type_die->die_tag == DW_TAG_typedef
5719 && is_naming_typedef_decl (TYPE_NAME (type)))
5720 type_die = get_AT_ref (type_die, DW_AT_type);
5721 return type_die;
5722 }
5723
5724 /* Like lookup_type_die, but if type is an anonymous type named by a
5725 typedef[1], return the DIE of the anonymous type instead of the
5726 one of the naming typedef. This is because in gen_typedef_die, we
5727 equated the anonymous struct named by the typedef with the DIE of
5728 the naming typedef. So by default, lookup_type_die on an anonymous
5729 struct yields the DIE of the naming typedef.
5730
5731 [1]: Read the comment of is_naming_typedef_decl to learn about what
5732 a naming typedef is. */
5733
5734 static inline dw_die_ref
5735 lookup_type_die_strip_naming_typedef (tree type)
5736 {
5737 dw_die_ref die = lookup_type_die (type);
5738 return strip_naming_typedef (type, die);
5739 }
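
/* For illustration, given

     typedef struct { int i; } T;

   gen_typedef_die equates the anonymous struct type with the
   DW_TAG_typedef DIE for T, so plain lookup_type_die on that struct
   type returns the typedef DIE; this wrapper follows DW_AT_type and
   returns the underlying structure DIE instead.  */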
5740
5741 /* Equate a DIE to a given type specifier. */
5742
5743 static inline void
5744 equate_type_number_to_die (tree type, dw_die_ref type_die)
5745 {
5746 TYPE_SYMTAB_DIE (type) = type_die;
5747 }
5748
5749 static dw_die_ref maybe_create_die_with_external_ref (tree);
5750 struct GTY(()) sym_off_pair
5751 {
5752 const char * GTY((skip)) sym;
5753 unsigned HOST_WIDE_INT off;
5754 };
5755 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5756
5757 /* Returns a hash value for X (which really is a die_struct). */
5758
5759 inline hashval_t
5760 decl_die_hasher::hash (die_node *x)
5761 {
5762 return (hashval_t) x->decl_id;
5763 }
5764
5765 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5766
5767 inline bool
5768 decl_die_hasher::equal (die_node *x, tree y)
5769 {
5770 return (x->decl_id == DECL_UID (y));
5771 }
5772
5773 /* Return the DIE associated with a given declaration. */
5774
5775 static inline dw_die_ref
5776 lookup_decl_die (tree decl)
5777 {
5778 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5779 NO_INSERT);
5780 if (!die)
5781 {
5782 if (in_lto_p)
5783 return maybe_create_die_with_external_ref (decl);
5784 return NULL;
5785 }
5786 if ((*die)->removed)
5787 {
5788 decl_die_table->clear_slot (die);
5789 return NULL;
5790 }
5791 return *die;
5792 }
5793
5794
5795 /* Return the DIE associated with BLOCK. */
5796
5797 static inline dw_die_ref
5798 lookup_block_die (tree block)
5799 {
5800 dw_die_ref die = BLOCK_DIE (block);
5801 if (!die && in_lto_p)
5802 return maybe_create_die_with_external_ref (block);
5803 return die;
5804 }
5805
5806 /* Associate DIE with BLOCK. */
5807
5808 static inline void
5809 equate_block_to_die (tree block, dw_die_ref die)
5810 {
5811 BLOCK_DIE (block) = die;
5812 }
5813 #undef BLOCK_DIE
5814
5815
5816 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5817 style reference. Return true if we found one referring to a DIE for
5818 DECL, otherwise return false. */
5819
5820 static bool
5821 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5822 unsigned HOST_WIDE_INT *off)
5823 {
5824 dw_die_ref die;
5825
5826 if (in_lto_p)
5827 {
5828 /* During WPA stage and incremental linking we use a hash-map
5829 to store the decl <-> label + offset map. */
5830 if (!external_die_map)
5831 return false;
5832 sym_off_pair *desc = external_die_map->get (decl);
5833 if (!desc)
5834 return false;
5835 *sym = desc->sym;
5836 *off = desc->off;
5837 return true;
5838 }
5839
5840 if (TREE_CODE (decl) == BLOCK)
5841 die = lookup_block_die (decl);
5842 else
5843 die = lookup_decl_die (decl);
5844 if (!die)
5845 return false;
5846
5847 /* Similar to get_ref_die_offset_label, but using the "correct"
5848 label. */
5849 *off = die->die_offset;
5850 while (die->die_parent)
5851 die = die->die_parent;
5852 /* For the containing CU DIE we compute a die_symbol in
5853 compute_comp_unit_symbol. */
5854 gcc_assert (die->die_tag == DW_TAG_compile_unit
5855 && die->die_id.die_symbol != NULL);
5856 *sym = die->die_id.die_symbol;
5857 return true;
5858 }
5859
5860 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5861
5862 static void
5863 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5864 const char *symbol, HOST_WIDE_INT offset)
5865 {
5866 /* Create a fake DIE that contains the reference. Don't use
5867 new_die because we don't want to end up in the limbo list. */
5868 /* ??? We probably want to share these, thus put a ref to the DIE
5869 we create here to the external_die_map entry. */
5870 dw_die_ref ref = new_die_raw (die->die_tag);
5871 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5872 ref->die_offset = offset;
5873 ref->with_offset = 1;
5874 add_AT_die_ref (die, attr_kind, ref);
5875 }
5876
5877 /* Create a DIE for DECL if required and add a reference to a DIE
5878 at SYMBOL + OFFSET which contains attributes dumped early. */
5879
5880 static void
5881 dwarf2out_register_external_die (tree decl, const char *sym,
5882 unsigned HOST_WIDE_INT off)
5883 {
5884 if (debug_info_level == DINFO_LEVEL_NONE)
5885 return;
5886
5887 if (!external_die_map)
5888 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5889 gcc_checking_assert (!external_die_map->get (decl));
5890 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5891 external_die_map->put (decl, p);
5892 }
5893
5894 /* If we have a registered external DIE for DECL return a new DIE for
5895 the concrete instance with an appropriate abstract origin. */
5896
5897 static dw_die_ref
5898 maybe_create_die_with_external_ref (tree decl)
5899 {
5900 if (!external_die_map)
5901 return NULL;
5902 sym_off_pair *desc = external_die_map->get (decl);
5903 if (!desc)
5904 return NULL;
5905
5906 const char *sym = desc->sym;
5907 unsigned HOST_WIDE_INT off = desc->off;
5908
5909 in_lto_p = false;
5910 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5911 ? lookup_block_die (decl) : lookup_decl_die (decl));
5912 gcc_assert (!die);
5913 in_lto_p = true;
5914
5915 tree ctx;
5916 dw_die_ref parent = NULL;
5917 /* Need to lookup a DIE for the decls context - the containing
5918 function or translation unit. */
5919 if (TREE_CODE (decl) == BLOCK)
5920 {
5921 ctx = BLOCK_SUPERCONTEXT (decl);
5922 /* ??? We do not output DIEs for all scopes thus skip as
5923 many DIEs as needed. */
5924 while (TREE_CODE (ctx) == BLOCK
5925 && !lookup_block_die (ctx))
5926 ctx = BLOCK_SUPERCONTEXT (ctx);
5927 }
5928 else
5929 ctx = DECL_CONTEXT (decl);
5930 /* Peel types in the context stack. */
5931 while (ctx && TYPE_P (ctx))
5932 ctx = TYPE_CONTEXT (ctx);
5933 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5934 if (debug_info_level <= DINFO_LEVEL_TERSE)
5935 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5936 ctx = DECL_CONTEXT (ctx);
5937 if (ctx)
5938 {
5939 if (TREE_CODE (ctx) == BLOCK)
5940 parent = lookup_block_die (ctx);
5941 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5942 /* Keep the 1:1 association during WPA. */
5943 && !flag_wpa
5944 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5945 /* Otherwise all late annotations go to the main CU which
5946 imports the original CUs. */
5947 parent = comp_unit_die ();
5948 else if (TREE_CODE (ctx) == FUNCTION_DECL
5949 && TREE_CODE (decl) != FUNCTION_DECL
5950 && TREE_CODE (decl) != PARM_DECL
5951 && TREE_CODE (decl) != RESULT_DECL
5952 && TREE_CODE (decl) != BLOCK)
5953 /* Leave function local entities parent determination to when
5954 we process scope vars. */
5955 ;
5956 else
5957 parent = lookup_decl_die (ctx);
5958 }
5959 else
5960 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5961 Handle this case gracefully by globalizing stuff. */
5962 parent = comp_unit_die ();
5963 /* Create a DIE "stub". */
5964 switch (TREE_CODE (decl))
5965 {
5966 case TRANSLATION_UNIT_DECL:
5967 {
5968 die = comp_unit_die ();
5969 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5970 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5971 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5972 to create a DIE for the original CUs. */
5973 return die;
5974 }
5975 case NAMESPACE_DECL:
5976 if (is_fortran (decl))
5977 die = new_die (DW_TAG_module, parent, decl);
5978 else
5979 die = new_die (DW_TAG_namespace, parent, decl);
5980 break;
5981 case FUNCTION_DECL:
5982 die = new_die (DW_TAG_subprogram, parent, decl);
5983 break;
5984 case VAR_DECL:
5985 die = new_die (DW_TAG_variable, parent, decl);
5986 break;
5987 case RESULT_DECL:
5988 die = new_die (DW_TAG_variable, parent, decl);
5989 break;
5990 case PARM_DECL:
5991 die = new_die (DW_TAG_formal_parameter, parent, decl);
5992 break;
5993 case CONST_DECL:
5994 die = new_die (DW_TAG_constant, parent, decl);
5995 break;
5996 case LABEL_DECL:
5997 die = new_die (DW_TAG_label, parent, decl);
5998 break;
5999 case BLOCK:
6000 die = new_die (DW_TAG_lexical_block, parent, decl);
6001 break;
6002 default:
6003 gcc_unreachable ();
6004 }
6005 if (TREE_CODE (decl) == BLOCK)
6006 equate_block_to_die (decl, die);
6007 else
6008 equate_decl_number_to_die (decl, die);
6009
6010 add_desc_attribute (die, decl);
6011
6012 /* Add a reference to the DIE providing early debug at $sym + off. */
6013 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6014
6015 return die;
6016 }
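
/* Illustrative shape of the result for a global variable V whose early
   debug was streamed at SYM + OFF by another unit (names hypothetical):

     DW_TAG_variable                          <- stub created here
       DW_AT_abstract_origin -> SYM + OFF

   The stub is parented into the LTRANS compile unit or the containing
   scope, so late annotations (e.g. locations) attach to it while the
   early attributes remain in the referenced DIE.  */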
6017
6018 /* Returns a hash value for X (which really is a var_loc_list). */
6019
6020 inline hashval_t
6021 decl_loc_hasher::hash (var_loc_list *x)
6022 {
6023 return (hashval_t) x->decl_id;
6024 }
6025
6026 /* Return nonzero if decl_id of var_loc_list X is the same as
6027 UID of decl *Y. */
6028
6029 inline bool
6030 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6031 {
6032 return (x->decl_id == DECL_UID (y));
6033 }
6034
6035 /* Return the var_loc list associated with a given declaration. */
6036
6037 static inline var_loc_list *
6038 lookup_decl_loc (const_tree decl)
6039 {
6040 if (!decl_loc_table)
6041 return NULL;
6042 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6043 }
6044
6045 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6046
6047 inline hashval_t
6048 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6049 {
6050 return (hashval_t) x->decl_id;
6051 }
6052
6053 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6054 UID of decl *Y. */
6055
6056 inline bool
6057 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6058 {
6059 return (x->decl_id == DECL_UID (y));
6060 }
6061
6062 /* Equate a DIE to a particular declaration. */
6063
6064 static void
6065 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6066 {
6067 unsigned int decl_id = DECL_UID (decl);
6068
6069 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6070 decl_die->decl_id = decl_id;
6071 }
6072
6073 /* Return how many bits the PIECE EXPR_LIST covers. */
6074
6075 static HOST_WIDE_INT
6076 decl_piece_bitsize (rtx piece)
6077 {
6078 int ret = (int) GET_MODE (piece);
6079 if (ret)
6080 return ret;
6081 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6082 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6083 return INTVAL (XEXP (XEXP (piece, 0), 0));
6084 }
6085
6086 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6087
6088 static rtx *
6089 decl_piece_varloc_ptr (rtx piece)
6090 {
6091 if ((int) GET_MODE (piece))
6092 return &XEXP (piece, 0);
6093 else
6094 return &XEXP (XEXP (piece, 0), 1);
6095 }
6096
6097 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6098 NEXT is the chain of following piece nodes. */
6099
6100 static rtx_expr_list *
6101 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6102 {
6103 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6104 return alloc_EXPR_LIST (bitsize, loc_note, next);
6105 else
6106 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6107 GEN_INT (bitsize),
6108 loc_note), next);
6109 }
6110
6111 /* Return rtx that should be stored into loc field for
6112 LOC_NOTE and BITPOS/BITSIZE. */
6113
6114 static rtx
6115 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6116 HOST_WIDE_INT bitsize)
6117 {
6118 if (bitsize != -1)
6119 {
6120 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6121 if (bitpos != 0)
6122 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6123 }
6124 return loc_note;
6125 }
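
/* A worked example: for a piece covering bits 32..63 described by
   LOC_NOTE (bitpos 32, bitsize 32), construct_piece_list builds

     [32 bits, no location]  ->  [32 bits, LOC_NOTE]  ->  NULL

   where each node is an EXPR_LIST whose bit count is stashed in the
   mode field (or in a CONCAT with a CONST_INT when it exceeds
   MAX_MACHINE_MODE) and whose first operand holds the location
   note.  */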
6126
6127 /* This function either modifies location piece list *DEST in
6128 place (if SRC and INNER are NULL), or copies location piece list
6129 *SRC to *DEST while modifying it. Location BITPOS is modified
6130 to contain LOC_NOTE; any pieces overlapping it are removed (or
6131 not copied), and if needed some padding around it is added.
6132 When modifying in place, DEST should point to the EXPR_LIST where
6133 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6134 to the start of the whole list and INNER points to the EXPR_LIST
6135 where earlier pieces cover PIECE_BITPOS bits. */
6136
6137 static void
6138 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6139 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6140 HOST_WIDE_INT bitsize, rtx loc_note)
6141 {
6142 HOST_WIDE_INT diff;
6143 bool copy = inner != NULL;
6144
6145 if (copy)
6146 {
6147 /* First copy all nodes preceding the current bitpos. */
6148 while (src != inner)
6149 {
6150 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6151 decl_piece_bitsize (*src), NULL_RTX);
6152 dest = &XEXP (*dest, 1);
6153 src = &XEXP (*src, 1);
6154 }
6155 }
6156 /* Add padding if needed. */
6157 if (bitpos != piece_bitpos)
6158 {
6159 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6160 copy ? NULL_RTX : *dest);
6161 dest = &XEXP (*dest, 1);
6162 }
6163 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6164 {
6165 gcc_assert (!copy);
6166 /* A piece with correct bitpos and bitsize already exists,
6167 just update the location for it and return. */
6168 *decl_piece_varloc_ptr (*dest) = loc_note;
6169 return;
6170 }
6171 /* Add the piece that changed. */
6172 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6173 dest = &XEXP (*dest, 1);
6174 /* Skip over pieces that overlap it. */
6175 diff = bitpos - piece_bitpos + bitsize;
6176 if (!copy)
6177 src = dest;
6178 while (diff > 0 && *src)
6179 {
6180 rtx piece = *src;
6181 diff -= decl_piece_bitsize (piece);
6182 if (copy)
6183 src = &XEXP (piece, 1);
6184 else
6185 {
6186 *src = XEXP (piece, 1);
6187 free_EXPR_LIST_node (piece);
6188 }
6189 }
6190 /* Add padding if needed. */
6191 if (diff < 0 && *src)
6192 {
6193 if (!copy)
6194 dest = src;
6195 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6196 dest = &XEXP (*dest, 1);
6197 }
6198 if (!copy)
6199 return;
6200 /* Finally copy all nodes following it. */
6201 while (*src)
6202 {
6203 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6204 decl_piece_bitsize (*src), NULL_RTX);
6205 dest = &XEXP (*dest, 1);
6206 src = &XEXP (*src, 1);
6207 }
6208 }
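
/* Continuing the worked example after construct_piece_list: if a later
   note at the same label supplies a location for bits 0..31 only, the
   in-place form (SRC and INNER NULL) finds the leading 32-bit padding
   node at bitpos 0, stores the new note as that node's location, and
   leaves the existing piece for bits 32..63 untouched.  */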
6209
6210 /* Add a variable location node to the linked list for DECL. */
6211
6212 static struct var_loc_node *
6213 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6214 {
6215 unsigned int decl_id;
6216 var_loc_list *temp;
6217 struct var_loc_node *loc = NULL;
6218 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6219
6220 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6221 {
6222 tree realdecl = DECL_DEBUG_EXPR (decl);
6223 if (handled_component_p (realdecl)
6224 || (TREE_CODE (realdecl) == MEM_REF
6225 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6226 {
6227 bool reverse;
6228 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6229 &bitsize, &reverse);
6230 if (!innerdecl
6231 || !DECL_P (innerdecl)
6232 || DECL_IGNORED_P (innerdecl)
6233 || TREE_STATIC (innerdecl)
6234 || bitsize == 0
6235 || bitpos + bitsize > 256)
6236 return NULL;
6237 decl = innerdecl;
6238 }
6239 }
6240
6241 decl_id = DECL_UID (decl);
6242 var_loc_list **slot
6243 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6244 if (*slot == NULL)
6245 {
6246 temp = ggc_cleared_alloc<var_loc_list> ();
6247 temp->decl_id = decl_id;
6248 *slot = temp;
6249 }
6250 else
6251 temp = *slot;
6252
6253 /* For PARM_DECLs try to keep around the original incoming value,
6254 even if that means we'll emit a zero-range .debug_loc entry. */
6255 if (temp->last
6256 && temp->first == temp->last
6257 && TREE_CODE (decl) == PARM_DECL
6258 && NOTE_P (temp->first->loc)
6259 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6260 && DECL_INCOMING_RTL (decl)
6261 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6262 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6263 == GET_CODE (DECL_INCOMING_RTL (decl))
6264 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6265 && (bitsize != -1
6266 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6267 NOTE_VAR_LOCATION_LOC (loc_note))
6268 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6269 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6270 {
6271 loc = ggc_cleared_alloc<var_loc_node> ();
6272 temp->first->next = loc;
6273 temp->last = loc;
6274 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6275 }
6276 else if (temp->last)
6277 {
6278 struct var_loc_node *last = temp->last, *unused = NULL;
6279 rtx *piece_loc = NULL, last_loc_note;
6280 HOST_WIDE_INT piece_bitpos = 0;
6281 if (last->next)
6282 {
6283 last = last->next;
6284 gcc_assert (last->next == NULL);
6285 }
6286 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6287 {
6288 piece_loc = &last->loc;
6289 do
6290 {
6291 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6292 if (piece_bitpos + cur_bitsize > bitpos)
6293 break;
6294 piece_bitpos += cur_bitsize;
6295 piece_loc = &XEXP (*piece_loc, 1);
6296 }
6297 while (*piece_loc);
6298 }
6299 /* TEMP->LAST here is a pointer to either the last-but-one or the
6300 last element in the chained list; LAST is a pointer to the
6301 last element. */
6302 if (label && strcmp (last->label, label) == 0 && last->view == view)
6303 {
6304 /* For SRA-optimized variables, if there weren't any real
6305 insns since the last note, just modify the last node. */
6306 if (piece_loc != NULL)
6307 {
6308 adjust_piece_list (piece_loc, NULL, NULL,
6309 bitpos, piece_bitpos, bitsize, loc_note);
6310 return NULL;
6311 }
6312 /* If the last note doesn't cover any instructions, remove it. */
6313 if (temp->last != last)
6314 {
6315 temp->last->next = NULL;
6316 unused = last;
6317 last = temp->last;
6318 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6319 }
6320 else
6321 {
6322 gcc_assert (temp->first == temp->last
6323 || (temp->first->next == temp->last
6324 && TREE_CODE (decl) == PARM_DECL));
6325 memset (temp->last, '\0', sizeof (*temp->last));
6326 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6327 return temp->last;
6328 }
6329 }
6330 if (bitsize == -1 && NOTE_P (last->loc))
6331 last_loc_note = last->loc;
6332 else if (piece_loc != NULL
6333 && *piece_loc != NULL_RTX
6334 && piece_bitpos == bitpos
6335 && decl_piece_bitsize (*piece_loc) == bitsize)
6336 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6337 else
6338 last_loc_note = NULL_RTX;
6339 /* If the current location is the same as the end of the list,
6340 and either both or neither of the locations is uninitialized,
6341 we have nothing to do. */
6342 if (last_loc_note == NULL_RTX
6343 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6344 NOTE_VAR_LOCATION_LOC (loc_note)))
6345 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6346 != NOTE_VAR_LOCATION_STATUS (loc_note))
6347 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6348 == VAR_INIT_STATUS_UNINITIALIZED)
6349 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6350 == VAR_INIT_STATUS_UNINITIALIZED))))
6351 {
6352 /* Add LOC to the end of list and update LAST. If the last
6353 element of the list has been removed above, reuse its
6354 memory for the new node, otherwise allocate a new one. */
6355 if (unused)
6356 {
6357 loc = unused;
6358 memset (loc, '\0', sizeof (*loc));
6359 }
6360 else
6361 loc = ggc_cleared_alloc<var_loc_node> ();
6362 if (bitsize == -1 || piece_loc == NULL)
6363 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6364 else
6365 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6366 bitpos, piece_bitpos, bitsize, loc_note);
6367 last->next = loc;
6368 /* Ensure TEMP->LAST will point either to the new last but one
6369 element of the chain, or to the last element in it. */
6370 if (last != temp->last)
6371 temp->last = last;
6372 }
6373 else if (unused)
6374 ggc_free (unused);
6375 }
6376 else
6377 {
6378 loc = ggc_cleared_alloc<var_loc_node> ();
6379 temp->first = loc;
6380 temp->last = loc;
6381 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6382 }
6383 return loc;
6384 }
6385 \f
6386 /* Keep track of the number of spaces used to indent the
6387 output of the debugging routines that print the structure of
6388 the DIE internal representation. */
6389 static int print_indent;
6390
6391 /* Indent the line the number of spaces given by print_indent. */
6392
6393 static inline void
6394 print_spaces (FILE *outfile)
6395 {
6396 fprintf (outfile, "%*s", print_indent, "");
6397 }
6398
6399 /* Print a type signature in hex. */
6400
6401 static inline void
6402 print_signature (FILE *outfile, char *sig)
6403 {
6404 int i;
6405
6406 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6407 fprintf (outfile, "%02x", sig[i] & 0xff);
6408 }
6409
6410 static inline void
6411 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6412 {
6413 if (discr_value->pos)
6414 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6415 else
6416 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6417 }
6418
6419 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6420
6421 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6422 RECURSE, output location descriptor operations. */
6423
6424 static void
6425 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6426 {
6427 switch (val->val_class)
6428 {
6429 case dw_val_class_addr:
6430 fprintf (outfile, "address");
6431 break;
6432 case dw_val_class_offset:
6433 fprintf (outfile, "offset");
6434 break;
6435 case dw_val_class_loc:
6436 fprintf (outfile, "location descriptor");
6437 if (val->v.val_loc == NULL)
6438 fprintf (outfile, " -> <null>\n");
6439 else if (recurse)
6440 {
6441 fprintf (outfile, ":\n");
6442 print_indent += 4;
6443 print_loc_descr (val->v.val_loc, outfile);
6444 print_indent -= 4;
6445 }
6446 else
6447 {
6448 if (flag_dump_noaddr || flag_dump_unnumbered)
6449 fprintf (outfile, " #\n");
6450 else
6451 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6452 }
6453 break;
6454 case dw_val_class_loc_list:
6455 fprintf (outfile, "location list -> label:%s",
6456 val->v.val_loc_list->ll_symbol);
6457 break;
6458 case dw_val_class_view_list:
6459 val = view_list_to_loc_list_val_node (val);
6460 fprintf (outfile, "location list with views -> labels:%s and %s",
6461 val->v.val_loc_list->ll_symbol,
6462 val->v.val_loc_list->vl_symbol);
6463 break;
6464 case dw_val_class_range_list:
6465 fprintf (outfile, "range list");
6466 break;
6467 case dw_val_class_const:
6468 case dw_val_class_const_implicit:
6469 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6470 break;
6471 case dw_val_class_unsigned_const:
6472 case dw_val_class_unsigned_const_implicit:
6473 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6474 break;
6475 case dw_val_class_const_double:
6476 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6477 HOST_WIDE_INT_PRINT_UNSIGNED")",
6478 val->v.val_double.high,
6479 val->v.val_double.low);
6480 break;
6481 case dw_val_class_wide_int:
6482 {
6483 int i = val->v.val_wide->get_len ();
6484 fprintf (outfile, "constant (");
6485 gcc_assert (i > 0);
6486 if (val->v.val_wide->elt (i - 1) == 0)
6487 fprintf (outfile, "0x");
6488 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6489 val->v.val_wide->elt (--i));
6490 while (--i >= 0)
6491 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6492 val->v.val_wide->elt (i));
6493 fprintf (outfile, ")");
6494 break;
6495 }
6496 case dw_val_class_vec:
6497 fprintf (outfile, "floating-point or vector constant");
6498 break;
6499 case dw_val_class_flag:
6500 fprintf (outfile, "%u", val->v.val_flag);
6501 break;
6502 case dw_val_class_die_ref:
6503 if (val->v.val_die_ref.die != NULL)
6504 {
6505 dw_die_ref die = val->v.val_die_ref.die;
6506
6507 if (die->comdat_type_p)
6508 {
6509 fprintf (outfile, "die -> signature: ");
6510 print_signature (outfile,
6511 die->die_id.die_type_node->signature);
6512 }
6513 else if (die->die_id.die_symbol)
6514 {
6515 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6516 if (die->with_offset)
6517 fprintf (outfile, " + %ld", die->die_offset);
6518 }
6519 else
6520 fprintf (outfile, "die -> %ld", die->die_offset);
6521 if (flag_dump_noaddr || flag_dump_unnumbered)
6522 fprintf (outfile, " #");
6523 else
6524 fprintf (outfile, " (%p)", (void *) die);
6525 }
6526 else
6527 fprintf (outfile, "die -> <null>");
6528 break;
6529 case dw_val_class_vms_delta:
6530 fprintf (outfile, "delta: @slotcount(%s-%s)",
6531 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6532 break;
6533 case dw_val_class_symview:
6534 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6535 break;
6536 case dw_val_class_lbl_id:
6537 case dw_val_class_lineptr:
6538 case dw_val_class_macptr:
6539 case dw_val_class_loclistsptr:
6540 case dw_val_class_high_pc:
6541 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6542 break;
6543 case dw_val_class_str:
6544 if (val->v.val_str->str != NULL)
6545 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6546 else
6547 fprintf (outfile, "<null>");
6548 break;
6549 case dw_val_class_file:
6550 case dw_val_class_file_implicit:
6551 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6552 val->v.val_file->emitted_number);
6553 break;
6554 case dw_val_class_data8:
6555 {
6556 int i;
6557
6558 for (i = 0; i < 8; i++)
6559 fprintf (outfile, "%02x", val->v.val_data8[i]);
6560 break;
6561 }
6562 case dw_val_class_discr_value:
6563 print_discr_value (outfile, &val->v.val_discr_value);
6564 break;
6565 case dw_val_class_discr_list:
6566 for (dw_discr_list_ref node = val->v.val_discr_list;
6567 node != NULL;
6568 node = node->dw_discr_next)
6569 {
6570 if (node->dw_discr_range)
6571 {
6572 print_discr_value (outfile, &node->dw_discr_lower_bound);
6573 fprintf (outfile, " .. ");
6574 print_discr_value (outfile, &node->dw_discr_upper_bound);
6575 }
6576 else
6577 print_discr_value (outfile, &node->dw_discr_lower_bound);
6578
6579 if (node->dw_discr_next != NULL)
6580 fprintf (outfile, " | ");
6581 }
6582 default:
6583 break;
6584 }
6585 }
6586
6587 /* Likewise, for a DIE attribute. */
6588
6589 static void
6590 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6591 {
6592 print_dw_val (&a->dw_attr_val, recurse, outfile);
6593 }
6594
6595
6596 /* Print the list of operands in the LOC location description to OUTFILE. This
6597 routine is a debugging aid only. */
6598
6599 static void
6600 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6601 {
6602 dw_loc_descr_ref l = loc;
6603
6604 if (loc == NULL)
6605 {
6606 print_spaces (outfile);
6607 fprintf (outfile, "<null>\n");
6608 return;
6609 }
6610
6611 for (l = loc; l != NULL; l = l->dw_loc_next)
6612 {
6613 print_spaces (outfile);
6614 if (flag_dump_noaddr || flag_dump_unnumbered)
6615 fprintf (outfile, "#");
6616 else
6617 fprintf (outfile, "(%p)", (void *) l);
6618 fprintf (outfile, " %s",
6619 dwarf_stack_op_name (l->dw_loc_opc));
6620 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6621 {
6622 fprintf (outfile, " ");
6623 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6624 }
6625 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6626 {
6627 fprintf (outfile, ", ");
6628 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6629 }
6630 fprintf (outfile, "\n");
6631 }
6632 }
6633
6634 /* Print the information associated with a given DIE, and its children.
6635 This routine is a debugging aid only. */
6636
6637 static void
6638 print_die (dw_die_ref die, FILE *outfile)
6639 {
6640 dw_attr_node *a;
6641 dw_die_ref c;
6642 unsigned ix;
6643
6644 print_spaces (outfile);
6645 fprintf (outfile, "DIE %4ld: %s ",
6646 die->die_offset, dwarf_tag_name (die->die_tag));
6647 if (flag_dump_noaddr || flag_dump_unnumbered)
6648 fprintf (outfile, "#\n");
6649 else
6650 fprintf (outfile, "(%p)\n", (void*) die);
6651 print_spaces (outfile);
6652 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6653 fprintf (outfile, " offset: %ld", die->die_offset);
6654 fprintf (outfile, " mark: %d\n", die->die_mark);
6655
6656 if (die->comdat_type_p)
6657 {
6658 print_spaces (outfile);
6659 fprintf (outfile, " signature: ");
6660 print_signature (outfile, die->die_id.die_type_node->signature);
6661 fprintf (outfile, "\n");
6662 }
6663
6664 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6665 {
6666 print_spaces (outfile);
6667 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6668
6669 print_attribute (a, true, outfile);
6670 fprintf (outfile, "\n");
6671 }
6672
6673 if (die->die_child != NULL)
6674 {
6675 print_indent += 4;
6676 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6677 print_indent -= 4;
6678 }
6679 if (print_indent == 0)
6680 fprintf (outfile, "\n");
6681 }
6682
6683 /* Print the list of operations in the LOC location description. */
6684
6685 DEBUG_FUNCTION void
6686 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6687 {
6688 print_loc_descr (loc, stderr);
6689 }
6690
6691 /* Print the information collected for a given DIE. */
6692
6693 DEBUG_FUNCTION void
6694 debug_dwarf_die (dw_die_ref die)
6695 {
6696 print_die (die, stderr);
6697 }
6698
6699 DEBUG_FUNCTION void
6700 debug (die_struct &ref)
6701 {
6702 print_die (&ref, stderr);
6703 }
6704
6705 DEBUG_FUNCTION void
6706 debug (die_struct *ptr)
6707 {
6708 if (ptr)
6709 debug (*ptr);
6710 else
6711 fprintf (stderr, "<nil>\n");
6712 }
6713
6714
6715 /* Print all DWARF information collected for the compilation unit.
6716 This routine is a debugging aid only. */
6717
6718 DEBUG_FUNCTION void
6719 debug_dwarf (void)
6720 {
6721 print_indent = 0;
6722 print_die (comp_unit_die (), stderr);
6723 }
6724
6725 /* Verify the DIE tree structure. */
6726
6727 DEBUG_FUNCTION void
6728 verify_die (dw_die_ref die)
6729 {
6730 gcc_assert (!die->die_mark);
6731 if (die->die_parent == NULL
6732 && die->die_sib == NULL)
6733 return;
6734 /* Verify the die_sib list is cyclic. */
6735 dw_die_ref x = die;
6736 do
6737 {
6738 x->die_mark = 1;
6739 x = x->die_sib;
6740 }
6741 while (x && !x->die_mark);
6742 gcc_assert (x == die);
6743 x = die;
6744 do
6745 {
6746 /* Verify all dies have the same parent. */
6747 gcc_assert (x->die_parent == die->die_parent);
6748 if (x->die_child)
6749 {
6750 /* Verify the child has the proper parent and recurse. */
6751 gcc_assert (x->die_child->die_parent == x);
6752 verify_die (x->die_child);
6753 }
6754 x->die_mark = 0;
6755 x = x->die_sib;
6756 }
6757 while (x && x->die_mark);
6758 }
6759
6760 /* Sanity checks on DIEs. */
6761
6762 static void
6763 check_die (dw_die_ref die)
6764 {
6765 unsigned ix;
6766 dw_attr_node *a;
6767 bool inline_found = false;
6768 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6769 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6770 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6771 {
6772 switch (a->dw_attr)
6773 {
6774 case DW_AT_inline:
6775 if (a->dw_attr_val.v.val_unsigned)
6776 inline_found = true;
6777 break;
6778 case DW_AT_location:
6779 ++n_location;
6780 break;
6781 case DW_AT_low_pc:
6782 ++n_low_pc;
6783 break;
6784 case DW_AT_high_pc:
6785 ++n_high_pc;
6786 break;
6787 case DW_AT_artificial:
6788 ++n_artificial;
6789 break;
6790 case DW_AT_decl_column:
6791 ++n_decl_column;
6792 break;
6793 case DW_AT_decl_line:
6794 ++n_decl_line;
6795 break;
6796 case DW_AT_decl_file:
6797 ++n_decl_file;
6798 break;
6799 default:
6800 break;
6801 }
6802 }
6803 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6804 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6805 {
6806 fprintf (stderr, "Duplicate attributes in DIE:\n");
6807 debug_dwarf_die (die);
6808 gcc_unreachable ();
6809 }
6810 if (inline_found)
6811 {
6812 /* A debugging information entry that is a member of an abstract
6813 instance tree [that has DW_AT_inline] should not contain any
6814 attributes which describe aspects of the subroutine which vary
6815 between distinct inlined expansions or distinct out-of-line
6816 expansions. */
6817 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6818 gcc_assert (a->dw_attr != DW_AT_low_pc
6819 && a->dw_attr != DW_AT_high_pc
6820 && a->dw_attr != DW_AT_location
6821 && a->dw_attr != DW_AT_frame_base
6822 && a->dw_attr != DW_AT_call_all_calls
6823 && a->dw_attr != DW_AT_GNU_all_call_sites);
6824 }
6825 }
6826 \f
6827 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6828 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6829 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6830
6831 /* Calculate the checksum of a location expression. */
6832
6833 static inline void
6834 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6835 {
6836 int tem;
6837 inchash::hash hstate;
6838 hashval_t hash;
6839
6840 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6841 CHECKSUM (tem);
6842 hash_loc_operands (loc, hstate);
6843 hash = hstate.end();
6844 CHECKSUM (hash);
6845 }
6846
6847 /* Calculate the checksum of an attribute. */
6848
6849 static void
6850 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6851 {
6852 dw_loc_descr_ref loc;
6853 rtx r;
6854
6855 CHECKSUM (at->dw_attr);
6856
6857 /* We don't care that this was compiled with a different compiler
6858 snapshot; if the output is the same, that's what matters. */
6859 if (at->dw_attr == DW_AT_producer)
6860 return;
6861
6862 switch (AT_class (at))
6863 {
6864 case dw_val_class_const:
6865 case dw_val_class_const_implicit:
6866 CHECKSUM (at->dw_attr_val.v.val_int);
6867 break;
6868 case dw_val_class_unsigned_const:
6869 case dw_val_class_unsigned_const_implicit:
6870 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6871 break;
6872 case dw_val_class_const_double:
6873 CHECKSUM (at->dw_attr_val.v.val_double);
6874 break;
6875 case dw_val_class_wide_int:
6876 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6877 get_full_len (*at->dw_attr_val.v.val_wide)
6878 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6879 break;
6880 case dw_val_class_vec:
6881 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6882 (at->dw_attr_val.v.val_vec.length
6883 * at->dw_attr_val.v.val_vec.elt_size));
6884 break;
6885 case dw_val_class_flag:
6886 CHECKSUM (at->dw_attr_val.v.val_flag);
6887 break;
6888 case dw_val_class_str:
6889 CHECKSUM_STRING (AT_string (at));
6890 break;
6891
6892 case dw_val_class_addr:
6893 r = AT_addr (at);
6894 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6895 CHECKSUM_STRING (XSTR (r, 0));
6896 break;
6897
6898 case dw_val_class_offset:
6899 CHECKSUM (at->dw_attr_val.v.val_offset);
6900 break;
6901
6902 case dw_val_class_loc:
6903 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6904 loc_checksum (loc, ctx);
6905 break;
6906
6907 case dw_val_class_die_ref:
6908 die_checksum (AT_ref (at), ctx, mark);
6909 break;
6910
6911 case dw_val_class_fde_ref:
6912 case dw_val_class_vms_delta:
6913 case dw_val_class_symview:
6914 case dw_val_class_lbl_id:
6915 case dw_val_class_lineptr:
6916 case dw_val_class_macptr:
6917 case dw_val_class_loclistsptr:
6918 case dw_val_class_high_pc:
6919 break;
6920
6921 case dw_val_class_file:
6922 case dw_val_class_file_implicit:
6923 CHECKSUM_STRING (AT_file (at)->filename);
6924 break;
6925
6926 case dw_val_class_data8:
6927 CHECKSUM (at->dw_attr_val.v.val_data8);
6928 break;
6929
6930 default:
6931 break;
6932 }
6933 }
6934
6935 /* Calculate the checksum of a DIE. */
6936
6937 static void
6938 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6939 {
6940 dw_die_ref c;
6941 dw_attr_node *a;
6942 unsigned ix;
6943
6944 /* To avoid infinite recursion. */
6945 if (die->die_mark)
6946 {
6947 CHECKSUM (die->die_mark);
6948 return;
6949 }
6950 die->die_mark = ++(*mark);
6951
6952 CHECKSUM (die->die_tag);
6953
6954 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6955 attr_checksum (a, ctx, mark);
6956
6957 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6958 }
6959
6960 #undef CHECKSUM
6961 #undef CHECKSUM_BLOCK
6962 #undef CHECKSUM_STRING
6963
6964 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6965 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6966 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6967 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6968 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6969 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6970 #define CHECKSUM_ATTR(FOO) \
6971 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6972
6973 /* Calculate the checksum of a number in signed LEB128 format. */
6974
6975 static void
6976 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6977 {
6978 unsigned char byte;
6979 bool more;
6980
6981 while (1)
6982 {
6983 byte = (value & 0x7f);
6984 value >>= 7;
6985 more = !((value == 0 && (byte & 0x40) == 0)
6986 || (value == -1 && (byte & 0x40) != 0));
6987 if (more)
6988 byte |= 0x80;
6989 CHECKSUM (byte);
6990 if (!more)
6991 break;
6992 }
6993 }
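
/* A worked example: -2 checksums as the single byte 0x7e (the shifted
   value is -1 and bit 0x40 of the byte is set, so the sign is already
   implied), while 64 needs two bytes, 0xc0 0x00, because bit 0x40 of
   the first byte would otherwise be read back as a sign bit.  */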
6994
6995 /* Calculate the checksum of a number in unsigned LEB128 format. */
6996
6997 static void
6998 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6999 {
7000 while (1)
7001 {
7002 unsigned char byte = (value & 0x7f);
7003 value >>= 7;
7004 if (value != 0)
7005 /* More bytes to follow. */
7006 byte |= 0x80;
7007 CHECKSUM (byte);
7008 if (value == 0)
7009 break;
7010 }
7011 }
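/* For reference, these helpers emit VALUE seven bits at a time, low-order
bits first, with bit 0x80 set on every byte except the last. For example,
checksum_uleb128 (300) feeds the bytes 0xac 0x02 into CTX, and
checksum_sleb128 (-2) feeds the single byte 0x7e, since bit 0x40 of that
byte already carries the sign. */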
7012
7013 /* Checksum the context of the DIE. This adds the names of any
7014 surrounding namespaces or structures to the checksum. */
7015
7016 static void
7017 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7018 {
7019 const char *name;
7020 dw_die_ref spec;
7021 int tag = die->die_tag;
7022
7023 if (tag != DW_TAG_namespace
7024 && tag != DW_TAG_structure_type
7025 && tag != DW_TAG_class_type)
7026 return;
7027
7028 name = get_AT_string (die, DW_AT_name);
7029
7030 spec = get_AT_ref (die, DW_AT_specification);
7031 if (spec != NULL)
7032 die = spec;
7033
7034 if (die->die_parent != NULL)
7035 checksum_die_context (die->die_parent, ctx);
7036
7037 CHECKSUM_ULEB128 ('C');
7038 CHECKSUM_ULEB128 (tag);
7039 if (name != NULL)
7040 CHECKSUM_STRING (name);
7041 }
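/* As an illustration, for a nested type such as namespace N { struct S { ... }; },
checksumming the context of a DIE inside S walks outward-in and so appends
'C', DW_TAG_namespace, "N", then 'C', DW_TAG_structure_type, "S" (each name
including its trailing NUL, per CHECKSUM_STRING above). */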
7042
7043 /* Calculate the checksum of a location expression. */
7044
7045 static inline void
7046 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7047 {
7048 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7049 were emitted as a DW_FORM_sdata instead of a location expression. */
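/* (DW_AT_data_member_location, for instance, may be emitted either as a plain
constant or as such a one-element expression; hashing both forms identically
keeps the signature independent of that choice, in line with the DWARF 4
type-signature computation rules.) */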
7050 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7051 {
7052 CHECKSUM_ULEB128 (DW_FORM_sdata);
7053 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7054 return;
7055 }
7056
7057 /* Otherwise, just checksum the raw location expression. */
7058 while (loc != NULL)
7059 {
7060 inchash::hash hstate;
7061 hashval_t hash;
7062
7063 CHECKSUM_ULEB128 (loc->dtprel);
7064 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7065 hash_loc_operands (loc, hstate);
7066 hash = hstate.end ();
7067 CHECKSUM (hash);
7068 loc = loc->dw_loc_next;
7069 }
7070 }
7071
7072 /* Calculate the checksum of an attribute. */
7073
7074 static void
7075 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7076 struct md5_ctx *ctx, int *mark)
7077 {
7078 dw_loc_descr_ref loc;
7079 rtx r;
7080
7081 if (AT_class (at) == dw_val_class_die_ref)
7082 {
7083 dw_die_ref target_die = AT_ref (at);
7084
7085 /* For pointer and reference types, we checksum only the (qualified)
7086 name of the target type (if there is a name). For friend entries,
7087 we checksum only the (qualified) name of the target type or function.
7088 This allows the checksum to remain the same whether the target type
7089 is complete or not. */
7090 if ((at->dw_attr == DW_AT_type
7091 && (tag == DW_TAG_pointer_type
7092 || tag == DW_TAG_reference_type
7093 || tag == DW_TAG_rvalue_reference_type
7094 || tag == DW_TAG_ptr_to_member_type))
7095 || (at->dw_attr == DW_AT_friend
7096 && tag == DW_TAG_friend))
7097 {
7098 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7099
7100 if (name_attr != NULL)
7101 {
7102 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7103
7104 if (decl == NULL)
7105 decl = target_die;
7106 CHECKSUM_ULEB128 ('N');
7107 CHECKSUM_ULEB128 (at->dw_attr);
7108 if (decl->die_parent != NULL)
7109 checksum_die_context (decl->die_parent, ctx);
7110 CHECKSUM_ULEB128 ('E');
7111 CHECKSUM_STRING (AT_string (name_attr));
7112 return;
7113 }
7114 }
7115
7116 /* For all other references to another DIE, we check to see if the
7117 target DIE has already been visited. If it has, we emit a
7118 backward reference; if not, we descend recursively. */
7119 if (target_die->die_mark > 0)
7120 {
7121 CHECKSUM_ULEB128 ('R');
7122 CHECKSUM_ULEB128 (at->dw_attr);
7123 CHECKSUM_ULEB128 (target_die->die_mark);
7124 }
7125 else
7126 {
7127 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7128
7129 if (decl == NULL)
7130 decl = target_die;
7131 target_die->die_mark = ++(*mark);
7132 CHECKSUM_ULEB128 ('T');
7133 CHECKSUM_ULEB128 (at->dw_attr);
7134 if (decl->die_parent != NULL)
7135 checksum_die_context (decl->die_parent, ctx);
7136 die_checksum_ordered (target_die, ctx, mark);
7137 }
7138 return;
7139 }
7140
7141 CHECKSUM_ULEB128 ('A');
7142 CHECKSUM_ULEB128 (at->dw_attr);
7143
7144 switch (AT_class (at))
7145 {
7146 case dw_val_class_const:
7147 case dw_val_class_const_implicit:
7148 CHECKSUM_ULEB128 (DW_FORM_sdata);
7149 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7150 break;
7151
7152 case dw_val_class_unsigned_const:
7153 case dw_val_class_unsigned_const_implicit:
7154 CHECKSUM_ULEB128 (DW_FORM_sdata);
7155 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7156 break;
7157
7158 case dw_val_class_const_double:
7159 CHECKSUM_ULEB128 (DW_FORM_block);
7160 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7161 CHECKSUM (at->dw_attr_val.v.val_double);
7162 break;
7163
7164 case dw_val_class_wide_int:
7165 CHECKSUM_ULEB128 (DW_FORM_block);
7166 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7167 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7168 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7169 get_full_len (*at->dw_attr_val.v.val_wide)
7170 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7171 break;
7172
7173 case dw_val_class_vec:
7174 CHECKSUM_ULEB128 (DW_FORM_block);
7175 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7176 * at->dw_attr_val.v.val_vec.elt_size);
7177 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7178 (at->dw_attr_val.v.val_vec.length
7179 * at->dw_attr_val.v.val_vec.elt_size));
7180 break;
7181
7182 case dw_val_class_flag:
7183 CHECKSUM_ULEB128 (DW_FORM_flag);
7184 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7185 break;
7186
7187 case dw_val_class_str:
7188 CHECKSUM_ULEB128 (DW_FORM_string);
7189 CHECKSUM_STRING (AT_string (at));
7190 break;
7191
7192 case dw_val_class_addr:
7193 r = AT_addr (at);
7194 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7195 CHECKSUM_ULEB128 (DW_FORM_string);
7196 CHECKSUM_STRING (XSTR (r, 0));
7197 break;
7198
7199 case dw_val_class_offset:
7200 CHECKSUM_ULEB128 (DW_FORM_sdata);
7201 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7202 break;
7203
7204 case dw_val_class_loc:
7205 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7206 loc_checksum_ordered (loc, ctx);
7207 break;
7208
7209 case dw_val_class_fde_ref:
7210 case dw_val_class_symview:
7211 case dw_val_class_lbl_id:
7212 case dw_val_class_lineptr:
7213 case dw_val_class_macptr:
7214 case dw_val_class_loclistsptr:
7215 case dw_val_class_high_pc:
7216 break;
7217
7218 case dw_val_class_file:
7219 case dw_val_class_file_implicit:
7220 CHECKSUM_ULEB128 (DW_FORM_string);
7221 CHECKSUM_STRING (AT_file (at)->filename);
7222 break;
7223
7224 case dw_val_class_data8:
7225 CHECKSUM (at->dw_attr_val.v.val_data8);
7226 break;
7227
7228 default:
7229 break;
7230 }
7231 }
7232
7233 struct checksum_attributes
7234 {
7235 dw_attr_node *at_name;
7236 dw_attr_node *at_type;
7237 dw_attr_node *at_friend;
7238 dw_attr_node *at_accessibility;
7239 dw_attr_node *at_address_class;
7240 dw_attr_node *at_alignment;
7241 dw_attr_node *at_allocated;
7242 dw_attr_node *at_artificial;
7243 dw_attr_node *at_associated;
7244 dw_attr_node *at_binary_scale;
7245 dw_attr_node *at_bit_offset;
7246 dw_attr_node *at_bit_size;
7247 dw_attr_node *at_bit_stride;
7248 dw_attr_node *at_byte_size;
7249 dw_attr_node *at_byte_stride;
7250 dw_attr_node *at_const_value;
7251 dw_attr_node *at_containing_type;
7252 dw_attr_node *at_count;
7253 dw_attr_node *at_data_location;
7254 dw_attr_node *at_data_member_location;
7255 dw_attr_node *at_decimal_scale;
7256 dw_attr_node *at_decimal_sign;
7257 dw_attr_node *at_default_value;
7258 dw_attr_node *at_digit_count;
7259 dw_attr_node *at_discr;
7260 dw_attr_node *at_discr_list;
7261 dw_attr_node *at_discr_value;
7262 dw_attr_node *at_encoding;
7263 dw_attr_node *at_endianity;
7264 dw_attr_node *at_explicit;
7265 dw_attr_node *at_is_optional;
7266 dw_attr_node *at_location;
7267 dw_attr_node *at_lower_bound;
7268 dw_attr_node *at_mutable;
7269 dw_attr_node *at_ordering;
7270 dw_attr_node *at_picture_string;
7271 dw_attr_node *at_prototyped;
7272 dw_attr_node *at_small;
7273 dw_attr_node *at_segment;
7274 dw_attr_node *at_string_length;
7275 dw_attr_node *at_string_length_bit_size;
7276 dw_attr_node *at_string_length_byte_size;
7277 dw_attr_node *at_threads_scaled;
7278 dw_attr_node *at_upper_bound;
7279 dw_attr_node *at_use_location;
7280 dw_attr_node *at_use_UTF8;
7281 dw_attr_node *at_variable_parameter;
7282 dw_attr_node *at_virtuality;
7283 dw_attr_node *at_visibility;
7284 dw_attr_node *at_vtable_elem_location;
7285 };
7286
7287 /* Collect the attributes that we will want to use for the checksum. */
7288
7289 static void
7290 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7291 {
7292 dw_attr_node *a;
7293 unsigned ix;
7294
7295 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7296 {
7297 switch (a->dw_attr)
7298 {
7299 case DW_AT_name:
7300 attrs->at_name = a;
7301 break;
7302 case DW_AT_type:
7303 attrs->at_type = a;
7304 break;
7305 case DW_AT_friend:
7306 attrs->at_friend = a;
7307 break;
7308 case DW_AT_accessibility:
7309 attrs->at_accessibility = a;
7310 break;
7311 case DW_AT_address_class:
7312 attrs->at_address_class = a;
7313 break;
7314 case DW_AT_alignment:
7315 attrs->at_alignment = a;
7316 break;
7317 case DW_AT_allocated:
7318 attrs->at_allocated = a;
7319 break;
7320 case DW_AT_artificial:
7321 attrs->at_artificial = a;
7322 break;
7323 case DW_AT_associated:
7324 attrs->at_associated = a;
7325 break;
7326 case DW_AT_binary_scale:
7327 attrs->at_binary_scale = a;
7328 break;
7329 case DW_AT_bit_offset:
7330 attrs->at_bit_offset = a;
7331 break;
7332 case DW_AT_bit_size:
7333 attrs->at_bit_size = a;
7334 break;
7335 case DW_AT_bit_stride:
7336 attrs->at_bit_stride = a;
7337 break;
7338 case DW_AT_byte_size:
7339 attrs->at_byte_size = a;
7340 break;
7341 case DW_AT_byte_stride:
7342 attrs->at_byte_stride = a;
7343 break;
7344 case DW_AT_const_value:
7345 attrs->at_const_value = a;
7346 break;
7347 case DW_AT_containing_type:
7348 attrs->at_containing_type = a;
7349 break;
7350 case DW_AT_count:
7351 attrs->at_count = a;
7352 break;
7353 case DW_AT_data_location:
7354 attrs->at_data_location = a;
7355 break;
7356 case DW_AT_data_member_location:
7357 attrs->at_data_member_location = a;
7358 break;
7359 case DW_AT_decimal_scale:
7360 attrs->at_decimal_scale = a;
7361 break;
7362 case DW_AT_decimal_sign:
7363 attrs->at_decimal_sign = a;
7364 break;
7365 case DW_AT_default_value:
7366 attrs->at_default_value = a;
7367 break;
7368 case DW_AT_digit_count:
7369 attrs->at_digit_count = a;
7370 break;
7371 case DW_AT_discr:
7372 attrs->at_discr = a;
7373 break;
7374 case DW_AT_discr_list:
7375 attrs->at_discr_list = a;
7376 break;
7377 case DW_AT_discr_value:
7378 attrs->at_discr_value = a;
7379 break;
7380 case DW_AT_encoding:
7381 attrs->at_encoding = a;
7382 break;
7383 case DW_AT_endianity:
7384 attrs->at_endianity = a;
7385 break;
7386 case DW_AT_explicit:
7387 attrs->at_explicit = a;
7388 break;
7389 case DW_AT_is_optional:
7390 attrs->at_is_optional = a;
7391 break;
7392 case DW_AT_location:
7393 attrs->at_location = a;
7394 break;
7395 case DW_AT_lower_bound:
7396 attrs->at_lower_bound = a;
7397 break;
7398 case DW_AT_mutable:
7399 attrs->at_mutable = a;
7400 break;
7401 case DW_AT_ordering:
7402 attrs->at_ordering = a;
7403 break;
7404 case DW_AT_picture_string:
7405 attrs->at_picture_string = a;
7406 break;
7407 case DW_AT_prototyped:
7408 attrs->at_prototyped = a;
7409 break;
7410 case DW_AT_small:
7411 attrs->at_small = a;
7412 break;
7413 case DW_AT_segment:
7414 attrs->at_segment = a;
7415 break;
7416 case DW_AT_string_length:
7417 attrs->at_string_length = a;
7418 break;
7419 case DW_AT_string_length_bit_size:
7420 attrs->at_string_length_bit_size = a;
7421 break;
7422 case DW_AT_string_length_byte_size:
7423 attrs->at_string_length_byte_size = a;
7424 break;
7425 case DW_AT_threads_scaled:
7426 attrs->at_threads_scaled = a;
7427 break;
7428 case DW_AT_upper_bound:
7429 attrs->at_upper_bound = a;
7430 break;
7431 case DW_AT_use_location:
7432 attrs->at_use_location = a;
7433 break;
7434 case DW_AT_use_UTF8:
7435 attrs->at_use_UTF8 = a;
7436 break;
7437 case DW_AT_variable_parameter:
7438 attrs->at_variable_parameter = a;
7439 break;
7440 case DW_AT_virtuality:
7441 attrs->at_virtuality = a;
7442 break;
7443 case DW_AT_visibility:
7444 attrs->at_visibility = a;
7445 break;
7446 case DW_AT_vtable_elem_location:
7447 attrs->at_vtable_elem_location = a;
7448 break;
7449 default:
7450 break;
7451 }
7452 }
7453 }
7454
7455 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7456
7457 static void
7458 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7459 {
7460 dw_die_ref c;
7461 dw_die_ref decl;
7462 struct checksum_attributes attrs;
7463
7464 CHECKSUM_ULEB128 ('D');
7465 CHECKSUM_ULEB128 (die->die_tag);
7466
7467 memset (&attrs, 0, sizeof (attrs));
7468
7469 decl = get_AT_ref (die, DW_AT_specification);
7470 if (decl != NULL)
7471 collect_checksum_attributes (&attrs, decl);
7472 collect_checksum_attributes (&attrs, die);
7473
7474 CHECKSUM_ATTR (attrs.at_name);
7475 CHECKSUM_ATTR (attrs.at_accessibility);
7476 CHECKSUM_ATTR (attrs.at_address_class);
7477 CHECKSUM_ATTR (attrs.at_allocated);
7478 CHECKSUM_ATTR (attrs.at_artificial);
7479 CHECKSUM_ATTR (attrs.at_associated);
7480 CHECKSUM_ATTR (attrs.at_binary_scale);
7481 CHECKSUM_ATTR (attrs.at_bit_offset);
7482 CHECKSUM_ATTR (attrs.at_bit_size);
7483 CHECKSUM_ATTR (attrs.at_bit_stride);
7484 CHECKSUM_ATTR (attrs.at_byte_size);
7485 CHECKSUM_ATTR (attrs.at_byte_stride);
7486 CHECKSUM_ATTR (attrs.at_const_value);
7487 CHECKSUM_ATTR (attrs.at_containing_type);
7488 CHECKSUM_ATTR (attrs.at_count);
7489 CHECKSUM_ATTR (attrs.at_data_location);
7490 CHECKSUM_ATTR (attrs.at_data_member_location);
7491 CHECKSUM_ATTR (attrs.at_decimal_scale);
7492 CHECKSUM_ATTR (attrs.at_decimal_sign);
7493 CHECKSUM_ATTR (attrs.at_default_value);
7494 CHECKSUM_ATTR (attrs.at_digit_count);
7495 CHECKSUM_ATTR (attrs.at_discr);
7496 CHECKSUM_ATTR (attrs.at_discr_list);
7497 CHECKSUM_ATTR (attrs.at_discr_value);
7498 CHECKSUM_ATTR (attrs.at_encoding);
7499 CHECKSUM_ATTR (attrs.at_endianity);
7500 CHECKSUM_ATTR (attrs.at_explicit);
7501 CHECKSUM_ATTR (attrs.at_is_optional);
7502 CHECKSUM_ATTR (attrs.at_location);
7503 CHECKSUM_ATTR (attrs.at_lower_bound);
7504 CHECKSUM_ATTR (attrs.at_mutable);
7505 CHECKSUM_ATTR (attrs.at_ordering);
7506 CHECKSUM_ATTR (attrs.at_picture_string);
7507 CHECKSUM_ATTR (attrs.at_prototyped);
7508 CHECKSUM_ATTR (attrs.at_small);
7509 CHECKSUM_ATTR (attrs.at_segment);
7510 CHECKSUM_ATTR (attrs.at_string_length);
7511 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7512 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7513 CHECKSUM_ATTR (attrs.at_threads_scaled);
7514 CHECKSUM_ATTR (attrs.at_upper_bound);
7515 CHECKSUM_ATTR (attrs.at_use_location);
7516 CHECKSUM_ATTR (attrs.at_use_UTF8);
7517 CHECKSUM_ATTR (attrs.at_variable_parameter);
7518 CHECKSUM_ATTR (attrs.at_virtuality);
7519 CHECKSUM_ATTR (attrs.at_visibility);
7520 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7521 CHECKSUM_ATTR (attrs.at_type);
7522 CHECKSUM_ATTR (attrs.at_friend);
7523 CHECKSUM_ATTR (attrs.at_alignment);
7524
7525 /* Checksum the child DIEs. */
7526 c = die->die_child;
7527 if (c) do {
7528 dw_attr_node *name_attr;
7529
7530 c = c->die_sib;
7531 name_attr = get_AT (c, DW_AT_name);
7532 if (is_template_instantiation (c))
7533 {
7534 /* Ignore instantiations of member type and function templates. */
7535 }
7536 else if (name_attr != NULL
7537 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7538 {
7539 /* Use a shallow checksum for named nested types and member
7540 functions. */
7541 CHECKSUM_ULEB128 ('S');
7542 CHECKSUM_ULEB128 (c->die_tag);
7543 CHECKSUM_STRING (AT_string (name_attr));
7544 }
7545 else
7546 {
7547 /* Use a deep checksum for other children. */
7548 /* Mark this DIE so it gets processed when unmarking. */
7549 if (c->die_mark == 0)
7550 c->die_mark = -1;
7551 die_checksum_ordered (c, ctx, mark);
7552 }
7553 } while (c != die->die_child);
7554
7555 CHECKSUM_ULEB128 (0);
7556 }
7557
7558 /* Add a type name and tag to a hash. */
7559 static void
7560 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7561 {
7562 CHECKSUM_ULEB128 (tag);
7563 CHECKSUM_STRING (name);
7564 }
7565
7566 #undef CHECKSUM
7567 #undef CHECKSUM_STRING
7568 #undef CHECKSUM_ATTR
7569 #undef CHECKSUM_SLEB128
7570 #undef CHECKSUM_ULEB128
7571
7572 /* Generate the type signature for DIE. This is computed by generating an
7573 MD5 checksum over the DIE's tag, its relevant attributes, and its
7574 children. Attributes that are references to other DIEs are processed
7575 by recursion, using the MARK field to prevent infinite recursion.
7576 If the DIE is nested inside a namespace or another type, we also
7577 need to include that context in the signature. The lower 64 bits
7578 of the resulting MD5 checksum comprise the signature. */
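/* ("Lower 64 bits" refers to the trailing eight bytes of the 16-byte MD5
digest, i.e. checksum[8..15]; DWARF_TYPE_SIGNATURE_SIZE is expected to be 8
so that the result fits a DW_FORM_ref_sig8 reference.) */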
7579
7580 static void
7581 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7582 {
7583 int mark;
7584 const char *name;
7585 unsigned char checksum[16];
7586 struct md5_ctx ctx;
7587 dw_die_ref decl;
7588 dw_die_ref parent;
7589
7590 name = get_AT_string (die, DW_AT_name);
7591 decl = get_AT_ref (die, DW_AT_specification);
7592 parent = get_die_parent (die);
7593
7594 /* First, compute a signature for just the type name (and its surrounding
7595 context, if any. This is stored in the type unit DIE for link-time
7596 ODR (one-definition rule) checking. */
7597
7598 if (is_cxx () && name != NULL)
7599 {
7600 md5_init_ctx (&ctx);
7601
7602 /* Checksum the names of surrounding namespaces and structures. */
7603 if (parent != NULL)
7604 checksum_die_context (parent, &ctx);
7605
7606 /* Checksum the current DIE. */
7607 die_odr_checksum (die->die_tag, name, &ctx);
7608 md5_finish_ctx (&ctx, checksum);
7609
7610 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7611 }
7612
7613 /* Next, compute the complete type signature. */
7614
7615 md5_init_ctx (&ctx);
7616 mark = 1;
7617 die->die_mark = mark;
7618
7619 /* Checksum the names of surrounding namespaces and structures. */
7620 if (parent != NULL)
7621 checksum_die_context (parent, &ctx);
7622
7623 /* Checksum the DIE and its children. */
7624 die_checksum_ordered (die, &ctx, &mark);
7625 unmark_all_dies (die);
7626 md5_finish_ctx (&ctx, checksum);
7627
7628 /* Store the signature in the type node and link the type DIE and the
7629 type node together. */
7630 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7631 DWARF_TYPE_SIGNATURE_SIZE);
7632 die->comdat_type_p = true;
7633 die->die_id.die_type_node = type_node;
7634 type_node->type_die = die;
7635
7636 /* If the DIE is a specification, link its declaration to the type node
7637 as well. */
7638 if (decl != NULL)
7639 {
7640 decl->comdat_type_p = true;
7641 decl->die_id.die_type_node = type_node;
7642 }
7643 }
7644
7645 /* Do the location expressions look the same? */
7646 static inline int
7647 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7648 {
7649 return loc1->dw_loc_opc == loc2->dw_loc_opc
7650 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7651 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7652 }
7653
7654 /* Do the values look the same? */
7655 static int
7656 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7657 {
7658 dw_loc_descr_ref loc1, loc2;
7659 rtx r1, r2;
7660
7661 if (v1->val_class != v2->val_class)
7662 return 0;
7663
7664 switch (v1->val_class)
7665 {
7666 case dw_val_class_const:
7667 case dw_val_class_const_implicit:
7668 return v1->v.val_int == v2->v.val_int;
7669 case dw_val_class_unsigned_const:
7670 case dw_val_class_unsigned_const_implicit:
7671 return v1->v.val_unsigned == v2->v.val_unsigned;
7672 case dw_val_class_const_double:
7673 return v1->v.val_double.high == v2->v.val_double.high
7674 && v1->v.val_double.low == v2->v.val_double.low;
7675 case dw_val_class_wide_int:
7676 return *v1->v.val_wide == *v2->v.val_wide;
7677 case dw_val_class_vec:
7678 if (v1->v.val_vec.length != v2->v.val_vec.length
7679 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7680 return 0;
7681 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7682 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7683 return 0;
7684 return 1;
7685 case dw_val_class_flag:
7686 return v1->v.val_flag == v2->v.val_flag;
7687 case dw_val_class_str:
7688 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7689
7690 case dw_val_class_addr:
7691 r1 = v1->v.val_addr;
7692 r2 = v2->v.val_addr;
7693 if (GET_CODE (r1) != GET_CODE (r2))
7694 return 0;
7695 return rtx_equal_p (r1, r2);
7696
7697 case dw_val_class_offset:
7698 return v1->v.val_offset == v2->v.val_offset;
7699
7700 case dw_val_class_loc:
7701 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7702 loc1 && loc2;
7703 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7704 if (!same_loc_p (loc1, loc2, mark))
7705 return 0;
7706 return !loc1 && !loc2;
7707
7708 case dw_val_class_die_ref:
7709 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7710
7711 case dw_val_class_symview:
7712 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7713
7714 case dw_val_class_fde_ref:
7715 case dw_val_class_vms_delta:
7716 case dw_val_class_lbl_id:
7717 case dw_val_class_lineptr:
7718 case dw_val_class_macptr:
7719 case dw_val_class_loclistsptr:
7720 case dw_val_class_high_pc:
7721 return 1;
7722
7723 case dw_val_class_file:
7724 case dw_val_class_file_implicit:
7725 return v1->v.val_file == v2->v.val_file;
7726
7727 case dw_val_class_data8:
7728 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7729
7730 default:
7731 return 1;
7732 }
7733 }
7734
7735 /* Do the attributes look the same? */
7736
7737 static int
7738 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7739 {
7740 if (at1->dw_attr != at2->dw_attr)
7741 return 0;
7742
7743 /* We don't care that this was compiled with a different compiler
7744 snapshot; if the output is the same, that's what matters. */
7745 if (at1->dw_attr == DW_AT_producer)
7746 return 1;
7747
7748 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7749 }
7750
7751 /* Do the dies look the same? */
7752
7753 static int
7754 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7755 {
7756 dw_die_ref c1, c2;
7757 dw_attr_node *a1;
7758 unsigned ix;
7759
7760 /* To avoid infinite recursion. */
7761 if (die1->die_mark)
7762 return die1->die_mark == die2->die_mark;
7763 die1->die_mark = die2->die_mark = ++(*mark);
7764
7765 if (die1->die_tag != die2->die_tag)
7766 return 0;
7767
7768 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7769 return 0;
7770
7771 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7772 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7773 return 0;
7774
7775 c1 = die1->die_child;
7776 c2 = die2->die_child;
7777 if (! c1)
7778 {
7779 if (c2)
7780 return 0;
7781 }
7782 else
7783 for (;;)
7784 {
7785 if (!same_die_p (c1, c2, mark))
7786 return 0;
7787 c1 = c1->die_sib;
7788 c2 = c2->die_sib;
7789 if (c1 == die1->die_child)
7790 {
7791 if (c2 == die2->die_child)
7792 break;
7793 else
7794 return 0;
7795 }
7796 }
7797
7798 return 1;
7799 }
7800
7801 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7802 children, and set die_symbol. */
7803
7804 static void
7805 compute_comp_unit_symbol (dw_die_ref unit_die)
7806 {
7807 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7808 const char *base = die_name ? lbasename (die_name) : "anonymous";
7809 char *name = XALLOCAVEC (char, strlen (base) + 64);
7810 char *p;
7811 int i, mark;
7812 unsigned char checksum[16];
7813 struct md5_ctx ctx;
7814
7815 /* Compute the checksum of the DIE, then append part of it as hex digits to
7816 the filename of the unit.
7817
7818 md5_init_ctx (&ctx);
7819 mark = 0;
7820 die_checksum (unit_die, &ctx, &mark);
7821 unmark_all_dies (unit_die);
7822 md5_finish_ctx (&ctx, checksum);
7823
7824 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7825 not start with a letter but with anything valid for filenames, and
7826 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7827 character is not a letter. */
7828 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7829 clean_symbol_name (name);
7830
7831 p = name + strlen (name);
7832 for (i = 0; i < 4; i++)
7833 {
7834 sprintf (p, "%.2x", checksum[i]);
7835 p += 2;
7836 }
7837
7838 unit_die->die_id.die_symbol = xstrdup (name);
7839 }
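/* For example, assuming clean_symbol_name maps characters that are not valid
in assembler names to underscores, a unit named "../src/foo.c" would get a
die_symbol of roughly "foo_c_" followed by eight hex digits taken from the
checksum of its DIE tree. */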
7840
7841 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7842
7843 static int
7844 is_type_die (dw_die_ref die)
7845 {
7846 switch (die->die_tag)
7847 {
7848 case DW_TAG_array_type:
7849 case DW_TAG_class_type:
7850 case DW_TAG_interface_type:
7851 case DW_TAG_enumeration_type:
7852 case DW_TAG_pointer_type:
7853 case DW_TAG_reference_type:
7854 case DW_TAG_rvalue_reference_type:
7855 case DW_TAG_string_type:
7856 case DW_TAG_structure_type:
7857 case DW_TAG_subroutine_type:
7858 case DW_TAG_union_type:
7859 case DW_TAG_ptr_to_member_type:
7860 case DW_TAG_set_type:
7861 case DW_TAG_subrange_type:
7862 case DW_TAG_base_type:
7863 case DW_TAG_const_type:
7864 case DW_TAG_file_type:
7865 case DW_TAG_packed_type:
7866 case DW_TAG_volatile_type:
7867 case DW_TAG_typedef:
7868 return 1;
7869 default:
7870 return 0;
7871 }
7872 }
7873
7874 /* Returns true iff C is a compile-unit DIE. */
7875
7876 static inline bool
7877 is_cu_die (dw_die_ref c)
7878 {
7879 return c && (c->die_tag == DW_TAG_compile_unit
7880 || c->die_tag == DW_TAG_skeleton_unit);
7881 }
7882
7883 /* Returns true iff C is a unit DIE of some sort. */
7884
7885 static inline bool
7886 is_unit_die (dw_die_ref c)
7887 {
7888 return c && (c->die_tag == DW_TAG_compile_unit
7889 || c->die_tag == DW_TAG_partial_unit
7890 || c->die_tag == DW_TAG_type_unit
7891 || c->die_tag == DW_TAG_skeleton_unit);
7892 }
7893
7894 /* Returns true iff C is a namespace DIE. */
7895
7896 static inline bool
7897 is_namespace_die (dw_die_ref c)
7898 {
7899 return c && c->die_tag == DW_TAG_namespace;
7900 }
7901
7902 /* Return non-zero if this DIE is a template parameter. */
7903
7904 static inline bool
7905 is_template_parameter (dw_die_ref die)
7906 {
7907 switch (die->die_tag)
7908 {
7909 case DW_TAG_template_type_param:
7910 case DW_TAG_template_value_param:
7911 case DW_TAG_GNU_template_template_param:
7912 case DW_TAG_GNU_template_parameter_pack:
7913 return true;
7914 default:
7915 return false;
7916 }
7917 }
7918
7919 /* Return non-zero if this DIE represents a template instantiation. */
7920
7921 static inline bool
7922 is_template_instantiation (dw_die_ref die)
7923 {
7924 dw_die_ref c;
7925
7926 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7927 return false;
7928 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7929 return false;
7930 }
7931
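/* Generate a fresh internal label name built from PREFIX and the shared
label counter; the result is xstrdup'ed and owned by the caller. */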
7932 static char *
7933 gen_internal_sym (const char *prefix)
7934 {
7935 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7936
7937 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7938 return xstrdup (buf);
7939 }
7940
7941 /* Return non-zero if this DIE is a declaration. */
7942
7943 static int
7944 is_declaration_die (dw_die_ref die)
7945 {
7946 dw_attr_node *a;
7947 unsigned ix;
7948
7949 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7950 if (a->dw_attr == DW_AT_declaration)
7951 return 1;
7952
7953 return 0;
7954 }
7955
7956 /* Return non-zero if this DIE is nested inside a subprogram. */
7957
7958 static int
7959 is_nested_in_subprogram (dw_die_ref die)
7960 {
7961 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7962
7963 if (decl == NULL)
7964 decl = die;
7965 return local_scope_p (decl);
7966 }
7967
7968 /* Return non-zero if this DIE contains a defining declaration of a
7969 subprogram. */
7970
7971 static int
7972 contains_subprogram_definition (dw_die_ref die)
7973 {
7974 dw_die_ref c;
7975
7976 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7977 return 1;
7978 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7979 return 0;
7980 }
7981
7982 /* Return non-zero if this is a type DIE that should be moved to a
7983 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7984 unit type. */
7985
7986 static int
7987 should_move_die_to_comdat (dw_die_ref die)
7988 {
7989 switch (die->die_tag)
7990 {
7991 case DW_TAG_class_type:
7992 case DW_TAG_structure_type:
7993 case DW_TAG_enumeration_type:
7994 case DW_TAG_union_type:
7995 /* Don't move declarations, inlined instances, types nested in a
7996 subprogram, or types that contain subprogram definitions. */
7997 if (is_declaration_die (die)
7998 || get_AT (die, DW_AT_abstract_origin)
7999 || is_nested_in_subprogram (die)
8000 || contains_subprogram_definition (die))
8001 return 0;
8002 return 1;
8003 case DW_TAG_array_type:
8004 case DW_TAG_interface_type:
8005 case DW_TAG_pointer_type:
8006 case DW_TAG_reference_type:
8007 case DW_TAG_rvalue_reference_type:
8008 case DW_TAG_string_type:
8009 case DW_TAG_subroutine_type:
8010 case DW_TAG_ptr_to_member_type:
8011 case DW_TAG_set_type:
8012 case DW_TAG_subrange_type:
8013 case DW_TAG_base_type:
8014 case DW_TAG_const_type:
8015 case DW_TAG_file_type:
8016 case DW_TAG_packed_type:
8017 case DW_TAG_volatile_type:
8018 case DW_TAG_typedef:
8019 default:
8020 return 0;
8021 }
8022 }
8023
8024 /* Make a clone of DIE. */
8025
8026 static dw_die_ref
8027 clone_die (dw_die_ref die)
8028 {
8029 dw_die_ref clone = new_die_raw (die->die_tag);
8030 dw_attr_node *a;
8031 unsigned ix;
8032
8033 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8034 add_dwarf_attr (clone, a);
8035
8036 return clone;
8037 }
8038
8039 /* Make a clone of the tree rooted at DIE. */
8040
8041 static dw_die_ref
8042 clone_tree (dw_die_ref die)
8043 {
8044 dw_die_ref c;
8045 dw_die_ref clone = clone_die (die);
8046
8047 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8048
8049 return clone;
8050 }
8051
8052 /* Make a clone of DIE as a declaration. */
8053
8054 static dw_die_ref
8055 clone_as_declaration (dw_die_ref die)
8056 {
8057 dw_die_ref clone;
8058 dw_die_ref decl;
8059 dw_attr_node *a;
8060 unsigned ix;
8061
8062 /* If the DIE is already a declaration, just clone it. */
8063 if (is_declaration_die (die))
8064 return clone_die (die);
8065
8066 /* If the DIE is a specification, just clone its declaration DIE. */
8067 decl = get_AT_ref (die, DW_AT_specification);
8068 if (decl != NULL)
8069 {
8070 clone = clone_die (decl);
8071 if (die->comdat_type_p)
8072 add_AT_die_ref (clone, DW_AT_signature, die);
8073 return clone;
8074 }
8075
8076 clone = new_die_raw (die->die_tag);
8077
8078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8079 {
8080 /* We don't want to copy over all attributes.
8081 For example we don't want DW_AT_byte_size because otherwise we will no
8082 longer have a declaration and GDB will treat it as a definition. */
8083
8084 switch (a->dw_attr)
8085 {
8086 case DW_AT_abstract_origin:
8087 case DW_AT_artificial:
8088 case DW_AT_containing_type:
8089 case DW_AT_external:
8090 case DW_AT_name:
8091 case DW_AT_type:
8092 case DW_AT_virtuality:
8093 case DW_AT_linkage_name:
8094 case DW_AT_MIPS_linkage_name:
8095 add_dwarf_attr (clone, a);
8096 break;
8097 case DW_AT_byte_size:
8098 case DW_AT_alignment:
8099 default:
8100 break;
8101 }
8102 }
8103
8104 if (die->comdat_type_p)
8105 add_AT_die_ref (clone, DW_AT_signature, die);
8106
8107 add_AT_flag (clone, DW_AT_declaration, 1);
8108 return clone;
8109 }
8110
8111
8112 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8113
8114 struct decl_table_entry
8115 {
8116 dw_die_ref orig;
8117 dw_die_ref copy;
8118 };
8119
8120 /* Helpers to manipulate hash table of copied declarations. */
8121
8122 /* Hashtable helpers. */
8123
8124 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8125 {
8126 typedef die_struct *compare_type;
8127 static inline hashval_t hash (const decl_table_entry *);
8128 static inline bool equal (const decl_table_entry *, const die_struct *);
8129 };
8130
8131 inline hashval_t
8132 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8133 {
8134 return htab_hash_pointer (entry->orig);
8135 }
8136
8137 inline bool
8138 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8139 const die_struct *entry2)
8140 {
8141 return entry1->orig == entry2;
8142 }
8143
8144 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8145
8146 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8147 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8148 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8149 to check if the ancestor has already been copied into UNIT. */
8150
8151 static dw_die_ref
8152 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8153 decl_hash_type *decl_table)
8154 {
8155 dw_die_ref parent = die->die_parent;
8156 dw_die_ref new_parent = unit;
8157 dw_die_ref copy;
8158 decl_table_entry **slot = NULL;
8159 struct decl_table_entry *entry = NULL;
8160
8161 if (decl_table)
8162 {
8163 /* Check if the entry has already been copied to UNIT. */
8164 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8165 INSERT);
8166 if (*slot != HTAB_EMPTY_ENTRY)
8167 {
8168 entry = *slot;
8169 return entry->copy;
8170 }
8171
8172 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8173 entry = XCNEW (struct decl_table_entry);
8174 entry->orig = die;
8175 entry->copy = NULL;
8176 *slot = entry;
8177 }
8178
8179 if (parent != NULL)
8180 {
8181 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8182 if (spec != NULL)
8183 parent = spec;
8184 if (!is_unit_die (parent))
8185 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8186 }
8187
8188 copy = clone_as_declaration (die);
8189 add_child_die (new_parent, copy);
8190
8191 if (decl_table)
8192 {
8193 /* Record the pointer to the copy. */
8194 entry->copy = copy;
8195 }
8196
8197 return copy;
8198 }
8199 /* Copy the declaration context to the new type unit DIE. This includes
8200 any surrounding namespace or type declarations. If the DIE has an
8201 AT_specification attribute, it also includes attributes and children
8202 attached to the specification, and returns a pointer to the original
8203 parent of the declaration DIE. Returns NULL otherwise. */
8204
8205 static dw_die_ref
8206 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8207 {
8208 dw_die_ref decl;
8209 dw_die_ref new_decl;
8210 dw_die_ref orig_parent = NULL;
8211
8212 decl = get_AT_ref (die, DW_AT_specification);
8213 if (decl == NULL)
8214 decl = die;
8215 else
8216 {
8217 unsigned ix;
8218 dw_die_ref c;
8219 dw_attr_node *a;
8220
8221 /* The original DIE will be changed to a declaration, and must
8222 be moved to be a child of the original declaration DIE. */
8223 orig_parent = decl->die_parent;
8224
8225 /* Copy the type node pointer from the new DIE to the original
8226 declaration DIE so we can forward references later. */
8227 decl->comdat_type_p = true;
8228 decl->die_id.die_type_node = die->die_id.die_type_node;
8229
8230 remove_AT (die, DW_AT_specification);
8231
8232 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8233 {
8234 if (a->dw_attr != DW_AT_name
8235 && a->dw_attr != DW_AT_declaration
8236 && a->dw_attr != DW_AT_external)
8237 add_dwarf_attr (die, a);
8238 }
8239
8240 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8241 }
8242
8243 if (decl->die_parent != NULL
8244 && !is_unit_die (decl->die_parent))
8245 {
8246 new_decl = copy_ancestor_tree (unit, decl, NULL);
8247 if (new_decl != NULL)
8248 {
8249 remove_AT (new_decl, DW_AT_signature);
8250 add_AT_specification (die, new_decl);
8251 }
8252 }
8253
8254 return orig_parent;
8255 }
8256
8257 /* Generate the skeleton ancestor tree for the given NODE, then clone
8258 the DIE and add the clone into the tree. */
8259
8260 static void
8261 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8262 {
8263 if (node->new_die != NULL)
8264 return;
8265
8266 node->new_die = clone_as_declaration (node->old_die);
8267
8268 if (node->parent != NULL)
8269 {
8270 generate_skeleton_ancestor_tree (node->parent);
8271 add_child_die (node->parent->new_die, node->new_die);
8272 }
8273 }
8274
8275 /* Generate a skeleton tree of DIEs containing any declarations that are
8276 found in the original tree. We traverse the tree looking for declaration
8277 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8278
8279 static void
8280 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8281 {
8282 skeleton_chain_node node;
8283 dw_die_ref c;
8284 dw_die_ref first;
8285 dw_die_ref prev = NULL;
8286 dw_die_ref next = NULL;
8287
8288 node.parent = parent;
8289
8290 first = c = parent->old_die->die_child;
8291 if (c)
8292 next = c->die_sib;
8293 if (c) do {
8294 if (prev == NULL || prev->die_sib == c)
8295 prev = c;
8296 c = next;
8297 next = (c == first ? NULL : c->die_sib);
8298 node.old_die = c;
8299 node.new_die = NULL;
8300 if (is_declaration_die (c))
8301 {
8302 if (is_template_instantiation (c))
8303 {
8304 /* Instantiated templates do not need to be cloned into the
8305 type unit. Just move the DIE and its children back to
8306 the skeleton tree (in the main CU). */
8307 remove_child_with_prev (c, prev);
8308 add_child_die (parent->new_die, c);
8309 c = prev;
8310 }
8311 else if (c->comdat_type_p)
8312 {
8313 /* This is the skeleton of a type broken out earlier by
8314 break_out_comdat_types. Clone the existing DIE, but keep the children
8315 under the original (which is in the main CU). */
8316 dw_die_ref clone = clone_die (c);
8317
8318 replace_child (c, clone, prev);
8319 generate_skeleton_ancestor_tree (parent);
8320 add_child_die (parent->new_die, c);
8321 c = clone;
8322 continue;
8323 }
8324 else
8325 {
8326 /* Clone the existing DIE, move the original to the skeleton
8327 tree (which is in the main CU), and put the clone, with
8328 all the original's children, where the original came from
8329 (which is about to be moved to the type unit). */
8330 dw_die_ref clone = clone_die (c);
8331 move_all_children (c, clone);
8332
8333 /* If the original has a DW_AT_object_pointer attribute,
8334 it would now point to a child DIE just moved to the
8335 cloned tree, so we need to remove that attribute from
8336 the original. */
8337 remove_AT (c, DW_AT_object_pointer);
8338
8339 replace_child (c, clone, prev);
8340 generate_skeleton_ancestor_tree (parent);
8341 add_child_die (parent->new_die, c);
8342 node.old_die = clone;
8343 node.new_die = c;
8344 c = clone;
8345 }
8346 }
8347 generate_skeleton_bottom_up (&node);
8348 } while (next != NULL);
8349 }
8350
8351 /* Wrapper function for generate_skeleton_bottom_up. */
8352
8353 static dw_die_ref
8354 generate_skeleton (dw_die_ref die)
8355 {
8356 skeleton_chain_node node;
8357
8358 node.old_die = die;
8359 node.new_die = NULL;
8360 node.parent = NULL;
8361
8362 /* If this type definition is nested inside another type,
8363 and is not an instantiation of a template, always leave
8364 at least a declaration in its place. */
8365 if (die->die_parent != NULL
8366 && is_type_die (die->die_parent)
8367 && !is_template_instantiation (die))
8368 node.new_die = clone_as_declaration (die);
8369
8370 generate_skeleton_bottom_up (&node);
8371 return node.new_die;
8372 }
8373
8374 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8375 declaration. The original DIE is moved to a new compile unit so that
8376 existing references to it follow it to the new location. If any of the
8377 original DIE's descendants is a declaration, we need to replace the
8378 original DIE with a skeleton tree and move the declarations back into the
8379 skeleton tree. */
8380
8381 static dw_die_ref
8382 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8383 dw_die_ref prev)
8384 {
8385 dw_die_ref skeleton, orig_parent;
8386
8387 /* Copy the declaration context to the type unit DIE. If the returned
8388 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8389 that DIE. */
8390 orig_parent = copy_declaration_context (unit, child);
8391
8392 skeleton = generate_skeleton (child);
8393 if (skeleton == NULL)
8394 remove_child_with_prev (child, prev);
8395 else
8396 {
8397 skeleton->comdat_type_p = true;
8398 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8399
8400 /* If the original DIE was a specification, we need to put
8401 the skeleton under the parent DIE of the declaration.
8402 This leaves the original declaration in the tree, but
8403 it will be pruned later since there are no longer any
8404 references to it. */
8405 if (orig_parent != NULL)
8406 {
8407 remove_child_with_prev (child, prev);
8408 add_child_die (orig_parent, skeleton);
8409 }
8410 else
8411 replace_child (child, skeleton, prev);
8412 }
8413
8414 return skeleton;
8415 }
8416
8417 static void
8418 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8419 comdat_type_node *type_node,
8420 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8421
8422 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8423 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8424 DWARF procedure references in the DW_AT_location attribute. */
8425
8426 static dw_die_ref
8427 copy_dwarf_procedure (dw_die_ref die,
8428 comdat_type_node *type_node,
8429 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8430 {
8431 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8432
8433 /* DWARF procedures are not supposed to have children... */
8434 gcc_assert (die->die_child == NULL);
8435
8436 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8437 gcc_assert (vec_safe_length (die->die_attr) == 1
8438 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8439
8440 /* Do not copy DWARF procedures more than once. */
8441 bool existed;
8442 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8443 if (existed)
8444 return die_copy;
8445
8446 die_copy = clone_die (die);
8447 add_child_die (type_node->root_die, die_copy);
8448 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8449 return die_copy;
8450 }
8451
8452 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8453 procedures in DIE's attributes. */
8454
8455 static void
8456 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8457 comdat_type_node *type_node,
8458 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8459 {
8460 dw_attr_node *a;
8461 unsigned i;
8462
8463 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8464 {
8465 dw_loc_descr_ref loc;
8466
8467 if (a->dw_attr_val.val_class != dw_val_class_loc)
8468 continue;
8469
8470 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8471 {
8472 switch (loc->dw_loc_opc)
8473 {
8474 case DW_OP_call2:
8475 case DW_OP_call4:
8476 case DW_OP_call_ref:
8477 gcc_assert (loc->dw_loc_oprnd1.val_class
8478 == dw_val_class_die_ref);
8479 loc->dw_loc_oprnd1.v.val_die_ref.die
8480 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8481 type_node,
8482 copied_dwarf_procs);
8483
8484 default:
8485 break;
8486 }
8487 }
8488 }
8489 }
8490
8491 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8492 rewrite references to point to the copies.
8493
8494 References are looked for in DIE's attributes and recursively in the
8495 attributes of all its children that are location descriptions.
8496 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8497 it is used to avoid copying the same DWARF procedure under TYPE_NODE twice. */
8498
8499 static void
8500 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8501 comdat_type_node *type_node,
8502 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8503 {
8504 dw_die_ref c;
8505
8506 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8507 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8508 type_node,
8509 copied_dwarf_procs));
8510 }
8511
8512 /* Traverse the DIE and set up additional .debug_types or .debug_info
8513 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8514 section. */
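/* Concretely, a qualifying type (say, a class defined at namespace scope) ends
up as the payload of a new DW_TAG_type_unit identified by its MD5-based
signature, while the main CU either drops the DIE entirely or keeps a small
skeleton declaration carrying DW_AT_signature in its place so that existing
references still resolve. */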
8515
8516 static void
8517 break_out_comdat_types (dw_die_ref die)
8518 {
8519 dw_die_ref c;
8520 dw_die_ref first;
8521 dw_die_ref prev = NULL;
8522 dw_die_ref next = NULL;
8523 dw_die_ref unit = NULL;
8524
8525 first = c = die->die_child;
8526 if (c)
8527 next = c->die_sib;
8528 if (c) do {
8529 if (prev == NULL || prev->die_sib == c)
8530 prev = c;
8531 c = next;
8532 next = (c == first ? NULL : c->die_sib);
8533 if (should_move_die_to_comdat (c))
8534 {
8535 dw_die_ref replacement;
8536 comdat_type_node *type_node;
8537
8538 /* Break out nested types into their own type units. */
8539 break_out_comdat_types (c);
8540
8541 /* Create a new type unit DIE as the root for the new tree, and
8542 add it to the list of comdat types. */
8543 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8544 add_AT_unsigned (unit, DW_AT_language,
8545 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8546 type_node = ggc_cleared_alloc<comdat_type_node> ();
8547 type_node->root_die = unit;
8548 type_node->next = comdat_type_list;
8549 comdat_type_list = type_node;
8550
8551 /* Generate the type signature. */
8552 generate_type_signature (c, type_node);
8553
8554 /* Copy the declaration context, attributes, and children of the
8555 declaration into the new type unit DIE, then remove this DIE
8556 from the main CU (or replace it with a skeleton if necessary). */
8557 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8558 type_node->skeleton_die = replacement;
8559
8560 /* Add the DIE to the new compunit. */
8561 add_child_die (unit, c);
8562
8563 /* Types can reference DWARF procedures for type size or data location
8564 expressions. Calls in DWARF expressions cannot target procedures
8565 that are not in the same section. So we must copy DWARF procedures
8566 along with this type and then rewrite references to them. */
8567 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8568 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8569
8570 if (replacement != NULL)
8571 c = replacement;
8572 }
8573 else if (c->die_tag == DW_TAG_namespace
8574 || c->die_tag == DW_TAG_class_type
8575 || c->die_tag == DW_TAG_structure_type
8576 || c->die_tag == DW_TAG_union_type)
8577 {
8578 /* Look for nested types that can be broken out. */
8579 break_out_comdat_types (c);
8580 }
8581 } while (next != NULL);
8582 }
8583
8584 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8585 Enter all the cloned children into the hash table decl_table. */
8586
8587 static dw_die_ref
8588 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8589 {
8590 dw_die_ref c;
8591 dw_die_ref clone;
8592 struct decl_table_entry *entry;
8593 decl_table_entry **slot;
8594
8595 if (die->die_tag == DW_TAG_subprogram)
8596 clone = clone_as_declaration (die);
8597 else
8598 clone = clone_die (die);
8599
8600 slot = decl_table->find_slot_with_hash (die,
8601 htab_hash_pointer (die), INSERT);
8602
8603 /* Assert that DIE isn't in the hash table yet. If it were already there,
8604 its ancestors would necessarily be there as well, and clone_tree_partial
8605 wouldn't have been called. */
8606 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8607
8608 entry = XCNEW (struct decl_table_entry);
8609 entry->orig = die;
8610 entry->copy = clone;
8611 *slot = entry;
8612
8613 if (die->die_tag != DW_TAG_subprogram)
8614 FOR_EACH_CHILD (die, c,
8615 add_child_die (clone, clone_tree_partial (c, decl_table)));
8616
8617 return clone;
8618 }
8619
8620 /* Walk the DIE and its children, looking for references to incomplete
8621 or trivial types that are unmarked (i.e., that are not in the current
8622 type_unit). */
8623
8624 static void
8625 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8626 {
8627 dw_die_ref c;
8628 dw_attr_node *a;
8629 unsigned ix;
8630
8631 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8632 {
8633 if (AT_class (a) == dw_val_class_die_ref)
8634 {
8635 dw_die_ref targ = AT_ref (a);
8636 decl_table_entry **slot;
8637 struct decl_table_entry *entry;
8638
8639 if (targ->die_mark != 0 || targ->comdat_type_p)
8640 continue;
8641
8642 slot = decl_table->find_slot_with_hash (targ,
8643 htab_hash_pointer (targ),
8644 INSERT);
8645
8646 if (*slot != HTAB_EMPTY_ENTRY)
8647 {
8648 /* TARG has already been copied, so we just need to
8649 modify the reference to point to the copy. */
8650 entry = *slot;
8651 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8652 }
8653 else
8654 {
8655 dw_die_ref parent = unit;
8656 dw_die_ref copy = clone_die (targ);
8657
8658 /* Record in DECL_TABLE that TARG has been copied.
8659 Need to do this now, before the recursive call,
8660 because DECL_TABLE may be expanded and SLOT
8661 would no longer be a valid pointer. */
8662 entry = XCNEW (struct decl_table_entry);
8663 entry->orig = targ;
8664 entry->copy = copy;
8665 *slot = entry;
8666
8667 /* If TARG is not a declaration DIE, we need to copy its
8668 children. */
8669 if (!is_declaration_die (targ))
8670 {
8671 FOR_EACH_CHILD (
8672 targ, c,
8673 add_child_die (copy,
8674 clone_tree_partial (c, decl_table)));
8675 }
8676
8677 /* Make sure the cloned tree is marked as part of the
8678 type unit. */
8679 mark_dies (copy);
8680
8681 /* If TARG has surrounding context, copy its ancestor tree
8682 into the new type unit. */
8683 if (targ->die_parent != NULL
8684 && !is_unit_die (targ->die_parent))
8685 parent = copy_ancestor_tree (unit, targ->die_parent,
8686 decl_table);
8687
8688 add_child_die (parent, copy);
8689 a->dw_attr_val.v.val_die_ref.die = copy;
8690
8691 /* Make sure the newly-copied DIE is walked. If it was
8692 installed in a previously-added context, it won't
8693 get visited otherwise. */
8694 if (parent != unit)
8695 {
8696 /* Find the highest point of the newly-added tree,
8697 mark each node along the way, and walk from there. */
8698 parent->die_mark = 1;
8699 while (parent->die_parent
8700 && parent->die_parent->die_mark == 0)
8701 {
8702 parent = parent->die_parent;
8703 parent->die_mark = 1;
8704 }
8705 copy_decls_walk (unit, parent, decl_table);
8706 }
8707 }
8708 }
8709 }
8710
8711 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8712 }
8713
8714 /* Copy declarations for "unworthy" types into the new comdat section.
8715 Incomplete types, modified types, and certain other types aren't broken
8716 out into comdat sections of their own, so they don't have a signature,
8717 and we need to copy the declaration into the same section so that we
8718 don't have an external reference. */
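/* For instance, a pointed-to incomplete structure or a DW_TAG_const_type
wrapper never gets a type unit of its own (see should_move_die_to_comdat), so
a type unit referring to one receives a local declaration copy here rather
than an unresolvable external reference. */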
8719
8720 static void
8721 copy_decls_for_unworthy_types (dw_die_ref unit)
8722 {
8723 mark_dies (unit);
8724 decl_hash_type decl_table (10);
8725 copy_decls_walk (unit, unit, &decl_table);
8726 unmark_dies (unit);
8727 }
8728
8729 /* Traverse the DIE and add a sibling attribute if it may have the
8730 effect of speeding up access to siblings. To save some space,
8731 avoid generating sibling attributes for DIE's without children. */
8732
8733 static void
8734 add_sibling_attributes (dw_die_ref die)
8735 {
8736 dw_die_ref c;
8737
8738 if (! die->die_child)
8739 return;
8740
8741 if (die->die_parent && die != die->die_parent->die_child)
8742 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8743
8744 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8745 }
8746
8747 /* Output all location lists for the DIE and its children. */
8748
8749 static void
8750 output_location_lists (dw_die_ref die)
8751 {
8752 dw_die_ref c;
8753 dw_attr_node *a;
8754 unsigned ix;
8755
8756 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8757 if (AT_class (a) == dw_val_class_loc_list)
8758 output_loc_list (AT_loc_list (a));
8759
8760 FOR_EACH_CHILD (die, c, output_location_lists (c));
8761 }
8762
8763 /* During assign_location_list_indexes and output_loclists_offsets this is
8764 the current index; afterwards, the number of assigned indexes (i.e. how
8765 large the .debug_loclists* offset table should be). */
8766 static unsigned int loc_list_idx;
8767
8768 /* Output all location list offsets for the DIE and its children. */
8769
8770 static void
8771 output_loclists_offsets (dw_die_ref die)
8772 {
8773 dw_die_ref c;
8774 dw_attr_node *a;
8775 unsigned ix;
8776
8777 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8778 if (AT_class (a) == dw_val_class_loc_list)
8779 {
8780 dw_loc_list_ref l = AT_loc_list (a);
8781 if (l->offset_emitted)
8782 continue;
8783 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8784 loc_section_label, NULL);
8785 gcc_assert (l->hash == loc_list_idx);
8786 loc_list_idx++;
8787 l->offset_emitted = true;
8788 }
8789
8790 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8791 }
8792
8793 /* Recursively set indexes of location lists. */
8794
8795 static void
8796 assign_location_list_indexes (dw_die_ref die)
8797 {
8798 dw_die_ref c;
8799 dw_attr_node *a;
8800 unsigned ix;
8801
8802 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8803 if (AT_class (a) == dw_val_class_loc_list)
8804 {
8805 dw_loc_list_ref list = AT_loc_list (a);
8806 if (!list->num_assigned)
8807 {
8808 list->num_assigned = true;
8809 list->hash = loc_list_idx++;
8810 }
8811 }
8812
8813 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8814 }
8815
8816 /* We want to limit the number of external references, because they are
8817 larger than local references: a relocation takes multiple words, and
8818 even a sig8 reference is always eight bytes, whereas a local reference
8819 can be as small as one byte (though GCC usually emits the 4-byte DW_FORM_ref4).
8820 So if we encounter multiple external references to the same type DIE, we
8821 make a local typedef stub for it and redirect all references there.
8822
8823 This is the element of the hash table for keeping track of these
8824 references. */
8825
8826 struct external_ref
8827 {
8828 dw_die_ref type;
8829 dw_die_ref stub;
8830 unsigned n_refs;
8831 };
8832
8833 /* Hashtable helpers. */
8834
8835 struct external_ref_hasher : free_ptr_hash <external_ref>
8836 {
8837 static inline hashval_t hash (const external_ref *);
8838 static inline bool equal (const external_ref *, const external_ref *);
8839 };
8840
8841 inline hashval_t
8842 external_ref_hasher::hash (const external_ref *r)
8843 {
8844 dw_die_ref die = r->type;
8845 hashval_t h = 0;
8846
8847 /* We can't use the address of the DIE for hashing, because
8848 that will make the order of the stub DIEs non-deterministic. */
8849 if (! die->comdat_type_p)
8850 /* We have a symbol; use it to compute a hash. */
8851 h = htab_hash_string (die->die_id.die_symbol);
8852 else
8853 {
8854 /* We have a type signature; use a subset of the bits as the hash.
8855 The 8-byte signature is at least as large as hashval_t. */
8856 comdat_type_node *type_node = die->die_id.die_type_node;
8857 memcpy (&h, type_node->signature, sizeof (h));
8858 }
8859 return h;
8860 }
8861
8862 inline bool
8863 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8864 {
8865 return r1->type == r2->type;
8866 }
8867
8868 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8869
8870 /* Return a pointer to the external_ref for references to DIE. */
8871
8872 static struct external_ref *
8873 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8874 {
8875 struct external_ref ref, *ref_p;
8876 external_ref **slot;
8877
8878 ref.type = die;
8879 slot = map->find_slot (&ref, INSERT);
8880 if (*slot != HTAB_EMPTY_ENTRY)
8881 return *slot;
8882
8883 ref_p = XCNEW (struct external_ref);
8884 ref_p->type = die;
8885 *slot = ref_p;
8886 return ref_p;
8887 }
8888
8889 /* Subroutine of optimize_external_refs, below.
8890
8891 If we see a type skeleton, record it as our stub. If we see external
8892 references, remember how many we've seen. */
8893
8894 static void
8895 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8896 {
8897 dw_die_ref c;
8898 dw_attr_node *a;
8899 unsigned ix;
8900 struct external_ref *ref_p;
8901
8902 if (is_type_die (die)
8903 && (c = get_AT_ref (die, DW_AT_signature)))
8904 {
8905 /* This is a local skeleton; use it for local references. */
8906 ref_p = lookup_external_ref (map, c);
8907 ref_p->stub = die;
8908 }
8909
8910 /* Scan the DIE references, and remember any that refer to DIEs from
8911 other CUs (i.e. those which are not marked). */
8912 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8913 if (AT_class (a) == dw_val_class_die_ref
8914 && (c = AT_ref (a))->die_mark == 0
8915 && is_type_die (c))
8916 {
8917 ref_p = lookup_external_ref (map, c);
8918 ref_p->n_refs++;
8919 }
8920
8921 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8922 }
8923
8924 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8925 points to an external_ref, DATA is the CU we're processing. If we don't
8926 already have a local stub, and we have multiple refs, build a stub. */
8927
8928 int
8929 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8930 {
8931 struct external_ref *ref_p = *slot;
8932
8933 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8934 {
8935 /* We have multiple references to this type, so build a small stub.
8936 Both of these forms are a bit dodgy from the perspective of the
8937 DWARF standard, since technically they should have names. */
8938 dw_die_ref cu = data;
8939 dw_die_ref type = ref_p->type;
8940 dw_die_ref stub = NULL;
8941
8942 if (type->comdat_type_p)
8943 {
8944 /* If we refer to this type via sig8, use AT_signature. */
8945 stub = new_die (type->die_tag, cu, NULL_TREE);
8946 add_AT_die_ref (stub, DW_AT_signature, type);
8947 }
8948 else
8949 {
8950 /* Otherwise, use a typedef with no name. */
8951 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8952 add_AT_die_ref (stub, DW_AT_type, type);
8953 }
8954
8955 stub->die_mark++;
8956 ref_p->stub = stub;
8957 }
8958 return 1;
8959 }
8960
8961 /* DIE is a unit; look through all the DIE references to see if there are
8962 any external references to types, and if so, create local stubs for
8963 them which will be applied in build_abbrev_table. This is useful because
8964 references to local DIEs are smaller. */
8965
8966 static external_ref_hash_type *
8967 optimize_external_refs (dw_die_ref die)
8968 {
8969 external_ref_hash_type *map = new external_ref_hash_type (10);
8970 optimize_external_refs_1 (die, map);
8971 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8972 return map;
8973 }
8974
8975 /* The following 3 variables are temporaries that are computed only during the
8976 build_abbrev_table call and used and released during the following
8977 optimize_abbrev_table call. */
8978
8979 /* First abbrev_id that can be optimized based on usage. */
8980 static unsigned int abbrev_opt_start;
8981
8982 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8983 abbrev_id smaller than this, because they must be already sized
8984 during build_abbrev_table). */
8985 static unsigned int abbrev_opt_base_type_end;
8986
8987 /* Vector of usage counts during build_abbrev_table. Indexed by
8988 abbrev_id - abbrev_opt_start. */
8989 static vec<unsigned int> abbrev_usage_count;
8990
8991 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8992 static vec<dw_die_ref> sorted_abbrev_dies;
8993
8994 /* The format of each DIE (and its attribute value pairs) is encoded in an
8995 abbreviation table. This routine builds the abbreviation table and assigns
8996 a unique abbreviation id for each abbreviation entry. The children of each
8997 die are visited recursively. */
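/* As an illustrative sketch (not tied to any particular front end): if a
   CU contains many DIEs of the shape

     DW_TAG_variable
       DW_AT_name      (DW_FORM_strp)
       DW_AT_type      (DW_FORM_ref4)
       DW_AT_location  (DW_FORM_exprloc)

   they all share a single abbreviation entry; each DIE in .debug_info then
   consists of just the uleb128 abbrev code followed by the attribute
   values, while the tag/attribute/form triplets are stored once in
   .debug_abbrev.  */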
8998
8999 static void
9000 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9001 {
9002 unsigned int abbrev_id = 0;
9003 dw_die_ref c;
9004 dw_attr_node *a;
9005 unsigned ix;
9006 dw_die_ref abbrev;
9007
9008 /* Scan the DIE references, and replace any that refer to
9009 DIEs from other CUs (i.e. those which are not marked) with
9010 the local stubs we built in optimize_external_refs. */
9011 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9012 if (AT_class (a) == dw_val_class_die_ref
9013 && (c = AT_ref (a))->die_mark == 0)
9014 {
9015 struct external_ref *ref_p;
9016 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9017
9018 ref_p = lookup_external_ref (extern_map, c);
9019 if (ref_p->stub && ref_p->stub != die)
9020 change_AT_die_ref (a, ref_p->stub);
9021 else
9022 /* We aren't changing this reference, so mark it external. */
9023 set_AT_ref_external (a, 1);
9024 }
9025
9026 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9027 {
9028 dw_attr_node *die_a, *abbrev_a;
9029 unsigned ix;
9030 bool ok = true;
9031
9032 if (abbrev_id == 0)
9033 continue;
9034 if (abbrev->die_tag != die->die_tag)
9035 continue;
9036 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9037 continue;
9038
9039 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9040 continue;
9041
9042 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9043 {
9044 abbrev_a = &(*abbrev->die_attr)[ix];
9045 if ((abbrev_a->dw_attr != die_a->dw_attr)
9046 || (value_format (abbrev_a) != value_format (die_a)))
9047 {
9048 ok = false;
9049 break;
9050 }
9051 }
9052 if (ok)
9053 break;
9054 }
9055
9056 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9057 {
9058 vec_safe_push (abbrev_die_table, die);
9059 if (abbrev_opt_start)
9060 abbrev_usage_count.safe_push (0);
9061 }
9062 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9063 {
9064 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9065 sorted_abbrev_dies.safe_push (die);
9066 }
9067
9068 die->die_abbrev = abbrev_id;
9069 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9070 }
9071
9072 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9073 by die_abbrev's usage count, from the most commonly used
9074 abbreviation to the least. */
9075
9076 static int
9077 die_abbrev_cmp (const void *p1, const void *p2)
9078 {
9079 dw_die_ref die1 = *(const dw_die_ref *) p1;
9080 dw_die_ref die2 = *(const dw_die_ref *) p2;
9081
9082 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9083 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9084
9085 if (die1->die_abbrev >= abbrev_opt_base_type_end
9086 && die2->die_abbrev >= abbrev_opt_base_type_end)
9087 {
9088 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9089 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9090 return -1;
9091 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9092 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9093 return 1;
9094 }
9095
9096 /* Stabilize the sort. */
9097 if (die1->die_abbrev < die2->die_abbrev)
9098 return -1;
9099 if (die1->die_abbrev > die2->die_abbrev)
9100 return 1;
9101
9102 return 0;
9103 }
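/* Why the usage ordering matters: the abbrev code is emitted as a uleb128
   in front of every DIE, so codes 1-127 cost one byte while codes 128 and
   up cost two or more.  E.g. (illustrative numbers only) moving an
   abbreviation used by 10000 DIEs from code 130 down to code 5 saves
   10000 bytes of .debug_info.  This is also why optimize_abbrev_table
   below only runs when there are more than 127 abbreviations, or when
   DWARF 5 implicit constants may be usable.  */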
9104
9105 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9106 class attributes of DIEs in between sorted_abbrev_dies[first_id] and
9107 sorted_abbrev_dies[end - 1] into the corresponding dw_val_class_*_implicit
9108 classes. */
9109
9110 static void
9111 optimize_implicit_const (unsigned int first_id, unsigned int end,
9112 vec<bool> &implicit_consts)
9113 {
9114 /* It never makes sense if there is just one DIE using the abbreviation. */
9115 if (end < first_id + 2)
9116 return;
9117
9118 dw_attr_node *a;
9119 unsigned ix, i;
9120 dw_die_ref die = sorted_abbrev_dies[first_id];
9121 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9122 if (implicit_consts[ix])
9123 {
9124 enum dw_val_class new_class = dw_val_class_none;
9125 switch (AT_class (a))
9126 {
9127 case dw_val_class_unsigned_const:
9128 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9129 continue;
9130
9131 /* The .debug_abbrev section will grow by
9132 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9133 in all the DIEs using that abbreviation. */
9134 if (constant_size (AT_unsigned (a)) * (end - first_id)
9135 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9136 continue;
9137
9138 new_class = dw_val_class_unsigned_const_implicit;
9139 break;
9140
9141 case dw_val_class_const:
9142 new_class = dw_val_class_const_implicit;
9143 break;
9144
9145 case dw_val_class_file:
9146 new_class = dw_val_class_file_implicit;
9147 break;
9148
9149 default:
9150 continue;
9151 }
9152 for (i = first_id; i < end; i++)
9153 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9154 = new_class;
9155 }
9156 }
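/* A worked example of the size test above (numbers illustrative): for a
   DW_AT_byte_size of 4 shared by 100 DIEs, keeping DW_FORM_data1 costs
   constant_size (4) * 100 = 100 bytes in .debug_info, whereas
   DW_FORM_implicit_const stores the value just once as a sleb128
   (size_of_sleb128 (4) == 1 byte) in .debug_abbrev, so the conversion is
   clearly profitable.  With fewer than two DIEs using the abbreviation it
   never is, hence the early return above.  */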
9157
9158 /* Attempt to optimize the abbreviation table for abbreviations numbered
9159 abbrev_opt_start and above. */
9160
9161 static void
9162 optimize_abbrev_table (void)
9163 {
9164 if (abbrev_opt_start
9165 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9166 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9167 {
9168 auto_vec<bool, 32> implicit_consts;
9169 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9170
9171 unsigned int abbrev_id = abbrev_opt_start - 1;
9172 unsigned int first_id = ~0U;
9173 unsigned int last_abbrev_id = 0;
9174 unsigned int i;
9175 dw_die_ref die;
9176 if (abbrev_opt_base_type_end > abbrev_opt_start)
9177 abbrev_id = abbrev_opt_base_type_end - 1;
9178 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9179 most commonly used abbreviations come first. */
9180 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9181 {
9182 dw_attr_node *a;
9183 unsigned ix;
9184
9185 /* If calc_base_type_die_sizes has been called, the CU and
9186 base types after it can't be optimized, because we've already
9187 calculated their DIE offsets. We've sorted them first. */
9188 if (die->die_abbrev < abbrev_opt_base_type_end)
9189 continue;
9190 if (die->die_abbrev != last_abbrev_id)
9191 {
9192 last_abbrev_id = die->die_abbrev;
9193 if (dwarf_version >= 5 && first_id != ~0U)
9194 optimize_implicit_const (first_id, i, implicit_consts);
9195 abbrev_id++;
9196 (*abbrev_die_table)[abbrev_id] = die;
9197 if (dwarf_version >= 5)
9198 {
9199 first_id = i;
9200 implicit_consts.truncate (0);
9201
9202 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9203 switch (AT_class (a))
9204 {
9205 case dw_val_class_const:
9206 case dw_val_class_unsigned_const:
9207 case dw_val_class_file:
9208 implicit_consts.safe_push (true);
9209 break;
9210 default:
9211 implicit_consts.safe_push (false);
9212 break;
9213 }
9214 }
9215 }
9216 else if (dwarf_version >= 5)
9217 {
9218 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9219 if (!implicit_consts[ix])
9220 continue;
9221 else
9222 {
9223 dw_attr_node *other_a
9224 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9225 if (!dw_val_equal_p (&a->dw_attr_val,
9226 &other_a->dw_attr_val))
9227 implicit_consts[ix] = false;
9228 }
9229 }
9230 die->die_abbrev = abbrev_id;
9231 }
9232 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9233 if (dwarf_version >= 5 && first_id != ~0U)
9234 optimize_implicit_const (first_id, i, implicit_consts);
9235 }
9236
9237 abbrev_opt_start = 0;
9238 abbrev_opt_base_type_end = 0;
9239 abbrev_usage_count.release ();
9240 sorted_abbrev_dies.release ();
9241 }
9242 \f
9243 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9244
9245 static int
9246 constant_size (unsigned HOST_WIDE_INT value)
9247 {
9248 int log;
9249
9250 if (value == 0)
9251 log = 0;
9252 else
9253 log = floor_log2 (value);
9254
9255 log = log / 8;
9256 log = 1 << (floor_log2 (log) + 1);
9257
9258 return log;
9259 }
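/* For illustration, the result is rounded up to the next power-of-two data
   form size: constant_size (0) == constant_size (0xff) == 1,
   constant_size (0x100) == 2, constant_size (0x10000) == 4 and
   constant_size ((unsigned HOST_WIDE_INT) 1 << 32) == 8, matching
   DW_FORM_data1, _data2, _data4 and _data8 respectively.  */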
9260
9261 /* Return the size of a DIE as it is represented in the
9262 .debug_info section. */
9263
9264 static unsigned long
9265 size_of_die (dw_die_ref die)
9266 {
9267 unsigned long size = 0;
9268 dw_attr_node *a;
9269 unsigned ix;
9270 enum dwarf_form form;
9271
9272 size += size_of_uleb128 (die->die_abbrev);
9273 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9274 {
9275 switch (AT_class (a))
9276 {
9277 case dw_val_class_addr:
9278 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9279 {
9280 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9281 size += size_of_uleb128 (AT_index (a));
9282 }
9283 else
9284 size += DWARF2_ADDR_SIZE;
9285 break;
9286 case dw_val_class_offset:
9287 size += DWARF_OFFSET_SIZE;
9288 break;
9289 case dw_val_class_loc:
9290 {
9291 unsigned long lsize = size_of_locs (AT_loc (a));
9292
9293 /* Block length. */
9294 if (dwarf_version >= 4)
9295 size += size_of_uleb128 (lsize);
9296 else
9297 size += constant_size (lsize);
9298 size += lsize;
9299 }
9300 break;
9301 case dw_val_class_loc_list:
9302 case dw_val_class_view_list:
9303 if (dwarf_split_debug_info && dwarf_version >= 5)
9304 {
9305 gcc_assert (AT_loc_list (a)->num_assigned);
9306 size += size_of_uleb128 (AT_loc_list (a)->hash);
9307 }
9308 else
9309 size += DWARF_OFFSET_SIZE;
9310 break;
9311 case dw_val_class_range_list:
9312 if (value_format (a) == DW_FORM_rnglistx)
9313 {
9314 gcc_assert (rnglist_idx);
9315 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9316 size += size_of_uleb128 (r->idx);
9317 }
9318 else
9319 size += DWARF_OFFSET_SIZE;
9320 break;
9321 case dw_val_class_const:
9322 size += size_of_sleb128 (AT_int (a));
9323 break;
9324 case dw_val_class_unsigned_const:
9325 {
9326 int csize = constant_size (AT_unsigned (a));
9327 if (dwarf_version == 3
9328 && a->dw_attr == DW_AT_data_member_location
9329 && csize >= 4)
9330 size += size_of_uleb128 (AT_unsigned (a));
9331 else
9332 size += csize;
9333 }
9334 break;
9335 case dw_val_class_symview:
9336 if (symview_upper_bound <= 0xff)
9337 size += 1;
9338 else if (symview_upper_bound <= 0xffff)
9339 size += 2;
9340 else if (symview_upper_bound <= 0xffffffff)
9341 size += 4;
9342 else
9343 size += 8;
9344 break;
9345 case dw_val_class_const_implicit:
9346 case dw_val_class_unsigned_const_implicit:
9347 case dw_val_class_file_implicit:
9348 /* These occupy no size in the DIE, just an extra sleb128 in
9349 .debug_abbrev. */
9350 break;
9351 case dw_val_class_const_double:
9352 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9353 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9354 size++; /* block */
9355 break;
9356 case dw_val_class_wide_int:
9357 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9358 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9359 if (get_full_len (*a->dw_attr_val.v.val_wide)
9360 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9361 size++; /* block */
9362 break;
9363 case dw_val_class_vec:
9364 size += constant_size (a->dw_attr_val.v.val_vec.length
9365 * a->dw_attr_val.v.val_vec.elt_size)
9366 + a->dw_attr_val.v.val_vec.length
9367 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9368 break;
9369 case dw_val_class_flag:
9370 if (dwarf_version >= 4)
9371 /* Currently all add_AT_flag calls pass in 1 as last argument,
9372 so DW_FORM_flag_present can be used. If that ever changes,
9373 we'll need to use DW_FORM_flag and have some optimization
9374 in build_abbrev_table that will change those to
9375 DW_FORM_flag_present if it is set to 1 in all DIEs using
9376 the same abbrev entry. */
9377 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9378 else
9379 size += 1;
9380 break;
9381 case dw_val_class_die_ref:
9382 if (AT_ref_external (a))
9383 {
9384 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9385 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9386 is sized by target address length, whereas in DWARF3
9387 it's always sized as an offset. */
9388 if (use_debug_types)
9389 size += DWARF_TYPE_SIGNATURE_SIZE;
9390 else if (dwarf_version == 2)
9391 size += DWARF2_ADDR_SIZE;
9392 else
9393 size += DWARF_OFFSET_SIZE;
9394 }
9395 else
9396 size += DWARF_OFFSET_SIZE;
9397 break;
9398 case dw_val_class_fde_ref:
9399 size += DWARF_OFFSET_SIZE;
9400 break;
9401 case dw_val_class_lbl_id:
9402 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9403 {
9404 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9405 size += size_of_uleb128 (AT_index (a));
9406 }
9407 else
9408 size += DWARF2_ADDR_SIZE;
9409 break;
9410 case dw_val_class_lineptr:
9411 case dw_val_class_macptr:
9412 case dw_val_class_loclistsptr:
9413 size += DWARF_OFFSET_SIZE;
9414 break;
9415 case dw_val_class_str:
9416 form = AT_string_form (a);
9417 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9418 size += DWARF_OFFSET_SIZE;
9419 else if (form == dwarf_FORM (DW_FORM_strx))
9420 size += size_of_uleb128 (AT_index (a));
9421 else
9422 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9423 break;
9424 case dw_val_class_file:
9425 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9426 break;
9427 case dw_val_class_data8:
9428 size += 8;
9429 break;
9430 case dw_val_class_vms_delta:
9431 size += DWARF_OFFSET_SIZE;
9432 break;
9433 case dw_val_class_high_pc:
9434 size += DWARF2_ADDR_SIZE;
9435 break;
9436 case dw_val_class_discr_value:
9437 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9438 break;
9439 case dw_val_class_discr_list:
9440 {
9441 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9442
9443 /* This is a block, so we have the block length and then its
9444 data. */
9445 size += constant_size (block_size) + block_size;
9446 }
9447 break;
9448 default:
9449 gcc_unreachable ();
9450 }
9451 }
9452
9453 return size;
9454 }
9455
9456 /* Size the debugging information associated with a given DIE. Visits the
9457 DIE's children recursively. Updates the global variable next_die_offset
9458 each time through. Uses the current value of next_die_offset to update the
9459 die_offset field in each DIE. */
9460
9461 static void
9462 calc_die_sizes (dw_die_ref die)
9463 {
9464 dw_die_ref c;
9465
9466 gcc_assert (die->die_offset == 0
9467 || (unsigned long int) die->die_offset == next_die_offset);
9468 die->die_offset = next_die_offset;
9469 next_die_offset += size_of_die (die);
9470
9471 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9472
9473 if (die->die_child != NULL)
9474 /* Count the null byte used to terminate sibling lists. */
9475 next_die_offset += 1;
9476 }
9477
9478 /* Size just the base type children at the start of the CU.
9479 This is needed because build_abbrev_table needs to size locs,
9480 and sizing of type-based stack ops needs to know die_offset
9481 values for the base types. */
9482
9483 static void
9484 calc_base_type_die_sizes (void)
9485 {
9486 unsigned long die_offset = (dwarf_split_debug_info
9487 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9488 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9489 unsigned int i;
9490 dw_die_ref base_type;
9491 #if ENABLE_ASSERT_CHECKING
9492 dw_die_ref prev = comp_unit_die ()->die_child;
9493 #endif
9494
9495 die_offset += size_of_die (comp_unit_die ());
9496 for (i = 0; base_types.iterate (i, &base_type); i++)
9497 {
9498 #if ENABLE_ASSERT_CHECKING
9499 gcc_assert (base_type->die_offset == 0
9500 && prev->die_sib == base_type
9501 && base_type->die_child == NULL
9502 && base_type->die_abbrev);
9503 prev = base_type;
9504 #endif
9505 if (abbrev_opt_start
9506 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9507 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9508 base_type->die_offset = die_offset;
9509 die_offset += size_of_die (base_type);
9510 }
9511 }
9512
9513 /* Set the marks for a die and its children. We do this so
9514 that we know whether or not a reference needs to use FORM_ref_addr; only
9515 DIEs in the same CU will be marked. We used to clear out the offset
9516 and use that as the flag, but ran into ordering problems. */
9517
9518 static void
9519 mark_dies (dw_die_ref die)
9520 {
9521 dw_die_ref c;
9522
9523 gcc_assert (!die->die_mark);
9524
9525 die->die_mark = 1;
9526 FOR_EACH_CHILD (die, c, mark_dies (c));
9527 }
9528
9529 /* Clear the marks for a die and its children. */
9530
9531 static void
9532 unmark_dies (dw_die_ref die)
9533 {
9534 dw_die_ref c;
9535
9536 if (! use_debug_types)
9537 gcc_assert (die->die_mark);
9538
9539 die->die_mark = 0;
9540 FOR_EACH_CHILD (die, c, unmark_dies (c));
9541 }
9542
9543 /* Clear the marks for a die, its children and referred dies. */
9544
9545 static void
9546 unmark_all_dies (dw_die_ref die)
9547 {
9548 dw_die_ref c;
9549 dw_attr_node *a;
9550 unsigned ix;
9551
9552 if (!die->die_mark)
9553 return;
9554 die->die_mark = 0;
9555
9556 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9557
9558 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9559 if (AT_class (a) == dw_val_class_die_ref)
9560 unmark_all_dies (AT_ref (a));
9561 }
9562
9563 /* Calculate if the entry should appear in the final output file. It may be
9564 from a pruned type. */
9565
9566 static bool
9567 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9568 {
9569 /* By limiting gnu pubnames to definitions only, gold can generate a
9570 gdb index without entries for declarations, which don't include
9571 enough information to be useful. */
9572 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9573 return false;
9574
9575 if (table == pubname_table)
9576 {
9577 /* Enumerator names are part of the pubname table, but the
9578 parent DW_TAG_enumeration_type die may have been pruned.
9579 Don't output them if that is the case. */
9580 if (p->die->die_tag == DW_TAG_enumerator &&
9581 (p->die->die_parent == NULL
9582 || !p->die->die_parent->die_perennial_p))
9583 return false;
9584
9585 /* Everything else in the pubname table is included. */
9586 return true;
9587 }
9588
9589 /* The pubtypes table shouldn't include types that have been
9590 pruned. */
9591 return (p->die->die_offset != 0
9592 || !flag_eliminate_unused_debug_types);
9593 }
9594
9595 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9596 generated for the compilation unit. */
9597
9598 static unsigned long
9599 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9600 {
9601 unsigned long size;
9602 unsigned i;
9603 pubname_entry *p;
9604 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9605
9606 size = DWARF_PUBNAMES_HEADER_SIZE;
9607 FOR_EACH_VEC_ELT (*names, i, p)
9608 if (include_pubname_in_output (names, p))
9609 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9610
9611 size += DWARF_OFFSET_SIZE;
9612 return size;
9613 }
9614
9615 /* Return the size of the information in the .debug_aranges section. */
9616
9617 static unsigned long
9618 size_of_aranges (void)
9619 {
9620 unsigned long size;
9621
9622 size = DWARF_ARANGES_HEADER_SIZE;
9623
9624 /* Count the address/length pair for this compilation unit. */
9625 if (text_section_used)
9626 size += 2 * DWARF2_ADDR_SIZE;
9627 if (cold_text_section_used)
9628 size += 2 * DWARF2_ADDR_SIZE;
9629 if (have_multiple_function_sections)
9630 {
9631 unsigned fde_idx;
9632 dw_fde_ref fde;
9633
9634 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9635 {
9636 if (DECL_IGNORED_P (fde->decl))
9637 continue;
9638 if (!fde->in_std_section)
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9641 size += 2 * DWARF2_ADDR_SIZE;
9642 }
9643 }
9644
9645 /* Count the two zero words used to terminate the address range table. */
9646 size += 2 * DWARF2_ADDR_SIZE;
9647 return size;
9648 }
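/* As a quick check of the arithmetic above (illustrative, assuming 8-byte
   addresses): a CU whose code lives entirely in .text contributes one
   address/length pair of 16 bytes, so the section size is
   DWARF_ARANGES_HEADER_SIZE + 16 for that pair, plus 16 more for the pair
   of zero words terminating the table.  */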
9649 \f
9650 /* Select the encoding of an attribute value. */
9651
9652 static enum dwarf_form
9653 value_format (dw_attr_node *a)
9654 {
9655 switch (AT_class (a))
9656 {
9657 case dw_val_class_addr:
9658 /* Only very few attributes allow DW_FORM_addr. */
9659 switch (a->dw_attr)
9660 {
9661 case DW_AT_low_pc:
9662 case DW_AT_high_pc:
9663 case DW_AT_entry_pc:
9664 case DW_AT_trampoline:
9665 return (AT_index (a) == NOT_INDEXED
9666 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9667 default:
9668 break;
9669 }
9670 switch (DWARF2_ADDR_SIZE)
9671 {
9672 case 1:
9673 return DW_FORM_data1;
9674 case 2:
9675 return DW_FORM_data2;
9676 case 4:
9677 return DW_FORM_data4;
9678 case 8:
9679 return DW_FORM_data8;
9680 default:
9681 gcc_unreachable ();
9682 }
9683 case dw_val_class_loc_list:
9684 case dw_val_class_view_list:
9685 if (dwarf_split_debug_info
9686 && dwarf_version >= 5
9687 && AT_loc_list (a)->num_assigned)
9688 return DW_FORM_loclistx;
9689 /* FALLTHRU */
9690 case dw_val_class_range_list:
9691 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9692 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9693 care about sizes of .debug* sections in shared libraries and
9694 executables and don't take into account relocations that affect just
9695 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9696 table in the .debug_rnglists section. */
9697 if (dwarf_split_debug_info
9698 && dwarf_version >= 5
9699 && AT_class (a) == dw_val_class_range_list
9700 && rnglist_idx
9701 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9702 return DW_FORM_rnglistx;
9703 if (dwarf_version >= 4)
9704 return DW_FORM_sec_offset;
9705 /* FALLTHRU */
9706 case dw_val_class_vms_delta:
9707 case dw_val_class_offset:
9708 switch (DWARF_OFFSET_SIZE)
9709 {
9710 case 4:
9711 return DW_FORM_data4;
9712 case 8:
9713 return DW_FORM_data8;
9714 default:
9715 gcc_unreachable ();
9716 }
9717 case dw_val_class_loc:
9718 if (dwarf_version >= 4)
9719 return DW_FORM_exprloc;
9720 switch (constant_size (size_of_locs (AT_loc (a))))
9721 {
9722 case 1:
9723 return DW_FORM_block1;
9724 case 2:
9725 return DW_FORM_block2;
9726 case 4:
9727 return DW_FORM_block4;
9728 default:
9729 gcc_unreachable ();
9730 }
9731 case dw_val_class_const:
9732 return DW_FORM_sdata;
9733 case dw_val_class_unsigned_const:
9734 switch (constant_size (AT_unsigned (a)))
9735 {
9736 case 1:
9737 return DW_FORM_data1;
9738 case 2:
9739 return DW_FORM_data2;
9740 case 4:
9741 /* In DWARF3 DW_AT_data_member_location with
9742 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9743 constant, so we need to use DW_FORM_udata if we need
9744 a large constant. */
9745 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9746 return DW_FORM_udata;
9747 return DW_FORM_data4;
9748 case 8:
9749 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9750 return DW_FORM_udata;
9751 return DW_FORM_data8;
9752 default:
9753 gcc_unreachable ();
9754 }
9755 case dw_val_class_const_implicit:
9756 case dw_val_class_unsigned_const_implicit:
9757 case dw_val_class_file_implicit:
9758 return DW_FORM_implicit_const;
9759 case dw_val_class_const_double:
9760 switch (HOST_BITS_PER_WIDE_INT)
9761 {
9762 case 8:
9763 return DW_FORM_data2;
9764 case 16:
9765 return DW_FORM_data4;
9766 case 32:
9767 return DW_FORM_data8;
9768 case 64:
9769 if (dwarf_version >= 5)
9770 return DW_FORM_data16;
9771 /* FALLTHRU */
9772 default:
9773 return DW_FORM_block1;
9774 }
9775 case dw_val_class_wide_int:
9776 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9777 {
9778 case 8:
9779 return DW_FORM_data1;
9780 case 16:
9781 return DW_FORM_data2;
9782 case 32:
9783 return DW_FORM_data4;
9784 case 64:
9785 return DW_FORM_data8;
9786 case 128:
9787 if (dwarf_version >= 5)
9788 return DW_FORM_data16;
9789 /* FALLTHRU */
9790 default:
9791 return DW_FORM_block1;
9792 }
9793 case dw_val_class_symview:
9794 /* ??? We might use uleb128, but then we'd have to compute
9795 .debug_info offsets in the assembler. */
9796 if (symview_upper_bound <= 0xff)
9797 return DW_FORM_data1;
9798 else if (symview_upper_bound <= 0xffff)
9799 return DW_FORM_data2;
9800 else if (symview_upper_bound <= 0xffffffff)
9801 return DW_FORM_data4;
9802 else
9803 return DW_FORM_data8;
9804 case dw_val_class_vec:
9805 switch (constant_size (a->dw_attr_val.v.val_vec.length
9806 * a->dw_attr_val.v.val_vec.elt_size))
9807 {
9808 case 1:
9809 return DW_FORM_block1;
9810 case 2:
9811 return DW_FORM_block2;
9812 case 4:
9813 return DW_FORM_block4;
9814 default:
9815 gcc_unreachable ();
9816 }
9817 case dw_val_class_flag:
9818 if (dwarf_version >= 4)
9819 {
9820 /* Currently all add_AT_flag calls pass in 1 as last argument,
9821 so DW_FORM_flag_present can be used. If that ever changes,
9822 we'll need to use DW_FORM_flag and have some optimization
9823 in build_abbrev_table that will change those to
9824 DW_FORM_flag_present if it is set to 1 in all DIEs using
9825 the same abbrev entry. */
9826 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9827 return DW_FORM_flag_present;
9828 }
9829 return DW_FORM_flag;
9830 case dw_val_class_die_ref:
9831 if (AT_ref_external (a))
9832 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9833 else
9834 return DW_FORM_ref;
9835 case dw_val_class_fde_ref:
9836 return DW_FORM_data;
9837 case dw_val_class_lbl_id:
9838 return (AT_index (a) == NOT_INDEXED
9839 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9840 case dw_val_class_lineptr:
9841 case dw_val_class_macptr:
9842 case dw_val_class_loclistsptr:
9843 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9844 case dw_val_class_str:
9845 return AT_string_form (a);
9846 case dw_val_class_file:
9847 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9848 {
9849 case 1:
9850 return DW_FORM_data1;
9851 case 2:
9852 return DW_FORM_data2;
9853 case 4:
9854 return DW_FORM_data4;
9855 default:
9856 gcc_unreachable ();
9857 }
9858
9859 case dw_val_class_data8:
9860 return DW_FORM_data8;
9861
9862 case dw_val_class_high_pc:
9863 switch (DWARF2_ADDR_SIZE)
9864 {
9865 case 1:
9866 return DW_FORM_data1;
9867 case 2:
9868 return DW_FORM_data2;
9869 case 4:
9870 return DW_FORM_data4;
9871 case 8:
9872 return DW_FORM_data8;
9873 default:
9874 gcc_unreachable ();
9875 }
9876
9877 case dw_val_class_discr_value:
9878 return (a->dw_attr_val.v.val_discr_value.pos
9879 ? DW_FORM_udata
9880 : DW_FORM_sdata);
9881 case dw_val_class_discr_list:
9882 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9883 {
9884 case 1:
9885 return DW_FORM_block1;
9886 case 2:
9887 return DW_FORM_block2;
9888 case 4:
9889 return DW_FORM_block4;
9890 default:
9891 gcc_unreachable ();
9892 }
9893
9894 default:
9895 gcc_unreachable ();
9896 }
9897 }
9898
9899 /* Output the encoding of an attribute value. */
9900
9901 static void
9902 output_value_format (dw_attr_node *a)
9903 {
9904 enum dwarf_form form = value_format (a);
9905
9906 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9907 }
9908
9909 /* Given a die and id, produce the appropriate abbreviations. */
9910
9911 static void
9912 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9913 {
9914 unsigned ix;
9915 dw_attr_node *a_attr;
9916
9917 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9918 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9919 dwarf_tag_name (abbrev->die_tag));
9920
9921 if (abbrev->die_child != NULL)
9922 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9923 else
9924 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9925
9926 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9927 {
9928 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9929 dwarf_attr_name (a_attr->dw_attr));
9930 output_value_format (a_attr);
9931 if (value_format (a_attr) == DW_FORM_implicit_const)
9932 {
9933 if (AT_class (a_attr) == dw_val_class_file_implicit)
9934 {
9935 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9936 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9937 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9938 }
9939 else
9940 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9941 }
9942 }
9943
9944 dw2_asm_output_data (1, 0, NULL);
9945 dw2_asm_output_data (1, 0, NULL);
9946 }
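/* For the DW_TAG_variable abbreviation sketched before build_abbrev_table
   above, the output produced here would look roughly like this (the abbrev
   code and the assembler comment syntax are illustrative):

     .uleb128 0x3    # (abbrev code)
     .uleb128 0x34   # (TAG: DW_TAG_variable)
     .byte    0      # DW_children_no
     .uleb128 0x3    # (DW_AT_name)
     .uleb128 0xe    # (DW_FORM_strp)
     .uleb128 0x49   # (DW_AT_type)
     .uleb128 0x13   # (DW_FORM_ref4)
     .uleb128 0x2    # (DW_AT_location)
     .uleb128 0x18   # (DW_FORM_exprloc)
     .byte    0
     .byte    0      # the two zero bytes ending the entry  */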
9947
9948
9949 /* Output the .debug_abbrev section which defines the DIE abbreviation
9950 table. */
9951
9952 static void
9953 output_abbrev_section (void)
9954 {
9955 unsigned int abbrev_id;
9956 dw_die_ref abbrev;
9957
9958 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9959 if (abbrev_id != 0)
9960 output_die_abbrevs (abbrev_id, abbrev);
9961
9962 /* Terminate the table. */
9963 dw2_asm_output_data (1, 0, NULL);
9964 }
9965
9966 /* Return a new location list, given the begin and end range, and the
9967 expression. */
9968
9969 static inline dw_loc_list_ref
9970 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9971 const char *end, var_loc_view vend,
9972 const char *section)
9973 {
9974 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9975
9976 retlist->begin = begin;
9977 retlist->begin_entry = NULL;
9978 retlist->end = end;
9979 retlist->expr = expr;
9980 retlist->section = section;
9981 retlist->vbegin = vbegin;
9982 retlist->vend = vend;
9983
9984 return retlist;
9985 }
9986
9987 /* Return true iff there's any nonzero view number in the loc list.
9988
9989 ??? When views are not enabled, we'll often extend a single range
9990 to the entire function, so that we emit a single location
9991 expression rather than a location list. With views, even with a
9992 single range, we'll output a list if start or end have a nonzero
9993 view. If we change this, we may want to stop splitting a single
9994 range in dw_loc_list just because of a nonzero view, even if it
9995 straddles across hot/cold partitions. */
9996
9997 static bool
9998 loc_list_has_views (dw_loc_list_ref list)
9999 {
10000 if (!debug_variable_location_views)
10001 return false;
10002
10003 for (dw_loc_list_ref loc = list;
10004 loc != NULL; loc = loc->dw_loc_next)
10005 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10006 return true;
10007
10008 return false;
10009 }
10010
10011 /* Generate a new internal symbol for this location list node, if it
10012 hasn't got one yet. */
10013
10014 static inline void
10015 gen_llsym (dw_loc_list_ref list)
10016 {
10017 gcc_assert (!list->ll_symbol);
10018 list->ll_symbol = gen_internal_sym ("LLST");
10019
10020 if (!loc_list_has_views (list))
10021 return;
10022
10023 if (dwarf2out_locviews_in_attribute ())
10024 {
10025 /* Use the same label_num for the view list. */
10026 label_num--;
10027 list->vl_symbol = gen_internal_sym ("LVUS");
10028 }
10029 else
10030 list->vl_symbol = list->ll_symbol;
10031 }
10032
10033 /* Generate a symbol for the list, but only if we really want to emit
10034 it as a list. */
10035
10036 static inline void
10037 maybe_gen_llsym (dw_loc_list_ref list)
10038 {
10039 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10040 return;
10041
10042 gen_llsym (list);
10043 }
10044
10045 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10046 NULL, don't consider size of the location expression. If we're not
10047 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10048 representation in *SIZEP. */
10049
10050 static bool
10051 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10052 {
10053 /* Don't output an entry that starts and ends at the same address. */
10054 if (strcmp (curr->begin, curr->end) == 0
10055 && curr->vbegin == curr->vend && !curr->force)
10056 return true;
10057
10058 if (!sizep)
10059 return false;
10060
10061 unsigned long size = size_of_locs (curr->expr);
10062
10063 /* If the expression is too large, drop it on the floor. We could
10064 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10065 in the expression, but >= 64KB expressions for a single value
10066 in a single range are unlikely to be very useful. */
10067 if (dwarf_version < 5 && size > 0xffff)
10068 return true;
10069
10070 *sizep = size;
10071
10072 return false;
10073 }
10074
10075 /* Output a view pair loclist entry for CURR, if it requires one. */
10076
10077 static void
10078 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10079 {
10080 if (!dwarf2out_locviews_in_loclist ())
10081 return;
10082
10083 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10084 return;
10085
10086 #ifdef DW_LLE_view_pair
10087 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10088
10089 if (dwarf2out_as_locview_support)
10090 {
10091 if (ZERO_VIEW_P (curr->vbegin))
10092 dw2_asm_output_data_uleb128 (0, "Location view begin");
10093 else
10094 {
10095 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10096 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10097 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10098 }
10099
10100 if (ZERO_VIEW_P (curr->vend))
10101 dw2_asm_output_data_uleb128 (0, "Location view end");
10102 else
10103 {
10104 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10105 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10106 dw2_asm_output_symname_uleb128 (label, "Location view end");
10107 }
10108 }
10109 else
10110 {
10111 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10112 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10113 }
10114 #endif /* DW_LLE_view_pair */
10115
10116 return;
10117 }
10118
10119 /* Output the location list given to us. */
10120
10121 static void
10122 output_loc_list (dw_loc_list_ref list_head)
10123 {
10124 int vcount = 0, lcount = 0;
10125
10126 if (list_head->emitted)
10127 return;
10128 list_head->emitted = true;
10129
10130 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10131 {
10132 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10133
10134 for (dw_loc_list_ref curr = list_head; curr != NULL;
10135 curr = curr->dw_loc_next)
10136 {
10137 unsigned long size;
10138
10139 if (skip_loc_list_entry (curr, &size))
10140 continue;
10141
10142 vcount++;
10143
10144 /* ?? dwarf_split_debug_info? */
10145 if (dwarf2out_as_locview_support)
10146 {
10147 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10148
10149 if (!ZERO_VIEW_P (curr->vbegin))
10150 {
10151 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10152 dw2_asm_output_symname_uleb128 (label,
10153 "View list begin (%s)",
10154 list_head->vl_symbol);
10155 }
10156 else
10157 dw2_asm_output_data_uleb128 (0,
10158 "View list begin (%s)",
10159 list_head->vl_symbol);
10160
10161 if (!ZERO_VIEW_P (curr->vend))
10162 {
10163 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10164 dw2_asm_output_symname_uleb128 (label,
10165 "View list end (%s)",
10166 list_head->vl_symbol);
10167 }
10168 else
10169 dw2_asm_output_data_uleb128 (0,
10170 "View list end (%s)",
10171 list_head->vl_symbol);
10172 }
10173 else
10174 {
10175 dw2_asm_output_data_uleb128 (curr->vbegin,
10176 "View list begin (%s)",
10177 list_head->vl_symbol);
10178 dw2_asm_output_data_uleb128 (curr->vend,
10179 "View list end (%s)",
10180 list_head->vl_symbol);
10181 }
10182 }
10183 }
10184
10185 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10186
10187 const char *last_section = NULL;
10188 const char *base_label = NULL;
10189
10190 /* Walk the location list, and output each range + expression. */
10191 for (dw_loc_list_ref curr = list_head; curr != NULL;
10192 curr = curr->dw_loc_next)
10193 {
10194 unsigned long size;
10195
10196 /* Skip this entry? If we skip it here, we must skip it in the
10197 view list above as well. */
10198 if (skip_loc_list_entry (curr, &size))
10199 continue;
10200
10201 lcount++;
10202
10203 if (dwarf_version >= 5)
10204 {
10205 if (dwarf_split_debug_info)
10206 {
10207 dwarf2out_maybe_output_loclist_view_pair (curr);
10208 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10209 a uleb128 index into .debug_addr and a uleb128 length. */
10210 dw2_asm_output_data (1, DW_LLE_startx_length,
10211 "DW_LLE_startx_length (%s)",
10212 list_head->ll_symbol);
10213 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10214 "Location list range start index "
10215 "(%s)", curr->begin);
10216 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10217 For that case we probably need to emit DW_LLE_startx_endx,
10218 but we'd need 2 .debug_addr entries rather than just one. */
10219 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10220 "Location list length (%s)",
10221 list_head->ll_symbol);
10222 }
10223 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10224 {
10225 dwarf2out_maybe_output_loclist_view_pair (curr);
10226 /* If all code is in .text section, the base address is
10227 already provided by the CU attributes. Use
10228 DW_LLE_offset_pair where both addresses are uleb128 encoded
10229 offsets against that base. */
10230 dw2_asm_output_data (1, DW_LLE_offset_pair,
10231 "DW_LLE_offset_pair (%s)",
10232 list_head->ll_symbol);
10233 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10234 "Location list begin address (%s)",
10235 list_head->ll_symbol);
10236 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10237 "Location list end address (%s)",
10238 list_head->ll_symbol);
10239 }
10240 else if (HAVE_AS_LEB128)
10241 {
10242 /* Otherwise, find out how many consecutive entries could share
10243 the same base entry. If just one, emit DW_LLE_start_length,
10244 otherwise emit DW_LLE_base_address for the base address
10245 followed by a series of DW_LLE_offset_pair. */
10246 if (last_section == NULL || curr->section != last_section)
10247 {
10248 dw_loc_list_ref curr2;
10249 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10250 curr2 = curr2->dw_loc_next)
10251 {
10252 if (strcmp (curr2->begin, curr2->end) == 0
10253 && !curr2->force)
10254 continue;
10255 break;
10256 }
10257 if (curr2 == NULL || curr->section != curr2->section)
10258 last_section = NULL;
10259 else
10260 {
10261 last_section = curr->section;
10262 base_label = curr->begin;
10263 dw2_asm_output_data (1, DW_LLE_base_address,
10264 "DW_LLE_base_address (%s)",
10265 list_head->ll_symbol);
10266 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10267 "Base address (%s)",
10268 list_head->ll_symbol);
10269 }
10270 }
10271 /* Only one entry with the same base address. Use
10272 DW_LLE_start_length with absolute address and uleb128
10273 length. */
10274 if (last_section == NULL)
10275 {
10276 dwarf2out_maybe_output_loclist_view_pair (curr);
10277 dw2_asm_output_data (1, DW_LLE_start_length,
10278 "DW_LLE_start_length (%s)",
10279 list_head->ll_symbol);
10280 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10281 "Location list begin address (%s)",
10282 list_head->ll_symbol);
10283 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10284 "Location list length "
10285 "(%s)", list_head->ll_symbol);
10286 }
10287 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10288 DW_LLE_base_address. */
10289 else
10290 {
10291 dwarf2out_maybe_output_loclist_view_pair (curr);
10292 dw2_asm_output_data (1, DW_LLE_offset_pair,
10293 "DW_LLE_offset_pair (%s)",
10294 list_head->ll_symbol);
10295 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10296 "Location list begin address "
10297 "(%s)", list_head->ll_symbol);
10298 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10299 "Location list end address "
10300 "(%s)", list_head->ll_symbol);
10301 }
10302 }
10303 /* The assembler does not support the .uleb128 directive. Emit
10304 DW_LLE_start_end with a pair of absolute addresses. */
10305 else
10306 {
10307 dwarf2out_maybe_output_loclist_view_pair (curr);
10308 dw2_asm_output_data (1, DW_LLE_start_end,
10309 "DW_LLE_start_end (%s)",
10310 list_head->ll_symbol);
10311 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10312 "Location list begin address (%s)",
10313 list_head->ll_symbol);
10314 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10315 "Location list end address (%s)",
10316 list_head->ll_symbol);
10317 }
10318 }
10319 else if (dwarf_split_debug_info)
10320 {
10321 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10322 and 4 byte length. */
10323 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10324 "Location list start/length entry (%s)",
10325 list_head->ll_symbol);
10326 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10327 "Location list range start index (%s)",
10328 curr->begin);
10329 /* The length field is 4 bytes. If we ever need to support
10330 an 8-byte length, we can add a new DW_LLE code or fall back
10331 to DW_LLE_GNU_start_end_entry. */
10332 dw2_asm_output_delta (4, curr->end, curr->begin,
10333 "Location list range length (%s)",
10334 list_head->ll_symbol);
10335 }
10336 else if (!have_multiple_function_sections)
10337 {
10338 /* Pair of relative addresses against start of text section. */
10339 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10340 "Location list begin address (%s)",
10341 list_head->ll_symbol);
10342 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10343 "Location list end address (%s)",
10344 list_head->ll_symbol);
10345 }
10346 else
10347 {
10348 /* Pair of absolute addresses. */
10349 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10350 "Location list begin address (%s)",
10351 list_head->ll_symbol);
10352 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10353 "Location list end address (%s)",
10354 list_head->ll_symbol);
10355 }
10356
10357 /* Output the block length for this list of location operations. */
10358 if (dwarf_version >= 5)
10359 dw2_asm_output_data_uleb128 (size, "Location expression size");
10360 else
10361 {
10362 gcc_assert (size <= 0xffff);
10363 dw2_asm_output_data (2, size, "Location expression size");
10364 }
10365
10366 output_loc_sequence (curr->expr, -1);
10367 }
10368
10369 /* And finally list termination. */
10370 if (dwarf_version >= 5)
10371 dw2_asm_output_data (1, DW_LLE_end_of_list,
10372 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10373 else if (dwarf_split_debug_info)
10374 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10375 "Location list terminator (%s)",
10376 list_head->ll_symbol);
10377 else
10378 {
10379 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10380 "Location list terminator begin (%s)",
10381 list_head->ll_symbol);
10382 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10383 "Location list terminator end (%s)",
10384 list_head->ll_symbol);
10385 }
10386
10387 gcc_assert (!list_head->vl_symbol
10388 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10389 }
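/* For orientation, a DWARF 5 location list for the common case handled
   above (no -gsplit-dwarf, single text section, assembler leb128 support,
   all location views zero) comes out roughly as follows for a value living
   in two registers over two consecutive ranges (labels and registers
   illustrative):

     .Ldebug_loc2:
       .byte    0x4                  # DW_LLE_offset_pair
       .uleb128 .LVL1-.Ltext0        # begin
       .uleb128 .LVL2-.Ltext0        # end
       .uleb128 0x1                  # expression size
       .byte    0x50                 # DW_OP_reg0
       .byte    0x4                  # DW_LLE_offset_pair
       .uleb128 .LVL2-.Ltext0
       .uleb128 .LFE0-.Ltext0
       .uleb128 0x1
       .byte    0x53                 # DW_OP_reg3
       .byte    0                    # DW_LLE_end_of_list  */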
10390
10391 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10392 section. Emit a relocated reference if val_entry is NULL, otherwise,
10393 emit an indirect reference. */
10394
10395 static void
10396 output_range_list_offset (dw_attr_node *a)
10397 {
10398 const char *name = dwarf_attr_name (a->dw_attr);
10399
10400 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10401 {
10402 if (dwarf_version >= 5)
10403 {
10404 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10405 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10406 debug_ranges_section, "%s", name);
10407 }
10408 else
10409 {
10410 char *p = strchr (ranges_section_label, '\0');
10411 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10412 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10413 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10414 debug_ranges_section, "%s", name);
10415 *p = '\0';
10416 }
10417 }
10418 else if (dwarf_version >= 5)
10419 {
10420 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10421 gcc_assert (rnglist_idx);
10422 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10423 }
10424 else
10425 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10426 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10427 "%s (offset from %s)", name, ranges_section_label);
10428 }
10429
10430 /* Output the offset into the debug_loc section. */
10431
10432 static void
10433 output_loc_list_offset (dw_attr_node *a)
10434 {
10435 char *sym = AT_loc_list (a)->ll_symbol;
10436
10437 gcc_assert (sym);
10438 if (!dwarf_split_debug_info)
10439 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10440 "%s", dwarf_attr_name (a->dw_attr));
10441 else if (dwarf_version >= 5)
10442 {
10443 gcc_assert (AT_loc_list (a)->num_assigned);
10444 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10445 dwarf_attr_name (a->dw_attr),
10446 sym);
10447 }
10448 else
10449 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10450 "%s", dwarf_attr_name (a->dw_attr));
10451 }
10452
10453 /* Output the offset into the debug_loc section. */
10454
10455 static void
10456 output_view_list_offset (dw_attr_node *a)
10457 {
10458 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10459
10460 gcc_assert (sym);
10461 if (dwarf_split_debug_info)
10462 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10463 "%s", dwarf_attr_name (a->dw_attr));
10464 else
10465 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10466 "%s", dwarf_attr_name (a->dw_attr));
10467 }
10468
10469 /* Output an attribute's index or value appropriately. */
10470
10471 static void
10472 output_attr_index_or_value (dw_attr_node *a)
10473 {
10474 const char *name = dwarf_attr_name (a->dw_attr);
10475
10476 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10477 {
10478 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10479 return;
10480 }
10481 switch (AT_class (a))
10482 {
10483 case dw_val_class_addr:
10484 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10485 break;
10486 case dw_val_class_high_pc:
10487 case dw_val_class_lbl_id:
10488 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10489 break;
10490 default:
10491 gcc_unreachable ();
10492 }
10493 }
10494
10495 /* Output a type signature. */
10496
10497 static inline void
10498 output_signature (const char *sig, const char *name)
10499 {
10500 int i;
10501
10502 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10503 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10504 }
10505
10506 /* Output a discriminant value. */
10507
10508 static inline void
10509 output_discr_value (dw_discr_value *discr_value, const char *name)
10510 {
10511 if (discr_value->pos)
10512 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10513 else
10514 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10515 }
10516
10517 /* Output the DIE and its attributes. Called recursively to generate
10518 the definitions of each child DIE. */
10519
10520 static void
10521 output_die (dw_die_ref die)
10522 {
10523 dw_attr_node *a;
10524 dw_die_ref c;
10525 unsigned long size;
10526 unsigned ix;
10527
10528 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10529 (unsigned long)die->die_offset,
10530 dwarf_tag_name (die->die_tag));
10531
10532 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10533 {
10534 const char *name = dwarf_attr_name (a->dw_attr);
10535
10536 switch (AT_class (a))
10537 {
10538 case dw_val_class_addr:
10539 output_attr_index_or_value (a);
10540 break;
10541
10542 case dw_val_class_offset:
10543 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10544 "%s", name);
10545 break;
10546
10547 case dw_val_class_range_list:
10548 output_range_list_offset (a);
10549 break;
10550
10551 case dw_val_class_loc:
10552 size = size_of_locs (AT_loc (a));
10553
10554 /* Output the block length for this list of location operations. */
10555 if (dwarf_version >= 4)
10556 dw2_asm_output_data_uleb128 (size, "%s", name);
10557 else
10558 dw2_asm_output_data (constant_size (size), size, "%s", name);
10559
10560 output_loc_sequence (AT_loc (a), -1);
10561 break;
10562
10563 case dw_val_class_const:
10564 /* ??? It would be slightly more efficient to use a scheme like the
10565 one used for unsigned constants below, but gdb 4.x does not sign
10566 extend. Gdb 5.x does sign extend. */
10567 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10568 break;
10569
10570 case dw_val_class_unsigned_const:
10571 {
10572 int csize = constant_size (AT_unsigned (a));
10573 if (dwarf_version == 3
10574 && a->dw_attr == DW_AT_data_member_location
10575 && csize >= 4)
10576 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10577 else
10578 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10579 }
10580 break;
10581
10582 case dw_val_class_symview:
10583 {
10584 int vsize;
10585 if (symview_upper_bound <= 0xff)
10586 vsize = 1;
10587 else if (symview_upper_bound <= 0xffff)
10588 vsize = 2;
10589 else if (symview_upper_bound <= 0xffffffff)
10590 vsize = 4;
10591 else
10592 vsize = 8;
10593 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10594 "%s", name);
10595 }
10596 break;
10597
10598 case dw_val_class_const_implicit:
10599 if (flag_debug_asm)
10600 fprintf (asm_out_file, "\t\t\t%s %s ("
10601 HOST_WIDE_INT_PRINT_DEC ")\n",
10602 ASM_COMMENT_START, name, AT_int (a));
10603 break;
10604
10605 case dw_val_class_unsigned_const_implicit:
10606 if (flag_debug_asm)
10607 fprintf (asm_out_file, "\t\t\t%s %s ("
10608 HOST_WIDE_INT_PRINT_HEX ")\n",
10609 ASM_COMMENT_START, name, AT_unsigned (a));
10610 break;
10611
10612 case dw_val_class_const_double:
10613 {
10614 unsigned HOST_WIDE_INT first, second;
10615
10616 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10617 dw2_asm_output_data (1,
10618 HOST_BITS_PER_DOUBLE_INT
10619 / HOST_BITS_PER_CHAR,
10620 NULL);
10621
10622 if (WORDS_BIG_ENDIAN)
10623 {
10624 first = a->dw_attr_val.v.val_double.high;
10625 second = a->dw_attr_val.v.val_double.low;
10626 }
10627 else
10628 {
10629 first = a->dw_attr_val.v.val_double.low;
10630 second = a->dw_attr_val.v.val_double.high;
10631 }
10632
10633 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10634 first, "%s", name);
10635 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10636 second, NULL);
10637 }
10638 break;
10639
10640 case dw_val_class_wide_int:
10641 {
10642 int i;
10643 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10644 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10645 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10646 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10647 * l, NULL);
10648
10649 if (WORDS_BIG_ENDIAN)
10650 for (i = len - 1; i >= 0; --i)
10651 {
10652 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10653 "%s", name);
10654 name = "";
10655 }
10656 else
10657 for (i = 0; i < len; ++i)
10658 {
10659 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10660 "%s", name);
10661 name = "";
10662 }
10663 }
10664 break;
10665
10666 case dw_val_class_vec:
10667 {
10668 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10669 unsigned int len = a->dw_attr_val.v.val_vec.length;
10670 unsigned int i;
10671 unsigned char *p;
10672
10673 dw2_asm_output_data (constant_size (len * elt_size),
10674 len * elt_size, "%s", name);
10675 if (elt_size > sizeof (HOST_WIDE_INT))
10676 {
10677 elt_size /= 2;
10678 len *= 2;
10679 }
10680 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10681 i < len;
10682 i++, p += elt_size)
10683 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10684 "fp or vector constant word %u", i);
10685 break;
10686 }
10687
10688 case dw_val_class_flag:
10689 if (dwarf_version >= 4)
10690 {
10691 /* Currently all add_AT_flag calls pass in 1 as last argument,
10692 so DW_FORM_flag_present can be used. If that ever changes,
10693 we'll need to use DW_FORM_flag and have some optimization
10694 in build_abbrev_table that will change those to
10695 DW_FORM_flag_present if it is set to 1 in all DIEs using
10696 the same abbrev entry. */
10697 gcc_assert (AT_flag (a) == 1);
10698 if (flag_debug_asm)
10699 fprintf (asm_out_file, "\t\t\t%s %s\n",
10700 ASM_COMMENT_START, name);
10701 break;
10702 }
10703 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10704 break;
10705
10706 case dw_val_class_loc_list:
10707 output_loc_list_offset (a);
10708 break;
10709
10710 case dw_val_class_view_list:
10711 output_view_list_offset (a);
10712 break;
10713
10714 case dw_val_class_die_ref:
10715 if (AT_ref_external (a))
10716 {
10717 if (AT_ref (a)->comdat_type_p)
10718 {
10719 comdat_type_node *type_node
10720 = AT_ref (a)->die_id.die_type_node;
10721
10722 gcc_assert (type_node);
10723 output_signature (type_node->signature, name);
10724 }
10725 else
10726 {
10727 const char *sym = AT_ref (a)->die_id.die_symbol;
10728 int size;
10729
10730 gcc_assert (sym);
10731 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10732 length, whereas in DWARF3 it's always sized as an
10733 offset. */
10734 if (dwarf_version == 2)
10735 size = DWARF2_ADDR_SIZE;
10736 else
10737 size = DWARF_OFFSET_SIZE;
10738 /* ??? We cannot unconditionally output die_offset if
10739 non-zero - others might create references to those
10740 DIEs via symbols.
10741 And we do not clear its DIE offset after outputting it
10742 (and the label refers to the actual DIEs, not to the
10743 DWARF CU unit header; only in the latter case would using
10744 label + offset be the correct thing to do).
10745 ??? This is the reason for the with_offset flag. */
10746 if (AT_ref (a)->with_offset)
10747 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10748 debug_info_section, "%s", name);
10749 else
10750 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10751 name);
10752 }
10753 }
10754 else
10755 {
10756 gcc_assert (AT_ref (a)->die_offset);
10757 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10758 "%s", name);
10759 }
10760 break;
10761
10762 case dw_val_class_fde_ref:
10763 {
10764 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10765
10766 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10767 a->dw_attr_val.v.val_fde_index * 2);
10768 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10769 "%s", name);
10770 }
10771 break;
10772
10773 case dw_val_class_vms_delta:
10774 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10775 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10776 AT_vms_delta2 (a), AT_vms_delta1 (a),
10777 "%s", name);
10778 #else
10779 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10780 AT_vms_delta2 (a), AT_vms_delta1 (a),
10781 "%s", name);
10782 #endif
10783 break;
10784
10785 case dw_val_class_lbl_id:
10786 output_attr_index_or_value (a);
10787 break;
10788
10789 case dw_val_class_lineptr:
10790 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10791 debug_line_section, "%s", name);
10792 break;
10793
10794 case dw_val_class_macptr:
10795 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10796 debug_macinfo_section, "%s", name);
10797 break;
10798
10799 case dw_val_class_loclistsptr:
10800 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10801 debug_loc_section, "%s", name);
10802 break;
10803
10804 case dw_val_class_str:
10805 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10806 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10807 a->dw_attr_val.v.val_str->label,
10808 debug_str_section,
10809 "%s: \"%s\"", name, AT_string (a));
10810 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10811 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10812 a->dw_attr_val.v.val_str->label,
10813 debug_line_str_section,
10814 "%s: \"%s\"", name, AT_string (a));
10815 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10816 dw2_asm_output_data_uleb128 (AT_index (a),
10817 "%s: \"%s\"", name, AT_string (a));
10818 else
10819 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10820 break;
10821
10822 case dw_val_class_file:
10823 {
10824 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10825
10826 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10827 a->dw_attr_val.v.val_file->filename);
10828 break;
10829 }
10830
10831 case dw_val_class_file_implicit:
10832 if (flag_debug_asm)
10833 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10834 ASM_COMMENT_START, name,
10835 maybe_emit_file (a->dw_attr_val.v.val_file),
10836 a->dw_attr_val.v.val_file->filename);
10837 break;
10838
10839 case dw_val_class_data8:
10840 {
10841 int i;
10842
10843 for (i = 0; i < 8; i++)
10844 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10845 i == 0 ? "%s" : NULL, name);
10846 break;
10847 }
10848
10849 case dw_val_class_high_pc:
10850 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10851 get_AT_low_pc (die), "DW_AT_high_pc");
10852 break;
10853
10854 case dw_val_class_discr_value:
10855 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10856 break;
10857
10858 case dw_val_class_discr_list:
10859 {
10860 dw_discr_list_ref list = AT_discr_list (a);
10861 const int size = size_of_discr_list (list);
10862
10863 /* This is a block, so output its length first. */
10864 dw2_asm_output_data (constant_size (size), size,
10865 "%s: block size", name);
10866
10867 for (; list != NULL; list = list->dw_discr_next)
10868 {
10869 /* One byte for the discriminant value descriptor, and then as
10870 many LEB128 numbers as required. */
10871 if (list->dw_discr_range)
10872 dw2_asm_output_data (1, DW_DSC_range,
10873 "%s: DW_DSC_range", name);
10874 else
10875 dw2_asm_output_data (1, DW_DSC_label,
10876 "%s: DW_DSC_label", name);
10877
10878 output_discr_value (&list->dw_discr_lower_bound, name);
10879 if (list->dw_discr_range)
10880 output_discr_value (&list->dw_discr_upper_bound, name);
10881 }
10882 break;
10883 }
10884
10885 default:
10886 gcc_unreachable ();
10887 }
10888 }
10889
10890 FOR_EACH_CHILD (die, c, output_die (c));
10891
10892 /* Add null byte to terminate sibling list. */
10893 if (die->die_child != NULL)
10894 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10895 (unsigned long) die->die_offset);
10896 }
10897
10898 /* Output the dwarf version number. */
10899
10900 static void
10901 output_dwarf_version ()
10902 {
10903 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10904 views in loclist. That will change eventually. */
10905 if (dwarf_version == 6)
10906 {
10907 static bool once;
10908 if (!once)
10909 {
10910 warning (0,
10911 "-gdwarf-6 is output as version 5 with incompatibilities");
10912 once = true;
10913 }
10914 dw2_asm_output_data (2, 5, "DWARF version number");
10915 }
10916 else
10917 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10918 }
10919
10920 /* Output the compilation unit that appears at the beginning of the
10921 .debug_info section, and precedes the DIE descriptions. */
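/* For reference, the header layout is:
   DWARF 2-4: unit length, version (2 bytes), .debug_abbrev offset,
   address size (1 byte).
   DWARF 5: unit length, version (2 bytes), unit type (1 byte),
   address size (1 byte), .debug_abbrev offset; for split (skeleton)
   compile units an 8-byte DWO id follows the header. */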
10922
10923 static void
10924 output_compilation_unit_header (enum dwarf_unit_type ut)
10925 {
10926 if (!XCOFF_DEBUGGING_INFO)
10927 {
10928 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10929 dw2_asm_output_data (4, 0xffffffff,
10930 "Initial length escape value indicating 64-bit DWARF extension");
10931 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10932 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10933 "Length of Compilation Unit Info");
10934 }
10935
10936 output_dwarf_version ();
10937 if (dwarf_version >= 5)
10938 {
10939 const char *name;
10940 switch (ut)
10941 {
10942 case DW_UT_compile: name = "DW_UT_compile"; break;
10943 case DW_UT_type: name = "DW_UT_type"; break;
10944 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10945 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10946 default: gcc_unreachable ();
10947 }
10948 dw2_asm_output_data (1, ut, "%s", name);
10949 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10950 }
10951 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10952 debug_abbrev_section,
10953 "Offset Into Abbrev. Section");
10954 if (dwarf_version < 5)
10955 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10956 }
10957
10958 /* Output the compilation unit DIE and its children. */
10959
10960 static void
10961 output_comp_unit (dw_die_ref die, int output_if_empty,
10962 const unsigned char *dwo_id)
10963 {
10964 const char *secname, *oldsym;
10965 char *tmp;
10966
10967 /* Unless we are outputting the main CU, we may throw away empty ones. */
10968 if (!output_if_empty && die->die_child == NULL)
10969 return;
10970
10971 /* Even if there are no children of this DIE, we must output the information
10972 about the compilation unit. Otherwise, on an empty translation unit, we
10973 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10974 will then complain when examining the file. First mark all the DIEs in
10975 this CU so we know which get local refs. */
10976 mark_dies (die);
10977
10978 external_ref_hash_type *extern_map = optimize_external_refs (die);
10979
10980 /* For now, optimize only the main CU; to optimize the rest
10981 we'd need to see all of them earlier. Leave the rest for post-linking
10982 tools like DWZ. */
10983 if (die == comp_unit_die ())
10984 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10985
10986 build_abbrev_table (die, extern_map);
10987
10988 optimize_abbrev_table ();
10989
10990 delete extern_map;
10991
10992 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10993 next_die_offset = (dwo_id
10994 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10995 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10996 calc_die_sizes (die);
10997
10998 oldsym = die->die_id.die_symbol;
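/* If this unit has a symbol and is a comdat type unit, emit it into a
   .gnu.linkonce.wi.<symbol> section of its own so the linker can
   discard duplicate copies; otherwise it goes into .debug_info proper. */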
10999 if (oldsym && die->comdat_type_p)
11000 {
11001 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11002
11003 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11004 secname = tmp;
11005 die->die_id.die_symbol = NULL;
11006 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11007 }
11008 else
11009 {
11010 switch_to_section (debug_info_section);
11011 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11012 info_section_emitted = true;
11013 }
11014
11015 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11016 debug info section, not on the CU DIE. */
11017 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11018 {
11019 /* ??? No way to get visibility assembled without a decl. */
11020 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11021 get_identifier (oldsym), char_type_node);
11022 TREE_PUBLIC (decl) = true;
11023 TREE_STATIC (decl) = true;
11024 DECL_ARTIFICIAL (decl) = true;
11025 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11026 DECL_VISIBILITY_SPECIFIED (decl) = true;
11027 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11028 #ifdef ASM_WEAKEN_LABEL
11029 /* We prefer a .weak because that handles duplicates from duplicate
11030 archive members in a graceful way. */
11031 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11032 #else
11033 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11034 #endif
11035 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11036 }
11037
11038 /* Output debugging information. */
11039 output_compilation_unit_header (dwo_id
11040 ? DW_UT_split_compile : DW_UT_compile);
11041 if (dwarf_version >= 5)
11042 {
11043 if (dwo_id != NULL)
11044 for (int i = 0; i < 8; i++)
11045 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11046 }
11047 output_die (die);
11048
11049 /* Leave the marks on the main CU, so we can check them in
11050 output_pubnames. */
11051 if (oldsym)
11052 {
11053 unmark_dies (die);
11054 die->die_id.die_symbol = oldsym;
11055 }
11056 }
11057
11058 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11059 and .debug_pubtypes. This is configured per-target, but can be
11060 overridden by the -gpubnames or -gno-pubnames options. */
11061
11062 static inline bool
11063 want_pubnames (void)
11064 {
11065 if (debug_info_level <= DINFO_LEVEL_TERSE)
11066 return false;
11067 if (debug_generate_pub_sections != -1)
11068 return debug_generate_pub_sections;
11069 return targetm.want_debug_pub_sections;
11070 }
11071
11072 /* Add the DW_AT_GNU_pubnames attribute when the pubnames table will be emitted. */
11073
11074 static void
11075 add_AT_pubnames (dw_die_ref die)
11076 {
11077 if (want_pubnames ())
11078 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11079 }
11080
11081 /* Add a string attribute value to a skeleton DIE. */
11082
11083 static inline void
11084 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11085 const char *str)
11086 {
11087 dw_attr_node attr;
11088 struct indirect_string_node *node;
11089
11090 if (! skeleton_debug_str_hash)
11091 skeleton_debug_str_hash
11092 = hash_table<indirect_string_hasher>::create_ggc (10);
11093
11094 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11095 find_string_form (node);
11096 if (node->form == dwarf_FORM (DW_FORM_strx))
11097 node->form = DW_FORM_strp;
11098
11099 attr.dw_attr = attr_kind;
11100 attr.dw_attr_val.val_class = dw_val_class_str;
11101 attr.dw_attr_val.val_entry = NULL;
11102 attr.dw_attr_val.v.val_str = node;
11103 add_dwarf_attr (die, &attr);
11104 }
11105
11106 /* Helper function to generate top-level dies for skeleton debug_info and
11107 debug_types. */
11108
11109 static void
11110 add_top_level_skeleton_die_attrs (dw_die_ref die)
11111 {
11112 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11113 const char *comp_dir = comp_dir_string ();
11114
11115 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11116 if (comp_dir != NULL)
11117 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11118 add_AT_pubnames (die);
11119 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11120 }
11121
11122 /* Output skeleton debug sections that point to the dwo file. */
11123
11124 static void
11125 output_skeleton_debug_sections (dw_die_ref comp_unit,
11126 const unsigned char *dwo_id)
11127 {
11128 /* These attributes will be found in the full debug_info section. */
11129 remove_AT (comp_unit, DW_AT_producer);
11130 remove_AT (comp_unit, DW_AT_language);
11131
11132 switch_to_section (debug_skeleton_info_section);
11133 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11134
11135 /* Produce the skeleton compilation-unit header. This one differs enough from
11136 a normal CU header that it's better not to call
11137 output_compilation_unit_header. */
11138 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11139 dw2_asm_output_data (4, 0xffffffff,
11140 "Initial length escape value indicating 64-bit "
11141 "DWARF extension");
11142
11143 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11144 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11145 - DWARF_INITIAL_LENGTH_SIZE
11146 + size_of_die (comp_unit),
11147 "Length of Compilation Unit Info");
11148 output_dwarf_version ();
11149 if (dwarf_version >= 5)
11150 {
11151 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11152 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11153 }
11154 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11155 debug_skeleton_abbrev_section,
11156 "Offset Into Abbrev. Section");
11157 if (dwarf_version < 5)
11158 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11159 else
11160 for (int i = 0; i < 8; i++)
11161 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11162
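/* The skeleton CU uses a fixed abbrev code (SKELETON_COMP_DIE_ABBREV);
   the matching abbrev table is emitted just below. */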
11163 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11164 output_die (comp_unit);
11165
11166 /* Build the skeleton debug_abbrev section. */
11167 switch_to_section (debug_skeleton_abbrev_section);
11168 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11169
11170 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11171
11172 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11173 }
11174
11175 /* Output a comdat type unit DIE and its children. */
11176
11177 static void
11178 output_comdat_type_unit (comdat_type_node *node)
11179 {
11180 const char *secname;
11181 char *tmp;
11182 int i;
11183 #if defined (OBJECT_FORMAT_ELF)
11184 tree comdat_key;
11185 #endif
11186
11187 /* First mark all the DIEs in this CU so we know which get local refs. */
11188 mark_dies (node->root_die);
11189
11190 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11191
11192 build_abbrev_table (node->root_die, extern_map);
11193
11194 delete extern_map;
11195 extern_map = NULL;
11196
11197 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11198 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11199 calc_die_sizes (node->root_die);
11200
11201 #if defined (OBJECT_FORMAT_ELF)
11202 if (dwarf_version >= 5)
11203 {
11204 if (!dwarf_split_debug_info)
11205 secname = ".debug_info";
11206 else
11207 secname = ".debug_info.dwo";
11208 }
11209 else if (!dwarf_split_debug_info)
11210 secname = ".debug_types";
11211 else
11212 secname = ".debug_types.dwo";
11213
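/* The comdat key is the type signature in hex, prefixed with "wi." for
   DWARF 5 (type units live in .debug_info) or "wt." for earlier
   versions (.debug_types), so identical type units coming from
   different object files are merged by the linker. */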
11214 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11215 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11216 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11217 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11218 comdat_key = get_identifier (tmp);
11219 targetm.asm_out.named_section (secname,
11220 SECTION_DEBUG | SECTION_LINKONCE,
11221 comdat_key);
11222 #else
11223 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11224 sprintf (tmp, (dwarf_version >= 5
11225 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11226 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11227 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11228 secname = tmp;
11229 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11230 #endif
11231
11232 /* Output debugging information. */
11233 output_compilation_unit_header (dwarf_split_debug_info
11234 ? DW_UT_split_type : DW_UT_type);
11235 output_signature (node->signature, "Type Signature");
11236 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11237 "Offset to Type DIE");
11238 output_die (node->root_die);
11239
11240 unmark_dies (node->root_die);
11241 }
11242
11243 /* Return the DWARF2/3 pubname associated with a decl. */
11244
11245 static const char *
11246 dwarf2_name (tree decl, int scope)
11247 {
11248 if (DECL_NAMELESS (decl))
11249 return NULL;
11250 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11251 }
11252
11253 /* Add a new entry to .debug_pubnames if appropriate. */
11254
11255 static void
11256 add_pubname_string (const char *str, dw_die_ref die)
11257 {
11258 pubname_entry e;
11259
11260 e.die = die;
11261 e.name = xstrdup (str);
11262 vec_safe_push (pubname_table, e);
11263 }
11264
11265 static void
11266 add_pubname (tree decl, dw_die_ref die)
11267 {
11268 if (!want_pubnames ())
11269 return;
11270
11271 /* Don't add items to the table when we expect that the consumer will have
11272 just read the enclosing die. For example, if the consumer is looking at a
11273 class_member, it will either be inside the class already, or will have just
11274 looked up the class to find the member. Either way, searching the class is
11275 faster than searching the index. */
11276 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11277 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11278 {
11279 const char *name = dwarf2_name (decl, 1);
11280
11281 if (name)
11282 add_pubname_string (name, die);
11283 }
11284 }
11285
11286 /* Add an enumerator to the pubnames section. */
11287
11288 static void
11289 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11290 {
11291 pubname_entry e;
11292
11293 gcc_assert (scope_name);
11294 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11295 e.die = die;
11296 vec_safe_push (pubname_table, e);
11297 }
11298
11299 /* Add a new entry to .debug_pubtypes if appropriate. */
11300
11301 static void
11302 add_pubtype (tree decl, dw_die_ref die)
11303 {
11304 pubname_entry e;
11305
11306 if (!want_pubnames ())
11307 return;
11308
11309 if ((TREE_PUBLIC (decl)
11310 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11311 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11312 {
11313 tree scope = NULL;
11314 const char *scope_name = "";
11315 const char *sep = is_cxx () ? "::" : ".";
11316 const char *name;
11317
11318 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11319 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11320 {
11321 scope_name = lang_hooks.dwarf_name (scope, 1);
11322 if (scope_name != NULL && scope_name[0] != '\0')
11323 scope_name = concat (scope_name, sep, NULL);
11324 else
11325 scope_name = "";
11326 }
11327
11328 if (TYPE_P (decl))
11329 name = type_tag (decl);
11330 else
11331 name = lang_hooks.dwarf_name (decl, 1);
11332
11333 /* If we don't have a name for the type, there's no point in adding
11334 it to the table. */
11335 if (name != NULL && name[0] != '\0')
11336 {
11337 e.die = die;
11338 e.name = concat (scope_name, name, NULL);
11339 vec_safe_push (pubtype_table, e);
11340 }
11341
11342 /* Although it might be more consistent to add the pubinfo for the
11343 enumerators as their dies are created, they should only be added if the
11344 enum type meets the criteria above. So rather than re-check the parent
11345 enum type whenever an enumerator die is created, just output them all
11346 here. This isn't protected by the name conditional because anonymous
11347 enums don't have names. */
11348 if (die->die_tag == DW_TAG_enumeration_type)
11349 {
11350 dw_die_ref c;
11351
11352 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11353 }
11354 }
11355 }
11356
11357 /* Output a single entry in the pubnames table. */
11358
11359 static void
11360 output_pubname (dw_offset die_offset, pubname_entry *entry)
11361 {
11362 dw_die_ref die = entry->die;
11363 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11364
11365 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11366
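/* A value of 2 (set by -ggnu-pubnames) requests the GNU flavor of the
   tables, where each entry carries an extra flag byte describing the
   symbol kind, as consumed when building a gdb index. */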
11367 if (debug_generate_pub_sections == 2)
11368 {
11369 /* This logic follows gdb's method for determining the value of the flag
11370 byte. */
11371 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11372 switch (die->die_tag)
11373 {
11374 case DW_TAG_typedef:
11375 case DW_TAG_base_type:
11376 case DW_TAG_subrange_type:
11377 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11378 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11379 break;
11380 case DW_TAG_enumerator:
11381 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11382 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11383 if (!is_cxx ())
11384 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11385 break;
11386 case DW_TAG_subprogram:
11387 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11388 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11389 if (!is_ada ())
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11391 break;
11392 case DW_TAG_constant:
11393 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11394 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11395 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11396 break;
11397 case DW_TAG_variable:
11398 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11399 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11400 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11401 break;
11402 case DW_TAG_namespace:
11403 case DW_TAG_imported_declaration:
11404 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11405 break;
11406 case DW_TAG_class_type:
11407 case DW_TAG_interface_type:
11408 case DW_TAG_structure_type:
11409 case DW_TAG_union_type:
11410 case DW_TAG_enumeration_type:
11411 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11412 if (!is_cxx ())
11413 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11414 break;
11415 default:
11416 /* An unusual tag. Leave the flag-byte empty. */
11417 break;
11418 }
11419 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11420 "GDB-index flags");
11421 }
11422
11423 dw2_asm_output_nstring (entry->name, -1, "external name");
11424 }
11425
11426
11427 /* Output the public names table used to speed up access to externally
11428 visible names; or the public types table used to find type definitions. */
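/* The table layout is: a length field, a 2-byte version (2), the offset
   and length of the associated CU in .debug_info, then a series of
   (DIE offset, name string) pairs, terminated by a zero offset. */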
11429
11430 static void
11431 output_pubnames (vec<pubname_entry, va_gc> *names)
11432 {
11433 unsigned i;
11434 unsigned long pubnames_length = size_of_pubnames (names);
11435 pubname_entry *pub;
11436
11437 if (!XCOFF_DEBUGGING_INFO)
11438 {
11439 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11440 dw2_asm_output_data (4, 0xffffffff,
11441 "Initial length escape value indicating 64-bit DWARF extension");
11442 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11443 "Pub Info Length");
11444 }
11445
11446 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11447 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11448
11449 if (dwarf_split_debug_info)
11450 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11451 debug_skeleton_info_section,
11452 "Offset of Compilation Unit Info");
11453 else
11454 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11455 debug_info_section,
11456 "Offset of Compilation Unit Info");
11457 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11458 "Compilation Unit Length");
11459
11460 FOR_EACH_VEC_ELT (*names, i, pub)
11461 {
11462 if (include_pubname_in_output (names, pub))
11463 {
11464 dw_offset die_offset = pub->die->die_offset;
11465
11466 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11467 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11468 gcc_assert (pub->die->die_mark);
11469
11470 /* If we're putting types in their own .debug_types sections,
11471 the .debug_pubtypes table will still point to the compile
11472 unit (not the type unit), so we want to use the offset of
11473 the skeleton DIE (if there is one). */
11474 if (pub->die->comdat_type_p && names == pubtype_table)
11475 {
11476 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11477
11478 if (type_node != NULL)
11479 die_offset = (type_node->skeleton_die != NULL
11480 ? type_node->skeleton_die->die_offset
11481 : comp_unit_die ()->die_offset);
11482 }
11483
11484 output_pubname (die_offset, pub);
11485 }
11486 }
11487
11488 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11489 }
11490
11491 /* Output public names and types tables if necessary. */
11492
11493 static void
11494 output_pubtables (void)
11495 {
11496 if (!want_pubnames () || !info_section_emitted)
11497 return;
11498
11499 switch_to_section (debug_pubnames_section);
11500 output_pubnames (pubname_table);
11501 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11502 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11503 simply won't look for the section. */
11504 switch_to_section (debug_pubtypes_section);
11505 output_pubnames (pubtype_table);
11506 }
11507
11508
11509 /* Output the information that goes into the .debug_aranges table.
11510 Namely, define the beginning and ending address range of the
11511 text section generated for this compilation unit. */
11512
11513 static void
11514 output_aranges (void)
11515 {
11516 unsigned i;
11517 unsigned long aranges_length = size_of_aranges ();
11518
11519 if (!XCOFF_DEBUGGING_INFO)
11520 {
11521 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11522 dw2_asm_output_data (4, 0xffffffff,
11523 "Initial length escape value indicating 64-bit DWARF extension");
11524 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11525 "Length of Address Ranges Info");
11526 }
11527
11528 /* Version number for aranges is still 2, even up to DWARF5. */
11529 dw2_asm_output_data (2, 2, "DWARF aranges version");
11530 if (dwarf_split_debug_info)
11531 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11532 debug_skeleton_info_section,
11533 "Offset of Compilation Unit Info");
11534 else
11535 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11536 debug_info_section,
11537 "Offset of Compilation Unit Info");
11538 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11539 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11540
11541 /* We need to align to twice the pointer size here. */
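/* The DWARF spec requires the first address/length tuple to start at an
   offset that is a multiple of the tuple size, i.e. twice the address
   size, hence the padding below. */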
11542 if (DWARF_ARANGES_PAD_SIZE)
11543 {
11544 /* Pad using 2-byte words so that the padding is correct for any
11545 pointer size. */
11546 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11547 2 * DWARF2_ADDR_SIZE);
11548 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11549 dw2_asm_output_data (2, 0, NULL);
11550 }
11551
11552 /* It is necessary not to output these entries if the sections were
11553 not used; otherwise the length will be 0 and
11554 the address may end up as 0 if the section is discarded by ld
11555 --gc-sections, leaving an invalid (0, 0) entry that can be
11556 confused with the terminator. */
11557 if (text_section_used)
11558 {
11559 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11560 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11561 text_section_label, "Length");
11562 }
11563 if (cold_text_section_used)
11564 {
11565 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11566 "Address");
11567 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11568 cold_text_section_label, "Length");
11569 }
11570
11571 if (have_multiple_function_sections)
11572 {
11573 unsigned fde_idx;
11574 dw_fde_ref fde;
11575
11576 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11577 {
11578 if (DECL_IGNORED_P (fde->decl))
11579 continue;
11580 if (!fde->in_std_section)
11581 {
11582 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11583 "Address");
11584 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11585 fde->dw_fde_begin, "Length");
11586 }
11587 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11588 {
11589 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11590 "Address");
11591 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11592 fde->dw_fde_second_begin, "Length");
11593 }
11594 }
11595 }
11596
11597 /* Output the terminator words. */
11598 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11599 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11600 }
11601
11602 /* Add a new entry to .debug_ranges. Return its index into
11603 ranges_table vector. */
11604
11605 static unsigned int
11606 add_ranges_num (int num, bool maybe_new_sec)
11607 {
11608 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11609 vec_safe_push (ranges_table, r);
11610 return vec_safe_length (ranges_table) - 1;
11611 }
11612
11613 /* Add a new entry to .debug_ranges corresponding to a block, or a
11614 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11615 this entry might be in a different section from previous range. */
11616
11617 static unsigned int
11618 add_ranges (const_tree block, bool maybe_new_sec)
11619 {
11620 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11621 }
11622
11623 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11624 chain, or a middle entry of a chain that will be directly referred to. */
11625
11626 static void
11627 note_rnglist_head (unsigned int offset)
11628 {
11629 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11630 return;
11631 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11632 }
11633
11634 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11635 When using dwarf_split_debug_info, address attributes in dies destined
11636 for the final executable should be direct references--setting the
11637 parameter force_direct ensures this behavior. */
11638
11639 static void
11640 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11641 bool *added, bool force_direct)
11642 {
11643 unsigned int in_use = vec_safe_length (ranges_by_label);
11644 unsigned int offset;
11645 dw_ranges_by_label rbl = { begin, end };
11646 vec_safe_push (ranges_by_label, rbl);
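/* Label-based ranges are encoded as negative numbers in ranges_table:
   entry K of ranges_by_label is stored as -K-1, so it cannot collide
   with (positive) BLOCK numbers. */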
11647 offset = add_ranges_num (-(int)in_use - 1, true);
11648 if (!*added)
11649 {
11650 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11651 *added = true;
11652 note_rnglist_head (offset);
11653 }
11654 }
11655
11656 /* Emit .debug_ranges section. */
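/* Each list is a series of (begin, end) address pairs, and a (0, 0)
   pair terminates a list. With a single text section the pairs are
   emitted as offsets from the text section label (the CU base
   address); otherwise they are emitted as absolute addresses. */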
11657
11658 static void
11659 output_ranges (void)
11660 {
11661 unsigned i;
11662 static const char *const start_fmt = "Offset %#x";
11663 const char *fmt = start_fmt;
11664 dw_ranges *r;
11665
11666 switch_to_section (debug_ranges_section);
11667 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11668 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11669 {
11670 int block_num = r->num;
11671
11672 if (block_num > 0)
11673 {
11674 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11675 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11676
11677 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11678 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11679
11680 /* If all code is in the text section, then the compilation
11681 unit base address defaults to DW_AT_low_pc, which is the
11682 base of the text section. */
11683 if (!have_multiple_function_sections)
11684 {
11685 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11686 text_section_label,
11687 fmt, i * 2 * DWARF2_ADDR_SIZE);
11688 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11689 text_section_label, NULL);
11690 }
11691
11692 /* Otherwise, the compilation unit base address is zero,
11693 which allows us to use absolute addresses, and not worry
11694 about whether the target supports cross-section
11695 arithmetic. */
11696 else
11697 {
11698 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11699 fmt, i * 2 * DWARF2_ADDR_SIZE);
11700 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11701 }
11702
11703 fmt = NULL;
11704 }
11705
11706 /* Negative block_num stands for an index into ranges_by_label. */
11707 else if (block_num < 0)
11708 {
11709 int lab_idx = - block_num - 1;
11710
11711 if (!have_multiple_function_sections)
11712 {
11713 gcc_unreachable ();
11714 #if 0
11715 /* If we ever use add_ranges_by_labels () for a single
11716 function section, all we have to do is to take out
11717 the #if 0 above. */
11718 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11719 (*ranges_by_label)[lab_idx].begin,
11720 text_section_label,
11721 fmt, i * 2 * DWARF2_ADDR_SIZE);
11722 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11723 (*ranges_by_label)[lab_idx].end,
11724 text_section_label, NULL);
11725 #endif
11726 }
11727 else
11728 {
11729 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11730 (*ranges_by_label)[lab_idx].begin,
11731 fmt, i * 2 * DWARF2_ADDR_SIZE);
11732 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11733 (*ranges_by_label)[lab_idx].end,
11734 NULL);
11735 }
11736 }
11737 else
11738 {
11739 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11740 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11741 fmt = start_fmt;
11742 }
11743 }
11744 }
11745
11746 /* Non-zero if .debug_line_str should be used for .debug_line section
11747 strings or strings that are likely shareable with those. */
11748 #define DWARF5_USE_DEBUG_LINE_STR \
11749 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11750 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11751 /* FIXME: there is no .debug_line_str.dwo section, \
11752 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11753 && !dwarf_split_debug_info)
11754
11755 /* Assign .debug_rnglists indexes. */
11756
11757 static void
11758 index_rnglists (void)
11759 {
11760 unsigned i;
11761 dw_ranges *r;
11762
11763 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11764 if (r->label)
11765 r->idx = rnglist_idx++;
11766 }
11767
11768 /* Emit .debug_rnglists section. */
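/* The section starts with a header (length, version, address size,
   segment selector size, offset entry count), followed for
   -gsplit-dwarf by a table of offsets to the individual lists, then
   the lists themselves, each terminated by DW_RLE_end_of_list. */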
11769
11770 static void
11771 output_rnglists (unsigned generation)
11772 {
11773 unsigned i;
11774 dw_ranges *r;
11775 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11776 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11777 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11778
11779 switch_to_section (debug_ranges_section);
11780 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11781 /* There are up to 4 unique ranges labels per generation.
11782 See also init_sections_and_labels. */
11783 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11784 2 + generation * 4);
11785 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11786 3 + generation * 4);
11787 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11788 dw2_asm_output_data (4, 0xffffffff,
11789 "Initial length escape value indicating "
11790 "64-bit DWARF extension");
11791 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11792 "Length of Range Lists");
11793 ASM_OUTPUT_LABEL (asm_out_file, l1);
11794 output_dwarf_version ();
11795 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11796 dw2_asm_output_data (1, 0, "Segment Size");
11797 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11798 about relocation sizes and primarily care about the size of .debug*
11799 sections in linked shared libraries and executables, then
11800 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11801 into it are usually larger than just DW_FORM_sec_offset offsets
11802 into the .debug_rnglists section. */
11803 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11804 "Offset Entry Count");
11805 if (dwarf_split_debug_info)
11806 {
11807 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11808 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11809 if (r->label)
11810 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11811 ranges_base_label, NULL);
11812 }
11813
11814 const char *lab = "";
11815 unsigned int len = vec_safe_length (ranges_table);
11816 const char *base = NULL;
11817 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11818 {
11819 int block_num = r->num;
11820
11821 if (r->label)
11822 {
11823 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11824 lab = r->label;
11825 }
11826 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11827 base = NULL;
11828 if (block_num > 0)
11829 {
11830 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11831 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11832
11833 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11834 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11835
11836 if (HAVE_AS_LEB128)
11837 {
11838 /* If all code is in the text section, then the compilation
11839 unit base address defaults to DW_AT_low_pc, which is the
11840 base of the text section. */
11841 if (!have_multiple_function_sections)
11842 {
11843 dw2_asm_output_data (1, DW_RLE_offset_pair,
11844 "DW_RLE_offset_pair (%s)", lab);
11845 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11846 "Range begin address (%s)", lab);
11847 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11848 "Range end address (%s)", lab);
11849 continue;
11850 }
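/* No usable base address yet: if the next entry continues this list in
   the same section, emit a DW_RLE_base_address now so that this and the
   following entries can use the compact DW_RLE_offset_pair form;
   otherwise fall back to DW_RLE_start_length below. */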
11851 if (base == NULL)
11852 {
11853 dw_ranges *r2 = NULL;
11854 if (i < len - 1)
11855 r2 = &(*ranges_table)[i + 1];
11856 if (r2
11857 && r2->num != 0
11858 && r2->label == NULL
11859 && !r2->maybe_new_sec)
11860 {
11861 dw2_asm_output_data (1, DW_RLE_base_address,
11862 "DW_RLE_base_address (%s)", lab);
11863 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11864 "Base address (%s)", lab);
11865 strcpy (basebuf, blabel);
11866 base = basebuf;
11867 }
11868 }
11869 if (base)
11870 {
11871 dw2_asm_output_data (1, DW_RLE_offset_pair,
11872 "DW_RLE_offset_pair (%s)", lab);
11873 dw2_asm_output_delta_uleb128 (blabel, base,
11874 "Range begin address (%s)", lab);
11875 dw2_asm_output_delta_uleb128 (elabel, base,
11876 "Range end address (%s)", lab);
11877 continue;
11878 }
11879 dw2_asm_output_data (1, DW_RLE_start_length,
11880 "DW_RLE_start_length (%s)", lab);
11881 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11882 "Range begin address (%s)", lab);
11883 dw2_asm_output_delta_uleb128 (elabel, blabel,
11884 "Range length (%s)", lab);
11885 }
11886 else
11887 {
11888 dw2_asm_output_data (1, DW_RLE_start_end,
11889 "DW_RLE_start_end (%s)", lab);
11890 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11891 "Range begin address (%s)", lab);
11892 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11893 "Range end address (%s)", lab);
11894 }
11895 }
11896
11897 /* Negative block_num stands for an index into ranges_by_label. */
11898 else if (block_num < 0)
11899 {
11900 int lab_idx = - block_num - 1;
11901 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11902 const char *elabel = (*ranges_by_label)[lab_idx].end;
11903
11904 if (!have_multiple_function_sections)
11905 gcc_unreachable ();
11906 if (HAVE_AS_LEB128)
11907 {
11908 dw2_asm_output_data (1, DW_RLE_start_length,
11909 "DW_RLE_start_length (%s)", lab);
11910 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11911 "Range begin address (%s)", lab);
11912 dw2_asm_output_delta_uleb128 (elabel, blabel,
11913 "Range length (%s)", lab);
11914 }
11915 else
11916 {
11917 dw2_asm_output_data (1, DW_RLE_start_end,
11918 "DW_RLE_start_end (%s)", lab);
11919 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11920 "Range begin address (%s)", lab);
11921 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11922 "Range end address (%s)", lab);
11923 }
11924 }
11925 else
11926 dw2_asm_output_data (1, DW_RLE_end_of_list,
11927 "DW_RLE_end_of_list (%s)", lab);
11928 }
11929 ASM_OUTPUT_LABEL (asm_out_file, l2);
11930 }
11931
11932 /* Data structure containing information about input files. */
11933 struct file_info
11934 {
11935 const char *path; /* Complete file name. */
11936 const char *fname; /* File name part. */
11937 int length; /* Length of entire string. */
11938 struct dwarf_file_data * file_idx; /* Index in input file table. */
11939 int dir_idx; /* Index in directory table. */
11940 };
11941
11942 /* Data structure containing information about directories with source
11943 files. */
11944 struct dir_info
11945 {
11946 const char *path; /* Path including directory name. */
11947 int length; /* Path length. */
11948 int prefix; /* Index of directory entry which is a prefix. */
11949 int count; /* Number of files in this directory. */
11950 int dir_idx; /* Index of directory used as base. */
11951 };
11952
11953 /* Callback function for file_info comparison. We sort by looking at
11954 the directories in the path. */
11955
11956 static int
11957 file_info_cmp (const void *p1, const void *p2)
11958 {
11959 const struct file_info *const s1 = (const struct file_info *) p1;
11960 const struct file_info *const s2 = (const struct file_info *) p2;
11961 const unsigned char *cp1;
11962 const unsigned char *cp2;
11963
11964 /* Take care of file names without directories. We need to make sure that
11965 we return consistent values to qsort since some implementations will get
11966 confused if we return values of the same sign when the same two operands
11967 are passed in opposite orders. So if neither has a directory, return 0,
11968 and otherwise return 1 or -1 depending on which one has the directory.
11969 We want the one with the directory to sort after the one without, so all
11970 no-directory files are at the start (normally only the compilation unit file). */
11971 if ((s1->path == s1->fname || s2->path == s2->fname))
11972 return (s2->path == s2->fname) - (s1->path == s1->fname);
11973
11974 cp1 = (const unsigned char *) s1->path;
11975 cp2 = (const unsigned char *) s2->path;
11976
11977 while (1)
11978 {
11979 ++cp1;
11980 ++cp2;
11981 /* Reached the end of the first path? If so, handle like above,
11982 but now we want longer directory prefixes before shorter ones. */
11983 if ((cp1 == (const unsigned char *) s1->fname)
11984 || (cp2 == (const unsigned char *) s2->fname))
11985 return ((cp1 == (const unsigned char *) s1->fname)
11986 - (cp2 == (const unsigned char *) s2->fname));
11987
11988 /* Character of current path component the same? */
11989 else if (*cp1 != *cp2)
11990 return *cp1 - *cp2;
11991 }
11992 }
11993
11994 struct file_name_acquire_data
11995 {
11996 struct file_info *files;
11997 int used_files;
11998 int max_files;
11999 };
12000
12001 /* Traversal function for the hash table. */
12002
12003 int
12004 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12005 {
12006 struct dwarf_file_data *d = *slot;
12007 struct file_info *fi;
12008 const char *f;
12009
12010 gcc_assert (fnad->max_files >= d->emitted_number);
12011
12012 if (! d->emitted_number)
12013 return 1;
12014
12015 gcc_assert (fnad->max_files != fnad->used_files);
12016
12017 fi = fnad->files + fnad->used_files++;
12018
12019 /* Skip all leading "./". */
12020 f = d->filename;
12021 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12022 f += 2;
12023
12024 /* Create a new array entry. */
12025 fi->path = f;
12026 fi->length = strlen (f);
12027 fi->file_idx = d;
12028
12029 /* Search for the file name part. */
12030 f = strrchr (f, DIR_SEPARATOR);
12031 #if defined (DIR_SEPARATOR_2)
12032 {
12033 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12034
12035 if (g != NULL)
12036 {
12037 if (f == NULL || f < g)
12038 f = g;
12039 }
12040 }
12041 #endif
12042
12043 fi->fname = f == NULL ? fi->path : f + 1;
12044 return 1;
12045 }
12046
12047 /* Helper function for output_file_names. Emit a FORM-encoded
12048 string STR, labelling it in the assembly comment with ENTRY_KIND
12049 and index IDX. */
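/* Depending on FORM the string is either emitted inline
   (DW_FORM_string) or interned in .debug_line_str and referenced by
   offset (DW_FORM_line_strp). */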
12050
12051 static void
12052 output_line_string (enum dwarf_form form, const char *str,
12053 const char *entry_kind, unsigned int idx)
12054 {
12055 switch (form)
12056 {
12057 case DW_FORM_string:
12058 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12059 break;
12060 case DW_FORM_line_strp:
12061 if (!debug_line_str_hash)
12062 debug_line_str_hash
12063 = hash_table<indirect_string_hasher>::create_ggc (10);
12064
12065 struct indirect_string_node *node;
12066 node = find_AT_string_in_table (str, debug_line_str_hash);
12067 set_indirect_string (node);
12068 node->form = form;
12069 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12070 debug_line_str_section, "%s: %#x: \"%s\"",
12071 entry_kind, 0, node->str);
12072 break;
12073 default:
12074 gcc_unreachable ();
12075 }
12076 }
12077
12078 /* Output the directory table and the file name table. We try to minimize
12079 the total amount of memory needed. A heuristic is used to avoid large
12080 slowdowns with many input files. */
12081
12082 static void
12083 output_file_names (void)
12084 {
12085 struct file_name_acquire_data fnad;
12086 int numfiles;
12087 struct file_info *files;
12088 struct dir_info *dirs;
12089 int *saved;
12090 int *savehere;
12091 int *backmap;
12092 int ndirs;
12093 int idx_offset;
12094 int i;
12095
12096 if (!last_emitted_file)
12097 {
12098 if (dwarf_version >= 5)
12099 {
12100 dw2_asm_output_data (1, 0, "Directory entry format count");
12101 dw2_asm_output_data_uleb128 (0, "Directories count");
12102 dw2_asm_output_data (1, 0, "File name entry format count");
12103 dw2_asm_output_data_uleb128 (0, "File names count");
12104 }
12105 else
12106 {
12107 dw2_asm_output_data (1, 0, "End directory table");
12108 dw2_asm_output_data (1, 0, "End file name table");
12109 }
12110 return;
12111 }
12112
12113 numfiles = last_emitted_file->emitted_number;
12114
12115 /* Allocate the various arrays we need. */
12116 files = XALLOCAVEC (struct file_info, numfiles);
12117 dirs = XALLOCAVEC (struct dir_info, numfiles);
12118
12119 fnad.files = files;
12120 fnad.used_files = 0;
12121 fnad.max_files = numfiles;
12122 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12123 gcc_assert (fnad.used_files == fnad.max_files);
12124
12125 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12126
12127 /* Find all the different directories used. */
12128 dirs[0].path = files[0].path;
12129 dirs[0].length = files[0].fname - files[0].path;
12130 dirs[0].prefix = -1;
12131 dirs[0].count = 1;
12132 dirs[0].dir_idx = 0;
12133 files[0].dir_idx = 0;
12134 ndirs = 1;
12135
12136 for (i = 1; i < numfiles; i++)
12137 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12138 && memcmp (dirs[ndirs - 1].path, files[i].path,
12139 dirs[ndirs - 1].length) == 0)
12140 {
12141 /* Same directory as last entry. */
12142 files[i].dir_idx = ndirs - 1;
12143 ++dirs[ndirs - 1].count;
12144 }
12145 else
12146 {
12147 int j;
12148
12149 /* This is a new directory. */
12150 dirs[ndirs].path = files[i].path;
12151 dirs[ndirs].length = files[i].fname - files[i].path;
12152 dirs[ndirs].count = 1;
12153 dirs[ndirs].dir_idx = ndirs;
12154 files[i].dir_idx = ndirs;
12155
12156 /* Search for a prefix. */
12157 dirs[ndirs].prefix = -1;
12158 for (j = 0; j < ndirs; j++)
12159 if (dirs[j].length < dirs[ndirs].length
12160 && dirs[j].length > 1
12161 && (dirs[ndirs].prefix == -1
12162 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12163 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12164 dirs[ndirs].prefix = j;
12165
12166 ++ndirs;
12167 }
12168
12169 /* Now to the actual work. We have to find a subset of the directories which
12170 allows expressing the file names using references to the directory table
12171 with the fewest characters. We do not do an exhaustive search
12172 where we would have to check every combination of every single
12173 possible prefix. Instead we use a heuristic which provides nearly optimal
12174 results in most cases and is never far off. */
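/* For example (illustrative only): with directories "/a/", "/a/b/" and
   "/a/b/c/", making "/a/b/" a base lets every file under "/a/b/" and
   "/a/b/c/" drop that prefix, which pays off once the combined savings
   exceed the cost of emitting the "/a/b/" directory entry itself. */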
12175 saved = XALLOCAVEC (int, ndirs);
12176 savehere = XALLOCAVEC (int, ndirs);
12177
12178 memset (saved, '\0', ndirs * sizeof (saved[0]));
12179 for (i = 0; i < ndirs; i++)
12180 {
12181 int j;
12182 int total;
12183
12184 /* We can always save some space for the current directory. But this
12185 does not mean it will be enough to justify adding the directory. */
12186 savehere[i] = dirs[i].length;
12187 total = (savehere[i] - saved[i]) * dirs[i].count;
12188
12189 for (j = i + 1; j < ndirs; j++)
12190 {
12191 savehere[j] = 0;
12192 if (saved[j] < dirs[i].length)
12193 {
12194 /* Determine whether the dirs[i] path is a prefix of the
12195 dirs[j] path. */
12196 int k;
12197
12198 k = dirs[j].prefix;
12199 while (k != -1 && k != (int) i)
12200 k = dirs[k].prefix;
12201
12202 if (k == (int) i)
12203 {
12204 /* Yes it is. We can possibly save some memory by
12205 writing the filenames in dirs[j] relative to
12206 dirs[i]. */
12207 savehere[j] = dirs[i].length;
12208 total += (savehere[j] - saved[j]) * dirs[j].count;
12209 }
12210 }
12211 }
12212
12213 /* Check whether we can save enough to justify adding the dirs[i]
12214 directory. */
12215 if (total > dirs[i].length + 1)
12216 {
12217 /* It's worthwhile adding. */
12218 for (j = i; j < ndirs; j++)
12219 if (savehere[j] > 0)
12220 {
12221 /* Remember how much we saved for this directory so far. */
12222 saved[j] = savehere[j];
12223
12224 /* Remember the prefix directory. */
12225 dirs[j].dir_idx = i;
12226 }
12227 }
12228 }
12229
12230 /* Emit the directory name table. */
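/* Directory index 0 is reserved for the compilation directory, so if
   the first (shortest) directory entry is a real one all emitted
   indexes are shifted up by one; if it is the empty "no directory"
   entry it is simply skipped. */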
12231 idx_offset = dirs[0].length > 0 ? 1 : 0;
12232 enum dwarf_form str_form = DW_FORM_string;
12233 enum dwarf_form idx_form = DW_FORM_udata;
12234 if (dwarf_version >= 5)
12235 {
12236 const char *comp_dir = comp_dir_string ();
12237 if (comp_dir == NULL)
12238 comp_dir = "";
12239 dw2_asm_output_data (1, 1, "Directory entry format count");
12240 if (DWARF5_USE_DEBUG_LINE_STR)
12241 str_form = DW_FORM_line_strp;
12242 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12243 dw2_asm_output_data_uleb128 (str_form, "%s",
12244 get_DW_FORM_name (str_form));
12245 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12246 if (str_form == DW_FORM_string)
12247 {
12248 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12249 for (i = 1 - idx_offset; i < ndirs; i++)
12250 dw2_asm_output_nstring (dirs[i].path,
12251 dirs[i].length
12252 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12253 "Directory Entry: %#x", i + idx_offset);
12254 }
12255 else
12256 {
12257 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12258 for (i = 1 - idx_offset; i < ndirs; i++)
12259 {
12260 const char *str
12261 = ggc_alloc_string (dirs[i].path,
12262 dirs[i].length
12263 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12264 output_line_string (str_form, str, "Directory Entry",
12265 (unsigned) i + idx_offset);
12266 }
12267 }
12268 }
12269 else
12270 {
12271 for (i = 1 - idx_offset; i < ndirs; i++)
12272 dw2_asm_output_nstring (dirs[i].path,
12273 dirs[i].length
12274 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12275 "Directory Entry: %#x", i + idx_offset);
12276
12277 dw2_asm_output_data (1, 0, "End directory table");
12278 }
12279
12280 /* We have to emit them in the order of emitted_number since that's
12281 used in the debug info generation. To do this efficiently we
12282 generate a back-mapping of the indices first. */
12283 backmap = XALLOCAVEC (int, numfiles);
12284 for (i = 0; i < numfiles; i++)
12285 backmap[files[i].file_idx->emitted_number - 1] = i;
12286
12287 if (dwarf_version >= 5)
12288 {
12289 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12290 if (filename0 == NULL)
12291 filename0 = "";
12292 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12293 DW_FORM_data2. Choose one based on the number of directories
12294 and how much space the indexes would occupy in each encoding.
12295 If we have at most 256 directories, all indexes fit into
12296 a single byte, so DW_FORM_data1 is most compact (if there
12297 are at most 128 directories, DW_FORM_udata would be just as
12298 compact, but no shorter and slower to decode). */
12299 if (ndirs + idx_offset <= 256)
12300 idx_form = DW_FORM_data1;
12301 /* If there are more than 65536 directories, we have to use
12302 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12303 Otherwise, compute the space all the indexes would occupy if
12304 they used DW_FORM_udata - sum - compare that to how large the
12305 DW_FORM_data2 encoding would be, and pick the more efficient one. */
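/* E.g. (illustrative): with 300 directories and 1000 files of which 900
   use a directory index below 128, DW_FORM_udata costs
   1 + 900*1 + 100*2 = 1101 bytes versus 2 * 1001 = 2002 bytes for
   DW_FORM_data2, so DW_FORM_udata is kept. */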
12306 else if (ndirs + idx_offset <= 65536)
12307 {
12308 unsigned HOST_WIDE_INT sum = 1;
12309 for (i = 0; i < numfiles; i++)
12310 {
12311 int file_idx = backmap[i];
12312 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12313 sum += size_of_uleb128 (dir_idx);
12314 }
12315 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12316 idx_form = DW_FORM_data2;
12317 }
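/* Two entry formats (path and directory index) are described here; on
   VMS two more (timestamp and size) are added below. */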
12318 #ifdef VMS_DEBUGGING_INFO
12319 dw2_asm_output_data (1, 4, "File name entry format count");
12320 #else
12321 dw2_asm_output_data (1, 2, "File name entry format count");
12322 #endif
12323 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12324 dw2_asm_output_data_uleb128 (str_form, "%s",
12325 get_DW_FORM_name (str_form));
12326 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12327 "DW_LNCT_directory_index");
12328 dw2_asm_output_data_uleb128 (idx_form, "%s",
12329 get_DW_FORM_name (idx_form));
12330 #ifdef VMS_DEBUGGING_INFO
12331 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12332 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12333 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12334 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12335 #endif
12336 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12337
12338 output_line_string (str_form, filename0, "File Entry", 0);
12339
12340 /* Include directory index. */
12341 if (idx_form != DW_FORM_udata)
12342 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12343 0, NULL);
12344 else
12345 dw2_asm_output_data_uleb128 (0, NULL);
12346
12347 #ifdef VMS_DEBUGGING_INFO
12348 dw2_asm_output_data_uleb128 (0, NULL);
12349 dw2_asm_output_data_uleb128 (0, NULL);
12350 #endif
12351 }
12352
12353 /* Now write all the file names. */
12354 for (i = 0; i < numfiles; i++)
12355 {
12356 int file_idx = backmap[i];
12357 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12358
12359 #ifdef VMS_DEBUGGING_INFO
12360 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12361
12362 /* Setting these fields can lead to debugger miscomparisons,
12363 but VMS Debug requires them to be set correctly. */
12364
12365 int ver;
12366 long long cdt;
12367 long siz;
12368 int maxfilelen = (strlen (files[file_idx].path)
12369 + dirs[dir_idx].length
12370 + MAX_VMS_VERSION_LEN + 1);
12371 char *filebuf = XALLOCAVEC (char, maxfilelen);
12372
12373 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12374 snprintf (filebuf, maxfilelen, "%s;%d",
12375 files[file_idx].path + dirs[dir_idx].length, ver);
12376
12377 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12378
12379 /* Include directory index. */
12380 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12381 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12382 dir_idx + idx_offset, NULL);
12383 else
12384 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12385
12386 /* Modification time. */
12387 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12388 &cdt, 0, 0, 0) == 0)
12389 ? cdt : 0, NULL);
12390
12391 /* File length in bytes. */
12392 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12393 0, &siz, 0, 0) == 0)
12394 ? siz : 0, NULL);
12395 #else
12396 output_line_string (str_form,
12397 files[file_idx].path + dirs[dir_idx].length,
12398 "File Entry", (unsigned) i + 1);
12399
12400 /* Include directory index. */
12401 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12402 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12403 dir_idx + idx_offset, NULL);
12404 else
12405 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12406
12407 if (dwarf_version >= 5)
12408 continue;
12409
12410 /* Modification time. */
12411 dw2_asm_output_data_uleb128 (0, NULL);
12412
12413 /* File length in bytes. */
12414 dw2_asm_output_data_uleb128 (0, NULL);
12415 #endif /* VMS_DEBUGGING_INFO */
12416 }
12417
12418 if (dwarf_version < 5)
12419 dw2_asm_output_data (1, 0, "End file name table");
12420 }
12421
12422
12423 /* Output one line number table into the .debug_line section. */
12424
12425 static void
12426 output_one_line_info_table (dw_line_info_table *table)
12427 {
12428 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12429 unsigned int current_line = 1;
12430 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12431 dw_line_info_entry *ent, *prev_addr;
12432 size_t i;
12433 unsigned int view;
12434
12435 view = 0;
12436
12437 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12438 {
12439 switch (ent->opcode)
12440 {
12441 case LI_set_address:
12442 /* ??? Unfortunately, we have little choice here currently, and
12443 must always use the most general form. GCC does not know the
12444 address delta itself, so we can't use DW_LNS_advance_pc. Many
12445 ports do have length attributes which will give an upper bound
12446 on the address range. We could perhaps use length attributes
12447 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12448 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12449
12450 view = 0;
12451
12452 /* This can handle any delta. This takes
12453 4+DWARF2_ADDR_SIZE bytes. */
12454 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12455 debug_variable_location_views
12456 ? ", reset view to 0" : "");
12457 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12458 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12459 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
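/* Illustration only (not emitted literally): for an 8-byte address the
   bytes produced above are 0x00 (extended-opcode marker), ULEB128
   length 9, DW_LNE_set_address (0x02), followed by the 8 relocated
   address bytes the assembler substitutes for LINE_LABEL.  */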
12460
12461 prev_addr = ent;
12462 break;
12463
12464 case LI_adv_address:
12465 {
12466 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12467 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12468 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12469
12470 view++;
12471
12472 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12473 dw2_asm_output_delta (2, line_label, prev_label,
12474 "from %s to %s", prev_label, line_label);
12475
12476 prev_addr = ent;
12477 break;
12478 }
12479
12480 case LI_set_line:
12481 if (ent->val == current_line)
12482 {
12483 /* We still need to start a new row, so output a copy insn. */
12484 dw2_asm_output_data (1, DW_LNS_copy,
12485 "copy line %u", current_line);
12486 }
12487 else
12488 {
12489 int line_offset = ent->val - current_line;
12490 int line_delta = line_offset - DWARF_LINE_BASE;
12491
12492 current_line = ent->val;
12493 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12494 {
12495 /* This can handle deltas from -10 to 234, using the current
12496 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12497 This takes 1 byte. */
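/* Worked example (illustrative, with DWARF_LINE_BASE == -10): a
   line_offset of +3 gives line_delta == 13, so the single byte
   DWARF_LINE_OPCODE_BASE + 13 is emitted; the consumer recovers the
   advance as DWARF_LINE_BASE + 13 == +3 and appends a new row.  */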
12498 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12499 "line %u", current_line);
12500 }
12501 else
12502 {
12503 /* This can handle any delta. This takes at least 4 bytes,
12504 depending on the value being encoded. */
12505 dw2_asm_output_data (1, DW_LNS_advance_line,
12506 "advance to line %u", current_line);
12507 dw2_asm_output_data_sleb128 (line_offset, NULL);
12508 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12509 }
12510 }
12511 break;
12512
12513 case LI_set_file:
12514 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12515 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12516 break;
12517
12518 case LI_set_column:
12519 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12520 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12521 break;
12522
12523 case LI_negate_stmt:
12524 current_is_stmt = !current_is_stmt;
12525 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12526 "is_stmt %d", current_is_stmt);
12527 break;
12528
12529 case LI_set_prologue_end:
12530 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12531 "set prologue end");
12532 break;
12533
12534 case LI_set_epilogue_begin:
12535 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12536 "set epilogue begin");
12537 break;
12538
12539 case LI_set_discriminator:
12540 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12541 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12542 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12543 dw2_asm_output_data_uleb128 (ent->val, NULL);
12544 break;
12545 }
12546 }
12547
12548 /* Emit debug info for the address of the end of the table. */
12549 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12550 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12551 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12552 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12553
12554 dw2_asm_output_data (1, 0, "end sequence");
12555 dw2_asm_output_data_uleb128 (1, NULL);
12556 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
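/* Illustration: the three bytes emitted above are 0x00 (extended-opcode
   marker), ULEB128 length 1, and DW_LNE_end_sequence (0x01), which ends
   the sequence and resets the line-number state machine.  */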
12557 }
12558
12559 /* Output the source line number correspondence information. This
12560 information goes into the .debug_line section. */
12561
12562 static void
12563 output_line_info (bool prologue_only)
12564 {
12565 static unsigned int generation;
12566 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12567 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12568 bool saw_one = false;
12569 int opc;
12570
12571 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12572 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12573 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12574 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12575
12576 if (!XCOFF_DEBUGGING_INFO)
12577 {
12578 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12579 dw2_asm_output_data (4, 0xffffffff,
12580 "Initial length escape value indicating 64-bit DWARF extension");
12581 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12582 "Length of Source Line Info");
12583 }
12584
12585 ASM_OUTPUT_LABEL (asm_out_file, l1);
12586
12587 output_dwarf_version ();
12588 if (dwarf_version >= 5)
12589 {
12590 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12591 dw2_asm_output_data (1, 0, "Segment Size");
12592 }
12593 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12594 ASM_OUTPUT_LABEL (asm_out_file, p1);
12595
12596 /* Define the architecture-dependent minimum instruction length (in bytes).
12597 In this implementation of DWARF, this field is used for information
12598 purposes only. Since GCC generates assembly language, we have no
12599 a priori knowledge of how many instruction bytes are generated for each
12600 source line, and therefore can use only the DW_LNE_set_address and
12601 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12602 this as '1', which is "correct enough" for all architectures,
12603 and don't let the target override. */
12604 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12605
12606 if (dwarf_version >= 4)
12607 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12608 "Maximum Operations Per Instruction");
12609 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12610 "Default is_stmt_start flag");
12611 dw2_asm_output_data (1, DWARF_LINE_BASE,
12612 "Line Base Value (Special Opcodes)");
12613 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12614 "Line Range Value (Special Opcodes)");
12615 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12616 "Special Opcode Base");
12617
12618 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12619 {
12620 int n_op_args;
12621 switch (opc)
12622 {
12623 case DW_LNS_advance_pc:
12624 case DW_LNS_advance_line:
12625 case DW_LNS_set_file:
12626 case DW_LNS_set_column:
12627 case DW_LNS_fixed_advance_pc:
12628 case DW_LNS_set_isa:
12629 n_op_args = 1;
12630 break;
12631 default:
12632 n_op_args = 0;
12633 break;
12634 }
12635
12636 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12637 opc, n_op_args);
12638 }
12639
12640 /* Write out the information about the files we use. */
12641 output_file_names ();
12642 ASM_OUTPUT_LABEL (asm_out_file, p2);
12643 if (prologue_only)
12644 {
12645 /* Output the marker for the end of the line number info. */
12646 ASM_OUTPUT_LABEL (asm_out_file, l2);
12647 return;
12648 }
12649
12650 if (separate_line_info)
12651 {
12652 dw_line_info_table *table;
12653 size_t i;
12654
12655 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12656 if (table->in_use)
12657 {
12658 output_one_line_info_table (table);
12659 saw_one = true;
12660 }
12661 }
12662 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12663 {
12664 output_one_line_info_table (cold_text_section_line_info);
12665 saw_one = true;
12666 }
12667
12668 /* ??? Some Darwin linkers crash on a .debug_line section with no
12669 sequences. Further, merely a DW_LNE_end_sequence entry is not
12670 sufficient -- the address column must also be initialized.
12671 Make sure to output at least one set_address/end_sequence pair,
12672 choosing .text since that section is always present. */
12673 if (text_section_line_info->in_use || !saw_one)
12674 output_one_line_info_table (text_section_line_info);
12675
12676 /* Output the marker for the end of the line number info. */
12677 ASM_OUTPUT_LABEL (asm_out_file, l2);
12678 }
12679 \f
12680 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12681
12682 static inline bool
12683 need_endianity_attribute_p (bool reverse)
12684 {
12685 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12686 }
12687
12688 /* Given a pointer to a tree node for some base type, return a pointer to
12689 a DIE that describes the given type. REVERSE is true if the type is
12690 to be interpreted in the reverse storage order wrt the target order.
12691
12692 This routine must only be called for GCC type nodes that correspond to
12693 Dwarf base (fundamental) types. */
12694
12695 static dw_die_ref
12696 base_type_die (tree type, bool reverse)
12697 {
12698 dw_die_ref base_type_result;
12699 enum dwarf_type encoding;
12700 bool fpt_used = false;
12701 struct fixed_point_type_info fpt_info;
12702 tree type_bias = NULL_TREE;
12703
12704 /* If this is a subtype that should not be emitted as a subrange type,
12705 use the base type. See subrange_type_for_debug_p. */
12706 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12707 type = TREE_TYPE (type);
12708
12709 switch (TREE_CODE (type))
12710 {
12711 case INTEGER_TYPE:
12712 if ((dwarf_version >= 4 || !dwarf_strict)
12713 && TYPE_NAME (type)
12714 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12715 && DECL_IS_BUILTIN (TYPE_NAME (type))
12716 && DECL_NAME (TYPE_NAME (type)))
12717 {
12718 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12719 if (strcmp (name, "char16_t") == 0
12720 || strcmp (name, "char32_t") == 0)
12721 {
12722 encoding = DW_ATE_UTF;
12723 break;
12724 }
12725 }
12726 if ((dwarf_version >= 3 || !dwarf_strict)
12727 && lang_hooks.types.get_fixed_point_type_info)
12728 {
12729 memset (&fpt_info, 0, sizeof (fpt_info));
12730 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12731 {
12732 fpt_used = true;
12733 encoding = ((TYPE_UNSIGNED (type))
12734 ? DW_ATE_unsigned_fixed
12735 : DW_ATE_signed_fixed);
12736 break;
12737 }
12738 }
12739 if (TYPE_STRING_FLAG (type))
12740 {
12741 if (TYPE_UNSIGNED (type))
12742 encoding = DW_ATE_unsigned_char;
12743 else
12744 encoding = DW_ATE_signed_char;
12745 }
12746 else if (TYPE_UNSIGNED (type))
12747 encoding = DW_ATE_unsigned;
12748 else
12749 encoding = DW_ATE_signed;
12750
12751 if (!dwarf_strict
12752 && lang_hooks.types.get_type_bias)
12753 type_bias = lang_hooks.types.get_type_bias (type);
12754 break;
12755
12756 case REAL_TYPE:
12757 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12758 {
12759 if (dwarf_version >= 3 || !dwarf_strict)
12760 encoding = DW_ATE_decimal_float;
12761 else
12762 encoding = DW_ATE_lo_user;
12763 }
12764 else
12765 encoding = DW_ATE_float;
12766 break;
12767
12768 case FIXED_POINT_TYPE:
12769 if (!(dwarf_version >= 3 || !dwarf_strict))
12770 encoding = DW_ATE_lo_user;
12771 else if (TYPE_UNSIGNED (type))
12772 encoding = DW_ATE_unsigned_fixed;
12773 else
12774 encoding = DW_ATE_signed_fixed;
12775 break;
12776
12777 /* Dwarf2 doesn't know anything about complex ints, so use
12778 a user-defined type for them. */
12779 case COMPLEX_TYPE:
12780 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12781 encoding = DW_ATE_complex_float;
12782 else
12783 encoding = DW_ATE_lo_user;
12784 break;
12785
12786 case BOOLEAN_TYPE:
12787 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12788 encoding = DW_ATE_boolean;
12789 break;
12790
12791 default:
12792 /* No other TREE_CODEs are Dwarf fundamental types. */
12793 gcc_unreachable ();
12794 }
12795
12796 base_type_result = new_die_raw (DW_TAG_base_type);
12797
12798 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12799 int_size_in_bytes (type));
12800 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12801
12802 if (need_endianity_attribute_p (reverse))
12803 add_AT_unsigned (base_type_result, DW_AT_endianity,
12804 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12805
12806 add_alignment_attribute (base_type_result, type);
12807
12808 if (fpt_used)
12809 {
12810 switch (fpt_info.scale_factor_kind)
12811 {
12812 case fixed_point_scale_factor_binary:
12813 add_AT_int (base_type_result, DW_AT_binary_scale,
12814 fpt_info.scale_factor.binary);
12815 break;
12816
12817 case fixed_point_scale_factor_decimal:
12818 add_AT_int (base_type_result, DW_AT_decimal_scale,
12819 fpt_info.scale_factor.decimal);
12820 break;
12821
12822 case fixed_point_scale_factor_arbitrary:
12823 /* Arbitrary scale factors cannot be described in standard DWARF,
12824 yet. */
12825 if (!dwarf_strict)
12826 {
12827 /* Describe the scale factor as a rational constant. */
12828 const dw_die_ref scale_factor
12829 = new_die (DW_TAG_constant, comp_unit_die (), type);
12830
12831 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12832 fpt_info.scale_factor.arbitrary.numerator);
12833 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12834 fpt_info.scale_factor.arbitrary.denominator);
12835
12836 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12837 }
12838 break;
12839
12840 default:
12841 gcc_unreachable ();
12842 }
12843 }
12844
12845 if (type_bias)
12846 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12847 dw_scalar_form_constant
12848 | dw_scalar_form_exprloc
12849 | dw_scalar_form_reference,
12850 NULL);
12851
12852 return base_type_result;
12853 }
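/* For illustration: on a typical target with 32-bit int, a C "unsigned int"
   is described here by a DW_TAG_base_type DIE carrying DW_AT_byte_size 4
   and DW_AT_encoding DW_ATE_unsigned (DW_AT_alignment may also be added);
   the DW_AT_name is attached later, in modified_type_die.  */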
12854
12855 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12856 named 'auto' in its type: return true for it, false otherwise. */
12857
12858 static inline bool
12859 is_cxx_auto (tree type)
12860 {
12861 if (is_cxx ())
12862 {
12863 tree name = TYPE_IDENTIFIER (type);
12864 if (name == get_identifier ("auto")
12865 || name == get_identifier ("decltype(auto)"))
12866 return true;
12867 }
12868 return false;
12869 }
12870
12871 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12872 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12873
12874 static inline int
12875 is_base_type (tree type)
12876 {
12877 switch (TREE_CODE (type))
12878 {
12879 case INTEGER_TYPE:
12880 case REAL_TYPE:
12881 case FIXED_POINT_TYPE:
12882 case COMPLEX_TYPE:
12883 case BOOLEAN_TYPE:
12884 return 1;
12885
12886 case VOID_TYPE:
12887 case ARRAY_TYPE:
12888 case RECORD_TYPE:
12889 case UNION_TYPE:
12890 case QUAL_UNION_TYPE:
12891 case ENUMERAL_TYPE:
12892 case FUNCTION_TYPE:
12893 case METHOD_TYPE:
12894 case POINTER_TYPE:
12895 case REFERENCE_TYPE:
12896 case NULLPTR_TYPE:
12897 case OFFSET_TYPE:
12898 case LANG_TYPE:
12899 case VECTOR_TYPE:
12900 return 0;
12901
12902 default:
12903 if (is_cxx_auto (type))
12904 return 0;
12905 gcc_unreachable ();
12906 }
12907
12908 return 0;
12909 }
12910
12911 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12912 node, return the size in bits for the type if it is a constant, or else
12913 return the alignment for the type if the type's size is not constant, or
12914 else return BITS_PER_WORD if the type actually turns out to be an
12915 ERROR_MARK node. */
12916
12917 static inline unsigned HOST_WIDE_INT
12918 simple_type_size_in_bits (const_tree type)
12919 {
12920 if (TREE_CODE (type) == ERROR_MARK)
12921 return BITS_PER_WORD;
12922 else if (TYPE_SIZE (type) == NULL_TREE)
12923 return 0;
12924 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12925 return tree_to_uhwi (TYPE_SIZE (type));
12926 else
12927 return TYPE_ALIGN (type);
12928 }
12929
12930 /* Similarly, but return an offset_int instead of UHWI. */
12931
12932 static inline offset_int
12933 offset_int_type_size_in_bits (const_tree type)
12934 {
12935 if (TREE_CODE (type) == ERROR_MARK)
12936 return BITS_PER_WORD;
12937 else if (TYPE_SIZE (type) == NULL_TREE)
12938 return 0;
12939 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12940 return wi::to_offset (TYPE_SIZE (type));
12941 else
12942 return TYPE_ALIGN (type);
12943 }
12944
12945 /* Given a pointer to a tree node for a subrange type, return a pointer
12946 to a DIE that describes the given type. */
12947
12948 static dw_die_ref
12949 subrange_type_die (tree type, tree low, tree high, tree bias,
12950 dw_die_ref context_die)
12951 {
12952 dw_die_ref subrange_die;
12953 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12954
12955 if (context_die == NULL)
12956 context_die = comp_unit_die ();
12957
12958 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12959
12960 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12961 {
12962 /* The size of the subrange type and its base type do not match,
12963 so we need to generate a size attribute for the subrange type. */
12964 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12965 }
12966
12967 add_alignment_attribute (subrange_die, type);
12968
12969 if (low)
12970 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12971 if (high)
12972 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12973 if (bias && !dwarf_strict)
12974 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12975 dw_scalar_form_constant
12976 | dw_scalar_form_exprloc
12977 | dw_scalar_form_reference,
12978 NULL);
12979
12980 return subrange_die;
12981 }
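/* For illustration (assuming an Ada-like front end): a subtype constrained
   to 1 .. 10 whose base type has the same size would typically yield a
   DW_TAG_subrange_type DIE with DW_AT_lower_bound 1 and DW_AT_upper_bound
   10, the DW_AT_byte_size attribute being omitted since the sizes match.  */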
12982
12983 /* Returns the (const and/or volatile) cv_qualifiers associated with
12984 the decl node. This will normally be augmented with the
12985 cv_qualifiers of the underlying type in add_type_attribute. */
12986
12987 static int
12988 decl_quals (const_tree decl)
12989 {
12990 return ((TREE_READONLY (decl)
12991 /* The C++ front-end correctly marks reference-typed
12992 variables as readonly, but from a language (and debug
12993 info) standpoint they are not const-qualified. */
12994 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12995 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12996 | (TREE_THIS_VOLATILE (decl)
12997 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12998 }
12999
13000 /* Determine the TYPE whose qualifiers match the largest strict subset
13001 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13002 qualifiers outside QUAL_MASK. */
13003
13004 static int
13005 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13006 {
13007 tree t;
13008 int best_rank = 0, best_qual = 0, max_rank;
13009
13010 type_quals &= qual_mask;
13011 max_rank = popcount_hwi (type_quals) - 1;
13012
13013 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13014 t = TYPE_NEXT_VARIANT (t))
13015 {
13016 int q = TYPE_QUALS (t) & qual_mask;
13017
13018 if ((q & type_quals) == q && q != type_quals
13019 && check_base_type (t, type))
13020 {
13021 int rank = popcount_hwi (q);
13022
13023 if (rank > best_rank)
13024 {
13025 best_rank = rank;
13026 best_qual = q;
13027 }
13028 }
13029 }
13030
13031 return best_qual;
13032 }
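/* For illustration: when asked about a "const volatile T" whose variant
   chain only contains a plain "const T", this returns TYPE_QUAL_CONST, so
   modified_type_die need only stack a DW_TAG_volatile_type on top of the
   already-available const-qualified DIE.  */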
13033
13034 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13035 static const dwarf_qual_info_t dwarf_qual_info[] =
13036 {
13037 { TYPE_QUAL_CONST, DW_TAG_const_type },
13038 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13039 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13040 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13041 };
13042 static const unsigned int dwarf_qual_info_size
13043 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13044
13045 /* If DIE is a qualified DIE of some base DIE with the same parent,
13046 return the base DIE, otherwise return NULL. Set MASK to the
13047 qualifiers added compared to the returned DIE. */
13048
13049 static dw_die_ref
13050 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13051 {
13052 unsigned int i;
13053 for (i = 0; i < dwarf_qual_info_size; i++)
13054 if (die->die_tag == dwarf_qual_info[i].t)
13055 break;
13056 if (i == dwarf_qual_info_size)
13057 return NULL;
13058 if (vec_safe_length (die->die_attr) != 1)
13059 return NULL;
13060 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13061 if (type == NULL || type->die_parent != die->die_parent)
13062 return NULL;
13063 *mask |= dwarf_qual_info[i].q;
13064 if (depth)
13065 {
13066 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13067 if (ret)
13068 return ret;
13069 }
13070 return type;
13071 }
13072
13073 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13074 entry that chains the modifiers specified by CV_QUALS in front of the
13075 given type. REVERSE is true if the type is to be interpreted in the
13076 reverse storage order wrt the target order. */
13077
13078 static dw_die_ref
13079 modified_type_die (tree type, int cv_quals, bool reverse,
13080 dw_die_ref context_die)
13081 {
13082 enum tree_code code = TREE_CODE (type);
13083 dw_die_ref mod_type_die;
13084 dw_die_ref sub_die = NULL;
13085 tree item_type = NULL;
13086 tree qualified_type;
13087 tree name, low, high;
13088 dw_die_ref mod_scope;
13089 /* Only these cv-qualifiers are currently handled. */
13090 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13091 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13092 ENCODE_QUAL_ADDR_SPACE(~0U));
13093 const bool reverse_base_type
13094 = need_endianity_attribute_p (reverse) && is_base_type (type);
13095
13096 if (code == ERROR_MARK)
13097 return NULL;
13098
13099 if (lang_hooks.types.get_debug_type)
13100 {
13101 tree debug_type = lang_hooks.types.get_debug_type (type);
13102
13103 if (debug_type != NULL_TREE && debug_type != type)
13104 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13105 }
13106
13107 cv_quals &= cv_qual_mask;
13108
13109 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13110 tag modifier (and not an attribute) that old consumers won't be
13111 able to handle. */
13112 if (dwarf_version < 3)
13113 cv_quals &= ~TYPE_QUAL_RESTRICT;
13114
13115 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13116 if (dwarf_version < 5)
13117 cv_quals &= ~TYPE_QUAL_ATOMIC;
13118
13119 /* See if we already have the appropriately qualified variant of
13120 this type. */
13121 qualified_type = get_qualified_type (type, cv_quals);
13122
13123 if (qualified_type == sizetype)
13124 {
13125 /* Try not to expose the internal sizetype type's name. */
13126 if (TYPE_NAME (qualified_type)
13127 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13128 {
13129 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13130
13131 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13132 && (TYPE_PRECISION (t)
13133 == TYPE_PRECISION (qualified_type))
13134 && (TYPE_UNSIGNED (t)
13135 == TYPE_UNSIGNED (qualified_type)));
13136 qualified_type = t;
13137 }
13138 else if (qualified_type == sizetype
13139 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13140 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13141 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13142 qualified_type = size_type_node;
13143 }
13144
13145 /* If we do, then we can just use its DIE, if it exists. */
13146 if (qualified_type)
13147 {
13148 mod_type_die = lookup_type_die (qualified_type);
13149
13150 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13151 dealt with specially: the DIE with the attribute, if it exists, is
13152 placed immediately after the regular DIE for the same base type. */
13153 if (mod_type_die
13154 && (!reverse_base_type
13155 || ((mod_type_die = mod_type_die->die_sib) != NULL
13156 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13157 return mod_type_die;
13158 }
13159
13160 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13161
13162 /* Handle C typedef types. */
13163 if (name
13164 && TREE_CODE (name) == TYPE_DECL
13165 && DECL_ORIGINAL_TYPE (name)
13166 && !DECL_ARTIFICIAL (name))
13167 {
13168 tree dtype = TREE_TYPE (name);
13169
13170 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13171 if (qualified_type == dtype && !reverse_base_type)
13172 {
13173 tree origin = decl_ultimate_origin (name);
13174
13175 /* Typedef variants that have an abstract origin don't get their own
13176 type DIE (see gen_typedef_die), so fall back on the ultimate
13177 abstract origin instead. */
13178 if (origin != NULL && origin != name)
13179 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13180 context_die);
13181
13182 /* For a named type, use the typedef. */
13183 gen_type_die (qualified_type, context_die);
13184 return lookup_type_die (qualified_type);
13185 }
13186 else
13187 {
13188 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13189 dquals &= cv_qual_mask;
13190 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13191 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13192 /* cv-unqualified version of named type. Just use
13193 the unnamed type to which it refers. */
13194 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13195 reverse, context_die);
13196 /* Else cv-qualified version of named type; fall through. */
13197 }
13198 }
13199
13200 mod_scope = scope_die_for (type, context_die);
13201
13202 if (cv_quals)
13203 {
13204 int sub_quals = 0, first_quals = 0;
13205 unsigned i;
13206 dw_die_ref first = NULL, last = NULL;
13207
13208 /* Determine a lesser qualified type that most closely matches
13209 this one. Then generate DW_TAG_* entries for the remaining
13210 qualifiers. */
13211 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13212 cv_qual_mask);
13213 if (sub_quals && use_debug_types)
13214 {
13215 bool needed = false;
13216 /* If emitting type units, make sure the order of qualifiers
13217 is canonical. Thus, start from unqualified type if
13218 an earlier qualifier is missing in sub_quals, but some later
13219 one is present there. */
13220 for (i = 0; i < dwarf_qual_info_size; i++)
13221 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13222 needed = true;
13223 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13224 {
13225 sub_quals = 0;
13226 break;
13227 }
13228 }
13229 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13230 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13231 {
13232 /* As not all intermediate qualified DIEs have corresponding
13233 tree types, ensure that qualified DIEs in the same scope
13234 as their DW_AT_type are emitted after their DW_AT_type,
13235 only with other qualified DIEs for the same type possibly
13236 in between them. Determine the range of such qualified
13237 DIEs now (first being the base type, last being corresponding
13238 last qualified DIE for it). */
13239 unsigned int count = 0;
13240 first = qualified_die_p (mod_type_die, &first_quals,
13241 dwarf_qual_info_size);
13242 if (first == NULL)
13243 first = mod_type_die;
13244 gcc_assert ((first_quals & ~sub_quals) == 0);
13245 for (count = 0, last = first;
13246 count < (1U << dwarf_qual_info_size);
13247 count++, last = last->die_sib)
13248 {
13249 int quals = 0;
13250 if (last == mod_scope->die_child)
13251 break;
13252 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13253 != first)
13254 break;
13255 }
13256 }
13257
13258 for (i = 0; i < dwarf_qual_info_size; i++)
13259 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13260 {
13261 dw_die_ref d;
13262 if (first && first != last)
13263 {
13264 for (d = first->die_sib; ; d = d->die_sib)
13265 {
13266 int quals = 0;
13267 qualified_die_p (d, &quals, dwarf_qual_info_size);
13268 if (quals == (first_quals | dwarf_qual_info[i].q))
13269 break;
13270 if (d == last)
13271 {
13272 d = NULL;
13273 break;
13274 }
13275 }
13276 if (d)
13277 {
13278 mod_type_die = d;
13279 continue;
13280 }
13281 }
13282 if (first)
13283 {
13284 d = new_die_raw (dwarf_qual_info[i].t);
13285 add_child_die_after (mod_scope, d, last);
13286 last = d;
13287 }
13288 else
13289 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13290 if (mod_type_die)
13291 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13292 mod_type_die = d;
13293 first_quals |= dwarf_qual_info[i].q;
13294 }
13295 }
13296 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13297 {
13298 dwarf_tag tag = DW_TAG_pointer_type;
13299 if (code == REFERENCE_TYPE)
13300 {
13301 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13302 tag = DW_TAG_rvalue_reference_type;
13303 else
13304 tag = DW_TAG_reference_type;
13305 }
13306 mod_type_die = new_die (tag, mod_scope, type);
13307
13308 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13309 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13310 add_alignment_attribute (mod_type_die, type);
13311 item_type = TREE_TYPE (type);
13312
13313 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13314 if (!ADDR_SPACE_GENERIC_P (as))
13315 {
13316 int action = targetm.addr_space.debug (as);
13317 if (action >= 0)
13318 {
13319 /* Positive values indicate an address_class. */
13320 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13321 }
13322 else
13323 {
13324 /* Negative values indicate an (inverted) segment base reg. */
13325 dw_loc_descr_ref d
13326 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13327 add_AT_loc (mod_type_die, DW_AT_segment, d);
13328 }
13329 }
13330 }
13331 else if (code == INTEGER_TYPE
13332 && TREE_TYPE (type) != NULL_TREE
13333 && subrange_type_for_debug_p (type, &low, &high))
13334 {
13335 tree bias = NULL_TREE;
13336 if (lang_hooks.types.get_type_bias)
13337 bias = lang_hooks.types.get_type_bias (type);
13338 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13339 item_type = TREE_TYPE (type);
13340 }
13341 else if (is_base_type (type))
13342 {
13343 mod_type_die = base_type_die (type, reverse);
13344
13345 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13346 if (reverse_base_type)
13347 {
13348 dw_die_ref after_die
13349 = modified_type_die (type, cv_quals, false, context_die);
13350 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13351 }
13352 else
13353 add_child_die (comp_unit_die (), mod_type_die);
13354
13355 add_pubtype (type, mod_type_die);
13356 }
13357 else
13358 {
13359 gen_type_die (type, context_die);
13360
13361 /* We have to get the type_main_variant here (and pass that to the
13362 `lookup_type_die' routine) because the ..._TYPE node we have
13363 might simply be a *copy* of some original type node (where the
13364 copy was created to help us keep track of typedef names) and
13365 that copy might have a different TYPE_UID from the original
13366 ..._TYPE node. */
13367 if (TREE_CODE (type) == FUNCTION_TYPE
13368 || TREE_CODE (type) == METHOD_TYPE)
13369 {
13370 /* For function/method types, can't just use type_main_variant here,
13371 because that can have different ref-qualifiers for C++,
13372 but try to canonicalize. */
13373 tree main = TYPE_MAIN_VARIANT (type);
13374 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13375 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13376 && check_base_type (t, main)
13377 && check_lang_type (t, type))
13378 return lookup_type_die (t);
13379 return lookup_type_die (type);
13380 }
13381 else if (TREE_CODE (type) != VECTOR_TYPE
13382 && TREE_CODE (type) != ARRAY_TYPE)
13383 return lookup_type_die (type_main_variant (type));
13384 else
13385 /* Vectors have the debugging information in the type,
13386 not the main variant. */
13387 return lookup_type_die (type);
13388 }
13389
13390 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13391 don't output a DW_TAG_typedef, since there isn't one in the
13392 user's program; just attach a DW_AT_name to the type.
13393 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13394 if the base type already has the same name. */
13395 if (name
13396 && ((TREE_CODE (name) != TYPE_DECL
13397 && (qualified_type == TYPE_MAIN_VARIANT (type)
13398 || (cv_quals == TYPE_UNQUALIFIED)))
13399 || (TREE_CODE (name) == TYPE_DECL
13400 && TREE_TYPE (name) == qualified_type
13401 && DECL_NAME (name))))
13402 {
13403 if (TREE_CODE (name) == TYPE_DECL)
13404 /* Could just call add_name_and_src_coords_attributes here,
13405 but since this is a builtin type it doesn't have any
13406 useful source coordinates anyway. */
13407 name = DECL_NAME (name);
13408 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13409 }
13410 /* This probably indicates a bug. */
13411 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13412 {
13413 name = TYPE_IDENTIFIER (type);
13414 add_name_attribute (mod_type_die,
13415 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13416 }
13417
13418 if (qualified_type && !reverse_base_type)
13419 equate_type_number_to_die (qualified_type, mod_type_die);
13420
13421 if (item_type)
13422 /* We must do this after the equate_type_number_to_die call, in case
13423 this is a recursive type. This ensures that the modified_type_die
13424 recursion will terminate even if the type is recursive. Recursive
13425 types are possible in Ada. */
13426 sub_die = modified_type_die (item_type,
13427 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13428 reverse,
13429 context_die);
13430
13431 if (sub_die != NULL)
13432 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13433
13434 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13435 if (TYPE_ARTIFICIAL (type))
13436 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13437
13438 return mod_type_die;
13439 }
13440
13441 /* Generate DIEs for the generic parameters of T.
13442 T must be either a generic type or a generic function.
13443 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13444
13445 static void
13446 gen_generic_params_dies (tree t)
13447 {
13448 tree parms, args;
13449 int parms_num, i;
13450 dw_die_ref die = NULL;
13451 int non_default;
13452
13453 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13454 return;
13455
13456 if (TYPE_P (t))
13457 die = lookup_type_die (t);
13458 else if (DECL_P (t))
13459 die = lookup_decl_die (t);
13460
13461 gcc_assert (die);
13462
13463 parms = lang_hooks.get_innermost_generic_parms (t);
13464 if (!parms)
13465 /* T has no generic parameter. It means T is neither a generic type
13466 nor a generic function. End of story. */
13467 return;
13468
13469 parms_num = TREE_VEC_LENGTH (parms);
13470 args = lang_hooks.get_innermost_generic_args (t);
13471 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13472 non_default = int_cst_value (TREE_CHAIN (args));
13473 else
13474 non_default = TREE_VEC_LENGTH (args);
13475 for (i = 0; i < parms_num; i++)
13476 {
13477 tree parm, arg, arg_pack_elems;
13478 dw_die_ref parm_die;
13479
13480 parm = TREE_VEC_ELT (parms, i);
13481 arg = TREE_VEC_ELT (args, i);
13482 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13483 gcc_assert (parm && TREE_VALUE (parm) && arg);
13484
13485 if (parm && TREE_VALUE (parm) && arg)
13486 {
13487 /* If PARM represents a template parameter pack,
13488 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13489 by DW_TAG_template_*_parameter DIEs for the argument
13490 pack elements of ARG. Note that ARG would then be
13491 an argument pack. */
13492 if (arg_pack_elems)
13493 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13494 arg_pack_elems,
13495 die);
13496 else
13497 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13498 true /* emit name */, die);
13499 if (i >= non_default)
13500 add_AT_flag (parm_die, DW_AT_default_value, 1);
13501 }
13502 }
13503 }
13504
13505 /* Create and return a DIE for PARM which should be
13506 the representation of a generic type parameter.
13507 For instance, in the C++ front end, PARM would be a template parameter.
13508 ARG is the argument to PARM.
13509 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set to the
13510 name of the PARM.
13511 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13512 as a child node. */
13513
13514 static dw_die_ref
13515 generic_parameter_die (tree parm, tree arg,
13516 bool emit_name_p,
13517 dw_die_ref parent_die)
13518 {
13519 dw_die_ref tmpl_die = NULL;
13520 const char *name = NULL;
13521
13522 if (!parm || !DECL_NAME (parm) || !arg)
13523 return NULL;
13524
13525 /* We support non-type generic parameters and arguments,
13526 type generic parameters and arguments, as well as
13527 generic generic parameters (a.k.a. template template parameters in C++)
13528 and arguments. */
13529 if (TREE_CODE (parm) == PARM_DECL)
13530 /* PARM is a nontype generic parameter */
13531 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13532 else if (TREE_CODE (parm) == TYPE_DECL)
13533 /* PARM is a type generic parameter. */
13534 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13535 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13536 /* PARM is a generic generic parameter.
13537 Its DIE is a GNU extension. It shall have a
13538 DW_AT_name attribute to represent the name of the template template
13539 parameter, and a DW_AT_GNU_template_name attribute to represent the
13540 name of the template template argument. */
13541 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13542 parent_die, parm);
13543 else
13544 gcc_unreachable ();
13545
13546 if (tmpl_die)
13547 {
13548 tree tmpl_type;
13549
13550 /* If PARM is a generic parameter pack, it means we are
13551 emitting debug info for a template argument pack element.
13552 In other terms, ARG is a template argument pack element.
13553 In that case, we don't emit any DW_AT_name attribute for
13554 the die. */
13555 if (emit_name_p)
13556 {
13557 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13558 gcc_assert (name);
13559 add_AT_string (tmpl_die, DW_AT_name, name);
13560 }
13561
13562 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13563 {
13564 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13565 TMPL_DIE should have a child DW_AT_type attribute that is set
13566 to the type of the argument to PARM, which is ARG.
13567 If PARM is a type generic parameter, TMPL_DIE should have a
13568 child DW_AT_type that is set to ARG. */
13569 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13570 add_type_attribute (tmpl_die, tmpl_type,
13571 (TREE_THIS_VOLATILE (tmpl_type)
13572 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13573 false, parent_die);
13574 }
13575 else
13576 {
13577 /* So TMPL_DIE is a DIE representing a generic generic template
13578 parameter, a.k.a. a template template parameter in C++,
13579 and ARG is a template. */
13580
13581 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13582 to the name of the argument. */
13583 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13584 if (name)
13585 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13586 }
13587
13588 if (TREE_CODE (parm) == PARM_DECL)
13589 /* So PARM is a non-type generic parameter.
13590 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13591 attribute of TMPL_DIE whose value represents the value
13592 of ARG.
13593 We must be careful here:
13594 The value of ARG might reference some function decls.
13595 We might currently be emitting debug info for a generic
13596 type, and since types are emitted before function decls, we don't
13597 know whether the function decls referenced by ARG will actually be
13598 emitted after the cgraph computations.
13599 So we must defer the generation of the DW_AT_const_value to
13600 after cgraph is ready. */
13601 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13602 }
13603
13604 return tmpl_die;
13605 }
13606
13607 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing PARM_PACK.
13608 PARM_PACK must be a template parameter pack. The returned DIE
13609 will be child DIE of PARENT_DIE. */
13610
13611 static dw_die_ref
13612 template_parameter_pack_die (tree parm_pack,
13613 tree parm_pack_args,
13614 dw_die_ref parent_die)
13615 {
13616 dw_die_ref die;
13617 int j;
13618
13619 gcc_assert (parent_die && parm_pack);
13620
13621 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13622 add_name_and_src_coords_attributes (die, parm_pack);
13623 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13624 generic_parameter_die (parm_pack,
13625 TREE_VEC_ELT (parm_pack_args, j),
13626 false /* Don't emit DW_AT_name */,
13627 die);
13628 return die;
13629 }
13630
13631 /* Return the DBX register number described by a given RTL node. */
13632
13633 static unsigned int
13634 dbx_reg_number (const_rtx rtl)
13635 {
13636 unsigned regno = REGNO (rtl);
13637
13638 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13639
13640 #ifdef LEAF_REG_REMAP
13641 if (crtl->uses_only_leaf_regs)
13642 {
13643 int leaf_reg = LEAF_REG_REMAP (regno);
13644 if (leaf_reg != -1)
13645 regno = (unsigned) leaf_reg;
13646 }
13647 #endif
13648
13649 regno = DBX_REGISTER_NUMBER (regno);
13650 gcc_assert (regno != INVALID_REGNUM);
13651 return regno;
13652 }
13653
13654 /* Optionally add a DW_OP_piece term to a location description expression.
13655 DW_OP_piece is only added if the location description expression does
13656 not already end with DW_OP_piece. */
13657
13658 static void
13659 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13660 {
13661 dw_loc_descr_ref loc;
13662
13663 if (*list_head != NULL)
13664 {
13665 /* Find the end of the chain. */
13666 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13667 ;
13668
13669 if (loc->dw_loc_opc != DW_OP_piece)
13670 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13671 }
13672 }
13673
13674 /* Return a location descriptor that designates a machine register or
13675 zero if there is none. */
13676
13677 static dw_loc_descr_ref
13678 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13679 {
13680 rtx regs;
13681
13682 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13683 return 0;
13684
13685 /* We only use "frame base" when we're sure we're talking about the
13686 post-prologue local stack frame. We do this by *not* running
13687 register elimination until this point, and recognizing the special
13688 argument pointer and soft frame pointer rtx's.
13689 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13690 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13691 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13692 {
13693 dw_loc_descr_ref result = NULL;
13694
13695 if (dwarf_version >= 4 || !dwarf_strict)
13696 {
13697 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13698 initialized);
13699 if (result)
13700 add_loc_descr (&result,
13701 new_loc_descr (DW_OP_stack_value, 0, 0));
13702 }
13703 return result;
13704 }
13705
13706 regs = targetm.dwarf_register_span (rtl);
13707
13708 if (REG_NREGS (rtl) > 1 || regs)
13709 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13710 else
13711 {
13712 unsigned int dbx_regnum = dbx_reg_number (rtl);
13713 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13714 return 0;
13715 return one_reg_loc_descriptor (dbx_regnum, initialized);
13716 }
13717 }
13718
13719 /* Return a location descriptor that designates a machine register for
13720 a given hard register number. */
13721
13722 static dw_loc_descr_ref
13723 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13724 {
13725 dw_loc_descr_ref reg_loc_descr;
13726
13727 if (regno <= 31)
13728 reg_loc_descr
13729 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13730 else
13731 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13732
13733 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13734 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13735
13736 return reg_loc_descr;
13737 }
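/* For illustration: DWARF register column 3 becomes the single byte
   DW_OP_reg3, while column 40 does not fit in DW_OP_reg0..DW_OP_reg31 and
   becomes DW_OP_regx with ULEB128 operand 40 (2 bytes).  */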
13738
13739 /* Given an RTL of a register, return a location descriptor that
13740 designates a value that spans more than one register. */
13741
13742 static dw_loc_descr_ref
13743 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13744 enum var_init_status initialized)
13745 {
13746 int size, i;
13747 dw_loc_descr_ref loc_result = NULL;
13748
13749 /* Simple, contiguous registers. */
13750 if (regs == NULL_RTX)
13751 {
13752 unsigned reg = REGNO (rtl);
13753 int nregs;
13754
13755 #ifdef LEAF_REG_REMAP
13756 if (crtl->uses_only_leaf_regs)
13757 {
13758 int leaf_reg = LEAF_REG_REMAP (reg);
13759 if (leaf_reg != -1)
13760 reg = (unsigned) leaf_reg;
13761 }
13762 #endif
13763
13764 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13765 nregs = REG_NREGS (rtl);
13766
13767 /* At present we only track constant-sized pieces. */
13768 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13769 return NULL;
13770 size /= nregs;
13771
13772 loc_result = NULL;
13773 while (nregs--)
13774 {
13775 dw_loc_descr_ref t;
13776
13777 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13778 VAR_INIT_STATUS_INITIALIZED);
13779 add_loc_descr (&loc_result, t);
13780 add_loc_descr_op_piece (&loc_result, size);
13781 ++reg;
13782 }
13783 return loc_result;
13784 }
13785
13786 /* Now onto stupid register sets in non-contiguous locations. */
13787
13788 gcc_assert (GET_CODE (regs) == PARALLEL);
13789
13790 /* At present we only track constant-sized pieces. */
13791 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13792 return NULL;
13793 loc_result = NULL;
13794
13795 for (i = 0; i < XVECLEN (regs, 0); ++i)
13796 {
13797 dw_loc_descr_ref t;
13798
13799 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13800 VAR_INIT_STATUS_INITIALIZED);
13801 add_loc_descr (&loc_result, t);
13802 add_loc_descr_op_piece (&loc_result, size);
13803 }
13804
13805 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13806 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13807 return loc_result;
13808 }
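/* For illustration: a 16-byte value living in two consecutive 8-byte hard
   registers that map to DWARF columns 0 and 1 is described as
   DW_OP_reg0 DW_OP_piece 8 DW_OP_reg1 DW_OP_piece 8 (the contiguous case
   above); a PARALLEL span produces the same shape, one register/piece pair
   per element.  */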
13809
13810 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13811
13812 /* Return a location descriptor that designates a constant i,
13813 as a compound operation from constant (i >> shift), constant shift
13814 and DW_OP_shl. */
13815
13816 static dw_loc_descr_ref
13817 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13818 {
13819 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13820 add_loc_descr (&ret, int_loc_descriptor (shift));
13821 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13822 return ret;
13823 }
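/* For illustration (assuming a 64-bit HOST_WIDE_INT): int_loc_descriptor
   reaches this helper for i == ((HOST_WIDE_INT) 1 << 32) with shift 28,
   emitting DW_OP_lit16 DW_OP_lit28 DW_OP_shl (3 bytes) instead of the
   6 bytes a DW_OP_constu encoding would need.  */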
13824
13825 /* Return a location descriptor that designates constant POLY_I. */
13826
13827 static dw_loc_descr_ref
13828 int_loc_descriptor (poly_int64 poly_i)
13829 {
13830 enum dwarf_location_atom op;
13831
13832 HOST_WIDE_INT i;
13833 if (!poly_i.is_constant (&i))
13834 {
13835 /* Create location descriptions for the non-constant part and
13836 add any constant offset at the end. */
13837 dw_loc_descr_ref ret = NULL;
13838 HOST_WIDE_INT constant = poly_i.coeffs[0];
13839 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13840 {
13841 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13842 if (coeff != 0)
13843 {
13844 dw_loc_descr_ref start = ret;
13845 unsigned int factor;
13846 int bias;
13847 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13848 (j, &factor, &bias);
13849
13850 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13851 add COEFF * (REGNO / FACTOR) now and subtract
13852 COEFF * BIAS from the final constant part. */
13853 constant -= coeff * bias;
13854 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13855 if (coeff % factor == 0)
13856 coeff /= factor;
13857 else
13858 {
13859 int amount = exact_log2 (factor);
13860 gcc_assert (amount >= 0);
13861 add_loc_descr (&ret, int_loc_descriptor (amount));
13862 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13863 }
13864 if (coeff != 1)
13865 {
13866 add_loc_descr (&ret, int_loc_descriptor (coeff));
13867 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13868 }
13869 if (start)
13870 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13871 }
13872 }
13873 loc_descr_plus_const (&ret, constant);
13874 return ret;
13875 }
13876
13877 /* Pick the smallest representation of a constant, rather than just
13878 defaulting to the LEB encoding. */
13879 if (i >= 0)
13880 {
13881 int clz = clz_hwi (i);
13882 int ctz = ctz_hwi (i);
13883 if (i <= 31)
13884 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13885 else if (i <= 0xff)
13886 op = DW_OP_const1u;
13887 else if (i <= 0xffff)
13888 op = DW_OP_const2u;
13889 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13890 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13891 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13892 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13893 while DW_OP_const4u is 5 bytes. */
13894 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13895 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13896 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13897 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13898 while DW_OP_const4u is 5 bytes. */
13899 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13900
13901 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13902 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13903 <= 4)
13904 {
13905 /* As i >= 2**31, the double cast above will yield a negative number.
13906 Since wrapping is defined in DWARF expressions we can output big
13907 positive integers as small negative ones, regardless of the size
13908 of host wide ints.
13909
13910 Here, since the evaluator will handle 32-bit values and since i >=
13911 2**31, we know it's going to be interpreted as a negative literal:
13912 store it this way if we can do better than 5 bytes this way. */
13913 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13914 }
13915 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13916 op = DW_OP_const4u;
13917
13918 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13919 least 6 bytes: see if we can do better before falling back to it. */
13920 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13921 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13922 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13923 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13924 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13925 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13926 >= HOST_BITS_PER_WIDE_INT)
13927 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13928 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13929 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13930 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13931 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13932 && size_of_uleb128 (i) > 6)
13933 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13934 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13935 else
13936 op = DW_OP_constu;
13937 }
13938 else
13939 {
13940 if (i >= -0x80)
13941 op = DW_OP_const1s;
13942 else if (i >= -0x8000)
13943 op = DW_OP_const2s;
13944 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13945 {
13946 if (size_of_int_loc_descriptor (i) < 5)
13947 {
13948 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13949 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13950 return ret;
13951 }
13952 op = DW_OP_const4s;
13953 }
13954 else
13955 {
13956 if (size_of_int_loc_descriptor (i)
13957 < (unsigned long) 1 + size_of_sleb128 (i))
13958 {
13959 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13960 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13961 return ret;
13962 }
13963 op = DW_OP_consts;
13964 }
13965 }
13966
13967 return new_loc_descr (op, i, 0);
13968 }
13969
13970 /* Likewise, for unsigned constants. */
13971
13972 static dw_loc_descr_ref
13973 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13974 {
13975 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13976 const unsigned HOST_WIDE_INT max_uint
13977 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13978
13979 /* If possible, use the clever signed constants handling. */
13980 if (i <= max_int)
13981 return int_loc_descriptor ((HOST_WIDE_INT) i);
13982
13983 /* Here, we are left with positive numbers that cannot be represented as
13984 HOST_WIDE_INT, i.e.:
13985 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13986
13987 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
13988 bytes, whereas it may be better to output a negative integer: thanks to integer
13989 wrapping, we know that:
13990 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13991 = x - 2 * (max (HOST_WIDE_INT) + 1)
13992 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13993 small negative integers. Let's try that in cases it will clearly improve
13994 the encoding: there is no gain turning DW_OP_const4u into
13995 DW_OP_const4s. */
13996 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13997 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13998 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13999 {
14000 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14001
14002 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14003 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14004 const HOST_WIDE_INT second_shift
14005 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14006
14007 /* So we finally have:
14008 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14009 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14010 return int_loc_descriptor (second_shift);
14011 }
14012
14013 /* Last chance: fallback to a simple constant operation. */
14014 return new_loc_descr
14015 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14016 ? DW_OP_const4u
14017 : DW_OP_const8u,
14018 i, 0);
14019 }
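/* For illustration (assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT): i == 0xffffffffffffffff takes the branch above and is
   emitted as int_loc_descriptor (-1), i.e. DW_OP_const1s -1 (2 bytes),
   instead of a 9-byte DW_OP_const8u.  */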
14020
14021 /* Generate and return a location description that computes the unsigned
14022 comparison of the two stack top entries (a OP b where b is the top-most
14023 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14024 LE_EXPR, GT_EXPR or GE_EXPR. */
14025
14026 static dw_loc_descr_ref
14027 uint_comparison_loc_list (enum tree_code kind)
14028 {
14029 enum dwarf_location_atom op, flip_op;
14030 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14031
14032 switch (kind)
14033 {
14034 case LT_EXPR:
14035 op = DW_OP_lt;
14036 break;
14037 case LE_EXPR:
14038 op = DW_OP_le;
14039 break;
14040 case GT_EXPR:
14041 op = DW_OP_gt;
14042 break;
14043 case GE_EXPR:
14044 op = DW_OP_ge;
14045 break;
14046 default:
14047 gcc_unreachable ();
14048 }
14049
14050 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14051 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14052
14053 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14054 possible to perform unsigned comparisons: we just have to distinguish
14055 three cases:
14056
14057 1. when a and b have the same sign (as signed integers); then we should
14058 return: a OP(signed) b;
14059
14060 2. when a is a negative signed integer while b is a positive one, then a
14061 is a greater unsigned integer than b; likewise when a and b's roles
14062 are flipped.
14063
14064 So first, compare the sign of the two operands. */
14065 ret = new_loc_descr (DW_OP_over, 0, 0);
14066 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14067 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14068 /* If they have different signs (i.e. they have different sign bits), then
14069 the stack top value has now the sign bit set and thus it's smaller than
14070 zero. */
14071 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14072 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14073 add_loc_descr (&ret, bra_node);
14074
14075 /* We are in case 1. At this point, we know both operands have the same
14076 sign, so it's safe to use the built-in signed comparison. */
14077 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14078 add_loc_descr (&ret, jmp_node);
14079
14080 /* We are in case 2. Here, we know both operands do not have the same sign,
14081 so we have to flip the signed comparison. */
14082 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14083 tmp = new_loc_descr (flip_op, 0, 0);
14084 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14085 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14086 add_loc_descr (&ret, tmp);
14087
14088 /* This dummy operation is necessary to make the two branches join. */
14089 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14090 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14091 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14092 add_loc_descr (&ret, tmp);
14093
14094 return ret;
14095 }
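/* Schematically, the expression built above is:
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra L1
     <signed OP> DW_OP_skip L2
   L1: <flipped OP>
   L2: DW_OP_nop
   (labels are shown only for illustration; the branch targets are encoded
   as byte offsets).  */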
14096
14097 /* Likewise, but takes the location description lists (might be destructive on
14098 them). Return NULL if either is NULL or if concatenation fails. */
14099
14100 static dw_loc_list_ref
14101 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14102 enum tree_code kind)
14103 {
14104 if (left == NULL || right == NULL)
14105 return NULL;
14106
14107 add_loc_list (&left, right);
14108 if (left == NULL)
14109 return NULL;
14110
14111 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14112 return left;
14113 }
14114
14115 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14116 without actually allocating it. */
14117
14118 static unsigned long
14119 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14120 {
14121 return size_of_int_loc_descriptor (i >> shift)
14122 + size_of_int_loc_descriptor (shift)
14123 + 1;
14124 }
14125
14126 /* Return size_of_locs (int_loc_descriptor (i)) without
14127 actually allocating it. */
14128
14129 static unsigned long
14130 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14131 {
14132 unsigned long s;
14133
14134 if (i >= 0)
14135 {
14136 int clz, ctz;
14137 if (i <= 31)
14138 return 1;
14139 else if (i <= 0xff)
14140 return 2;
14141 else if (i <= 0xffff)
14142 return 3;
14143 clz = clz_hwi (i);
14144 ctz = ctz_hwi (i);
14145 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14146 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14147 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14148 - clz - 5);
14149 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14150 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14151 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14152 - clz - 8);
14153 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14154 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14155 <= 4)
14156 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14157 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14158 return 5;
14159 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14160 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14161 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14162 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14163 - clz - 8);
14164 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14165 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14166 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14167 - clz - 16);
14168 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14169 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14170 && s > 6)
14171 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14172 - clz - 32);
14173 else
14174 return 1 + s;
14175 }
14176 else
14177 {
14178 if (i >= -0x80)
14179 return 2;
14180 else if (i >= -0x8000)
14181 return 3;
14182 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14183 {
14184 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14185 {
14186 s = size_of_int_loc_descriptor (-i) + 1;
14187 if (s < 5)
14188 return s;
14189 }
14190 return 5;
14191 }
14192 else
14193 {
14194 unsigned long r = 1 + size_of_sleb128 (i);
14195 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14196 {
14197 s = size_of_int_loc_descriptor (-i) + 1;
14198 if (s < r)
14199 return s;
14200 }
14201 return r;
14202 }
14203 }
14204 }
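
/* For example, assuming a 64-bit HOST_WIDE_INT, the sizes computed above are:
     5          -> 1 byte  (DW_OP_lit5)
     200        -> 2 bytes (DW_OP_const1u 200)
     70000      -> 5 bytes (DW_OP_const4u 70000)
     -100       -> 2 bytes (DW_OP_const1s -100)
     0x12 << 32 -> 4 bytes (DW_OP_lit18 DW_OP_const1u 32 DW_OP_shl),
   the last one being shorter than either DW_OP_constu or DW_OP_const8u
   would be for that value.  */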
14205
14206 /* Return a location description representing the "address" of an integer
14207 value.  This can appear only as a top-level expression. */
14208
14209 static dw_loc_descr_ref
14210 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14211 {
14212 int litsize;
14213 dw_loc_descr_ref loc_result = NULL;
14214
14215 if (!(dwarf_version >= 4 || !dwarf_strict))
14216 return NULL;
14217
14218 litsize = size_of_int_loc_descriptor (i);
14219 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14220 is more compact. For DW_OP_stack_value we need:
14221 litsize + 1 (DW_OP_stack_value)
14222 and for DW_OP_implicit_value:
14223 1 (DW_OP_implicit_value) + 1 (length) + size. */
14224 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14225 {
14226 loc_result = int_loc_descriptor (i);
14227 add_loc_descr (&loc_result,
14228 new_loc_descr (DW_OP_stack_value, 0, 0));
14229 return loc_result;
14230 }
14231
14232 loc_result = new_loc_descr (DW_OP_implicit_value,
14233 size, 0);
14234 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14235 loc_result->dw_loc_oprnd2.v.val_int = i;
14236 return loc_result;
14237 }
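
/* For example, assuming DWARF2_ADDR_SIZE == 8: for SIZE == 4 and I == 5 the
   result is DW_OP_lit5 DW_OP_stack_value (2 bytes), which beats the 6 bytes
   of DW_OP_implicit_value 4 <4-byte block>; for SIZE == 16 the value is
   wider than a DWARF stack entry can hold, so DW_OP_implicit_value 16
   <16-byte block> is used instead.  */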
14238
14239 /* Return a location descriptor that designates a base+offset location. */
14240
14241 static dw_loc_descr_ref
14242 based_loc_descr (rtx reg, poly_int64 offset,
14243 enum var_init_status initialized)
14244 {
14245 unsigned int regno;
14246 dw_loc_descr_ref result;
14247 dw_fde_ref fde = cfun->fde;
14248
14249 /* We only use "frame base" when we're sure we're talking about the
14250 post-prologue local stack frame. We do this by *not* running
14251 register elimination until this point, and recognizing the special
14252 argument pointer and soft frame pointer rtx's. */
14253 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14254 {
14255 rtx elim = (ira_use_lra_p
14256 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14257 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14258
14259 if (elim != reg)
14260 {
14261 /* Allow hard frame pointer here even if frame pointer
14262 isn't used since hard frame pointer is encoded with
14263 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14264 not hard frame pointer directly. */
14265 elim = strip_offset_and_add (elim, &offset);
14266 gcc_assert (elim == hard_frame_pointer_rtx
14267 || elim == stack_pointer_rtx);
14268
14269 /* If drap register is used to align stack, use frame
14270 pointer + offset to access stack variables. If stack
14271 is aligned without drap, use stack pointer + offset to
14272 access stack variables. */
14273 if (crtl->stack_realign_tried
14274 && reg == frame_pointer_rtx)
14275 {
14276 int base_reg
14277 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14278 ? HARD_FRAME_POINTER_REGNUM
14279 : REGNO (elim));
14280 return new_reg_loc_descr (base_reg, offset);
14281 }
14282
14283 gcc_assert (frame_pointer_fb_offset_valid);
14284 offset += frame_pointer_fb_offset;
14285 HOST_WIDE_INT const_offset;
14286 if (offset.is_constant (&const_offset))
14287 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14288 else
14289 {
14290 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14291 loc_descr_plus_const (&ret, offset);
14292 return ret;
14293 }
14294 }
14295 }
14296
14297 regno = REGNO (reg);
14298 #ifdef LEAF_REG_REMAP
14299 if (crtl->uses_only_leaf_regs)
14300 {
14301 int leaf_reg = LEAF_REG_REMAP (regno);
14302 if (leaf_reg != -1)
14303 regno = (unsigned) leaf_reg;
14304 }
14305 #endif
14306 regno = DWARF_FRAME_REGNUM (regno);
14307
14308 HOST_WIDE_INT const_offset;
14309 if (!optimize && fde
14310 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14311 && offset.is_constant (&const_offset))
14312 {
14313 /* Use cfa+offset to represent the location of arguments passed
14314 on the stack when drap is used to align stack.
14315 Only do this when not optimizing, since for optimized code var-tracking
14316 is supposed to track where the arguments live and the register
14317 used as vdrap or drap in some spot might be used for something
14318 else in other parts of the routine. */
14319 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14320 }
14321
14322 result = new_reg_loc_descr (regno, offset);
14323
14324 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14325 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14326
14327 return result;
14328 }
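
/* For example, when REG is the soft frame pointer and OFFSET is -8, the
   usual result is DW_OP_fbreg <-8 + frame_pointer_fb_offset>, i.e. relative
   to the DW_AT_frame_base of the enclosing subprogram, whereas a plain hard
   register base typically yields DW_OP_breg<n> <offset> (DW_OP_bregx for
   register numbers above 31).  */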
14329
14330 /* Return true if this RTL expression describes a base+offset calculation. */
14331
14332 static inline int
14333 is_based_loc (const_rtx rtl)
14334 {
14335 return (GET_CODE (rtl) == PLUS
14336 && ((REG_P (XEXP (rtl, 0))
14337 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14338 && CONST_INT_P (XEXP (rtl, 1)))));
14339 }
14340
14341 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14342 failed. */
14343
14344 static dw_loc_descr_ref
14345 tls_mem_loc_descriptor (rtx mem)
14346 {
14347 tree base;
14348 dw_loc_descr_ref loc_result;
14349
14350 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14351 return NULL;
14352
14353 base = get_base_address (MEM_EXPR (mem));
14354 if (base == NULL
14355 || !VAR_P (base)
14356 || !DECL_THREAD_LOCAL_P (base))
14357 return NULL;
14358
14359 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14360 if (loc_result == NULL)
14361 return NULL;
14362
14363 if (maybe_ne (MEM_OFFSET (mem), 0))
14364 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14365
14366 return loc_result;
14367 }
14368
14369 /* Output debug info about the reason why we failed to expand an expression
14370 as a DWARF expression. */
14371
14372 static void
14373 expansion_failed (tree expr, rtx rtl, char const *reason)
14374 {
14375 if (dump_file && (dump_flags & TDF_DETAILS))
14376 {
14377 fprintf (dump_file, "Failed to expand as dwarf: ");
14378 if (expr)
14379 print_generic_expr (dump_file, expr, dump_flags);
14380 if (rtl)
14381 {
14382 fprintf (dump_file, "\n");
14383 print_rtl (dump_file, rtl);
14384 }
14385 fprintf (dump_file, "\nReason: %s\n", reason);
14386 }
14387 }
14388
14389 /* Helper function for const_ok_for_output. */
14390
14391 static bool
14392 const_ok_for_output_1 (rtx rtl)
14393 {
14394 if (targetm.const_not_ok_for_debug_p (rtl))
14395 {
14396 if (GET_CODE (rtl) != UNSPEC)
14397 {
14398 expansion_failed (NULL_TREE, rtl,
14399 "Expression rejected for debug by the backend.\n");
14400 return false;
14401 }
14402
14403 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14404 the target hook doesn't explicitly allow it in debug info, assume
14405 we can't express it in the debug info. */
14406 /* Don't complain about TLS UNSPECs, those are just too hard to
14407 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14408 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14409 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14410 if (flag_checking
14411 && (XVECLEN (rtl, 0) == 0
14412 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14413 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14414 inform (current_function_decl
14415 ? DECL_SOURCE_LOCATION (current_function_decl)
14416 : UNKNOWN_LOCATION,
14417 #if NUM_UNSPEC_VALUES > 0
14418 "non-delegitimized UNSPEC %s (%d) found in variable location",
14419 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14420 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14421 XINT (rtl, 1));
14422 #else
14423 "non-delegitimized UNSPEC %d found in variable location",
14424 XINT (rtl, 1));
14425 #endif
14426 expansion_failed (NULL_TREE, rtl,
14427 "UNSPEC hasn't been delegitimized.\n");
14428 return false;
14429 }
14430
14431 if (CONST_POLY_INT_P (rtl))
14432 return false;
14433
14434 if (targetm.const_not_ok_for_debug_p (rtl))
14435 {
14436 expansion_failed (NULL_TREE, rtl,
14437 "Expression rejected for debug by the backend.\n");
14438 return false;
14439 }
14440
14441 /* FIXME: Refer to PR60655. It is possible for simplification
14442 of rtl expressions in var tracking to produce such expressions.
14443 We should really identify / validate expressions
14444 enclosed in CONST that can be handled by assemblers on various
14445 targets and only handle legitimate cases here. */
14446 switch (GET_CODE (rtl))
14447 {
14448 case SYMBOL_REF:
14449 break;
14450 case NOT:
14451 case NEG:
14452 return false;
14453 default:
14454 return true;
14455 }
14456
14457 if (CONSTANT_POOL_ADDRESS_P (rtl))
14458 {
14459 bool marked;
14460 get_pool_constant_mark (rtl, &marked);
14461 /* If all references to this pool constant were optimized away,
14462 it was not output and thus we can't represent it. */
14463 if (!marked)
14464 {
14465 expansion_failed (NULL_TREE, rtl,
14466 "Constant was removed from constant pool.\n");
14467 return false;
14468 }
14469 }
14470
14471 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14472 return false;
14473
14474 /* Avoid references to external symbols in debug info: on several targets
14475 the linker might even refuse to link when linking a shared library,
14476 and in many other cases the relocations for .debug_info/.debug_loc are
14477 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14478 to be defined within the same shared library or executable, are fine. */
14479 if (SYMBOL_REF_EXTERNAL_P (rtl))
14480 {
14481 tree decl = SYMBOL_REF_DECL (rtl);
14482
14483 if (decl == NULL || !targetm.binds_local_p (decl))
14484 {
14485 expansion_failed (NULL_TREE, rtl,
14486 "Symbol not defined in current TU.\n");
14487 return false;
14488 }
14489 }
14490
14491 return true;
14492 }
14493
14494 /* Return true if constant RTL can be emitted in DW_OP_addr or
14495 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14496 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14497
14498 static bool
14499 const_ok_for_output (rtx rtl)
14500 {
14501 if (GET_CODE (rtl) == SYMBOL_REF)
14502 return const_ok_for_output_1 (rtl);
14503
14504 if (GET_CODE (rtl) == CONST)
14505 {
14506 subrtx_var_iterator::array_type array;
14507 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14508 if (!const_ok_for_output_1 (*iter))
14509 return false;
14510 return true;
14511 }
14512
14513 return true;
14514 }
14515
14516 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14517 if possible, NULL otherwise. */
14518
14519 static dw_die_ref
14520 base_type_for_mode (machine_mode mode, bool unsignedp)
14521 {
14522 dw_die_ref type_die;
14523 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14524
14525 if (type == NULL)
14526 return NULL;
14527 switch (TREE_CODE (type))
14528 {
14529 case INTEGER_TYPE:
14530 case REAL_TYPE:
14531 break;
14532 default:
14533 return NULL;
14534 }
14535 type_die = lookup_type_die (type);
14536 if (!type_die)
14537 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14538 comp_unit_die ());
14539 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14540 return NULL;
14541 return type_die;
14542 }
14543
14544 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14545 type matching MODE, or, if MODE is narrower than or as wide as
14546 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14547 possible. */
14548
14549 static dw_loc_descr_ref
14550 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14551 {
14552 machine_mode outer_mode = mode;
14553 dw_die_ref type_die;
14554 dw_loc_descr_ref cvt;
14555
14556 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14557 {
14558 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14559 return op;
14560 }
14561 type_die = base_type_for_mode (outer_mode, 1);
14562 if (type_die == NULL)
14563 return NULL;
14564 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14565 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14566 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14567 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14568 add_loc_descr (&op, cvt);
14569 return op;
14570 }
14571
14572 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14573
14574 static dw_loc_descr_ref
14575 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14576 dw_loc_descr_ref op1)
14577 {
14578 dw_loc_descr_ref ret = op0;
14579 add_loc_descr (&ret, op1);
14580 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14581 if (STORE_FLAG_VALUE != 1)
14582 {
14583 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14584 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14585 }
14586 return ret;
14587 }
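
/* For example, on the usual STORE_FLAG_VALUE == 1 targets an EQ comparison
   is simply <op0> <op1> DW_OP_eq, while a target defining STORE_FLAG_VALUE
   as -1 additionally gets DW_OP_const1s -1 DW_OP_mul appended so that the
   0/1 DWARF result is scaled to match.  */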
14588
14589 /* Subroutine of scompare_loc_descriptor for the case in which we're
14590 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14591 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14592
14593 static dw_loc_descr_ref
14594 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14595 scalar_int_mode op_mode,
14596 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14597 {
14598 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14599 dw_loc_descr_ref cvt;
14600
14601 if (type_die == NULL)
14602 return NULL;
14603 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14604 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14605 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14606 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14607 add_loc_descr (&op0, cvt);
14608 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14609 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14610 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14611 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14612 add_loc_descr (&op1, cvt);
14613 return compare_loc_descriptor (op, op0, op1);
14614 }
14615
14616 /* Subroutine of scompare_loc_descriptor for the case in which we're
14617 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14618 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14619
14620 static dw_loc_descr_ref
14621 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14622 scalar_int_mode op_mode,
14623 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14624 {
14625 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14626 /* For eq/ne, if the operands are known to be zero-extended,
14627 there is no need to do the fancy shifting up. */
14628 if (op == DW_OP_eq || op == DW_OP_ne)
14629 {
14630 dw_loc_descr_ref last0, last1;
14631 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14632 ;
14633 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14634 ;
14635 /* deref_size zero extends, and for constants we can check
14636 whether they are zero extended or not. */
14637 if (((last0->dw_loc_opc == DW_OP_deref_size
14638 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14639 || (CONST_INT_P (XEXP (rtl, 0))
14640 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14641 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14642 && ((last1->dw_loc_opc == DW_OP_deref_size
14643 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14644 || (CONST_INT_P (XEXP (rtl, 1))
14645 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14646 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14647 return compare_loc_descriptor (op, op0, op1);
14648
14649 /* EQ/NE comparison against constant in narrower type than
14650 DWARF2_ADDR_SIZE can be performed either as
14651 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14652 DW_OP_{eq,ne}
14653 or
14654 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14655 DW_OP_{eq,ne}. Pick whatever is shorter. */
14656 if (CONST_INT_P (XEXP (rtl, 1))
14657 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14658 && (size_of_int_loc_descriptor (shift) + 1
14659 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14660 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14661 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14662 & GET_MODE_MASK (op_mode))))
14663 {
14664 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14665 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14666 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14667 & GET_MODE_MASK (op_mode));
14668 return compare_loc_descriptor (op, op0, op1);
14669 }
14670 }
14671 add_loc_descr (&op0, int_loc_descriptor (shift));
14672 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14673 if (CONST_INT_P (XEXP (rtl, 1)))
14674 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14675 else
14676 {
14677 add_loc_descr (&op1, int_loc_descriptor (shift));
14678 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14679 }
14680 return compare_loc_descriptor (op, op0, op1);
14681 }
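
/* For example, a signed comparison of two non-constant HImode operands with
   DWARF2_ADDR_SIZE == 4 shifts both values up by (4 - 2) * BITS_PER_UNIT
   == 16 bits first:
     <op0> DW_OP_lit16 DW_OP_shl <op1> DW_OP_lit16 DW_OP_shl DW_OP_lt
   so that the signed DWARF comparison sees the sign bits in the proper
   place.  */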
14682
14683 /* Return location descriptor for signed comparison OP RTL. */
14684
14685 static dw_loc_descr_ref
14686 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14687 machine_mode mem_mode)
14688 {
14689 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14690 dw_loc_descr_ref op0, op1;
14691
14692 if (op_mode == VOIDmode)
14693 op_mode = GET_MODE (XEXP (rtl, 1));
14694 if (op_mode == VOIDmode)
14695 return NULL;
14696
14697 scalar_int_mode int_op_mode;
14698 if (dwarf_strict
14699 && dwarf_version < 5
14700 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14701 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14702 return NULL;
14703
14704 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14705 VAR_INIT_STATUS_INITIALIZED);
14706 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14707 VAR_INIT_STATUS_INITIALIZED);
14708
14709 if (op0 == NULL || op1 == NULL)
14710 return NULL;
14711
14712 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14713 {
14714 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14715 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14716
14717 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14718 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14719 }
14720 return compare_loc_descriptor (op, op0, op1);
14721 }
14722
14723 /* Return location descriptor for unsigned comparison OP RTL. */
14724
14725 static dw_loc_descr_ref
14726 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14727 machine_mode mem_mode)
14728 {
14729 dw_loc_descr_ref op0, op1;
14730
14731 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14732 if (test_op_mode == VOIDmode)
14733 test_op_mode = GET_MODE (XEXP (rtl, 1));
14734
14735 scalar_int_mode op_mode;
14736 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14737 return NULL;
14738
14739 if (dwarf_strict
14740 && dwarf_version < 5
14741 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14742 return NULL;
14743
14744 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14745 VAR_INIT_STATUS_INITIALIZED);
14746 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14747 VAR_INIT_STATUS_INITIALIZED);
14748
14749 if (op0 == NULL || op1 == NULL)
14750 return NULL;
14751
14752 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14753 {
14754 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14755 dw_loc_descr_ref last0, last1;
14756 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14757 ;
14758 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14759 ;
14760 if (CONST_INT_P (XEXP (rtl, 0)))
14761 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14762 /* deref_size zero extends, so no need to mask it again. */
14763 else if (last0->dw_loc_opc != DW_OP_deref_size
14764 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14765 {
14766 add_loc_descr (&op0, int_loc_descriptor (mask));
14767 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14768 }
14769 if (CONST_INT_P (XEXP (rtl, 1)))
14770 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14771 /* deref_size zero extends, so no need to mask it again. */
14772 else if (last1->dw_loc_opc != DW_OP_deref_size
14773 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14774 {
14775 add_loc_descr (&op1, int_loc_descriptor (mask));
14776 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14777 }
14778 }
14779 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14780 {
14781 HOST_WIDE_INT bias = 1;
14782 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14783 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14784 if (CONST_INT_P (XEXP (rtl, 1)))
14785 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14786 + INTVAL (XEXP (rtl, 1)));
14787 else
14788 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14789 bias, 0));
14790 }
14791 return compare_loc_descriptor (op, op0, op1);
14792 }
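
/* For example, an unsigned SImode comparison with DWARF2_ADDR_SIZE == 4 is
   handled by biasing both operands with 0x80000000 so that the signed DWARF
   comparison gives the unsigned result:
     <op0> DW_OP_plus_uconst 0x80000000
     <op1> DW_OP_plus_uconst 0x80000000 DW_OP_lt
   (a constant second operand is folded into a single biased constant
   instead).  */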
14793
14794 /* Return location descriptor for {U,S}{MIN,MAX}. */
14795
14796 static dw_loc_descr_ref
14797 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14798 machine_mode mem_mode)
14799 {
14800 enum dwarf_location_atom op;
14801 dw_loc_descr_ref op0, op1, ret;
14802 dw_loc_descr_ref bra_node, drop_node;
14803
14804 scalar_int_mode int_mode;
14805 if (dwarf_strict
14806 && dwarf_version < 5
14807 && (!is_a <scalar_int_mode> (mode, &int_mode)
14808 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14809 return NULL;
14810
14811 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14812 VAR_INIT_STATUS_INITIALIZED);
14813 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14814 VAR_INIT_STATUS_INITIALIZED);
14815
14816 if (op0 == NULL || op1 == NULL)
14817 return NULL;
14818
14819 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14820 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14821 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14822 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14823 {
14824 /* Checked by the caller. */
14825 int_mode = as_a <scalar_int_mode> (mode);
14826 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14827 {
14828 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14829 add_loc_descr (&op0, int_loc_descriptor (mask));
14830 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14831 add_loc_descr (&op1, int_loc_descriptor (mask));
14832 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14833 }
14834 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14835 {
14836 HOST_WIDE_INT bias = 1;
14837 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14838 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14839 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14840 }
14841 }
14842 else if (is_a <scalar_int_mode> (mode, &int_mode)
14843 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14844 {
14845 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14846 add_loc_descr (&op0, int_loc_descriptor (shift));
14847 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14848 add_loc_descr (&op1, int_loc_descriptor (shift));
14849 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14850 }
14851 else if (is_a <scalar_int_mode> (mode, &int_mode)
14852 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14853 {
14854 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14855 dw_loc_descr_ref cvt;
14856 if (type_die == NULL)
14857 return NULL;
14858 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14859 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14860 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14861 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14862 add_loc_descr (&op0, cvt);
14863 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14864 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14865 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14866 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14867 add_loc_descr (&op1, cvt);
14868 }
14869
14870 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14871 op = DW_OP_lt;
14872 else
14873 op = DW_OP_gt;
14874 ret = op0;
14875 add_loc_descr (&ret, op1);
14876 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14877 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14878 add_loc_descr (&ret, bra_node);
14879 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14880 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14881 add_loc_descr (&ret, drop_node);
14882 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14883 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14884 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14885 && is_a <scalar_int_mode> (mode, &int_mode)
14886 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14887 ret = convert_descriptor_to_mode (int_mode, ret);
14888 return ret;
14889 }
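
/* For example, SMIN of A and B in a mode exactly as wide as DWARF2_ADDR_SIZE
   needs no adjustment and becomes:
     <A> DW_OP_dup <B> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L>
     DW_OP_swap
     L: DW_OP_drop
   leaving A on the stack when A < B and B otherwise; the unsigned, narrower
   and wider variants first adjust the copies being compared as shown
   above.  */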
14890
14891 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
14892 after converting the arguments to TYPE_DIE, then convert the result back
14893 to an unsigned value of MODE (untyped if MODE fits DWARF2_ADDR_SIZE). */
14894
14895 static dw_loc_descr_ref
14896 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14897 scalar_int_mode mode, machine_mode mem_mode)
14898 {
14899 dw_loc_descr_ref cvt, op0, op1;
14900
14901 if (type_die == NULL)
14902 return NULL;
14903 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14904 VAR_INIT_STATUS_INITIALIZED);
14905 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14906 VAR_INIT_STATUS_INITIALIZED);
14907 if (op0 == NULL || op1 == NULL)
14908 return NULL;
14909 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14910 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14911 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14912 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14913 add_loc_descr (&op0, cvt);
14914 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14915 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14916 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14917 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14918 add_loc_descr (&op1, cvt);
14919 add_loc_descr (&op0, op1);
14920 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14921 return convert_descriptor_to_mode (mode, op0);
14922 }
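
/* For example, the DIV handling below uses this so that, when not limited to
   strict pre-DWARF 5 output, a signed DImode division on a target with
   DWARF2_ADDR_SIZE == 4 becomes:
     <op0> DW_OP_convert <signed DImode base type DIE>
     <op1> DW_OP_convert <signed DImode base type DIE>
     DW_OP_div DW_OP_convert <unsigned DImode base type DIE>
   i.e. the operation is carried out in the requested type and the result is
   handed back as an unsigned typed value.  */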
14923
14924 /* CLZ (where constV is the value computed by CLZ_DEFINED_VALUE_AT_ZERO,
14925 const0 is DW_OP_lit0 or corresponding typed constant,
14926 const1 is DW_OP_lit1 or corresponding typed constant
14927 and constMSB is constant with just the MSB bit set
14928 for the mode):
14929 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14930 L1: const0 DW_OP_swap
14931 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14932 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14933 L3: DW_OP_drop
14934 L4: DW_OP_nop
14935
14936 CTZ is similar:
14937 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14938 L1: const0 DW_OP_swap
14939 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14940 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14941 L3: DW_OP_drop
14942 L4: DW_OP_nop
14943
14944 FFS is similar:
14945 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14946 L1: const1 DW_OP_swap
14947 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14948 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14949 L3: DW_OP_drop
14950 L4: DW_OP_nop */
14951
14952 static dw_loc_descr_ref
14953 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14954 machine_mode mem_mode)
14955 {
14956 dw_loc_descr_ref op0, ret, tmp;
14957 HOST_WIDE_INT valv;
14958 dw_loc_descr_ref l1jump, l1label;
14959 dw_loc_descr_ref l2jump, l2label;
14960 dw_loc_descr_ref l3jump, l3label;
14961 dw_loc_descr_ref l4jump, l4label;
14962 rtx msb;
14963
14964 if (GET_MODE (XEXP (rtl, 0)) != mode)
14965 return NULL;
14966
14967 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14968 VAR_INIT_STATUS_INITIALIZED);
14969 if (op0 == NULL)
14970 return NULL;
14971 ret = op0;
14972 if (GET_CODE (rtl) == CLZ)
14973 {
14974 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14975 valv = GET_MODE_BITSIZE (mode);
14976 }
14977 else if (GET_CODE (rtl) == FFS)
14978 valv = 0;
14979 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14980 valv = GET_MODE_BITSIZE (mode);
14981 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14982 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14983 add_loc_descr (&ret, l1jump);
14984 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14985 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14986 VAR_INIT_STATUS_INITIALIZED);
14987 if (tmp == NULL)
14988 return NULL;
14989 add_loc_descr (&ret, tmp);
14990 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14991 add_loc_descr (&ret, l4jump);
14992 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14993 ? const1_rtx : const0_rtx,
14994 mode, mem_mode,
14995 VAR_INIT_STATUS_INITIALIZED);
14996 if (l1label == NULL)
14997 return NULL;
14998 add_loc_descr (&ret, l1label);
14999 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15000 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15001 add_loc_descr (&ret, l2label);
15002 if (GET_CODE (rtl) != CLZ)
15003 msb = const1_rtx;
15004 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15005 msb = GEN_INT (HOST_WIDE_INT_1U
15006 << (GET_MODE_BITSIZE (mode) - 1));
15007 else
15008 msb = immed_wide_int_const
15009 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15010 GET_MODE_PRECISION (mode)), mode);
15011 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15012 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15013 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15014 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15015 else
15016 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15017 VAR_INIT_STATUS_INITIALIZED);
15018 if (tmp == NULL)
15019 return NULL;
15020 add_loc_descr (&ret, tmp);
15021 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15022 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15023 add_loc_descr (&ret, l3jump);
15024 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 if (tmp == NULL)
15027 return NULL;
15028 add_loc_descr (&ret, tmp);
15029 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15030 ? DW_OP_shl : DW_OP_shr, 0, 0));
15031 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15032 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15033 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15034 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15035 add_loc_descr (&ret, l2jump);
15036 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15037 add_loc_descr (&ret, l3label);
15038 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15039 add_loc_descr (&ret, l4label);
15040 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15041 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15042 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15043 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15044 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15045 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15046 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15047 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15048 return ret;
15049 }
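
/* For example, CLZ of the QImode value 0x20 goes around the L2 loop twice
   (0x20 -> 0x40 -> 0x80, at which point the constMSB test succeeds) and
   leaves 2 on the stack, while a zero input branches straight to the
   CLZ_DEFINED_VALUE_AT_ZERO constant (or the mode bitsize when that macro
   does not apply).  */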
15050
15051 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15052 const1 is DW_OP_lit1 or corresponding typed constant):
15053 const0 DW_OP_swap
15054 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15055 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15056 L2: DW_OP_drop
15057
15058 PARITY is similar (with the same const0 DW_OP_swap prologue):
15059 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15060 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15061 L2: DW_OP_drop */
15062
15063 static dw_loc_descr_ref
15064 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15065 machine_mode mem_mode)
15066 {
15067 dw_loc_descr_ref op0, ret, tmp;
15068 dw_loc_descr_ref l1jump, l1label;
15069 dw_loc_descr_ref l2jump, l2label;
15070
15071 if (GET_MODE (XEXP (rtl, 0)) != mode)
15072 return NULL;
15073
15074 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15075 VAR_INIT_STATUS_INITIALIZED);
15076 if (op0 == NULL)
15077 return NULL;
15078 ret = op0;
15079 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15080 VAR_INIT_STATUS_INITIALIZED);
15081 if (tmp == NULL)
15082 return NULL;
15083 add_loc_descr (&ret, tmp);
15084 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15085 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15086 add_loc_descr (&ret, l1label);
15087 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15088 add_loc_descr (&ret, l2jump);
15089 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15090 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15091 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15092 VAR_INIT_STATUS_INITIALIZED);
15093 if (tmp == NULL)
15094 return NULL;
15095 add_loc_descr (&ret, tmp);
15096 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15097 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15098 ? DW_OP_plus : DW_OP_xor, 0, 0));
15099 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15100 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15101 VAR_INIT_STATUS_INITIALIZED);
15102 add_loc_descr (&ret, tmp);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15104 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15105 add_loc_descr (&ret, l1jump);
15106 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15107 add_loc_descr (&ret, l2label);
15108 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15109 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15110 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15111 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15112 return ret;
15113 }
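
/* For example, POPCOUNT of the value 5 (binary 101) iterates three times,
   accumulating 1 + 0 + 1 == 2 before the shifted copy reaches zero and the
   loop exits through L2; PARITY differs only in accumulating with DW_OP_xor,
   so the same input yields 0.  */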
15114
15115 /* BSWAP (constS is the initial shift count, either 56 or 24):
15116 constS const0
15117 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15118 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15119 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15120 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15121 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15122
15123 static dw_loc_descr_ref
15124 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15125 machine_mode mem_mode)
15126 {
15127 dw_loc_descr_ref op0, ret, tmp;
15128 dw_loc_descr_ref l1jump, l1label;
15129 dw_loc_descr_ref l2jump, l2label;
15130
15131 if (BITS_PER_UNIT != 8
15132 || (GET_MODE_BITSIZE (mode) != 32
15133 && GET_MODE_BITSIZE (mode) != 64))
15134 return NULL;
15135
15136 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15137 VAR_INIT_STATUS_INITIALIZED);
15138 if (op0 == NULL)
15139 return NULL;
15140
15141 ret = op0;
15142 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15143 mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (tmp == NULL)
15146 return NULL;
15147 add_loc_descr (&ret, tmp);
15148 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15149 VAR_INIT_STATUS_INITIALIZED);
15150 if (tmp == NULL)
15151 return NULL;
15152 add_loc_descr (&ret, tmp);
15153 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15154 add_loc_descr (&ret, l1label);
15155 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15156 mode, mem_mode,
15157 VAR_INIT_STATUS_INITIALIZED);
15158 add_loc_descr (&ret, tmp);
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15160 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15161 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15162 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15163 VAR_INIT_STATUS_INITIALIZED);
15164 if (tmp == NULL)
15165 return NULL;
15166 add_loc_descr (&ret, tmp);
15167 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15168 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15171 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15172 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15173 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15174 VAR_INIT_STATUS_INITIALIZED);
15175 add_loc_descr (&ret, tmp);
15176 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15177 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15178 add_loc_descr (&ret, l2jump);
15179 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15180 VAR_INIT_STATUS_INITIALIZED);
15181 add_loc_descr (&ret, tmp);
15182 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15183 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15184 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15185 add_loc_descr (&ret, l1jump);
15186 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15187 add_loc_descr (&ret, l2label);
15188 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15189 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15190 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15191 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15192 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15193 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15194 return ret;
15195 }
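
/* For example, for the SImode value 0x11223344 the loop above runs four
   times, with the shift count stepping through 24, 16, 8 and 0, and the
   final result left on the stack is 0x44332211.  */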
15196
15197 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15198 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15199 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15200 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15201
15202 ROTATERT is similar:
15203 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15204 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15205 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15206
15207 static dw_loc_descr_ref
15208 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15209 machine_mode mem_mode)
15210 {
15211 rtx rtlop1 = XEXP (rtl, 1);
15212 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15213 int i;
15214
15215 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15216 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15217 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15218 VAR_INIT_STATUS_INITIALIZED);
15219 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15220 VAR_INIT_STATUS_INITIALIZED);
15221 if (op0 == NULL || op1 == NULL)
15222 return NULL;
15223 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15224 for (i = 0; i < 2; i++)
15225 {
15226 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15227 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15228 mode, mem_mode,
15229 VAR_INIT_STATUS_INITIALIZED);
15230 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15231 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15232 ? DW_OP_const4u
15233 : HOST_BITS_PER_WIDE_INT == 64
15234 ? DW_OP_const8u : DW_OP_constu,
15235 GET_MODE_MASK (mode), 0);
15236 else
15237 mask[i] = NULL;
15238 if (mask[i] == NULL)
15239 return NULL;
15240 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15241 }
15242 ret = op0;
15243 add_loc_descr (&ret, op1);
15244 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15245 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15246 if (GET_CODE (rtl) == ROTATERT)
15247 {
15248 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15249 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15250 GET_MODE_BITSIZE (mode), 0));
15251 }
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15253 if (mask[0] != NULL)
15254 add_loc_descr (&ret, mask[0]);
15255 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15256 if (mask[1] != NULL)
15257 {
15258 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15259 add_loc_descr (&ret, mask[1]);
15260 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15261 }
15262 if (GET_CODE (rtl) == ROTATE)
15263 {
15264 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15265 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15266 GET_MODE_BITSIZE (mode), 0));
15267 }
15268 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15269 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15270 return ret;
15271 }
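
/* For example, a ROTATE (rotate left) in a mode exactly as wide as
   DWARF2_ADDR_SIZE needs no masking and becomes:
     <X> <N> DW_OP_over DW_OP_over DW_OP_shl DW_OP_rot DW_OP_neg
     DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
   which computes (X << N) | (X >> (BITSIZE - N)); narrower modes insert the
   constMASK DW_OP_and steps shown above to strip the bits shifted beyond
   the mode.  */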
15272
15273 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15274 for DEBUG_PARAMETER_REF RTL. */
15275
15276 static dw_loc_descr_ref
15277 parameter_ref_descriptor (rtx rtl)
15278 {
15279 dw_loc_descr_ref ret;
15280 dw_die_ref ref;
15281
15282 if (dwarf_strict)
15283 return NULL;
15284 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15285 /* With LTO during LTRANS we get the late DIE that refers to the early
15286 DIE, thus we add another indirection here. This seems to confuse
15287 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15288 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15289 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15290 if (ref)
15291 {
15292 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15293 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15294 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15295 }
15296 else
15297 {
15298 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15299 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15300 }
15301 return ret;
15302 }
15303
15304 /* The following routine converts the RTL for a variable or parameter
15305 (resident in memory) into an equivalent Dwarf representation of a
15306 mechanism for getting the address of that same variable onto the top of a
15307 hypothetical "address evaluation" stack.
15308
15309 When creating memory location descriptors, we are effectively transforming
15310 the RTL for a memory-resident object into its Dwarf postfix expression
15311 equivalent. This routine recursively descends an RTL tree, turning
15312 it into Dwarf postfix code as it goes.
15313
15314 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15315
15316 MEM_MODE is the mode of the memory reference, needed to handle some
15317 autoincrement addressing modes.
15318
15319 Return 0 if we can't represent the location. */
15320
15321 dw_loc_descr_ref
15322 mem_loc_descriptor (rtx rtl, machine_mode mode,
15323 machine_mode mem_mode,
15324 enum var_init_status initialized)
15325 {
15326 dw_loc_descr_ref mem_loc_result = NULL;
15327 enum dwarf_location_atom op;
15328 dw_loc_descr_ref op0, op1;
15329 rtx inner = NULL_RTX;
15330 poly_int64 offset;
15331
15332 if (mode == VOIDmode)
15333 mode = GET_MODE (rtl);
15334
15335 /* Note that for a dynamically sized array, the location we will generate a
15336 description of here will be the lowest numbered location which is
15337 actually within the array. That's *not* necessarily the same as the
15338 zeroth element of the array. */
15339
15340 rtl = targetm.delegitimize_address (rtl);
15341
15342 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15343 return NULL;
15344
15345 scalar_int_mode int_mode, inner_mode, op1_mode;
15346 switch (GET_CODE (rtl))
15347 {
15348 case POST_INC:
15349 case POST_DEC:
15350 case POST_MODIFY:
15351 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15352
15353 case SUBREG:
15354 /* The case of a subreg may arise when we have a local (register)
15355 variable or a formal (register) parameter which doesn't quite fill
15356 up an entire register. For now, just assume that it is
15357 legitimate to make the Dwarf info refer to the whole register which
15358 contains the given subreg. */
15359 if (!subreg_lowpart_p (rtl))
15360 break;
15361 inner = SUBREG_REG (rtl);
15362 /* FALLTHRU */
15363 case TRUNCATE:
15364 if (inner == NULL_RTX)
15365 inner = XEXP (rtl, 0);
15366 if (is_a <scalar_int_mode> (mode, &int_mode)
15367 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15368 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15369 #ifdef POINTERS_EXTEND_UNSIGNED
15370 || (int_mode == Pmode && mem_mode != VOIDmode)
15371 #endif
15372 )
15373 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15374 {
15375 mem_loc_result = mem_loc_descriptor (inner,
15376 inner_mode,
15377 mem_mode, initialized);
15378 break;
15379 }
15380 if (dwarf_strict && dwarf_version < 5)
15381 break;
15382 if (is_a <scalar_int_mode> (mode, &int_mode)
15383 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15384 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15385 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15386 {
15387 dw_die_ref type_die;
15388 dw_loc_descr_ref cvt;
15389
15390 mem_loc_result = mem_loc_descriptor (inner,
15391 GET_MODE (inner),
15392 mem_mode, initialized);
15393 if (mem_loc_result == NULL)
15394 break;
15395 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15396 if (type_die == NULL)
15397 {
15398 mem_loc_result = NULL;
15399 break;
15400 }
15401 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15402 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15403 else
15404 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15405 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15406 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15407 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15408 add_loc_descr (&mem_loc_result, cvt);
15409 if (is_a <scalar_int_mode> (mode, &int_mode)
15410 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15411 {
15412 /* Convert it to untyped afterwards. */
15413 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15414 add_loc_descr (&mem_loc_result, cvt);
15415 }
15416 }
15417 break;
15418
15419 case REG:
15420 if (!is_a <scalar_int_mode> (mode, &int_mode)
15421 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15422 && rtl != arg_pointer_rtx
15423 && rtl != frame_pointer_rtx
15424 #ifdef POINTERS_EXTEND_UNSIGNED
15425 && (int_mode != Pmode || mem_mode == VOIDmode)
15426 #endif
15427 ))
15428 {
15429 dw_die_ref type_die;
15430 unsigned int dbx_regnum;
15431
15432 if (dwarf_strict && dwarf_version < 5)
15433 break;
15434 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15435 break;
15436 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15437 if (type_die == NULL)
15438 break;
15439
15440 dbx_regnum = dbx_reg_number (rtl);
15441 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15442 break;
15443 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15444 dbx_regnum, 0);
15445 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15446 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15447 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15448 break;
15449 }
15450 /* Whenever a register number forms a part of the description of the
15451 method for calculating the (dynamic) address of a memory resident
15452 object, DWARF rules require the register number be referred to as
15453 a "base register". This distinction is not based in any way upon
15454 what category of register the hardware believes the given register
15455 belongs to. This is strictly DWARF terminology we're dealing with
15456 here. Note that in cases where the location of a memory-resident
15457 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15458 OP_CONST (0)) the actual DWARF location descriptor that we generate
15459 may just be OP_BASEREG (basereg). This may look deceptively like
15460 the object in question was allocated to a register (rather than in
15461 memory) so DWARF consumers need to be aware of the subtle
15462 distinction between OP_REG and OP_BASEREG. */
15463 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15464 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15465 else if (stack_realign_drap
15466 && crtl->drap_reg
15467 && crtl->args.internal_arg_pointer == rtl
15468 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15469 {
15470 /* If RTL is internal_arg_pointer, which has been optimized
15471 out, use DRAP instead. */
15472 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15473 VAR_INIT_STATUS_INITIALIZED);
15474 }
15475 break;
15476
15477 case SIGN_EXTEND:
15478 case ZERO_EXTEND:
15479 if (!is_a <scalar_int_mode> (mode, &int_mode)
15480 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15481 break;
15482 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15483 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15484 if (op0 == 0)
15485 break;
15486 else if (GET_CODE (rtl) == ZERO_EXTEND
15487 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15488 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15489 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15490 to expand zero extend as two shifts instead of
15491 masking. */
15492 && GET_MODE_SIZE (inner_mode) <= 4)
15493 {
15494 mem_loc_result = op0;
15495 add_loc_descr (&mem_loc_result,
15496 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15497 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15498 }
15499 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15500 {
15501 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15502 shift *= BITS_PER_UNIT;
15503 if (GET_CODE (rtl) == SIGN_EXTEND)
15504 op = DW_OP_shra;
15505 else
15506 op = DW_OP_shr;
15507 mem_loc_result = op0;
15508 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15509 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15510 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15511 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15512 }
15513 else if (!dwarf_strict || dwarf_version >= 5)
15514 {
15515 dw_die_ref type_die1, type_die2;
15516 dw_loc_descr_ref cvt;
15517
15518 type_die1 = base_type_for_mode (inner_mode,
15519 GET_CODE (rtl) == ZERO_EXTEND);
15520 if (type_die1 == NULL)
15521 break;
15522 type_die2 = base_type_for_mode (int_mode, 1);
15523 if (type_die2 == NULL)
15524 break;
15525 mem_loc_result = op0;
15526 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15527 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15528 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15529 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15530 add_loc_descr (&mem_loc_result, cvt);
15531 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15532 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15533 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15534 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15535 add_loc_descr (&mem_loc_result, cvt);
15536 }
15537 break;
15538
15539 case MEM:
15540 {
15541 rtx new_rtl = avoid_constant_pool_reference (rtl);
15542 if (new_rtl != rtl)
15543 {
15544 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15545 initialized);
15546 if (mem_loc_result != NULL)
15547 return mem_loc_result;
15548 }
15549 }
15550 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15551 get_address_mode (rtl), mode,
15552 VAR_INIT_STATUS_INITIALIZED);
15553 if (mem_loc_result == NULL)
15554 mem_loc_result = tls_mem_loc_descriptor (rtl);
15555 if (mem_loc_result != NULL)
15556 {
15557 if (!is_a <scalar_int_mode> (mode, &int_mode)
15558 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15559 {
15560 dw_die_ref type_die;
15561 dw_loc_descr_ref deref;
15562 HOST_WIDE_INT size;
15563
15564 if (dwarf_strict && dwarf_version < 5)
15565 return NULL;
15566 if (!GET_MODE_SIZE (mode).is_constant (&size))
15567 return NULL;
15568 type_die
15569 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15570 if (type_die == NULL)
15571 return NULL;
15572 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15573 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15574 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15575 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15576 add_loc_descr (&mem_loc_result, deref);
15577 }
15578 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15579 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15580 else
15581 add_loc_descr (&mem_loc_result,
15582 new_loc_descr (DW_OP_deref_size,
15583 GET_MODE_SIZE (int_mode), 0));
15584 }
15585 break;
15586
15587 case LO_SUM:
15588 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15589
15590 case LABEL_REF:
15591 /* Some ports can transform a symbol ref into a label ref, because
15592 the symbol ref is too far away and has to be dumped into a constant
15593 pool. */
15594 case CONST:
15595 case SYMBOL_REF:
15596 if (!is_a <scalar_int_mode> (mode, &int_mode)
15597 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15598 #ifdef POINTERS_EXTEND_UNSIGNED
15599 && (int_mode != Pmode || mem_mode == VOIDmode)
15600 #endif
15601 ))
15602 break;
15603 if (GET_CODE (rtl) == SYMBOL_REF
15604 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15605 {
15606 dw_loc_descr_ref temp;
15607
15608 /* If this is not defined, we have no way to emit the data. */
15609 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15610 break;
15611
15612 temp = new_addr_loc_descr (rtl, dtprel_true);
15613
15614 /* We check for DWARF 5 here because gdb did not implement
15615 DW_OP_form_tls_address until after 7.12. */
15616 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15617 ? DW_OP_form_tls_address
15618 : DW_OP_GNU_push_tls_address),
15619 0, 0);
15620 add_loc_descr (&mem_loc_result, temp);
15621
15622 break;
15623 }
15624
15625 if (!const_ok_for_output (rtl))
15626 {
15627 if (GET_CODE (rtl) == CONST)
15628 switch (GET_CODE (XEXP (rtl, 0)))
15629 {
15630 case NOT:
15631 op = DW_OP_not;
15632 goto try_const_unop;
15633 case NEG:
15634 op = DW_OP_neg;
15635 goto try_const_unop;
15636 try_const_unop:
15637 rtx arg;
15638 arg = XEXP (XEXP (rtl, 0), 0);
15639 if (!CONSTANT_P (arg))
15640 arg = gen_rtx_CONST (int_mode, arg);
15641 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15642 initialized);
15643 if (op0)
15644 {
15645 mem_loc_result = op0;
15646 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15647 }
15648 break;
15649 default:
15650 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15651 mem_mode, initialized);
15652 break;
15653 }
15654 break;
15655 }
15656
15657 symref:
15658 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15659 vec_safe_push (used_rtx_array, rtl);
15660 break;
15661
15662 case CONCAT:
15663 case CONCATN:
15664 case VAR_LOCATION:
15665 case DEBUG_IMPLICIT_PTR:
15666 expansion_failed (NULL_TREE, rtl,
15667 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15668 return 0;
15669
15670 case ENTRY_VALUE:
15671 if (dwarf_strict && dwarf_version < 5)
15672 return NULL;
15673 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15674 {
15675 if (!is_a <scalar_int_mode> (mode, &int_mode)
15676 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15677 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15678 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15679 else
15680 {
15681 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15682 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15683 return NULL;
15684 op0 = one_reg_loc_descriptor (dbx_regnum,
15685 VAR_INIT_STATUS_INITIALIZED);
15686 }
15687 }
15688 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15689 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15690 {
15691 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15692 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15693 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15694 return NULL;
15695 }
15696 else
15697 gcc_unreachable ();
15698 if (op0 == NULL)
15699 return NULL;
15700 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15701 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15702 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15703 break;
15704
15705 case DEBUG_PARAMETER_REF:
15706 mem_loc_result = parameter_ref_descriptor (rtl);
15707 break;
15708
15709 case PRE_MODIFY:
15710 /* Extract the PLUS expression nested inside and fall into
15711 PLUS code below. */
15712 rtl = XEXP (rtl, 1);
15713 goto plus;
15714
15715 case PRE_INC:
15716 case PRE_DEC:
15717 /* Turn these into a PLUS expression and fall into the PLUS code
15718 below. */
15719 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15720 gen_int_mode (GET_CODE (rtl) == PRE_INC
15721 ? GET_MODE_UNIT_SIZE (mem_mode)
15722 : -GET_MODE_UNIT_SIZE (mem_mode),
15723 mode));
15724
15725 /* fall through */
15726
15727 case PLUS:
15728 plus:
15729 if (is_based_loc (rtl)
15730 && is_a <scalar_int_mode> (mode, &int_mode)
15731 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15732 || XEXP (rtl, 0) == arg_pointer_rtx
15733 || XEXP (rtl, 0) == frame_pointer_rtx))
15734 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15735 INTVAL (XEXP (rtl, 1)),
15736 VAR_INIT_STATUS_INITIALIZED);
15737 else
15738 {
15739 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15740 VAR_INIT_STATUS_INITIALIZED);
15741 if (mem_loc_result == 0)
15742 break;
15743
15744 if (CONST_INT_P (XEXP (rtl, 1))
15745 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15746 <= DWARF2_ADDR_SIZE))
15747 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15748 else
15749 {
15750 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15751 VAR_INIT_STATUS_INITIALIZED);
15752 if (op1 == 0)
15753 return NULL;
15754 add_loc_descr (&mem_loc_result, op1);
15755 add_loc_descr (&mem_loc_result,
15756 new_loc_descr (DW_OP_plus, 0, 0));
15757 }
15758 }
15759 break;
15760
15761 /* If a pseudo-reg is optimized away, it is possible for it to
15762 be replaced with a MEM containing a multiply or shift. */
15763 case MINUS:
15764 op = DW_OP_minus;
15765 goto do_binop;
15766
15767 case MULT:
15768 op = DW_OP_mul;
15769 goto do_binop;
15770
15771 case DIV:
15772 if ((!dwarf_strict || dwarf_version >= 5)
15773 && is_a <scalar_int_mode> (mode, &int_mode)
15774 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15775 {
15776 mem_loc_result = typed_binop (DW_OP_div, rtl,
15777 base_type_for_mode (mode, 0),
15778 int_mode, mem_mode);
15779 break;
15780 }
15781 op = DW_OP_div;
15782 goto do_binop;
15783
15784 case UMOD:
15785 op = DW_OP_mod;
15786 goto do_binop;
15787
15788 case ASHIFT:
15789 op = DW_OP_shl;
15790 goto do_shift;
15791
15792 case ASHIFTRT:
15793 op = DW_OP_shra;
15794 goto do_shift;
15795
15796 case LSHIFTRT:
15797 op = DW_OP_shr;
15798 goto do_shift;
15799
15800 do_shift:
15801 if (!is_a <scalar_int_mode> (mode, &int_mode))
15802 break;
15803 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15804 VAR_INIT_STATUS_INITIALIZED);
15805 {
15806 rtx rtlop1 = XEXP (rtl, 1);
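	  /* The shift count may be in a narrower integral mode than the
	     shifted value; zero-extend it so that it is translated at the
	     same width (int_mode) as the other operand.  */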
15807 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15808 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15809 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15810 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15811 VAR_INIT_STATUS_INITIALIZED);
15812 }
15813
15814 if (op0 == 0 || op1 == 0)
15815 break;
15816
15817 mem_loc_result = op0;
15818 add_loc_descr (&mem_loc_result, op1);
15819 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15820 break;
15821
15822 case AND:
15823 op = DW_OP_and;
15824 goto do_binop;
15825
15826 case IOR:
15827 op = DW_OP_or;
15828 goto do_binop;
15829
15830 case XOR:
15831 op = DW_OP_xor;
15832 goto do_binop;
15833
15834 do_binop:
15835 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15836 VAR_INIT_STATUS_INITIALIZED);
15837 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15838 VAR_INIT_STATUS_INITIALIZED);
15839
15840 if (op0 == 0 || op1 == 0)
15841 break;
15842
15843 mem_loc_result = op0;
15844 add_loc_descr (&mem_loc_result, op1);
15845 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15846 break;
15847
15848 case MOD:
15849 if ((!dwarf_strict || dwarf_version >= 5)
15850 && is_a <scalar_int_mode> (mode, &int_mode)
15851 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15852 {
15853 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15854 base_type_for_mode (mode, 0),
15855 int_mode, mem_mode);
15856 break;
15857 }
15858
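      /* Explanatory note (added, not in the original source): the untyped
	 path below computes the modulus as a - (a / b) * b on the DWARF
	 stack.  Starting from <a b> (b on top), the emitted sequence evolves
	 the stack as follows:
	   DW_OP_over   -> <a b a>
	   DW_OP_over   -> <a b a b>
	   DW_OP_div    -> <a b a/b>
	   DW_OP_mul    -> <a b*(a/b)>
	   DW_OP_minus  -> <a - b*(a/b)>  */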
15859 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15860 VAR_INIT_STATUS_INITIALIZED);
15861 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15862 VAR_INIT_STATUS_INITIALIZED);
15863
15864 if (op0 == 0 || op1 == 0)
15865 break;
15866
15867 mem_loc_result = op0;
15868 add_loc_descr (&mem_loc_result, op1);
15869 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15870 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15871 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15872 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15873 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15874 break;
15875
15876 case UDIV:
15877 if ((!dwarf_strict || dwarf_version >= 5)
15878 && is_a <scalar_int_mode> (mode, &int_mode))
15879 {
15880 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15881 {
15882 op = DW_OP_div;
15883 goto do_binop;
15884 }
15885 mem_loc_result = typed_binop (DW_OP_div, rtl,
15886 base_type_for_mode (int_mode, 1),
15887 int_mode, mem_mode);
15888 }
15889 break;
15890
15891 case NOT:
15892 op = DW_OP_not;
15893 goto do_unop;
15894
15895 case ABS:
15896 op = DW_OP_abs;
15897 goto do_unop;
15898
15899 case NEG:
15900 op = DW_OP_neg;
15901 goto do_unop;
15902
15903 do_unop:
15904 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906
15907 if (op0 == 0)
15908 break;
15909
15910 mem_loc_result = op0;
15911 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15912 break;
15913
15914 case CONST_INT:
15915 if (!is_a <scalar_int_mode> (mode, &int_mode)
15916 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15917 #ifdef POINTERS_EXTEND_UNSIGNED
15918 || (int_mode == Pmode
15919 && mem_mode != VOIDmode
15920 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15921 #endif
15922 )
15923 {
15924 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15925 break;
15926 }
15927 if ((!dwarf_strict || dwarf_version >= 5)
15928 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15929 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15930 {
15931 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15932 scalar_int_mode amode;
15933 if (type_die == NULL)
15934 return NULL;
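	  /* Explanatory note (added): the test below is a size heuristic.
	     It prefers emitting the value as an ordinary integer literal
	     followed by DW_OP_convert to the typed form whenever that
	     encoding (size_of_int_loc_descriptor plus roughly one byte of
	     opcode and one byte of type reference) is smaller than a single
	     DW_OP_const_type, whose operand includes a full
	     GET_MODE_SIZE (int_mode) constant block.  */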
15935 if (INTVAL (rtl) >= 0
15936 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15937 .exists (&amode))
15938 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15939 /* const DW_OP_convert <XXX> vs.
15940 DW_OP_const_type <XXX, 1, const>. */
15941 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15942 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15943 {
15944 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15945 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15946 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15947 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15948 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15949 add_loc_descr (&mem_loc_result, op0);
15950 return mem_loc_result;
15951 }
15952 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15953 INTVAL (rtl));
15954 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15955 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15956 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15957 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15958 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15959 else
15960 {
15961 mem_loc_result->dw_loc_oprnd2.val_class
15962 = dw_val_class_const_double;
15963 mem_loc_result->dw_loc_oprnd2.v.val_double
15964 = double_int::from_shwi (INTVAL (rtl));
15965 }
15966 }
15967 break;
15968
15969 case CONST_DOUBLE:
15970 if (!dwarf_strict || dwarf_version >= 5)
15971 {
15972 dw_die_ref type_die;
15973
15974 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15975 CONST_DOUBLE rtx could represent either a large integer
15976 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15977 the value is always a floating point constant.
15978
15979 When it is an integer, a CONST_DOUBLE is used whenever
15980 the constant requires 2 HWIs to be adequately represented.
15981 We output CONST_DOUBLEs as blocks. */
15982 if (mode == VOIDmode
15983 || (GET_MODE (rtl) == VOIDmode
15984 && maybe_ne (GET_MODE_BITSIZE (mode),
15985 HOST_BITS_PER_DOUBLE_INT)))
15986 break;
15987 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15988 if (type_die == NULL)
15989 return NULL;
15990 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15991 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15992 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15993 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15994 #if TARGET_SUPPORTS_WIDE_INT == 0
15995 if (!SCALAR_FLOAT_MODE_P (mode))
15996 {
15997 mem_loc_result->dw_loc_oprnd2.val_class
15998 = dw_val_class_const_double;
15999 mem_loc_result->dw_loc_oprnd2.v.val_double
16000 = rtx_to_double_int (rtl);
16001 }
16002 else
16003 #endif
16004 {
16005 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16006 unsigned int length = GET_MODE_SIZE (float_mode);
16007 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16008
16009 insert_float (rtl, array);
16010 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16011 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16012 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16013 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16014 }
16015 }
16016 break;
16017
16018 case CONST_WIDE_INT:
16019 if (!dwarf_strict || dwarf_version >= 5)
16020 {
16021 dw_die_ref type_die;
16022
16023 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16024 if (type_die == NULL)
16025 return NULL;
16026 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16027 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16028 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16029 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16030 mem_loc_result->dw_loc_oprnd2.val_class
16031 = dw_val_class_wide_int;
16032 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16033 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16034 }
16035 break;
16036
16037 case CONST_POLY_INT:
16038 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16039 break;
16040
16041 case EQ:
16042 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16043 break;
16044
16045 case GE:
16046 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16047 break;
16048
16049 case GT:
16050 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16051 break;
16052
16053 case LE:
16054 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16055 break;
16056
16057 case LT:
16058 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16059 break;
16060
16061 case NE:
16062 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16063 break;
16064
16065 case GEU:
16066 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16067 break;
16068
16069 case GTU:
16070 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16071 break;
16072
16073 case LEU:
16074 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16075 break;
16076
16077 case LTU:
16078 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16079 break;
16080
16081 case UMIN:
16082 case UMAX:
16083 if (!SCALAR_INT_MODE_P (mode))
16084 break;
16085 /* FALLTHRU */
16086 case SMIN:
16087 case SMAX:
16088 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16089 break;
16090
16091 case ZERO_EXTRACT:
16092 case SIGN_EXTRACT:
16093 if (CONST_INT_P (XEXP (rtl, 1))
16094 && CONST_INT_P (XEXP (rtl, 2))
16095 && is_a <scalar_int_mode> (mode, &int_mode)
16096 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16097 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16098 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16099 && ((unsigned) INTVAL (XEXP (rtl, 1))
16100 + (unsigned) INTVAL (XEXP (rtl, 2))
16101 <= GET_MODE_BITSIZE (int_mode)))
16102 {
16103 int shift, size;
16104 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16105 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16106 if (op0 == 0)
16107 break;
16108 if (GET_CODE (rtl) == SIGN_EXTRACT)
16109 op = DW_OP_shra;
16110 else
16111 op = DW_OP_shr;
16112 mem_loc_result = op0;
16113 size = INTVAL (XEXP (rtl, 1));
16114 shift = INTVAL (XEXP (rtl, 2));
16115 if (BITS_BIG_ENDIAN)
16116 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16117 if (shift + size != (int) DWARF2_ADDR_SIZE)
16118 {
16119 add_loc_descr (&mem_loc_result,
16120 int_loc_descriptor (DWARF2_ADDR_SIZE
16121 - shift - size));
16122 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16123 }
16124 if (size != (int) DWARF2_ADDR_SIZE)
16125 {
16126 add_loc_descr (&mem_loc_result,
16127 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16128 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16129 }
16130 }
16131 break;
16132
16133 case IF_THEN_ELSE:
16134 {
16135 dw_loc_descr_ref op2, bra_node, drop_node;
16136 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16137 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16138 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16139 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16140 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16141 VAR_INIT_STATUS_INITIALIZED);
16142 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16143 VAR_INIT_STATUS_INITIALIZED);
16144 if (op0 == NULL || op1 == NULL || op2 == NULL)
16145 break;
16146
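	/* Explanatory note (added, not in the original source): the sequence
	   built below computes "op0 ? op1 : op2" on the DWARF stack.  Both
	   values are pushed, then the condition; DW_OP_bra pops the
	   condition and, when it is nonzero, branches to the DW_OP_drop so
	   that the second value (op2) is discarded and op1 remains.  When
	   the condition is zero, DW_OP_swap followed by DW_OP_drop discards
	   op1 and leaves op2.  */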
16147 mem_loc_result = op1;
16148 add_loc_descr (&mem_loc_result, op2);
16149 add_loc_descr (&mem_loc_result, op0);
16150 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16151 add_loc_descr (&mem_loc_result, bra_node);
16152 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16153 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16154 add_loc_descr (&mem_loc_result, drop_node);
16155 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16156 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16157 }
16158 break;
16159
16160 case FLOAT_EXTEND:
16161 case FLOAT_TRUNCATE:
16162 case FLOAT:
16163 case UNSIGNED_FLOAT:
16164 case FIX:
16165 case UNSIGNED_FIX:
16166 if (!dwarf_strict || dwarf_version >= 5)
16167 {
16168 dw_die_ref type_die;
16169 dw_loc_descr_ref cvt;
16170
16171 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16172 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16173 if (op0 == NULL)
16174 break;
16175 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16176 && (GET_CODE (rtl) == FLOAT
16177 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16178 {
16179 type_die = base_type_for_mode (int_mode,
16180 GET_CODE (rtl) == UNSIGNED_FLOAT);
16181 if (type_die == NULL)
16182 break;
16183 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16184 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16185 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16186 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16187 add_loc_descr (&op0, cvt);
16188 }
16189 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16190 if (type_die == NULL)
16191 break;
16192 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16193 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16194 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16195 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16196 add_loc_descr (&op0, cvt);
16197 if (is_a <scalar_int_mode> (mode, &int_mode)
16198 && (GET_CODE (rtl) == FIX
16199 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16200 {
16201 op0 = convert_descriptor_to_mode (int_mode, op0);
16202 if (op0 == NULL)
16203 break;
16204 }
16205 mem_loc_result = op0;
16206 }
16207 break;
16208
16209 case CLZ:
16210 case CTZ:
16211 case FFS:
16212 if (is_a <scalar_int_mode> (mode, &int_mode))
16213 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16214 break;
16215
16216 case POPCOUNT:
16217 case PARITY:
16218 if (is_a <scalar_int_mode> (mode, &int_mode))
16219 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16220 break;
16221
16222 case BSWAP:
16223 if (is_a <scalar_int_mode> (mode, &int_mode))
16224 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16225 break;
16226
16227 case ROTATE:
16228 case ROTATERT:
16229 if (is_a <scalar_int_mode> (mode, &int_mode))
16230 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16231 break;
16232
16233 case COMPARE:
16234 /* In theory, we could implement the above. */
16235 /* DWARF cannot represent the unsigned compare operations
16236 natively. */
16237 case SS_MULT:
16238 case US_MULT:
16239 case SS_DIV:
16240 case US_DIV:
16241 case SS_PLUS:
16242 case US_PLUS:
16243 case SS_MINUS:
16244 case US_MINUS:
16245 case SS_NEG:
16246 case US_NEG:
16247 case SS_ABS:
16248 case SS_ASHIFT:
16249 case US_ASHIFT:
16250 case SS_TRUNCATE:
16251 case US_TRUNCATE:
16252 case UNORDERED:
16253 case ORDERED:
16254 case UNEQ:
16255 case UNGE:
16256 case UNGT:
16257 case UNLE:
16258 case UNLT:
16259 case LTGT:
16260 case FRACT_CONVERT:
16261 case UNSIGNED_FRACT_CONVERT:
16262 case SAT_FRACT:
16263 case UNSIGNED_SAT_FRACT:
16264 case SQRT:
16265 case ASM_OPERANDS:
16266 case VEC_MERGE:
16267 case VEC_SELECT:
16268 case VEC_CONCAT:
16269 case VEC_DUPLICATE:
16270 case VEC_SERIES:
16271 case UNSPEC:
16272 case HIGH:
16273 case FMA:
16274 case STRICT_LOW_PART:
16275 case CONST_VECTOR:
16276 case CONST_FIXED:
16277 case CLRSB:
16278 case CLOBBER:
16279 case CLOBBER_HIGH:
16280 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16281 can't express it in the debug info. This can happen e.g. with some
16282 TLS UNSPECs. */
16283 break;
16284
16285 case CONST_STRING:
16286 resolve_one_addr (&rtl);
16287 goto symref;
16288
16289 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16290 the expression. An UNSPEC rtx represents a raw DWARF operation;
16291 new_loc_descr is called for it to build the operation directly.
16292 Otherwise mem_loc_descriptor is called recursively. */
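/* Illustrative sketch (added, not in the original source; the RTL spelling
   is schematic): a PARALLEL such as
     (parallel [(reg:DI 1)
		(unspec [(const_int 8) (const_int 0)] <DW_OP_plus_uconst>)])
   would chain whatever mem_loc_descriptor produces for (reg:DI 1) with a
   raw DW_OP_plus_uconst 8 operation; the UNSPEC's code is the DWARF opcode
   and const0_rtx stands in for an unused operand.  */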
16293 case PARALLEL:
16294 {
16295 int index = 0;
16296 dw_loc_descr_ref exp_result = NULL;
16297
16298 for (; index < XVECLEN (rtl, 0); index++)
16299 {
16300 rtx elem = XVECEXP (rtl, 0, index);
16301 if (GET_CODE (elem) == UNSPEC)
16302 {
16303 /* Each DWARF operation UNSPEC contains two operands; if
16304 one operand is not used for the operation, const0_rtx is
16305 passed. */
16306 gcc_assert (XVECLEN (elem, 0) == 2);
16307
16308 HOST_WIDE_INT dw_op = XINT (elem, 1);
16309 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16310 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16311 exp_result
16312 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16313 oprnd2);
16314 }
16315 else
16316 exp_result
16317 = mem_loc_descriptor (elem, mode, mem_mode,
16318 VAR_INIT_STATUS_INITIALIZED);
16319
16320 if (!mem_loc_result)
16321 mem_loc_result = exp_result;
16322 else
16323 add_loc_descr (&mem_loc_result, exp_result);
16324 }
16325
16326 break;
16327 }
16328
16329 default:
16330 if (flag_checking)
16331 {
16332 print_rtl (stderr, rtl);
16333 gcc_unreachable ();
16334 }
16335 break;
16336 }
16337
16338 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16339 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16340
16341 return mem_loc_result;
16342 }
16343
16344 /* Return a descriptor that describes the concatenation of two locations.
16345 This is typically a complex variable. */
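/* Illustrative sketch (added, not in the original source): for a complex
   value whose real and imaginary parts occupy two 8-byte locations, the
   result below has the shape
     <location of X0> DW_OP_piece 8 <location of X1> DW_OP_piece 8
   i.e. each part's location followed by a DW_OP_piece covering its size.  */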
16346
16347 static dw_loc_descr_ref
16348 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16349 {
16350 /* At present we only track constant-sized pieces. */
16351 unsigned int size0, size1;
16352 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16353 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16354 return 0;
16355
16356 dw_loc_descr_ref cc_loc_result = NULL;
16357 dw_loc_descr_ref x0_ref
16358 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16359 dw_loc_descr_ref x1_ref
16360 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16361
16362 if (x0_ref == 0 || x1_ref == 0)
16363 return 0;
16364
16365 cc_loc_result = x0_ref;
16366 add_loc_descr_op_piece (&cc_loc_result, size0);
16367
16368 add_loc_descr (&cc_loc_result, x1_ref);
16369 add_loc_descr_op_piece (&cc_loc_result, size1);
16370
16371 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16372 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16373
16374 return cc_loc_result;
16375 }
16376
16377 /* Return a descriptor that describes the concatenation of N
16378 locations. */
16379
16380 static dw_loc_descr_ref
16381 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16382 {
16383 unsigned int i;
16384 dw_loc_descr_ref cc_loc_result = NULL;
16385 unsigned int n = XVECLEN (concatn, 0);
16386 unsigned int size;
16387
16388 for (i = 0; i < n; ++i)
16389 {
16390 dw_loc_descr_ref ref;
16391 rtx x = XVECEXP (concatn, 0, i);
16392
16393 /* At present we only track constant-sized pieces. */
16394 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16395 return NULL;
16396
16397 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16398 if (ref == NULL)
16399 return NULL;
16400
16401 add_loc_descr (&cc_loc_result, ref);
16402 add_loc_descr_op_piece (&cc_loc_result, size);
16403 }
16404
16405 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16406 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16407
16408 return cc_loc_result;
16409 }
16410
16411 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16412 for DEBUG_IMPLICIT_PTR RTL. */
16413
16414 static dw_loc_descr_ref
16415 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16416 {
16417 dw_loc_descr_ref ret;
16418 dw_die_ref ref;
16419
16420 if (dwarf_strict && dwarf_version < 5)
16421 return NULL;
16422 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16423 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16424 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16425 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16426 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16427 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16428 if (ref)
16429 {
16430 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16431 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16432 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16433 }
16434 else
16435 {
16436 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16437 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16438 }
16439 return ret;
16440 }
16441
16442 /* Output a proper Dwarf location descriptor for a variable or parameter
16443 which is either allocated in a register or in a memory location. For a
16444 register, we just generate an OP_REG and the register number. For a
16445 memory location we provide a Dwarf postfix expression describing how to
16446 generate the (dynamic) address of the object onto the address stack.
16447
16448 MODE is the mode of the decl if this loc_descriptor is going to be used in
16449 the .debug_loc section, where DW_OP_stack_value and DW_OP_implicit_value are
16450 allowed, and VOIDmode otherwise.
16451
16452 If we don't know how to describe it, return 0. */
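/* Illustrative sketch (added, not in the original source): a variable held
   entirely in, say, DWARF register 5 yields a bare DW_OP_reg5, whereas a
   variable at a constant offset from the frame base yields an address
   computation such as DW_OP_fbreg -16 (the register number and offset here
   are hypothetical).  */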
16453
16454 static dw_loc_descr_ref
16455 loc_descriptor (rtx rtl, machine_mode mode,
16456 enum var_init_status initialized)
16457 {
16458 dw_loc_descr_ref loc_result = NULL;
16459 scalar_int_mode int_mode;
16460
16461 switch (GET_CODE (rtl))
16462 {
16463 case SUBREG:
16464 /* The case of a subreg may arise when we have a local (register)
16465 variable or a formal (register) parameter which doesn't quite fill
16466 up an entire register. For now, just assume that it is
16467 legitimate to make the Dwarf info refer to the whole register which
16468 contains the given subreg. */
16469 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16470 loc_result = loc_descriptor (SUBREG_REG (rtl),
16471 GET_MODE (SUBREG_REG (rtl)), initialized);
16472 else
16473 goto do_default;
16474 break;
16475
16476 case REG:
16477 loc_result = reg_loc_descriptor (rtl, initialized);
16478 break;
16479
16480 case MEM:
16481 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16482 GET_MODE (rtl), initialized);
16483 if (loc_result == NULL)
16484 loc_result = tls_mem_loc_descriptor (rtl);
16485 if (loc_result == NULL)
16486 {
16487 rtx new_rtl = avoid_constant_pool_reference (rtl);
16488 if (new_rtl != rtl)
16489 loc_result = loc_descriptor (new_rtl, mode, initialized);
16490 }
16491 break;
16492
16493 case CONCAT:
16494 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16495 initialized);
16496 break;
16497
16498 case CONCATN:
16499 loc_result = concatn_loc_descriptor (rtl, initialized);
16500 break;
16501
16502 case VAR_LOCATION:
16503 /* Single part. */
16504 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16505 {
16506 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16507 if (GET_CODE (loc) == EXPR_LIST)
16508 loc = XEXP (loc, 0);
16509 loc_result = loc_descriptor (loc, mode, initialized);
16510 break;
16511 }
16512
16513 rtl = XEXP (rtl, 1);
16514 /* FALLTHRU */
16515
16516 case PARALLEL:
16517 {
16518 rtvec par_elems = XVEC (rtl, 0);
16519 int num_elem = GET_NUM_ELEM (par_elems);
16520 machine_mode mode;
16521 int i, size;
16522
16523 /* Create the first one, so we have something to add to. */
16524 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16525 VOIDmode, initialized);
16526 if (loc_result == NULL)
16527 return NULL;
16528 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16529 /* At present we only track constant-sized pieces. */
16530 if (!GET_MODE_SIZE (mode).is_constant (&size))
16531 return NULL;
16532 add_loc_descr_op_piece (&loc_result, size);
16533 for (i = 1; i < num_elem; i++)
16534 {
16535 dw_loc_descr_ref temp;
16536
16537 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16538 VOIDmode, initialized);
16539 if (temp == NULL)
16540 return NULL;
16541 add_loc_descr (&loc_result, temp);
16542 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16543 /* At present we only track constant-sized pieces. */
16544 if (!GET_MODE_SIZE (mode).is_constant (&size))
16545 return NULL;
16546 add_loc_descr_op_piece (&loc_result, size);
16547 }
16548 }
16549 break;
16550
16551 case CONST_INT:
16552 if (mode != VOIDmode && mode != BLKmode)
16553 {
16554 int_mode = as_a <scalar_int_mode> (mode);
16555 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16556 INTVAL (rtl));
16557 }
16558 break;
16559
16560 case CONST_DOUBLE:
16561 if (mode == VOIDmode)
16562 mode = GET_MODE (rtl);
16563
16564 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16565 {
16566 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16567
16568 /* Note that a CONST_DOUBLE rtx could represent either an integer
16569 or a floating-point constant. A CONST_DOUBLE is used whenever
16570 the constant requires more than one word in order to be
16571 adequately represented. We output CONST_DOUBLEs as blocks. */
16572 scalar_mode smode = as_a <scalar_mode> (mode);
16573 loc_result = new_loc_descr (DW_OP_implicit_value,
16574 GET_MODE_SIZE (smode), 0);
16575 #if TARGET_SUPPORTS_WIDE_INT == 0
16576 if (!SCALAR_FLOAT_MODE_P (smode))
16577 {
16578 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16579 loc_result->dw_loc_oprnd2.v.val_double
16580 = rtx_to_double_int (rtl);
16581 }
16582 else
16583 #endif
16584 {
16585 unsigned int length = GET_MODE_SIZE (smode);
16586 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16587
16588 insert_float (rtl, array);
16589 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16590 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16591 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16592 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16593 }
16594 }
16595 break;
16596
16597 case CONST_WIDE_INT:
16598 if (mode == VOIDmode)
16599 mode = GET_MODE (rtl);
16600
16601 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16602 {
16603 int_mode = as_a <scalar_int_mode> (mode);
16604 loc_result = new_loc_descr (DW_OP_implicit_value,
16605 GET_MODE_SIZE (int_mode), 0);
16606 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16607 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16608 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16609 }
16610 break;
16611
16612 case CONST_VECTOR:
16613 if (mode == VOIDmode)
16614 mode = GET_MODE (rtl);
16615
16616 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16617 {
16618 unsigned int length;
16619 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16620 return NULL;
16621
16622 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16623 unsigned char *array
16624 = ggc_vec_alloc<unsigned char> (length * elt_size);
16625 unsigned int i;
16626 unsigned char *p;
16627 machine_mode imode = GET_MODE_INNER (mode);
16628
16629 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16630 switch (GET_MODE_CLASS (mode))
16631 {
16632 case MODE_VECTOR_INT:
16633 for (i = 0, p = array; i < length; i++, p += elt_size)
16634 {
16635 rtx elt = CONST_VECTOR_ELT (rtl, i);
16636 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16637 }
16638 break;
16639
16640 case MODE_VECTOR_FLOAT:
16641 for (i = 0, p = array; i < length; i++, p += elt_size)
16642 {
16643 rtx elt = CONST_VECTOR_ELT (rtl, i);
16644 insert_float (elt, p);
16645 }
16646 break;
16647
16648 default:
16649 gcc_unreachable ();
16650 }
16651
16652 loc_result = new_loc_descr (DW_OP_implicit_value,
16653 length * elt_size, 0);
16654 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16655 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16656 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16657 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16658 }
16659 break;
16660
16661 case CONST:
16662 if (mode == VOIDmode
16663 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16664 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16665 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16666 {
16667 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16668 break;
16669 }
16670 /* FALLTHROUGH */
16671 case SYMBOL_REF:
16672 if (!const_ok_for_output (rtl))
16673 break;
16674 /* FALLTHROUGH */
16675 case LABEL_REF:
16676 if (is_a <scalar_int_mode> (mode, &int_mode)
16677 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16678 && (dwarf_version >= 4 || !dwarf_strict))
16679 {
16680 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16681 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16682 vec_safe_push (used_rtx_array, rtl);
16683 }
16684 break;
16685
16686 case DEBUG_IMPLICIT_PTR:
16687 loc_result = implicit_ptr_descriptor (rtl, 0);
16688 break;
16689
16690 case PLUS:
16691 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16692 && CONST_INT_P (XEXP (rtl, 1)))
16693 {
16694 loc_result
16695 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16696 break;
16697 }
16698 /* FALLTHRU */
16699 do_default:
16700 default:
16701 if ((is_a <scalar_int_mode> (mode, &int_mode)
16702 && GET_MODE (rtl) == int_mode
16703 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16704 && dwarf_version >= 4)
16705 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16706 {
16707 /* Value expression. */
16708 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16709 if (loc_result)
16710 add_loc_descr (&loc_result,
16711 new_loc_descr (DW_OP_stack_value, 0, 0));
16712 }
16713 break;
16714 }
16715
16716 return loc_result;
16717 }
16718
16719 /* We need to figure out what section we should use as the base for the
16720 address ranges where a given location is valid.
16721 1. If this particular DECL has a section associated with it, use that.
16722 2. If this function has a section associated with it, use that.
16723 3. Otherwise, use the text section.
16724 XXX: If you split a variable across multiple sections, we won't notice. */
16725
16726 static const char *
16727 secname_for_decl (const_tree decl)
16728 {
16729 const char *secname;
16730
16731 if (VAR_OR_FUNCTION_DECL_P (decl)
16732 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16733 && DECL_SECTION_NAME (decl))
16734 secname = DECL_SECTION_NAME (decl);
16735 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16736 secname = DECL_SECTION_NAME (current_function_decl);
16737 else if (cfun && in_cold_section_p)
16738 secname = crtl->subsections.cold_section_label;
16739 else
16740 secname = text_section_label;
16741
16742 return secname;
16743 }
16744
16745 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16746
16747 static bool
16748 decl_by_reference_p (tree decl)
16749 {
16750 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16751 || VAR_P (decl))
16752 && DECL_BY_REFERENCE (decl));
16753 }
16754
16755 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16756 for VARLOC. */
16757
16758 static dw_loc_descr_ref
16759 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16760 enum var_init_status initialized)
16761 {
16762 int have_address = 0;
16763 dw_loc_descr_ref descr;
16764 machine_mode mode;
16765
16766 if (want_address != 2)
16767 {
16768 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16769 /* Single part. */
16770 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16771 {
16772 varloc = PAT_VAR_LOCATION_LOC (varloc);
16773 if (GET_CODE (varloc) == EXPR_LIST)
16774 varloc = XEXP (varloc, 0);
16775 mode = GET_MODE (varloc);
16776 if (MEM_P (varloc))
16777 {
16778 rtx addr = XEXP (varloc, 0);
16779 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16780 mode, initialized);
16781 if (descr)
16782 have_address = 1;
16783 else
16784 {
16785 rtx x = avoid_constant_pool_reference (varloc);
16786 if (x != varloc)
16787 descr = mem_loc_descriptor (x, mode, VOIDmode,
16788 initialized);
16789 }
16790 }
16791 else
16792 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16793 }
16794 else
16795 return 0;
16796 }
16797 else
16798 {
16799 if (GET_CODE (varloc) == VAR_LOCATION)
16800 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16801 else
16802 mode = DECL_MODE (loc);
16803 descr = loc_descriptor (varloc, mode, initialized);
16804 have_address = 1;
16805 }
16806
16807 if (!descr)
16808 return 0;
16809
16810 if (want_address == 2 && !have_address
16811 && (dwarf_version >= 4 || !dwarf_strict))
16812 {
16813 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16814 {
16815 expansion_failed (loc, NULL_RTX,
16816 "DWARF address size mismatch");
16817 return 0;
16818 }
16819 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16820 have_address = 1;
16821 }
16822 /* Show if we can't fill the request for an address. */
16823 if (want_address && !have_address)
16824 {
16825 expansion_failed (loc, NULL_RTX,
16826 "Want address and only have value");
16827 return 0;
16828 }
16829
16830 /* If we've got an address and don't want one, dereference. */
16831 if (!want_address && have_address)
16832 {
16833 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16834 enum dwarf_location_atom op;
16835
16836 if (size > DWARF2_ADDR_SIZE || size == -1)
16837 {
16838 expansion_failed (loc, NULL_RTX,
16839 "DWARF address size mismatch");
16840 return 0;
16841 }
16842 else if (size == DWARF2_ADDR_SIZE)
16843 op = DW_OP_deref;
16844 else
16845 op = DW_OP_deref_size;
16846
16847 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16848 }
16849
16850 return descr;
16851 }
16852
16853 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16854 if it is not possible. */
16855
16856 static dw_loc_descr_ref
16857 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16858 {
16859 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16860 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16861 else if (dwarf_version >= 3 || !dwarf_strict)
16862 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16863 else
16864 return NULL;
16865 }
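/* Usage sketch (added, not in the original source): a 32-bit piece starting
   on a byte boundary becomes DW_OP_piece 4, while a piece with a sub-byte
   size or a nonzero bit offset needs DW_OP_bit_piece, which is only emitted
   for DWARF 3 and later or when strict DWARF is not requested.  */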
16866
16867 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16868 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16869
16870 static dw_loc_descr_ref
16871 dw_sra_loc_expr (tree decl, rtx loc)
16872 {
16873 rtx p;
16874 unsigned HOST_WIDE_INT padsize = 0;
16875 dw_loc_descr_ref descr, *descr_tail;
16876 unsigned HOST_WIDE_INT decl_size;
16877 rtx varloc;
16878 enum var_init_status initialized;
16879
16880 if (DECL_SIZE (decl) == NULL
16881 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16882 return NULL;
16883
16884 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16885 descr = NULL;
16886 descr_tail = &descr;
16887
16888 for (p = loc; p; p = XEXP (p, 1))
16889 {
16890 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16891 rtx loc_note = *decl_piece_varloc_ptr (p);
16892 dw_loc_descr_ref cur_descr;
16893 dw_loc_descr_ref *tail, last = NULL;
16894 unsigned HOST_WIDE_INT opsize = 0;
16895
16896 if (loc_note == NULL_RTX
16897 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16898 {
16899 padsize += bitsize;
16900 continue;
16901 }
16902 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16903 varloc = NOTE_VAR_LOCATION (loc_note);
16904 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16905 if (cur_descr == NULL)
16906 {
16907 padsize += bitsize;
16908 continue;
16909 }
16910
16911 /* Check that cur_descr either doesn't use
16912 DW_OP_*piece operations, or their sum is equal
16913 to bitsize. Otherwise we can't embed it. */
16914 for (tail = &cur_descr; *tail != NULL;
16915 tail = &(*tail)->dw_loc_next)
16916 if ((*tail)->dw_loc_opc == DW_OP_piece)
16917 {
16918 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16919 * BITS_PER_UNIT;
16920 last = *tail;
16921 }
16922 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16923 {
16924 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16925 last = *tail;
16926 }
16927
16928 if (last != NULL && opsize != bitsize)
16929 {
16930 padsize += bitsize;
16931 /* Discard the current piece of the descriptor and release any
16932 addr_table entries it uses. */
16933 remove_loc_list_addr_table_entries (cur_descr);
16934 continue;
16935 }
16936
16937 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16938 expression, which means that those bits are optimized out. */
16939 if (padsize)
16940 {
16941 if (padsize > decl_size)
16942 {
16943 remove_loc_list_addr_table_entries (cur_descr);
16944 goto discard_descr;
16945 }
16946 decl_size -= padsize;
16947 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16948 if (*descr_tail == NULL)
16949 {
16950 remove_loc_list_addr_table_entries (cur_descr);
16951 goto discard_descr;
16952 }
16953 descr_tail = &(*descr_tail)->dw_loc_next;
16954 padsize = 0;
16955 }
16956 *descr_tail = cur_descr;
16957 descr_tail = tail;
16958 if (bitsize > decl_size)
16959 goto discard_descr;
16960 decl_size -= bitsize;
16961 if (last == NULL)
16962 {
16963 HOST_WIDE_INT offset = 0;
16964 if (GET_CODE (varloc) == VAR_LOCATION
16965 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16966 {
16967 varloc = PAT_VAR_LOCATION_LOC (varloc);
16968 if (GET_CODE (varloc) == EXPR_LIST)
16969 varloc = XEXP (varloc, 0);
16970 }
16971 do
16972 {
16973 if (GET_CODE (varloc) == CONST
16974 || GET_CODE (varloc) == SIGN_EXTEND
16975 || GET_CODE (varloc) == ZERO_EXTEND)
16976 varloc = XEXP (varloc, 0);
16977 else if (GET_CODE (varloc) == SUBREG)
16978 varloc = SUBREG_REG (varloc);
16979 else
16980 break;
16981 }
16982 while (1);
16983 /* The DW_OP_bit_piece offset should be zero for register
16984 or implicit location descriptions and empty location
16985 descriptions, but for memory addresses it needs big-endian
16986 adjustment. */
16987 if (MEM_P (varloc))
16988 {
16989 unsigned HOST_WIDE_INT memsize;
16990 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16991 goto discard_descr;
16992 memsize *= BITS_PER_UNIT;
16993 if (memsize != bitsize)
16994 {
16995 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16996 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16997 goto discard_descr;
16998 if (memsize < bitsize)
16999 goto discard_descr;
17000 if (BITS_BIG_ENDIAN)
17001 offset = memsize - bitsize;
17002 }
17003 }
17004
17005 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17006 if (*descr_tail == NULL)
17007 goto discard_descr;
17008 descr_tail = &(*descr_tail)->dw_loc_next;
17009 }
17010 }
17011
17012 /* If there were any non-empty expressions, add padding till the end of
17013 the decl. */
17014 if (descr != NULL && decl_size != 0)
17015 {
17016 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17017 if (*descr_tail == NULL)
17018 goto discard_descr;
17019 }
17020 return descr;
17021
17022 discard_descr:
17023 /* Discard the descriptor and release any addr_table entries it uses. */
17024 remove_loc_list_addr_table_entries (descr);
17025 return NULL;
17026 }
17027
17028 /* Return the DWARF representation of the location list LOC_LIST of
17029 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17030 function. */
17031
17032 static dw_loc_list_ref
17033 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17034 {
17035 const char *endname, *secname;
17036 var_loc_view endview;
17037 rtx varloc;
17038 enum var_init_status initialized;
17039 struct var_loc_node *node;
17040 dw_loc_descr_ref descr;
17041 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17042 dw_loc_list_ref list = NULL;
17043 dw_loc_list_ref *listp = &list;
17044
17045 /* Now that we know what section we are using for a base,
17046 actually construct the list of locations.
17047 The first location information is what is passed to the
17048 function that creates the location list, and the remaining
17049 locations just get added on to that list.
17050 Note that we only know the start address for a location
17051 (i.e. location changes), so to build the range, we use
17052 the range [current location start, next location start].
17053 This means we have to special case the last node, and generate
17054 a range of [last location start, end of function label]. */
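  /* Illustrative sketch (added, not in the original source): for a variable
     whose location changes once within the function, the list built below
     has the shape
       [<first note label>, <second note label>)   <descriptor 1>
       [<second note label>, <function end label>) <descriptor 2>
     i.e. each entry covers the half-open range from one location note to
     the next, with the last entry extended to the end of the function.  */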
17055
17056 if (cfun && crtl->has_bb_partition)
17057 {
17058 bool save_in_cold_section_p = in_cold_section_p;
17059 in_cold_section_p = first_function_block_is_cold;
17060 if (loc_list->last_before_switch == NULL)
17061 in_cold_section_p = !in_cold_section_p;
17062 secname = secname_for_decl (decl);
17063 in_cold_section_p = save_in_cold_section_p;
17064 }
17065 else
17066 secname = secname_for_decl (decl);
17067
17068 for (node = loc_list->first; node; node = node->next)
17069 {
17070 bool range_across_switch = false;
17071 if (GET_CODE (node->loc) == EXPR_LIST
17072 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17073 {
17074 if (GET_CODE (node->loc) == EXPR_LIST)
17075 {
17076 descr = NULL;
17077 /* This requires DW_OP_{,bit_}piece, which is not usable
17078 inside DWARF expressions. */
17079 if (want_address == 2)
17080 descr = dw_sra_loc_expr (decl, node->loc);
17081 }
17082 else
17083 {
17084 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17085 varloc = NOTE_VAR_LOCATION (node->loc);
17086 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17087 }
17088 if (descr)
17089 {
17090 /* If a section switch happens between node->label
17091 and node->next->label (or the end of the function) and
17092 we can't emit it as a single entry list,
17093 emit two ranges: the first one ending at the end
17094 of the first partition and the second one starting at the
17095 beginning of the second partition. */
17096 if (node == loc_list->last_before_switch
17097 && (node != loc_list->first || loc_list->first->next
17098 /* If we are to emit a view number, we will emit
17099 a loclist rather than a single location
17100 expression for the entire function (see
17101 loc_list_has_views), so we have to split the
17102 range that straddles across partitions. */
17103 || !ZERO_VIEW_P (node->view))
17104 && current_function_decl)
17105 {
17106 endname = cfun->fde->dw_fde_end;
17107 endview = 0;
17108 range_across_switch = true;
17109 }
17110 /* The variable has a location between NODE->LABEL and
17111 NODE->NEXT->LABEL. */
17112 else if (node->next)
17113 endname = node->next->label, endview = node->next->view;
17114 /* If the variable has a location at the last label
17115 it keeps its location until the end of function. */
17116 else if (!current_function_decl)
17117 endname = text_end_label, endview = 0;
17118 else
17119 {
17120 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17121 current_function_funcdef_no);
17122 endname = ggc_strdup (label_id);
17123 endview = 0;
17124 }
17125
17126 *listp = new_loc_list (descr, node->label, node->view,
17127 endname, endview, secname);
17128 if (TREE_CODE (decl) == PARM_DECL
17129 && node == loc_list->first
17130 && NOTE_P (node->loc)
17131 && strcmp (node->label, endname) == 0)
17132 (*listp)->force = true;
17133 listp = &(*listp)->dw_loc_next;
17134 }
17135 }
17136
17137 if (cfun
17138 && crtl->has_bb_partition
17139 && node == loc_list->last_before_switch)
17140 {
17141 bool save_in_cold_section_p = in_cold_section_p;
17142 in_cold_section_p = !first_function_block_is_cold;
17143 secname = secname_for_decl (decl);
17144 in_cold_section_p = save_in_cold_section_p;
17145 }
17146
17147 if (range_across_switch)
17148 {
17149 if (GET_CODE (node->loc) == EXPR_LIST)
17150 descr = dw_sra_loc_expr (decl, node->loc);
17151 else
17152 {
17153 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17154 varloc = NOTE_VAR_LOCATION (node->loc);
17155 descr = dw_loc_list_1 (decl, varloc, want_address,
17156 initialized);
17157 }
17158 gcc_assert (descr);
17159 /* The variable has a location between NODE->LABEL and
17160 NODE->NEXT->LABEL. */
17161 if (node->next)
17162 endname = node->next->label, endview = node->next->view;
17163 else
17164 endname = cfun->fde->dw_fde_second_end, endview = 0;
17165 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17166 endname, endview, secname);
17167 listp = &(*listp)->dw_loc_next;
17168 }
17169 }
17170
17171 /* Try to avoid the overhead of a location list by emitting a location
17172 expression instead, but only if we didn't have more than one
17173 location entry in the first place. If some entries were not
17174 representable, we don't want to pretend that a single entry that was
17175 representable applies to the entire scope in which the variable is
17176 available. */
17177 if (list && loc_list->first->next)
17178 gen_llsym (list);
17179 else
17180 maybe_gen_llsym (list);
17181
17182 return list;
17183 }
17184
17185 /* Return whether the loc_list has only a single element and thus can be
17186 represented as a location description. */
17187
17188 static bool
17189 single_element_loc_list_p (dw_loc_list_ref list)
17190 {
17191 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17192 return !list->ll_symbol;
17193 }
17194
17195 /* Duplicate a single element of a location list. */
17196
17197 static inline dw_loc_descr_ref
17198 copy_loc_descr (dw_loc_descr_ref ref)
17199 {
17200 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17201 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17202 return copy;
17203 }
17204
17205 /* To each location in list LIST append loc descr REF. */
17206
17207 static void
17208 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17209 {
17210 dw_loc_descr_ref copy;
17211 add_loc_descr (&list->expr, ref);
17212 list = list->dw_loc_next;
17213 while (list)
17214 {
17215 copy = copy_loc_descr (ref);
17216 add_loc_descr (&list->expr, copy);
17217 while (copy->dw_loc_next)
17218 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17219 list = list->dw_loc_next;
17220 }
17221 }
17222
17223 /* To each location in list LIST prepend loc descr REF. */
17224
17225 static void
17226 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17227 {
17228 dw_loc_descr_ref copy;
17229 dw_loc_descr_ref ref_end = list->expr;
17230 add_loc_descr (&ref, list->expr);
17231 list->expr = ref;
17232 list = list->dw_loc_next;
17233 while (list)
17234 {
17235 dw_loc_descr_ref end = list->expr;
17236 list->expr = copy = copy_loc_descr (ref);
17237 while (copy->dw_loc_next != ref_end)
17238 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17239 copy->dw_loc_next = end;
17240 list = list->dw_loc_next;
17241 }
17242 }
17243
17244 /* Given two lists RET and LIST,
17245 produce the location list that results from adding the expression in LIST
17246 to the expression in RET at each position in the program.
17247 Might be destructive on both RET and LIST.
17248
17249 TODO: We handle only the simple cases of RET or LIST having at most one
17250 element. The general case would involve sorting the lists in program order
17251 and merging them, which will need some additional work.
17252 Adding that would improve the quality of debug info, especially for SRA-ed
17253 structures. */
17254
17255 static void
17256 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17257 {
17258 if (!list)
17259 return;
17260 if (!*ret)
17261 {
17262 *ret = list;
17263 return;
17264 }
17265 if (!list->dw_loc_next)
17266 {
17267 add_loc_descr_to_each (*ret, list->expr);
17268 return;
17269 }
17270 if (!(*ret)->dw_loc_next)
17271 {
17272 prepend_loc_descr_to_each (list, (*ret)->expr);
17273 *ret = list;
17274 return;
17275 }
17276 expansion_failed (NULL_TREE, NULL_RTX,
17277 "Don't know how to merge two non-trivial"
17278 " location lists.\n");
17279 *ret = NULL;
17280 return;
17281 }
17282
17283 /* LOC is a constant expression. Try our luck: look it up in the constant
17284 pool and return the loc_descr of its address. */
17285
17286 static dw_loc_descr_ref
17287 cst_pool_loc_descr (tree loc)
17288 {
17289 /* Get an RTL for this, if something has been emitted. */
17290 rtx rtl = lookup_constant_def (loc);
17291
17292 if (!rtl || !MEM_P (rtl))
17293 {
17294 gcc_assert (!rtl);
17295 return 0;
17296 }
17297 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17298
17299 /* TODO: We might get more coverage if we were actually delaying expansion
17300 of all expressions till the end of compilation, when constant pools are fully
17301 populated. */
17302 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17303 {
17304 expansion_failed (loc, NULL_RTX,
17305 "CST value in contant pool but not marked.");
17306 return 0;
17307 }
17308 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17309 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17310 }
17311
17312 /* Return a dw_loc_list representing the address of the addr_expr LOC
17313 by looking for an inner INDIRECT_REF expression and turning
17314 it into simple arithmetic.
17315
17316 See loc_list_from_tree for the meaning of CONTEXT. */
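/* Illustrative sketch (added, not in the original source; the names are
   hypothetical): for an expression like &ptr->field where "field" sits at
   a constant byte offset of 8, the result is the location list computed
   for "ptr" with DW_OP_plus_uconst 8 and DW_OP_stack_value appended to
   each entry.  */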
17317
17318 static dw_loc_list_ref
17319 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17320 loc_descr_context *context)
17321 {
17322 tree obj, offset;
17323 poly_int64 bitsize, bitpos, bytepos;
17324 machine_mode mode;
17325 int unsignedp, reversep, volatilep = 0;
17326 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17327
17328 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17329 &bitsize, &bitpos, &offset, &mode,
17330 &unsignedp, &reversep, &volatilep);
17331 STRIP_NOPS (obj);
17332 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17333 {
17334 expansion_failed (loc, NULL_RTX, "bitfield access");
17335 return 0;
17336 }
17337 if (!INDIRECT_REF_P (obj))
17338 {
17339 expansion_failed (obj,
17340 NULL_RTX, "no indirect ref in inner refrence");
17341 return 0;
17342 }
17343 if (!offset && known_eq (bitpos, 0))
17344 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17345 context);
17346 else if (toplev
17347 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17348 && (dwarf_version >= 4 || !dwarf_strict))
17349 {
17350 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17351 if (!list_ret)
17352 return 0;
17353 if (offset)
17354 {
17355 /* Variable offset. */
17356 list_ret1 = loc_list_from_tree (offset, 0, context);
17357 if (list_ret1 == 0)
17358 return 0;
17359 add_loc_list (&list_ret, list_ret1);
17360 if (!list_ret)
17361 return 0;
17362 add_loc_descr_to_each (list_ret,
17363 new_loc_descr (DW_OP_plus, 0, 0));
17364 }
17365 HOST_WIDE_INT value;
17366 if (bytepos.is_constant (&value) && value > 0)
17367 add_loc_descr_to_each (list_ret,
17368 new_loc_descr (DW_OP_plus_uconst, value, 0));
17369 else if (maybe_ne (bytepos, 0))
17370 loc_list_plus_const (list_ret, bytepos);
17371 add_loc_descr_to_each (list_ret,
17372 new_loc_descr (DW_OP_stack_value, 0, 0));
17373 }
17374 return list_ret;
17375 }
17376
17377 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17378 all operations from LOC are nops, move to the last one. Insert in NOPS all
17379 operations that are skipped. */
17380
17381 static void
17382 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17383 hash_set<dw_loc_descr_ref> &nops)
17384 {
17385 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17386 {
17387 nops.add (loc);
17388 loc = loc->dw_loc_next;
17389 }
17390 }
17391
17392 /* Helper for loc_descr_without_nops: free the location description operation
17393 P. */
17394
17395 bool
17396 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17397 {
17398 ggc_free (loc);
17399 return true;
17400 }
17401
17402 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17403 finishes LOC. */
17404
17405 static void
17406 loc_descr_without_nops (dw_loc_descr_ref &loc)
17407 {
17408 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17409 return;
17410
17411 /* Set of all DW_OP_nop operations we remove. */
17412 hash_set<dw_loc_descr_ref> nops;
17413
17414 /* First, strip all prefix NOP operations in order to keep the head of the
17415 operations list. */
17416 loc_descr_to_next_no_nop (loc, nops);
17417
17418 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17419 {
17420 /* For control flow operations: strip "prefix" nops in destination
17421 labels. */
17422 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17423 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17424 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17425 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17426
17427 /* Do the same for the operations that follow, then move to the next
17428 iteration. */
17429 if (cur->dw_loc_next != NULL)
17430 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17431 cur = cur->dw_loc_next;
17432 }
17433
17434 nops.traverse<void *, free_loc_descr> (NULL);
17435 }
17436
17437
17438 struct dwarf_procedure_info;
17439
17440 /* Helper structure for location descriptions generation. */
17441 struct loc_descr_context
17442 {
17443 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17444 NULL_TREE if DW_OP_push_object_address is invalid for this location
17445 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17446 tree context_type;
17447 /* The ..._DECL node that should be translated as a
17448 DW_OP_push_object_address operation. */
17449 tree base_decl;
17450 /* Information about the DWARF procedure we are currently generating. NULL if
17451 we are not generating a DWARF procedure. */
17452 struct dwarf_procedure_info *dpi;
17453 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17454 by consumer. Used for DW_TAG_generic_subrange attributes. */
17455 bool placeholder_arg;
17456 /* True if PLACEHOLDER_EXPR has been seen. */
17457 bool placeholder_seen;
17458 };
17459
17460 /* DWARF procedures generation
17461
17462 DWARF expressions (aka. location descriptions) are used to encode variable
17463 quantities such as sizes or offsets. Such computations can have redundant parts
17464 that can be factorized in order to reduce the size of the output debug
17465 information. This is the whole point of DWARF procedures.
17466
17467 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17468 already factorized into functions ("size functions") in order to handle very
17469 big and complex types. Such functions are quite simple: they have integral
17470 arguments, they return an integral result and their body contains only a
17471 return statement with arithmetic expressions. This is the only kind of
17472 function we are interested in translating into DWARF procedures, here.
17473
17474 DWARF expressions and DWARF procedures are executed using a stack, so we have
17475 to define some calling convention for them to interact. Let's say that:
17476
17477 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17478 all arguments in reverse order (right-to-left) so that when the DWARF
17479 procedure execution starts, the first argument is the top of the stack.
17480
17481 - Then, when returning, the DWARF procedure must have consumed all arguments
17482 on the stack, must have pushed the result and touched nothing else.
17483
17484 - Each integral argument and the result have integral types that can be held
17485 in a single stack slot.
17486
17487 - We call "frame offset" the number of stack slots that are "under DWARF
17488 procedure control": it includes the argument slots, the temporaries and
17489 the result slot. Thus, it is equal to the number of arguments when the
17490 procedure execution starts and must be equal to one (the result) when it
17491 returns. */
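/* The following is a hypothetical sketch, for illustration only (it is not
   used by the code below): this is roughly how a caller obeying the above
   convention would emit a call to a two-argument DWARF procedure whose DIE is
   PROC_DIE, given already-built location descriptions ARG_A and ARG_B for its
   arguments.  */
#if 0
static dw_loc_descr_ref
example_call_two_arg_dwarf_proc (dw_die_ref proc_die,
				 dw_loc_descr_ref arg_a,
				 dw_loc_descr_ref arg_b)
{
  dw_loc_descr_ref ret = NULL;
  dw_loc_descr_ref call = new_loc_descr (DW_OP_call4, 0, 0);

  /* Push the arguments right-to-left so that ARG_A is on top of the stack
     when the procedure starts executing.  */
  add_loc_descr (&ret, arg_b);
  add_loc_descr (&ret, arg_a);

  /* Then transfer control to the DWARF procedure; it consumes both argument
     slots and leaves only its result on the stack.  */
  call->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
  call->dw_loc_oprnd1.v.val_die_ref.die = proc_die;
  call->dw_loc_oprnd1.v.val_die_ref.external = 0;
  add_loc_descr (&ret, call);
  return ret;
}
#endif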
17492
17493 /* Helper structure used when generating operations for a DWARF procedure. */
17494 struct dwarf_procedure_info
17495 {
17496 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17497 currently translated. */
17498 tree fndecl;
17499 /* The number of arguments FNDECL takes. */
17500 unsigned args_count;
17501 };
17502
17503 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17504 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17505 equate it to this DIE. */
17506
17507 static dw_die_ref
17508 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17509 dw_die_ref parent_die)
17510 {
17511 dw_die_ref dwarf_proc_die;
17512
17513 if ((dwarf_version < 3 && dwarf_strict)
17514 || location == NULL)
17515 return NULL;
17516
17517 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17518 if (fndecl)
17519 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17520 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17521 return dwarf_proc_die;
17522 }
17523
17524 /* Return whether TYPE is a supported type as a DWARF procedure argument
17525 type or return type (we handle only scalar types and pointer types that
17526 aren't wider than the DWARF expression evaluation stack). */
17527
17528 static bool
17529 is_handled_procedure_type (tree type)
17530 {
17531 return ((INTEGRAL_TYPE_P (type)
17532 || TREE_CODE (type) == OFFSET_TYPE
17533 || TREE_CODE (type) == POINTER_TYPE)
17534 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17535 }
17536
17537 /* Helper for resolve_args_picking: do the same but stop when coming across
17538 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17539 offset *before* evaluating the corresponding operation. */
17540
17541 static bool
17542 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17543 struct dwarf_procedure_info *dpi,
17544 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17545 {
17546 /* The "frame_offset" identifier is already used to name a macro... */
17547 unsigned frame_offset_ = initial_frame_offset;
17548 dw_loc_descr_ref l;
17549
17550 for (l = loc; l != NULL;)
17551 {
17552 bool existed;
17553 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17554
17555 /* If we already met this node, there is nothing to compute anymore. */
17556 if (existed)
17557 {
17558 /* Make sure that the stack size is consistent wherever the execution
17559 flow comes from. */
17560 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17561 break;
17562 }
17563 l_frame_offset = frame_offset_;
17564
17565 /* If needed, relocate the picking offset with respect to the frame
17566 offset. */
17567 if (l->frame_offset_rel)
17568 {
17569 unsigned HOST_WIDE_INT off;
17570 switch (l->dw_loc_opc)
17571 {
17572 case DW_OP_pick:
17573 off = l->dw_loc_oprnd1.v.val_unsigned;
17574 break;
17575 case DW_OP_dup:
17576 off = 0;
17577 break;
17578 case DW_OP_over:
17579 off = 1;
17580 break;
17581 default:
17582 gcc_unreachable ();
17583 }
17584 /* frame_offset_ is the size of the current stack frame, including
17585 incoming arguments. Besides, the arguments are pushed
17586 right-to-left. Thus, in order to access the Nth argument from
17587 this operation node, the picking has to skip temporaries *plus*
17588 one stack slot per argument (0 for the first one, 1 for the second
17589 one, etc.).
17590
17591 The targeted argument number (N) is already set as the operand,
17592 and the number of temporaries can be computed with:
17593 frame_offset_ - dpi->args_count */
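/* Hypothetical example, for illustration only: with dpi->args_count == 2 and
   frame_offset_ == 3 (two argument slots plus one temporary), an operation
   that targets the first argument (N == 0) gets off = 0 + 3 - 2 = 1, which
   the code below then rewrites as DW_OP_over.  */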
17594 off += frame_offset_ - dpi->args_count;
17595
17596 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17597 if (off > 255)
17598 return false;
17599
17600 if (off == 0)
17601 {
17602 l->dw_loc_opc = DW_OP_dup;
17603 l->dw_loc_oprnd1.v.val_unsigned = 0;
17604 }
17605 else if (off == 1)
17606 {
17607 l->dw_loc_opc = DW_OP_over;
17608 l->dw_loc_oprnd1.v.val_unsigned = 0;
17609 }
17610 else
17611 {
17612 l->dw_loc_opc = DW_OP_pick;
17613 l->dw_loc_oprnd1.v.val_unsigned = off;
17614 }
17615 }
17616
17617 /* Update frame_offset according to the effect the current operation has
17618 on the stack. */
17619 switch (l->dw_loc_opc)
17620 {
17621 case DW_OP_deref:
17622 case DW_OP_swap:
17623 case DW_OP_rot:
17624 case DW_OP_abs:
17625 case DW_OP_neg:
17626 case DW_OP_not:
17627 case DW_OP_plus_uconst:
17628 case DW_OP_skip:
17629 case DW_OP_reg0:
17630 case DW_OP_reg1:
17631 case DW_OP_reg2:
17632 case DW_OP_reg3:
17633 case DW_OP_reg4:
17634 case DW_OP_reg5:
17635 case DW_OP_reg6:
17636 case DW_OP_reg7:
17637 case DW_OP_reg8:
17638 case DW_OP_reg9:
17639 case DW_OP_reg10:
17640 case DW_OP_reg11:
17641 case DW_OP_reg12:
17642 case DW_OP_reg13:
17643 case DW_OP_reg14:
17644 case DW_OP_reg15:
17645 case DW_OP_reg16:
17646 case DW_OP_reg17:
17647 case DW_OP_reg18:
17648 case DW_OP_reg19:
17649 case DW_OP_reg20:
17650 case DW_OP_reg21:
17651 case DW_OP_reg22:
17652 case DW_OP_reg23:
17653 case DW_OP_reg24:
17654 case DW_OP_reg25:
17655 case DW_OP_reg26:
17656 case DW_OP_reg27:
17657 case DW_OP_reg28:
17658 case DW_OP_reg29:
17659 case DW_OP_reg30:
17660 case DW_OP_reg31:
17661 case DW_OP_bregx:
17662 case DW_OP_piece:
17663 case DW_OP_deref_size:
17664 case DW_OP_nop:
17665 case DW_OP_bit_piece:
17666 case DW_OP_implicit_value:
17667 case DW_OP_stack_value:
17668 break;
17669
17670 case DW_OP_addr:
17671 case DW_OP_const1u:
17672 case DW_OP_const1s:
17673 case DW_OP_const2u:
17674 case DW_OP_const2s:
17675 case DW_OP_const4u:
17676 case DW_OP_const4s:
17677 case DW_OP_const8u:
17678 case DW_OP_const8s:
17679 case DW_OP_constu:
17680 case DW_OP_consts:
17681 case DW_OP_dup:
17682 case DW_OP_over:
17683 case DW_OP_pick:
17684 case DW_OP_lit0:
17685 case DW_OP_lit1:
17686 case DW_OP_lit2:
17687 case DW_OP_lit3:
17688 case DW_OP_lit4:
17689 case DW_OP_lit5:
17690 case DW_OP_lit6:
17691 case DW_OP_lit7:
17692 case DW_OP_lit8:
17693 case DW_OP_lit9:
17694 case DW_OP_lit10:
17695 case DW_OP_lit11:
17696 case DW_OP_lit12:
17697 case DW_OP_lit13:
17698 case DW_OP_lit14:
17699 case DW_OP_lit15:
17700 case DW_OP_lit16:
17701 case DW_OP_lit17:
17702 case DW_OP_lit18:
17703 case DW_OP_lit19:
17704 case DW_OP_lit20:
17705 case DW_OP_lit21:
17706 case DW_OP_lit22:
17707 case DW_OP_lit23:
17708 case DW_OP_lit24:
17709 case DW_OP_lit25:
17710 case DW_OP_lit26:
17711 case DW_OP_lit27:
17712 case DW_OP_lit28:
17713 case DW_OP_lit29:
17714 case DW_OP_lit30:
17715 case DW_OP_lit31:
17716 case DW_OP_breg0:
17717 case DW_OP_breg1:
17718 case DW_OP_breg2:
17719 case DW_OP_breg3:
17720 case DW_OP_breg4:
17721 case DW_OP_breg5:
17722 case DW_OP_breg6:
17723 case DW_OP_breg7:
17724 case DW_OP_breg8:
17725 case DW_OP_breg9:
17726 case DW_OP_breg10:
17727 case DW_OP_breg11:
17728 case DW_OP_breg12:
17729 case DW_OP_breg13:
17730 case DW_OP_breg14:
17731 case DW_OP_breg15:
17732 case DW_OP_breg16:
17733 case DW_OP_breg17:
17734 case DW_OP_breg18:
17735 case DW_OP_breg19:
17736 case DW_OP_breg20:
17737 case DW_OP_breg21:
17738 case DW_OP_breg22:
17739 case DW_OP_breg23:
17740 case DW_OP_breg24:
17741 case DW_OP_breg25:
17742 case DW_OP_breg26:
17743 case DW_OP_breg27:
17744 case DW_OP_breg28:
17745 case DW_OP_breg29:
17746 case DW_OP_breg30:
17747 case DW_OP_breg31:
17748 case DW_OP_fbreg:
17749 case DW_OP_push_object_address:
17750 case DW_OP_call_frame_cfa:
17751 case DW_OP_GNU_variable_value:
17752 ++frame_offset_;
17753 break;
17754
17755 case DW_OP_drop:
17756 case DW_OP_xderef:
17757 case DW_OP_and:
17758 case DW_OP_div:
17759 case DW_OP_minus:
17760 case DW_OP_mod:
17761 case DW_OP_mul:
17762 case DW_OP_or:
17763 case DW_OP_plus:
17764 case DW_OP_shl:
17765 case DW_OP_shr:
17766 case DW_OP_shra:
17767 case DW_OP_xor:
17768 case DW_OP_bra:
17769 case DW_OP_eq:
17770 case DW_OP_ge:
17771 case DW_OP_gt:
17772 case DW_OP_le:
17773 case DW_OP_lt:
17774 case DW_OP_ne:
17775 case DW_OP_regx:
17776 case DW_OP_xderef_size:
17777 --frame_offset_;
17778 break;
17779
17780 case DW_OP_call2:
17781 case DW_OP_call4:
17782 case DW_OP_call_ref:
17783 {
17784 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17785 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17786
17787 if (stack_usage == NULL)
17788 return false;
17789 frame_offset_ += *stack_usage;
17790 break;
17791 }
17792
17793 case DW_OP_implicit_pointer:
17794 case DW_OP_entry_value:
17795 case DW_OP_const_type:
17796 case DW_OP_regval_type:
17797 case DW_OP_deref_type:
17798 case DW_OP_convert:
17799 case DW_OP_reinterpret:
17800 case DW_OP_form_tls_address:
17801 case DW_OP_GNU_push_tls_address:
17802 case DW_OP_GNU_uninit:
17803 case DW_OP_GNU_encoded_addr:
17804 case DW_OP_GNU_implicit_pointer:
17805 case DW_OP_GNU_entry_value:
17806 case DW_OP_GNU_const_type:
17807 case DW_OP_GNU_regval_type:
17808 case DW_OP_GNU_deref_type:
17809 case DW_OP_GNU_convert:
17810 case DW_OP_GNU_reinterpret:
17811 case DW_OP_GNU_parameter_ref:
17812 /* loc_list_from_tree will probably not output these operations for
17813 size functions, so assume they will not appear here. */
17814 /* Fall through... */
17815
17816 default:
17817 gcc_unreachable ();
17818 }
17819
17820 /* Now, follow the control flow (except subroutine calls). */
17821 switch (l->dw_loc_opc)
17822 {
17823 case DW_OP_bra:
17824 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17825 frame_offsets))
17826 return false;
17827 /* Fall through. */
17828
17829 case DW_OP_skip:
17830 l = l->dw_loc_oprnd1.v.val_loc;
17831 break;
17832
17833 case DW_OP_stack_value:
17834 return true;
17835
17836 default:
17837 l = l->dw_loc_next;
17838 break;
17839 }
17840 }
17841
17842 return true;
17843 }
17844
17845 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17846 operations) in order to resolve the operand of DW_OP_pick operations that
17847 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17848 offset *before* LOC is executed. Return whether all relocations were
17849 successful. */
17850
17851 static bool
17852 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17853 struct dwarf_procedure_info *dpi)
17854 {
17855 /* Associate to all visited operations the frame offset *before* evaluating
17856 this operation. */
17857 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17858
17859 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17860 frame_offsets);
17861 }
17862
17863 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17864 Return NULL if it is not possible. */
17865
17866 static dw_die_ref
17867 function_to_dwarf_procedure (tree fndecl)
17868 {
17869 struct loc_descr_context ctx;
17870 struct dwarf_procedure_info dpi;
17871 dw_die_ref dwarf_proc_die;
17872 tree tree_body = DECL_SAVED_TREE (fndecl);
17873 dw_loc_descr_ref loc_body, epilogue;
17874
17875 tree cursor;
17876 unsigned i;
17877
17878 /* Do not generate multiple DWARF procedures for the same function
17879 declaration. */
17880 dwarf_proc_die = lookup_decl_die (fndecl);
17881 if (dwarf_proc_die != NULL)
17882 return dwarf_proc_die;
17883
17884 /* DWARF procedures are available starting with the DWARFv3 standard. */
17885 if (dwarf_version < 3 && dwarf_strict)
17886 return NULL;
17887
17888 /* We handle only functions for which we still have a body, that return a
17889 supported type and that take arguments with supported types. Note that
17890 there is no point translating functions that return nothing. */
17891 if (tree_body == NULL_TREE
17892 || DECL_RESULT (fndecl) == NULL_TREE
17893 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17894 return NULL;
17895
17896 for (cursor = DECL_ARGUMENTS (fndecl);
17897 cursor != NULL_TREE;
17898 cursor = TREE_CHAIN (cursor))
17899 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17900 return NULL;
17901
17902 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17903 if (TREE_CODE (tree_body) != RETURN_EXPR)
17904 return NULL;
17905 tree_body = TREE_OPERAND (tree_body, 0);
17906 if (TREE_CODE (tree_body) != MODIFY_EXPR
17907 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17908 return NULL;
17909 tree_body = TREE_OPERAND (tree_body, 1);
17910
17911 /* Try to translate the body expression itself. Note that this will probably
17912 cause an infinite recursion if its call graph has a cycle. This is very
17913 unlikely for size functions, however, so don't bother with such things at
17914 the moment. */
17915 ctx.context_type = NULL_TREE;
17916 ctx.base_decl = NULL_TREE;
17917 ctx.dpi = &dpi;
17918 ctx.placeholder_arg = false;
17919 ctx.placeholder_seen = false;
17920 dpi.fndecl = fndecl;
17921 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17922 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17923 if (!loc_body)
17924 return NULL;
17925
17926 /* After evaluating all operands in "loc_body", we should still have on the
17927 stack all arguments plus the desired function result (top of the stack).
17928 Generate code in order to keep only the result in our stack frame. */
17929 epilogue = NULL;
17930 for (i = 0; i < dpi.args_count; ++i)
17931 {
17932 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17933 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17934 op_couple->dw_loc_next->dw_loc_next = epilogue;
17935 epilogue = op_couple;
17936 }
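/* Illustration (a sketch of the list built above, not additional code): for
   two arguments, the epilogue is DW_OP_swap; DW_OP_drop; DW_OP_swap;
   DW_OP_drop, which discards both argument slots while leaving the computed
   result on top of the stack.  */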
17937 add_loc_descr (&loc_body, epilogue);
17938 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17939 return NULL;
17940
17941 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
17942 earlier because they were considered useful. Now that there is an epilogue,
17943 they are not useful anymore, so give it another try. */
17944 loc_descr_without_nops (loc_body);
17945
17946 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17947 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17948 though, given that size functions do not come from source, so they should
17949 not have a dedicated DW_TAG_subprogram DIE. */
17950 dwarf_proc_die
17951 = new_dwarf_proc_die (loc_body, fndecl,
17952 get_context_die (DECL_CONTEXT (fndecl)));
17953
17954 /* The called DWARF procedure consumes one stack slot per argument and
17955 returns one stack slot. */
17956 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
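/* Illustration (hypothetical numbers only): for a procedure with two
   arguments, the value stored is 1 - 2 = -1, i.e. a call pops one slot net,
   which is exactly how resolve_args_picking_1 adjusts frame_offset_ when it
   meets DW_OP_call2, DW_OP_call4 or DW_OP_call_ref.  */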
17957
17958 return dwarf_proc_die;
17959 }
17960
17961
17962 /* Generate Dwarf location list representing LOC.
17963 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
17964 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17965 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17966 description will be returned (i.e. DW_OP_reg can be used
17967 to refer to register values).
17968
17969 CONTEXT provides information to customize the location descriptions
17970 generation. Its context_type field specifies what type is implicitly
17971 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17972 will not be generated.
17973
17974 Its DPI field determines whether we are generating a DWARF expression for a
17975 DWARF procedure, in which case PARM_DECL references are handled specially.
17976
17977 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17978 and dpi fields were null. */
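/* For instance (a hypothetical illustration of the WANT_ADDRESS values, not an
   exhaustive description of the cases below): for a variable living in memory
   at frame offset 8, WANT_ADDRESS == 1 would yield something like
   DW_OP_fbreg 8 (its address), while WANT_ADDRESS == 0 would append a
   DW_OP_deref so that the value stored there is computed instead.  */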
17979
17980 static dw_loc_list_ref
17981 loc_list_from_tree_1 (tree loc, int want_address,
17982 struct loc_descr_context *context)
17983 {
17984 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17985 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17986 int have_address = 0;
17987 enum dwarf_location_atom op;
17988
17989 /* ??? Most of the time we do not take proper care for sign/zero
17990 extending the values properly. Hopefully this won't be a real
17991 problem... */
17992
17993 if (context != NULL
17994 && context->base_decl == loc
17995 && want_address == 0)
17996 {
17997 if (dwarf_version >= 3 || !dwarf_strict)
17998 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17999 NULL, 0, NULL, 0, NULL);
18000 else
18001 return NULL;
18002 }
18003
18004 switch (TREE_CODE (loc))
18005 {
18006 case ERROR_MARK:
18007 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18008 return 0;
18009
18010 case PLACEHOLDER_EXPR:
18011 /* This case involves extracting fields from an object to determine the
18012 position of other fields. It is supposed to appear only as the first
18013 operand of COMPONENT_REF nodes and to reference precisely the type
18014 that the context allows. */
18015 if (context != NULL
18016 && TREE_TYPE (loc) == context->context_type
18017 && want_address >= 1)
18018 {
18019 if (dwarf_version >= 3 || !dwarf_strict)
18020 {
18021 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18022 have_address = 1;
18023 break;
18024 }
18025 else
18026 return NULL;
18027 }
18028 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18029 the single argument passed by the consumer. */
18030 else if (context != NULL
18031 && context->placeholder_arg
18032 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18033 && want_address == 0)
18034 {
18035 ret = new_loc_descr (DW_OP_pick, 0, 0);
18036 ret->frame_offset_rel = 1;
18037 context->placeholder_seen = true;
18038 break;
18039 }
18040 else
18041 expansion_failed (loc, NULL_RTX,
18042 "PLACEHOLDER_EXPR for an unexpected type");
18043 break;
18044
18045 case CALL_EXPR:
18046 {
18047 const int nargs = call_expr_nargs (loc);
18048 tree callee = get_callee_fndecl (loc);
18049 int i;
18050 dw_die_ref dwarf_proc;
18051
18052 if (callee == NULL_TREE)
18053 goto call_expansion_failed;
18054
18055 /* We handle only functions that return an integer. */
18056 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18057 goto call_expansion_failed;
18058
18059 dwarf_proc = function_to_dwarf_procedure (callee);
18060 if (dwarf_proc == NULL)
18061 goto call_expansion_failed;
18062
18063 /* Evaluate arguments right-to-left so that the first argument will
18064 be the top-most one on the stack. */
18065 for (i = nargs - 1; i >= 0; --i)
18066 {
18067 dw_loc_descr_ref loc_descr
18068 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18069 context);
18070
18071 if (loc_descr == NULL)
18072 goto call_expansion_failed;
18073
18074 add_loc_descr (&ret, loc_descr);
18075 }
18076
18077 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18078 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18079 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18080 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18081 add_loc_descr (&ret, ret1);
18082 break;
18083
18084 call_expansion_failed:
18085 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18086 /* We could not translate this call into a DWARF expression. */
18087 return 0;
18088 }
18089
18090 case PREINCREMENT_EXPR:
18091 case PREDECREMENT_EXPR:
18092 case POSTINCREMENT_EXPR:
18093 case POSTDECREMENT_EXPR:
18094 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18095 /* There are no opcodes for these operations. */
18096 return 0;
18097
18098 case ADDR_EXPR:
18099 /* If we already want an address, see if there is an INDIRECT_REF inside,
18100 e.g. for &this->field. */
18101 if (want_address)
18102 {
18103 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18104 (loc, want_address == 2, context);
18105 if (list_ret)
18106 have_address = 1;
18107 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18108 && (ret = cst_pool_loc_descr (loc)))
18109 have_address = 1;
18110 }
18111 /* Otherwise, process the argument and look for the address. */
18112 if (!list_ret && !ret)
18113 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18114 else
18115 {
18116 if (want_address)
18117 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18118 return NULL;
18119 }
18120 break;
18121
18122 case VAR_DECL:
18123 if (DECL_THREAD_LOCAL_P (loc))
18124 {
18125 rtx rtl;
18126 enum dwarf_location_atom tls_op;
18127 enum dtprel_bool dtprel = dtprel_false;
18128
18129 if (targetm.have_tls)
18130 {
18131 /* If this is not defined, we have no way to emit the
18132 data. */
18133 if (!targetm.asm_out.output_dwarf_dtprel)
18134 return 0;
18135
18136 /* The way DW_OP_GNU_push_tls_address is specified, we
18137 can only look up addresses of objects in the current
18138 module. We used DW_OP_addr as first op, but that's
18139 wrong, because DW_OP_addr is relocated by the debug
18140 info consumer, while DW_OP_GNU_push_tls_address
18141 operand shouldn't be. */
18142 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18143 return 0;
18144 dtprel = dtprel_true;
18145 /* We check for DWARF 5 here because gdb did not implement
18146 DW_OP_form_tls_address until after 7.12. */
18147 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18148 : DW_OP_GNU_push_tls_address);
18149 }
18150 else
18151 {
18152 if (!targetm.emutls.debug_form_tls_address
18153 || !(dwarf_version >= 3 || !dwarf_strict))
18154 return 0;
18155 /* We stuffed the control variable into the DECL_VALUE_EXPR
18156 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18157 no longer appear in gimple code. We used the control
18158 variable specifically so that we could pick it up here. */
18159 loc = DECL_VALUE_EXPR (loc);
18160 tls_op = DW_OP_form_tls_address;
18161 }
18162
18163 rtl = rtl_for_decl_location (loc);
18164 if (rtl == NULL_RTX)
18165 return 0;
18166
18167 if (!MEM_P (rtl))
18168 return 0;
18169 rtl = XEXP (rtl, 0);
18170 if (! CONSTANT_P (rtl))
18171 return 0;
18172
18173 ret = new_addr_loc_descr (rtl, dtprel);
18174 ret1 = new_loc_descr (tls_op, 0, 0);
18175 add_loc_descr (&ret, ret1);
18176
18177 have_address = 1;
18178 break;
18179 }
18180 /* FALLTHRU */
18181
18182 case PARM_DECL:
18183 if (context != NULL && context->dpi != NULL
18184 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18185 {
18186 /* We are generating code for a DWARF procedure and we want to access
18187 one of its arguments: find the appropriate argument offset and let
18188 the resolve_args_picking pass compute the offset that complies
18189 with the stack frame size. */
18190 unsigned i = 0;
18191 tree cursor;
18192
18193 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18194 cursor != NULL_TREE && cursor != loc;
18195 cursor = TREE_CHAIN (cursor), ++i)
18196 ;
18197 /* If we are translating a DWARF procedure, all referenced parameters
18198 must belong to the current function. */
18199 gcc_assert (cursor != NULL_TREE);
18200
18201 ret = new_loc_descr (DW_OP_pick, i, 0);
18202 ret->frame_offset_rel = 1;
18203 break;
18204 }
18205 /* FALLTHRU */
18206
18207 case RESULT_DECL:
18208 if (DECL_HAS_VALUE_EXPR_P (loc))
18209 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18210 want_address, context);
18211 /* FALLTHRU */
18212
18213 case FUNCTION_DECL:
18214 {
18215 rtx rtl;
18216 var_loc_list *loc_list = lookup_decl_loc (loc);
18217
18218 if (loc_list && loc_list->first)
18219 {
18220 list_ret = dw_loc_list (loc_list, loc, want_address);
18221 have_address = want_address != 0;
18222 break;
18223 }
18224 rtl = rtl_for_decl_location (loc);
18225 if (rtl == NULL_RTX)
18226 {
18227 if (TREE_CODE (loc) != FUNCTION_DECL
18228 && early_dwarf
18229 && current_function_decl
18230 && want_address != 1
18231 && ! DECL_IGNORED_P (loc)
18232 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18233 || POINTER_TYPE_P (TREE_TYPE (loc)))
18234 && DECL_CONTEXT (loc) == current_function_decl
18235 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18236 <= DWARF2_ADDR_SIZE))
18237 {
18238 dw_die_ref ref = lookup_decl_die (loc);
18239 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18240 if (ref)
18241 {
18242 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18243 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18244 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18245 }
18246 else
18247 {
18248 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18249 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18250 }
18251 break;
18252 }
18253 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18254 return 0;
18255 }
18256 else if (CONST_INT_P (rtl))
18257 {
18258 HOST_WIDE_INT val = INTVAL (rtl);
18259 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18260 val &= GET_MODE_MASK (DECL_MODE (loc));
18261 ret = int_loc_descriptor (val);
18262 }
18263 else if (GET_CODE (rtl) == CONST_STRING)
18264 {
18265 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18266 return 0;
18267 }
18268 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18269 ret = new_addr_loc_descr (rtl, dtprel_false);
18270 else
18271 {
18272 machine_mode mode, mem_mode;
18273
18274 /* Certain constructs can only be represented at top-level. */
18275 if (want_address == 2)
18276 {
18277 ret = loc_descriptor (rtl, VOIDmode,
18278 VAR_INIT_STATUS_INITIALIZED);
18279 have_address = 1;
18280 }
18281 else
18282 {
18283 mode = GET_MODE (rtl);
18284 mem_mode = VOIDmode;
18285 if (MEM_P (rtl))
18286 {
18287 mem_mode = mode;
18288 mode = get_address_mode (rtl);
18289 rtl = XEXP (rtl, 0);
18290 have_address = 1;
18291 }
18292 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18293 VAR_INIT_STATUS_INITIALIZED);
18294 }
18295 if (!ret)
18296 expansion_failed (loc, rtl,
18297 "failed to produce loc descriptor for rtl");
18298 }
18299 }
18300 break;
18301
18302 case MEM_REF:
18303 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18304 {
18305 have_address = 1;
18306 goto do_plus;
18307 }
18308 /* Fallthru. */
18309 case INDIRECT_REF:
18310 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18311 have_address = 1;
18312 break;
18313
18314 case TARGET_MEM_REF:
18315 case SSA_NAME:
18316 case DEBUG_EXPR_DECL:
18317 return NULL;
18318
18319 case COMPOUND_EXPR:
18320 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18321 context);
18322
18323 CASE_CONVERT:
18324 case VIEW_CONVERT_EXPR:
18325 case SAVE_EXPR:
18326 case MODIFY_EXPR:
18327 case NON_LVALUE_EXPR:
18328 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18329 context);
18330
18331 case COMPONENT_REF:
18332 case BIT_FIELD_REF:
18333 case ARRAY_REF:
18334 case ARRAY_RANGE_REF:
18335 case REALPART_EXPR:
18336 case IMAGPART_EXPR:
18337 {
18338 tree obj, offset;
18339 poly_int64 bitsize, bitpos, bytepos;
18340 machine_mode mode;
18341 int unsignedp, reversep, volatilep = 0;
18342
18343 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18344 &unsignedp, &reversep, &volatilep);
18345
18346 gcc_assert (obj != loc);
18347
18348 list_ret = loc_list_from_tree_1 (obj,
18349 want_address == 2
18350 && known_eq (bitpos, 0)
18351 && !offset ? 2 : 1,
18352 context);
18353 /* TODO: We can extract the value of a small expression via shifting even
18354 for a nonzero bitpos. */
18355 if (list_ret == 0)
18356 return 0;
18357 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18358 || !multiple_p (bitsize, BITS_PER_UNIT))
18359 {
18360 expansion_failed (loc, NULL_RTX,
18361 "bitfield access");
18362 return 0;
18363 }
18364
18365 if (offset != NULL_TREE)
18366 {
18367 /* Variable offset. */
18368 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18369 if (list_ret1 == 0)
18370 return 0;
18371 add_loc_list (&list_ret, list_ret1);
18372 if (!list_ret)
18373 return 0;
18374 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18375 }
18376
18377 HOST_WIDE_INT value;
18378 if (bytepos.is_constant (&value) && value > 0)
18379 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18380 value, 0));
18381 else if (maybe_ne (bytepos, 0))
18382 loc_list_plus_const (list_ret, bytepos);
18383
18384 have_address = 1;
18385 break;
18386 }
18387
18388 case INTEGER_CST:
18389 if ((want_address || !tree_fits_shwi_p (loc))
18390 && (ret = cst_pool_loc_descr (loc)))
18391 have_address = 1;
18392 else if (want_address == 2
18393 && tree_fits_shwi_p (loc)
18394 && (ret = address_of_int_loc_descriptor
18395 (int_size_in_bytes (TREE_TYPE (loc)),
18396 tree_to_shwi (loc))))
18397 have_address = 1;
18398 else if (tree_fits_shwi_p (loc))
18399 ret = int_loc_descriptor (tree_to_shwi (loc));
18400 else if (tree_fits_uhwi_p (loc))
18401 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18402 else
18403 {
18404 expansion_failed (loc, NULL_RTX,
18405 "Integer operand is not host integer");
18406 return 0;
18407 }
18408 break;
18409
18410 case CONSTRUCTOR:
18411 case REAL_CST:
18412 case STRING_CST:
18413 case COMPLEX_CST:
18414 if ((ret = cst_pool_loc_descr (loc)))
18415 have_address = 1;
18416 else if (TREE_CODE (loc) == CONSTRUCTOR)
18417 {
18418 tree type = TREE_TYPE (loc);
18419 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18420 unsigned HOST_WIDE_INT offset = 0;
18421 unsigned HOST_WIDE_INT cnt;
18422 constructor_elt *ce;
18423
18424 if (TREE_CODE (type) == RECORD_TYPE)
18425 {
18426 /* This is very limited, but it's enough to output
18427 pointers to member functions, as long as the
18428 referenced function is defined in the current
18429 translation unit. */
18430 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18431 {
18432 tree val = ce->value;
18433
18434 tree field = ce->index;
18435
18436 if (val)
18437 STRIP_NOPS (val);
18438
18439 if (!field || DECL_BIT_FIELD (field))
18440 {
18441 expansion_failed (loc, NULL_RTX,
18442 "bitfield in record type constructor");
18443 size = offset = (unsigned HOST_WIDE_INT)-1;
18444 ret = NULL;
18445 break;
18446 }
18447
18448 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18449 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18450 gcc_assert (pos + fieldsize <= size);
18451 if (pos < offset)
18452 {
18453 expansion_failed (loc, NULL_RTX,
18454 "out-of-order fields in record constructor");
18455 size = offset = (unsigned HOST_WIDE_INT)-1;
18456 ret = NULL;
18457 break;
18458 }
18459 if (pos > offset)
18460 {
18461 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18462 add_loc_descr (&ret, ret1);
18463 offset = pos;
18464 }
18465 if (val && fieldsize != 0)
18466 {
18467 ret1 = loc_descriptor_from_tree (val, want_address, context);
18468 if (!ret1)
18469 {
18470 expansion_failed (loc, NULL_RTX,
18471 "unsupported expression in field");
18472 size = offset = (unsigned HOST_WIDE_INT)-1;
18473 ret = NULL;
18474 break;
18475 }
18476 add_loc_descr (&ret, ret1);
18477 }
18478 if (fieldsize)
18479 {
18480 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18481 add_loc_descr (&ret, ret1);
18482 offset = pos + fieldsize;
18483 }
18484 }
18485
18486 if (offset != size)
18487 {
18488 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18489 add_loc_descr (&ret, ret1);
18490 offset = size;
18491 }
18492
18493 have_address = !!want_address;
18494 }
18495 else
18496 expansion_failed (loc, NULL_RTX,
18497 "constructor of non-record type");
18498 }
18499 else
18500 /* We can construct small constants here using int_loc_descriptor. */
18501 expansion_failed (loc, NULL_RTX,
18502 "constructor or constant not in constant pool");
18503 break;
18504
18505 case TRUTH_AND_EXPR:
18506 case TRUTH_ANDIF_EXPR:
18507 case BIT_AND_EXPR:
18508 op = DW_OP_and;
18509 goto do_binop;
18510
18511 case TRUTH_XOR_EXPR:
18512 case BIT_XOR_EXPR:
18513 op = DW_OP_xor;
18514 goto do_binop;
18515
18516 case TRUTH_OR_EXPR:
18517 case TRUTH_ORIF_EXPR:
18518 case BIT_IOR_EXPR:
18519 op = DW_OP_or;
18520 goto do_binop;
18521
18522 case FLOOR_DIV_EXPR:
18523 case CEIL_DIV_EXPR:
18524 case ROUND_DIV_EXPR:
18525 case TRUNC_DIV_EXPR:
18526 case EXACT_DIV_EXPR:
18527 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18528 return 0;
18529 op = DW_OP_div;
18530 goto do_binop;
18531
18532 case MINUS_EXPR:
18533 op = DW_OP_minus;
18534 goto do_binop;
18535
18536 case FLOOR_MOD_EXPR:
18537 case CEIL_MOD_EXPR:
18538 case ROUND_MOD_EXPR:
18539 case TRUNC_MOD_EXPR:
18540 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18541 {
18542 op = DW_OP_mod;
18543 goto do_binop;
18544 }
18545 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18546 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18547 if (list_ret == 0 || list_ret1 == 0)
18548 return 0;
18549
18550 add_loc_list (&list_ret, list_ret1);
18551 if (list_ret == 0)
18552 return 0;
18553 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18554 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18555 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18556 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18557 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18558 break;
18559
18560 case MULT_EXPR:
18561 op = DW_OP_mul;
18562 goto do_binop;
18563
18564 case LSHIFT_EXPR:
18565 op = DW_OP_shl;
18566 goto do_binop;
18567
18568 case RSHIFT_EXPR:
18569 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18570 goto do_binop;
18571
18572 case POINTER_PLUS_EXPR:
18573 case PLUS_EXPR:
18574 do_plus:
18575 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18576 {
18577 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18578 smarter to encode their opposite. The DW_OP_plus_uconst operation
18579 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18580 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18581 bytes, Y being the size of the operation that pushes the opposite
18582 of the addend. So let's choose the smallest representation. */
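/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT and an 8-byte
   DWARF address size): an addend of 0xffffffffffffffff decodes as -1 here, so
   DW_OP_plus_uconst would cost 1 + 10 bytes (10 for the ULEB128 operand),
   whereas pushing the opposite costs only DW_OP_lit1; DW_OP_minus, i.e. 2
   bytes, so the "push literal; DW_OP_minus" form is chosen below.  */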
18583 const tree tree_addend = TREE_OPERAND (loc, 1);
18584 offset_int wi_addend;
18585 HOST_WIDE_INT shwi_addend;
18586 dw_loc_descr_ref loc_naddend;
18587
18588 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18589 if (list_ret == 0)
18590 return 0;
18591
18592 /* Try to get the literal to push. It is the opposite of the addend,
18593 so as we rely on wrapping during DWARF evaluation, first decode
18594 the literal as a "DWARF-sized" signed number. */
18595 wi_addend = wi::to_offset (tree_addend);
18596 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18597 shwi_addend = wi_addend.to_shwi ();
18598 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18599 ? int_loc_descriptor (-shwi_addend)
18600 : NULL;
18601
18602 if (loc_naddend != NULL
18603 && ((unsigned) size_of_uleb128 (shwi_addend)
18604 > size_of_loc_descr (loc_naddend)))
18605 {
18606 add_loc_descr_to_each (list_ret, loc_naddend);
18607 add_loc_descr_to_each (list_ret,
18608 new_loc_descr (DW_OP_minus, 0, 0));
18609 }
18610 else
18611 {
18612 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18613 {
18614 loc_naddend = loc_cur;
18615 loc_cur = loc_cur->dw_loc_next;
18616 ggc_free (loc_naddend);
18617 }
18618 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18619 }
18620 break;
18621 }
18622
18623 op = DW_OP_plus;
18624 goto do_binop;
18625
18626 case LE_EXPR:
18627 op = DW_OP_le;
18628 goto do_comp_binop;
18629
18630 case GE_EXPR:
18631 op = DW_OP_ge;
18632 goto do_comp_binop;
18633
18634 case LT_EXPR:
18635 op = DW_OP_lt;
18636 goto do_comp_binop;
18637
18638 case GT_EXPR:
18639 op = DW_OP_gt;
18640 goto do_comp_binop;
18641
18642 do_comp_binop:
18643 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18644 {
18645 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18646 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18647 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18648 TREE_CODE (loc));
18649 break;
18650 }
18651 else
18652 goto do_binop;
18653
18654 case EQ_EXPR:
18655 op = DW_OP_eq;
18656 goto do_binop;
18657
18658 case NE_EXPR:
18659 op = DW_OP_ne;
18660 goto do_binop;
18661
18662 do_binop:
18663 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18664 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18665 if (list_ret == 0 || list_ret1 == 0)
18666 return 0;
18667
18668 add_loc_list (&list_ret, list_ret1);
18669 if (list_ret == 0)
18670 return 0;
18671 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18672 break;
18673
18674 case TRUTH_NOT_EXPR:
18675 case BIT_NOT_EXPR:
18676 op = DW_OP_not;
18677 goto do_unop;
18678
18679 case ABS_EXPR:
18680 op = DW_OP_abs;
18681 goto do_unop;
18682
18683 case NEGATE_EXPR:
18684 op = DW_OP_neg;
18685 goto do_unop;
18686
18687 do_unop:
18688 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18689 if (list_ret == 0)
18690 return 0;
18691
18692 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18693 break;
18694
18695 case MIN_EXPR:
18696 case MAX_EXPR:
18697 {
18698 const enum tree_code code =
18699 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18700
18701 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18702 build2 (code, integer_type_node,
18703 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18704 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18705 }
18706
18707 /* fall through */
18708
18709 case COND_EXPR:
18710 {
18711 dw_loc_descr_ref lhs
18712 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18713 dw_loc_list_ref rhs
18714 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18715 dw_loc_descr_ref bra_node, jump_node, tmp;
18716
18717 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18718 if (list_ret == 0 || lhs == 0 || rhs == 0)
18719 return 0;
18720
18721 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18722 add_loc_descr_to_each (list_ret, bra_node);
18723
18724 add_loc_list (&list_ret, rhs);
18725 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18726 add_loc_descr_to_each (list_ret, jump_node);
18727
18728 add_loc_descr_to_each (list_ret, lhs);
18729 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18730 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18731
18732 /* ??? Need a node to point the skip at. Use a nop. */
18733 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18734 add_loc_descr_to_each (list_ret, tmp);
18735 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18736 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18737 }
18738 break;
18739
18740 case FIX_TRUNC_EXPR:
18741 return 0;
18742
18743 default:
18744 /* Leave front-end specific codes as simply unknown. This comes
18745 up, for instance, with the C STMT_EXPR. */
18746 if ((unsigned int) TREE_CODE (loc)
18747 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18748 {
18749 expansion_failed (loc, NULL_RTX,
18750 "language specific tree node");
18751 return 0;
18752 }
18753
18754 /* Otherwise this is a generic code; we should just list all of
18755 these explicitly. We forgot one. */
18756 if (flag_checking)
18757 gcc_unreachable ();
18758
18759 /* In a release build, we want to degrade gracefully: better to
18760 generate incomplete debugging information than to crash. */
18761 return NULL;
18762 }
18763
18764 if (!ret && !list_ret)
18765 return 0;
18766
18767 if (want_address == 2 && !have_address
18768 && (dwarf_version >= 4 || !dwarf_strict))
18769 {
18770 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18771 {
18772 expansion_failed (loc, NULL_RTX,
18773 "DWARF address size mismatch");
18774 return 0;
18775 }
18776 if (ret)
18777 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18778 else
18779 add_loc_descr_to_each (list_ret,
18780 new_loc_descr (DW_OP_stack_value, 0, 0));
18781 have_address = 1;
18782 }
18783 /* Show if we can't fill the request for an address. */
18784 if (want_address && !have_address)
18785 {
18786 expansion_failed (loc, NULL_RTX,
18787 "Want address and only have value");
18788 return 0;
18789 }
18790
18791 gcc_assert (!ret || !list_ret);
18792
18793 /* If we've got an address and don't want one, dereference. */
18794 if (!want_address && have_address)
18795 {
18796 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18797
18798 if (size > DWARF2_ADDR_SIZE || size == -1)
18799 {
18800 expansion_failed (loc, NULL_RTX,
18801 "DWARF address size mismatch");
18802 return 0;
18803 }
18804 else if (size == DWARF2_ADDR_SIZE)
18805 op = DW_OP_deref;
18806 else
18807 op = DW_OP_deref_size;
18808
18809 if (ret)
18810 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18811 else
18812 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18813 }
18814 if (ret)
18815 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18816
18817 return list_ret;
18818 }
18819
18820 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18821 expressions. */
18822
18823 static dw_loc_list_ref
18824 loc_list_from_tree (tree loc, int want_address,
18825 struct loc_descr_context *context)
18826 {
18827 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18828
18829 for (dw_loc_list_ref loc_cur = result;
18830 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18831 loc_descr_without_nops (loc_cur->expr);
18832 return result;
18833 }
18834
18835 /* Same as above but return only a single location expression. */
18836 static dw_loc_descr_ref
18837 loc_descriptor_from_tree (tree loc, int want_address,
18838 struct loc_descr_context *context)
18839 {
18840 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18841 if (!ret)
18842 return NULL;
18843 if (ret->dw_loc_next)
18844 {
18845 expansion_failed (loc, NULL_RTX,
18846 "Location list where only loc descriptor needed");
18847 return NULL;
18848 }
18849 return ret->expr;
18850 }
18851
18852 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18853 pointer to the declared type for the relevant field variable, or return
18854 `integer_type_node' if the given node turns out to be an
18855 ERROR_MARK node. */
18856
18857 static inline tree
18858 field_type (const_tree decl)
18859 {
18860 tree type;
18861
18862 if (TREE_CODE (decl) == ERROR_MARK)
18863 return integer_type_node;
18864
18865 type = DECL_BIT_FIELD_TYPE (decl);
18866 if (type == NULL_TREE)
18867 type = TREE_TYPE (decl);
18868
18869 return type;
18870 }
18871
18872 /* Given a pointer to a tree node, return the alignment in bits for
18873 it, or else return BITS_PER_WORD if the node actually turns out to
18874 be an ERROR_MARK node. */
18875
18876 static inline unsigned
18877 simple_type_align_in_bits (const_tree type)
18878 {
18879 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18880 }
18881
18882 static inline unsigned
18883 simple_decl_align_in_bits (const_tree decl)
18884 {
18885 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18886 }
18887
18888 /* Return the result of rounding T up to ALIGN. */
18889
18890 static inline offset_int
18891 round_up_to_align (const offset_int &t, unsigned int align)
18892 {
18893 return wi::udiv_trunc (t + align - 1, align) * align;
18894 }
18895
18896 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18897 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18898 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18899 if we fail to return the size in one of these two forms. */
18900
18901 static dw_loc_descr_ref
18902 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18903 {
18904 tree tree_size;
18905 struct loc_descr_context ctx;
18906
18907 /* Prefer to return a constant integer, if possible. */
18908 *cst_size = int_size_in_bytes (type);
18909 if (*cst_size != -1)
18910 return NULL;
18911
18912 ctx.context_type = const_cast<tree> (type);
18913 ctx.base_decl = NULL_TREE;
18914 ctx.dpi = NULL;
18915 ctx.placeholder_arg = false;
18916 ctx.placeholder_seen = false;
18917
18918 type = TYPE_MAIN_VARIANT (type);
18919 tree_size = TYPE_SIZE_UNIT (type);
18920 return ((tree_size != NULL_TREE)
18921 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18922 : NULL);
18923 }
18924
18925 /* Helper structure for RECORD_TYPE processing. */
18926 struct vlr_context
18927 {
18928 /* Root RECORD_TYPE. It is needed to generate data member location
18929 descriptions in variable-length records (VLR), but also to cope with
18930 variants, which are composed of nested structures multiplexed with
18931 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18932 function processing a FIELD_DECL, it is required to be non null. */
18933 tree struct_type;
18934 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18935 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18936 this variant part as part of the root record (in storage units). For
18937 regular records, it must be NULL_TREE. */
18938 tree variant_part_offset;
18939 };
18940
18941 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18942 addressed byte of the "containing object" for the given FIELD_DECL. If
18943 possible, return a native constant through CST_OFFSET (in which case NULL is
18944 returned); otherwise return a DWARF expression that computes the offset.
18945
18946 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18947 that offset is, either because the argument turns out to be a pointer to an
18948 ERROR_MARK node, or because the offset expression is too complex for us.
18949
18950 CTX is required: see the comment for VLR_CONTEXT. */
18951
18952 static dw_loc_descr_ref
18953 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18954 HOST_WIDE_INT *cst_offset)
18955 {
18956 tree tree_result;
18957 dw_loc_list_ref loc_result;
18958
18959 *cst_offset = 0;
18960
18961 if (TREE_CODE (decl) == ERROR_MARK)
18962 return NULL;
18963 else
18964 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18965
18966 /* We cannot handle variable bit offsets at the moment, so abort if that is
18967 the case. */
18968 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18969 return NULL;
18970
18971 #ifdef PCC_BITFIELD_TYPE_MATTERS
18972 /* We used to handle only constant offsets in all cases. Now, we properly
18973 handle dynamic byte offsets only when the PCC bitfield type doesn't
18974 matter. */
18975 if (PCC_BITFIELD_TYPE_MATTERS
18976 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18977 {
18978 offset_int object_offset_in_bits;
18979 offset_int object_offset_in_bytes;
18980 offset_int bitpos_int;
18981 tree type;
18982 tree field_size_tree;
18983 offset_int deepest_bitpos;
18984 offset_int field_size_in_bits;
18985 unsigned int type_align_in_bits;
18986 unsigned int decl_align_in_bits;
18987 offset_int type_size_in_bits;
18988
18989 bitpos_int = wi::to_offset (bit_position (decl));
18990 type = field_type (decl);
18991 type_size_in_bits = offset_int_type_size_in_bits (type);
18992 type_align_in_bits = simple_type_align_in_bits (type);
18993
18994 field_size_tree = DECL_SIZE (decl);
18995
18996 /* The size could be unspecified if there was an error, or for
18997 a flexible array member. */
18998 if (!field_size_tree)
18999 field_size_tree = bitsize_zero_node;
19000
19001 /* If the size of the field is not constant, use the type size. */
19002 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19003 field_size_in_bits = wi::to_offset (field_size_tree);
19004 else
19005 field_size_in_bits = type_size_in_bits;
19006
19007 decl_align_in_bits = simple_decl_align_in_bits (decl);
19008
19009 /* The GCC front-end doesn't make any attempt to keep track of the
19010 starting bit offset (relative to the start of the containing
19011 structure type) of the hypothetical "containing object" for a
19012 bit-field. Thus, when computing the byte offset value for the
19013 start of the "containing object" of a bit-field, we must deduce
19014 this information on our own. This can be rather tricky to do in
19015 some cases. For example, handling the following structure type
19016 definition when compiling for an i386/i486 target (which only
19017 aligns long long's to 32-bit boundaries) can be very tricky:
19018
19019 struct S { int field1; long long field2:31; };
19020
19021 Fortunately, there is a simple rule-of-thumb which can be used
19022 in such cases. When compiling for an i386/i486, GCC will
19023 allocate 8 bytes for the structure shown above. It decides to
19024 do this based upon one simple rule for bit-field allocation.
19025 GCC allocates each "containing object" for each bit-field at
19026 the first (i.e. lowest addressed) legitimate alignment boundary
19027 (based upon the required minimum alignment for the declared
19028 type of the field) which it can possibly use, subject to the
19029 condition that there is still enough available space remaining
19030 in the containing object (when allocated at the selected point)
19031 to fully accommodate all of the bits of the bit-field itself.
19032
19033 This simple rule makes it obvious why GCC allocates 8 bytes for
19034 each object of the structure type shown above. When looking
19035 for a place to allocate the "containing object" for `field2',
19036 the compiler simply tries to allocate a 64-bit "containing
19037 object" at each successive 32-bit boundary (starting at zero)
19038 until it finds a place to allocate that 64-bit field such that
19039 at least 31 contiguous (and previously unallocated) bits remain
19040 within that selected 64 bit field. (As it turns out, for the
19041 example above, the compiler finds it is OK to allocate the
19042 "containing object" 64-bit field at bit-offset zero within the
19043 structure type.)
19044
19045 Here we attempt to work backwards from the limited set of facts
19046 we're given, and we try to deduce from those facts, where GCC
19047 must have believed that the containing object started (within
19048 the structure type). The value we deduce is then used (by the
19049 callers of this routine) to generate DW_AT_location and
19050 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19051 the case of DW_AT_location, regular fields as well). */
19052
19053 /* Figure out the bit-distance from the start of the structure to
19054 the "deepest" bit of the bit-field. */
19055 deepest_bitpos = bitpos_int + field_size_in_bits;
19056
19057 /* This is the tricky part. Use some fancy footwork to deduce
19058 where the lowest addressed bit of the containing object must
19059 be. */
19060 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19061
19062 /* Round up to type_align by default. This works best for
19063 bitfields. */
19064 object_offset_in_bits
19065 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19066
19067 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19068 {
19069 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19070
19071 /* Round up to decl_align instead. */
19072 object_offset_in_bits
19073 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19074 }
19075
19076 object_offset_in_bytes
19077 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19078 if (ctx->variant_part_offset == NULL_TREE)
19079 {
19080 *cst_offset = object_offset_in_bytes.to_shwi ();
19081 return NULL;
19082 }
19083 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19084 }
19085 else
19086 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19087 tree_result = byte_position (decl);
19088
19089 if (ctx->variant_part_offset != NULL_TREE)
19090 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19091 ctx->variant_part_offset, tree_result);
19092
19093 /* If the byte offset is a constant, it's simpler to handle a native
19094 constant rather than a DWARF expression. */
19095 if (TREE_CODE (tree_result) == INTEGER_CST)
19096 {
19097 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19098 return NULL;
19099 }
19100 struct loc_descr_context loc_ctx = {
19101 ctx->struct_type, /* context_type */
19102 NULL_TREE, /* base_decl */
19103 NULL, /* dpi */
19104 false, /* placeholder_arg */
19105 false /* placeholder_seen */
19106 };
19107 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19108
19109 /* We want a DWARF expression: abort if we only have a location list with
19110 multiple elements. */
19111 if (!loc_result || !single_element_loc_list_p (loc_result))
19112 return NULL;
19113 else
19114 return loc_result->expr;
19115 }
19116 \f
19117 /* The following routines define various Dwarf attributes and any data
19118 associated with them. */
19119
19120 /* Add a location description attribute value to a DIE.
19121
19122 This emits location attributes suitable for whole variables and
19123 whole parameters. Note that the location attributes for struct fields are
19124 generated by the routine `data_member_location_attribute' below. */
19125
19126 static inline void
19127 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19128 dw_loc_list_ref descr)
19129 {
19130 bool check_no_locviews = true;
19131 if (descr == 0)
19132 return;
19133 if (single_element_loc_list_p (descr))
19134 add_AT_loc (die, attr_kind, descr->expr);
19135 else
19136 {
19137 add_AT_loc_list (die, attr_kind, descr);
19138 gcc_assert (descr->ll_symbol);
19139 if (attr_kind == DW_AT_location && descr->vl_symbol
19140 && dwarf2out_locviews_in_attribute ())
19141 {
19142 add_AT_view_list (die, DW_AT_GNU_locviews);
19143 check_no_locviews = false;
19144 }
19145 }
19146
19147 if (check_no_locviews)
19148 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19149 }
19150
19151 /* Add DW_AT_accessibility attribute to DIE if needed. */
19152
19153 static void
19154 add_accessibility_attribute (dw_die_ref die, tree decl)
19155 {
19156 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19157 children, otherwise the default is DW_ACCESS_public. In DWARF2
19158 the default has always been DW_ACCESS_public. */
19159 if (TREE_PROTECTED (decl))
19160 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19161 else if (TREE_PRIVATE (decl))
19162 {
19163 if (dwarf_version == 2
19164 || die->die_parent == NULL
19165 || die->die_parent->die_tag != DW_TAG_class_type)
19166 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19167 }
19168 else if (dwarf_version > 2
19169 && die->die_parent
19170 && die->die_parent->die_tag == DW_TAG_class_type)
19171 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19172 }
19173
19174 /* Attach the specialized form of location attribute used for data members of
19175 struct and union types. In the special case of a FIELD_DECL node which
19176 represents a bit-field, the "offset" part of this special location
19177 descriptor must indicate the distance in bytes from the lowest-addressed
19178 byte of the containing struct or union type to the lowest-addressed byte of
19179 the "containing object" for the bit-field. (See the `field_byte_offset'
19180 function above).
19181
19182 For any given bit-field, the "containing object" is a hypothetical object
19183 (of some integral or enum type) within which the given bit-field lives. The
19184 type of this hypothetical "containing object" is always the same as the
19185 declared type of the individual bit-field itself (for GCC anyway... the
19186 DWARF spec doesn't actually mandate this). Note that it is the size (in
19187 bytes) of the hypothetical "containing object" which will be given in the
19188 DW_AT_byte_size attribute for this bit-field. (See the
19189 `byte_size_attribute' function below.) It is also used when calculating the
19190 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19191 function below.)
19192
19193 CTX is required: see the comment for VLR_CONTEXT. */
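/* A hypothetical illustration (not from the GCC sources or testsuite): for

     struct s { char c; int f : 3; };

   the "containing object" for F is an imaginary `int', so the DIE for F
   would carry a DW_AT_byte_size equal to the size of `int' and a
   DW_AT_bit_offset locating the three bits of F within that imaginary
   `int', while the data member location gives the byte offset of that
   `int' within `struct s'.  The exact offsets depend on the target's
   layout rules.  */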
19194
19195 static void
19196 add_data_member_location_attribute (dw_die_ref die,
19197 tree decl,
19198 struct vlr_context *ctx)
19199 {
19200 HOST_WIDE_INT offset;
19201 dw_loc_descr_ref loc_descr = 0;
19202
19203 if (TREE_CODE (decl) == TREE_BINFO)
19204 {
19205 /* We're working on the TAG_inheritance for a base class. */
19206 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19207 {
19208 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19209 aren't at a fixed offset from all (sub)objects of the same
19210 type. We need to extract the appropriate offset from our
19211 vtable. The following dwarf expression means
19212
19213 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19214
19215 This is specific to the V3 ABI, of course. */
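/* The sequence built below is therefore
     DW_OP_dup, DW_OP_deref, <constant Offset>, DW_OP_minus,
     DW_OP_deref, DW_OP_plus
   i.e. duplicate the object address, load the vtable pointer through
   it, step back Offset bytes from the vtable address, load the stored
   base offset and add it to the object address.  */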
19216
19217 dw_loc_descr_ref tmp;
19218
19219 /* Make a copy of the object address. */
19220 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19221 add_loc_descr (&loc_descr, tmp);
19222
19223 /* Extract the vtable address. */
19224 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19225 add_loc_descr (&loc_descr, tmp);
19226
19227 /* Calculate the address of the offset. */
19228 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19229 gcc_assert (offset < 0);
19230
19231 tmp = int_loc_descriptor (-offset);
19232 add_loc_descr (&loc_descr, tmp);
19233 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19234 add_loc_descr (&loc_descr, tmp);
19235
19236 /* Extract the offset. */
19237 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19238 add_loc_descr (&loc_descr, tmp);
19239
19240 /* Add it to the object address. */
19241 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19242 add_loc_descr (&loc_descr, tmp);
19243 }
19244 else
19245 offset = tree_to_shwi (BINFO_OFFSET (decl));
19246 }
19247 else
19248 {
19249 loc_descr = field_byte_offset (decl, ctx, &offset);
19250
19251 /* If loc_descr is available then we know the field offset is dynamic.
19252 However, GDB does not handle dynamic field offsets very well at the
19253 moment. */
19254 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19255 {
19256 loc_descr = NULL;
19257 offset = 0;
19258 }
19259
19260 /* Data member location evaluation starts with the base address on the
19261 stack. Compute the field offset and add it to this base address. */
19262 else if (loc_descr != NULL)
19263 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19264 }
19265
19266 if (! loc_descr)
19267 {
19268 /* While DW_AT_data_bit_offset was already added in DWARF4,
19269 GDB for example only added support for it in November 2016. For DWARF5
19270 we need newer debug info consumers anyway. We might change this
19271 to dwarf_version >= 4 once most consumers have caught up. */
19272 if (dwarf_version >= 5
19273 && TREE_CODE (decl) == FIELD_DECL
19274 && DECL_BIT_FIELD_TYPE (decl))
19275 {
19276 tree off = bit_position (decl);
19277 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19278 {
19279 remove_AT (die, DW_AT_byte_size);
19280 remove_AT (die, DW_AT_bit_offset);
19281 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19282 return;
19283 }
19284 }
19285 if (dwarf_version > 2)
19286 {
19287 /* Don't need to output a location expression, just the constant. */
19288 if (offset < 0)
19289 add_AT_int (die, DW_AT_data_member_location, offset);
19290 else
19291 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19292 return;
19293 }
19294 else
19295 {
19296 enum dwarf_location_atom op;
19297
19298 /* The DWARF2 standard says that we should assume that the structure
19299 address is already on the stack, so we can specify a structure
19300 field address by using DW_OP_plus_uconst. */
19301 op = DW_OP_plus_uconst;
19302 loc_descr = new_loc_descr (op, offset, 0);
19303 }
19304 }
19305
19306 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19307 }
19308
19309 /* Writes integer values to dw_vec_const array. */
19310
19311 static void
19312 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19313 {
19314 while (size != 0)
19315 {
19316 *dest++ = val & 0xff;
19317 val >>= 8;
19318 --size;
19319 }
19320 }
19321
19322 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19323
19324 static HOST_WIDE_INT
19325 extract_int (const unsigned char *src, unsigned int size)
19326 {
19327 HOST_WIDE_INT val = 0;
19328
19329 src += size;
19330 while (size != 0)
19331 {
19332 val <<= 8;
19333 val |= *--src & 0xff;
19334 --size;
19335 }
19336 return val;
19337 }
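/* A small worked example (illustration only): insert_int (0x1234, 2, buf)
   stores buf[0] = 0x34 and buf[1] = 0x12, least significant byte first,
   and extract_int (buf, 2) walks the bytes in the opposite order to
   recover 0x1234.  */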
19338
19339 /* Writes wide_int values to dw_vec_const array. */
19340
19341 static void
19342 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19343 {
19344 int i;
19345
19346 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19347 {
19348 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19349 return;
19350 }
19351
19352 /* We'd have to extend this code to support odd sizes. */
19353 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19354
19355 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19356
19357 if (WORDS_BIG_ENDIAN)
19358 for (i = n - 1; i >= 0; i--)
19359 {
19360 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19361 dest += sizeof (HOST_WIDE_INT);
19362 }
19363 else
19364 for (i = 0; i < n; i++)
19365 {
19366 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19367 dest += sizeof (HOST_WIDE_INT);
19368 }
19369 }
19370
19371 /* Writes floating point values to dw_vec_const array. */
19372
19373 static void
19374 insert_float (const_rtx rtl, unsigned char *array)
19375 {
19376 long val[4];
19377 int i;
19378 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19379
19380 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19381
19382 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19383 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19384 {
19385 insert_int (val[i], 4, array);
19386 array += 4;
19387 }
19388 }
19389
19390 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19391 does not have a "location" either in memory or in a register. These
19392 things can arise in GNU C when a constant is passed as an actual parameter
19393 to an inlined function. They can also arise in C++ where declared
19394 constants do not necessarily get memory "homes". */
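/* Hypothetical examples of both situations (not from a test case):

     const int width = 80;              // C++: may never get a memory home
     static inline int twice (int n) { return n * 2; }
     ... twice (42) ...                 // in the inlined copy, N is just 42

   In either case the DIE can still describe the value itself through
   DW_AT_const_value.  */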
19395
19396 static bool
19397 add_const_value_attribute (dw_die_ref die, rtx rtl)
19398 {
19399 switch (GET_CODE (rtl))
19400 {
19401 case CONST_INT:
19402 {
19403 HOST_WIDE_INT val = INTVAL (rtl);
19404
19405 if (val < 0)
19406 add_AT_int (die, DW_AT_const_value, val);
19407 else
19408 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19409 }
19410 return true;
19411
19412 case CONST_WIDE_INT:
19413 {
19414 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19415 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19416 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19417 wide_int w = wi::zext (w1, prec);
19418 add_AT_wide (die, DW_AT_const_value, w);
19419 }
19420 return true;
19421
19422 case CONST_DOUBLE:
19423 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19424 floating-point constant. A CONST_DOUBLE is used whenever the
19425 constant requires more than one word in order to be adequately
19426 represented. */
19427 if (TARGET_SUPPORTS_WIDE_INT == 0
19428 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19429 add_AT_double (die, DW_AT_const_value,
19430 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19431 else
19432 {
19433 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19434 unsigned int length = GET_MODE_SIZE (mode);
19435 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19436
19437 insert_float (rtl, array);
19438 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19439 }
19440 return true;
19441
19442 case CONST_VECTOR:
19443 {
19444 unsigned int length;
19445 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19446 return false;
19447
19448 machine_mode mode = GET_MODE (rtl);
19449 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19450 unsigned char *array
19451 = ggc_vec_alloc<unsigned char> (length * elt_size);
19452 unsigned int i;
19453 unsigned char *p;
19454 machine_mode imode = GET_MODE_INNER (mode);
19455
19456 switch (GET_MODE_CLASS (mode))
19457 {
19458 case MODE_VECTOR_INT:
19459 for (i = 0, p = array; i < length; i++, p += elt_size)
19460 {
19461 rtx elt = CONST_VECTOR_ELT (rtl, i);
19462 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19463 }
19464 break;
19465
19466 case MODE_VECTOR_FLOAT:
19467 for (i = 0, p = array; i < length; i++, p += elt_size)
19468 {
19469 rtx elt = CONST_VECTOR_ELT (rtl, i);
19470 insert_float (elt, p);
19471 }
19472 break;
19473
19474 default:
19475 gcc_unreachable ();
19476 }
19477
19478 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19479 }
19480 return true;
19481
19482 case CONST_STRING:
19483 if (dwarf_version >= 4 || !dwarf_strict)
19484 {
19485 dw_loc_descr_ref loc_result;
19486 resolve_one_addr (&rtl);
19487 rtl_addr:
19488 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19489 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19490 add_AT_loc (die, DW_AT_location, loc_result);
19491 vec_safe_push (used_rtx_array, rtl);
19492 return true;
19493 }
19494 return false;
19495
19496 case CONST:
19497 if (CONSTANT_P (XEXP (rtl, 0)))
19498 return add_const_value_attribute (die, XEXP (rtl, 0));
19499 /* FALLTHROUGH */
19500 case SYMBOL_REF:
19501 if (!const_ok_for_output (rtl))
19502 return false;
19503 /* FALLTHROUGH */
19504 case LABEL_REF:
19505 if (dwarf_version >= 4 || !dwarf_strict)
19506 goto rtl_addr;
19507 return false;
19508
19509 case PLUS:
19510 /* In cases where an inlined instance of an inline function is passed
19511 the address of an `auto' variable (which is local to the caller) we
19512 can get a situation where the DECL_RTL of the artificial local
19513 variable (for the inlining) which acts as a stand-in for the
19514 corresponding formal parameter (of the inline function) will look
19515 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19516 exactly a compile-time constant expression, but it isn't the address
19517 of the (artificial) local variable either. Rather, it represents the
19518 *value* which the artificial local variable always has during its
19519 lifetime. We currently have no way to represent such quasi-constant
19520 values in Dwarf, so for now we just punt and generate nothing. */
19521 return false;
19522
19523 case HIGH:
19524 case CONST_FIXED:
19525 return false;
19526
19527 case MEM:
19528 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19529 && MEM_READONLY_P (rtl)
19530 && GET_MODE (rtl) == BLKmode)
19531 {
19532 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19533 return true;
19534 }
19535 return false;
19536
19537 default:
19538 /* No other kinds of rtx should be possible here. */
19539 gcc_unreachable ();
19540 }
19541 return false;
19542 }
19543
19544 /* Determine whether the evaluation of EXPR references any variables
19545 or functions which aren't otherwise used (and therefore may not be
19546 output). */
19547 static tree
19548 reference_to_unused (tree * tp, int * walk_subtrees,
19549 void * data ATTRIBUTE_UNUSED)
19550 {
19551 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19552 *walk_subtrees = 0;
19553
19554 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19555 && ! TREE_ASM_WRITTEN (*tp))
19556 return *tp;
19557 /* ??? The C++ FE emits debug information for using decls, so
19558 putting gcc_unreachable here falls over. See PR31899. For now
19559 be conservative. */
19560 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19561 return *tp;
19562 else if (VAR_P (*tp))
19563 {
19564 varpool_node *node = varpool_node::get (*tp);
19565 if (!node || !node->definition)
19566 return *tp;
19567 }
19568 else if (TREE_CODE (*tp) == FUNCTION_DECL
19569 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19570 {
19571 /* The call graph machinery must have finished analyzing,
19572 optimizing and gimplifying the CU by now.
19573 So if *TP has no call graph node associated
19574 with it, it means *TP will not be emitted. */
19575 if (!cgraph_node::get (*tp))
19576 return *tp;
19577 }
19578 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19579 return *tp;
19580
19581 return NULL_TREE;
19582 }
19583
19584 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19585 for use in a later add_const_value_attribute call. */
19586
19587 static rtx
19588 rtl_for_decl_init (tree init, tree type)
19589 {
19590 rtx rtl = NULL_RTX;
19591
19592 STRIP_NOPS (init);
19593
19594 /* If a variable is initialized with a string constant without embedded
19595 zeros, build CONST_STRING. */
19596 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19597 {
19598 tree enttype = TREE_TYPE (type);
19599 tree domain = TYPE_DOMAIN (type);
19600 scalar_int_mode mode;
19601
19602 if (is_int_mode (TYPE_MODE (enttype), &mode)
19603 && GET_MODE_SIZE (mode) == 1
19604 && domain
19605 && TYPE_MAX_VALUE (domain)
19606 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19607 && integer_zerop (TYPE_MIN_VALUE (domain))
19608 && compare_tree_int (TYPE_MAX_VALUE (domain),
19609 TREE_STRING_LENGTH (init) - 1) == 0
19610 && ((size_t) TREE_STRING_LENGTH (init)
19611 == strlen (TREE_STRING_POINTER (init)) + 1))
19612 {
19613 rtl = gen_rtx_CONST_STRING (VOIDmode,
19614 ggc_strdup (TREE_STRING_POINTER (init)));
19615 rtl = gen_rtx_MEM (BLKmode, rtl);
19616 MEM_READONLY_P (rtl) = 1;
19617 }
19618 }
19619 /* Other aggregates, and complex values, could be represented using
19620 CONCAT: FIXME! */
19621 else if (AGGREGATE_TYPE_P (type)
19622 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19623 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19624 || TREE_CODE (type) == COMPLEX_TYPE)
19625 ;
19626 /* Vectors only work if their mode is supported by the target.
19627 FIXME: generic vectors ought to work too. */
19628 else if (TREE_CODE (type) == VECTOR_TYPE
19629 && !VECTOR_MODE_P (TYPE_MODE (type)))
19630 ;
19631 /* If the initializer is something that we know will expand into an
19632 immediate RTL constant, expand it now. We must be careful not to
19633 reference variables which won't be output. */
19634 else if (initializer_constant_valid_p (init, type)
19635 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19636 {
19637 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19638 possible. */
19639 if (TREE_CODE (type) == VECTOR_TYPE)
19640 switch (TREE_CODE (init))
19641 {
19642 case VECTOR_CST:
19643 break;
19644 case CONSTRUCTOR:
19645 if (TREE_CONSTANT (init))
19646 {
19647 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19648 bool constant_p = true;
19649 tree value;
19650 unsigned HOST_WIDE_INT ix;
19651
19652 /* Even when ctor is constant, it might contain non-*_CST
19653 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19654 belong in VECTOR_CST nodes. */
19655 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19656 if (!CONSTANT_CLASS_P (value))
19657 {
19658 constant_p = false;
19659 break;
19660 }
19661
19662 if (constant_p)
19663 {
19664 init = build_vector_from_ctor (type, elts);
19665 break;
19666 }
19667 }
19668 /* FALLTHRU */
19669
19670 default:
19671 return NULL;
19672 }
19673
19674 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19675
19676 /* If expand_expr returns a MEM, it wasn't immediate. */
19677 gcc_assert (!rtl || !MEM_P (rtl));
19678 }
19679
19680 return rtl;
19681 }
19682
19683 /* Generate RTL for the variable DECL to represent its location. */
19684
19685 static rtx
19686 rtl_for_decl_location (tree decl)
19687 {
19688 rtx rtl;
19689
19690 /* Here we have to decide where we are going to say the parameter "lives"
19691 (as far as the debugger is concerned). We only have a couple of
19692 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19693
19694 DECL_RTL normally indicates where the parameter lives during most of the
19695 activation of the function. If optimization is enabled however, this
19696 could be either NULL or else a pseudo-reg. Both of those cases indicate
19697 that the parameter doesn't really live anywhere (as far as the code
19698 generation parts of GCC are concerned) during most of the function's
19699 activation. That will happen (for example) if the parameter is never
19700 referenced within the function.
19701
19702 We could just generate a location descriptor here for all non-NULL
19703 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19704 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19705 where DECL_RTL is NULL or is a pseudo-reg.
19706
19707 Note however that we can only get away with using DECL_INCOMING_RTL as
19708 a backup substitute for DECL_RTL in certain limited cases. In cases
19709 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19710 we can be sure that the parameter was passed using the same type as it is
19711 declared to have within the function, and that its DECL_INCOMING_RTL
19712 points us to a place where a value of that type is passed.
19713
19714 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19715 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19716 because in these cases DECL_INCOMING_RTL points us to a value of some
19717 type which is *different* from the type of the parameter itself. Thus,
19718 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19719 such cases, the debugger would end up (for example) trying to fetch a
19720 `float' from a place which actually contains the first part of a
19721 `double'. That would lead to really incorrect and confusing
19722 output at debug-time.
19723
19724 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19725 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19726 are a couple of exceptions however. On little-endian machines we can
19727 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19728 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19729 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19730 when (on a little-endian machine) a non-prototyped function has a
19731 parameter declared to be of type `short' or `char'. In such cases,
19732 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19733 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19734 passed `int' value. If the debugger then uses that address to fetch
19735 a `short' or a `char' (on a little-endian machine) the result will be
19736 the correct data, so we allow for such exceptional cases below.
19737
19738 Note that our goal here is to describe the place where the given formal
19739 parameter lives during most of the function's activation (i.e. between the
19740 end of the prologue and the start of the epilogue). We'll do that as best
19741 as we can. Note however that if the given formal parameter is modified
19742 sometime during the execution of the function, then a stack backtrace (at
19743 debug-time) will show the function as having been called with the *new*
19744 value rather than the value which was originally passed in. This happens
19745 rarely enough that it is not a major problem, but it *is* a problem, and
19746 I'd like to fix it.
19747
19748 A future version of dwarf2out.c may generate two additional attributes for
19749 any given DW_TAG_formal_parameter DIE which will describe the "passed
19750 type" and the "passed location" for the given formal parameter in addition
19751 to the attributes we now generate to indicate the "declared type" and the
19752 "active location" for each parameter. This additional set of attributes
19753 could be used by debuggers for stack backtraces. Separately, note that
19754 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19755 This happens (for example) for inlined-instances of inline function formal
19756 parameters which are never referenced. This really shouldn't be
19757 happening. All PARM_DECL nodes should get valid non-NULL
19758 DECL_INCOMING_RTL values. FIXME. */
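/* A concrete instance of the little-endian exception above (hypothetical
   example):

     int f (c) char c; { return c; }

   Here TREE_TYPE (c) is `char' while DECL_ARG_TYPE (c) is `int' because
   of default argument promotion; on a little-endian target the
   lowest-addressed byte of the passed `int' holds exactly the `char'
   value, so DECL_INCOMING_RTL is still a usable backup location.  */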
19759
19760 /* Use DECL_RTL as the "location" unless we find something better. */
19761 rtl = DECL_RTL_IF_SET (decl);
19762
19763 /* When generating abstract instances, ignore everything except
19764 constants, symbols living in memory, and symbols living in
19765 fixed registers. */
19766 if (! reload_completed)
19767 {
19768 if (rtl
19769 && (CONSTANT_P (rtl)
19770 || (MEM_P (rtl)
19771 && CONSTANT_P (XEXP (rtl, 0)))
19772 || (REG_P (rtl)
19773 && VAR_P (decl)
19774 && TREE_STATIC (decl))))
19775 {
19776 rtl = targetm.delegitimize_address (rtl);
19777 return rtl;
19778 }
19779 rtl = NULL_RTX;
19780 }
19781 else if (TREE_CODE (decl) == PARM_DECL)
19782 {
19783 if (rtl == NULL_RTX
19784 || is_pseudo_reg (rtl)
19785 || (MEM_P (rtl)
19786 && is_pseudo_reg (XEXP (rtl, 0))
19787 && DECL_INCOMING_RTL (decl)
19788 && MEM_P (DECL_INCOMING_RTL (decl))
19789 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19790 {
19791 tree declared_type = TREE_TYPE (decl);
19792 tree passed_type = DECL_ARG_TYPE (decl);
19793 machine_mode dmode = TYPE_MODE (declared_type);
19794 machine_mode pmode = TYPE_MODE (passed_type);
19795
19796 /* This decl represents a formal parameter which was optimized out.
19797 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19798 all cases where (rtl == NULL_RTX) just below. */
19799 if (dmode == pmode)
19800 rtl = DECL_INCOMING_RTL (decl);
19801 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19802 && SCALAR_INT_MODE_P (dmode)
19803 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19804 && DECL_INCOMING_RTL (decl))
19805 {
19806 rtx inc = DECL_INCOMING_RTL (decl);
19807 if (REG_P (inc))
19808 rtl = inc;
19809 else if (MEM_P (inc))
19810 {
19811 if (BYTES_BIG_ENDIAN)
19812 rtl = adjust_address_nv (inc, dmode,
19813 GET_MODE_SIZE (pmode)
19814 - GET_MODE_SIZE (dmode));
19815 else
19816 rtl = inc;
19817 }
19818 }
19819 }
19820
19821 /* If the parm was passed in registers, but lives on the stack, then
19822 make a big endian correction if the mode of the type of the
19823 parameter is not the same as the mode of the rtl. */
19824 /* ??? This is the same series of checks that are made in dbxout.c before
19825 we reach the big endian correction code there. It isn't clear if all
19826 of these checks are necessary here, but keeping them all is the safe
19827 thing to do. */
19828 else if (MEM_P (rtl)
19829 && XEXP (rtl, 0) != const0_rtx
19830 && ! CONSTANT_P (XEXP (rtl, 0))
19831 /* Not passed in memory. */
19832 && !MEM_P (DECL_INCOMING_RTL (decl))
19833 /* Not passed by invisible reference. */
19834 && (!REG_P (XEXP (rtl, 0))
19835 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19836 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19837 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19838 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19839 #endif
19840 )
19841 /* Big endian correction check. */
19842 && BYTES_BIG_ENDIAN
19843 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19844 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19845 UNITS_PER_WORD))
19846 {
19847 machine_mode addr_mode = get_address_mode (rtl);
19848 poly_int64 offset = (UNITS_PER_WORD
19849 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19850
19851 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19852 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19853 }
19854 }
19855 else if (VAR_P (decl)
19856 && rtl
19857 && MEM_P (rtl)
19858 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19859 {
19860 machine_mode addr_mode = get_address_mode (rtl);
19861 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19862 GET_MODE (rtl));
19863
19864 /* If a variable is declared "register" yet is smaller than
19865 a register, then if we store the variable to memory, it
19866 looks like we're storing a register-sized value, when in
19867 fact we are not. We need to adjust the offset of the
19868 storage location to reflect the actual value's bytes,
19869 else gdb will not be able to display it. */
19870 if (maybe_ne (offset, 0))
19871 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19872 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19873 }
19874
19875 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19876 and will have been substituted directly into all expressions that use it.
19877 C does not have such a concept, but C++ and other languages do. */
19878 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19879 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19880
19881 if (rtl)
19882 rtl = targetm.delegitimize_address (rtl);
19883
19884 /* If we don't look past the constant pool, we risk emitting a
19885 reference to a constant pool entry that isn't referenced from
19886 code, and thus is not emitted. */
19887 if (rtl)
19888 rtl = avoid_constant_pool_reference (rtl);
19889
19890 /* Try harder to get a rtl. If this symbol ends up not being emitted
19891 in the current CU, resolve_addr will remove the expression referencing
19892 it. */
19893 if (rtl == NULL_RTX
19894 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19895 && VAR_P (decl)
19896 && !DECL_EXTERNAL (decl)
19897 && TREE_STATIC (decl)
19898 && DECL_NAME (decl)
19899 && !DECL_HARD_REGISTER (decl)
19900 && DECL_MODE (decl) != VOIDmode)
19901 {
19902 rtl = make_decl_rtl_for_debug (decl);
19903 if (!MEM_P (rtl)
19904 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19905 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19906 rtl = NULL_RTX;
19907 }
19908
19909 return rtl;
19910 }
19911
19912 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19913 returned. If so, the decl for the COMMON block is returned, and the
19914 value is the offset into the common block for the symbol. */
19915
19916 static tree
19917 fortran_common (tree decl, HOST_WIDE_INT *value)
19918 {
19919 tree val_expr, cvar;
19920 machine_mode mode;
19921 poly_int64 bitsize, bitpos;
19922 tree offset;
19923 HOST_WIDE_INT cbitpos;
19924 int unsignedp, reversep, volatilep = 0;
19925
19926 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19927 it does not have a value (the offset into the common area), or if it
19928 is thread local (as opposed to global) then it isn't common, and shouldn't
19929 be handled as such. */
19930 if (!VAR_P (decl)
19931 || !TREE_STATIC (decl)
19932 || !DECL_HAS_VALUE_EXPR_P (decl)
19933 || !is_fortran ())
19934 return NULL_TREE;
19935
19936 val_expr = DECL_VALUE_EXPR (decl);
19937 if (TREE_CODE (val_expr) != COMPONENT_REF)
19938 return NULL_TREE;
19939
19940 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19941 &unsignedp, &reversep, &volatilep);
19942
19943 if (cvar == NULL_TREE
19944 || !VAR_P (cvar)
19945 || DECL_ARTIFICIAL (cvar)
19946 || !TREE_PUBLIC (cvar)
19947 /* We don't expect to have to cope with variable offsets,
19948 since at present all static data must have a constant size. */
19949 || !bitpos.is_constant (&cbitpos))
19950 return NULL_TREE;
19951
19952 *value = 0;
19953 if (offset != NULL)
19954 {
19955 if (!tree_fits_shwi_p (offset))
19956 return NULL_TREE;
19957 *value = tree_to_shwi (offset);
19958 }
19959 if (cbitpos != 0)
19960 *value += cbitpos / BITS_PER_UNIT;
19961
19962 return cvar;
19963 }
19964
19965 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19966 data attribute for a variable or a parameter. We generate the
19967 DW_AT_const_value attribute only in those cases where the given variable
19968 or parameter does not have a true "location" either in memory or in a
19969 register. This can happen (for example) when a constant is passed as an
19970 actual argument in a call to an inline function. (It's possible that
19971 these things can crop up in other ways also.) Note that one type of
19972 constant value which can be passed into an inlined function is a constant
19973 pointer. This can happen for example if an actual argument in an inlined
19974 function call evaluates to a compile-time constant address.
19975
19976 CACHE_P is true if it is worth caching the location list for DECL,
19977 so that future calls can reuse it rather than regenerate it from scratch.
19978 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19979 since we will need to refer to them each time the function is inlined. */
19980
19981 static bool
19982 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19983 {
19984 rtx rtl;
19985 dw_loc_list_ref list;
19986 var_loc_list *loc_list;
19987 cached_dw_loc_list *cache;
19988
19989 if (early_dwarf)
19990 return false;
19991
19992 if (TREE_CODE (decl) == ERROR_MARK)
19993 return false;
19994
19995 if (get_AT (die, DW_AT_location)
19996 || get_AT (die, DW_AT_const_value))
19997 return true;
19998
19999 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20000 || TREE_CODE (decl) == RESULT_DECL);
20001
20002 /* Try to get some constant RTL for this decl, and use that as the value of
20003 the location. */
20004
20005 rtl = rtl_for_decl_location (decl);
20006 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20007 && add_const_value_attribute (die, rtl))
20008 return true;
20009
20010 /* See if we have a single element location list that is equivalent to
20011 a constant value. In that case it is better to use add_const_value_attribute
20012 rather than expanding the constant value equivalent. */
20013 loc_list = lookup_decl_loc (decl);
20014 if (loc_list
20015 && loc_list->first
20016 && loc_list->first->next == NULL
20017 && NOTE_P (loc_list->first->loc)
20018 && NOTE_VAR_LOCATION (loc_list->first->loc)
20019 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20020 {
20021 struct var_loc_node *node;
20022
20023 node = loc_list->first;
20024 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20025 if (GET_CODE (rtl) == EXPR_LIST)
20026 rtl = XEXP (rtl, 0);
20027 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20028 && add_const_value_attribute (die, rtl))
20029 return true;
20030 }
20031 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20032 list several times. See if we've already cached the contents. */
20033 list = NULL;
20034 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20035 cache_p = false;
20036 if (cache_p)
20037 {
20038 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20039 if (cache)
20040 list = cache->loc_list;
20041 }
20042 if (list == NULL)
20043 {
20044 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20045 NULL);
20046 /* It is usually worth caching this result if the decl is from
20047 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20048 if (cache_p && list && list->dw_loc_next)
20049 {
20050 cached_dw_loc_list **slot
20051 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20052 DECL_UID (decl),
20053 INSERT);
20054 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20055 cache->decl_id = DECL_UID (decl);
20056 cache->loc_list = list;
20057 *slot = cache;
20058 }
20059 }
20060 if (list)
20061 {
20062 add_AT_location_description (die, DW_AT_location, list);
20063 return true;
20064 }
20065 /* None of that worked, so it must not really have a location;
20066 try adding a constant value attribute from the DECL_INITIAL. */
20067 return tree_add_const_value_attribute_for_decl (die, decl);
20068 }
20069
20070 /* Helper function for tree_add_const_value_attribute. Natively encode
20071 initializer INIT into an array. Return true if successful. */
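/* Illustration only (hypothetical initializers): for

     static const char msg[8] = "hi";

   the buffer receives 'h', 'i' and six zero bytes, and for

     static const short v[3] = { 1, 2, 3 };

   each element is encoded at index * sizeof (short) in the target's
   byte order.  */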
20072
20073 static bool
20074 native_encode_initializer (tree init, unsigned char *array, int size)
20075 {
20076 tree type;
20077
20078 if (init == NULL_TREE)
20079 return false;
20080
20081 STRIP_NOPS (init);
20082 switch (TREE_CODE (init))
20083 {
20084 case STRING_CST:
20085 type = TREE_TYPE (init);
20086 if (TREE_CODE (type) == ARRAY_TYPE)
20087 {
20088 tree enttype = TREE_TYPE (type);
20089 scalar_int_mode mode;
20090
20091 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20092 || GET_MODE_SIZE (mode) != 1)
20093 return false;
20094 if (int_size_in_bytes (type) != size)
20095 return false;
20096 if (size > TREE_STRING_LENGTH (init))
20097 {
20098 memcpy (array, TREE_STRING_POINTER (init),
20099 TREE_STRING_LENGTH (init));
20100 memset (array + TREE_STRING_LENGTH (init),
20101 '\0', size - TREE_STRING_LENGTH (init));
20102 }
20103 else
20104 memcpy (array, TREE_STRING_POINTER (init), size);
20105 return true;
20106 }
20107 return false;
20108 case CONSTRUCTOR:
20109 type = TREE_TYPE (init);
20110 if (int_size_in_bytes (type) != size)
20111 return false;
20112 if (TREE_CODE (type) == ARRAY_TYPE)
20113 {
20114 HOST_WIDE_INT min_index;
20115 unsigned HOST_WIDE_INT cnt;
20116 int curpos = 0, fieldsize;
20117 constructor_elt *ce;
20118
20119 if (TYPE_DOMAIN (type) == NULL_TREE
20120 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20121 return false;
20122
20123 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20124 if (fieldsize <= 0)
20125 return false;
20126
20127 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20128 memset (array, '\0', size);
20129 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20130 {
20131 tree val = ce->value;
20132 tree index = ce->index;
20133 int pos = curpos;
20134 if (index && TREE_CODE (index) == RANGE_EXPR)
20135 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20136 * fieldsize;
20137 else if (index)
20138 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20139
20140 if (val)
20141 {
20142 STRIP_NOPS (val);
20143 if (!native_encode_initializer (val, array + pos, fieldsize))
20144 return false;
20145 }
20146 curpos = pos + fieldsize;
20147 if (index && TREE_CODE (index) == RANGE_EXPR)
20148 {
20149 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20150 - tree_to_shwi (TREE_OPERAND (index, 0));
20151 while (count-- > 0)
20152 {
20153 if (val)
20154 memcpy (array + curpos, array + pos, fieldsize);
20155 curpos += fieldsize;
20156 }
20157 }
20158 gcc_assert (curpos <= size);
20159 }
20160 return true;
20161 }
20162 else if (TREE_CODE (type) == RECORD_TYPE
20163 || TREE_CODE (type) == UNION_TYPE)
20164 {
20165 tree field = NULL_TREE;
20166 unsigned HOST_WIDE_INT cnt;
20167 constructor_elt *ce;
20168
20169 if (int_size_in_bytes (type) != size)
20170 return false;
20171
20172 if (TREE_CODE (type) == RECORD_TYPE)
20173 field = TYPE_FIELDS (type);
20174
20175 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20176 {
20177 tree val = ce->value;
20178 int pos, fieldsize;
20179
20180 if (ce->index != 0)
20181 field = ce->index;
20182
20183 if (val)
20184 STRIP_NOPS (val);
20185
20186 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20187 return false;
20188
20189 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20190 && TYPE_DOMAIN (TREE_TYPE (field))
20191 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20192 return false;
20193 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20194 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20195 return false;
20196 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20197 pos = int_byte_position (field);
20198 gcc_assert (pos + fieldsize <= size);
20199 if (val && fieldsize != 0
20200 && !native_encode_initializer (val, array + pos, fieldsize))
20201 return false;
20202 }
20203 return true;
20204 }
20205 return false;
20206 case VIEW_CONVERT_EXPR:
20207 case NON_LVALUE_EXPR:
20208 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20209 default:
20210 return native_encode_expr (init, array, size) == size;
20211 }
20212 }
20213
20214 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20215 attribute is the const value T. */
20216
20217 static bool
20218 tree_add_const_value_attribute (dw_die_ref die, tree t)
20219 {
20220 tree init;
20221 tree type = TREE_TYPE (t);
20222 rtx rtl;
20223
20224 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20225 return false;
20226
20227 init = t;
20228 gcc_assert (!DECL_P (init));
20229
20230 if (TREE_CODE (init) == INTEGER_CST)
20231 {
20232 if (tree_fits_uhwi_p (init))
20233 {
20234 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20235 return true;
20236 }
20237 if (tree_fits_shwi_p (init))
20238 {
20239 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20240 return true;
20241 }
20242 }
20243 if (! early_dwarf)
20244 {
20245 rtl = rtl_for_decl_init (init, type);
20246 if (rtl)
20247 return add_const_value_attribute (die, rtl);
20248 }
20249 /* If the host and target are sane, try harder. */
20250 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20251 && initializer_constant_valid_p (init, type))
20252 {
20253 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20254 if (size > 0 && (int) size == size)
20255 {
20256 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20257
20258 if (native_encode_initializer (init, array, size))
20259 {
20260 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20261 return true;
20262 }
20263 ggc_free (array);
20264 }
20265 }
20266 return false;
20267 }
20268
20269 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20270 attribute is the const value of T, where T is an integral constant
20271 variable with static storage duration
20272 (so it can't be a PARM_DECL or a RESULT_DECL). */
20273
20274 static bool
20275 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20276 {
20277
20278 if (!decl
20279 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20280 || (VAR_P (decl) && !TREE_STATIC (decl)))
20281 return false;
20282
20283 if (TREE_READONLY (decl)
20284 && ! TREE_THIS_VOLATILE (decl)
20285 && DECL_INITIAL (decl))
20286 /* OK */;
20287 else
20288 return false;
20289
20290 /* Don't add DW_AT_const_value if abstract origin already has one. */
20291 if (get_AT (var_die, DW_AT_const_value))
20292 return false;
20293
20294 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20295 }
20296
20297 /* Convert the CFI instructions for the current function into a
20298 location list. This is used for DW_AT_frame_base when we are targeting
20299 a dwarf2 consumer that does not support the dwarf3
20300 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20301 expressions. */
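/* Sketch of the result (shape only): a location list whose entries cover
   consecutive ranges of the function, each pairing a [start_label,
   end_label) range with an expression that evaluates to that range's
   CFA plus OFFSET; a new entry is started whenever the CFI stream
   changes the CFA rule.  */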
20302
20303 static dw_loc_list_ref
20304 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20305 {
20306 int ix;
20307 dw_fde_ref fde;
20308 dw_loc_list_ref list, *list_tail;
20309 dw_cfi_ref cfi;
20310 dw_cfa_location last_cfa, next_cfa;
20311 const char *start_label, *last_label, *section;
20312 dw_cfa_location remember;
20313
20314 fde = cfun->fde;
20315 gcc_assert (fde != NULL);
20316
20317 section = secname_for_decl (current_function_decl);
20318 list_tail = &list;
20319 list = NULL;
20320
20321 memset (&next_cfa, 0, sizeof (next_cfa));
20322 next_cfa.reg = INVALID_REGNUM;
20323 remember = next_cfa;
20324
20325 start_label = fde->dw_fde_begin;
20326
20327 /* ??? Bald assumption that the CIE opcode list does not contain
20328 advance opcodes. */
20329 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20330 lookup_cfa_1 (cfi, &next_cfa, &remember);
20331
20332 last_cfa = next_cfa;
20333 last_label = start_label;
20334
20335 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20336 {
20337 /* If the first partition contained no CFI adjustments, the
20338 CIE opcodes apply to the whole first partition. */
20339 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20340 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20341 list_tail = &(*list_tail)->dw_loc_next;
20342 start_label = last_label = fde->dw_fde_second_begin;
20343 }
20344
20345 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20346 {
20347 switch (cfi->dw_cfi_opc)
20348 {
20349 case DW_CFA_set_loc:
20350 case DW_CFA_advance_loc1:
20351 case DW_CFA_advance_loc2:
20352 case DW_CFA_advance_loc4:
20353 if (!cfa_equal_p (&last_cfa, &next_cfa))
20354 {
20355 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20356 start_label, 0, last_label, 0, section);
20357
20358 list_tail = &(*list_tail)->dw_loc_next;
20359 last_cfa = next_cfa;
20360 start_label = last_label;
20361 }
20362 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20363 break;
20364
20365 case DW_CFA_advance_loc:
20366 /* The encoding is complex enough that we should never emit this. */
20367 gcc_unreachable ();
20368
20369 default:
20370 lookup_cfa_1 (cfi, &next_cfa, &remember);
20371 break;
20372 }
20373 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20374 {
20375 if (!cfa_equal_p (&last_cfa, &next_cfa))
20376 {
20377 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20378 start_label, 0, last_label, 0, section);
20379
20380 list_tail = &(*list_tail)->dw_loc_next;
20381 last_cfa = next_cfa;
20382 start_label = last_label;
20383 }
20384 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20385 start_label, 0, fde->dw_fde_end, 0, section);
20386 list_tail = &(*list_tail)->dw_loc_next;
20387 start_label = last_label = fde->dw_fde_second_begin;
20388 }
20389 }
20390
20391 if (!cfa_equal_p (&last_cfa, &next_cfa))
20392 {
20393 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20394 start_label, 0, last_label, 0, section);
20395 list_tail = &(*list_tail)->dw_loc_next;
20396 start_label = last_label;
20397 }
20398
20399 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20400 start_label, 0,
20401 fde->dw_fde_second_begin
20402 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20403 section);
20404
20405 maybe_gen_llsym (list);
20406
20407 return list;
20408 }
20409
20410 /* Compute a displacement from the "steady-state frame pointer" to the
20411 frame base (often the same as the CFA), and store it in
20412 frame_pointer_fb_offset. OFFSET is added to the displacement
20413 before the latter is negated. */
20414
20415 static void
20416 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20417 {
20418 rtx reg, elim;
20419
20420 #ifdef FRAME_POINTER_CFA_OFFSET
20421 reg = frame_pointer_rtx;
20422 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20423 #else
20424 reg = arg_pointer_rtx;
20425 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20426 #endif
20427
20428 elim = (ira_use_lra_p
20429 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20430 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20431 elim = strip_offset_and_add (elim, &offset);
20432
20433 frame_pointer_fb_offset = -offset;
20434
20435 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20436 in which to eliminate. This is because its stack pointer isn't
20437 directly accessible as a register within the ISA. To work around
20438 this, assume that while we cannot provide a proper value for
20439 frame_pointer_fb_offset, we won't need one either. We can use
20440 the hard frame pointer in debug info even if the frame pointer isn't used,
20441 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20442 which uses the DW_AT_frame_base attribute, not the hard frame pointer
20443 directly. */
20444 frame_pointer_fb_offset_valid
20445 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20446 }
20447
20448 /* Generate a DW_AT_name attribute given some string value to be included as
20449 the value of the attribute. */
20450
20451 static void
20452 add_name_attribute (dw_die_ref die, const char *name_string)
20453 {
20454 if (name_string != NULL && *name_string != 0)
20455 {
20456 if (demangle_name_func)
20457 name_string = (*demangle_name_func) (name_string);
20458
20459 add_AT_string (die, DW_AT_name, name_string);
20460 }
20461 }
20462
20463 /* Generate a DW_AT_description attribute given some string value to be included
20464 as the value of the attribute. */
20465
20466 static void
20467 add_desc_attribute (dw_die_ref die, const char *name_string)
20468 {
20469 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20470 return;
20471
20472 if (name_string == NULL || *name_string == 0)
20473 return;
20474
20475 if (demangle_name_func)
20476 name_string = (*demangle_name_func) (name_string);
20477
20478 add_AT_string (die, DW_AT_description, name_string);
20479 }
20480
20481 /* Generate a DW_AT_description attribute given some decl to be included
20482 as the value of the attribute. */
20483
20484 static void
20485 add_desc_attribute (dw_die_ref die, tree decl)
20486 {
20487 tree decl_name;
20488
20489 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20490 return;
20491
20492 if (decl == NULL_TREE || !DECL_P (decl))
20493 return;
20494 decl_name = DECL_NAME (decl);
20495
20496 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20497 {
20498 const char *name = dwarf2_name (decl, 0);
20499 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20500 }
20501 else
20502 {
20503 char *desc = print_generic_expr_to_str (decl);
20504 add_desc_attribute (die, desc);
20505 free (desc);
20506 }
20507 }
20508
20509 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20510 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20511 of TYPE accordingly.
20512
20513 ??? This is a temporary measure until after we're able to generate
20514 regular DWARF for the complex Ada type system. */
20515
20516 static void
20517 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20518 dw_die_ref context_die)
20519 {
20520 tree dtype;
20521 dw_die_ref dtype_die;
20522
20523 if (!lang_hooks.types.descriptive_type)
20524 return;
20525
20526 dtype = lang_hooks.types.descriptive_type (type);
20527 if (!dtype)
20528 return;
20529
20530 dtype_die = lookup_type_die (dtype);
20531 if (!dtype_die)
20532 {
20533 gen_type_die (dtype, context_die);
20534 dtype_die = lookup_type_die (dtype);
20535 gcc_assert (dtype_die);
20536 }
20537
20538 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20539 }
20540
20541 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20542
20543 static const char *
20544 comp_dir_string (void)
20545 {
20546 const char *wd;
20547 char *wd1;
20548 static const char *cached_wd = NULL;
20549
20550 if (cached_wd != NULL)
20551 return cached_wd;
20552
20553 wd = get_src_pwd ();
20554 if (wd == NULL)
20555 return NULL;
20556
20557 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20558 {
20559 int wdlen;
20560
20561 wdlen = strlen (wd);
20562 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20563 strcpy (wd1, wd);
20564 wd1 [wdlen] = DIR_SEPARATOR;
20565 wd1 [wdlen + 1] = 0;
20566 wd = wd1;
20567 }
20568
20569 cached_wd = remap_debug_filename (wd);
20570 return cached_wd;
20571 }
20572
20573 /* Generate a DW_AT_comp_dir attribute for DIE. */
20574
20575 static void
20576 add_comp_dir_attribute (dw_die_ref die)
20577 {
20578 const char * wd = comp_dir_string ();
20579 if (wd != NULL)
20580 add_AT_string (die, DW_AT_comp_dir, wd);
20581 }
20582
20583 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20584 pointer computation, ...), output a representation for that bound according
20585 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20586 loc_list_from_tree for the meaning of CONTEXT. */
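/* Rough illustration of the three forms (hypothetical Ada-like case):
   a literal bound such as 10 is emitted as a plain constant
   (dw_scalar_form_constant); a bound that is a record discriminant is
   emitted as a reference to that field's DIE (dw_scalar_form_reference);
   and a computed bound falls back to a DWARF expression
   (dw_scalar_form_exprloc), or, failing that, to a reference to an
   artificial variable carrying the location list.  */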
20587
20588 static void
20589 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20590 int forms, struct loc_descr_context *context)
20591 {
20592 dw_die_ref context_die, decl_die = NULL;
20593 dw_loc_list_ref list;
20594 bool strip_conversions = true;
20595 bool placeholder_seen = false;
20596
20597 while (strip_conversions)
20598 switch (TREE_CODE (value))
20599 {
20600 case ERROR_MARK:
20601 case SAVE_EXPR:
20602 return;
20603
20604 CASE_CONVERT:
20605 case VIEW_CONVERT_EXPR:
20606 value = TREE_OPERAND (value, 0);
20607 break;
20608
20609 default:
20610 strip_conversions = false;
20611 break;
20612 }
20613
20614 /* If possible and permitted, output the attribute as a constant. */
20615 if ((forms & dw_scalar_form_constant) != 0
20616 && TREE_CODE (value) == INTEGER_CST)
20617 {
20618 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20619
20620 /* If HOST_WIDE_INT is big enough then represent the bound as
20621 a constant value. We need to choose a form based on
20622 whether the type is signed or unsigned. We cannot just
20623 call add_AT_unsigned if the value itself is positive
20624 (add_AT_unsigned might add the unsigned value encoded as
20625 DW_FORM_data[1248]). Some DWARF consumers will look up the
20626 bounds type and then sign extend any unsigned values found
20627 for signed types. This is needed only for
20628 DW_AT_{lower,upper}_bound, since for most other attributes,
20629 consumers will treat DW_FORM_data[1248] as unsigned values,
20630 regardless of the underlying type. */
20631 if (prec <= HOST_BITS_PER_WIDE_INT
20632 || tree_fits_uhwi_p (value))
20633 {
20634 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20635 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20636 else
20637 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20638 }
20639 else
20640 /* Otherwise represent the bound as an unsigned value with
20641 the precision of its type. The precision and signedness
20642 of the type will be necessary to re-interpret it
20643 unambiguously. */
20644 add_AT_wide (die, attr, wi::to_wide (value));
20645 return;
20646 }
20647
20648 /* Otherwise, if it's possible and permitted too, output a reference to
20649 another DIE. */
20650 if ((forms & dw_scalar_form_reference) != 0)
20651 {
20652 tree decl = NULL_TREE;
20653
20654 /* Some type attributes reference an outer type. For instance, the upper
20655 bound of an array may reference an embedding record (this happens in
20656 Ada). */
20657 if (TREE_CODE (value) == COMPONENT_REF
20658 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20659 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20660 decl = TREE_OPERAND (value, 1);
20661
20662 else if (VAR_P (value)
20663 || TREE_CODE (value) == PARM_DECL
20664 || TREE_CODE (value) == RESULT_DECL)
20665 decl = value;
20666
20667 if (decl != NULL_TREE)
20668 {
20669 decl_die = lookup_decl_die (decl);
20670
20671 /* ??? Can this happen, or should the variable have been bound
20672 first? Probably it can, since I imagine that we try to create
20673 the types of parameters in the order in which they exist in
20674 the list, and won't have created a forward reference to a
20675 later parameter. */
20676 if (decl_die != NULL)
20677 {
20678 if (get_AT (decl_die, DW_AT_location)
20679 || get_AT (decl_die, DW_AT_const_value))
20680 {
20681 add_AT_die_ref (die, attr, decl_die);
20682 return;
20683 }
20684 }
20685 }
20686 }
20687
20688 /* Last chance: try to create a stack operation procedure to evaluate the
20689 value. Do nothing if even that is not possible or permitted. */
20690 if ((forms & dw_scalar_form_exprloc) == 0)
20691 return;
20692
20693 list = loc_list_from_tree (value, 2, context);
20694 if (context && context->placeholder_arg)
20695 {
20696 placeholder_seen = context->placeholder_seen;
20697 context->placeholder_seen = false;
20698 }
20699 if (list == NULL || single_element_loc_list_p (list))
20700 {
20701 /* If this attribute is neither a reference nor a constant, it is
20702 a DWARF expression rather than a location description. For that
20703 loc_list_from_tree (value, 0, &context) is needed. */
20704 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20705 if (list2 && single_element_loc_list_p (list2))
20706 {
20707 if (placeholder_seen)
20708 {
20709 struct dwarf_procedure_info dpi;
20710 dpi.fndecl = NULL_TREE;
20711 dpi.args_count = 1;
20712 if (!resolve_args_picking (list2->expr, 1, &dpi))
20713 return;
20714 }
20715 add_AT_loc (die, attr, list2->expr);
20716 return;
20717 }
20718 }
20719
20720 /* If that failed to give a single element location list, fall back to
20721 outputting this as a reference... still if permitted. */
20722 if (list == NULL
20723 || (forms & dw_scalar_form_reference) == 0
20724 || placeholder_seen)
20725 return;
20726
20727 if (!decl_die)
20728 {
20729 if (current_function_decl == 0)
20730 context_die = comp_unit_die ();
20731 else
20732 context_die = lookup_decl_die (current_function_decl);
20733
20734 decl_die = new_die (DW_TAG_variable, context_die, value);
20735 add_AT_flag (decl_die, DW_AT_artificial, 1);
20736 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20737 context_die);
20738 }
20739
20740 add_AT_location_description (decl_die, DW_AT_location, list);
20741 add_AT_die_ref (die, attr, decl_die);
20742 }
20743
20744 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20745 default. */
20746
20747 static int
20748 lower_bound_default (void)
20749 {
20750 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20751 {
20752 case DW_LANG_C:
20753 case DW_LANG_C89:
20754 case DW_LANG_C99:
20755 case DW_LANG_C11:
20756 case DW_LANG_C_plus_plus:
20757 case DW_LANG_C_plus_plus_11:
20758 case DW_LANG_C_plus_plus_14:
20759 case DW_LANG_ObjC:
20760 case DW_LANG_ObjC_plus_plus:
20761 return 0;
20762 case DW_LANG_Fortran77:
20763 case DW_LANG_Fortran90:
20764 case DW_LANG_Fortran95:
20765 case DW_LANG_Fortran03:
20766 case DW_LANG_Fortran08:
20767 return 1;
20768 case DW_LANG_UPC:
20769 case DW_LANG_D:
20770 case DW_LANG_Python:
20771 return dwarf_version >= 4 ? 0 : -1;
20772 case DW_LANG_Ada95:
20773 case DW_LANG_Ada83:
20774 case DW_LANG_Cobol74:
20775 case DW_LANG_Cobol85:
20776 case DW_LANG_Modula2:
20777 case DW_LANG_PLI:
20778 return dwarf_version >= 4 ? 1 : -1;
20779 default:
20780 return -1;
20781 }
20782 }
20783
20784 /* Given a tree node describing an array bound (either lower or upper) output
20785 a representation for that bound. */
20786
20787 static void
20788 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20789 tree bound, struct loc_descr_context *context)
20790 {
20791 int dflt;
20792
20793 while (1)
20794 switch (TREE_CODE (bound))
20795 {
20796 /* Strip all conversions. */
20797 CASE_CONVERT:
20798 case VIEW_CONVERT_EXPR:
20799 bound = TREE_OPERAND (bound, 0);
20800 break;
20801
20802 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20803 are even omitted when they are the default. */
20804 case INTEGER_CST:
20805 /* If the value for this bound is the default one, we can even omit the
20806 attribute. */
20807 if (bound_attr == DW_AT_lower_bound
20808 && tree_fits_shwi_p (bound)
20809 && (dflt = lower_bound_default ()) != -1
20810 && tree_to_shwi (bound) == dflt)
20811 return;
20812
20813 /* FALLTHRU */
20814
20815 default:
20816 /* Because of the complex interactions there can be with other GNAT
20817 encodings, GDB isn't ready yet to handle a proper DWARF description
20818 for self-referential subrange bounds: let the GNAT encodings do the
20819 magic in such a case.
20820 if (is_ada ()
20821 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20822 && contains_placeholder_p (bound))
20823 return;
20824
20825 add_scalar_info (subrange_die, bound_attr, bound,
20826 dw_scalar_form_constant
20827 | dw_scalar_form_exprloc
20828 | dw_scalar_form_reference,
20829 context);
20830 return;
20831 }
20832 }
20833
20834 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20835 possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
20836 Note that the block of subscript information for an array type also
20837 includes information about the element type of the given array type.
20838
20839 This function reuses previously set type and bound information if
20840 available. */
20841
20842 static void
20843 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20844 {
20845 unsigned dimension_number;
20846 tree lower, upper;
20847 dw_die_ref child = type_die->die_child;
20848
20849 for (dimension_number = 0;
20850 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20851 type = TREE_TYPE (type), dimension_number++)
20852 {
20853 tree domain = TYPE_DOMAIN (type);
20854
20855 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20856 break;
20857
20858 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20859 and (in GNU C only) variable bounds. Handle all three forms
20860 here. */
20861
20862 /* Find and reuse a previously generated DW_TAG_subrange_type if
20863 available.
20864
20865 For multi-dimensional arrays, as we iterate through the
20866 various dimensions in the enclosing for loop above, we also
20867 iterate through the DIE children and pick up each
20868 DW_TAG_subrange_type previously generated (if available).
20869 Each child DW_TAG_subrange_type DIE describes the range of
20870 the current dimension. At this point we should have as many
20871 DW_TAG_subrange_type's as we have dimensions in the
20872 array. */
20873 dw_die_ref subrange_die = NULL;
20874 if (child)
20875 while (1)
20876 {
20877 child = child->die_sib;
20878 if (child->die_tag == DW_TAG_subrange_type)
20879 subrange_die = child;
20880 if (child == type_die->die_child)
20881 {
20882 /* If we wrapped around, stop looking next time. */
20883 child = NULL;
20884 break;
20885 }
20886 if (child->die_tag == DW_TAG_subrange_type)
20887 break;
20888 }
20889 if (!subrange_die)
20890 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20891
20892 if (domain)
20893 {
20894 /* We have an array type with specified bounds. */
20895 lower = TYPE_MIN_VALUE (domain);
20896 upper = TYPE_MAX_VALUE (domain);
20897
20898 /* Define the index type. */
20899 if (TREE_TYPE (domain)
20900 && !get_AT (subrange_die, DW_AT_type))
20901 {
20902 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20903 TREE_TYPE field. We can't emit debug info for this
20904 because it is an unnamed integral type. */
20905 if (TREE_CODE (domain) == INTEGER_TYPE
20906 && TYPE_NAME (domain) == NULL_TREE
20907 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20908 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20909 ;
20910 else
20911 add_type_attribute (subrange_die, TREE_TYPE (domain),
20912 TYPE_UNQUALIFIED, false, type_die);
20913 }
20914
20915 /* ??? If upper is NULL, the array has unspecified length,
20916 but it does have a lower bound. This happens with Fortran
20917 dimension arr(N:*)
20918 Since the debugger is definitely going to need to know N
20919 to produce useful results, go ahead and output the lower
20920 bound solo, and hope the debugger can cope. */
20921
20922 if (!get_AT (subrange_die, DW_AT_lower_bound))
20923 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20924 if (!get_AT (subrange_die, DW_AT_upper_bound)
20925 && !get_AT (subrange_die, DW_AT_count))
20926 {
20927 if (upper)
20928 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20929 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20930 /* Zero-length array. */
20931 add_bound_info (subrange_die, DW_AT_count,
20932 build_int_cst (TREE_TYPE (lower), 0), NULL);
20933 }
20934 }
20935
20936 /* Otherwise we have an array type with an unspecified length. The
20937 DWARF-2 spec does not say how to handle this; let's just leave out the
20938 bounds. */
20939 }
20940 }
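/* For example, with COLLAPSE_P set the C declaration

       int m[2][3];

   is described by a single array DIE carrying two DW_TAG_subrange_type
   children (upper bounds 1 and 2, the default lower bound 0 being
   omitted), whereas with COLLAPSE_P false (Ada) only the outermost
   dimension is described here and the element type remains a nested
   array type.  A GNU C zero-length array, e.g. "int tail[0];" at the
   end of a struct, has no upper bound in its domain and is instead
   given DW_AT_count 0, as handled above.  */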
20941
20942 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20943
20944 static void
20945 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20946 {
20947 dw_die_ref decl_die;
20948 HOST_WIDE_INT size;
20949 dw_loc_descr_ref size_expr = NULL;
20950
20951 switch (TREE_CODE (tree_node))
20952 {
20953 case ERROR_MARK:
20954 size = 0;
20955 break;
20956 case ENUMERAL_TYPE:
20957 case RECORD_TYPE:
20958 case UNION_TYPE:
20959 case QUAL_UNION_TYPE:
20960 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20961 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20962 {
20963 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20964 return;
20965 }
20966 size_expr = type_byte_size (tree_node, &size);
20967 break;
20968 case FIELD_DECL:
20969 /* For a data member of a struct or union, the DW_AT_byte_size is
20970 generally given as the number of bytes normally allocated for an
20971 object of the *declared* type of the member itself. This is true
20972 even for bit-fields. */
20973 size = int_size_in_bytes (field_type (tree_node));
20974 break;
20975 default:
20976 gcc_unreachable ();
20977 }
20978
20979 /* Support for dynamically-sized objects was introduced by DWARFv3.
20980 At the moment, GDB does not handle variable byte sizes very well,
20981 though. */
20982 if ((dwarf_version >= 3 || !dwarf_strict)
20983 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20984 && size_expr != NULL)
20985 add_AT_loc (die, DW_AT_byte_size, size_expr);
20986
20987 /* Note that `size' might be -1 when we get to this point. If it is, that
20988 indicates that the byte size of the entity in question is variable and
20989 that we could not generate a DWARF expression that computes it. */
20990 if (size >= 0)
20991 add_AT_unsigned (die, DW_AT_byte_size, size);
20992 }
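/* For example, for a FIELD_DECL such as

       struct s { unsigned int f : 3; };

   the DW_AT_byte_size emitted for F is the size of its declared type
   (typically 4 on targets with a 32-bit unsigned int), not the single
   byte its 3 bits would fit in.  */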
20993
20994 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20995 alignment. */
20996
20997 static void
20998 add_alignment_attribute (dw_die_ref die, tree tree_node)
20999 {
21000 if (dwarf_version < 5 && dwarf_strict)
21001 return;
21002
21003 unsigned align;
21004
21005 if (DECL_P (tree_node))
21006 {
21007 if (!DECL_USER_ALIGN (tree_node))
21008 return;
21009
21010 align = DECL_ALIGN_UNIT (tree_node);
21011 }
21012 else if (TYPE_P (tree_node))
21013 {
21014 if (!TYPE_USER_ALIGN (tree_node))
21015 return;
21016
21017 align = TYPE_ALIGN_UNIT (tree_node);
21018 }
21019 else
21020 gcc_unreachable ();
21021
21022 add_AT_unsigned (die, DW_AT_alignment, align);
21023 }
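/* For example, a type declared with the GNU aligned attribute,

       struct s { char c; } __attribute__ ((aligned (16)));

   has TYPE_USER_ALIGN set, so a DW_AT_alignment of 16 is emitted when
   DWARF 5 is in use or strict DWARF is not requested; a type carrying
   only its natural alignment gets no such attribute.  */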
21024
21025 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21026 which specifies the distance in bits from the highest order bit of the
21027 "containing object" for the bit-field to the highest order bit of the
21028 bit-field itself.
21029
21030 For any given bit-field, the "containing object" is a hypothetical object
21031 (of some integral or enum type) within which the given bit-field lives. The
21032 type of this hypothetical "containing object" is always the same as the
21033 declared type of the individual bit-field itself. The determination of the
21034 exact location of the "containing object" for a bit-field is rather
21035 complicated. It's handled by the `field_byte_offset' function (above).
21036
21037 CTX is required: see the comment for VLR_CONTEXT.
21038
21039 Note that it is the size (in bytes) of the hypothetical "containing object"
21040 which will be given in the DW_AT_byte_size attribute for this bit-field.
21041 (See `add_byte_size_attribute' above). */
21042
21043 static inline void
21044 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21045 {
21046 HOST_WIDE_INT object_offset_in_bytes;
21047 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21048 HOST_WIDE_INT bitpos_int;
21049 HOST_WIDE_INT highest_order_object_bit_offset;
21050 HOST_WIDE_INT highest_order_field_bit_offset;
21051 HOST_WIDE_INT bit_offset;
21052
21053 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21054
21055 /* Must be a field and a bit field. */
21056 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21057
21058 /* We can't yet handle bit-fields whose offsets are variable, so if we
21059 encounter such things, just return without generating any attribute
21060 whatsoever. Likewise for variable or too large size. */
21061 if (! tree_fits_shwi_p (bit_position (decl))
21062 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21063 return;
21064
21065 bitpos_int = int_bit_position (decl);
21066
21067 /* Note that the bit offset is always the distance (in bits) from the
21068 highest-order bit of the "containing object" to the highest-order bit of
21069 the bit-field itself. Since the "high-order end" of any object or field
21070 is different on big-endian and little-endian machines, the computation
21071 below must take account of these differences. */
21072 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21073 highest_order_field_bit_offset = bitpos_int;
21074
21075 if (! BYTES_BIG_ENDIAN)
21076 {
21077 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21078 highest_order_object_bit_offset +=
21079 simple_type_size_in_bits (original_type);
21080 }
21081
21082 bit_offset
21083 = (! BYTES_BIG_ENDIAN
21084 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21085 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21086
21087 if (bit_offset < 0)
21088 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21089 else
21090 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21091 }
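/* As an illustration of the computation above, assume

       struct s { unsigned a : 3; unsigned b : 5; };

   with a 32-bit unsigned int containing object placed at byte offset 0.
   For B, int_bit_position is 3 and DECL_SIZE is 5.  On a little-endian
   target the field's highest-order bit offset becomes 3 + 5 = 8 and the
   object's becomes 0 + 32 = 32, giving DW_AT_bit_offset 32 - 8 = 24.
   On a big-endian target the values are used directly, giving
   DW_AT_bit_offset 3 - 0 = 3.  */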
21092
21093 /* For a FIELD_DECL node which represents a bit field, output an attribute
21094 which specifies the length in bits of the given field. */
21095
21096 static inline void
21097 add_bit_size_attribute (dw_die_ref die, tree decl)
21098 {
21099 /* Must be a field and a bit field. */
21100 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21101 && DECL_BIT_FIELD_TYPE (decl));
21102
21103 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21104 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21105 }
21106
21107 /* If the compiled language is ANSI C, then add a 'prototyped'
21108 attribute if argument types are given for the parameters of a function. */
21109
21110 static inline void
21111 add_prototyped_attribute (dw_die_ref die, tree func_type)
21112 {
21113 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21114 {
21115 case DW_LANG_C:
21116 case DW_LANG_C89:
21117 case DW_LANG_C99:
21118 case DW_LANG_C11:
21119 case DW_LANG_ObjC:
21120 if (prototype_p (func_type))
21121 add_AT_flag (die, DW_AT_prototyped, 1);
21122 break;
21123 default:
21124 break;
21125 }
21126 }
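/* For example, in C the declaration "int f (void);" is a prototype, so
   its DIE gets DW_AT_prototyped, while an old-style "int f ();"
   declaration carries no parameter information and gets no such
   attribute.  C++ is not listed in the switch above, since C++ functions
   always have prototypes.  */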
21127
21128 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21129 by looking in the type declaration, the object declaration equate table or
21130 the block mapping. */
21131
21132 static inline void
21133 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21134 {
21135 dw_die_ref origin_die = NULL;
21136
21137 if (DECL_P (origin))
21138 {
21139 sym_off_pair *desc;
21140 if (in_lto_p
21141 && external_die_map
21142 && (desc = external_die_map->get (origin)))
21143 {
21144 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21145 desc->sym, desc->off);
21146 return;
21147 }
21148 origin_die = lookup_decl_die (origin);
21149 }
21150 else if (TYPE_P (origin))
21151 origin_die = lookup_type_die (origin);
21152 else if (TREE_CODE (origin) == BLOCK)
21153 origin_die = lookup_block_die (origin);
21154
21155 /* XXX: Functions that are never lowered don't always have correct block
21156 trees (in the case of Java, and in some other languages, they simply have
21157 no block tree). For these functions, there is nothing we can really do to
21158 output correct debug info for inlined functions in all cases. Rather
21159 than die, we'll just produce deficient debug info now, in that we will
21160 have variables without a proper abstract origin. In the future, when all
21161 functions are lowered, we should re-add a gcc_assert (origin_die)
21162 here. */
21163
21164 if (origin_die)
21165 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21166 }
21167
21168 /* We do not currently support the pure_virtual attribute. */
21169
21170 static inline void
21171 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21172 {
21173 if (DECL_VINDEX (func_decl))
21174 {
21175 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21176
21177 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21178 add_AT_loc (die, DW_AT_vtable_elem_location,
21179 new_loc_descr (DW_OP_constu,
21180 tree_to_shwi (DECL_VINDEX (func_decl)),
21181 0));
21182
21183 /* GNU extension: Record what type this method came from originally. */
21184 if (debug_info_level > DINFO_LEVEL_TERSE
21185 && DECL_CONTEXT (func_decl))
21186 add_AT_die_ref (die, DW_AT_containing_type,
21187 lookup_type_die (DECL_CONTEXT (func_decl)));
21188 }
21189 }
21190 \f
21191 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21192 given decl. This used to be a vendor extension until after DWARF 4
21193 standardized it. */
21194
21195 static void
21196 add_linkage_attr (dw_die_ref die, tree decl)
21197 {
21198 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21199
21200 /* Mimic what assemble_name_raw does with a leading '*'. */
21201 if (name[0] == '*')
21202 name = &name[1];
21203
21204 if (dwarf_version >= 4)
21205 add_AT_string (die, DW_AT_linkage_name, name);
21206 else
21207 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21208 }
21209
21210 /* Add source coordinate attributes for the given decl. */
21211
21212 static void
21213 add_src_coords_attributes (dw_die_ref die, tree decl)
21214 {
21215 expanded_location s;
21216
21217 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21218 return;
21219 s = expand_location (DECL_SOURCE_LOCATION (decl));
21220 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21221 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21222 if (debug_column_info && s.column)
21223 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21224 }
21225
21226 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21227
21228 static void
21229 add_linkage_name_raw (dw_die_ref die, tree decl)
21230 {
21231 /* Defer until we have an assembler name set. */
21232 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21233 {
21234 limbo_die_node *asm_name;
21235
21236 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21237 asm_name->die = die;
21238 asm_name->created_for = decl;
21239 asm_name->next = deferred_asm_name;
21240 deferred_asm_name = asm_name;
21241 }
21242 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21243 add_linkage_attr (die, decl);
21244 }
21245
21246 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21247
21248 static void
21249 add_linkage_name (dw_die_ref die, tree decl)
21250 {
21251 if (debug_info_level > DINFO_LEVEL_NONE
21252 && VAR_OR_FUNCTION_DECL_P (decl)
21253 && TREE_PUBLIC (decl)
21254 && !(VAR_P (decl) && DECL_REGISTER (decl))
21255 && die->die_tag != DW_TAG_member)
21256 add_linkage_name_raw (die, decl);
21257 }
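/* For example, for the C++ function "void ns::f (int)" the assembler
   name differs from DECL_NAME, so its DIE gets a DW_AT_linkage_name
   (DW_AT_MIPS_linkage_name before DWARF 4) holding the mangled string,
   "_ZN2ns1fEi" under the Itanium C++ ABI.  An ordinary C function,
   whose assembler name usually matches its source name, gets no
   linkage attribute.  */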
21258
21259 /* Add a DW_AT_name attribute and source coordinate attribute for the
21260 given decl, but only if it actually has a name. */
21261
21262 static void
21263 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21264 bool no_linkage_name)
21265 {
21266 tree decl_name;
21267
21268 decl_name = DECL_NAME (decl);
21269 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21270 {
21271 const char *name = dwarf2_name (decl, 0);
21272 if (name)
21273 add_name_attribute (die, name);
21274 else
21275 add_desc_attribute (die, decl);
21276
21277 if (! DECL_ARTIFICIAL (decl))
21278 add_src_coords_attributes (die, decl);
21279
21280 if (!no_linkage_name)
21281 add_linkage_name (die, decl);
21282 }
21283 else
21284 add_desc_attribute (die, decl);
21285
21286 #ifdef VMS_DEBUGGING_INFO
21287 /* Get the function's name, as described by its RTL. This may be different
21288 from the DECL_NAME name used in the source file. */
21289 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21290 {
21291 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21292 XEXP (DECL_RTL (decl), 0), false);
21293 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21294 }
21295 #endif /* VMS_DEBUGGING_INFO */
21296 }
21297
21298 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21299
21300 static void
21301 add_discr_value (dw_die_ref die, dw_discr_value *value)
21302 {
21303 dw_attr_node attr;
21304
21305 attr.dw_attr = DW_AT_discr_value;
21306 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21307 attr.dw_attr_val.val_entry = NULL;
21308 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21309 if (value->pos)
21310 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21311 else
21312 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21313 add_dwarf_attr (die, &attr);
21314 }
21315
21316 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21317
21318 static void
21319 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21320 {
21321 dw_attr_node attr;
21322
21323 attr.dw_attr = DW_AT_discr_list;
21324 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21325 attr.dw_attr_val.val_entry = NULL;
21326 attr.dw_attr_val.v.val_discr_list = discr_list;
21327 add_dwarf_attr (die, &attr);
21328 }
21329
21330 static inline dw_discr_list_ref
21331 AT_discr_list (dw_attr_node *attr)
21332 {
21333 return attr->dw_attr_val.v.val_discr_list;
21334 }
21335
21336 #ifdef VMS_DEBUGGING_INFO
21337 /* Output the debug main pointer die for VMS. */
21338
21339 void
21340 dwarf2out_vms_debug_main_pointer (void)
21341 {
21342 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21343 dw_die_ref die;
21344
21345 /* Allocate the VMS debug main subprogram die. */
21346 die = new_die_raw (DW_TAG_subprogram);
21347 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21348 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21349 current_function_funcdef_no);
21350 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21351
21352 /* Make it the first child of comp_unit_die (). */
21353 die->die_parent = comp_unit_die ();
21354 if (comp_unit_die ()->die_child)
21355 {
21356 die->die_sib = comp_unit_die ()->die_child->die_sib;
21357 comp_unit_die ()->die_child->die_sib = die;
21358 }
21359 else
21360 {
21361 die->die_sib = die;
21362 comp_unit_die ()->die_child = die;
21363 }
21364 }
21365 #endif /* VMS_DEBUGGING_INFO */
21366
21367 /* walk_tree helper function for uses_local_type, below. */
21368
21369 static tree
21370 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21371 {
21372 if (!TYPE_P (*tp))
21373 *walk_subtrees = 0;
21374 else
21375 {
21376 tree name = TYPE_NAME (*tp);
21377 if (name && DECL_P (name) && decl_function_context (name))
21378 return *tp;
21379 }
21380 return NULL_TREE;
21381 }
21382
21383 /* If TYPE involves a function-local type (including a local typedef to a
21384 non-local type), returns that type; otherwise returns NULL_TREE. */
21385
21386 static tree
21387 uses_local_type (tree type)
21388 {
21389 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21390 return used;
21391 }
21392
21393 /* Return the DIE for the scope that immediately contains this type.
21394 Non-named types that do not involve a function-local type get global
21395 scope. Named types nested in namespaces or other types get their
21396 containing scope. All other types (i.e. function-local named types) get
21397 the current active scope. */
21398
21399 static dw_die_ref
21400 scope_die_for (tree t, dw_die_ref context_die)
21401 {
21402 dw_die_ref scope_die = NULL;
21403 tree containing_scope;
21404
21405 /* Non-types always go in the current scope. */
21406 gcc_assert (TYPE_P (t));
21407
21408 /* Use the scope of the typedef, rather than the scope of the type
21409 it refers to. */
21410 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21411 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21412 else
21413 containing_scope = TYPE_CONTEXT (t);
21414
21415 /* Use the containing namespace if there is one. */
21416 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21417 {
21418 if (context_die == lookup_decl_die (containing_scope))
21419 /* OK */;
21420 else if (debug_info_level > DINFO_LEVEL_TERSE)
21421 context_die = get_context_die (containing_scope);
21422 else
21423 containing_scope = NULL_TREE;
21424 }
21425
21426 /* Ignore function type "scopes" from the C frontend. They mean that
21427 a tagged type is local to a parmlist of a function declarator, but
21428 that isn't useful to DWARF. */
21429 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21430 containing_scope = NULL_TREE;
21431
21432 if (SCOPE_FILE_SCOPE_P (containing_scope))
21433 {
21434 /* If T uses a local type keep it local as well, to avoid references
21435 to function-local DIEs from outside the function. */
21436 if (current_function_decl && uses_local_type (t))
21437 scope_die = context_die;
21438 else
21439 scope_die = comp_unit_die ();
21440 }
21441 else if (TYPE_P (containing_scope))
21442 {
21443 /* For types, we can just look up the appropriate DIE. */
21444 if (debug_info_level > DINFO_LEVEL_TERSE)
21445 scope_die = get_context_die (containing_scope);
21446 else
21447 {
21448 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21449 if (scope_die == NULL)
21450 scope_die = comp_unit_die ();
21451 }
21452 }
21453 else
21454 scope_die = context_die;
21455
21456 return scope_die;
21457 }
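/* For example, a type "ns::S" declared at namespace scope is parented by
   the DW_TAG_namespace DIE for "ns" when more than terse debug info is
   requested, while an unnamed file-scope type that does not involve any
   function-local type is parented by the compilation unit DIE.  */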
21458
21459 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21460
21461 static inline int
21462 local_scope_p (dw_die_ref context_die)
21463 {
21464 for (; context_die; context_die = context_die->die_parent)
21465 if (context_die->die_tag == DW_TAG_inlined_subroutine
21466 || context_die->die_tag == DW_TAG_subprogram)
21467 return 1;
21468
21469 return 0;
21470 }
21471
21472 /* Returns nonzero if CONTEXT_DIE is a class. */
21473
21474 static inline int
21475 class_scope_p (dw_die_ref context_die)
21476 {
21477 return (context_die
21478 && (context_die->die_tag == DW_TAG_structure_type
21479 || context_die->die_tag == DW_TAG_class_type
21480 || context_die->die_tag == DW_TAG_interface_type
21481 || context_die->die_tag == DW_TAG_union_type));
21482 }
21483
21484 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21485 whether or not to treat a DIE in this context as a declaration. */
21486
21487 static inline int
21488 class_or_namespace_scope_p (dw_die_ref context_die)
21489 {
21490 return (class_scope_p (context_die)
21491 || (context_die && context_die->die_tag == DW_TAG_namespace));
21492 }
21493
21494 /* Many forms of DIEs require a "type description" attribute. This
21495 routine locates the proper "type descriptor" die for the type given
21496 by 'type' plus any additional qualifiers given by 'cv_quals', and
21497 adds a DW_AT_type attribute below the given die. */
21498
21499 static void
21500 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21501 bool reverse, dw_die_ref context_die)
21502 {
21503 enum tree_code code = TREE_CODE (type);
21504 dw_die_ref type_die = NULL;
21505
21506 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21507 or fixed-point type, use the inner type. This is because we have no
21508 support for unnamed types in base_type_die. This can happen if this is
21509 an Ada subrange type. Correct solution is emit a subrange type die. */
21510 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21511 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21512 type = TREE_TYPE (type), code = TREE_CODE (type);
21513
21514 if (code == ERROR_MARK
21515 /* Handle a special case. For functions whose return type is void, we
21516 generate *no* type attribute. (Note that no object may have type
21517 `void', so this only applies to function return types). */
21518 || code == VOID_TYPE)
21519 return;
21520
21521 type_die = modified_type_die (type,
21522 cv_quals | TYPE_QUALS (type),
21523 reverse,
21524 context_die);
21525
21526 if (type_die != NULL)
21527 add_AT_die_ref (object_die, DW_AT_type, type_die);
21528 }
21529
21530 /* Given an object die, add the calling convention attribute for the
21531 function call type. */
21532 static void
21533 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21534 {
21535 enum dwarf_calling_convention value = DW_CC_normal;
21536
21537 value = ((enum dwarf_calling_convention)
21538 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21539
21540 if (is_fortran ()
21541 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21542 {
21543 /* DWARF 2 doesn't provide a way to identify a program's source-level
21544 entry point. DW_AT_calling_convention attributes are only meant
21545 to describe functions' calling conventions. However, lacking a
21546 better way to signal the Fortran main program, we used this for
21547 a long time, following existing custom. Now, DWARF 4 has
21548 DW_AT_main_subprogram, which we add below, but some tools still
21549 rely on the old way, which we thus keep. */
21550 value = DW_CC_program;
21551
21552 if (dwarf_version >= 4 || !dwarf_strict)
21553 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21554 }
21555
21556 /* Only add the attribute if the backend requests it and the value
21557 is not DW_CC_normal. */
21558 if (value && (value != DW_CC_normal))
21559 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21560 }
21561
21562 /* Given a tree pointer to a struct, class, union, or enum type node, return
21563 a pointer to the (string) tag name for the given type, or zero if the type
21564 was declared without a tag. */
21565
21566 static const char *
21567 type_tag (const_tree type)
21568 {
21569 const char *name = 0;
21570
21571 if (TYPE_NAME (type) != 0)
21572 {
21573 tree t = 0;
21574
21575 /* Find the IDENTIFIER_NODE for the type name. */
21576 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21577 && !TYPE_NAMELESS (type))
21578 t = TYPE_NAME (type);
21579
21580 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21581 a TYPE_DECL node, regardless of whether or not a `typedef' was
21582 involved. */
21583 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21584 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21585 {
21586 /* We want to be extra verbose. Don't call dwarf_name if
21587 DECL_NAME isn't set. The default hook for decl_printable_name
21588 doesn't like that, and in this context it's correct to return
21589 0, instead of "<anonymous>" or the like. */
21590 if (DECL_NAME (TYPE_NAME (type))
21591 && !DECL_NAMELESS (TYPE_NAME (type)))
21592 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21593 }
21594
21595 /* Now get the name as a string, or invent one. */
21596 if (!name && t != 0)
21597 name = IDENTIFIER_POINTER (t);
21598 }
21599
21600 return (name == 0 || *name == '\0') ? 0 : name;
21601 }
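/* For example, "struct foo { int i; };" yields the tag name "foo",
   whereas an unnamed "struct { int i; } x;" has no usable tag and this
   function returns zero, just as it does for TYPE_NAMELESS types.  */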
21602
21603 /* Return the type associated with a data member, make a special check
21604 for bit field types. */
21605
21606 static inline tree
21607 member_declared_type (const_tree member)
21608 {
21609 return (DECL_BIT_FIELD_TYPE (member)
21610 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21611 }
21612
21613 /* Get the decl's label, as described by its RTL. This may be different
21614 from the DECL_NAME name used in the source file. */
21615
21616 #if 0
21617 static const char *
21618 decl_start_label (tree decl)
21619 {
21620 rtx x;
21621 const char *fnname;
21622
21623 x = DECL_RTL (decl);
21624 gcc_assert (MEM_P (x));
21625
21626 x = XEXP (x, 0);
21627 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21628
21629 fnname = XSTR (x, 0);
21630 return fnname;
21631 }
21632 #endif
21633 \f
21634 /* For variable-length arrays that have been previously generated, but
21635 may be incomplete due to missing subscript info, fill the subscript
21636 info. Return TRUE if this is one of those cases. */
21637 static bool
21638 fill_variable_array_bounds (tree type)
21639 {
21640 if (TREE_ASM_WRITTEN (type)
21641 && TREE_CODE (type) == ARRAY_TYPE
21642 && variably_modified_type_p (type, NULL))
21643 {
21644 dw_die_ref array_die = lookup_type_die (type);
21645 if (!array_die)
21646 return false;
21647 add_subscript_info (array_die, type, !is_ada ());
21648 return true;
21649 }
21650 return false;
21651 }
21652
21653 /* These routines generate the internal representation of the DIE's for
21654 the compilation unit. Debugging information is collected by walking
21655 the declaration trees passed in from dwarf2out_decl(). */
21656
21657 static void
21658 gen_array_type_die (tree type, dw_die_ref context_die)
21659 {
21660 dw_die_ref array_die;
21661
21662 /* GNU compilers represent multidimensional array types as sequences of one
21663 dimensional array types whose element types are themselves array types.
21664 We sometimes squish that down to a single array_type DIE with multiple
21665 subscripts in the Dwarf debugging info. The draft Dwarf specification
21666 says that we are allowed to do this kind of compression in C, because
21667 there is no difference between an array of arrays and a multidimensional
21668 array. We don't do this for Ada, to remain as close as possible to the
21669 actual representation, which is especially important given the language's
21670 flexibility with respect to arrays of variable size. */
21671
21672 bool collapse_nested_arrays = !is_ada ();
21673
21674 if (fill_variable_array_bounds (type))
21675 return;
21676
21677 dw_die_ref scope_die = scope_die_for (type, context_die);
21678 tree element_type;
21679
21680 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21681 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21682 if (TYPE_STRING_FLAG (type)
21683 && TREE_CODE (type) == ARRAY_TYPE
21684 && is_fortran ()
21685 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21686 {
21687 HOST_WIDE_INT size;
21688
21689 array_die = new_die (DW_TAG_string_type, scope_die, type);
21690 add_name_attribute (array_die, type_tag (type));
21691 equate_type_number_to_die (type, array_die);
21692 size = int_size_in_bytes (type);
21693 if (size >= 0)
21694 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21695 /* ??? We can't annotate types late, but for LTO we may not
21696 generate a location early either (gfortran.dg/save_6.f90). */
21697 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21698 && TYPE_DOMAIN (type) != NULL_TREE
21699 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21700 {
21701 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21702 tree rszdecl = szdecl;
21703
21704 size = int_size_in_bytes (TREE_TYPE (szdecl));
21705 if (!DECL_P (szdecl))
21706 {
21707 if (TREE_CODE (szdecl) == INDIRECT_REF
21708 && DECL_P (TREE_OPERAND (szdecl, 0)))
21709 {
21710 rszdecl = TREE_OPERAND (szdecl, 0);
21711 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21712 != DWARF2_ADDR_SIZE)
21713 size = 0;
21714 }
21715 else
21716 size = 0;
21717 }
21718 if (size > 0)
21719 {
21720 dw_loc_list_ref loc
21721 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21722 NULL);
21723 if (loc)
21724 {
21725 add_AT_location_description (array_die, DW_AT_string_length,
21726 loc);
21727 if (size != DWARF2_ADDR_SIZE)
21728 add_AT_unsigned (array_die, dwarf_version >= 5
21729 ? DW_AT_string_length_byte_size
21730 : DW_AT_byte_size, size);
21731 }
21732 }
21733 }
21734 return;
21735 }
21736
21737 array_die = new_die (DW_TAG_array_type, scope_die, type);
21738 add_name_attribute (array_die, type_tag (type));
21739 equate_type_number_to_die (type, array_die);
21740
21741 if (TREE_CODE (type) == VECTOR_TYPE)
21742 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21743
21744 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21745 if (is_fortran ()
21746 && TREE_CODE (type) == ARRAY_TYPE
21747 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21748 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21749 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21750
21751 #if 0
21752 /* We default the array ordering. Debuggers will probably do the right
21753 things even if DW_AT_ordering is not present. It's not even an issue
21754 until we start to get into multidimensional arrays anyway. If a debugger
21755 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21756 then we'll have to put the DW_AT_ordering attribute back in. (But if
21757 and when we find out that we need to put these in, we will only do so
21758 for multidimensional arrays.) */
21759 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21760 #endif
21761
21762 if (TREE_CODE (type) == VECTOR_TYPE)
21763 {
21764 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21765 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21766 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21767 add_bound_info (subrange_die, DW_AT_upper_bound,
21768 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21769 }
21770 else
21771 add_subscript_info (array_die, type, collapse_nested_arrays);
21772
21773 /* Add representation of the type of the elements of this array type and
21774 emit the corresponding DIE if we haven't done it already. */
21775 element_type = TREE_TYPE (type);
21776 if (collapse_nested_arrays)
21777 while (TREE_CODE (element_type) == ARRAY_TYPE)
21778 {
21779 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21780 break;
21781 element_type = TREE_TYPE (element_type);
21782 }
21783
21784 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21785 TREE_CODE (type) == ARRAY_TYPE
21786 && TYPE_REVERSE_STORAGE_ORDER (type),
21787 context_die);
21788
21789 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21790 if (TYPE_ARTIFICIAL (type))
21791 add_AT_flag (array_die, DW_AT_artificial, 1);
21792
21793 if (get_AT (array_die, DW_AT_name))
21794 add_pubtype (type, array_die);
21795
21796 add_alignment_attribute (array_die, type);
21797 }
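/* For example, a GNU C vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   goes through the VECTOR_TYPE paths above: it is emitted as a
   DW_TAG_array_type carrying the DW_AT_GNU_vector flag and a single
   DW_TAG_subrange_type whose upper bound is TYPE_VECTOR_SUBPARTS - 1,
   i.e. 3 assuming a 32-bit int.  */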
21798
21799 /* This routine generates a DIE for an array with a hidden descriptor;
21800 the details are filled into *info by a langhook. */
21801
21802 static void
21803 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21804 dw_die_ref context_die)
21805 {
21806 const dw_die_ref scope_die = scope_die_for (type, context_die);
21807 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21808 struct loc_descr_context context = { type, info->base_decl, NULL,
21809 false, false };
21810 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21811 int dim;
21812
21813 add_name_attribute (array_die, type_tag (type));
21814 equate_type_number_to_die (type, array_die);
21815
21816 if (info->ndimensions > 1)
21817 switch (info->ordering)
21818 {
21819 case array_descr_ordering_row_major:
21820 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21821 break;
21822 case array_descr_ordering_column_major:
21823 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21824 break;
21825 default:
21826 break;
21827 }
21828
21829 if (dwarf_version >= 3 || !dwarf_strict)
21830 {
21831 if (info->data_location)
21832 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21833 dw_scalar_form_exprloc, &context);
21834 if (info->associated)
21835 add_scalar_info (array_die, DW_AT_associated, info->associated,
21836 dw_scalar_form_constant
21837 | dw_scalar_form_exprloc
21838 | dw_scalar_form_reference, &context);
21839 if (info->allocated)
21840 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21841 dw_scalar_form_constant
21842 | dw_scalar_form_exprloc
21843 | dw_scalar_form_reference, &context);
21844 if (info->stride)
21845 {
21846 const enum dwarf_attribute attr
21847 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21848 const int forms
21849 = (info->stride_in_bits)
21850 ? dw_scalar_form_constant
21851 : (dw_scalar_form_constant
21852 | dw_scalar_form_exprloc
21853 | dw_scalar_form_reference);
21854
21855 add_scalar_info (array_die, attr, info->stride, forms, &context);
21856 }
21857 }
21858 if (dwarf_version >= 5)
21859 {
21860 if (info->rank)
21861 {
21862 add_scalar_info (array_die, DW_AT_rank, info->rank,
21863 dw_scalar_form_constant
21864 | dw_scalar_form_exprloc, &context);
21865 subrange_tag = DW_TAG_generic_subrange;
21866 context.placeholder_arg = true;
21867 }
21868 }
21869
21870 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21871
21872 for (dim = 0; dim < info->ndimensions; dim++)
21873 {
21874 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21875
21876 if (info->dimen[dim].bounds_type)
21877 add_type_attribute (subrange_die,
21878 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21879 false, context_die);
21880 if (info->dimen[dim].lower_bound)
21881 add_bound_info (subrange_die, DW_AT_lower_bound,
21882 info->dimen[dim].lower_bound, &context);
21883 if (info->dimen[dim].upper_bound)
21884 add_bound_info (subrange_die, DW_AT_upper_bound,
21885 info->dimen[dim].upper_bound, &context);
21886 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21887 add_scalar_info (subrange_die, DW_AT_byte_stride,
21888 info->dimen[dim].stride,
21889 dw_scalar_form_constant
21890 | dw_scalar_form_exprloc
21891 | dw_scalar_form_reference,
21892 &context);
21893 }
21894
21895 gen_type_die (info->element_type, context_die);
21896 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21897 TREE_CODE (type) == ARRAY_TYPE
21898 && TYPE_REVERSE_STORAGE_ORDER (type),
21899 context_die);
21900
21901 if (get_AT (array_die, DW_AT_name))
21902 add_pubtype (type, array_die);
21903
21904 add_alignment_attribute (array_die, type);
21905 }
21906
21907 #if 0
21908 static void
21909 gen_entry_point_die (tree decl, dw_die_ref context_die)
21910 {
21911 tree origin = decl_ultimate_origin (decl);
21912 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21913
21914 if (origin != NULL)
21915 add_abstract_origin_attribute (decl_die, origin);
21916 else
21917 {
21918 add_name_and_src_coords_attributes (decl_die, decl);
21919 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21920 TYPE_UNQUALIFIED, false, context_die);
21921 }
21922
21923 if (DECL_ABSTRACT_P (decl))
21924 equate_decl_number_to_die (decl, decl_die);
21925 else
21926 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21927 }
21928 #endif
21929
21930 /* Walk through the list of incomplete types again, trying once more to
21931 emit full debugging info for them. */
21932
21933 static void
21934 retry_incomplete_types (void)
21935 {
21936 set_early_dwarf s;
21937 int i;
21938
21939 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21940 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21941 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21942 vec_safe_truncate (incomplete_types, 0);
21943 }
21944
21945 /* Determine what tag to use for a record type. */
21946
21947 static enum dwarf_tag
21948 record_type_tag (tree type)
21949 {
21950 if (! lang_hooks.types.classify_record)
21951 return DW_TAG_structure_type;
21952
21953 switch (lang_hooks.types.classify_record (type))
21954 {
21955 case RECORD_IS_STRUCT:
21956 return DW_TAG_structure_type;
21957
21958 case RECORD_IS_CLASS:
21959 return DW_TAG_class_type;
21960
21961 case RECORD_IS_INTERFACE:
21962 if (dwarf_version >= 3 || !dwarf_strict)
21963 return DW_TAG_interface_type;
21964 return DW_TAG_structure_type;
21965
21966 default:
21967 gcc_unreachable ();
21968 }
21969 }
21970
21971 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21972 include all of the information about the enumeration values also. Each
21973 enumerated type name/value is listed as a child of the enumerated type
21974 DIE. */
21975
21976 static dw_die_ref
21977 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21978 {
21979 dw_die_ref type_die = lookup_type_die (type);
21980 dw_die_ref orig_type_die = type_die;
21981
21982 if (type_die == NULL)
21983 {
21984 type_die = new_die (DW_TAG_enumeration_type,
21985 scope_die_for (type, context_die), type);
21986 equate_type_number_to_die (type, type_die);
21987 add_name_attribute (type_die, type_tag (type));
21988 if ((dwarf_version >= 4 || !dwarf_strict)
21989 && ENUM_IS_SCOPED (type))
21990 add_AT_flag (type_die, DW_AT_enum_class, 1);
21991 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21992 add_AT_flag (type_die, DW_AT_declaration, 1);
21993 if (!dwarf_strict)
21994 add_AT_unsigned (type_die, DW_AT_encoding,
21995 TYPE_UNSIGNED (type)
21996 ? DW_ATE_unsigned
21997 : DW_ATE_signed);
21998 }
21999 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22000 return type_die;
22001 else
22002 remove_AT (type_die, DW_AT_declaration);
22003
22004 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22005 given enum type is incomplete, do not generate the DW_AT_byte_size
22006 attribute or the DW_AT_element_list attribute. */
22007 if (TYPE_SIZE (type))
22008 {
22009 tree link;
22010
22011 if (!ENUM_IS_OPAQUE (type))
22012 TREE_ASM_WRITTEN (type) = 1;
22013 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22014 add_byte_size_attribute (type_die, type);
22015 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22016 add_alignment_attribute (type_die, type);
22017 if ((dwarf_version >= 3 || !dwarf_strict)
22018 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22019 {
22020 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22021 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22022 context_die);
22023 }
22024 if (TYPE_STUB_DECL (type) != NULL_TREE)
22025 {
22026 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22027 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22028 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22029 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22030 }
22031
22032 /* If the first reference to this type was as the return type of an
22033 inline function, then it may not have a parent. Fix this now. */
22034 if (type_die->die_parent == NULL)
22035 add_child_die (scope_die_for (type, context_die), type_die);
22036
22037 for (link = TYPE_VALUES (type);
22038 link != NULL; link = TREE_CHAIN (link))
22039 {
22040 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22041 tree value = TREE_VALUE (link);
22042
22043 gcc_assert (!ENUM_IS_OPAQUE (type));
22044 add_name_attribute (enum_die,
22045 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22046
22047 if (TREE_CODE (value) == CONST_DECL)
22048 value = DECL_INITIAL (value);
22049
22050 if (simple_type_size_in_bits (TREE_TYPE (value))
22051 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22052 {
22053 /* For constant forms created by add_AT_unsigned DWARF
22054 consumers (GDB, elfutils, etc.) always zero extend
22055 the value. Only when the actual value is negative
22056 do we need to use add_AT_int to generate a constant
22057 form that can represent negative values. */
22058 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22059 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22060 add_AT_unsigned (enum_die, DW_AT_const_value,
22061 (unsigned HOST_WIDE_INT) val);
22062 else
22063 add_AT_int (enum_die, DW_AT_const_value, val);
22064 }
22065 else
22066 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22067 that here. TODO: This should be re-worked to use correct
22068 signed/unsigned double tags for all cases. */
22069 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22070 }
22071
22072 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22073 if (TYPE_ARTIFICIAL (type)
22074 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22075 add_AT_flag (type_die, DW_AT_artificial, 1);
22076 }
22077 else
22078 add_AT_flag (type_die, DW_AT_declaration, 1);
22079
22080 add_pubtype (type, type_die);
22081
22082 return type_die;
22083 }
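/* For example, the C++11 enumeration

       enum class E : unsigned char { A = 255 };

   produces a DW_TAG_enumeration_type DIE with DW_AT_enum_class (under
   DWARF 4 or non-strict DWARF), a DW_AT_type referring to its unsigned
   char underlying type, and a child DW_TAG_enumerator for A whose
   DW_AT_const_value 255 is added via add_AT_unsigned because the value
   is unsigned.  */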
22084
22085 /* Generate a DIE to represent either a real live formal parameter decl or to
22086 represent just the type of some formal parameter position in some function
22087 type.
22088
22089 Note that this routine is a bit unusual because its argument may be a
22090 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22091 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22092 node. If it's the former then this function is being called to output a
22093 DIE to represent a formal parameter object (or some inlining thereof). If
22094 it's the latter, then this function is only being called to output a
22095 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22096 argument type of some subprogram type.
22097 If EMIT_NAME_P is true, name and source coordinate attributes
22098 are emitted. */
22099
22100 static dw_die_ref
22101 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22102 dw_die_ref context_die)
22103 {
22104 tree node_or_origin = node ? node : origin;
22105 tree ultimate_origin;
22106 dw_die_ref parm_die = NULL;
22107
22108 if (DECL_P (node_or_origin))
22109 {
22110 parm_die = lookup_decl_die (node);
22111
22112 /* If the contexts differ, we may not be talking about the same
22113 thing.
22114 ??? When in LTO the DIE parent is the "abstract" copy and the
22115 context_die is the specification "copy". But this whole block
22116 should eventually no longer be needed. */
22117 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22118 {
22119 if (!DECL_ABSTRACT_P (node))
22120 {
22121 /* This can happen when creating an inlined instance, in
22122 which case we need to create a new DIE that will get
22123 annotated with DW_AT_abstract_origin. */
22124 parm_die = NULL;
22125 }
22126 else
22127 gcc_unreachable ();
22128 }
22129
22130 if (parm_die && parm_die->die_parent == NULL)
22131 {
22132 /* Check that parm_die already has the right attributes that
22133 we would have added below. If any attributes are
22134 missing, fall through to add them. */
22135 if (! DECL_ABSTRACT_P (node_or_origin)
22136 && !get_AT (parm_die, DW_AT_location)
22137 && !get_AT (parm_die, DW_AT_const_value))
22138 /* We are missing location info, and are about to add it. */
22139 ;
22140 else
22141 {
22142 add_child_die (context_die, parm_die);
22143 return parm_die;
22144 }
22145 }
22146 }
22147
22148 /* If we have a previously generated DIE, use it, unless this is a
22149 concrete instance (origin != NULL), in which case we need a new
22150 DIE with a corresponding DW_AT_abstract_origin. */
22151 bool reusing_die;
22152 if (parm_die && origin == NULL)
22153 reusing_die = true;
22154 else
22155 {
22156 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22157 reusing_die = false;
22158 }
22159
22160 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22161 {
22162 case tcc_declaration:
22163 ultimate_origin = decl_ultimate_origin (node_or_origin);
22164 if (node || ultimate_origin)
22165 origin = ultimate_origin;
22166
22167 if (reusing_die)
22168 goto add_location;
22169
22170 if (origin != NULL)
22171 add_abstract_origin_attribute (parm_die, origin);
22172 else if (emit_name_p)
22173 add_name_and_src_coords_attributes (parm_die, node);
22174 if (origin == NULL
22175 || (! DECL_ABSTRACT_P (node_or_origin)
22176 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22177 decl_function_context
22178 (node_or_origin))))
22179 {
22180 tree type = TREE_TYPE (node_or_origin);
22181 if (decl_by_reference_p (node_or_origin))
22182 add_type_attribute (parm_die, TREE_TYPE (type),
22183 TYPE_UNQUALIFIED,
22184 false, context_die);
22185 else
22186 add_type_attribute (parm_die, type,
22187 decl_quals (node_or_origin),
22188 false, context_die);
22189 }
22190 if (origin == NULL && DECL_ARTIFICIAL (node))
22191 add_AT_flag (parm_die, DW_AT_artificial, 1);
22192 add_location:
22193 if (node && node != origin)
22194 equate_decl_number_to_die (node, parm_die);
22195 if (! DECL_ABSTRACT_P (node_or_origin))
22196 add_location_or_const_value_attribute (parm_die, node_or_origin,
22197 node == NULL);
22198
22199 break;
22200
22201 case tcc_type:
22202 /* We were called with some kind of a ..._TYPE node. */
22203 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22204 context_die);
22205 break;
22206
22207 default:
22208 gcc_unreachable ();
22209 }
22210
22211 return parm_die;
22212 }
22213
22214 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22215 children DW_TAG_formal_parameter DIEs representing the arguments of the
22216 parameter pack.
22217
22218 PARM_PACK must be a function parameter pack.
22219 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22220 must point to the subsequent arguments of the function PACK_ARG belongs to.
22221 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22222 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22223 following the last one for which a DIE was generated. */
22224
22225 static dw_die_ref
22226 gen_formal_parameter_pack_die (tree parm_pack,
22227 tree pack_arg,
22228 dw_die_ref subr_die,
22229 tree *next_arg)
22230 {
22231 tree arg;
22232 dw_die_ref parm_pack_die;
22233
22234 gcc_assert (parm_pack
22235 && lang_hooks.function_parameter_pack_p (parm_pack)
22236 && subr_die);
22237
22238 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22239 add_src_coords_attributes (parm_pack_die, parm_pack);
22240
22241 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22242 {
22243 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22244 parm_pack))
22245 break;
22246 gen_formal_parameter_die (arg, NULL,
22247 false /* Don't emit name attribute. */,
22248 parm_pack_die);
22249 }
22250 if (next_arg)
22251 *next_arg = arg;
22252 return parm_pack_die;
22253 }
22254
22255 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22256 at the end of an (ANSI prototyped) formal parameters list. */
22257
22258 static void
22259 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22260 {
22261 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22262 }
22263
22264 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22265 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22266 parameters as specified in some function type specification (except for
22267 those which appear as part of a function *definition*). */
22268
22269 static void
22270 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22271 {
22272 tree link;
22273 tree formal_type = NULL;
22274 tree first_parm_type;
22275 tree arg;
22276
22277 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22278 {
22279 arg = DECL_ARGUMENTS (function_or_method_type);
22280 function_or_method_type = TREE_TYPE (function_or_method_type);
22281 }
22282 else
22283 arg = NULL_TREE;
22284
22285 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22286
22287 /* Make our first pass over the list of formal parameter types and output a
22288 DW_TAG_formal_parameter DIE for each one. */
22289 for (link = first_parm_type; link; )
22290 {
22291 dw_die_ref parm_die;
22292
22293 formal_type = TREE_VALUE (link);
22294 if (formal_type == void_type_node)
22295 break;
22296
22297 /* Output a (nameless) DIE to represent the formal parameter itself. */
22298 parm_die = gen_formal_parameter_die (formal_type, NULL,
22299 true /* Emit name attribute. */,
22300 context_die);
22301 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22302 && link == first_parm_type)
22303 {
22304 add_AT_flag (parm_die, DW_AT_artificial, 1);
22305 if (dwarf_version >= 3 || !dwarf_strict)
22306 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22307 }
22308 else if (arg && DECL_ARTIFICIAL (arg))
22309 add_AT_flag (parm_die, DW_AT_artificial, 1);
22310
22311 link = TREE_CHAIN (link);
22312 if (arg)
22313 arg = DECL_CHAIN (arg);
22314 }
22315
22316 /* If this function type has an ellipsis, add a
22317 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22318 if (formal_type != void_type_node)
22319 gen_unspecified_parameters_die (function_or_method_type, context_die);
22320
22321 /* Make our second (and final) pass over the list of formal parameter types
22322 and output DIEs to represent those types (as necessary). */
22323 for (link = TYPE_ARG_TYPES (function_or_method_type);
22324 link && TREE_VALUE (link);
22325 link = TREE_CHAIN (link))
22326 gen_type_die (TREE_VALUE (link), context_die);
22327 }
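/* For example, for a function type such as

       int (*fp) (const char *, ...);

   this emits one nameless DW_TAG_formal_parameter DIE for the
   "const char *" argument type followed by a DW_TAG_unspecified_parameters
   DIE for the trailing ellipsis, because the argument type list does not
   end with void_type_node.  */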
22328
22329 /* We want to generate the DIE for TYPE so that we can generate the
22330 die for MEMBER, which has been defined; we will need to refer back
22331 to the member declaration nested within TYPE. If we're trying to
22332 generate minimal debug info for TYPE, processing TYPE won't do the
22333 trick; we need to attach the member declaration by hand. */
22334
22335 static void
22336 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22337 {
22338 gen_type_die (type, context_die);
22339
22340 /* If we're trying to avoid duplicate debug info, we may not have
22341 emitted the member decl for this function. Emit it now. */
22342 if (TYPE_STUB_DECL (type)
22343 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22344 && ! lookup_decl_die (member))
22345 {
22346 dw_die_ref type_die;
22347 gcc_assert (!decl_ultimate_origin (member));
22348
22349 type_die = lookup_type_die_strip_naming_typedef (type);
22350 if (TREE_CODE (member) == FUNCTION_DECL)
22351 gen_subprogram_die (member, type_die);
22352 else if (TREE_CODE (member) == FIELD_DECL)
22353 {
22354 /* Ignore the nameless fields that are used to skip bits but handle
22355 C++ anonymous unions and structs. */
22356 if (DECL_NAME (member) != NULL_TREE
22357 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22358 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22359 {
22360 struct vlr_context vlr_ctx = {
22361 DECL_CONTEXT (member), /* struct_type */
22362 NULL_TREE /* variant_part_offset */
22363 };
22364 gen_type_die (member_declared_type (member), type_die);
22365 gen_field_die (member, &vlr_ctx, type_die);
22366 }
22367 }
22368 else
22369 gen_variable_die (member, NULL_TREE, type_die);
22370 }
22371 }
22372 \f
22373 /* Forward declare these functions, because they are mutually recursive
22374 with their set_block_* pairing functions. */
22375 static void set_decl_origin_self (tree);
22376
22377 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22378 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22379 that it points to the node itself, thus indicating that the node is its
22380 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22381 the given node is NULL, recursively descend the decl/block tree which
22382 it is the root of, and for each other ..._DECL or BLOCK node contained
22383 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22384 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22385 values to point to themselves. */
22386
22387 static void
22388 set_block_origin_self (tree stmt)
22389 {
22390 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22391 {
22392 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22393
22394 {
22395 tree local_decl;
22396
22397 for (local_decl = BLOCK_VARS (stmt);
22398 local_decl != NULL_TREE;
22399 local_decl = DECL_CHAIN (local_decl))
22400 /* Do not recurse on nested functions since the inlining status
22401 of parent and child can be different as per the DWARF spec. */
22402 if (TREE_CODE (local_decl) != FUNCTION_DECL
22403 && !DECL_EXTERNAL (local_decl))
22404 set_decl_origin_self (local_decl);
22405 }
22406
22407 {
22408 tree subblock;
22409
22410 for (subblock = BLOCK_SUBBLOCKS (stmt);
22411 subblock != NULL_TREE;
22412 subblock = BLOCK_CHAIN (subblock))
22413 set_block_origin_self (subblock); /* Recurse. */
22414 }
22415 }
22416 }
22417
22418 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22419 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22420 node so that it points to the node itself, thus indicating that the
22421 node represents its own (abstract) origin. Additionally, if the
22422 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22423 the decl/block tree of which the given node is the root, and for
22424 each other ..._DECL or BLOCK node contained therein whose
22425 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22426 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22427 point to themselves. */
22428
22429 static void
22430 set_decl_origin_self (tree decl)
22431 {
22432 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22433 {
22434 DECL_ABSTRACT_ORIGIN (decl) = decl;
22435 if (TREE_CODE (decl) == FUNCTION_DECL)
22436 {
22437 tree arg;
22438
22439 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22440 DECL_ABSTRACT_ORIGIN (arg) = arg;
22441 if (DECL_INITIAL (decl) != NULL_TREE
22442 && DECL_INITIAL (decl) != error_mark_node)
22443 set_block_origin_self (DECL_INITIAL (decl));
22444 }
22445 }
22446 }
22447 \f
22448 /* Mark the early DIE for DECL as the abstract instance. */
22449
22450 static void
22451 dwarf2out_abstract_function (tree decl)
22452 {
22453 dw_die_ref old_die;
22454
22455 /* Make sure we have the actual abstract inline, not a clone. */
22456 decl = DECL_ORIGIN (decl);
22457
22458 if (DECL_IGNORED_P (decl))
22459 return;
22460
22461 /* Do not lazily create a DIE for decl here just because we
22462 got called via debug_hooks->outlining_inline_function. */
22463 if (in_lto_p
22464 && external_die_map
22465 && external_die_map->get (decl))
22466 return;
22467
22468 old_die = lookup_decl_die (decl);
22469 /* With early debug we always have an old DIE unless we are in LTO
22470 and the user did not compile with debug info but only linked with it. */
22471 if (in_lto_p && ! old_die)
22472 return;
22473 gcc_assert (old_die != NULL);
22474 if (get_AT (old_die, DW_AT_inline)
22475 || get_AT (old_die, DW_AT_abstract_origin))
22476 /* We've already generated the abstract instance. */
22477 return;
22478
22479 /* Go ahead and put DW_AT_inline on the DIE. */
22480 if (DECL_DECLARED_INLINE_P (decl))
22481 {
22482 if (cgraph_function_possibly_inlined_p (decl))
22483 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22484 else
22485 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22486 }
22487 else
22488 {
22489 if (cgraph_function_possibly_inlined_p (decl))
22490 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22491 else
22492 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22493 }
22494
22495 if (DECL_DECLARED_INLINE_P (decl)
22496 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22497 add_AT_flag (old_die, DW_AT_artificial, 1);
22498
22499 set_decl_origin_self (decl);
22500 }
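/* For illustration only: with the logic above, a function declared
   `inline' that the inliner actually expanded somewhere gets
   DW_AT_inline = DW_INL_declared_inlined on its abstract DIE, a
   declared-inline function that was never expanded gets
   DW_INL_declared_not_inlined, and a function the compiler inlined on
   its own gets DW_INL_inlined.  */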
22501
22502 /* Helper function of premark_used_types() which gets called through
22503 htab_traverse.
22504
22505 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22506 marked as unused by prune_unused_types. */
22507
22508 bool
22509 premark_used_types_helper (tree const &type, void *)
22510 {
22511 dw_die_ref die;
22512
22513 die = lookup_type_die (type);
22514 if (die != NULL)
22515 die->die_perennial_p = 1;
22516 return true;
22517 }
22518
22519 /* Helper function of premark_types_used_by_global_vars which gets called
22520 through htab_traverse.
22521
22522 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22523 marked as unused by prune_unused_types. The DIE of the type is marked
22524 only if the global variable using the type will actually be emitted. */
22525
22526 int
22527 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22528 void *)
22529 {
22530 struct types_used_by_vars_entry *entry;
22531 dw_die_ref die;
22532
22533 entry = (struct types_used_by_vars_entry *) *slot;
22534 gcc_assert (entry->type != NULL
22535 && entry->var_decl != NULL);
22536 die = lookup_type_die (entry->type);
22537 if (die)
22538 {
22539 /* Ask cgraph if the global variable really is to be emitted.
22540 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22541 varpool_node *node = varpool_node::get (entry->var_decl);
22542 if (node && node->definition)
22543 {
22544 die->die_perennial_p = 1;
22545 /* Keep the parent DIEs as well. */
22546 while ((die = die->die_parent) && die->die_perennial_p == 0)
22547 die->die_perennial_p = 1;
22548 }
22549 }
22550 return 1;
22551 }
22552
22553 /* Mark all members of used_types_hash as perennial. */
22554
22555 static void
22556 premark_used_types (struct function *fun)
22557 {
22558 if (fun && fun->used_types_hash)
22559 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22560 }
22561
22562 /* Mark all members of types_used_by_vars_entry as perennial. */
22563
22564 static void
22565 premark_types_used_by_global_vars (void)
22566 {
22567 if (types_used_by_vars_hash)
22568 types_used_by_vars_hash
22569 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22570 }
22571
22572 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22573 for CA_LOC call arg loc node. */
22574
22575 static dw_die_ref
22576 gen_call_site_die (tree decl, dw_die_ref subr_die,
22577 struct call_arg_loc_node *ca_loc)
22578 {
22579 dw_die_ref stmt_die = NULL, die;
22580 tree block = ca_loc->block;
22581
22582 while (block
22583 && block != DECL_INITIAL (decl)
22584 && TREE_CODE (block) == BLOCK)
22585 {
22586 stmt_die = lookup_block_die (block);
22587 if (stmt_die)
22588 break;
22589 block = BLOCK_SUPERCONTEXT (block);
22590 }
22591 if (stmt_die == NULL)
22592 stmt_die = subr_die;
22593 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22594 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22595 if (ca_loc->tail_call_p)
22596 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22597 if (ca_loc->symbol_ref)
22598 {
22599 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22600 if (tdie)
22601 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22602 else
22603 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22604 false);
22605 }
22606 return die;
22607 }
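/* Illustrative example (simplified): for a call `foo (x);' in bar(),
   the DIE built above becomes a child of the innermost known BLOCK DIE
   (or of bar's DW_TAG_subprogram), carries DW_AT_call_return_pc at the
   label following the call, DW_AT_call_tail_call if it was a tail
   call, and DW_AT_call_origin referring to foo's DIE when the callee
   is statically known; DW_TAG_call_site_parameter children are added
   by the caller of this function.  */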
22608
22609 /* Generate a DIE to represent a declared function (either file-scope or
22610 block-local). */
22611
22612 static void
22613 gen_subprogram_die (tree decl, dw_die_ref context_die)
22614 {
22615 tree origin = decl_ultimate_origin (decl);
22616 dw_die_ref subr_die;
22617 dw_die_ref old_die = lookup_decl_die (decl);
22618
22619 /* This function gets called multiple times for different stages of
22620 the debug process. For example, for func() in this code:
22621
22622 namespace S
22623 {
22624 void func() { ... }
22625 }
22626
22627 ...we get called 4 times. Twice in early debug and twice in
22628 late debug:
22629
22630 Early debug
22631 -----------
22632
22633 1. Once while generating func() within the namespace. This is
22634 the declaration. The declaration bit below is set, as the
22635 context is the namespace.
22636
22637 A new DIE will be generated with DW_AT_declaration set.
22638
22639 2. Once for func() itself. This is the specification. The
22640 declaration bit below is clear as the context is the CU.
22641
22642 We will use the cached DIE from (1) to create a new DIE with
22643 DW_AT_specification pointing to the declaration in (1).
22644
22645 Late debug via rest_of_handle_final()
22646 -------------------------------------
22647
22648 3. Once generating func() within the namespace. This is also the
22649 declaration, as in (1), but this time we will early exit below
22650 as we have a cached DIE and a declaration needs no additional
22651 annotations (no locations), as the source declaration line
22652 info is enough.
22653
22654 4. Once for func() itself. As in (2), this is the specification,
22655 but this time we will re-use the cached DIE, and just annotate
22656 it with the location information that should now be available.
22657
22658 For something without namespaces, but with abstract instances, we
22659 are also called multiple times:
22660
22661 class Base
22662 {
22663 public:
22664 Base (); // constructor declaration (1)
22665 };
22666
22667 Base::Base () { } // constructor specification (2)
22668
22669 Early debug
22670 -----------
22671
22672 1. Once for the Base() constructor by virtue of it being a
22673 member of the Base class. This is done via
22674 rest_of_type_compilation.
22675
22676 This is a declaration, so a new DIE will be created with
22677 DW_AT_declaration.
22678
22679 2. Once for the Base() constructor definition, but this time
22680 while generating the abstract instance of the base
22681 constructor (__base_ctor) which is being generated via early
22682 debug of reachable functions.
22683
22684 Even though we have a cached version of the declaration (1),
22685 we will create a DW_AT_specification of the declaration DIE
22686 in (1).
22687
22688 3. Once for the __base_ctor itself, but this time, we generate
22689 a DW_AT_abstract_origin version of the DW_AT_specification in
22690 (2).
22691
22692 Late debug via rest_of_handle_final
22693 -----------------------------------
22694
22695 4. One final time for the __base_ctor (which will have a cached
22696 DIE with DW_AT_abstract_origin created in (3)). This time,
22697 we will just annotate the location information now
22698 available.
22699 */
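/* Purely illustrative sketch of the DWARF that results for the
   namespace example above (simplified, most attributes omitted):

	DW_TAG_compile_unit
	  DW_TAG_namespace "S"
	    DW_TAG_subprogram "func"   <- (1)/(3): has DW_AT_declaration
	  DW_TAG_subprogram            <- (2)/(4): has DW_AT_specification
					  pointing at the DIE above, plus
					  DW_AT_low_pc/DW_AT_high_pc added
					  in late debug.  */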
22700 int declaration = (current_function_decl != decl
22701 || class_or_namespace_scope_p (context_die));
22702
22703 /* A declaration that has been previously dumped needs no
22704 additional information. */
22705 if (old_die && declaration)
22706 return;
22707
22708 /* Now that the C++ front end lazily declares artificial member fns, we
22709 might need to retrofit the declaration into its class. */
22710 if (!declaration && !origin && !old_die
22711 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22712 && !class_or_namespace_scope_p (context_die)
22713 && debug_info_level > DINFO_LEVEL_TERSE)
22714 old_die = force_decl_die (decl);
22715
22716 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22717 if (origin != NULL)
22718 {
22719 gcc_assert (!declaration || local_scope_p (context_die));
22720
22721 /* Fixup die_parent for the abstract instance of a nested
22722 inline function. */
22723 if (old_die && old_die->die_parent == NULL)
22724 add_child_die (context_die, old_die);
22725
22726 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22727 {
22728 /* If we have a DW_AT_abstract_origin we have a working
22729 cached version. */
22730 subr_die = old_die;
22731 }
22732 else
22733 {
22734 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22735 add_abstract_origin_attribute (subr_die, origin);
22736 /* This is where the actual code for a cloned function is.
22737 Let's emit the linkage name attribute for it. This helps
22738 debuggers to, e.g., set breakpoints in
22739 constructors/destructors when the user asks "break
22740 K::K". */
22741 add_linkage_name (subr_die, decl);
22742 }
22743 }
22744 /* A cached copy, possibly from early dwarf generation. Reuse as
22745 much as possible. */
22746 else if (old_die)
22747 {
22748 if (!get_AT_flag (old_die, DW_AT_declaration)
22749 /* We can have a normal definition following an inline one in the
22750 case of redefinition of GNU C extern inlines.
22751 It seems reasonable to use AT_specification in this case. */
22752 && !get_AT (old_die, DW_AT_inline))
22753 {
22754 /* Detect and ignore this case, where we are trying to output
22755 something we have already output. */
22756 if (get_AT (old_die, DW_AT_low_pc)
22757 || get_AT (old_die, DW_AT_ranges))
22758 return;
22759
22760 /* If we have no location information, this must be a
22761 partially generated DIE from early dwarf generation.
22762 Fall through and generate it. */
22763 }
22764
22765 /* If the definition comes from the same place as the declaration,
22766 maybe use the old DIE. We always want the DIE for this function
22767 that has the *_pc attributes to be under comp_unit_die so the
22768 debugger can find it. We also need to do this for abstract
22769 instances of inlines, since the spec requires the out-of-line copy
22770 to have the same parent. For local class methods, this doesn't
22771 apply; we just use the old DIE. */
22772 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22773 struct dwarf_file_data * file_index = lookup_filename (s.file);
22774 if (((is_unit_die (old_die->die_parent)
22775 /* This condition fixes the inconsistency/ICE with the
22776 following Fortran test (or some derivative thereof) while
22777 building libgfortran:
22778
22779 module some_m
22780 contains
22781 logical function funky (FLAG)
22782 funky = .true.
22783 end function
22784 end module
22785 */
22786 || (old_die->die_parent
22787 && old_die->die_parent->die_tag == DW_TAG_module)
22788 || local_scope_p (old_die->die_parent)
22789 || context_die == NULL)
22790 && (DECL_ARTIFICIAL (decl)
22791 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22792 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22793 == (unsigned) s.line)
22794 && (!debug_column_info
22795 || s.column == 0
22796 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22797 == (unsigned) s.column)))))
22798 /* With LTO if there's an abstract instance for
22799 the old DIE, this is a concrete instance and
22800 thus re-use the DIE. */
22801 || get_AT (old_die, DW_AT_abstract_origin))
22802 {
22803 subr_die = old_die;
22804
22805 /* Clear out the declaration attribute, but leave the
22806 parameters so they can be augmented with location
22807 information later. Unless this was a declaration, in
22808 which case, wipe out the nameless parameters and recreate
22809 them further down. */
22810 if (remove_AT (subr_die, DW_AT_declaration))
22811 {
22812
22813 remove_AT (subr_die, DW_AT_object_pointer);
22814 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22815 }
22816 }
22817 /* Make a specification pointing to the previously built
22818 declaration. */
22819 else
22820 {
22821 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22822 add_AT_specification (subr_die, old_die);
22823 add_pubname (decl, subr_die);
22824 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22825 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22826 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22827 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22828 if (debug_column_info
22829 && s.column
22830 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22831 != (unsigned) s.column))
22832 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22833
22834 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22835 emit the real type on the definition die. */
22836 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22837 {
22838 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22839 if (die == auto_die || die == decltype_auto_die)
22840 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22841 TYPE_UNQUALIFIED, false, context_die);
22842 }
22843
22844 /* When we process the method declaration, we haven't seen
22845 the out-of-class defaulted definition yet, so we have to
22846 recheck now. */
22847 if ((dwarf_version >= 5 || ! dwarf_strict)
22848 && !get_AT (subr_die, DW_AT_defaulted))
22849 {
22850 int defaulted
22851 = lang_hooks.decls.decl_dwarf_attribute (decl,
22852 DW_AT_defaulted);
22853 if (defaulted != -1)
22854 {
22855 /* Other values must have been handled before. */
22856 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22857 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22858 }
22859 }
22860 }
22861 }
22862 /* Create a fresh DIE for anything else. */
22863 else
22864 {
22865 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22866
22867 if (TREE_PUBLIC (decl))
22868 add_AT_flag (subr_die, DW_AT_external, 1);
22869
22870 add_name_and_src_coords_attributes (subr_die, decl);
22871 add_pubname (decl, subr_die);
22872 if (debug_info_level > DINFO_LEVEL_TERSE)
22873 {
22874 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22875 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22876 TYPE_UNQUALIFIED, false, context_die);
22877 }
22878
22879 add_pure_or_virtual_attribute (subr_die, decl);
22880 if (DECL_ARTIFICIAL (decl))
22881 add_AT_flag (subr_die, DW_AT_artificial, 1);
22882
22883 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22884 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22885
22886 add_alignment_attribute (subr_die, decl);
22887
22888 add_accessibility_attribute (subr_die, decl);
22889 }
22890
22891 /* Unless we have an existing non-declaration DIE, equate the new
22892 DIE. */
22893 if (!old_die || is_declaration_die (old_die))
22894 equate_decl_number_to_die (decl, subr_die);
22895
22896 if (declaration)
22897 {
22898 if (!old_die || !get_AT (old_die, DW_AT_inline))
22899 {
22900 add_AT_flag (subr_die, DW_AT_declaration, 1);
22901
22902 /* If this is an explicit function declaration then generate
22903 a DW_AT_explicit attribute. */
22904 if ((dwarf_version >= 3 || !dwarf_strict)
22905 && lang_hooks.decls.decl_dwarf_attribute (decl,
22906 DW_AT_explicit) == 1)
22907 add_AT_flag (subr_die, DW_AT_explicit, 1);
22908
22909 /* If this is a C++11 deleted special function member then generate
22910 a DW_AT_deleted attribute. */
22911 if ((dwarf_version >= 5 || !dwarf_strict)
22912 && lang_hooks.decls.decl_dwarf_attribute (decl,
22913 DW_AT_deleted) == 1)
22914 add_AT_flag (subr_die, DW_AT_deleted, 1);
22915
22916 /* If this is a C++11 defaulted special function member then
22917 generate a DW_AT_defaulted attribute. */
22918 if (dwarf_version >= 5 || !dwarf_strict)
22919 {
22920 int defaulted
22921 = lang_hooks.decls.decl_dwarf_attribute (decl,
22922 DW_AT_defaulted);
22923 if (defaulted != -1)
22924 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22925 }
22926
22927 /* If this is a C++11 non-static member function with & ref-qualifier
22928 then generate a DW_AT_reference attribute. */
22929 if ((dwarf_version >= 5 || !dwarf_strict)
22930 && lang_hooks.decls.decl_dwarf_attribute (decl,
22931 DW_AT_reference) == 1)
22932 add_AT_flag (subr_die, DW_AT_reference, 1);
22933
22934 /* If this is a C++11 non-static member function with &&
22935 ref-qualifier then generate a DW_AT_reference attribute. */
22936 if ((dwarf_version >= 5 || !dwarf_strict)
22937 && lang_hooks.decls.decl_dwarf_attribute (decl,
22938 DW_AT_rvalue_reference)
22939 == 1)
22940 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22941 }
22942 }
22943 /* For non DECL_EXTERNALs, if range information is available, fill
22944 the DIE with it. */
22945 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22946 {
22947 HOST_WIDE_INT cfa_fb_offset;
22948
22949 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22950
22951 if (!crtl->has_bb_partition)
22952 {
22953 dw_fde_ref fde = fun->fde;
22954 if (fde->dw_fde_begin)
22955 {
22956 /* We have already generated the labels. */
22957 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22958 fde->dw_fde_end, false);
22959 }
22960 else
22961 {
22962 /* Create start/end labels and add the range. */
22963 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22964 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22965 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22966 current_function_funcdef_no);
22967 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22968 current_function_funcdef_no);
22969 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22970 false);
22971 }
22972
22973 #if VMS_DEBUGGING_INFO
22974 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22975 Section 2.3 Prologue and Epilogue Attributes:
22976 When a breakpoint is set on entry to a function, it is generally
22977 desirable for execution to be suspended, not on the very first
22978 instruction of the function, but rather at a point after the
22979 function's frame has been set up, after any language defined local
22980 declaration processing has been completed, and before execution of
22981 the first statement of the function begins. Debuggers generally
22982 cannot properly determine where this point is. Similarly for a
22983 breakpoint set on exit from a function. The prologue and epilogue
22984 attributes allow a compiler to communicate the location(s) to use. */
22985
22986 {
22987 if (fde->dw_fde_vms_end_prologue)
22988 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22989 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22990
22991 if (fde->dw_fde_vms_begin_epilogue)
22992 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22993 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22994 }
22995 #endif
22996
22997 }
22998 else
22999 {
23000 /* Generate pubnames entries for the split function code ranges. */
23001 dw_fde_ref fde = fun->fde;
23002
23003 if (fde->dw_fde_second_begin)
23004 {
23005 if (dwarf_version >= 3 || !dwarf_strict)
23006 {
23007 /* We should use ranges for non-contiguous code section
23008 addresses. Use the actual code range for the initial
23009 section, since the HOT/COLD labels might precede an
23010 alignment offset. */
23011 bool range_list_added = false;
23012 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23013 fde->dw_fde_end, &range_list_added,
23014 false);
23015 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23016 fde->dw_fde_second_end,
23017 &range_list_added, false);
23018 if (range_list_added)
23019 add_ranges (NULL);
23020 }
23021 else
23022 {
23023 /* There is no real support in DW2 for this, so we make
23024 a work-around. First, emit the pub name for the segment
23025 containing the function label. Then make and emit a
23026 simplified subprogram DIE for the second segment with the
23027 name prefixed by __second_sect_of_ (matching the concat below).
23028 We use the same linkage name for the second DIE so that gdb will
23029 find both sections when given "b foo". */
23030 const char *name = NULL;
23031 tree decl_name = DECL_NAME (decl);
23032 dw_die_ref seg_die;
23033
23034 /* Do the 'primary' section. */
23035 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23036 fde->dw_fde_end, false);
23037
23038 /* Build a minimal DIE for the secondary section. */
23039 seg_die = new_die (DW_TAG_subprogram,
23040 subr_die->die_parent, decl);
23041
23042 if (TREE_PUBLIC (decl))
23043 add_AT_flag (seg_die, DW_AT_external, 1);
23044
23045 if (decl_name != NULL
23046 && IDENTIFIER_POINTER (decl_name) != NULL)
23047 {
23048 name = dwarf2_name (decl, 1);
23049 if (! DECL_ARTIFICIAL (decl))
23050 add_src_coords_attributes (seg_die, decl);
23051
23052 add_linkage_name (seg_die, decl);
23053 }
23054 gcc_assert (name != NULL);
23055 add_pure_or_virtual_attribute (seg_die, decl);
23056 if (DECL_ARTIFICIAL (decl))
23057 add_AT_flag (seg_die, DW_AT_artificial, 1);
23058
23059 name = concat ("__second_sect_of_", name, NULL);
23060 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23061 fde->dw_fde_second_end, false);
23062 add_name_attribute (seg_die, name);
23063 if (want_pubnames ())
23064 add_pubname_string (name, seg_die);
23065 }
23066 }
23067 else
23068 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23069 false);
23070 }
23071
23072 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23073
23074 /* We define the "frame base" as the function's CFA. This is more
23075 convenient for several reasons: (1) It's stable across the prologue
23076 and epilogue, which makes it better than just a frame pointer,
23077 (2) With dwarf3, there exists a one-byte encoding that allows us
23078 to reference the .debug_frame data by proxy, but failing that,
23079 (3) We can at least reuse the code inspection and interpretation
23080 code that determines the CFA position at various points in the
23081 function. */
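/* A concrete, merely illustrative example: with DWARF 3+ and
   DWARF-based unwind info, readelf typically shows the result of the
   branch below as

	DW_AT_frame_base : 1 byte block: 9c  (DW_OP_call_frame_cfa)

   whereas the fallback branch materializes the CFA as a location
   list derived from the frame-related insns.  */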
23082 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23083 {
23084 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23085 add_AT_loc (subr_die, DW_AT_frame_base, op);
23086 }
23087 else
23088 {
23089 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23090 if (list->dw_loc_next)
23091 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23092 else
23093 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23094 }
23095
23096 /* Compute a displacement from the "steady-state frame pointer" to
23097 the CFA. The former is what all stack slots and argument slots
23098 will reference in the rtl; the latter is what we've told the
23099 debugger about. We'll need to adjust all frame_base references
23100 by this displacement. */
23101 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23102
23103 if (fun->static_chain_decl)
23104 {
23105 /* DWARF requires here a location expression that computes the
23106 address of the enclosing subprogram's frame base. The machinery
23107 in tree-nested.c is supposed to store this specific address in the
23108 last field of the FRAME record. */
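/* Illustrative only: for a GNU C nested function

	int outer (int n) { int inner (void) { return n; } return inner (); }

   tree-nested.c gives `inner' a static chain pointing at outer's FRAME
   object, whose last field holds outer's frame base; the expression
   built below dereferences the chain and reads that field, so a
   debugger can evaluate DW_AT_static_link to find the enclosing
   frame.  */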
23109 const tree frame_type
23110 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23111 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23112
23113 tree fb_expr
23114 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23115 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23116 fb_expr, fb_decl, NULL_TREE);
23117
23118 add_AT_location_description (subr_die, DW_AT_static_link,
23119 loc_list_from_tree (fb_expr, 0, NULL));
23120 }
23121
23122 resolve_variable_values ();
23123 }
23124
23125 /* Generate child DIEs for template parameters. */
23126 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23127 gen_generic_params_dies (decl);
23128
23129 /* Now output descriptions of the arguments for this function. This gets
23130 (unnecessarily?) complex because the DECL_ARGUMENTS list
23131 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23132 `...' at the end of the formal parameter list. In order to find out if
23133 there was a trailing ellipsis or not, we must instead look at the type
23134 associated with the FUNCTION_DECL. This will be a node of type
23135 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23136 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23137 an ellipsis at the end. */
23138
23139 /* In the case where we are describing a mere function declaration, all we
23140 need to do here (and all we *can* do here) is to describe the *types* of
23141 its formal parameters. */
23142 if (debug_info_level <= DINFO_LEVEL_TERSE)
23143 ;
23144 else if (declaration)
23145 gen_formal_types_die (decl, subr_die);
23146 else
23147 {
23148 /* Generate DIEs to represent all known formal parameters. */
23149 tree parm = DECL_ARGUMENTS (decl);
23150 tree generic_decl = early_dwarf
23151 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23152 tree generic_decl_parm = generic_decl
23153 ? DECL_ARGUMENTS (generic_decl)
23154 : NULL;
23155
23156 /* Now we want to walk the list of parameters of the function and
23157 emit their relevant DIEs.
23158
23159 We consider the case of DECL being an instance of a generic function
23160 as well as it being a normal function.
23161
23162 If DECL is an instance of a generic function we walk the
23163 parameters of the generic function declaration _and_ the parameters of
23164 DECL itself. This is useful because we want to emit specific DIEs for
23165 function parameter packs and those are declared as part of the
23166 generic function declaration. In that particular case,
23167 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23168 That DIE has child DIEs representing the set of arguments
23169 of the pack. Note that the set of pack arguments can be empty.
23170 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have
23171 any child DIEs.
23172
23173 Otherwise, we just consider the parameters of DECL. */
23174 while (generic_decl_parm || parm)
23175 {
23176 if (generic_decl_parm
23177 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23178 gen_formal_parameter_pack_die (generic_decl_parm,
23179 parm, subr_die,
23180 &parm);
23181 else if (parm)
23182 {
23183 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23184
23185 if (early_dwarf
23186 && parm == DECL_ARGUMENTS (decl)
23187 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23188 && parm_die
23189 && (dwarf_version >= 3 || !dwarf_strict))
23190 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23191
23192 parm = DECL_CHAIN (parm);
23193 }
23194 else if (parm)
23195 parm = DECL_CHAIN (parm);
23196
23197 if (generic_decl_parm)
23198 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23199 }
23200
23201 /* Decide whether we need an unspecified_parameters DIE at the end.
23202 There are two more cases to do this for: 1) the ANSI `...' declaration -
23203 this is detectable when the end of the arg list is not a
23204 void_type_node; 2) an unprototyped function declaration (not a
23205 definition). This just means that we have no info about the
23206 parameters at all. */
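/* For example (illustrative): `int f (int, ...);' is prototyped and
   stdarg, so it gets a DW_TAG_unspecified_parameters child after its
   formal parameters; `int g (int);' gets none; and an unprototyped
   `int h ();' that is only declared (DECL_INITIAL is NULL) also gets
   one, since nothing is known about its parameters.  */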
23207 if (early_dwarf)
23208 {
23209 if (prototype_p (TREE_TYPE (decl)))
23210 {
23211 /* This is the prototyped case; check for a trailing `...'. */
23212 if (stdarg_p (TREE_TYPE (decl)))
23213 gen_unspecified_parameters_die (decl, subr_die);
23214 }
23215 else if (DECL_INITIAL (decl) == NULL_TREE)
23216 gen_unspecified_parameters_die (decl, subr_die);
23217 }
23218 }
23219
23220 if (subr_die != old_die)
23221 /* Add the calling convention attribute if requested. */
23222 add_calling_convention_attribute (subr_die, decl);
23223
23224 /* Output Dwarf info for all of the stuff within the body of the function
23225 (if it has one - it may be just a declaration).
23226
23227 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23228 a function. This BLOCK actually represents the outermost binding contour
23229 for the function, i.e. the contour in which the function's formal
23230 parameters and labels get declared. Curiously, it appears that the front
23231 end doesn't actually put the PARM_DECL nodes for the current function onto
23232 the BLOCK_VARS list for this outer scope; instead they are strung off of
23233 the DECL_ARGUMENTS list for the function.
23234
23235 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23236 the LABEL_DECL nodes for the function however, and we output DWARF info
23237 for those in decls_for_scope. Just within the `outer_scope' there will be
23238 a BLOCK node representing the function's outermost pair of curly braces,
23239 and any blocks used for the base and member initializers of a C++
23240 constructor function. */
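/* Illustrative sketch: for

	void f (int p) { int x; { int y; } }

   DECL_INITIAL (f) is `outer_scope'; `p' is found through
   DECL_ARGUMENTS (f) rather than BLOCK_VARS (outer_scope), while the
   BLOCK for the outermost braces (declaring `x') and its sub-BLOCK
   (declaring `y') hang off BLOCK_SUBBLOCKS and are handled by
   decls_for_scope below.  */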
23241 tree outer_scope = DECL_INITIAL (decl);
23242 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23243 {
23244 int call_site_note_count = 0;
23245 int tail_call_site_note_count = 0;
23246
23247 /* Emit a DW_TAG_variable DIE for a named return value. */
23248 if (DECL_NAME (DECL_RESULT (decl)))
23249 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23250
23251 /* The first time through decls_for_scope we will generate the
23252 DIEs for the locals. The second time, we fill in the
23253 location info. */
23254 decls_for_scope (outer_scope, subr_die);
23255
23256 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23257 {
23258 struct call_arg_loc_node *ca_loc;
23259 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23260 {
23261 dw_die_ref die = NULL;
23262 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23263 rtx arg, next_arg;
23264 tree arg_decl = NULL_TREE;
23265
23266 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23267 ? XEXP (ca_loc->call_arg_loc_note, 0)
23268 : NULL_RTX);
23269 arg; arg = next_arg)
23270 {
23271 dw_loc_descr_ref reg, val;
23272 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23273 dw_die_ref cdie, tdie = NULL;
23274
23275 next_arg = XEXP (arg, 1);
23276 if (REG_P (XEXP (XEXP (arg, 0), 0))
23277 && next_arg
23278 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23279 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23280 && REGNO (XEXP (XEXP (arg, 0), 0))
23281 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23282 next_arg = XEXP (next_arg, 1);
23283 if (mode == VOIDmode)
23284 {
23285 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23286 if (mode == VOIDmode)
23287 mode = GET_MODE (XEXP (arg, 0));
23288 }
23289 if (mode == VOIDmode || mode == BLKmode)
23290 continue;
23291 /* Get dynamic information about call target only if we
23292 have no static information: we cannot generate both
23293 DW_AT_call_origin and DW_AT_call_target
23294 attributes. */
23295 if (ca_loc->symbol_ref == NULL_RTX)
23296 {
23297 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23298 {
23299 tloc = XEXP (XEXP (arg, 0), 1);
23300 continue;
23301 }
23302 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23303 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23304 {
23305 tlocc = XEXP (XEXP (arg, 0), 1);
23306 continue;
23307 }
23308 }
23309 reg = NULL;
23310 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23311 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23312 VAR_INIT_STATUS_INITIALIZED);
23313 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23314 {
23315 rtx mem = XEXP (XEXP (arg, 0), 0);
23316 reg = mem_loc_descriptor (XEXP (mem, 0),
23317 get_address_mode (mem),
23318 GET_MODE (mem),
23319 VAR_INIT_STATUS_INITIALIZED);
23320 }
23321 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23322 == DEBUG_PARAMETER_REF)
23323 {
23324 tree tdecl
23325 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23326 tdie = lookup_decl_die (tdecl);
23327 if (tdie == NULL)
23328 continue;
23329 arg_decl = tdecl;
23330 }
23331 else
23332 continue;
23333 if (reg == NULL
23334 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23335 != DEBUG_PARAMETER_REF)
23336 continue;
23337 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23338 VOIDmode,
23339 VAR_INIT_STATUS_INITIALIZED);
23340 if (val == NULL)
23341 continue;
23342 if (die == NULL)
23343 die = gen_call_site_die (decl, subr_die, ca_loc);
23344 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23345 NULL_TREE);
23346 add_desc_attribute (cdie, arg_decl);
23347 if (reg != NULL)
23348 add_AT_loc (cdie, DW_AT_location, reg);
23349 else if (tdie != NULL)
23350 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23351 tdie);
23352 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23353 if (next_arg != XEXP (arg, 1))
23354 {
23355 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23356 if (mode == VOIDmode)
23357 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23358 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23359 0), 1),
23360 mode, VOIDmode,
23361 VAR_INIT_STATUS_INITIALIZED);
23362 if (val != NULL)
23363 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23364 val);
23365 }
23366 }
23367 if (die == NULL
23368 && (ca_loc->symbol_ref || tloc))
23369 die = gen_call_site_die (decl, subr_die, ca_loc);
23370 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23371 {
23372 dw_loc_descr_ref tval = NULL;
23373
23374 if (tloc != NULL_RTX)
23375 tval = mem_loc_descriptor (tloc,
23376 GET_MODE (tloc) == VOIDmode
23377 ? Pmode : GET_MODE (tloc),
23378 VOIDmode,
23379 VAR_INIT_STATUS_INITIALIZED);
23380 if (tval)
23381 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23382 else if (tlocc != NULL_RTX)
23383 {
23384 tval = mem_loc_descriptor (tlocc,
23385 GET_MODE (tlocc) == VOIDmode
23386 ? Pmode : GET_MODE (tlocc),
23387 VOIDmode,
23388 VAR_INIT_STATUS_INITIALIZED);
23389 if (tval)
23390 add_AT_loc (die,
23391 dwarf_AT (DW_AT_call_target_clobbered),
23392 tval);
23393 }
23394 }
23395 if (die != NULL)
23396 {
23397 call_site_note_count++;
23398 if (ca_loc->tail_call_p)
23399 tail_call_site_note_count++;
23400 }
23401 }
23402 }
23403 call_arg_locations = NULL;
23404 call_arg_loc_last = NULL;
23405 if (tail_call_site_count >= 0
23406 && tail_call_site_count == tail_call_site_note_count
23407 && (!dwarf_strict || dwarf_version >= 5))
23408 {
23409 if (call_site_count >= 0
23410 && call_site_count == call_site_note_count)
23411 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23412 else
23413 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23414 }
23415 call_site_count = -1;
23416 tail_call_site_count = -1;
23417 }
23418
23419 /* Mark used types after we have created DIEs for the functions scopes. */
23420 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23421 }
23422
23423 /* Returns a hash value for X (which really is a die_struct). */
23424
23425 hashval_t
23426 block_die_hasher::hash (die_struct *d)
23427 {
23428 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23429 }
23430
23431 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23432 as decl_id and die_parent of die_struct Y. */
23433
23434 bool
23435 block_die_hasher::equal (die_struct *x, die_struct *y)
23436 {
23437 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23438 }
23439
23440 /* Hold information about markers for inlined entry points. */
23441 struct GTY ((for_user)) inline_entry_data
23442 {
23443 /* The block that's the inlined_function_outer_scope for an inlined
23444 function. */
23445 tree block;
23446
23447 /* The label at the inlined entry point. */
23448 const char *label_pfx;
23449 unsigned int label_num;
23450
23451 /* The view number to be used as the inlined entry point. */
23452 var_loc_view view;
23453 };
23454
23455 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23456 {
23457 typedef tree compare_type;
23458 static inline hashval_t hash (const inline_entry_data *);
23459 static inline bool equal (const inline_entry_data *, const_tree);
23460 };
23461
23462 /* Hash table routines for inline_entry_data. */
23463
23464 inline hashval_t
23465 inline_entry_data_hasher::hash (const inline_entry_data *data)
23466 {
23467 return htab_hash_pointer (data->block);
23468 }
23469
23470 inline bool
23471 inline_entry_data_hasher::equal (const inline_entry_data *data,
23472 const_tree block)
23473 {
23474 return data->block == block;
23475 }
23476
23477 /* Inlined entry points pending DIE creation in this compilation unit. */
23478
23479 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
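/* Illustrative note: entries are keyed by the inlined function's outer
   scope BLOCK; add_high_low_attributes below consumes them to emit
   DW_AT_entry_pc (and, with location views enabled, a
   DW_AT_GNU_entry_view) on the corresponding DW_TAG_inlined_subroutine
   DIE, clearing each slot once it has been used.  */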
23480
23481
23482 /* Return TRUE if DECL, which may have been previously generated as
23483 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23484 true if decl (or its origin) is either an extern declaration or a
23485 class/namespace scoped declaration.
23486
23487 The declare_in_namespace support causes us to get two DIEs for one
23488 variable, both of which are declarations. We want to avoid
23489 considering one to be a specification, so we must test for
23490 DECLARATION and DW_AT_declaration. */
23491 static inline bool
23492 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23493 {
23494 return (old_die && TREE_STATIC (decl) && !declaration
23495 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23496 }
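/* For example (illustrative):

	struct S { static int i; };
	int S::i;

   The in-class declaration is dumped with DW_AT_declaration set; when
   gen_variable_die later processes the namespace-scope definition of
   S::i, the test above succeeds and the new DIE gets a
   DW_AT_specification pointing back at that declaration DIE.  */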
23497
23498 /* Return true if DECL is a local static. */
23499
23500 static inline bool
23501 local_function_static (tree decl)
23502 {
23503 gcc_assert (VAR_P (decl));
23504 return TREE_STATIC (decl)
23505 && DECL_CONTEXT (decl)
23506 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23507 }
23508
23509 /* Generate a DIE to represent a declared data object.
23510 Either DECL or ORIGIN must be non-null. */
23511
23512 static void
23513 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23514 {
23515 HOST_WIDE_INT off = 0;
23516 tree com_decl;
23517 tree decl_or_origin = decl ? decl : origin;
23518 tree ultimate_origin;
23519 dw_die_ref var_die;
23520 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23521 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23522 || class_or_namespace_scope_p (context_die));
23523 bool specialization_p = false;
23524 bool no_linkage_name = false;
23525
23526 /* While C++ inline static data members have definitions inside of the
23527 class, force the first DIE to be a declaration, then let gen_member_die
23528 reparent it to the class context and call gen_variable_die again
23529 to create the outside of the class DIE for the definition. */
23530 if (!declaration
23531 && old_die == NULL
23532 && decl
23533 && DECL_CONTEXT (decl)
23534 && TYPE_P (DECL_CONTEXT (decl))
23535 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23536 {
23537 declaration = true;
23538 if (dwarf_version < 5)
23539 no_linkage_name = true;
23540 }
23541
23542 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23543 if (decl || ultimate_origin)
23544 origin = ultimate_origin;
23545 com_decl = fortran_common (decl_or_origin, &off);
23546
23547 /* Symbol in common gets emitted as a child of the common block, in the form
23548 of a data member. */
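/* Illustrative only: for Fortran source along the lines of

	integer a, b
	common /blk/ a, b

   this path emits one DW_TAG_common_block DIE for `blk' with the
   block's address as DW_AT_location, and a DW_TAG_variable child per
   member whose location is that address plus OFF, the member's byte
   offset computed by fortran_common above.  */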
23549 if (com_decl)
23550 {
23551 dw_die_ref com_die;
23552 dw_loc_list_ref loc = NULL;
23553 die_node com_die_arg;
23554
23555 var_die = lookup_decl_die (decl_or_origin);
23556 if (var_die)
23557 {
23558 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23559 {
23560 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23561 if (loc)
23562 {
23563 if (off)
23564 {
23565 /* Optimize the common case. */
23566 if (single_element_loc_list_p (loc)
23567 && loc->expr->dw_loc_opc == DW_OP_addr
23568 && loc->expr->dw_loc_next == NULL
23569 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23570 == SYMBOL_REF)
23571 {
23572 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23573 loc->expr->dw_loc_oprnd1.v.val_addr
23574 = plus_constant (GET_MODE (x), x , off);
23575 }
23576 else
23577 loc_list_plus_const (loc, off);
23578 }
23579 add_AT_location_description (var_die, DW_AT_location, loc);
23580 remove_AT (var_die, DW_AT_declaration);
23581 }
23582 }
23583 return;
23584 }
23585
23586 if (common_block_die_table == NULL)
23587 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23588
23589 com_die_arg.decl_id = DECL_UID (com_decl);
23590 com_die_arg.die_parent = context_die;
23591 com_die = common_block_die_table->find (&com_die_arg);
23592 if (! early_dwarf)
23593 loc = loc_list_from_tree (com_decl, 2, NULL);
23594 if (com_die == NULL)
23595 {
23596 const char *cnam
23597 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23598 die_node **slot;
23599
23600 com_die = new_die (DW_TAG_common_block, context_die, decl);
23601 add_name_and_src_coords_attributes (com_die, com_decl);
23602 if (loc)
23603 {
23604 add_AT_location_description (com_die, DW_AT_location, loc);
23605 /* Avoid sharing the same loc descriptor between
23606 DW_TAG_common_block and DW_TAG_variable. */
23607 loc = loc_list_from_tree (com_decl, 2, NULL);
23608 }
23609 else if (DECL_EXTERNAL (decl_or_origin))
23610 add_AT_flag (com_die, DW_AT_declaration, 1);
23611 if (want_pubnames ())
23612 add_pubname_string (cnam, com_die); /* ??? needed? */
23613 com_die->decl_id = DECL_UID (com_decl);
23614 slot = common_block_die_table->find_slot (com_die, INSERT);
23615 *slot = com_die;
23616 }
23617 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23618 {
23619 add_AT_location_description (com_die, DW_AT_location, loc);
23620 loc = loc_list_from_tree (com_decl, 2, NULL);
23621 remove_AT (com_die, DW_AT_declaration);
23622 }
23623 var_die = new_die (DW_TAG_variable, com_die, decl);
23624 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23625 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23626 decl_quals (decl_or_origin), false,
23627 context_die);
23628 add_alignment_attribute (var_die, decl);
23629 add_AT_flag (var_die, DW_AT_external, 1);
23630 if (loc)
23631 {
23632 if (off)
23633 {
23634 /* Optimize the common case. */
23635 if (single_element_loc_list_p (loc)
23636 && loc->expr->dw_loc_opc == DW_OP_addr
23637 && loc->expr->dw_loc_next == NULL
23638 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23639 {
23640 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23641 loc->expr->dw_loc_oprnd1.v.val_addr
23642 = plus_constant (GET_MODE (x), x, off);
23643 }
23644 else
23645 loc_list_plus_const (loc, off);
23646 }
23647 add_AT_location_description (var_die, DW_AT_location, loc);
23648 }
23649 else if (DECL_EXTERNAL (decl_or_origin))
23650 add_AT_flag (var_die, DW_AT_declaration, 1);
23651 if (decl)
23652 equate_decl_number_to_die (decl, var_die);
23653 return;
23654 }
23655
23656 if (old_die)
23657 {
23658 if (declaration)
23659 {
23660 /* A declaration that has been previously dumped needs no
23661 further annotations, since it doesn't need location info on
23662 the second pass. */
23663 return;
23664 }
23665 else if (decl_will_get_specification_p (old_die, decl, declaration)
23666 && !get_AT (old_die, DW_AT_specification))
23667 {
23668 /* Fall-thru so we can make a new variable die along with a
23669 DW_AT_specification. */
23670 }
23671 else if (origin && old_die->die_parent != context_die)
23672 {
23673 /* If we will be creating an inlined instance, we need a
23674 new DIE that will get annotated with
23675 DW_AT_abstract_origin. */
23676 gcc_assert (!DECL_ABSTRACT_P (decl));
23677 }
23678 else
23679 {
23680 /* If a DIE was dumped early, it still needs location info.
23681 Skip to where we fill the location bits. */
23682 var_die = old_die;
23683
23684 /* ??? In LTRANS we cannot annotate early created variably
23685 modified type DIEs without copying them and adjusting all
23686 references to them. Thus we dumped them again. Also add a
23687 reference to them, but beware of a -g0 compile and -g link,
23688 in which case the reference will already be present. */
23689 tree type = TREE_TYPE (decl_or_origin);
23690 if (in_lto_p
23691 && ! get_AT (var_die, DW_AT_type)
23692 && variably_modified_type_p
23693 (type, decl_function_context (decl_or_origin)))
23694 {
23695 if (decl_by_reference_p (decl_or_origin))
23696 add_type_attribute (var_die, TREE_TYPE (type),
23697 TYPE_UNQUALIFIED, false, context_die);
23698 else
23699 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23700 false, context_die);
23701 }
23702
23703 goto gen_variable_die_location;
23704 }
23705 }
23706
23707 /* For static data members, the declaration in the class is supposed
23708 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23709 also in DWARF2; the specification should still be DW_TAG_variable
23710 referencing the DW_TAG_member DIE. */
23711 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23712 var_die = new_die (DW_TAG_member, context_die, decl);
23713 else
23714 var_die = new_die (DW_TAG_variable, context_die, decl);
23715
23716 if (origin != NULL)
23717 add_abstract_origin_attribute (var_die, origin);
23718
23719 /* Loop unrolling can create multiple blocks that refer to the same
23720 static variable, so we must test for the DW_AT_declaration flag.
23721
23722 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23723 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23724 sharing them.
23725
23726 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23727 else if (decl_will_get_specification_p (old_die, decl, declaration))
23728 {
23729 /* This is a definition of a C++ class level static. */
23730 add_AT_specification (var_die, old_die);
23731 specialization_p = true;
23732 if (DECL_NAME (decl))
23733 {
23734 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23735 struct dwarf_file_data * file_index = lookup_filename (s.file);
23736
23737 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23738 add_AT_file (var_die, DW_AT_decl_file, file_index);
23739
23740 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23741 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23742
23743 if (debug_column_info
23744 && s.column
23745 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23746 != (unsigned) s.column))
23747 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23748
23749 if (old_die->die_tag == DW_TAG_member)
23750 add_linkage_name (var_die, decl);
23751 }
23752 }
23753 else
23754 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23755
23756 if ((origin == NULL && !specialization_p)
23757 || (origin != NULL
23758 && !DECL_ABSTRACT_P (decl_or_origin)
23759 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23760 decl_function_context
23761 (decl_or_origin))))
23762 {
23763 tree type = TREE_TYPE (decl_or_origin);
23764
23765 if (decl_by_reference_p (decl_or_origin))
23766 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23767 context_die);
23768 else
23769 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23770 context_die);
23771 }
23772
23773 if (origin == NULL && !specialization_p)
23774 {
23775 if (TREE_PUBLIC (decl))
23776 add_AT_flag (var_die, DW_AT_external, 1);
23777
23778 if (DECL_ARTIFICIAL (decl))
23779 add_AT_flag (var_die, DW_AT_artificial, 1);
23780
23781 add_alignment_attribute (var_die, decl);
23782
23783 add_accessibility_attribute (var_die, decl);
23784 }
23785
23786 if (declaration)
23787 add_AT_flag (var_die, DW_AT_declaration, 1);
23788
23789 if (decl && (DECL_ABSTRACT_P (decl)
23790 || !old_die || is_declaration_die (old_die)))
23791 equate_decl_number_to_die (decl, var_die);
23792
23793 gen_variable_die_location:
23794 if (! declaration
23795 && (! DECL_ABSTRACT_P (decl_or_origin)
23796 /* Local static vars are shared between all clones/inlines,
23797 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23798 already set. */
23799 || (VAR_P (decl_or_origin)
23800 && TREE_STATIC (decl_or_origin)
23801 && DECL_RTL_SET_P (decl_or_origin))))
23802 {
23803 if (early_dwarf)
23804 add_pubname (decl_or_origin, var_die);
23805 else
23806 add_location_or_const_value_attribute (var_die, decl_or_origin,
23807 decl == NULL);
23808 }
23809 else
23810 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23811
23812 if ((dwarf_version >= 4 || !dwarf_strict)
23813 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23814 DW_AT_const_expr) == 1
23815 && !get_AT (var_die, DW_AT_const_expr)
23816 && !specialization_p)
23817 add_AT_flag (var_die, DW_AT_const_expr, 1);
23818
23819 if (!dwarf_strict)
23820 {
23821 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23822 DW_AT_inline);
23823 if (inl != -1
23824 && !get_AT (var_die, DW_AT_inline)
23825 && !specialization_p)
23826 add_AT_unsigned (var_die, DW_AT_inline, inl);
23827 }
23828 }
23829
23830 /* Generate a DIE to represent a named constant. */
23831
23832 static void
23833 gen_const_die (tree decl, dw_die_ref context_die)
23834 {
23835 dw_die_ref const_die;
23836 tree type = TREE_TYPE (decl);
23837
23838 const_die = lookup_decl_die (decl);
23839 if (const_die)
23840 return;
23841
23842 const_die = new_die (DW_TAG_constant, context_die, decl);
23843 equate_decl_number_to_die (decl, const_die);
23844 add_name_and_src_coords_attributes (const_die, decl);
23845 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23846 if (TREE_PUBLIC (decl))
23847 add_AT_flag (const_die, DW_AT_external, 1);
23848 if (DECL_ARTIFICIAL (decl))
23849 add_AT_flag (const_die, DW_AT_artificial, 1);
23850 tree_add_const_value_attribute_for_decl (const_die, decl);
23851 }
23852
23853 /* Generate a DIE to represent a label identifier. */
23854
23855 static void
23856 gen_label_die (tree decl, dw_die_ref context_die)
23857 {
23858 tree origin = decl_ultimate_origin (decl);
23859 dw_die_ref lbl_die = lookup_decl_die (decl);
23860 rtx insn;
23861 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23862
23863 if (!lbl_die)
23864 {
23865 lbl_die = new_die (DW_TAG_label, context_die, decl);
23866 equate_decl_number_to_die (decl, lbl_die);
23867
23868 if (origin != NULL)
23869 add_abstract_origin_attribute (lbl_die, origin);
23870 else
23871 add_name_and_src_coords_attributes (lbl_die, decl);
23872 }
23873
23874 if (DECL_ABSTRACT_P (decl))
23875 equate_decl_number_to_die (decl, lbl_die);
23876 else if (! early_dwarf)
23877 {
23878 insn = DECL_RTL_IF_SET (decl);
23879
23880 /* Deleted labels are programmer specified labels which have been
23881 eliminated because of various optimizations. We still emit them
23882 here so that it is possible to put breakpoints on them. */
23883 if (insn
23884 && (LABEL_P (insn)
23885 || ((NOTE_P (insn)
23886 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23887 {
23888 /* When optimization is enabled (via -O) some parts of the compiler
23889 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23890 represent source-level labels that were explicitly declared by
23891 the user. This really shouldn't be happening though, so catch
23892 it if it ever does happen. */
23893 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23894
23895 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23896 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23897 }
23898 else if (insn
23899 && NOTE_P (insn)
23900 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23901 && CODE_LABEL_NUMBER (insn) != -1)
23902 {
23903 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23904 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23905 }
23906 }
23907 }
23908
23909 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23910 attributes to the DIE for a block STMT, to describe where the inlined
23911 function was called from. This is similar to add_src_coords_attributes. */
23912
23913 static inline void
23914 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23915 {
23916 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23917
23918 if (dwarf_version >= 3 || !dwarf_strict)
23919 {
23920 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23921 add_AT_unsigned (die, DW_AT_call_line, s.line);
23922 if (debug_column_info && s.column)
23923 add_AT_unsigned (die, DW_AT_call_column, s.column);
23924 }
23925 }
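/* Illustrative only: if a call to an inline function appears at
   bar.c:42:7, the DW_TAG_inlined_subroutine DIE for that expansion
   ends up with DW_AT_call_file naming bar.c, DW_AT_call_line 42 and,
   when column info is enabled (-gcolumn-info), DW_AT_call_column 7,
   so the debugger can report where the inlined call was written.  */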
23926
23927
23928 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23929 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23930
23931 static inline void
23932 add_high_low_attributes (tree stmt, dw_die_ref die)
23933 {
23934 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23935
23936 if (inline_entry_data **iedp
23937 = !inline_entry_data_table ? NULL
23938 : inline_entry_data_table->find_slot_with_hash (stmt,
23939 htab_hash_pointer (stmt),
23940 NO_INSERT))
23941 {
23942 inline_entry_data *ied = *iedp;
23943 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23944 gcc_assert (debug_inline_points);
23945 gcc_assert (inlined_function_outer_scope_p (stmt));
23946
23947 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23948 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23949
23950 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23951 && !dwarf_strict)
23952 {
23953 if (!output_asm_line_debug_info ())
23954 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23955 else
23956 {
23957 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23958 /* FIXME: this will resolve to a small number. Could we
23959 possibly emit smaller data? Ideally we'd emit a
23960 uleb128, but that would make the size of DIEs
23961 impossible for the compiler to compute, since it's
23962 the assembler that computes the value of the view
23963 label in this case. Ideally, we'd have a single form
23964 encompassing both the address and the view, and
23965 indirecting them through a table might make things
23966 easier, but even that would be more wasteful,
23967 space-wise, than what we have now. */
23968 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23969 }
23970 }
23971
23972 inline_entry_data_table->clear_slot (iedp);
23973 }
23974
23975 if (BLOCK_FRAGMENT_CHAIN (stmt)
23976 && (dwarf_version >= 3 || !dwarf_strict))
23977 {
23978 tree chain, superblock = NULL_TREE;
23979 dw_die_ref pdie;
23980 dw_attr_node *attr = NULL;
23981
23982 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23983 {
23984 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23985 BLOCK_NUMBER (stmt));
23986 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23987 }
23988
23989 /* Optimize duplicate .debug_ranges lists or even tails of
23990 lists. If this BLOCK has the same ranges as its supercontext,
23991 look up the DW_AT_ranges attribute in the supercontext (and
23992 recursively so), verify that the ranges_table contains the
23993 right values, and use it instead of adding a new .debug_ranges entry. */
23994 for (chain = stmt, pdie = die;
23995 BLOCK_SAME_RANGE (chain);
23996 chain = BLOCK_SUPERCONTEXT (chain))
23997 {
23998 dw_attr_node *new_attr;
23999
24000 pdie = pdie->die_parent;
24001 if (pdie == NULL)
24002 break;
24003 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24004 break;
24005 new_attr = get_AT (pdie, DW_AT_ranges);
24006 if (new_attr == NULL
24007 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24008 break;
24009 attr = new_attr;
24010 superblock = BLOCK_SUPERCONTEXT (chain);
24011 }
24012 if (attr != NULL
24013 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24014 == BLOCK_NUMBER (superblock))
24015 && BLOCK_FRAGMENT_CHAIN (superblock))
24016 {
24017 unsigned long off = attr->dw_attr_val.v.val_offset;
24018 unsigned long supercnt = 0, thiscnt = 0;
24019 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24020 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24021 {
24022 ++supercnt;
24023 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24024 == BLOCK_NUMBER (chain));
24025 }
24026 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24027 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24028 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24029 ++thiscnt;
24030 gcc_assert (supercnt >= thiscnt);
24031 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24032 false);
24033 note_rnglist_head (off + supercnt - thiscnt);
24034 return;
24035 }
24036
24037 unsigned int offset = add_ranges (stmt, true);
24038 add_AT_range_list (die, DW_AT_ranges, offset, false);
24039 note_rnglist_head (offset);
24040
24041 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24042 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24043 do
24044 {
24045 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24046 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24047 chain = BLOCK_FRAGMENT_CHAIN (chain);
24048 }
24049 while (chain);
24050 add_ranges (NULL);
24051 }
24052 else
24053 {
24054 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24055 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24056 BLOCK_NUMBER (stmt));
24057 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24058 BLOCK_NUMBER (stmt));
24059 add_AT_low_high_pc (die, label, label_high, false);
24060 }
24061 }
24062
24063 /* Generate a DIE for a lexical block. */
24064
24065 static void
24066 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24067 {
24068 dw_die_ref old_die = lookup_block_die (stmt);
24069 dw_die_ref stmt_die = NULL;
24070 if (!old_die)
24071 {
24072 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24073 equate_block_to_die (stmt, stmt_die);
24074 }
24075
24076 if (BLOCK_ABSTRACT (stmt))
24077 {
24078 if (old_die)
24079 {
24080 /* This DIE must have been generated early and it won't even
24081 need location information since it belongs to a DW_AT_inline
24082 function. */
24083 if (flag_checking)
24084 for (dw_die_ref c = context_die; c; c = c->die_parent)
24085 if (c->die_tag == DW_TAG_inlined_subroutine
24086 || c->die_tag == DW_TAG_subprogram)
24087 {
24088 gcc_assert (get_AT (c, DW_AT_inline));
24089 break;
24090 }
24091 return;
24092 }
24093 }
24094 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24095 {
24096 /* If this is an inlined instance, create a new lexical die for
24097 anything below to attach DW_AT_abstract_origin to. */
24098 if (old_die)
24099 {
24100 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24101 equate_block_to_die (stmt, stmt_die);
24102 old_die = NULL;
24103 }
24104
24105 tree origin = block_ultimate_origin (stmt);
24106 if (origin != NULL_TREE && origin != stmt)
24107 add_abstract_origin_attribute (stmt_die, origin);
24108 }
24109
24110 if (old_die)
24111 stmt_die = old_die;
24112
24113 /* A non-abstract block whose blocks have already been reordered
24114 should have the instruction range for this block. If so, set the
24115 high/low attributes. */
24116 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24117 {
24118 gcc_assert (stmt_die);
24119 add_high_low_attributes (stmt, stmt_die);
24120 }
24121
24122 decls_for_scope (stmt, stmt_die);
24123 }
24124
24125 /* Generate a DIE for an inlined subprogram. */
24126
24127 static void
24128 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24129 {
24130 tree decl;
24131
24132 /* The instance of the function that is effectively being inlined shall not
24133 be abstract. */
24134 gcc_assert (! BLOCK_ABSTRACT (stmt));
24135
24136 decl = block_ultimate_origin (stmt);
24137
24138 /* Make sure any inlined functions are known to be inlineable. */
24139 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24140 || cgraph_function_possibly_inlined_p (decl));
24141
24142 if (! BLOCK_ABSTRACT (stmt))
24143 {
24144 dw_die_ref subr_die
24145 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24146
24147 if (call_arg_locations || debug_inline_points)
24148 equate_block_to_die (stmt, subr_die);
24149 add_abstract_origin_attribute (subr_die, decl);
24150 if (TREE_ASM_WRITTEN (stmt))
24151 add_high_low_attributes (stmt, subr_die);
24152 add_call_src_coords_attributes (stmt, subr_die);
24153
24154 decls_for_scope (stmt, subr_die);
24155 }
24156 }
24157
24158 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24159 the comment for VLR_CONTEXT. */
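/* As an illustration (the struct below is hypothetical): for a
   bit-field member such as

       struct s { unsigned int flags : 3; };

   the DW_TAG_member DIE for "flags" gets, besides its name, source
   coordinates and DW_AT_type, the DW_AT_byte_size, DW_AT_bit_size and a
   bit offset attribute, plus DW_AT_data_member_location (the member does
   not live in a union) and possibly DW_AT_alignment.  */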
24160
24161 static void
24162 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24163 {
24164 dw_die_ref decl_die;
24165
24166 if (TREE_TYPE (decl) == error_mark_node)
24167 return;
24168
24169 decl_die = new_die (DW_TAG_member, context_die, decl);
24170 add_name_and_src_coords_attributes (decl_die, decl);
24171 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24172 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24173 context_die);
24174
24175 if (DECL_BIT_FIELD_TYPE (decl))
24176 {
24177 add_byte_size_attribute (decl_die, decl);
24178 add_bit_size_attribute (decl_die, decl);
24179 add_bit_offset_attribute (decl_die, decl, ctx);
24180 }
24181
24182 add_alignment_attribute (decl_die, decl);
24183
24184 /* If we have a variant part offset, then we are supposed to process a member
24185 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24186 trees. */
24187 gcc_assert (ctx->variant_part_offset == NULL_TREE
24188 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24189 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24190 add_data_member_location_attribute (decl_die, decl, ctx);
24191
24192 if (DECL_ARTIFICIAL (decl))
24193 add_AT_flag (decl_die, DW_AT_artificial, 1);
24194
24195 add_accessibility_attribute (decl_die, decl);
24196
24197 /* Equate decl number to die, so that we can look up this decl later on. */
24198 equate_decl_number_to_die (decl, decl_die);
24199 }
24200
24201 /* Generate a DIE for a pointer to a member type. TYPE can be an
24202 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24203 pointer to member function. */
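/* As an illustration (the class below is hypothetical), for a C++
   pointer to data member such as

       struct S { int i; };
       int S::*pm = &S::i;

   the type of "pm" is an OFFSET_TYPE and its DIE looks roughly like

       DW_TAG_ptr_to_member_type
         DW_AT_containing_type -> DIE for S
         DW_AT_type            -> DIE for int
         DW_AT_use_location    -> DW_OP_plus

   while pointers to member functions, whose pointed-to type is a
   FUNCTION_TYPE or METHOD_TYPE, do not get the DW_AT_use_location
   description here.  */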
24204
24205 static void
24206 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24207 {
24208 if (lookup_type_die (type))
24209 return;
24210
24211 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24212 scope_die_for (type, context_die), type);
24213
24214 equate_type_number_to_die (type, ptr_die);
24215 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24216 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24217 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24218 context_die);
24219 add_alignment_attribute (ptr_die, type);
24220
24221 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24222 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24223 {
24224 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24225 add_AT_loc (ptr_die, DW_AT_use_location, op);
24226 }
24227 }
24228
24229 static char *producer_string;
24230
24231 /* Return a heap-allocated producer string, including the command-line
24232 options if -grecord-gcc-switches is in effect. */
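/* For instance, with -O2 -g3 -grecord-gcc-switches the resulting string
   might look roughly like

       "GNU C11 8.1.0 -O2 -g3"

   i.e. "<language> <version>" followed by each recorded option, all
   separated by single spaces (the exact set of options depends on the
   command line and on the options filtered out below).  */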
24233
24234 static char *
24235 gen_producer_string (void)
24236 {
24237 size_t j;
24238 auto_vec<const char *> switches;
24239 const char *language_string = lang_hooks.name;
24240 char *producer, *tail;
24241 const char *p;
24242 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24243 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24244
24245 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24246 switch (save_decoded_options[j].opt_index)
24247 {
24248 case OPT_o:
24249 case OPT_d:
24250 case OPT_dumpbase:
24251 case OPT_dumpdir:
24252 case OPT_auxbase:
24253 case OPT_auxbase_strip:
24254 case OPT_quiet:
24255 case OPT_version:
24256 case OPT_v:
24257 case OPT_w:
24258 case OPT_L:
24259 case OPT_D:
24260 case OPT_I:
24261 case OPT_U:
24262 case OPT_SPECIAL_unknown:
24263 case OPT_SPECIAL_ignore:
24264 case OPT_SPECIAL_deprecated:
24265 case OPT_SPECIAL_program_name:
24266 case OPT_SPECIAL_input_file:
24267 case OPT_grecord_gcc_switches:
24268 case OPT__output_pch_:
24269 case OPT_fdiagnostics_show_location_:
24270 case OPT_fdiagnostics_show_option:
24271 case OPT_fdiagnostics_show_caret:
24272 case OPT_fdiagnostics_show_labels:
24273 case OPT_fdiagnostics_show_line_numbers:
24274 case OPT_fdiagnostics_color_:
24275 case OPT_fverbose_asm:
24276 case OPT____:
24277 case OPT__sysroot_:
24278 case OPT_nostdinc:
24279 case OPT_nostdinc__:
24280 case OPT_fpreprocessed:
24281 case OPT_fltrans_output_list_:
24282 case OPT_fresolution_:
24283 case OPT_fdebug_prefix_map_:
24284 case OPT_fmacro_prefix_map_:
24285 case OPT_ffile_prefix_map_:
24286 case OPT_fcompare_debug:
24287 case OPT_fchecking:
24288 case OPT_fchecking_:
24289 /* Ignore these. */
24290 continue;
24291 default:
24292 if (cl_options[save_decoded_options[j].opt_index].flags
24293 & CL_NO_DWARF_RECORD)
24294 continue;
24295 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24296 == '-');
24297 switch (save_decoded_options[j].canonical_option[0][1])
24298 {
24299 case 'M':
24300 case 'i':
24301 case 'W':
24302 continue;
24303 case 'f':
24304 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24305 "dump", 4) == 0)
24306 continue;
24307 break;
24308 default:
24309 break;
24310 }
24311 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24312 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24313 break;
24314 }
24315
24316 producer = XNEWVEC (char, plen + 1 + len + 1);
24317 tail = producer;
24318 sprintf (tail, "%s %s", language_string, version_string);
24319 tail += plen;
24320
24321 FOR_EACH_VEC_ELT (switches, j, p)
24322 {
24323 len = strlen (p);
24324 *tail = ' ';
24325 memcpy (tail + 1, p, len);
24326 tail += len + 1;
24327 }
24328
24329 *tail = '\0';
24330 return producer;
24331 }
24332
24333 /* Given a C and/or C++ language/version string return the "highest".
24334 C++ is assumed to be "higher" than C in this case. Used for merging
24335 LTO translation unit languages. */
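/* For example, highest_c_language ("GNU C++14", "GNU C11") is expected
   to return "GNU C++14", while highest_c_language ("GNU C99", "GNU C89")
   returns "GNU C99".  Inputs outside the known set trip the
   gcc_unreachable at the end.  */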
24336 static const char *
24337 highest_c_language (const char *lang1, const char *lang2)
24338 {
24339 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24340 return "GNU C++17";
24341 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24342 return "GNU C++14";
24343 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24344 return "GNU C++11";
24345 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24346 return "GNU C++98";
24347
24348 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24349 return "GNU C17";
24350 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24351 return "GNU C11";
24352 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24353 return "GNU C99";
24354 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24355 return "GNU C89";
24356
24357 gcc_unreachable ();
24358 }
24359
24360
24361 /* Generate the DIE for the compilation unit. */
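/* For instance, a "GNU C++14" unit is described as
   DW_LANG_C_plus_plus_14 with -gdwarf-5 and as plain DW_LANG_C_plus_plus
   otherwise, while "GNU C11" maps to DW_LANG_C11 with -gdwarf-5, to
   DW_LANG_C99 with DWARF 3/4, and falls back to DW_LANG_C89 under
   strict DWARF 2.  */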
24362
24363 static dw_die_ref
24364 gen_compile_unit_die (const char *filename)
24365 {
24366 dw_die_ref die;
24367 const char *language_string = lang_hooks.name;
24368 int language;
24369
24370 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24371
24372 if (filename)
24373 {
24374 add_name_attribute (die, filename);
24375 /* Don't add cwd for <built-in>. */
24376 if (filename[0] != '<')
24377 add_comp_dir_attribute (die);
24378 }
24379
24380 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24381
24382 /* If our producer is LTO try to figure out a common language to use
24383 from the global list of translation units. */
24384 if (strcmp (language_string, "GNU GIMPLE") == 0)
24385 {
24386 unsigned i;
24387 tree t;
24388 const char *common_lang = NULL;
24389
24390 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24391 {
24392 if (!TRANSLATION_UNIT_LANGUAGE (t))
24393 continue;
24394 if (!common_lang)
24395 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24396 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24397 ;
24398 else if (strncmp (common_lang, "GNU C", 5) == 0
24399 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24400 /* Mixing C and C++ is ok, use C++ in that case. */
24401 common_lang = highest_c_language (common_lang,
24402 TRANSLATION_UNIT_LANGUAGE (t));
24403 else
24404 {
24405 /* Fall back to C. */
24406 common_lang = NULL;
24407 break;
24408 }
24409 }
24410
24411 if (common_lang)
24412 language_string = common_lang;
24413 }
24414
24415 language = DW_LANG_C;
24416 if (strncmp (language_string, "GNU C", 5) == 0
24417 && ISDIGIT (language_string[5]))
24418 {
24419 language = DW_LANG_C89;
24420 if (dwarf_version >= 3 || !dwarf_strict)
24421 {
24422 if (strcmp (language_string, "GNU C89") != 0)
24423 language = DW_LANG_C99;
24424
24425 if (dwarf_version >= 5 /* || !dwarf_strict */)
24426 if (strcmp (language_string, "GNU C11") == 0
24427 || strcmp (language_string, "GNU C17") == 0)
24428 language = DW_LANG_C11;
24429 }
24430 }
24431 else if (strncmp (language_string, "GNU C++", 7) == 0)
24432 {
24433 language = DW_LANG_C_plus_plus;
24434 if (dwarf_version >= 5 /* || !dwarf_strict */)
24435 {
24436 if (strcmp (language_string, "GNU C++11") == 0)
24437 language = DW_LANG_C_plus_plus_11;
24438 else if (strcmp (language_string, "GNU C++14") == 0)
24439 language = DW_LANG_C_plus_plus_14;
24440 else if (strcmp (language_string, "GNU C++17") == 0)
24441 /* For now. */
24442 language = DW_LANG_C_plus_plus_14;
24443 }
24444 }
24445 else if (strcmp (language_string, "GNU F77") == 0)
24446 language = DW_LANG_Fortran77;
24447 else if (dwarf_version >= 3 || !dwarf_strict)
24448 {
24449 if (strcmp (language_string, "GNU Ada") == 0)
24450 language = DW_LANG_Ada95;
24451 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24452 {
24453 language = DW_LANG_Fortran95;
24454 if (dwarf_version >= 5 /* || !dwarf_strict */)
24455 {
24456 if (strcmp (language_string, "GNU Fortran2003") == 0)
24457 language = DW_LANG_Fortran03;
24458 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24459 language = DW_LANG_Fortran08;
24460 }
24461 }
24462 else if (strcmp (language_string, "GNU Objective-C") == 0)
24463 language = DW_LANG_ObjC;
24464 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24465 language = DW_LANG_ObjC_plus_plus;
24466 else if (dwarf_version >= 5 || !dwarf_strict)
24467 {
24468 if (strcmp (language_string, "GNU Go") == 0)
24469 language = DW_LANG_Go;
24470 }
24471 }
24472 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24473 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24474 language = DW_LANG_Fortran90;
24475 /* Likewise for Ada. */
24476 else if (strcmp (language_string, "GNU Ada") == 0)
24477 language = DW_LANG_Ada83;
24478
24479 add_AT_unsigned (die, DW_AT_language, language);
24480
24481 switch (language)
24482 {
24483 case DW_LANG_Fortran77:
24484 case DW_LANG_Fortran90:
24485 case DW_LANG_Fortran95:
24486 case DW_LANG_Fortran03:
24487 case DW_LANG_Fortran08:
24488 /* Fortran has case insensitive identifiers and the front-end
24489 lowercases everything. */
24490 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24491 break;
24492 default:
24493 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24494 break;
24495 }
24496 return die;
24497 }
24498
24499 /* Generate the DIE for a base class. */
24500
24501 static void
24502 gen_inheritance_die (tree binfo, tree access, tree type,
24503 dw_die_ref context_die)
24504 {
24505 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24506 struct vlr_context ctx = { type, NULL };
24507
24508 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24509 context_die);
24510 add_data_member_location_attribute (die, binfo, &ctx);
24511
24512 if (BINFO_VIRTUAL_P (binfo))
24513 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24514
24515 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24516 children, otherwise the default is DW_ACCESS_public. In DWARF2
24517 the default has always been DW_ACCESS_private. */
24518 if (access == access_public_node)
24519 {
24520 if (dwarf_version == 2
24521 || context_die->die_tag == DW_TAG_class_type)
24522 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24523 }
24524 else if (access == access_protected_node)
24525 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24526 else if (dwarf_version > 2
24527 && context_die->die_tag != DW_TAG_class_type)
24528 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24529 }
24530
24531 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24532 structure. */
24533 static bool
24534 is_variant_part (tree decl)
24535 {
24536 return (TREE_CODE (decl) == FIELD_DECL
24537 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24538 }
24539
24540 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24541 return the FIELD_DECL. Return NULL_TREE otherwise. */
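/* For instance, for an Ada variant guarded by "when 3", the predicate
   compares the discriminant field against 3, and OPERAND is roughly

       COMPONENT_REF (PLACEHOLDER_EXPR <struct_type>,
                      <discriminant FIELD_DECL>)

   possibly wrapped in one or more conversions; once those are stripped
   the FIELD_DECL is returned, or NULL_TREE if the shape does not
   match.  */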
24542
24543 static tree
24544 analyze_discr_in_predicate (tree operand, tree struct_type)
24545 {
24546 bool continue_stripping = true;
24547 while (continue_stripping)
24548 switch (TREE_CODE (operand))
24549 {
24550 CASE_CONVERT:
24551 operand = TREE_OPERAND (operand, 0);
24552 break;
24553 default:
24554 continue_stripping = false;
24555 break;
24556 }
24557
24558 /* Match field access to members of struct_type only. */
24559 if (TREE_CODE (operand) == COMPONENT_REF
24560 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24561 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24562 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24563 return TREE_OPERAND (operand, 1);
24564 else
24565 return NULL_TREE;
24566 }
24567
24568 /* Check that SRC is a constant integer that can be represented as a native
24569 integer constant (either signed or unsigned). If so, store it into DEST and
24570 return true. Return false otherwise. */
24571
24572 static bool
24573 get_discr_value (tree src, dw_discr_value *dest)
24574 {
24575 tree discr_type = TREE_TYPE (src);
24576
24577 if (lang_hooks.types.get_debug_type)
24578 {
24579 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24580 if (debug_type != NULL)
24581 discr_type = debug_type;
24582 }
24583
24584 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24585 return false;
24586
24587 /* Signedness can vary between the original type and the debug type. This
24588 can happen for character types in Ada for instance: the character type
24589 used for code generation can be signed, to be compatible with the C one,
24590 but from a debugger point of view, it must be unsigned. */
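/* For example (hypothetical values), an Ada Character constant with
   position 200 may be represented as the INTEGER_CST -56 in the signed
   code-generation type; folding it to the unsigned debug type below
   recovers 200 before the value is extracted.  */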
24591 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24592 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24593
24594 if (is_orig_unsigned != is_debug_unsigned)
24595 src = fold_convert (discr_type, src);
24596
24597 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24598 return false;
24599
24600 dest->pos = is_debug_unsigned;
24601 if (is_debug_unsigned)
24602 dest->v.uval = tree_to_uhwi (src);
24603 else
24604 dest->v.sval = tree_to_shwi (src);
24605
24606 return true;
24607 }
24608
24609 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24610 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24611 store NULL_TREE in DISCR_DECL. Otherwise:
24612
24613 - store the discriminant field in STRUCT_TYPE that controls the variant
24614 part to *DISCR_DECL
24615
24616 - put in *DISCR_LISTS_P an array where for each variant, the item
24617 represents the corresponding matching list of discriminant values.
24618
24619 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24620 the above array.
24621
24622 Note that when the array is allocated (i.e. when the analysis is
24623 successful), it is up to the caller to free the array. */
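/* As a rough, hypothetical illustration, for an Ada record such as

       type Rec (Disc : Integer) is record
          case Disc is
             when 1 .. 4 => A : Integer;
             when 5 | 7  => B : Boolean;
             when others => null;
          end case;
       end record;

   there are three variants, *DISCR_DECL is the field for Disc,
   *DISCR_LISTS_LENGTH is 3 and the per-variant lists contain the range
   1 .. 4, the two values 5 and 7, and (for the "others" variant) no
   entry at all.  */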
24624
24625 static void
24626 analyze_variants_discr (tree variant_part_decl,
24627 tree struct_type,
24628 tree *discr_decl,
24629 dw_discr_list_ref **discr_lists_p,
24630 unsigned *discr_lists_length)
24631 {
24632 tree variant_part_type = TREE_TYPE (variant_part_decl);
24633 tree variant;
24634 dw_discr_list_ref *discr_lists;
24635 unsigned i;
24636
24637 /* Compute how many variants there are in this variant part. */
24638 *discr_lists_length = 0;
24639 for (variant = TYPE_FIELDS (variant_part_type);
24640 variant != NULL_TREE;
24641 variant = DECL_CHAIN (variant))
24642 ++*discr_lists_length;
24643
24644 *discr_decl = NULL_TREE;
24645 *discr_lists_p
24646 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24647 sizeof (**discr_lists_p));
24648 discr_lists = *discr_lists_p;
24649
24650 /* And then analyze all variants to extract discriminant information for all
24651 of them. This analysis is conservative: as soon as we detect something we
24652 do not support, abort everything and pretend we found nothing. */
24653 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24654 variant != NULL_TREE;
24655 variant = DECL_CHAIN (variant), ++i)
24656 {
24657 tree match_expr = DECL_QUALIFIER (variant);
24658
24659 /* Now, try to analyze the predicate and deduce a discriminant for
24660 it. */
24661 if (match_expr == boolean_true_node)
24662 /* Typically happens for the default variant: it matches all cases that
24663 previous variants rejected. Don't output any matching value for
24664 this one. */
24665 continue;
24666
24667 /* The following loop tries to iterate over each discriminant
24668 possibility: single values or ranges. */
24669 while (match_expr != NULL_TREE)
24670 {
24671 tree next_round_match_expr;
24672 tree candidate_discr = NULL_TREE;
24673 dw_discr_list_ref new_node = NULL;
24674
24675 /* Possibilities are matched one after the other by nested
24676 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24677 continue with the rest at next iteration. */
24678 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24679 {
24680 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24681 match_expr = TREE_OPERAND (match_expr, 1);
24682 }
24683 else
24684 next_round_match_expr = NULL_TREE;
24685
24686 if (match_expr == boolean_false_node)
24687 /* This sub-expression matches nothing: just wait for the next
24688 one. */
24689 ;
24690
24691 else if (TREE_CODE (match_expr) == EQ_EXPR)
24692 {
24693 /* We are matching: <discr_field> == <integer_cst>
24694 This sub-expression matches a single value. */
24695 tree integer_cst = TREE_OPERAND (match_expr, 1);
24696
24697 candidate_discr
24698 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24699 struct_type);
24700
24701 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24702 if (!get_discr_value (integer_cst,
24703 &new_node->dw_discr_lower_bound))
24704 goto abort;
24705 new_node->dw_discr_range = false;
24706 }
24707
24708 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24709 {
24710 /* We are matching:
24711 <discr_field> > <integer_cst>
24712 && <discr_field> < <integer_cst>.
24713 This sub-expression matches the range of values between the
24714 two matched integer constants. Note that comparisons can be
24715 inclusive or exclusive. */
24716 tree candidate_discr_1, candidate_discr_2;
24717 tree lower_cst, upper_cst;
24718 bool lower_cst_included, upper_cst_included;
24719 tree lower_op = TREE_OPERAND (match_expr, 0);
24720 tree upper_op = TREE_OPERAND (match_expr, 1);
24721
24722 /* When the comparison is exclusive, the integer constant is not
24723 the discriminant range bound we are looking for: we will have
24724 to increment or decrement it. */
24725 if (TREE_CODE (lower_op) == GE_EXPR)
24726 lower_cst_included = true;
24727 else if (TREE_CODE (lower_op) == GT_EXPR)
24728 lower_cst_included = false;
24729 else
24730 goto abort;
24731
24732 if (TREE_CODE (upper_op) == LE_EXPR)
24733 upper_cst_included = true;
24734 else if (TREE_CODE (upper_op) == LT_EXPR)
24735 upper_cst_included = false;
24736 else
24737 goto abort;
24738
24739 /* Extract the discriminant from the first operand and check it
24740 is consistent with the same analysis in the second
24741 operand. */
24742 candidate_discr_1
24743 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24744 struct_type);
24745 candidate_discr_2
24746 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24747 struct_type);
24748 if (candidate_discr_1 == candidate_discr_2)
24749 candidate_discr = candidate_discr_1;
24750 else
24751 goto abort;
24752
24753 /* Extract bounds from both. */
24754 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24755 lower_cst = TREE_OPERAND (lower_op, 1);
24756 upper_cst = TREE_OPERAND (upper_op, 1);
24757
24758 if (!lower_cst_included)
24759 lower_cst
24760 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24761 build_int_cst (TREE_TYPE (lower_cst), 1));
24762 if (!upper_cst_included)
24763 upper_cst
24764 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24765 build_int_cst (TREE_TYPE (upper_cst), 1));
24766
24767 if (!get_discr_value (lower_cst,
24768 &new_node->dw_discr_lower_bound)
24769 || !get_discr_value (upper_cst,
24770 &new_node->dw_discr_upper_bound))
24771 goto abort;
24772
24773 new_node->dw_discr_range = true;
24774 }
24775
24776 else
24777 /* Unsupported sub-expression: we cannot determine the set of
24778 matching discriminant values. Abort everything. */
24779 goto abort;
24780
24781 /* If the discriminant info is not consistent with what we saw so
24782 far, consider the analysis failed and abort everything. */
24783 if (candidate_discr == NULL_TREE
24784 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24785 goto abort;
24786 else
24787 *discr_decl = candidate_discr;
24788
24789 if (new_node != NULL)
24790 {
24791 new_node->dw_discr_next = discr_lists[i];
24792 discr_lists[i] = new_node;
24793 }
24794 match_expr = next_round_match_expr;
24795 }
24796 }
24797
24798 /* If we reach this point, we could match everything we were interested
24799 in. */
24800 return;
24801
24802 abort:
24803 /* Clean all data structure and return no result. */
24804 free (*discr_lists_p);
24805 *discr_lists_p = NULL;
24806 *discr_decl = NULL_TREE;
24807 }
24808
24809 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24810 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24811 under CONTEXT_DIE.
24812
24813 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24814 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24815 this type, which are record types, represent the available variants and each
24816 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24817 values are inferred from these attributes.
24818
24819 In trees, the offsets for the fields inside these sub-records are relative
24820 to the variant part itself, whereas the corresponding DIEs should have
24821 offset attributes that are relative to the embedding record base address.
24822 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24823 must be an expression that computes the offset of the variant part to
24824 describe in DWARF. */
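/* Roughly speaking, for a record with a variant part this produces a
   DIE subtree of the shape

       DW_TAG_variant_part
         DW_AT_discr -> DIE of the discriminant member, if known
         DW_TAG_variant
           DW_AT_discr_value or DW_AT_discr_list (none for the default)
           DW_TAG_member ...
         DW_TAG_variant
           ...

   with one DW_TAG_variant child per member of the QUAL_UNION_TYPE and
   the members of each variant nested below it.  */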
24825
24826 static void
24827 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24828 dw_die_ref context_die)
24829 {
24830 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24831 tree variant_part_offset = vlr_ctx->variant_part_offset;
24832 struct loc_descr_context ctx = {
24833 vlr_ctx->struct_type, /* context_type */
24834 NULL_TREE, /* base_decl */
24835 NULL, /* dpi */
24836 false, /* placeholder_arg */
24837 false /* placeholder_seen */
24838 };
24839
24840 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24841 NULL_TREE if there is no such field. */
24842 tree discr_decl = NULL_TREE;
24843 dw_discr_list_ref *discr_lists;
24844 unsigned discr_lists_length = 0;
24845 unsigned i;
24846
24847 dw_die_ref dwarf_proc_die = NULL;
24848 dw_die_ref variant_part_die
24849 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24850
24851 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24852
24853 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24854 &discr_decl, &discr_lists, &discr_lists_length);
24855
24856 if (discr_decl != NULL_TREE)
24857 {
24858 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24859
24860 if (discr_die)
24861 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24862 else
24863 /* We have no DIE for the discriminant, so just discard all
24864 discriminant information in the output. */
24865 discr_decl = NULL_TREE;
24866 }
24867
24868 /* If the offset for this variant part is more complex than a constant,
24869 create a DWARF procedure for it so that we will not have to generate DWARF
24870 expressions for it for each member. */
24871 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24872 && (dwarf_version >= 3 || !dwarf_strict))
24873 {
24874 const tree dwarf_proc_fndecl
24875 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24876 build_function_type (TREE_TYPE (variant_part_offset),
24877 NULL_TREE));
24878 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24879 const dw_loc_descr_ref dwarf_proc_body
24880 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24881
24882 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24883 dwarf_proc_fndecl, context_die);
24884 if (dwarf_proc_die != NULL)
24885 variant_part_offset = dwarf_proc_call;
24886 }
24887
24888 /* Output DIEs for all variants. */
24889 i = 0;
24890 for (tree variant = TYPE_FIELDS (variant_part_type);
24891 variant != NULL_TREE;
24892 variant = DECL_CHAIN (variant), ++i)
24893 {
24894 tree variant_type = TREE_TYPE (variant);
24895 dw_die_ref variant_die;
24896
24897 /* All variants (i.e. members of a variant part) are supposed to be
24898 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24899 under these records. */
24900 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24901
24902 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24903 equate_decl_number_to_die (variant, variant_die);
24904
24905 /* Output discriminant values this variant matches, if any. */
24906 if (discr_decl == NULL || discr_lists[i] == NULL)
24907 /* Either we have no discriminant information at all, or this is
24908 probably the default variant: as the standard says, don't
24909 output any discriminant value/list attribute. */
24910 ;
24911 else if (discr_lists[i]->dw_discr_next == NULL
24912 && !discr_lists[i]->dw_discr_range)
24913 /* If there is only one accepted value, don't bother outputting a
24914 list. */
24915 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24916 else
24917 add_discr_list (variant_die, discr_lists[i]);
24918
24919 for (tree member = TYPE_FIELDS (variant_type);
24920 member != NULL_TREE;
24921 member = DECL_CHAIN (member))
24922 {
24923 struct vlr_context vlr_sub_ctx = {
24924 vlr_ctx->struct_type, /* struct_type */
24925 NULL /* variant_part_offset */
24926 };
24927 if (is_variant_part (member))
24928 {
24929 /* All offsets for fields inside variant parts are relative to
24930 the top-level embedding RECORD_TYPE's base address. On the
24931 other hand, offsets in GCC's types are relative to the
24932 innermost variant part. So we have to sum offsets each time
24933 we recurse. */
24934
24935 vlr_sub_ctx.variant_part_offset
24936 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24937 variant_part_offset, byte_position (member));
24938 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24939 }
24940 else
24941 {
24942 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24943 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24944 }
24945 }
24946 }
24947
24948 free (discr_lists);
24949 }
24950
24951 /* Generate a DIE for a class member. */
24952
24953 static void
24954 gen_member_die (tree type, dw_die_ref context_die)
24955 {
24956 tree member;
24957 tree binfo = TYPE_BINFO (type);
24958
24959 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24960
24961 /* If this is not an incomplete type, output descriptions of each of its
24962 members. Note that as we output the DIEs necessary to represent the
24963 members of this record or union type, we will also be trying to output
24964 DIEs to represent the *types* of those members. However the `type'
24965 function (above) will specifically avoid generating type DIEs for member
24966 types *within* the list of member DIEs for this (containing) type except
24967 for those types (of members) which are explicitly marked as also being
24968 members of this (containing) type themselves. The g++ front-end can
24969 force any given type to be treated as a member of some other (containing)
24970 type by setting the TYPE_CONTEXT of the given (member) type to point to
24971 the TREE node representing the appropriate (containing) type. */
24972
24973 /* First output info about the base classes. */
24974 if (binfo)
24975 {
24976 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24977 int i;
24978 tree base;
24979
24980 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24981 gen_inheritance_die (base,
24982 (accesses ? (*accesses)[i] : access_public_node),
24983 type,
24984 context_die);
24985 }
24986
24987 /* Now output info about the data members and type members. */
24988 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24989 {
24990 struct vlr_context vlr_ctx = { type, NULL_TREE };
24991 bool static_inline_p
24992 = (TREE_STATIC (member)
24993 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24994 != -1));
24995
24996 /* Ignore clones. */
24997 if (DECL_ABSTRACT_ORIGIN (member))
24998 continue;
24999
25000 /* If we thought we were generating minimal debug info for TYPE
25001 and then changed our minds, some of the member declarations
25002 may have already been defined. Don't define them again, but
25003 do put them in the right order. */
25004
25005 if (dw_die_ref child = lookup_decl_die (member))
25006 {
25007 /* Handle inline static data members, which only have in-class
25008 declarations. */
25009 dw_die_ref ref = NULL;
25010 if (child->die_tag == DW_TAG_variable
25011 && child->die_parent == comp_unit_die ())
25012 {
25013 ref = get_AT_ref (child, DW_AT_specification);
25014 /* For C++17 inline static data members followed by redundant
25015 out of class redeclaration, we might get here with
25016 child being the DIE created for the out of class
25017 redeclaration and with its DW_AT_specification being
25018 the DIE created for in-class definition. We want to
25019 reparent the latter, and don't want to create another
25020 DIE with DW_AT_specification in that case, because
25021 we already have one. */
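/* A hypothetical C++17 example of that situation:

       struct S { static constexpr int x = 42; };
       constexpr int S::x;    (redundant out-of-class redeclaration)

   Here CHILD may be the DIE of the redundant redeclaration whose
   DW_AT_specification already points at the in-class definition, in
   which case we reparent the latter rather than create yet another
   specification DIE.  */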
25022 if (ref
25023 && static_inline_p
25024 && ref->die_tag == DW_TAG_variable
25025 && ref->die_parent == comp_unit_die ()
25026 && get_AT (ref, DW_AT_specification) == NULL)
25027 {
25028 child = ref;
25029 ref = NULL;
25030 static_inline_p = false;
25031 }
25032 }
25033
25034 if (child->die_tag == DW_TAG_variable
25035 && child->die_parent == comp_unit_die ()
25036 && ref == NULL)
25037 {
25038 reparent_child (child, context_die);
25039 if (dwarf_version < 5)
25040 child->die_tag = DW_TAG_member;
25041 }
25042 else
25043 splice_child_die (context_die, child);
25044 }
25045
25046 /* Do not generate standard DWARF for variant parts if we are generating
25047 the corresponding GNAT encodings: DIEs generated for both would
25048 conflict in our mappings. */
25049 else if (is_variant_part (member)
25050 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25051 {
25052 vlr_ctx.variant_part_offset = byte_position (member);
25053 gen_variant_part (member, &vlr_ctx, context_die);
25054 }
25055 else
25056 {
25057 vlr_ctx.variant_part_offset = NULL_TREE;
25058 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25059 }
25060
25061 /* For C++ inline static data members emit immediately a DW_TAG_variable
25062 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25063 DW_AT_specification. */
25064 if (static_inline_p)
25065 {
25066 int old_extern = DECL_EXTERNAL (member);
25067 DECL_EXTERNAL (member) = 0;
25068 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25069 DECL_EXTERNAL (member) = old_extern;
25070 }
25071 }
25072 }
25073
25074 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25075 is set, we pretend that the type was never defined, so we only get the
25076 member DIEs needed by later specification DIEs. */
25077
25078 static void
25079 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25080 enum debug_info_usage usage)
25081 {
25082 if (TREE_ASM_WRITTEN (type))
25083 {
25084 /* Fill in the bounds of variable-length fields in late dwarf if
25085 still incomplete. */
25086 if (!early_dwarf && variably_modified_type_p (type, NULL))
25087 for (tree member = TYPE_FIELDS (type);
25088 member;
25089 member = DECL_CHAIN (member))
25090 fill_variable_array_bounds (TREE_TYPE (member));
25091 return;
25092 }
25093
25094 dw_die_ref type_die = lookup_type_die (type);
25095 dw_die_ref scope_die = 0;
25096 int nested = 0;
25097 int complete = (TYPE_SIZE (type)
25098 && (! TYPE_STUB_DECL (type)
25099 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25100 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25101 complete = complete && should_emit_struct_debug (type, usage);
25102
25103 if (type_die && ! complete)
25104 return;
25105
25106 if (TYPE_CONTEXT (type) != NULL_TREE
25107 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25108 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25109 nested = 1;
25110
25111 scope_die = scope_die_for (type, context_die);
25112
25113 /* Generate child DIEs for template parameters. */
25114 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25115 schedule_generic_params_dies_gen (type);
25116
25117 if (! type_die || (nested && is_cu_die (scope_die)))
25118 /* First occurrence of type or toplevel definition of nested class. */
25119 {
25120 dw_die_ref old_die = type_die;
25121
25122 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25123 ? record_type_tag (type) : DW_TAG_union_type,
25124 scope_die, type);
25125 equate_type_number_to_die (type, type_die);
25126 if (old_die)
25127 add_AT_specification (type_die, old_die);
25128 else
25129 add_name_attribute (type_die, type_tag (type));
25130 }
25131 else
25132 remove_AT (type_die, DW_AT_declaration);
25133
25134 /* If this type has been completed, then give it a byte_size attribute and
25135 then give a list of members. */
25136 if (complete && !ns_decl)
25137 {
25138 /* Prevent infinite recursion in cases where the type of some member of
25139 this type is expressed in terms of this type itself. */
25140 TREE_ASM_WRITTEN (type) = 1;
25141 add_byte_size_attribute (type_die, type);
25142 add_alignment_attribute (type_die, type);
25143 if (TYPE_STUB_DECL (type) != NULL_TREE)
25144 {
25145 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25146 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25147 }
25148
25149 /* If the first reference to this type was as the return type of an
25150 inline function, then it may not have a parent. Fix this now. */
25151 if (type_die->die_parent == NULL)
25152 add_child_die (scope_die, type_die);
25153
25154 gen_member_die (type, type_die);
25155
25156 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25157 if (TYPE_ARTIFICIAL (type))
25158 add_AT_flag (type_die, DW_AT_artificial, 1);
25159
25160 /* GNU extension: Record what type our vtable lives in. */
25161 if (TYPE_VFIELD (type))
25162 {
25163 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25164
25165 gen_type_die (vtype, context_die);
25166 add_AT_die_ref (type_die, DW_AT_containing_type,
25167 lookup_type_die (vtype));
25168 }
25169 }
25170 else
25171 {
25172 add_AT_flag (type_die, DW_AT_declaration, 1);
25173
25174 /* We don't need to do this for function-local types. */
25175 if (TYPE_STUB_DECL (type)
25176 && ! decl_function_context (TYPE_STUB_DECL (type)))
25177 vec_safe_push (incomplete_types, type);
25178 }
25179
25180 if (get_AT (type_die, DW_AT_name))
25181 add_pubtype (type, type_die);
25182 }
25183
25184 /* Generate a DIE for a subroutine _type_. */
25185
25186 static void
25187 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25188 {
25189 tree return_type = TREE_TYPE (type);
25190 dw_die_ref subr_die
25191 = new_die (DW_TAG_subroutine_type,
25192 scope_die_for (type, context_die), type);
25193
25194 equate_type_number_to_die (type, subr_die);
25195 add_prototyped_attribute (subr_die, type);
25196 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25197 context_die);
25198 add_alignment_attribute (subr_die, type);
25199 gen_formal_types_die (type, subr_die);
25200
25201 if (get_AT (subr_die, DW_AT_name))
25202 add_pubtype (type, subr_die);
25203 if ((dwarf_version >= 5 || !dwarf_strict)
25204 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25205 add_AT_flag (subr_die, DW_AT_reference, 1);
25206 if ((dwarf_version >= 5 || !dwarf_strict)
25207 && lang_hooks.types.type_dwarf_attribute (type,
25208 DW_AT_rvalue_reference) != -1)
25209 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25210 }
25211
25212 /* Generate a DIE for a type definition. */
25213
25214 static void
25215 gen_typedef_die (tree decl, dw_die_ref context_die)
25216 {
25217 dw_die_ref type_die;
25218 tree type;
25219
25220 if (TREE_ASM_WRITTEN (decl))
25221 {
25222 if (DECL_ORIGINAL_TYPE (decl))
25223 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25224 return;
25225 }
25226
25227 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25228 checks in process_scope_var and modified_type_die), this should be called
25229 only for original types. */
25230 gcc_assert (decl_ultimate_origin (decl) == NULL
25231 || decl_ultimate_origin (decl) == decl);
25232
25233 TREE_ASM_WRITTEN (decl) = 1;
25234 type_die = new_die (DW_TAG_typedef, context_die, decl);
25235
25236 add_name_and_src_coords_attributes (type_die, decl);
25237 if (DECL_ORIGINAL_TYPE (decl))
25238 {
25239 type = DECL_ORIGINAL_TYPE (decl);
25240 if (type == error_mark_node)
25241 return;
25242
25243 gcc_assert (type != TREE_TYPE (decl));
25244 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25245 }
25246 else
25247 {
25248 type = TREE_TYPE (decl);
25249 if (type == error_mark_node)
25250 return;
25251
25252 if (is_naming_typedef_decl (TYPE_NAME (type)))
25253 {
25254 /* Here, we are in the case of decl being a typedef naming
25255 an anonymous type, e.g:
25256 typedef struct {...} foo;
25257 In that case TREE_TYPE (decl) is not a typedef variant
25258 type and TYPE_NAME of the anonymous type is set to the
25259 TYPE_DECL of the typedef. This construct is emitted by
25260 the C++ FE.
25261
25262 TYPE is the anonymous struct named by the typedef
25263 DECL. As we need the DW_AT_type attribute of the
25264 DW_TAG_typedef to point to the DIE of TYPE, let's
25265 generate that DIE right away. add_type_attribute
25266 called below will then pick (via lookup_type_die) that
25267 anonymous struct DIE. */
25268 if (!TREE_ASM_WRITTEN (type))
25269 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25270
25271 /* This is a GNU Extension. We are adding a
25272 DW_AT_linkage_name attribute to the DIE of the
25273 anonymous struct TYPE. The value of that attribute
25274 is the name of the typedef decl naming the anonymous
25275 struct. This greatly eases the work of consumers of
25276 this debug info. */
25277 add_linkage_name_raw (lookup_type_die (type), decl);
25278 }
25279 }
25280
25281 add_type_attribute (type_die, type, decl_quals (decl), false,
25282 context_die);
25283
25284 if (is_naming_typedef_decl (decl))
25285 /* We want that all subsequent calls to lookup_type_die with
25286 TYPE in argument yield the DW_TAG_typedef we have just
25287 created. */
25288 equate_type_number_to_die (type, type_die);
25289
25290 add_alignment_attribute (type_die, TREE_TYPE (decl));
25291
25292 add_accessibility_attribute (type_die, decl);
25293
25294 if (DECL_ABSTRACT_P (decl))
25295 equate_decl_number_to_die (decl, type_die);
25296
25297 if (get_AT (type_die, DW_AT_name))
25298 add_pubtype (decl, type_die);
25299 }
25300
25301 /* Generate a DIE for a struct, class, enum or union type. */
25302
25303 static void
25304 gen_tagged_type_die (tree type,
25305 dw_die_ref context_die,
25306 enum debug_info_usage usage)
25307 {
25308 if (type == NULL_TREE
25309 || !is_tagged_type (type))
25310 return;
25311
25312 if (TREE_ASM_WRITTEN (type))
25313 ;
25314 /* If this is a nested type whose containing class hasn't been written
25315 out yet, writing it out will cover this one, too. This does not apply
25316 to instantiations of member class templates; they need to be added to
25317 the containing class as they are generated. FIXME: This hurts the
25318 idea of combining type decls from multiple TUs, since we can't predict
25319 what set of template instantiations we'll get. */
25320 else if (TYPE_CONTEXT (type)
25321 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25322 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25323 {
25324 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25325
25326 if (TREE_ASM_WRITTEN (type))
25327 return;
25328
25329 /* If that failed, attach ourselves to the stub. */
25330 context_die = lookup_type_die (TYPE_CONTEXT (type));
25331 }
25332 else if (TYPE_CONTEXT (type) != NULL_TREE
25333 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25334 {
25335 /* If this type is local to a function that hasn't been written
25336 out yet, use a NULL context for now; it will be fixed up in
25337 decls_for_scope. */
25338 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25339 /* A declaration DIE doesn't count; nested types need to go in the
25340 specification. */
25341 if (context_die && is_declaration_die (context_die))
25342 context_die = NULL;
25343 }
25344 else
25345 context_die = declare_in_namespace (type, context_die);
25346
25347 if (TREE_CODE (type) == ENUMERAL_TYPE)
25348 {
25349 /* This might have been written out by the call to
25350 declare_in_namespace. */
25351 if (!TREE_ASM_WRITTEN (type))
25352 gen_enumeration_type_die (type, context_die);
25353 }
25354 else
25355 gen_struct_or_union_type_die (type, context_die, usage);
25356
25357 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25358 it up if it is ever completed. gen_*_type_die will set it for us
25359 when appropriate. */
25360 }
25361
25362 /* Generate a type description DIE. */
25363
25364 static void
25365 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25366 enum debug_info_usage usage)
25367 {
25368 struct array_descr_info info;
25369
25370 if (type == NULL_TREE || type == error_mark_node)
25371 return;
25372
25373 if (flag_checking && type)
25374 verify_type (type);
25375
25376 if (TYPE_NAME (type) != NULL_TREE
25377 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25378 && is_redundant_typedef (TYPE_NAME (type))
25379 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25380 /* The DECL of this type is a typedef we don't want to emit debug
25381 info for but we want debug info for its underlying typedef.
25382 This can happen for e.g, the injected-class-name of a C++
25383 type. */
25384 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25385
25386 /* If TYPE is a typedef type variant, let's generate debug info
25387 for the parent typedef which TYPE is a type of. */
25388 if (typedef_variant_p (type))
25389 {
25390 if (TREE_ASM_WRITTEN (type))
25391 return;
25392
25393 tree name = TYPE_NAME (type);
25394 tree origin = decl_ultimate_origin (name);
25395 if (origin != NULL && origin != name)
25396 {
25397 gen_decl_die (origin, NULL, NULL, context_die);
25398 return;
25399 }
25400
25401 /* Prevent broken recursion; we can't hand off to the same type. */
25402 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25403
25404 /* Give typedefs the right scope. */
25405 context_die = scope_die_for (type, context_die);
25406
25407 TREE_ASM_WRITTEN (type) = 1;
25408
25409 gen_decl_die (name, NULL, NULL, context_die);
25410 return;
25411 }
25412
25413 /* If type is an anonymous tagged type named by a typedef, let's
25414 generate debug info for the typedef. */
25415 if (is_naming_typedef_decl (TYPE_NAME (type)))
25416 {
25417 /* Give typedefs the right scope. */
25418 context_die = scope_die_for (type, context_die);
25419
25420 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25421 return;
25422 }
25423
25424 if (lang_hooks.types.get_debug_type)
25425 {
25426 tree debug_type = lang_hooks.types.get_debug_type (type);
25427
25428 if (debug_type != NULL_TREE && debug_type != type)
25429 {
25430 gen_type_die_with_usage (debug_type, context_die, usage);
25431 return;
25432 }
25433 }
25434
25435 /* We are going to output a DIE to represent the unqualified version
25436 of this type (i.e. without any const or volatile qualifiers) so
25437 get the main variant (i.e. the unqualified version) of this type
25438 now. (Vectors and arrays are special because the debugging info is in the
25439 cloned type itself. Similarly function/method types can contain extra
25440 ref-qualification). */
25441 if (TREE_CODE (type) == FUNCTION_TYPE
25442 || TREE_CODE (type) == METHOD_TYPE)
25443 {
25444 /* For function/method types, can't use type_main_variant here,
25445 because that can have different ref-qualifiers for C++,
25446 but try to canonicalize. */
25447 tree main = TYPE_MAIN_VARIANT (type);
25448 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25449 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25450 && check_base_type (t, main)
25451 && check_lang_type (t, type))
25452 {
25453 type = t;
25454 break;
25455 }
25456 }
25457 else if (TREE_CODE (type) != VECTOR_TYPE
25458 && TREE_CODE (type) != ARRAY_TYPE)
25459 type = type_main_variant (type);
25460
25461 /* If this is an array type with hidden descriptor, handle it first. */
25462 if (!TREE_ASM_WRITTEN (type)
25463 && lang_hooks.types.get_array_descr_info)
25464 {
25465 memset (&info, 0, sizeof (info));
25466 if (lang_hooks.types.get_array_descr_info (type, &info))
25467 {
25468 /* Fortran sometimes emits array types with no dimension. */
25469 gcc_assert (info.ndimensions >= 0
25470 && (info.ndimensions
25471 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25472 gen_descr_array_type_die (type, &info, context_die);
25473 TREE_ASM_WRITTEN (type) = 1;
25474 return;
25475 }
25476 }
25477
25478 if (TREE_ASM_WRITTEN (type))
25479 {
25480 /* Variable-length types may be incomplete even if
25481 TREE_ASM_WRITTEN. For such types, fall through to
25482 gen_array_type_die() and possibly fill in
25483 DW_AT_{upper,lower}_bound attributes. */
25484 if ((TREE_CODE (type) != ARRAY_TYPE
25485 && TREE_CODE (type) != RECORD_TYPE
25486 && TREE_CODE (type) != UNION_TYPE
25487 && TREE_CODE (type) != QUAL_UNION_TYPE)
25488 || !variably_modified_type_p (type, NULL))
25489 return;
25490 }
25491
25492 switch (TREE_CODE (type))
25493 {
25494 case ERROR_MARK:
25495 break;
25496
25497 case POINTER_TYPE:
25498 case REFERENCE_TYPE:
25499 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25500 ensures that the gen_type_die recursion will terminate even if the
25501 type is recursive. Recursive types are possible in Ada. */
25502 /* ??? We could perhaps do this for all types before the switch
25503 statement. */
25504 TREE_ASM_WRITTEN (type) = 1;
25505
25506 /* For these types, all that is required is that we output a DIE (or a
25507 set of DIEs) to represent the "basis" type. */
25508 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25509 DINFO_USAGE_IND_USE);
25510 break;
25511
25512 case OFFSET_TYPE:
25513 /* This code is used for C++ pointer-to-data-member types.
25514 Output a description of the relevant class type. */
25515 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25516 DINFO_USAGE_IND_USE);
25517
25518 /* Output a description of the type of the object pointed to. */
25519 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25520 DINFO_USAGE_IND_USE);
25521
25522 /* Now output a DIE to represent this pointer-to-data-member type
25523 itself. */
25524 gen_ptr_to_mbr_type_die (type, context_die);
25525 break;
25526
25527 case FUNCTION_TYPE:
25528 /* Force out return type (in case it wasn't forced out already). */
25529 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25530 DINFO_USAGE_DIR_USE);
25531 gen_subroutine_type_die (type, context_die);
25532 break;
25533
25534 case METHOD_TYPE:
25535 /* Force out return type (in case it wasn't forced out already). */
25536 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25537 DINFO_USAGE_DIR_USE);
25538 gen_subroutine_type_die (type, context_die);
25539 break;
25540
25541 case ARRAY_TYPE:
25542 case VECTOR_TYPE:
25543 gen_array_type_die (type, context_die);
25544 break;
25545
25546 case ENUMERAL_TYPE:
25547 case RECORD_TYPE:
25548 case UNION_TYPE:
25549 case QUAL_UNION_TYPE:
25550 gen_tagged_type_die (type, context_die, usage);
25551 return;
25552
25553 case VOID_TYPE:
25554 case INTEGER_TYPE:
25555 case REAL_TYPE:
25556 case FIXED_POINT_TYPE:
25557 case COMPLEX_TYPE:
25558 case BOOLEAN_TYPE:
25559 /* No DIEs needed for fundamental types. */
25560 break;
25561
25562 case NULLPTR_TYPE:
25563 case LANG_TYPE:
25564 /* Just use DW_TAG_unspecified_type. */
25565 {
25566 dw_die_ref type_die = lookup_type_die (type);
25567 if (type_die == NULL)
25568 {
25569 tree name = TYPE_IDENTIFIER (type);
25570 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25571 type);
25572 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25573 equate_type_number_to_die (type, type_die);
25574 }
25575 }
25576 break;
25577
25578 default:
25579 if (is_cxx_auto (type))
25580 {
25581 tree name = TYPE_IDENTIFIER (type);
25582 dw_die_ref *die = (name == get_identifier ("auto")
25583 ? &auto_die : &decltype_auto_die);
25584 if (!*die)
25585 {
25586 *die = new_die (DW_TAG_unspecified_type,
25587 comp_unit_die (), NULL_TREE);
25588 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25589 }
25590 equate_type_number_to_die (type, *die);
25591 break;
25592 }
25593 gcc_unreachable ();
25594 }
25595
25596 TREE_ASM_WRITTEN (type) = 1;
25597 }
25598
25599 static void
25600 gen_type_die (tree type, dw_die_ref context_die)
25601 {
25602 if (type != error_mark_node)
25603 {
25604 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25605 if (flag_checking)
25606 {
25607 dw_die_ref die = lookup_type_die (type);
25608 if (die)
25609 check_die (die);
25610 }
25611 }
25612 }
25613
25614 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25615 things which are local to the given block. */
25616
25617 static void
25618 gen_block_die (tree stmt, dw_die_ref context_die)
25619 {
25620 int must_output_die = 0;
25621 bool inlined_func;
25622
25623 /* Ignore blocks that are NULL. */
25624 if (stmt == NULL_TREE)
25625 return;
25626
25627 inlined_func = inlined_function_outer_scope_p (stmt);
25628
25629 /* If the block is one fragment of a non-contiguous block, do not
25630 process the variables, since they will have been done by the
25631 origin block. Do process subblocks. */
25632 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25633 {
25634 tree sub;
25635
25636 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25637 gen_block_die (sub, context_die);
25638
25639 return;
25640 }
25641
25642 /* Determine if we need to output any Dwarf DIEs at all to represent this
25643 block. */
25644 if (inlined_func)
25645 /* The outer scopes for inlinings *must* always be represented. We
25646 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25647 must_output_die = 1;
25648 else if (lookup_block_die (stmt))
25649 /* If we already have a DIE then it was filled early. Meanwhile
25650 we might have pruned all BLOCK_VARS as optimized out but we
25651 still want to generate high/low PC attributes so output it. */
25652 must_output_die = 1;
25653 else if (TREE_USED (stmt)
25654 || TREE_ASM_WRITTEN (stmt)
25655 || BLOCK_ABSTRACT (stmt))
25656 {
25657 /* Determine if this block directly contains any "significant"
25658 local declarations which we will need to output DIEs for. */
25659 if (debug_info_level > DINFO_LEVEL_TERSE)
25660 {
25661 /* We are not in terse mode so any local declaration that
25662 is not ignored for debug purposes counts as being a
25663 "significant" one. */
25664 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25665 must_output_die = 1;
25666 else
25667 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25668 if (!DECL_IGNORED_P (var))
25669 {
25670 must_output_die = 1;
25671 break;
25672 }
25673 }
25674 else if (!dwarf2out_ignore_block (stmt))
25675 must_output_die = 1;
25676 }
25677
25678 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25679 DIE for any block which contains no significant local declarations at
25680 all. Rather, in such cases we just call `decls_for_scope' so that any
25681 needed Dwarf info for any sub-blocks will get properly generated. Note
25682 that in terse mode, our definition of what constitutes a "significant"
25683 local declaration gets restricted to include only inlined function
25684 instances and local (nested) function definitions. */
25685 if (must_output_die)
25686 {
25687 if (inlined_func)
25688 {
25689 /* If STMT block is abstract, that means we have been called
25690 indirectly from dwarf2out_abstract_function.
25691 That function rightfully marks the descendant blocks (of
25692 the abstract function it is dealing with) as being abstract,
25693 precisely to prevent us from emitting any
25694 DW_TAG_inlined_subroutine DIE as a descendant
25695 of an abstract function instance. So in that case, we should
25696 not call gen_inlined_subroutine_die.
25697
25698 Later though, when cgraph asks dwarf2out to emit info
25699 for the concrete instance of the function decl into which
25700 the concrete instance of STMT got inlined, the latter will lead
25701 to the generation of a DW_TAG_inlined_subroutine DIE. */
25702 if (! BLOCK_ABSTRACT (stmt))
25703 gen_inlined_subroutine_die (stmt, context_die);
25704 }
25705 else
25706 gen_lexical_block_die (stmt, context_die);
25707 }
25708 else
25709 decls_for_scope (stmt, context_die);
25710 }
25711
25712 /* Process variable DECL (or variable with origin ORIGIN) within
25713 block STMT and add it to CONTEXT_DIE. */
25714 static void
25715 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25716 {
25717 dw_die_ref die;
25718 tree decl_or_origin = decl ? decl : origin;
25719
25720 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25721 die = lookup_decl_die (decl_or_origin);
25722 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25723 {
25724 if (TYPE_DECL_IS_STUB (decl_or_origin))
25725 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25726 else
25727 die = lookup_decl_die (decl_or_origin);
25728 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25729 if (! die && ! early_dwarf)
25730 return;
25731 }
25732 else
25733 die = NULL;
25734
25735 /* Avoid creating DIEs for local typedefs and concrete static variables that
25736 will only be pruned later. */
25737 if ((origin || decl_ultimate_origin (decl))
25738 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25739 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25740 {
25741 origin = decl_ultimate_origin (decl_or_origin);
25742 if (decl && VAR_P (decl) && die != NULL)
25743 {
25744 die = lookup_decl_die (origin);
25745 if (die != NULL)
25746 equate_decl_number_to_die (decl, die);
25747 }
25748 return;
25749 }
25750
25751 if (die != NULL && die->die_parent == NULL)
25752 add_child_die (context_die, die);
25753 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25754 {
25755 if (early_dwarf)
25756 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25757 stmt, context_die);
25758 }
25759 else
25760 {
25761 if (decl && DECL_P (decl))
25762 {
25763 die = lookup_decl_die (decl);
25764
25765 /* Early created DIEs do not have a parent as the decls refer
25766 to the function as DECL_CONTEXT rather than the BLOCK. */
25767 if (die && die->die_parent == NULL)
25768 {
25769 gcc_assert (in_lto_p);
25770 add_child_die (context_die, die);
25771 }
25772 }
25773
25774 gen_decl_die (decl, origin, NULL, context_die);
25775 }
25776 }
25777
25778 /* Generate all of the decls declared within a given scope and (recursively)
25779 all of its sub-blocks. */
25780
25781 static void
25782 decls_for_scope (tree stmt, dw_die_ref context_die)
25783 {
25784 tree decl;
25785 unsigned int i;
25786 tree subblocks;
25787
25788 /* Ignore NULL blocks. */
25789 if (stmt == NULL_TREE)
25790 return;
25791
25792 /* Output the DIEs to represent all of the data objects and typedefs
25793 declared directly within this block but not within any nested
25794 sub-blocks. Also, nested function and tag DIEs have been
25795 generated with a parent of NULL; fix that up now. We don't
25796 have to do this if we're at -g1. */
25797 if (debug_info_level > DINFO_LEVEL_TERSE)
25798 {
25799 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25800 process_scope_var (stmt, decl, NULL_TREE, context_die);
25801 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25802 origin - avoid doing this twice as we have no good way to see
25803 if we've done it once already. */
25804 if (! early_dwarf)
25805 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25806 {
25807 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25808 if (decl == current_function_decl)
25809 /* Ignore declarations of the current function: although they
25810 are declarations, gen_subprogram_die would treat them
25811 as definitions again, because they are equal to
25812 current_function_decl, and endlessly recurse. */;
25813 else if (TREE_CODE (decl) == FUNCTION_DECL)
25814 process_scope_var (stmt, decl, NULL_TREE, context_die);
25815 else
25816 process_scope_var (stmt, NULL_TREE, decl, context_die);
25817 }
25818 }
25819
25820 /* Even if we're at -g1, we need to process the subblocks in order to get
25821 inlined call information. */
25822
25823 /* Output the DIEs to represent all sub-blocks (and the items declared
25824 therein) of this block. */
25825 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25826 subblocks != NULL;
25827 subblocks = BLOCK_CHAIN (subblocks))
25828 gen_block_die (subblocks, context_die);
25829 }
25830
25831 /* Is this a typedef we can avoid emitting? */
25832
25833 static bool
25834 is_redundant_typedef (const_tree decl)
25835 {
25836 if (TYPE_DECL_IS_STUB (decl))
25837 return true;
25838
25839 if (DECL_ARTIFICIAL (decl)
25840 && DECL_CONTEXT (decl)
25841 && is_tagged_type (DECL_CONTEXT (decl))
25842 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25843 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25844 /* Also ignore the artificial member typedef for the class name. */
25845 return true;
25846
25847 return false;
25848 }
25849
25850 /* Return TRUE if TYPE is a typedef that names a type for linkage
25851 purposes. This kind of typedefs is produced by the C++ FE for
25852 constructs like:
25853
25854 typedef struct {...} foo;
25855
25856 In that case, there is no typedef variant type produced for foo.
25857 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25858 struct type. */
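/* Editor's illustration (not part of the original comment): of

     typedef struct {...} foo;        (anonymous struct named for linkage)
     typedef struct bar {...} foo2;   (ordinary typedef of a tagged type)

   only the first is a naming typedef; the second has DECL_ORIGINAL_TYPE
   set and is therefore rejected below.  */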
25859
25860 static bool
25861 is_naming_typedef_decl (const_tree decl)
25862 {
25863 if (decl == NULL_TREE
25864 || TREE_CODE (decl) != TYPE_DECL
25865 || DECL_NAMELESS (decl)
25866 || !is_tagged_type (TREE_TYPE (decl))
25867 || DECL_IS_BUILTIN (decl)
25868 || is_redundant_typedef (decl)
25869 /* It looks like Ada produces TYPE_DECLs that are very similar
25870 to C++ naming typedefs but that have different
25871 semantics. Let's be specific to C++ for now. */
25872 || !is_cxx (decl))
25873 return FALSE;
25874
25875 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25876 && TYPE_NAME (TREE_TYPE (decl)) == decl
25877 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25878 != TYPE_NAME (TREE_TYPE (decl))));
25879 }
25880
25881 /* Looks up the DIE for a context. */
25882
25883 static inline dw_die_ref
25884 lookup_context_die (tree context)
25885 {
25886 if (context)
25887 {
25888 /* Find die that represents this context. */
25889 if (TYPE_P (context))
25890 {
25891 context = TYPE_MAIN_VARIANT (context);
25892 dw_die_ref ctx = lookup_type_die (context);
25893 if (!ctx)
25894 return NULL;
25895 return strip_naming_typedef (context, ctx);
25896 }
25897 else
25898 return lookup_decl_die (context);
25899 }
25900 return comp_unit_die ();
25901 }
25902
25903 /* Returns the DIE for a context. */
25904
25905 static inline dw_die_ref
25906 get_context_die (tree context)
25907 {
25908 if (context)
25909 {
25910 /* Find die that represents this context. */
25911 if (TYPE_P (context))
25912 {
25913 context = TYPE_MAIN_VARIANT (context);
25914 return strip_naming_typedef (context, force_type_die (context));
25915 }
25916 else
25917 return force_decl_die (context);
25918 }
25919 return comp_unit_die ();
25920 }
25921
25922 /* Returns the DIE for decl. A DIE will always be returned. */
25923
25924 static dw_die_ref
25925 force_decl_die (tree decl)
25926 {
25927 dw_die_ref decl_die;
25928 unsigned saved_external_flag;
25929 tree save_fn = NULL_TREE;
25930 decl_die = lookup_decl_die (decl);
25931 if (!decl_die)
25932 {
25933 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25934
25935 decl_die = lookup_decl_die (decl);
25936 if (decl_die)
25937 return decl_die;
25938
25939 switch (TREE_CODE (decl))
25940 {
25941 case FUNCTION_DECL:
25942 /* Clear current_function_decl, so that gen_subprogram_die thinks
25943 that this is a declaration. At this point, we just want to force
25944 a declaration DIE. */
25945 save_fn = current_function_decl;
25946 current_function_decl = NULL_TREE;
25947 gen_subprogram_die (decl, context_die);
25948 current_function_decl = save_fn;
25949 break;
25950
25951 case VAR_DECL:
25952 /* Set the external flag to force a declaration DIE. Restore it after
25953 the gen_decl_die() call. */
25954 saved_external_flag = DECL_EXTERNAL (decl);
25955 DECL_EXTERNAL (decl) = 1;
25956 gen_decl_die (decl, NULL, NULL, context_die);
25957 DECL_EXTERNAL (decl) = saved_external_flag;
25958 break;
25959
25960 case NAMESPACE_DECL:
25961 if (dwarf_version >= 3 || !dwarf_strict)
25962 dwarf2out_decl (decl);
25963 else
25964 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25965 decl_die = comp_unit_die ();
25966 break;
25967
25968 case TRANSLATION_UNIT_DECL:
25969 decl_die = comp_unit_die ();
25970 break;
25971
25972 default:
25973 gcc_unreachable ();
25974 }
25975
25976 /* We should be able to find the DIE now. */
25977 if (!decl_die)
25978 decl_die = lookup_decl_die (decl);
25979 gcc_assert (decl_die);
25980 }
25981
25982 return decl_die;
25983 }
25984
25985 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25986 always returned. */
25987
25988 static dw_die_ref
25989 force_type_die (tree type)
25990 {
25991 dw_die_ref type_die;
25992
25993 type_die = lookup_type_die (type);
25994 if (!type_die)
25995 {
25996 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25997
25998 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25999 false, context_die);
26000 gcc_assert (type_die);
26001 }
26002 return type_die;
26003 }
26004
26005 /* Force out any required namespaces to be able to output DECL,
26006 and return the new context_die for it, if it's changed. */
26007
26008 static dw_die_ref
26009 setup_namespace_context (tree thing, dw_die_ref context_die)
26010 {
26011 tree context = (DECL_P (thing)
26012 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26013 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26014 /* Force out the namespace. */
26015 context_die = force_decl_die (context);
26016
26017 return context_die;
26018 }
26019
26020 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26021 type) within its namespace, if appropriate.
26022
26023 For compatibility with older debuggers, namespace DIEs only contain
26024 declarations; all definitions are emitted at CU scope, with
26025 DW_AT_specification pointing to the declaration (like with class
26026 members). */
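/* Editor's sketch of the intended DIE layout (illustrative only):

     DW_TAG_namespace "N"
       DW_TAG_variable "v"        <- declaration, with DW_AT_declaration
     DW_TAG_variable              <- definition at CU scope
       DW_AT_specification        -> the declaration above  */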
26027
26028 static dw_die_ref
26029 declare_in_namespace (tree thing, dw_die_ref context_die)
26030 {
26031 dw_die_ref ns_context;
26032
26033 if (debug_info_level <= DINFO_LEVEL_TERSE)
26034 return context_die;
26035
26036 /* External declarations in the local scope only need to be emitted
26037 once, not once in the namespace and once in the scope.
26038
26039 This avoids declaring the `extern' below in the
26040 namespace DIE as well as in the innermost scope:
26041
26042 namespace S
26043 {
26044 int i=5;
26045 int foo()
26046 {
26047 int i=8;
26048 extern int i;
26049 return i;
26050 }
26051 }
26052 */
26053 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26054 return context_die;
26055
26056 /* If this decl is from an inlined function, then don't try to emit it in its
26057 namespace, as we will get confused. It would have already been emitted
26058 when the abstract instance of the inline function was emitted anyway. */
26059 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26060 return context_die;
26061
26062 ns_context = setup_namespace_context (thing, context_die);
26063
26064 if (ns_context != context_die)
26065 {
26066 if (is_fortran ())
26067 return ns_context;
26068 if (DECL_P (thing))
26069 gen_decl_die (thing, NULL, NULL, ns_context);
26070 else
26071 gen_type_die (thing, ns_context);
26072 }
26073 return context_die;
26074 }
26075
26076 /* Generate a DIE for a namespace or namespace alias. */
26077
26078 static void
26079 gen_namespace_die (tree decl, dw_die_ref context_die)
26080 {
26081 dw_die_ref namespace_die;
26082
26083 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26084 they are an alias of. */
26085 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26086 {
26087 /* Output a real namespace or module. */
26088 context_die = setup_namespace_context (decl, comp_unit_die ());
26089 namespace_die = new_die (is_fortran ()
26090 ? DW_TAG_module : DW_TAG_namespace,
26091 context_die, decl);
26092 /* For Fortran modules defined in a different CU, don't add src coords. */
26093 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26094 {
26095 const char *name = dwarf2_name (decl, 0);
26096 if (name)
26097 add_name_attribute (namespace_die, name);
26098 }
26099 else
26100 add_name_and_src_coords_attributes (namespace_die, decl);
26101 if (DECL_EXTERNAL (decl))
26102 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26103 equate_decl_number_to_die (decl, namespace_die);
26104 }
26105 else
26106 {
26107 /* Output a namespace alias. */
26108
26109 /* Force out the namespace we are an alias of, if necessary. */
26110 dw_die_ref origin_die
26111 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26112
26113 if (DECL_FILE_SCOPE_P (decl)
26114 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26115 context_die = setup_namespace_context (decl, comp_unit_die ());
26116 /* Now create the namespace alias DIE. */
26117 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26118 add_name_and_src_coords_attributes (namespace_die, decl);
26119 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26120 equate_decl_number_to_die (decl, namespace_die);
26121 }
26122 if ((dwarf_version >= 5 || !dwarf_strict)
26123 && lang_hooks.decls.decl_dwarf_attribute (decl,
26124 DW_AT_export_symbols) == 1)
26125 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26126
26127 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26128 if (want_pubnames ())
26129 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26130 }
26131
26132 /* Generate Dwarf debug information for a decl described by DECL.
26133 The return value is currently only meaningful for PARM_DECLs,
26134 for all other decls it returns NULL.
26135
26136 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26137 It can be NULL otherwise. */
26138
26139 static dw_die_ref
26140 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26141 dw_die_ref context_die)
26142 {
26143 tree decl_or_origin = decl ? decl : origin;
26144 tree class_origin = NULL, ultimate_origin;
26145
26146 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26147 return NULL;
26148
26149 switch (TREE_CODE (decl_or_origin))
26150 {
26151 case ERROR_MARK:
26152 break;
26153
26154 case CONST_DECL:
26155 if (!is_fortran () && !is_ada ())
26156 {
26157 /* The individual enumerators of an enum type get output when we output
26158 the Dwarf representation of the relevant enum type itself. */
26159 break;
26160 }
26161
26162 /* Emit its type. */
26163 gen_type_die (TREE_TYPE (decl), context_die);
26164
26165 /* And its containing namespace. */
26166 context_die = declare_in_namespace (decl, context_die);
26167
26168 gen_const_die (decl, context_die);
26169 break;
26170
26171 case FUNCTION_DECL:
26172 #if 0
26173 /* FIXME */
26174 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26175 on local redeclarations of global functions. That seems broken. */
26176 if (current_function_decl != decl)
26177 /* This is only a declaration. */;
26178 #endif
26179
26180 /* We should have abstract copies already and should not generate
26181 stray type DIEs in late LTO dumping. */
26182 if (! early_dwarf)
26183 ;
26184
26185 /* If we're emitting a clone, emit info for the abstract instance. */
26186 else if (origin || DECL_ORIGIN (decl) != decl)
26187 dwarf2out_abstract_function (origin
26188 ? DECL_ORIGIN (origin)
26189 : DECL_ABSTRACT_ORIGIN (decl));
26190
26191 /* If we're emitting a possibly inlined function emit it as
26192 abstract instance. */
26193 else if (cgraph_function_possibly_inlined_p (decl)
26194 && ! DECL_ABSTRACT_P (decl)
26195 && ! class_or_namespace_scope_p (context_die)
26196 /* dwarf2out_abstract_function won't emit a die if this is just
26197 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26198 that case, because that works only if we have a die. */
26199 && DECL_INITIAL (decl) != NULL_TREE)
26200 dwarf2out_abstract_function (decl);
26201
26202 /* Otherwise we're emitting the primary DIE for this decl. */
26203 else if (debug_info_level > DINFO_LEVEL_TERSE)
26204 {
26205 /* Before we describe the FUNCTION_DECL itself, make sure that we
26206 have its containing type. */
26207 if (!origin)
26208 origin = decl_class_context (decl);
26209 if (origin != NULL_TREE)
26210 gen_type_die (origin, context_die);
26211
26212 /* And its return type. */
26213 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26214
26215 /* And its virtual context. */
26216 if (DECL_VINDEX (decl) != NULL_TREE)
26217 gen_type_die (DECL_CONTEXT (decl), context_die);
26218
26219 /* Make sure we have a member DIE for decl. */
26220 if (origin != NULL_TREE)
26221 gen_type_die_for_member (origin, decl, context_die);
26222
26223 /* And its containing namespace. */
26224 context_die = declare_in_namespace (decl, context_die);
26225 }
26226
26227 /* Now output a DIE to represent the function itself. */
26228 if (decl)
26229 gen_subprogram_die (decl, context_die);
26230 break;
26231
26232 case TYPE_DECL:
26233 /* If we are in terse mode, don't generate any DIEs to represent any
26234 actual typedefs. */
26235 if (debug_info_level <= DINFO_LEVEL_TERSE)
26236 break;
26237
26238 /* In the special case of a TYPE_DECL node representing the declaration
26239 of some type tag, if the given TYPE_DECL is marked as having been
26240 instantiated from some other (original) TYPE_DECL node (e.g. one which
26241 was generated within the original definition of an inline function) we
26242 used to generate a special (abbreviated) DW_TAG_structure_type,
26243 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26244 should actually be referencing those DIEs, as variable DIEs with that
26245 type would already be emitted in the abstract origin, so it was always
26246 removed during unused type pruning. Don't add anything in this
26247 case. */
26248 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26249 break;
26250
26251 if (is_redundant_typedef (decl))
26252 gen_type_die (TREE_TYPE (decl), context_die);
26253 else
26254 /* Output a DIE to represent the typedef itself. */
26255 gen_typedef_die (decl, context_die);
26256 break;
26257
26258 case LABEL_DECL:
26259 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26260 gen_label_die (decl, context_die);
26261 break;
26262
26263 case VAR_DECL:
26264 case RESULT_DECL:
26265 /* If we are in terse mode, don't generate any DIEs to represent any
26266 variable declarations or definitions. */
26267 if (debug_info_level <= DINFO_LEVEL_TERSE)
26268 break;
26269
26270 /* Avoid generating stray type DIEs during late dwarf dumping.
26271 All types have been dumped early. */
26272 if (early_dwarf
26273 /* ??? But in LTRANS we cannot annotate early created variably
26274 modified type DIEs without copying them and adjusting all
26275 references to them. Dump them again as happens for inlining
26276 which copies both the decl and the types. */
26277 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26278 in VLA bound information for example. */
26279 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26280 current_function_decl)))
26281 {
26282 /* Output any DIEs that are needed to specify the type of this data
26283 object. */
26284 if (decl_by_reference_p (decl_or_origin))
26285 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26286 else
26287 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26288 }
26289
26290 if (early_dwarf)
26291 {
26292 /* And its containing type. */
26293 class_origin = decl_class_context (decl_or_origin);
26294 if (class_origin != NULL_TREE)
26295 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26296
26297 /* And its containing namespace. */
26298 context_die = declare_in_namespace (decl_or_origin, context_die);
26299 }
26300
26301 /* Now output the DIE to represent the data object itself. This gets
26302 complicated because of the possibility that the VAR_DECL really
26303 represents an inlined instance of a formal parameter for an inline
26304 function. */
26305 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26306 if (ultimate_origin != NULL_TREE
26307 && TREE_CODE (ultimate_origin) == PARM_DECL)
26308 gen_formal_parameter_die (decl, origin,
26309 true /* Emit name attribute. */,
26310 context_die);
26311 else
26312 gen_variable_die (decl, origin, context_die);
26313 break;
26314
26315 case FIELD_DECL:
26316 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26317 /* Ignore the nameless fields that are used to skip bits but handle C++
26318 anonymous unions and structs. */
26319 if (DECL_NAME (decl) != NULL_TREE
26320 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26321 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26322 {
26323 gen_type_die (member_declared_type (decl), context_die);
26324 gen_field_die (decl, ctx, context_die);
26325 }
26326 break;
26327
26328 case PARM_DECL:
26329 /* Avoid generating stray type DIEs during late dwarf dumping.
26330 All types have been dumped early. */
26331 if (early_dwarf
26332 /* ??? But in LTRANS we cannot annotate early created variably
26333 modified type DIEs without copying them and adjusting all
26334 references to them. Dump them again as happens for inlining
26335 which copies both the decl and the types. */
26336 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26337 in VLA bound information for example. */
26338 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26339 current_function_decl)))
26340 {
26341 if (DECL_BY_REFERENCE (decl_or_origin))
26342 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26343 else
26344 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26345 }
26346 return gen_formal_parameter_die (decl, origin,
26347 true /* Emit name attribute. */,
26348 context_die);
26349
26350 case NAMESPACE_DECL:
26351 if (dwarf_version >= 3 || !dwarf_strict)
26352 gen_namespace_die (decl, context_die);
26353 break;
26354
26355 case IMPORTED_DECL:
26356 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26357 DECL_CONTEXT (decl), context_die);
26358 break;
26359
26360 case NAMELIST_DECL:
26361 gen_namelist_decl (DECL_NAME (decl), context_die,
26362 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26363 break;
26364
26365 default:
26366 /* Probably some frontend-internal decl. Assume we don't care. */
26367 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26368 break;
26369 }
26370
26371 return NULL;
26372 }
26373 \f
26374 /* Output initial debug information for global DECL. Called at the
26375 end of the parsing process.
26376
26377 This is the initial debug generation process. As such, the DIEs
26378 generated may be incomplete. A later debug generation pass
26379 (dwarf2out_late_global_decl) will augment the information generated
26380 in this pass (e.g., with complete location info). */
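/* Editor's note (illustrative, not from the original sources): for a
   global such as

     int g;

   this early pass typically creates the DW_TAG_variable DIE with its
   name and type, and dwarf2out_late_global_decl later fills in the
   location (e.g. DW_AT_location: DW_OP_addr g).  */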
26381
26382 static void
26383 dwarf2out_early_global_decl (tree decl)
26384 {
26385 set_early_dwarf s;
26386
26387 /* gen_decl_die() will set DECL_ABSTRACT because
26388 cgraph_function_possibly_inlined_p() returns true. This in
26389 turn will cause DW_AT_inline attributes to be set.
26390
26391 This happens because at early dwarf generation, there is no
26392 cgraph information, causing cgraph_function_possibly_inlined_p()
26393 to return true. Trick cgraph_function_possibly_inlined_p()
26394 while we generate dwarf early. */
26395 bool save = symtab->global_info_ready;
26396 symtab->global_info_ready = true;
26397
26398 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26399 other DECLs and they can point to template types or other things
26400 that dwarf2out can't handle when done via dwarf2out_decl. */
26401 if (TREE_CODE (decl) != TYPE_DECL
26402 && TREE_CODE (decl) != PARM_DECL)
26403 {
26404 if (TREE_CODE (decl) == FUNCTION_DECL)
26405 {
26406 tree save_fndecl = current_function_decl;
26407
26408 /* For nested functions, make sure we have DIEs for the parents first
26409 so that all nested DIEs are generated at the proper scope in the
26410 first shot. */
26411 tree context = decl_function_context (decl);
26412 if (context != NULL)
26413 {
26414 dw_die_ref context_die = lookup_decl_die (context);
26415 current_function_decl = context;
26416
26417 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26418 enough so that it lands in its own context. This avoids type
26419 pruning issues later on. */
26420 if (context_die == NULL || is_declaration_die (context_die))
26421 dwarf2out_decl (context);
26422 }
26423
26424 /* Emit the abstract origin of a function first. This happens
26425 with C++ constructor clones, for example, and keeps
26426 dwarf2out_abstract_function happy, since it requires the early
26427 DIE of the abstract instance to be present. */
26428 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26429 dw_die_ref origin_die;
26430 if (origin != NULL
26431 /* Do not emit the DIE multiple times but make sure to
26432 process it fully here in case we just saw a declaration. */
26433 && ((origin_die = lookup_decl_die (origin)) == NULL
26434 || is_declaration_die (origin_die)))
26435 {
26436 current_function_decl = origin;
26437 dwarf2out_decl (origin);
26438 }
26439
26440 /* Emit the DIE for decl but avoid doing that multiple times. */
26441 dw_die_ref old_die;
26442 if ((old_die = lookup_decl_die (decl)) == NULL
26443 || is_declaration_die (old_die))
26444 {
26445 current_function_decl = decl;
26446 dwarf2out_decl (decl);
26447 }
26448
26449 current_function_decl = save_fndecl;
26450 }
26451 else
26452 dwarf2out_decl (decl);
26453 }
26454 symtab->global_info_ready = save;
26455 }
26456
26457 /* Return whether EXPR is an expression with the following pattern:
26458 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
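/* Editor's note: such an expression corresponds to source like

     *(volatile int *) 0x40021000

   i.e. a dereference of a pointer cast from an integer constant, as can
   appear in the DECL_VALUE_EXPR of a variable placed at a fixed address
   (the address above is purely illustrative).  */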
26459
26460 static bool
26461 is_trivial_indirect_ref (tree expr)
26462 {
26463 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26464 return false;
26465
26466 tree nop = TREE_OPERAND (expr, 0);
26467 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26468 return false;
26469
26470 tree int_cst = TREE_OPERAND (nop, 0);
26471 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26472 }
26473
26474 /* Output debug information for global decl DECL. Called from
26475 toplev.c after compilation proper has finished. */
26476
26477 static void
26478 dwarf2out_late_global_decl (tree decl)
26479 {
26480 /* Fill-in any location information we were unable to determine
26481 on the first pass. */
26482 if (VAR_P (decl))
26483 {
26484 dw_die_ref die = lookup_decl_die (decl);
26485
26486 /* We may have to generate early debug late for LTO in case debug
26487 was not enabled at compile-time or the target doesn't support
26488 the LTO early debug scheme. */
26489 if (! die && in_lto_p)
26490 {
26491 dwarf2out_decl (decl);
26492 die = lookup_decl_die (decl);
26493 }
26494
26495 if (die)
26496 {
26497 /* We get called via the symtab code invoking late_global_decl
26498 for symbols that are optimized out.
26499
26500 Do not add locations for those, except if they have a
26501 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26502 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26503 INDIRECT_REF expression, as this could generate relocations to
26504 text symbols in LTO object files, which is invalid. */
26505 varpool_node *node = varpool_node::get (decl);
26506 if ((! node || ! node->definition)
26507 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26508 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26509 tree_add_const_value_attribute_for_decl (die, decl);
26510 else
26511 add_location_or_const_value_attribute (die, decl, false);
26512 }
26513 }
26514 }
26515
26516 /* Output debug information for type decl DECL. Called from toplev.c
26517 and from language front ends (to record built-in types). */
26518 static void
26519 dwarf2out_type_decl (tree decl, int local)
26520 {
26521 if (!local)
26522 {
26523 set_early_dwarf s;
26524 dwarf2out_decl (decl);
26525 }
26526 }
26527
26528 /* Output debug information for imported module or decl DECL.
26529 NAME is non-NULL name in the lexical block if the decl has been renamed.
26530 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26531 that DECL belongs to.
26532 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
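/* Editor's note (illustrative): C++ constructs such as

     using namespace A;   (emitted as DW_TAG_imported_module)
     using A::x;          (emitted as DW_TAG_imported_declaration)

   reach this code; the DW_AT_import attribute of the new DIE refers to
   the DIE of the imported namespace or declaration.  */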
26533 static void
26534 dwarf2out_imported_module_or_decl_1 (tree decl,
26535 tree name,
26536 tree lexical_block,
26537 dw_die_ref lexical_block_die)
26538 {
26539 expanded_location xloc;
26540 dw_die_ref imported_die = NULL;
26541 dw_die_ref at_import_die;
26542
26543 if (TREE_CODE (decl) == IMPORTED_DECL)
26544 {
26545 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26546 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26547 gcc_assert (decl);
26548 }
26549 else
26550 xloc = expand_location (input_location);
26551
26552 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26553 {
26554 at_import_die = force_type_die (TREE_TYPE (decl));
26555 /* For namespace N { typedef void T; } using N::T; base_type_die
26556 returns NULL, but DW_TAG_imported_declaration requires
26557 the DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
26558 if (!at_import_die)
26559 {
26560 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26561 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26562 at_import_die = lookup_type_die (TREE_TYPE (decl));
26563 gcc_assert (at_import_die);
26564 }
26565 }
26566 else
26567 {
26568 at_import_die = lookup_decl_die (decl);
26569 if (!at_import_die)
26570 {
26571 /* If we're trying to avoid duplicate debug info, we may not have
26572 emitted the member decl for this field. Emit it now. */
26573 if (TREE_CODE (decl) == FIELD_DECL)
26574 {
26575 tree type = DECL_CONTEXT (decl);
26576
26577 if (TYPE_CONTEXT (type)
26578 && TYPE_P (TYPE_CONTEXT (type))
26579 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26580 DINFO_USAGE_DIR_USE))
26581 return;
26582 gen_type_die_for_member (type, decl,
26583 get_context_die (TYPE_CONTEXT (type)));
26584 }
26585 if (TREE_CODE (decl) == NAMELIST_DECL)
26586 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26587 get_context_die (DECL_CONTEXT (decl)),
26588 NULL_TREE);
26589 else
26590 at_import_die = force_decl_die (decl);
26591 }
26592 }
26593
26594 if (TREE_CODE (decl) == NAMESPACE_DECL)
26595 {
26596 if (dwarf_version >= 3 || !dwarf_strict)
26597 imported_die = new_die (DW_TAG_imported_module,
26598 lexical_block_die,
26599 lexical_block);
26600 else
26601 return;
26602 }
26603 else
26604 imported_die = new_die (DW_TAG_imported_declaration,
26605 lexical_block_die,
26606 lexical_block);
26607
26608 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26609 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26610 if (debug_column_info && xloc.column)
26611 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26612 if (name)
26613 add_AT_string (imported_die, DW_AT_name,
26614 IDENTIFIER_POINTER (name));
26615 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26616 }
26617
26618 /* Output debug information for imported module or decl DECL.
26619 NAME is non-NULL name in context if the decl has been renamed.
26620 CHILD is true if decl is one of the renamed decls as part of
26621 importing whole module.
26622 IMPLICIT is set if this hook is called for an implicit import
26623 such as inline namespace. */
26624
26625 static void
26626 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26627 bool child, bool implicit)
26628 {
26629 /* dw_die_ref at_import_die; */
26630 dw_die_ref scope_die;
26631
26632 if (debug_info_level <= DINFO_LEVEL_TERSE)
26633 return;
26634
26635 gcc_assert (decl);
26636
26637 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26638 should be enough; for DWARF4 and older, even if we emit it as an
26639 extension, add the implicit DW_TAG_imported_module anyway for the
26640 benefit of consumers unaware of DW_AT_export_symbols. */
26641 if (implicit
26642 && dwarf_version >= 5
26643 && lang_hooks.decls.decl_dwarf_attribute (decl,
26644 DW_AT_export_symbols) == 1)
26645 return;
26646
26647 set_early_dwarf s;
26648
26649 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26650 two DIEs: the DIE for the imported decl itself (referenced via
26651 DW_AT_import) and the scope DIE. */
26652
26653 /* Get the scope die for decl context. Use comp_unit_die for global module
26654 or decl. If die is not found for non globals, force new die. */
26655 if (context
26656 && TYPE_P (context)
26657 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26658 return;
26659
26660 scope_die = get_context_die (context);
26661
26662 if (child)
26663 {
26664 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26665 there is nothing we can do here. */
26666 if (dwarf_version < 3 && dwarf_strict)
26667 return;
26668
26669 gcc_assert (scope_die->die_child);
26670 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26671 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26672 scope_die = scope_die->die_child;
26673 }
26674
26675 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26676 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26677 }
26678
26679 /* Output debug information for namelists. */
26680
26681 static dw_die_ref
26682 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26683 {
26684 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26685 tree value;
26686 unsigned i;
26687
26688 if (debug_info_level <= DINFO_LEVEL_TERSE)
26689 return NULL;
26690
26691 gcc_assert (scope_die != NULL);
26692 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26693 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26694
26695 /* If there are no item_decls, we have a nondefining namelist, e.g.
26696 with USE association; hence, set DW_AT_declaration. */
26697 if (item_decls == NULL_TREE)
26698 {
26699 add_AT_flag (nml_die, DW_AT_declaration, 1);
26700 return nml_die;
26701 }
26702
26703 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26704 {
26705 nml_item_ref_die = lookup_decl_die (value);
26706 if (!nml_item_ref_die)
26707 nml_item_ref_die = force_decl_die (value);
26708
26709 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26710 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26711 }
26712 return nml_die;
26713 }
26714
26715
26716 /* Write the debugging output for DECL. */
26717
26718 static void
26719 dwarf2out_decl (tree decl)
26720 {
26721 dw_die_ref context_die = comp_unit_die ();
26722
26723 switch (TREE_CODE (decl))
26724 {
26725 case ERROR_MARK:
26726 return;
26727
26728 case FUNCTION_DECL:
26729 /* If we're a nested function, initially use a parent of NULL; if we're
26730 a plain function, this will be fixed up in decls_for_scope. If
26731 we're a method, it will be ignored, since we already have a DIE.
26732 Avoid doing this late though since clones of class methods may
26733 otherwise end up in limbo and create type DIEs late. */
26734 if (early_dwarf
26735 && decl_function_context (decl)
26736 /* But if we're in terse mode, we don't care about scope. */
26737 && debug_info_level > DINFO_LEVEL_TERSE)
26738 context_die = NULL;
26739 break;
26740
26741 case VAR_DECL:
26742 /* For local statics, look up the proper context DIE. */
26743 if (local_function_static (decl))
26744 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26745
26746 /* If we are in terse mode, don't generate any DIEs to represent any
26747 variable declarations or definitions. */
26748 if (debug_info_level <= DINFO_LEVEL_TERSE)
26749 return;
26750 break;
26751
26752 case CONST_DECL:
26753 if (debug_info_level <= DINFO_LEVEL_TERSE)
26754 return;
26755 if (!is_fortran () && !is_ada ())
26756 return;
26757 if (TREE_STATIC (decl) && decl_function_context (decl))
26758 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26759 break;
26760
26761 case NAMESPACE_DECL:
26762 case IMPORTED_DECL:
26763 if (debug_info_level <= DINFO_LEVEL_TERSE)
26764 return;
26765 if (lookup_decl_die (decl) != NULL)
26766 return;
26767 break;
26768
26769 case TYPE_DECL:
26770 /* Don't emit stubs for types unless they are needed by other DIEs. */
26771 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26772 return;
26773
26774 /* Don't bother trying to generate any DIEs to represent any of the
26775 normal built-in types for the language we are compiling. */
26776 if (DECL_IS_BUILTIN (decl))
26777 return;
26778
26779 /* If we are in terse mode, don't generate any DIEs for types. */
26780 if (debug_info_level <= DINFO_LEVEL_TERSE)
26781 return;
26782
26783 /* If we're a function-scope tag, initially use a parent of NULL;
26784 this will be fixed up in decls_for_scope. */
26785 if (decl_function_context (decl))
26786 context_die = NULL;
26787
26788 break;
26789
26790 case NAMELIST_DECL:
26791 break;
26792
26793 default:
26794 return;
26795 }
26796
26797 gen_decl_die (decl, NULL, NULL, context_die);
26798
26799 if (flag_checking)
26800 {
26801 dw_die_ref die = lookup_decl_die (decl);
26802 if (die)
26803 check_die (die);
26804 }
26805 }
26806
26807 /* Write the debugging output for DECL. */
26808
26809 static void
26810 dwarf2out_function_decl (tree decl)
26811 {
26812 dwarf2out_decl (decl);
26813 call_arg_locations = NULL;
26814 call_arg_loc_last = NULL;
26815 call_site_count = -1;
26816 tail_call_site_count = -1;
26817 decl_loc_table->empty ();
26818 cached_dw_loc_list_table->empty ();
26819 }
26820
26821 /* Output a marker (i.e. a label) for the beginning of the generated code for
26822 a lexical block. */
26823
26824 static void
26825 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26826 unsigned int blocknum)
26827 {
26828 switch_to_section (current_function_section ());
26829 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26830 }
26831
26832 /* Output a marker (i.e. a label) for the end of the generated code for a
26833 lexical block. */
26834
26835 static void
26836 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26837 {
26838 switch_to_section (current_function_section ());
26839 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26840 }
26841
26842 /* Returns nonzero if it is appropriate not to emit any debugging
26843 information for BLOCK, because it doesn't contain any instructions.
26844
26845 Don't allow this for blocks with nested functions or local classes
26846 as we would end up with orphans, and in the presence of scheduling
26847 we may end up calling them anyway. */
26848
26849 static bool
26850 dwarf2out_ignore_block (const_tree block)
26851 {
26852 tree decl;
26853 unsigned int i;
26854
26855 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26856 if (TREE_CODE (decl) == FUNCTION_DECL
26857 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26858 return 0;
26859 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26860 {
26861 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26862 if (TREE_CODE (decl) == FUNCTION_DECL
26863 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26864 return 0;
26865 }
26866
26867 return 1;
26868 }
26869
26870 /* Hash table routines for file_hash. */
26871
26872 bool
26873 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26874 {
26875 return filename_cmp (p1->filename, p2) == 0;
26876 }
26877
26878 hashval_t
26879 dwarf_file_hasher::hash (dwarf_file_data *p)
26880 {
26881 return htab_hash_string (p->filename);
26882 }
26883
26884 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26885 dwarf2out.c) and return its "index". The index of each (known) filename is
26886 just a unique number which is associated with only that one filename. We
26887 need such numbers for the sake of generating labels (in the .debug_sfnames
26888 section) and references to those files numbers (in the .debug_srcinfo
26889 and .debug_macinfo sections). If the filename given as an argument is not
26890 found in our current list, add it to the list and assign it the next
26891 available unique index number. */
26892
26893 static struct dwarf_file_data *
26894 lookup_filename (const char *file_name)
26895 {
26896 struct dwarf_file_data * created;
26897
26898 if (!file_name)
26899 return NULL;
26900
26901 dwarf_file_data **slot
26902 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26903 INSERT);
26904 if (*slot)
26905 return *slot;
26906
26907 created = ggc_alloc<dwarf_file_data> ();
26908 created->filename = file_name;
26909 created->emitted_number = 0;
26910 *slot = created;
26911 return created;
26912 }
26913
26914 /* If the assembler will construct the file table, then translate the compiler
26915 internal file table number into the assembler file table number, and emit
26916 a .file directive if we haven't already emitted one yet. The file table
26917 numbers are different because we prune debug info for unused variables and
26918 types, which may include filenames. */
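/* Editor's note: when the assembler builds the line table, the directive
   emitted below looks roughly like

     .file 2 "src/foo.c"

   where the file number and name are purely illustrative.  */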
26919
26920 static int
26921 maybe_emit_file (struct dwarf_file_data * fd)
26922 {
26923 if (! fd->emitted_number)
26924 {
26925 if (last_emitted_file)
26926 fd->emitted_number = last_emitted_file->emitted_number + 1;
26927 else
26928 fd->emitted_number = 1;
26929 last_emitted_file = fd;
26930
26931 if (output_asm_line_debug_info ())
26932 {
26933 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26934 output_quoted_string (asm_out_file,
26935 remap_debug_filename (fd->filename));
26936 fputc ('\n', asm_out_file);
26937 }
26938 }
26939
26940 return fd->emitted_number;
26941 }
26942
26943 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26944 That generation should happen after function debug info has been
26945 generated. The value of the attribute is the constant value of ARG. */
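/* Editor's note (illustrative): for an instantiation such as

     template <int N> struct S { };
     S<3> s;

   the scheduled DIE is typically the DW_TAG_template_value_parameter
   for N, and the attribute added later is DW_AT_const_value 3.  */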
26946
26947 static void
26948 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26949 {
26950 die_arg_entry entry;
26951
26952 if (!die || !arg)
26953 return;
26954
26955 gcc_assert (early_dwarf);
26956
26957 if (!tmpl_value_parm_die_table)
26958 vec_alloc (tmpl_value_parm_die_table, 32);
26959
26960 entry.die = die;
26961 entry.arg = arg;
26962 vec_safe_push (tmpl_value_parm_die_table, entry);
26963 }
26964
26965 /* Return TRUE if T is an instance of a generic type, FALSE
26966 otherwise. */
26967
26968 static bool
26969 generic_type_p (tree t)
26970 {
26971 if (t == NULL_TREE || !TYPE_P (t))
26972 return false;
26973 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26974 }
26975
26976 /* Schedule the generation of the generic parameter dies for the
26977 instance of generic type T. The proper generation itself is later
26978 done by gen_scheduled_generic_parms_dies. */
26979
26980 static void
26981 schedule_generic_params_dies_gen (tree t)
26982 {
26983 if (!generic_type_p (t))
26984 return;
26985
26986 gcc_assert (early_dwarf);
26987
26988 if (!generic_type_instances)
26989 vec_alloc (generic_type_instances, 256);
26990
26991 vec_safe_push (generic_type_instances, t);
26992 }
26993
26994 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26995 by append_entry_to_tmpl_value_parm_die_table. This function must
26996 be called after function DIEs have been generated. */
26997
26998 static void
26999 gen_remaining_tmpl_value_param_die_attribute (void)
27000 {
27001 if (tmpl_value_parm_die_table)
27002 {
27003 unsigned i, j;
27004 die_arg_entry *e;
27005
27006 /* We do this in two phases - first get the cases we can
27007 handle during early-finish, preserving those we cannot
27008 (containing symbolic constants where we don't yet know
27009 whether we are going to output the referenced symbols).
27010 For those we try again at late-finish. */
27011 j = 0;
27012 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27013 {
27014 if (!e->die->removed
27015 && !tree_add_const_value_attribute (e->die, e->arg))
27016 {
27017 dw_loc_descr_ref loc = NULL;
27018 if (! early_dwarf
27019 && (dwarf_version >= 5 || !dwarf_strict))
27020 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27021 if (loc)
27022 add_AT_loc (e->die, DW_AT_location, loc);
27023 else
27024 (*tmpl_value_parm_die_table)[j++] = *e;
27025 }
27026 }
27027 tmpl_value_parm_die_table->truncate (j);
27028 }
27029 }
27030
27031 /* Generate generic parameters DIEs for instances of generic types
27032 that have been previously scheduled by
27033 schedule_generic_params_dies_gen. This function must be called
27034 after all the types of the CU have been laid out. */
27035
27036 static void
27037 gen_scheduled_generic_parms_dies (void)
27038 {
27039 unsigned i;
27040 tree t;
27041
27042 if (!generic_type_instances)
27043 return;
27044
27045 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27046 if (COMPLETE_TYPE_P (t))
27047 gen_generic_params_dies (t);
27048
27049 generic_type_instances = NULL;
27050 }
27051
27052
27053 /* Replace DW_AT_name for the decl with name. */
27054
27055 static void
27056 dwarf2out_set_name (tree decl, tree name)
27057 {
27058 dw_die_ref die;
27059 dw_attr_node *attr;
27060 const char *dname;
27061
27062 die = TYPE_SYMTAB_DIE (decl);
27063 if (!die)
27064 return;
27065
27066 dname = dwarf2_name (name, 0);
27067 if (!dname)
27068 return;
27069
27070 attr = get_AT (die, DW_AT_name);
27071 if (attr)
27072 {
27073 struct indirect_string_node *node;
27074
27075 node = find_AT_string (dname);
27076 /* replace the string. */
27077 attr->dw_attr_val.v.val_str = node;
27078 }
27079
27080 else
27081 add_name_attribute (die, dname);
27082 }
27083
27084 /* True if before or during processing of the first function being emitted. */
27085 static bool in_first_function_p = true;
27086 /* True if loc_note during dwarf2out_var_location call might still be
27087 before first real instruction at address equal to .Ltext0. */
27088 static bool maybe_at_text_label_p = true;
27089 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27090 static unsigned int first_loclabel_num_not_at_text_label;
27091
27092 /* Look ahead for a real insn, or for a begin stmt marker. */
27093
27094 static rtx_insn *
27095 dwarf2out_next_real_insn (rtx_insn *loc_note)
27096 {
27097 rtx_insn *next_real = NEXT_INSN (loc_note);
27098
27099 while (next_real)
27100 if (INSN_P (next_real))
27101 break;
27102 else
27103 next_real = NEXT_INSN (next_real);
27104
27105 return next_real;
27106 }
27107
27108 /* Called by the final INSN scan whenever we see a var location. We
27109 use it to drop labels in the right places, and throw the location in
27110 our lookup table. */
27111
27112 static void
27113 dwarf2out_var_location (rtx_insn *loc_note)
27114 {
27115 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27116 struct var_loc_node *newloc;
27117 rtx_insn *next_real, *next_note;
27118 rtx_insn *call_insn = NULL;
27119 static const char *last_label;
27120 static const char *last_postcall_label;
27121 static bool last_in_cold_section_p;
27122 static rtx_insn *expected_next_loc_note;
27123 tree decl;
27124 bool var_loc_p;
27125 var_loc_view view = 0;
27126
27127 if (!NOTE_P (loc_note))
27128 {
27129 if (CALL_P (loc_note))
27130 {
27131 maybe_reset_location_view (loc_note, cur_line_info_table);
27132 call_site_count++;
27133 if (SIBLING_CALL_P (loc_note))
27134 tail_call_site_count++;
27135 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27136 {
27137 call_insn = loc_note;
27138 loc_note = NULL;
27139 var_loc_p = false;
27140
27141 next_real = dwarf2out_next_real_insn (call_insn);
27142 next_note = NULL;
27143 cached_next_real_insn = NULL;
27144 goto create_label;
27145 }
27146 if (optimize == 0 && !flag_var_tracking)
27147 {
27148 /* When the var-tracking pass is not running, there is no note
27149 for indirect calls whose target is compile-time known. In this
27150 case, process such calls specifically so that we generate call
27151 sites for them anyway. */
27152 rtx x = PATTERN (loc_note);
27153 if (GET_CODE (x) == PARALLEL)
27154 x = XVECEXP (x, 0, 0);
27155 if (GET_CODE (x) == SET)
27156 x = SET_SRC (x);
27157 if (GET_CODE (x) == CALL)
27158 x = XEXP (x, 0);
27159 if (!MEM_P (x)
27160 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27161 || !SYMBOL_REF_DECL (XEXP (x, 0))
27162 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27163 != FUNCTION_DECL))
27164 {
27165 call_insn = loc_note;
27166 loc_note = NULL;
27167 var_loc_p = false;
27168
27169 next_real = dwarf2out_next_real_insn (call_insn);
27170 next_note = NULL;
27171 cached_next_real_insn = NULL;
27172 goto create_label;
27173 }
27174 }
27175 }
27176 else if (!debug_variable_location_views)
27177 gcc_unreachable ();
27178 else
27179 maybe_reset_location_view (loc_note, cur_line_info_table);
27180
27181 return;
27182 }
27183
27184 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27185 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27186 return;
27187
27188 /* Optimize processing a large consecutive sequence of location
27189 notes so we don't spend too much time in next_real_insn. If the
27190 next insn is another location note, remember the next_real_insn
27191 calculation for next time. */
27192 next_real = cached_next_real_insn;
27193 if (next_real)
27194 {
27195 if (expected_next_loc_note != loc_note)
27196 next_real = NULL;
27197 }
27198
27199 next_note = NEXT_INSN (loc_note);
27200 if (! next_note
27201 || next_note->deleted ()
27202 || ! NOTE_P (next_note)
27203 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27204 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27205 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27206 next_note = NULL;
27207
27208 if (! next_real)
27209 next_real = dwarf2out_next_real_insn (loc_note);
27210
27211 if (next_note)
27212 {
27213 expected_next_loc_note = next_note;
27214 cached_next_real_insn = next_real;
27215 }
27216 else
27217 cached_next_real_insn = NULL;
27218
27219 /* If there are no instructions which would be affected by this note,
27220 don't do anything. */
27221 if (var_loc_p
27222 && next_real == NULL_RTX
27223 && !NOTE_DURING_CALL_P (loc_note))
27224 return;
27225
27226 create_label:
27227
27228 if (next_real == NULL_RTX)
27229 next_real = get_last_insn ();
27230
27231 /* If there were any real insns between the note we processed last time
27232 and this note (or if it is the first note), clear
27233 last_{,postcall_}label so that they are not reused this time. */
27234 if (last_var_location_insn == NULL_RTX
27235 || last_var_location_insn != next_real
27236 || last_in_cold_section_p != in_cold_section_p)
27237 {
27238 last_label = NULL;
27239 last_postcall_label = NULL;
27240 }
27241
27242 if (var_loc_p)
27243 {
27244 const char *label
27245 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27246 view = cur_line_info_table->view;
27247 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27248 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27249 if (newloc == NULL)
27250 return;
27251 }
27252 else
27253 {
27254 decl = NULL_TREE;
27255 newloc = NULL;
27256 }
27257
27258 /* If there were no real insns between the note we processed last time
27259 and this note, use the label we emitted last time. Otherwise
27260 create a new label and emit it. */
27261 if (last_label == NULL)
27262 {
27263 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27264 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27265 loclabel_num++;
27266 last_label = ggc_strdup (loclabel);
27267 /* See if loclabel might be equal to .Ltext0. If yes,
27268 bump first_loclabel_num_not_at_text_label. */
27269 if (!have_multiple_function_sections
27270 && in_first_function_p
27271 && maybe_at_text_label_p)
27272 {
27273 static rtx_insn *last_start;
27274 rtx_insn *insn;
27275 for (insn = loc_note; insn; insn = previous_insn (insn))
27276 if (insn == last_start)
27277 break;
27278 else if (!NONDEBUG_INSN_P (insn))
27279 continue;
27280 else
27281 {
27282 rtx body = PATTERN (insn);
27283 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27284 continue;
27285 /* Inline asm could occupy zero bytes. */
27286 else if (GET_CODE (body) == ASM_INPUT
27287 || asm_noperands (body) >= 0)
27288 continue;
27289 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27290 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27291 continue;
27292 #endif
27293 else
27294 {
27295 /* Assume insn has non-zero length. */
27296 maybe_at_text_label_p = false;
27297 break;
27298 }
27299 }
27300 if (maybe_at_text_label_p)
27301 {
27302 last_start = loc_note;
27303 first_loclabel_num_not_at_text_label = loclabel_num;
27304 }
27305 }
27306 }
27307
27308 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27309 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27310
27311 if (!var_loc_p)
27312 {
27313 struct call_arg_loc_node *ca_loc
27314 = ggc_cleared_alloc<call_arg_loc_node> ();
27315 rtx_insn *prev = call_insn;
27316
27317 ca_loc->call_arg_loc_note
27318 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27319 ca_loc->next = NULL;
27320 ca_loc->label = last_label;
27321 gcc_assert (prev
27322 && (CALL_P (prev)
27323 || (NONJUMP_INSN_P (prev)
27324 && GET_CODE (PATTERN (prev)) == SEQUENCE
27325 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27326 if (!CALL_P (prev))
27327 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27328 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27329
27330 /* Look for a SYMBOL_REF in the "prev" instruction. */
27331 rtx x = get_call_rtx_from (PATTERN (prev));
27332 if (x)
27333 {
27334 /* Try to get the call symbol, if any. */
27335 if (MEM_P (XEXP (x, 0)))
27336 x = XEXP (x, 0);
27337 /* First, look for a memory access to a symbol_ref. */
27338 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27339 && SYMBOL_REF_DECL (XEXP (x, 0))
27340 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27341 ca_loc->symbol_ref = XEXP (x, 0);
27342 /* Otherwise, look at a compile-time known user-level function
27343 declaration. */
27344 else if (MEM_P (x)
27345 && MEM_EXPR (x)
27346 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27347 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27348 }
27349
27350 ca_loc->block = insn_scope (prev);
27351 if (call_arg_locations)
27352 call_arg_loc_last->next = ca_loc;
27353 else
27354 call_arg_locations = ca_loc;
27355 call_arg_loc_last = ca_loc;
27356 }
27357 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27358 {
27359 newloc->label = last_label;
27360 newloc->view = view;
27361 }
27362 else
27363 {
27364 if (!last_postcall_label)
27365 {
27366 sprintf (loclabel, "%s-1", last_label);
27367 last_postcall_label = ggc_strdup (loclabel);
27368 }
27369 newloc->label = last_postcall_label;
27370 /* ??? This view is at last_label, not last_label-1, but we
27371 could only assume view at last_label-1 is zero if we could
27372 assume calls always have length greater than one. This is
27373 probably true in general, though there might be a rare
27374 exception to this rule, e.g. if a call insn is optimized out
27375 by target magic. Then, even the -1 in the label will be
27376 wrong, which might invalidate the range. Anyway, using view,
27377 though technically possibly incorrect, will work as far as
27378 ranges go: since L-1 is in the middle of the call insn,
27379 (L-1).0 and (L-1).V shouldn't make any difference, and having
27380 the loclist entry refer to the .loc entry might be useful, so
27381 leave it like this. */
27382 newloc->view = view;
27383 }
27384
27385 if (var_loc_p && flag_debug_asm)
27386 {
27387 const char *name, *sep, *patstr;
27388 if (decl && DECL_NAME (decl))
27389 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27390 else
27391 name = "";
27392 if (NOTE_VAR_LOCATION_LOC (loc_note))
27393 {
27394 sep = " => ";
27395 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27396 }
27397 else
27398 {
27399 sep = " ";
27400 patstr = "RESET";
27401 }
27402 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27403 name, sep, patstr);
27404 }
27405
27406 last_var_location_insn = next_real;
27407 last_in_cold_section_p = in_cold_section_p;
27408 }
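
/* For illustration only (the label name below is made up): the post-call
   label built above with sprintf ("%s-1") is simply the previous location
   label with "-1" appended, e.g. ".LVL5-1" when last_label is ".LVL5".
   The assembler resolves that to the address one byte before .LVL5, which
   the comment above expects to fall inside the call insn.  */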
27409
27410 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27411 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27412 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27413 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27414 BLOCK_FRAGMENT_ORIGIN links. */
27415 static bool
27416 block_within_block_p (tree block, tree outer, bool bothways)
27417 {
27418 if (block == outer)
27419 return true;
27420
27421 /* Quickly check that OUTER can be reached up BLOCK's supercontext chain. */
27422 for (tree context = BLOCK_SUPERCONTEXT (block);
27423 context != outer;
27424 context = BLOCK_SUPERCONTEXT (context))
27425 if (!context || TREE_CODE (context) != BLOCK)
27426 return false;
27427
27428 if (!bothways)
27429 return true;
27430
27431 /* Now check that each block is actually referenced by its
27432 parent. */
27433 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27434 context = BLOCK_SUPERCONTEXT (context))
27435 {
27436 if (BLOCK_FRAGMENT_ORIGIN (context))
27437 {
27438 gcc_assert (!BLOCK_SUBBLOCKS (context));
27439 context = BLOCK_FRAGMENT_ORIGIN (context);
27440 }
27441 for (tree sub = BLOCK_SUBBLOCKS (context);
27442 sub != block;
27443 sub = BLOCK_CHAIN (sub))
27444 if (!sub)
27445 return false;
27446 if (context == outer)
27447 return true;
27448 else
27449 block = context;
27450 }
27451 }
27452
27453 /* Called during final while assembling the marker of the entry point
27454 for an inlined function. */
27455
27456 static void
27457 dwarf2out_inline_entry (tree block)
27458 {
27459 gcc_assert (debug_inline_points);
27460
27461 /* If we can't represent it, don't bother. */
27462 if (!(dwarf_version >= 3 || !dwarf_strict))
27463 return;
27464
27465 gcc_assert (DECL_P (block_ultimate_origin (block)));
27466
27467 /* Sanity check the block tree. This would catch a case in which
27468 BLOCK got removed from the tree reachable from the outermost
27469 lexical block, but got retained in markers. It would still link
27470 back to its parents, but some ancestor would be missing a link
27471 down the path to the sub BLOCK. If the block got removed, its
27472 BLOCK_NUMBER will not be a usable value. */
27473 if (flag_checking)
27474 gcc_assert (block_within_block_p (block,
27475 DECL_INITIAL (current_function_decl),
27476 true));
27477
27478 gcc_assert (inlined_function_outer_scope_p (block));
27479 gcc_assert (!lookup_block_die (block));
27480
27481 if (BLOCK_FRAGMENT_ORIGIN (block))
27482 block = BLOCK_FRAGMENT_ORIGIN (block);
27483 /* Can the entry point ever not be at the beginning of an
27484 unfragmented lexical block? */
27485 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27486 || (cur_line_info_table
27487 && !ZERO_VIEW_P (cur_line_info_table->view))))
27488 return;
27489
27490 if (!inline_entry_data_table)
27491 inline_entry_data_table
27492 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27493
27494
27495 inline_entry_data **iedp
27496 = inline_entry_data_table->find_slot_with_hash (block,
27497 htab_hash_pointer (block),
27498 INSERT);
27499 if (*iedp)
27500 /* ??? Ideally, we'd record all entry points for the same inlined
27501 function (some may have been duplicated by e.g. unrolling), but
27502 we have no way to represent that ATM. */
27503 return;
27504
27505 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27506 ied->block = block;
27507 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27508 ied->label_num = BLOCK_NUMBER (block);
27509 if (cur_line_info_table)
27510 ied->view = cur_line_info_table->view;
27511
27512 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27513
27514 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27515 BLOCK_NUMBER (block));
27516 ASM_OUTPUT_LABEL (asm_out_file, label);
27517 }
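
/* For illustration only (the concrete number is made up): on a typical ELF
   target the label emitted just above comes out as something like ".LBI4"
   for BLOCK_NUMBER (block) == 4; the exact prefix comes from
   BLOCK_INLINE_ENTRY_LABEL and the formatting from
   ASM_GENERATE_INTERNAL_LABEL.  The recorded inline_entry_data is used
   later, when the DIE for the inlined subroutine is output, to refer back
   to this label (e.g. for DW_AT_entry_pc).  */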
27518
27519 /* Called from finalize_size_functions for size functions so that their body
27520 can be encoded in the debug info to describe the layout of variable-length
27521 structures. */
27522
27523 static void
27524 dwarf2out_size_function (tree decl)
27525 {
27526 function_to_dwarf_procedure (decl);
27527 }
27528
27529 /* Note in one location list that text section has changed. */
27530
27531 int
27532 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27533 {
27534 var_loc_list *list = *slot;
27535 if (list->first)
27536 list->last_before_switch
27537 = list->last->next ? list->last->next : list->last;
27538 return 1;
27539 }
27540
27541 /* Note in all location lists that text section has changed. */
27542
27543 static void
27544 var_location_switch_text_section (void)
27545 {
27546 if (decl_loc_table == NULL)
27547 return;
27548
27549 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27550 }
27551
27552 /* Create a new line number table. */
27553
27554 static dw_line_info_table *
27555 new_line_info_table (void)
27556 {
27557 dw_line_info_table *table;
27558
27559 table = ggc_cleared_alloc<dw_line_info_table> ();
27560 table->file_num = 1;
27561 table->line_num = 1;
27562 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27563 FORCE_RESET_NEXT_VIEW (table->view);
27564 table->symviews_since_reset = 0;
27565
27566 return table;
27567 }
27568
27569 /* Look up the "current" table into which we emit line info, so
27570    that we don't have to do it for every source line. */
27571
27572 static void
27573 set_cur_line_info_table (section *sec)
27574 {
27575 dw_line_info_table *table;
27576
27577 if (sec == text_section)
27578 table = text_section_line_info;
27579 else if (sec == cold_text_section)
27580 {
27581 table = cold_text_section_line_info;
27582 if (!table)
27583 {
27584 cold_text_section_line_info = table = new_line_info_table ();
27585 table->end_label = cold_end_label;
27586 }
27587 }
27588 else
27589 {
27590 const char *end_label;
27591
27592 if (crtl->has_bb_partition)
27593 {
27594 if (in_cold_section_p)
27595 end_label = crtl->subsections.cold_section_end_label;
27596 else
27597 end_label = crtl->subsections.hot_section_end_label;
27598 }
27599 else
27600 {
27601 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27602 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27603 current_function_funcdef_no);
27604 end_label = ggc_strdup (label);
27605 }
27606
27607 table = new_line_info_table ();
27608 table->end_label = end_label;
27609
27610 vec_safe_push (separate_line_info, table);
27611 }
27612
27613 if (output_asm_line_debug_info ())
27614 table->is_stmt = (cur_line_info_table
27615 ? cur_line_info_table->is_stmt
27616 : DWARF_LINE_DEFAULT_IS_STMT_START);
27617 cur_line_info_table = table;
27618 }
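
/* For illustration only (the number is made up): for a function emitted
   into its own, unpartitioned section, the end label chosen above is the
   regular function end label, e.g. something like ".LFE7" when
   current_function_funcdef_no is 7, with the prefix coming from
   FUNC_END_LABEL.  */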
27619
27620
27621 /* We need to reset the locations at the beginning of each
27622 function. We can't do this in the end_function hook, because the
27623 declarations that use the locations won't have been output when
27624 that hook is called. Also compute have_multiple_function_sections here. */
27625
27626 static void
27627 dwarf2out_begin_function (tree fun)
27628 {
27629 section *sec = function_section (fun);
27630
27631 if (sec != text_section)
27632 have_multiple_function_sections = true;
27633
27634 if (crtl->has_bb_partition && !cold_text_section)
27635 {
27636 gcc_assert (current_function_decl == fun);
27637 cold_text_section = unlikely_text_section ();
27638 switch_to_section (cold_text_section);
27639 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27640 switch_to_section (sec);
27641 }
27642
27643 dwarf2out_note_section_used ();
27644 call_site_count = 0;
27645 tail_call_site_count = 0;
27646
27647 set_cur_line_info_table (sec);
27648 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27649 }
27650
27651 /* Helper function of dwarf2out_end_function, called only after emitting
27652 the very first function into assembly. Check if some .debug_loc range
27653 might end with a .LVL* label that could be equal to .Ltext0.
27654 In that case we must force using absolute addresses in .debug_loc ranges,
27655 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27656 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27657 list terminator.
27658 Set have_multiple_function_sections to true in that case and
27659 terminate htab traversal. */
27660
27661 int
27662 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27663 {
27664 var_loc_list *entry = *slot;
27665 struct var_loc_node *node;
27666
27667 node = entry->first;
27668 if (node && node->next && node->next->label)
27669 {
27670 unsigned int i;
27671 const char *label = node->next->label;
27672 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27673
27674 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27675 {
27676 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27677 if (strcmp (label, loclabel) == 0)
27678 {
27679 have_multiple_function_sections = true;
27680 return 0;
27681 }
27682 }
27683 }
27684 return 1;
27685 }
27686
27687 /* Hook called after emitting a function into assembly.
27688 This does something only for the very first function emitted. */
27689
27690 static void
27691 dwarf2out_end_function (unsigned int)
27692 {
27693 if (in_first_function_p
27694 && !have_multiple_function_sections
27695 && first_loclabel_num_not_at_text_label
27696 && decl_loc_table)
27697 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27698 in_first_function_p = false;
27699 maybe_at_text_label_p = false;
27700 }
27701
27702 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27703 front-ends register a translation unit even before dwarf2out_init is
27704 called. */
27705 static tree main_translation_unit = NULL_TREE;
27706
27707 /* Hook called by front-ends after they built their main translation unit.
27708 Associate comp_unit_die to UNIT. */
27709
27710 static void
27711 dwarf2out_register_main_translation_unit (tree unit)
27712 {
27713 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27714 && main_translation_unit == NULL_TREE);
27715 main_translation_unit = unit;
27716 /* If dwarf2out_init has not been called yet, it will perform the association
27717 itself looking at main_translation_unit. */
27718 if (decl_die_table != NULL)
27719 equate_decl_number_to_die (unit, comp_unit_die ());
27720 }
27721
27722 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27723
27724 static void
27725 push_dw_line_info_entry (dw_line_info_table *table,
27726 enum dw_line_info_opcode opcode, unsigned int val)
27727 {
27728 dw_line_info_entry e;
27729 e.opcode = opcode;
27730 e.val = val;
27731 vec_safe_push (table->entries, e);
27732 }
27733
27734 /* Output a label to mark the beginning of a source code line entry
27735 and record information relating to this source line, in
27736 'line_info_table' for later output of the .debug_line section. */
27737 /* ??? The discriminator parameter ought to be unsigned. */
27738
27739 static void
27740 dwarf2out_source_line (unsigned int line, unsigned int column,
27741 const char *filename,
27742 int discriminator, bool is_stmt)
27743 {
27744 unsigned int file_num;
27745 dw_line_info_table *table;
27746 static var_loc_view lvugid;
27747
27748 if (debug_info_level < DINFO_LEVEL_TERSE)
27749 return;
27750
27751 table = cur_line_info_table;
27752
27753 if (line == 0)
27754 {
27755 if (debug_variable_location_views
27756 && output_asm_line_debug_info ()
27757 && table && !RESETTING_VIEW_P (table->view))
27758 {
27759 /* If we're using the assembler to compute view numbers, we
27760 can't issue a .loc directive for line zero, so we can't
27761 get a view number at this point. We might attempt to
27762 compute it from the previous view, or equate it to a
27763 subsequent view (though it might not be there!), but
27764 since we're omitting the line number entry, we might as
27765 well omit the view number as well. That means pretending
27766 it's a view number zero, which might very well turn out
27767 to be correct. ??? Extend the assembler so that the
27768 compiler could emit e.g. ".locview .LVU#", to output a
27769 view without changing line number information. We'd then
27770 have to count it in symviews_since_reset; when it's omitted,
27771 it doesn't count. */
27772 if (!zero_view_p)
27773 zero_view_p = BITMAP_GGC_ALLOC ();
27774 bitmap_set_bit (zero_view_p, table->view);
27775 if (flag_debug_asm)
27776 {
27777 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27778 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27779 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27780 ASM_COMMENT_START);
27781 assemble_name (asm_out_file, label);
27782 putc ('\n', asm_out_file);
27783 }
27784 table->view = ++lvugid;
27785 }
27786 return;
27787 }
27788
27789 /* The discriminator column was added in DWARF 4.  Simplify the code
27790 below by zeroing the discriminator if we're not supposed to output it. */
27791 if (dwarf_version < 4 && dwarf_strict)
27792 discriminator = 0;
27793
27794 if (!debug_column_info)
27795 column = 0;
27796
27797 file_num = maybe_emit_file (lookup_filename (filename));
27798
27799 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27800 the debugger has used the second (possibly duplicate) line number
27801 at the beginning of the function to mark the end of the prologue.
27802 We could eliminate any other duplicates within the function. For
27803 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27804 that second line number entry. */
27805 /* Recall that this end-of-prologue indication is *not* the same thing
27806 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27807 to which the hook corresponds, follows the last insn that was
27808 emitted by gen_prologue. What we need is to precede the first insn
27809 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27810 insn that corresponds to something the user wrote. These may be
27811 very different locations once scheduling is enabled. */
27812
27813 if (0 && file_num == table->file_num
27814 && line == table->line_num
27815 && column == table->column_num
27816 && discriminator == table->discrim_num
27817 && is_stmt == table->is_stmt)
27818 return;
27819
27820 switch_to_section (current_function_section ());
27821
27822 /* If requested, emit something human-readable. */
27823 if (flag_debug_asm)
27824 {
27825 if (debug_column_info)
27826 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27827 filename, line, column);
27828 else
27829 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27830 filename, line);
27831 }
27832
27833 if (output_asm_line_debug_info ())
27834 {
27835 /* Emit the .loc directive understood by GNU as. */
27836 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27837 file_num, line, is_stmt, discriminator */
27838 fputs ("\t.loc ", asm_out_file);
27839 fprint_ul (asm_out_file, file_num);
27840 putc (' ', asm_out_file);
27841 fprint_ul (asm_out_file, line);
27842 putc (' ', asm_out_file);
27843 fprint_ul (asm_out_file, column);
27844
27845 if (is_stmt != table->is_stmt)
27846 {
27847 #if HAVE_GAS_LOC_STMT
27848 fputs (" is_stmt ", asm_out_file);
27849 putc (is_stmt ? '1' : '0', asm_out_file);
27850 #endif
27851 }
27852 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27853 {
27854 gcc_assert (discriminator > 0);
27855 fputs (" discriminator ", asm_out_file);
27856 fprint_ul (asm_out_file, (unsigned long) discriminator);
27857 }
27858 if (debug_variable_location_views)
27859 {
27860 if (!RESETTING_VIEW_P (table->view))
27861 {
27862 table->symviews_since_reset++;
27863 if (table->symviews_since_reset > symview_upper_bound)
27864 symview_upper_bound = table->symviews_since_reset;
27865 /* When we're using the assembler to compute view
27866 numbers, we output symbolic labels after "view" in
27867 .loc directives, and the assembler will set them for
27868 us, so that we can refer to the view numbers in
27869 location lists. The only exceptions are when we know
27870 a view will be zero: "-0" is a forced reset, used
27871 e.g. in the beginning of functions, whereas "0" tells
27872 the assembler to check that there was a PC change
27873 since the previous view, in a way that implicitly
27874 resets the next view. */
27875 fputs (" view ", asm_out_file);
27876 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27877 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27878 assemble_name (asm_out_file, label);
27879 table->view = ++lvugid;
27880 }
27881 else
27882 {
27883 table->symviews_since_reset = 0;
27884 if (FORCE_RESETTING_VIEW_P (table->view))
27885 fputs (" view -0", asm_out_file);
27886 else
27887 fputs (" view 0", asm_out_file);
27888 /* Mark the present view as a zero view. Earlier debug
27889 binds may have already added its id to loclists to be
27890 emitted later, so we can't reuse the id for something
27891 else. However, it's good to know whether a view is
27892 known to be zero, because then we may be able to
27893 optimize out locviews that are all zeros, so take
27894 note of it in zero_view_p. */
27895 if (!zero_view_p)
27896 zero_view_p = BITMAP_GGC_ALLOC ();
27897 bitmap_set_bit (zero_view_p, lvugid);
27898 table->view = ++lvugid;
27899 }
27900 }
27901 putc ('\n', asm_out_file);
27902 }
27903 else
27904 {
27905 unsigned int label_num = ++line_info_label_num;
27906
27907 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27908
27909 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27910 push_dw_line_info_entry (table, LI_adv_address, label_num);
27911 else
27912 push_dw_line_info_entry (table, LI_set_address, label_num);
27913 if (debug_variable_location_views)
27914 {
27915 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27916 if (resetting)
27917 table->view = 0;
27918
27919 if (flag_debug_asm)
27920 fprintf (asm_out_file, "\t%s view %s%d\n",
27921 ASM_COMMENT_START,
27922 resetting ? "-" : "",
27923 table->view);
27924
27925 table->view++;
27926 }
27927 if (file_num != table->file_num)
27928 push_dw_line_info_entry (table, LI_set_file, file_num);
27929 if (discriminator != table->discrim_num)
27930 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27931 if (is_stmt != table->is_stmt)
27932 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27933 push_dw_line_info_entry (table, LI_set_line, line);
27934 if (debug_column_info)
27935 push_dw_line_info_entry (table, LI_set_column, column);
27936 }
27937
27938 table->file_num = file_num;
27939 table->line_num = line;
27940 table->column_num = column;
27941 table->discrim_num = discriminator;
27942 table->is_stmt = is_stmt;
27943 table->in_use = true;
27944 }
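
/* For illustration only (all concrete numbers below are made up): when
   output_asm_line_debug_info () is true, a call such as
   dwarf2out_source_line (42, 7, "foo.c", 0, true) for file number 3, with
   location views and column info enabled, produces a directive along the
   lines of
       .loc 3 42 7 view .LVU5
   and, at a forced view reset such as right after dwarf2out_begin_function,
       .loc 3 42 7 view -0
   with the is_stmt and discriminator operands added only when is_stmt
   changes or the discriminator is non-zero, exactly as in the code above.  */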
27945
27946 /* Record the beginning of a new source file. */
27947
27948 static void
27949 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27950 {
27951 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27952 {
27953 macinfo_entry e;
27954 e.code = DW_MACINFO_start_file;
27955 e.lineno = lineno;
27956 e.info = ggc_strdup (filename);
27957 vec_safe_push (macinfo_table, e);
27958 }
27959 }
27960
27961 /* Record the end of a source file. */
27962
27963 static void
27964 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27965 {
27966 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27967 {
27968 macinfo_entry e;
27969 e.code = DW_MACINFO_end_file;
27970 e.lineno = lineno;
27971 e.info = NULL;
27972 vec_safe_push (macinfo_table, e);
27973 }
27974 }
27975
27976 /* Called from debug_define in toplev.c.  The `buffer' parameter contains
27977 the tail part of the directive line, i.e. the part past the initial
27978 whitespace, the '#', the directive name and the whitespace after it. */
27979
27980 static void
27981 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27982 const char *buffer ATTRIBUTE_UNUSED)
27983 {
27984 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27985 {
27986 macinfo_entry e;
27987 /* Insert a dummy first entry to be able to optimize the whole
27988 predefined macro block using DW_MACRO_import. */
27989 if (macinfo_table->is_empty () && lineno <= 1)
27990 {
27991 e.code = 0;
27992 e.lineno = 0;
27993 e.info = NULL;
27994 vec_safe_push (macinfo_table, e);
27995 }
27996 e.code = DW_MACINFO_define;
27997 e.lineno = lineno;
27998 e.info = ggc_strdup (buffer);
27999 vec_safe_push (macinfo_table, e);
28000 }
28001 }
28002
28003 /* Called from debug_undef in toplev.c.  The `buffer' parameter contains
28004 the tail part of the directive line, i.e. the part past the initial
28005 whitespace, the '#', the directive name and the whitespace after it. */
28006
28007 static void
28008 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28009 const char *buffer ATTRIBUTE_UNUSED)
28010 {
28011 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28012 {
28013 macinfo_entry e;
28014 /* Insert a dummy first entry to be able to optimize the whole
28015 predefined macro block using DW_MACRO_import. */
28016 if (macinfo_table->is_empty () && lineno <= 1)
28017 {
28018 e.code = 0;
28019 e.lineno = 0;
28020 e.info = NULL;
28021 vec_safe_push (macinfo_table, e);
28022 }
28023 e.code = DW_MACINFO_undef;
28024 e.lineno = lineno;
28025 e.info = ggc_strdup (buffer);
28026 vec_safe_push (macinfo_table, e);
28027 }
28028 }
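
/* For illustration only (the macros below are made up): in both hooks
   above, BUFFER is the text after the directive name as the preprocessor
   hands it over, e.g. "PI 3.14159" or "MAX(a,b) ((a) > (b) ? (a) : (b))"
   for dwarf2out_define and just the macro name, e.g. "PI", for
   dwarf2out_undef; the strings are stored verbatim and later emitted in
   DW_MACINFO_define / DW_MACINFO_undef entries.  */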
28029
28030 /* Helpers to manipulate the hash table of macinfo entries (used to share macro blocks between CUs). */
28031
28032 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28033 {
28034 static inline hashval_t hash (const macinfo_entry *);
28035 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28036 };
28037
28038 inline hashval_t
28039 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28040 {
28041 return htab_hash_string (entry->info);
28042 }
28043
28044 inline bool
28045 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28046 const macinfo_entry *entry2)
28047 {
28048 return !strcmp (entry1->info, entry2->info);
28049 }
28050
28051 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28052
28053 /* Output a single .debug_macinfo entry. */
28054
28055 static void
28056 output_macinfo_op (macinfo_entry *ref)
28057 {
28058 int file_num;
28059 size_t len;
28060 struct indirect_string_node *node;
28061 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28062 struct dwarf_file_data *fd;
28063
28064 switch (ref->code)
28065 {
28066 case DW_MACINFO_start_file:
28067 fd = lookup_filename (ref->info);
28068 file_num = maybe_emit_file (fd);
28069 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28070 dw2_asm_output_data_uleb128 (ref->lineno,
28071 "Included from line number %lu",
28072 (unsigned long) ref->lineno);
28073 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28074 break;
28075 case DW_MACINFO_end_file:
28076 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28077 break;
28078 case DW_MACINFO_define:
28079 case DW_MACINFO_undef:
28080 len = strlen (ref->info) + 1;
28081 if (!dwarf_strict
28082 && len > DWARF_OFFSET_SIZE
28083 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28084 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28085 {
28086 ref->code = ref->code == DW_MACINFO_define
28087 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28088 output_macinfo_op (ref);
28089 return;
28090 }
28091 dw2_asm_output_data (1, ref->code,
28092 ref->code == DW_MACINFO_define
28093 ? "Define macro" : "Undefine macro");
28094 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28095 (unsigned long) ref->lineno);
28096 dw2_asm_output_nstring (ref->info, -1, "The macro");
28097 break;
28098 case DW_MACRO_define_strp:
28099 case DW_MACRO_undef_strp:
28100 node = find_AT_string (ref->info);
28101 gcc_assert (node
28102 && (node->form == DW_FORM_strp
28103 || node->form == dwarf_FORM (DW_FORM_strx)));
28104 dw2_asm_output_data (1, ref->code,
28105 ref->code == DW_MACRO_define_strp
28106 ? "Define macro strp"
28107 : "Undefine macro strp");
28108 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28109 (unsigned long) ref->lineno);
28110 if (node->form == DW_FORM_strp)
28111 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28112 debug_str_section, "The macro: \"%s\"",
28113 ref->info);
28114 else
28115 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28116 ref->info);
28117 break;
28118 case DW_MACRO_import:
28119 dw2_asm_output_data (1, ref->code, "Import");
28120 ASM_GENERATE_INTERNAL_LABEL (label,
28121 DEBUG_MACRO_SECTION_LABEL,
28122 ref->lineno + macinfo_label_base);
28123 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28124 break;
28125 default:
28126 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28127 ASM_COMMENT_START, (unsigned long) ref->code);
28128 break;
28129 }
28130 }
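
/* For illustration only (macro, line number and directive spellings are
   approximate): a DW_MACINFO_define of "PI 3.14" recorded at line 12 is
   emitted by the function above roughly as
       .byte    0x1            # Define macro
       .uleb128 0xc            # At line number 12
       .ascii   "PI 3.14\0"    # The macro
   while sufficiently long strings are instead rewritten to
   DW_MACRO_define_strp and referenced in .debug_str, as the code above
   shows.  */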
28131
28132 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28133 the .debug_macinfo sections of other compilation units.  IDX is the
28134 index of the first define/undef; return the number of ops that should
28135 be emitted in a comdat .debug_macinfo section and emit
28136 a DW_MACRO_import entry referencing it.
28137 If the define/undef entries should be emitted normally, return 0. */
28138
28139 static unsigned
28140 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28141 macinfo_hash_type **macinfo_htab)
28142 {
28143 macinfo_entry *first, *second, *cur, *inc;
28144 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28145 unsigned char checksum[16];
28146 struct md5_ctx ctx;
28147 char *grp_name, *tail;
28148 const char *base;
28149 unsigned int i, count, encoded_filename_len, linebuf_len;
28150 macinfo_entry **slot;
28151
28152 first = &(*macinfo_table)[idx];
28153 second = &(*macinfo_table)[idx + 1];
28154
28155 /* Optimize only if there are at least two consecutive define/undef ops,
28156 and either all of them are before first DW_MACINFO_start_file
28157 with lineno {0,1} (i.e. predefined macro block), or all of them are
28158 in some included header file. */
28159 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28160 return 0;
28161 if (vec_safe_is_empty (files))
28162 {
28163 if (first->lineno > 1 || second->lineno > 1)
28164 return 0;
28165 }
28166 else if (first->lineno == 0)
28167 return 0;
28168
28169 /* Find the last define/undef entry that can be grouped together
28170 with first and at the same time compute md5 checksum of their
28171 codes, linenumbers and strings. */
28172 md5_init_ctx (&ctx);
28173 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28174 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28175 break;
28176 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28177 break;
28178 else
28179 {
28180 unsigned char code = cur->code;
28181 md5_process_bytes (&code, 1, &ctx);
28182 checksum_uleb128 (cur->lineno, &ctx);
28183 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28184 }
28185 md5_finish_ctx (&ctx, checksum);
28186 count = i - idx;
28187
28188 /* From the containing include filename (if any) pick up just
28189 usable characters from its basename. */
28190 if (vec_safe_is_empty (files))
28191 base = "";
28192 else
28193 base = lbasename (files->last ().info);
28194 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28195 if (ISIDNUM (base[i]) || base[i] == '.')
28196 encoded_filename_len++;
28197 /* Account for the '.' appended at the end. */
28198 if (encoded_filename_len)
28199 encoded_filename_len++;
28200
28201 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28202 linebuf_len = strlen (linebuf);
28203
28204 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28205 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28206 + 16 * 2 + 1);
28207 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28208 tail = grp_name + 4;
28209 if (encoded_filename_len)
28210 {
28211 for (i = 0; base[i]; i++)
28212 if (ISIDNUM (base[i]) || base[i] == '.')
28213 *tail++ = base[i];
28214 *tail++ = '.';
28215 }
28216 memcpy (tail, linebuf, linebuf_len);
28217 tail += linebuf_len;
28218 *tail++ = '.';
28219 for (i = 0; i < 16; i++)
28220 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28221
28222 /* Construct a macinfo_entry for DW_MACRO_import
28223 in the empty vector entry before the first define/undef. */
28224 inc = &(*macinfo_table)[idx - 1];
28225 inc->code = DW_MACRO_import;
28226 inc->lineno = 0;
28227 inc->info = ggc_strdup (grp_name);
28228 if (!*macinfo_htab)
28229 *macinfo_htab = new macinfo_hash_type (10);
28230 /* Avoid emitting duplicates. */
28231 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28232 if (*slot != NULL)
28233 {
28234 inc->code = 0;
28235 inc->info = NULL;
28236 /* If such an entry has been used before, just emit
28237 a DW_MACRO_import op. */
28238 inc = *slot;
28239 output_macinfo_op (inc);
28240 /* And clear all macinfo_entry in the range to avoid emitting them
28241 in the second pass. */
28242 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28243 {
28244 cur->code = 0;
28245 cur->info = NULL;
28246 }
28247 }
28248 else
28249 {
28250 *slot = inc;
28251 inc->lineno = (*macinfo_htab)->elements ();
28252 output_macinfo_op (inc);
28253 }
28254 return count;
28255 }
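
/* For illustration only (the checksum is made up): following the format
   described above, a group of define/undef ops coming from a header named
   "limits.h", starting at line 10, with 32-bit DWARF offsets, gets a
   comdat group name along the lines of
       wm4.limits.h.10.0123456789abcdef0123456789abcdef
   where the trailing 32 hex digits are the md5 checksum of the grouped
   ops.  */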
28256
28257 /* Save any strings needed by the macinfo table in the debug str
28258 table. All strings must be collected into the table by the time
28259 index_string is called. */
28260
28261 static void
28262 save_macinfo_strings (void)
28263 {
28264 unsigned len;
28265 unsigned i;
28266 macinfo_entry *ref;
28267
28268 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28269 {
28270 switch (ref->code)
28271 {
28272 /* Match the logic in output_macinfo_op to decide on
28273 indirect strings. */
28274 case DW_MACINFO_define:
28275 case DW_MACINFO_undef:
28276 len = strlen (ref->info) + 1;
28277 if (!dwarf_strict
28278 && len > DWARF_OFFSET_SIZE
28279 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28280 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28281 set_indirect_string (find_AT_string (ref->info));
28282 break;
28283 case DW_MACINFO_start_file:
28284 /* -gsplit-dwarf -g3 will also output filename as indirect
28285 string. */
28286 if (!dwarf_split_debug_info)
28287 break;
28288 /* Fall through. */
28289 case DW_MACRO_define_strp:
28290 case DW_MACRO_undef_strp:
28291 set_indirect_string (find_AT_string (ref->info));
28292 break;
28293 default:
28294 break;
28295 }
28296 }
28297 }
28298
28299 /* Output macinfo section(s). */
28300
28301 static void
28302 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28303 {
28304 unsigned i;
28305 unsigned long length = vec_safe_length (macinfo_table);
28306 macinfo_entry *ref;
28307 vec<macinfo_entry, va_gc> *files = NULL;
28308 macinfo_hash_type *macinfo_htab = NULL;
28309 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28310
28311 if (! length)
28312 return;
28313
28314 /* output_macinfo* uses these interchangeably. */
28315 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28316 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28317 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28318 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28319
28320 /* AIX Assembler inserts the length, so adjust the reference to match the
28321 offset expected by debuggers. */
28322 strcpy (dl_section_ref, debug_line_label);
28323 if (XCOFF_DEBUGGING_INFO)
28324 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28325
28326 /* For .debug_macro emit the section header. */
28327 if (!dwarf_strict || dwarf_version >= 5)
28328 {
28329 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28330 "DWARF macro version number");
28331 if (DWARF_OFFSET_SIZE == 8)
28332 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28333 else
28334 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28335 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28336 debug_line_section, NULL);
28337 }
28338
28339 /* The first loop emits the primary .debug_macinfo section and clears
28340 each macinfo_entry after its op has been emitted.
28341 If a longer range of define/undef ops can be optimized using
28342 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28343 vector entry before the first define/undef in the range, while the
28344 define/undef ops themselves are left unemitted and kept for the second pass. */
28345 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28346 {
28347 switch (ref->code)
28348 {
28349 case DW_MACINFO_start_file:
28350 vec_safe_push (files, *ref);
28351 break;
28352 case DW_MACINFO_end_file:
28353 if (!vec_safe_is_empty (files))
28354 files->pop ();
28355 break;
28356 case DW_MACINFO_define:
28357 case DW_MACINFO_undef:
28358 if ((!dwarf_strict || dwarf_version >= 5)
28359 && HAVE_COMDAT_GROUP
28360 && vec_safe_length (files) != 1
28361 && i > 0
28362 && i + 1 < length
28363 && (*macinfo_table)[i - 1].code == 0)
28364 {
28365 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28366 if (count)
28367 {
28368 i += count - 1;
28369 continue;
28370 }
28371 }
28372 break;
28373 case 0:
28374 /* A dummy entry may be inserted at the beginning to be able
28375 to optimize the whole block of predefined macros. */
28376 if (i == 0)
28377 continue;
28378 default:
28379 break;
28380 }
28381 output_macinfo_op (ref);
28382 ref->info = NULL;
28383 ref->code = 0;
28384 }
28385
28386 if (!macinfo_htab)
28387 return;
28388
28389 /* Save the number of transparent includes so we can adjust the
28390 label number for the fat LTO object DWARF. */
28391 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28392
28393 delete macinfo_htab;
28394 macinfo_htab = NULL;
28395
28396 /* If any DW_MACRO_import entries were used, then at each such entry
28397 terminate the current chain, switch to a new comdat .debug_macinfo
28398 section and emit the define/undef entries within it. */
28399 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28400 switch (ref->code)
28401 {
28402 case 0:
28403 continue;
28404 case DW_MACRO_import:
28405 {
28406 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28407 tree comdat_key = get_identifier (ref->info);
28408 /* Terminate the previous .debug_macinfo section. */
28409 dw2_asm_output_data (1, 0, "End compilation unit");
28410 targetm.asm_out.named_section (debug_macinfo_section_name,
28411 SECTION_DEBUG
28412 | SECTION_LINKONCE
28413 | (early_lto_debug
28414 ? SECTION_EXCLUDE : 0),
28415 comdat_key);
28416 ASM_GENERATE_INTERNAL_LABEL (label,
28417 DEBUG_MACRO_SECTION_LABEL,
28418 ref->lineno + macinfo_label_base);
28419 ASM_OUTPUT_LABEL (asm_out_file, label);
28420 ref->code = 0;
28421 ref->info = NULL;
28422 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28423 "DWARF macro version number");
28424 if (DWARF_OFFSET_SIZE == 8)
28425 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28426 else
28427 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28428 }
28429 break;
28430 case DW_MACINFO_define:
28431 case DW_MACINFO_undef:
28432 output_macinfo_op (ref);
28433 ref->code = 0;
28434 ref->info = NULL;
28435 break;
28436 default:
28437 gcc_unreachable ();
28438 }
28439
28440 macinfo_label_base += macinfo_label_base_adj;
28441 }
28442
28443 /* Initialize the various sections and labels for dwarf output; use the
28444 early LTO debug sections if EARLY_LTO_DEBUG.  Returns the generation
28445 (zero-based count of the times this function has been called). */
28446
28447 static unsigned
28448 init_sections_and_labels (bool early_lto_debug)
28449 {
28450 /* As we may get called multiple times have a generation count for
28451 labels. */
28452 static unsigned generation = 0;
28453
28454 if (early_lto_debug)
28455 {
28456 if (!dwarf_split_debug_info)
28457 {
28458 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28459 SECTION_DEBUG | SECTION_EXCLUDE,
28460 NULL);
28461 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28462 SECTION_DEBUG | SECTION_EXCLUDE,
28463 NULL);
28464 debug_macinfo_section_name
28465 = ((dwarf_strict && dwarf_version < 5)
28466 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28467 debug_macinfo_section = get_section (debug_macinfo_section_name,
28468 SECTION_DEBUG
28469 | SECTION_EXCLUDE, NULL);
28470 }
28471 else
28472 {
28473 /* ??? Which of the following do we need early? */
28474 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28475 SECTION_DEBUG | SECTION_EXCLUDE,
28476 NULL);
28477 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28478 SECTION_DEBUG | SECTION_EXCLUDE,
28479 NULL);
28480 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28481 SECTION_DEBUG
28482 | SECTION_EXCLUDE, NULL);
28483 debug_skeleton_abbrev_section
28484 = get_section (DEBUG_LTO_ABBREV_SECTION,
28485 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28486 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28487 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28488 generation);
28489
28490 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28491 stay in the main .o, but the skeleton_line goes into the split
28492 off dwo. */
28493 debug_skeleton_line_section
28494 = get_section (DEBUG_LTO_LINE_SECTION,
28495 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28496 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28497 DEBUG_SKELETON_LINE_SECTION_LABEL,
28498 generation);
28499 debug_str_offsets_section
28500 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28501 SECTION_DEBUG | SECTION_EXCLUDE,
28502 NULL);
28503 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28504 DEBUG_SKELETON_INFO_SECTION_LABEL,
28505 generation);
28506 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28507 DEBUG_STR_DWO_SECTION_FLAGS,
28508 NULL);
28509 debug_macinfo_section_name
28510 = ((dwarf_strict && dwarf_version < 5)
28511 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28512 debug_macinfo_section = get_section (debug_macinfo_section_name,
28513 SECTION_DEBUG | SECTION_EXCLUDE,
28514 NULL);
28515 }
28516 /* For macro info and the file table we have to refer to a
28517 debug_line section. */
28518 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28519 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28520 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28521 DEBUG_LINE_SECTION_LABEL, generation);
28522
28523 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28524 DEBUG_STR_SECTION_FLAGS
28525 | SECTION_EXCLUDE, NULL);
28526 if (!dwarf_split_debug_info)
28527 debug_line_str_section
28528 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28529 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28530 }
28531 else
28532 {
28533 if (!dwarf_split_debug_info)
28534 {
28535 debug_info_section = get_section (DEBUG_INFO_SECTION,
28536 SECTION_DEBUG, NULL);
28537 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28538 SECTION_DEBUG, NULL);
28539 debug_loc_section = get_section (dwarf_version >= 5
28540 ? DEBUG_LOCLISTS_SECTION
28541 : DEBUG_LOC_SECTION,
28542 SECTION_DEBUG, NULL);
28543 debug_macinfo_section_name
28544 = ((dwarf_strict && dwarf_version < 5)
28545 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28546 debug_macinfo_section = get_section (debug_macinfo_section_name,
28547 SECTION_DEBUG, NULL);
28548 }
28549 else
28550 {
28551 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28552 SECTION_DEBUG | SECTION_EXCLUDE,
28553 NULL);
28554 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28555 SECTION_DEBUG | SECTION_EXCLUDE,
28556 NULL);
28557 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28558 SECTION_DEBUG, NULL);
28559 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28560 SECTION_DEBUG, NULL);
28561 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28562 SECTION_DEBUG, NULL);
28563 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28564 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28565 generation);
28566
28567 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28568 stay in the main .o, but the skeleton_line goes into the
28569 split off dwo. */
28570 debug_skeleton_line_section
28571 = get_section (DEBUG_DWO_LINE_SECTION,
28572 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28573 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28574 DEBUG_SKELETON_LINE_SECTION_LABEL,
28575 generation);
28576 debug_str_offsets_section
28577 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28578 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28579 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28580 DEBUG_SKELETON_INFO_SECTION_LABEL,
28581 generation);
28582 debug_loc_section = get_section (dwarf_version >= 5
28583 ? DEBUG_DWO_LOCLISTS_SECTION
28584 : DEBUG_DWO_LOC_SECTION,
28585 SECTION_DEBUG | SECTION_EXCLUDE,
28586 NULL);
28587 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28588 DEBUG_STR_DWO_SECTION_FLAGS,
28589 NULL);
28590 debug_macinfo_section_name
28591 = ((dwarf_strict && dwarf_version < 5)
28592 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28593 debug_macinfo_section = get_section (debug_macinfo_section_name,
28594 SECTION_DEBUG | SECTION_EXCLUDE,
28595 NULL);
28596 }
28597 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28598 SECTION_DEBUG, NULL);
28599 debug_line_section = get_section (DEBUG_LINE_SECTION,
28600 SECTION_DEBUG, NULL);
28601 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28602 SECTION_DEBUG, NULL);
28603 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28604 SECTION_DEBUG, NULL);
28605 debug_str_section = get_section (DEBUG_STR_SECTION,
28606 DEBUG_STR_SECTION_FLAGS, NULL);
28607 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28608 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28609 DEBUG_STR_SECTION_FLAGS, NULL);
28610
28611 debug_ranges_section = get_section (dwarf_version >= 5
28612 ? DEBUG_RNGLISTS_SECTION
28613 : DEBUG_RANGES_SECTION,
28614 SECTION_DEBUG, NULL);
28615 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28616 SECTION_DEBUG, NULL);
28617 }
28618
28619 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28620 DEBUG_ABBREV_SECTION_LABEL, generation);
28621 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28622 DEBUG_INFO_SECTION_LABEL, generation);
28623 info_section_emitted = false;
28624 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28625 DEBUG_LINE_SECTION_LABEL, generation);
28626 /* There are up to 4 unique ranges labels per generation.
28627 See also output_rnglists. */
28628 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28629 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28630 if (dwarf_version >= 5 && dwarf_split_debug_info)
28631 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28632 DEBUG_RANGES_SECTION_LABEL,
28633 1 + generation * 4);
28634 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28635 DEBUG_ADDR_SECTION_LABEL, generation);
28636 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28637 (dwarf_strict && dwarf_version < 5)
28638 ? DEBUG_MACINFO_SECTION_LABEL
28639 : DEBUG_MACRO_SECTION_LABEL, generation);
28640 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28641 generation);
28642
28643 ++generation;
28644 return generation - 1;
28645 }
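
/* For illustration only (exact spellings depend on the *_SECTION_LABEL
   macros and ASM_GENERATE_INTERNAL_LABEL): a call with generation 0
   produces labels along the lines of .Ldebug_abbrev0, .Ldebug_info0,
   .Ldebug_line0 and .Ldebug_ranges0, with .Ldebug_ranges1 reserved as the
   rnglists base label for DWARF 5 split debug info; each further call
   bumps the generation so that labels from the early LTO debug sections
   and from the regular late debug sections never clash.  */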
28646
28647 /* Set up for Dwarf output at the start of compilation. */
28648
28649 static void
28650 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28651 {
28652 /* Allocate the file_table. */
28653 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28654
28655 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28656 /* Allocate the decl_die_table. */
28657 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28658
28659 /* Allocate the decl_loc_table. */
28660 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28661
28662 /* Allocate the cached_dw_loc_list_table. */
28663 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28664
28665 /* Allocate the initial hunk of the abbrev_die_table. */
28666 vec_alloc (abbrev_die_table, 256);
28667 /* Zero-th entry is allocated, but unused. */
28668 abbrev_die_table->quick_push (NULL);
28669
28670 /* Allocate the dwarf_proc_stack_usage_map. */
28671 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28672
28673 /* Allocate the pubtypes and pubnames vectors. */
28674 vec_alloc (pubname_table, 32);
28675 vec_alloc (pubtype_table, 32);
28676
28677 vec_alloc (incomplete_types, 64);
28678
28679 vec_alloc (used_rtx_array, 32);
28680
28681 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28682 vec_alloc (macinfo_table, 64);
28683 #endif
28684
28685 /* If front-ends already registered a main translation unit but we were not
28686 ready to perform the association, do this now. */
28687 if (main_translation_unit != NULL_TREE)
28688 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28689 }
28690
28691 /* Called before compile () starts outputting functions, variables
28692 and toplevel asms into assembly. */
28693
28694 static void
28695 dwarf2out_assembly_start (void)
28696 {
28697 if (text_section_line_info)
28698 return;
28699
28700 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28701 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28702 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28703 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28704 COLD_TEXT_SECTION_LABEL, 0);
28705 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28706
28707 switch_to_section (text_section);
28708 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28709 #endif
28710
28711 /* Make sure the line number table for .text always exists. */
28712 text_section_line_info = new_line_info_table ();
28713 text_section_line_info->end_label = text_end_label;
28714
28715 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28716 cur_line_info_table = text_section_line_info;
28717 #endif
28718
28719 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28720 && dwarf2out_do_cfi_asm ()
28721 && !dwarf2out_do_eh_frame ())
28722 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28723 }
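
/* For illustration only: unless DWARF2_LINENO_DEBUGGING_INFO is defined,
   the code above emits a label such as ".Ltext0" (from TEXT_SECTION_LABEL)
   at the very top of the text section, and, when .debug_frame is wanted
   but .eh_frame is not, the literal line
       .cfi_sections   .debug_frame
   so that the assembler directs the CFI directives to .debug_frame only.  */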
28724
28725 /* A helper function for dwarf2out_finish called through
28726 htab_traverse. Assign a string its index. All strings must be
28727 collected into the table by the time index_string is called,
28728 because the indexing code relies on htab_traverse to traverse nodes
28729 in the same order for each run. */
28730
28731 int
28732 index_string (indirect_string_node **h, unsigned int *index)
28733 {
28734 indirect_string_node *node = *h;
28735
28736 find_string_form (node);
28737 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28738 {
28739 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28740 node->index = *index;
28741 *index += 1;
28742 }
28743 return 1;
28744 }
28745
28746 /* A helper function for output_indirect_strings called through
28747 htab_traverse. Output the offset to a string and update the
28748 current offset. */
28749
28750 int
28751 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28752 {
28753 indirect_string_node *node = *h;
28754
28755 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28756 {
28757 /* Assert that this node has been assigned an index. */
28758 gcc_assert (node->index != NO_INDEX_ASSIGNED
28759 && node->index != NOT_INDEXED);
28760 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28761 "indexed string 0x%x: %s", node->index, node->str);
28762 *offset += strlen (node->str) + 1;
28763 }
28764 return 1;
28765 }
28766
28767 /* A helper function for dwarf2out_finish called through
28768 htab_traverse. Output the indexed string. */
28769
28770 int
28771 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28772 {
28773 struct indirect_string_node *node = *h;
28774
28775 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28776 {
28777 /* Assert that the strings are output in the same order as their
28778 indexes were assigned. */
28779 gcc_assert (*cur_idx == node->index);
28780 assemble_string (node->str, strlen (node->str) + 1);
28781 *cur_idx += 1;
28782 }
28783 return 1;
28784 }
28785
28786 /* A helper function for output_indirect_strings.  Counts the number
28787 of indexed string offsets.  Must match the logic of the functions
28788 output_index_string[_offsets] above. */
28789 int
28790 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28791 {
28792 struct indirect_string_node *node = *h;
28793
28794 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28795 *last_idx += 1;
28796 return 1;
28797 }
28798
28799 /* A helper function for dwarf2out_finish called through
28800 htab_traverse. Emit one queued .debug_str string. */
28801
28802 int
28803 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28804 {
28805 struct indirect_string_node *node = *h;
28806
28807 node->form = find_string_form (node);
28808 if (node->form == form && node->refcount > 0)
28809 {
28810 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28811 assemble_string (node->str, strlen (node->str) + 1);
28812 }
28813
28814 return 1;
28815 }
28816
28817 /* Output the indexed string table. */
28818
28819 static void
28820 output_indirect_strings (void)
28821 {
28822 switch_to_section (debug_str_section);
28823 if (!dwarf_split_debug_info)
28824 debug_str_hash->traverse<enum dwarf_form,
28825 output_indirect_string> (DW_FORM_strp);
28826 else
28827 {
28828 unsigned int offset = 0;
28829 unsigned int cur_idx = 0;
28830
28831 if (skeleton_debug_str_hash)
28832 skeleton_debug_str_hash->traverse<enum dwarf_form,
28833 output_indirect_string> (DW_FORM_strp);
28834
28835 switch_to_section (debug_str_offsets_section);
28836 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28837 header.  Note that we don't need to generate a label for the
28838 actual index table following the header here, because this is
28839 for the split dwarf case only.  In a .dwo file there is only
28840 one string offsets table (and one debug info section).  But
28841 if we were to start using string offset tables for the main (or
28842 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28843 pointing to the actual index after the header.  Split dwarf
28844 units will never have a string offsets base attribute.  When
28845 a split unit is moved into a .dwp file the string offsets can
28846 be found through the .debug_cu_index section table. */
28847 if (dwarf_version >= 5)
28848 {
28849 unsigned int last_idx = 0;
28850 unsigned long str_offsets_length;
28851
28852 debug_str_hash->traverse_noresize
28853 <unsigned int *, count_index_strings> (&last_idx);
28854 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28855 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28856 dw2_asm_output_data (4, 0xffffffff,
28857 "Escape value for 64-bit DWARF extension");
28858 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28859 "Length of string offsets unit");
28860 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28861 dw2_asm_output_data (2, 0, "Header zero padding");
28862 }
28863 debug_str_hash->traverse_noresize
28864 <unsigned int *, output_index_string_offset> (&offset);
28865 switch_to_section (debug_str_dwo_section);
28866 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28867 (&cur_idx);
28868 }
28869 }
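
/* For illustration only (the string count is made up and directive
   spellings are target dependent): for split DWARF 5 with 32-bit offsets
   and three indexed strings, the unit header emitted above works out to
       .long   0x10    # Length of string offsets unit (3 * 4 + 4)
       .value  0x5     # DWARF string offsets version
       .value  0       # Header zero padding
   followed by one 4-byte offset per indexed string, with the strings
   themselves emitted in the same index order into .debug_str.dwo.  */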
28870
28871 /* Callback for htab_traverse to assign an index to an entry in the
28872 table, and to write that entry to the .debug_addr section. */
28873
28874 int
28875 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28876 {
28877 addr_table_entry *entry = *slot;
28878
28879 if (entry->refcount == 0)
28880 {
28881 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28882 || entry->index == NOT_INDEXED);
28883 return 1;
28884 }
28885
28886 gcc_assert (entry->index == *cur_index);
28887 (*cur_index)++;
28888
28889 switch (entry->kind)
28890 {
28891 case ate_kind_rtx:
28892 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28893 "0x%x", entry->index);
28894 break;
28895 case ate_kind_rtx_dtprel:
28896 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28897 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28898 DWARF2_ADDR_SIZE,
28899 entry->addr.rtl);
28900 fputc ('\n', asm_out_file);
28901 break;
28902 case ate_kind_label:
28903 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28904 "0x%x", entry->index);
28905 break;
28906 default:
28907 gcc_unreachable ();
28908 }
28909 return 1;
28910 }
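
/* For illustration only (the symbols are made up): on a 64-bit target each
   referenced entry contributes one DWARF2_ADDR_SIZE slot to .debug_addr,
   e.g.
       .quad   foo     # 0x0
       .quad   .LVL3   # 0x1
   for an ate_kind_rtx and an ate_kind_label entry respectively, while
   ate_kind_rtx_dtprel entries go through the target's output_dwarf_dtprel
   hook; the debug info then refers to these slots by index.  */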
28911
28912 /* A helper function for dwarf2out_finish.  Counts the number
28913 of indexed addresses.  Must match the logic of the function
28914 output_addr_table_entry above. */
28915 int
28916 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28917 {
28918 addr_table_entry *entry = *slot;
28919
28920 if (entry->refcount > 0)
28921 *last_idx += 1;
28922 return 1;
28923 }
28924
28925 /* Produce the .debug_addr section. */
28926
28927 static void
28928 output_addr_table (void)
28929 {
28930 unsigned int index = 0;
28931 if (addr_index_table == NULL || addr_index_table->size () == 0)
28932 return;
28933
28934 switch_to_section (debug_addr_section);
28935 addr_index_table
28936 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28937 }
28938
28939 #if ENABLE_ASSERT_CHECKING
28940 /* Verify that all marks are clear. */
28941
28942 static void
28943 verify_marks_clear (dw_die_ref die)
28944 {
28945 dw_die_ref c;
28946
28947 gcc_assert (! die->die_mark);
28948 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28949 }
28950 #endif /* ENABLE_ASSERT_CHECKING */
28951
28952 /* Clear the marks for a die and its children.
28953 Be cool if the mark isn't set. */
28954
28955 static void
28956 prune_unmark_dies (dw_die_ref die)
28957 {
28958 dw_die_ref c;
28959
28960 if (die->die_mark)
28961 die->die_mark = 0;
28962 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28963 }
28964
28965 /* Given LOC that is referenced by a DIE we're marking as used, find all
28966 the DWARF procedures and other DIEs it references and mark them as used. */
28967
28968 static void
28969 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28970 {
28971 for (; loc != NULL; loc = loc->dw_loc_next)
28972 switch (loc->dw_loc_opc)
28973 {
28974 case DW_OP_implicit_pointer:
28975 case DW_OP_convert:
28976 case DW_OP_reinterpret:
28977 case DW_OP_GNU_implicit_pointer:
28978 case DW_OP_GNU_convert:
28979 case DW_OP_GNU_reinterpret:
28980 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28981 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28982 break;
28983 case DW_OP_GNU_variable_value:
28984 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28985 {
28986 dw_die_ref ref
28987 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28988 if (ref == NULL)
28989 break;
28990 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28991 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28992 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28993 }
28994 /* FALLTHRU */
28995 case DW_OP_call2:
28996 case DW_OP_call4:
28997 case DW_OP_call_ref:
28998 case DW_OP_const_type:
28999 case DW_OP_GNU_const_type:
29000 case DW_OP_GNU_parameter_ref:
29001 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29002 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29003 break;
29004 case DW_OP_regval_type:
29005 case DW_OP_deref_type:
29006 case DW_OP_GNU_regval_type:
29007 case DW_OP_GNU_deref_type:
29008 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29009 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29010 break;
29011 case DW_OP_entry_value:
29012 case DW_OP_GNU_entry_value:
29013 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29014 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29015 break;
29016 default:
29017 break;
29018 }
29019 }
29020
29021 /* Given DIE that we're marking as used, find any other dies
29022 it references as attributes and mark them as used. */
29023
29024 static void
29025 prune_unused_types_walk_attribs (dw_die_ref die)
29026 {
29027 dw_attr_node *a;
29028 unsigned ix;
29029
29030 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29031 {
29032 switch (AT_class (a))
29033 {
29034 /* Make sure DWARF procedures referenced by location descriptions will
29035 get emitted. */
29036 case dw_val_class_loc:
29037 prune_unused_types_walk_loc_descr (AT_loc (a));
29038 break;
29039 case dw_val_class_loc_list:
29040 for (dw_loc_list_ref list = AT_loc_list (a);
29041 list != NULL;
29042 list = list->dw_loc_next)
29043 prune_unused_types_walk_loc_descr (list->expr);
29044 break;
29045
29046 case dw_val_class_view_list:
29047 /* This points to a loc_list in another attribute, so it's
29048 already covered. */
29049 break;
29050
29051 case dw_val_class_die_ref:
29052 /* A reference to another DIE.
29053 Make sure that it will get emitted.
29054 If it was broken out into a comdat group, don't follow it. */
29055 if (! AT_ref (a)->comdat_type_p
29056 || a->dw_attr == DW_AT_specification)
29057 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29058 break;
29059
29060 case dw_val_class_str:
29061 /* Set the string's refcount to 0 so that prune_unused_types_mark
29062 accounts properly for it. */
29063 a->dw_attr_val.v.val_str->refcount = 0;
29064 break;
29065
29066 default:
29067 break;
29068 }
29069 }
29070 }
29071
29072 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29073
29074 static void
29075 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29076 {
29077 dw_die_ref c;
29078
29079 if (die == NULL || die->die_child == NULL)
29080 return;
29081 c = die->die_child;
29082 do
29083 {
29084 if (is_template_parameter (c))
29085 prune_unused_types_mark (c, 1);
29086 c = c->die_sib;
29087 } while (c && c != die->die_child);
29088 }
29089
29090 /* Mark DIE as being used. If DOKIDS is true, then walk down
29091 to DIE's children. */
29092
29093 static void
29094 prune_unused_types_mark (dw_die_ref die, int dokids)
29095 {
29096 dw_die_ref c;
29097
29098 if (die->die_mark == 0)
29099 {
29100 /* We haven't done this node yet. Mark it as used. */
29101 die->die_mark = 1;
29102 /* If this is the DIE of a generic type instantiation,
29103 mark the children DIEs that describe its generic parms and
29104 args. */
29105 prune_unused_types_mark_generic_parms_dies (die);
29106
29107 /* We also have to mark its parents as used.
29108 (But we don't want to mark our parent's kids due to this,
29109 unless it is a class.) */
29110 if (die->die_parent)
29111 prune_unused_types_mark (die->die_parent,
29112 class_scope_p (die->die_parent));
29113
29114 /* Mark any referenced nodes. */
29115 prune_unused_types_walk_attribs (die);
29116
29117 /* If this node is a specification,
29118 also mark the definition, if it exists. */
29119 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29120 prune_unused_types_mark (die->die_definition, 1);
29121 }
29122
29123 if (dokids && die->die_mark != 2)
29124 {
29125 /* We need to walk the children, but haven't done so yet.
29126 Remember that we've walked the kids. */
29127 die->die_mark = 2;
29128
29129 /* If this is an array type, we need to make sure our
29130 kids get marked, even if they're types. If we're
29131 breaking out types into comdat sections, do this
29132 for all type definitions. */
29133 if (die->die_tag == DW_TAG_array_type
29134 || (use_debug_types
29135 && is_type_die (die) && ! is_declaration_die (die)))
29136 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29137 else
29138 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29139 }
29140 }
29141
29142 /* For local classes, check whether any static member functions were emitted
29143 and, if so, mark them. */
29144
29145 static void
29146 prune_unused_types_walk_local_classes (dw_die_ref die)
29147 {
29148 dw_die_ref c;
29149
29150 if (die->die_mark == 2)
29151 return;
29152
29153 switch (die->die_tag)
29154 {
29155 case DW_TAG_structure_type:
29156 case DW_TAG_union_type:
29157 case DW_TAG_class_type:
29158 break;
29159
29160 case DW_TAG_subprogram:
29161 if (!get_AT_flag (die, DW_AT_declaration)
29162 || die->die_definition != NULL)
29163 prune_unused_types_mark (die, 1);
29164 return;
29165
29166 default:
29167 return;
29168 }
29169
29170 /* Mark children. */
29171 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29172 }
29173
29174 /* Walk the tree DIE and mark types that we actually use. */
29175
29176 static void
29177 prune_unused_types_walk (dw_die_ref die)
29178 {
29179 dw_die_ref c;
29180
29181 /* Don't do anything if this node is already marked and
29182 children have been marked as well. */
29183 if (die->die_mark == 2)
29184 return;
29185
29186 switch (die->die_tag)
29187 {
29188 case DW_TAG_structure_type:
29189 case DW_TAG_union_type:
29190 case DW_TAG_class_type:
29191 if (die->die_perennial_p)
29192 break;
29193
29194 for (c = die->die_parent; c; c = c->die_parent)
29195 if (c->die_tag == DW_TAG_subprogram)
29196 break;
29197
29198 /* Finding used static member functions inside of classes
29199 is needed just for local classes, because for other classes
29200 static member function DIEs with DW_AT_specification
29201 are emitted outside of the DW_TAG_*_type. If we ever change
29202 it, we'd need to call this even for non-local classes. */
29203 if (c)
29204 prune_unused_types_walk_local_classes (die);
29205
29206 /* It's a type node --- don't mark it. */
29207 return;
29208
29209 case DW_TAG_const_type:
29210 case DW_TAG_packed_type:
29211 case DW_TAG_pointer_type:
29212 case DW_TAG_reference_type:
29213 case DW_TAG_rvalue_reference_type:
29214 case DW_TAG_volatile_type:
29215 case DW_TAG_typedef:
29216 case DW_TAG_array_type:
29217 case DW_TAG_interface_type:
29218 case DW_TAG_friend:
29219 case DW_TAG_enumeration_type:
29220 case DW_TAG_subroutine_type:
29221 case DW_TAG_string_type:
29222 case DW_TAG_set_type:
29223 case DW_TAG_subrange_type:
29224 case DW_TAG_ptr_to_member_type:
29225 case DW_TAG_file_type:
29226 /* Type nodes are useful only when other DIEs reference them --- don't
29227 mark them. */
29228 /* FALLTHROUGH */
29229
29230 case DW_TAG_dwarf_procedure:
29231 /* Likewise for DWARF procedures. */
29232
29233 if (die->die_perennial_p)
29234 break;
29235
29236 return;
29237
29238 default:
29239 /* Mark everything else. */
29240 break;
29241 }
29242
29243 if (die->die_mark == 0)
29244 {
29245 die->die_mark = 1;
29246
29247 /* Now, mark any dies referenced from here. */
29248 prune_unused_types_walk_attribs (die);
29249 }
29250
29251 die->die_mark = 2;
29252
29253 /* Mark children. */
29254 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29255 }
29256
29257 /* Increment the string counts on strings referred to from DIE's
29258 attributes. */
29259
29260 static void
29261 prune_unused_types_update_strings (dw_die_ref die)
29262 {
29263 dw_attr_node *a;
29264 unsigned ix;
29265
29266 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29267 if (AT_class (a) == dw_val_class_str)
29268 {
29269 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29270 s->refcount++;
29271 /* Avoid unnecessarily putting strings that are used less than
29272 twice in the hash table. */
29273 if (s->refcount
29274 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29275 {
29276 indirect_string_node **slot
29277 = debug_str_hash->find_slot_with_hash (s->str,
29278 htab_hash_string (s->str),
29279 INSERT);
29280 gcc_assert (*slot == NULL);
29281 *slot = s;
29282 }
29283 }
29284 }
29285
29286 /* Mark DIE and its children as removed. */
29287
29288 static void
29289 mark_removed (dw_die_ref die)
29290 {
29291 dw_die_ref c;
29292 die->removed = true;
29293 FOR_EACH_CHILD (die, c, mark_removed (c));
29294 }
29295
29296 /* Remove from the tree DIE any dies that aren't marked. */
29297
29298 static void
29299 prune_unused_types_prune (dw_die_ref die)
29300 {
29301 dw_die_ref c;
29302
29303 gcc_assert (die->die_mark);
29304 prune_unused_types_update_strings (die);
29305
29306 if (! die->die_child)
29307 return;
29308
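/* Children of a DIE form a circular singly-linked list through die_sib,
   with die->die_child pointing at the last child; the loop below unlinks
   unmarked children while keeping that structure intact.  */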
29309 c = die->die_child;
29310 do {
29311 dw_die_ref prev = c, next;
29312 for (c = c->die_sib; ! c->die_mark; c = next)
29313 if (c == die->die_child)
29314 {
29315 /* No marked children between 'prev' and the end of the list. */
29316 if (prev == c)
29317 /* No marked children at all. */
29318 die->die_child = NULL;
29319 else
29320 {
29321 prev->die_sib = c->die_sib;
29322 die->die_child = prev;
29323 }
29324 c->die_sib = NULL;
29325 mark_removed (c);
29326 return;
29327 }
29328 else
29329 {
29330 next = c->die_sib;
29331 c->die_sib = NULL;
29332 mark_removed (c);
29333 }
29334
29335 if (c != prev->die_sib)
29336 prev->die_sib = c;
29337 prune_unused_types_prune (c);
29338 } while (c != die->die_child);
29339 }
29340
29341 /* Remove dies representing declarations that we never use. */
29342
29343 static void
29344 prune_unused_types (void)
29345 {
29346 unsigned int i;
29347 limbo_die_node *node;
29348 comdat_type_node *ctnode;
29349 pubname_entry *pub;
29350 dw_die_ref base_type;
29351
29352 #if ENABLE_ASSERT_CHECKING
29353 /* All the marks should already be clear. */
29354 verify_marks_clear (comp_unit_die ());
29355 for (node = limbo_die_list; node; node = node->next)
29356 verify_marks_clear (node->die);
29357 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29358 verify_marks_clear (ctnode->root_die);
29359 #endif /* ENABLE_ASSERT_CHECKING */
29360
29361 /* Mark types that are used in global variables. */
29362 premark_types_used_by_global_vars ();
29363
29364 /* Set the mark on nodes that are actually used. */
29365 prune_unused_types_walk (comp_unit_die ());
29366 for (node = limbo_die_list; node; node = node->next)
29367 prune_unused_types_walk (node->die);
29368 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29369 {
29370 prune_unused_types_walk (ctnode->root_die);
29371 prune_unused_types_mark (ctnode->type_die, 1);
29372 }
29373
29374 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29375 are unusual in that they are pubnames that are the children of pubtypes.
29376 They should only be marked via their parent DW_TAG_enumeration_type die,
29377 not as roots in themselves. */
29378 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29379 if (pub->die->die_tag != DW_TAG_enumerator)
29380 prune_unused_types_mark (pub->die, 1);
29381 for (i = 0; base_types.iterate (i, &base_type); i++)
29382 prune_unused_types_mark (base_type, 1);
29383
29384 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29385 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29386 callees). */
29387 cgraph_node *cnode;
29388 FOR_EACH_FUNCTION (cnode)
29389 if (cnode->referred_to_p (false))
29390 {
29391 dw_die_ref die = lookup_decl_die (cnode->decl);
29392 if (die == NULL || die->die_mark)
29393 continue;
29394 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29395 if (e->caller != cnode
29396 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29397 {
29398 prune_unused_types_mark (die, 1);
29399 break;
29400 }
29401 }
29402
29403 if (debug_str_hash)
29404 debug_str_hash->empty ();
29405 if (skeleton_debug_str_hash)
29406 skeleton_debug_str_hash->empty ();
29407 prune_unused_types_prune (comp_unit_die ());
29408 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29409 {
29410 node = *pnode;
29411 if (!node->die->die_mark)
29412 *pnode = node->next;
29413 else
29414 {
29415 prune_unused_types_prune (node->die);
29416 pnode = &node->next;
29417 }
29418 }
29419 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29420 prune_unused_types_prune (ctnode->root_die);
29421
29422 /* Leave the marks clear. */
29423 prune_unmark_dies (comp_unit_die ());
29424 for (node = limbo_die_list; node; node = node->next)
29425 prune_unmark_dies (node->die);
29426 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29427 prune_unmark_dies (ctnode->root_die);
29428 }
29429
29430 /* Helpers to manipulate hash table of comdat type units. */
29431
29432 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29433 {
29434 static inline hashval_t hash (const comdat_type_node *);
29435 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29436 };
29437
29438 inline hashval_t
29439 comdat_type_hasher::hash (const comdat_type_node *type_node)
29440 {
29441 hashval_t h;
29442 memcpy (&h, type_node->signature, sizeof (h));
29443 return h;
29444 }
29445
29446 inline bool
29447 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29448 const comdat_type_node *type_node_2)
29449 {
29450 return (! memcmp (type_node_1->signature, type_node_2->signature,
29451 DWARF_TYPE_SIGNATURE_SIZE));
29452 }
29453
29454 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29455 to the location where it would have been added had we known the
29456 DECL_ASSEMBLER_NAME when the other attributes were added. This will
29457 probably improve the compactness of debug info by removing equivalent
29458 abbrevs, and hides any differences caused by deferring the
29459 computation of the assembler name, triggered e.g. by PCH. */
29460
29461 static inline void
29462 move_linkage_attr (dw_die_ref die)
29463 {
29464 unsigned ix = vec_safe_length (die->die_attr);
29465 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29466
29467 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29468 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29469
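/* Scan backwards for the attribute after which the linkage name would
   originally have been added: right after DW_AT_name or the decl
   line/column attributes.  */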
29470 while (--ix > 0)
29471 {
29472 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29473
29474 if (prev->dw_attr == DW_AT_decl_line
29475 || prev->dw_attr == DW_AT_decl_column
29476 || prev->dw_attr == DW_AT_name)
29477 break;
29478 }
29479
29480 if (ix != vec_safe_length (die->die_attr) - 1)
29481 {
29482 die->die_attr->pop ();
29483 die->die_attr->quick_insert (ix, linkage);
29484 }
29485 }
29486
29487 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29488 referenced from typed stack ops and count how often they are used. */
29489
29490 static void
29491 mark_base_types (dw_loc_descr_ref loc)
29492 {
29493 dw_die_ref base_type = NULL;
29494
29495 for (; loc; loc = loc->dw_loc_next)
29496 {
29497 switch (loc->dw_loc_opc)
29498 {
29499 case DW_OP_regval_type:
29500 case DW_OP_deref_type:
29501 case DW_OP_GNU_regval_type:
29502 case DW_OP_GNU_deref_type:
29503 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29504 break;
29505 case DW_OP_convert:
29506 case DW_OP_reinterpret:
29507 case DW_OP_GNU_convert:
29508 case DW_OP_GNU_reinterpret:
29509 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29510 continue;
29511 /* FALLTHRU */
29512 case DW_OP_const_type:
29513 case DW_OP_GNU_const_type:
29514 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29515 break;
29516 case DW_OP_entry_value:
29517 case DW_OP_GNU_entry_value:
29518 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29519 continue;
29520 default:
29521 continue;
29522 }
29523 gcc_assert (base_type->die_parent == comp_unit_die ());
29524 if (base_type->die_mark)
29525 base_type->die_mark++;
29526 else
29527 {
29528 base_types.safe_push (base_type);
29529 base_type->die_mark = 1;
29530 }
29531 }
29532 }
29533
29534 /* Comparison function for sorting marked base types. */
29535
29536 static int
29537 base_type_cmp (const void *x, const void *y)
29538 {
29539 dw_die_ref dx = *(const dw_die_ref *) x;
29540 dw_die_ref dy = *(const dw_die_ref *) y;
29541 unsigned int byte_size1, byte_size2;
29542 unsigned int encoding1, encoding2;
29543 unsigned int align1, align2;
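/* Sort primarily by decreasing usage count (die_mark); the remaining
   keys only serve to make the order deterministic.  */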
29544 if (dx->die_mark > dy->die_mark)
29545 return -1;
29546 if (dx->die_mark < dy->die_mark)
29547 return 1;
29548 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29549 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29550 if (byte_size1 < byte_size2)
29551 return 1;
29552 if (byte_size1 > byte_size2)
29553 return -1;
29554 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29555 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29556 if (encoding1 < encoding2)
29557 return 1;
29558 if (encoding1 > encoding2)
29559 return -1;
29560 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29561 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29562 if (align1 < align2)
29563 return 1;
29564 if (align1 > align2)
29565 return -1;
29566 return 0;
29567 }
29568
29569 /* Move base types marked by mark_base_types as early as possible
29570 in the CU, sorted by decreasing usage count both to make the
29571 uleb128 references as small as possible and to make sure they
29572 will have die_offset already computed by calc_die_sizes when
29573 the sizes of typed stack loc ops are computed. */
29574
29575 static void
29576 move_marked_base_types (void)
29577 {
29578 unsigned int i;
29579 dw_die_ref base_type, die, c;
29580
29581 if (base_types.is_empty ())
29582 return;
29583
29584 /* Sort by decreasing usage count, they will be added again in that
29585 order later on. */
29586 base_types.qsort (base_type_cmp);
29587 die = comp_unit_die ();
29588 c = die->die_child;
29589 do
29590 {
29591 dw_die_ref prev = c;
29592 c = c->die_sib;
29593 while (c->die_mark)
29594 {
29595 remove_child_with_prev (c, prev);
29596 /* As base types got marked, there must be at least
29597 one node other than DW_TAG_base_type. */
29598 gcc_assert (die->die_child != NULL);
29599 c = prev->die_sib;
29600 }
29601 }
29602 while (c != die->die_child);
29603 gcc_assert (die->die_child);
29604 c = die->die_child;
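/* Link the sorted base types in right after the last child, which makes
   them the first children of the CU, in sorted order.  */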
29605 for (i = 0; base_types.iterate (i, &base_type); i++)
29606 {
29607 base_type->die_mark = 0;
29608 base_type->die_sib = c->die_sib;
29609 c->die_sib = base_type;
29610 c = base_type;
29611 }
29612 }
29613
29614 /* Helper function for resolve_addr: attempt to resolve
29615 one CONST_STRING and return true if successful. Similarly, verify that
29616 SYMBOL_REFs refer to variables emitted in the current CU. */
29617
29618 static bool
29619 resolve_one_addr (rtx *addr)
29620 {
29621 rtx rtl = *addr;
29622
29623 if (GET_CODE (rtl) == CONST_STRING)
29624 {
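/* Rebuild the STRING_CST and look up its constant pool entry; if no pool
   entry was emitted, the address can't be resolved.  */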
29625 size_t len = strlen (XSTR (rtl, 0)) + 1;
29626 tree t = build_string (len, XSTR (rtl, 0));
29627 tree tlen = size_int (len - 1);
29628 TREE_TYPE (t)
29629 = build_array_type (char_type_node, build_index_type (tlen));
29630 rtl = lookup_constant_def (t);
29631 if (!rtl || !MEM_P (rtl))
29632 return false;
29633 rtl = XEXP (rtl, 0);
29634 if (GET_CODE (rtl) == SYMBOL_REF
29635 && SYMBOL_REF_DECL (rtl)
29636 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29637 return false;
29638 vec_safe_push (used_rtx_array, rtl);
29639 *addr = rtl;
29640 return true;
29641 }
29642
29643 if (GET_CODE (rtl) == SYMBOL_REF
29644 && SYMBOL_REF_DECL (rtl))
29645 {
29646 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29647 {
29648 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29649 return false;
29650 }
29651 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29652 return false;
29653 }
29654
29655 if (GET_CODE (rtl) == CONST)
29656 {
29657 subrtx_ptr_iterator::array_type array;
29658 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29659 if (!resolve_one_addr (*iter))
29660 return false;
29661 }
29662
29663 return true;
29664 }
29665
29666 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29667 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29668 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29669
29670 static rtx
29671 string_cst_pool_decl (tree t)
29672 {
29673 rtx rtl = output_constant_def (t, 1);
29674 unsigned char *array;
29675 dw_loc_descr_ref l;
29676 tree decl;
29677 size_t len;
29678 dw_die_ref ref;
29679
29680 if (!rtl || !MEM_P (rtl))
29681 return NULL_RTX;
29682 rtl = XEXP (rtl, 0);
29683 if (GET_CODE (rtl) != SYMBOL_REF
29684 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29685 return NULL_RTX;
29686
29687 decl = SYMBOL_REF_DECL (rtl);
29688 if (!lookup_decl_die (decl))
29689 {
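/* First time we see this string: create a DW_TAG_dwarf_procedure whose
   location is a DW_OP_implicit_value holding the string bytes, so that
   DW_OP_implicit_pointer can refer to it.  */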
29690 len = TREE_STRING_LENGTH (t);
29691 vec_safe_push (used_rtx_array, rtl);
29692 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29693 array = ggc_vec_alloc<unsigned char> (len);
29694 memcpy (array, TREE_STRING_POINTER (t), len);
29695 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29696 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29697 l->dw_loc_oprnd2.v.val_vec.length = len;
29698 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29699 l->dw_loc_oprnd2.v.val_vec.array = array;
29700 add_AT_loc (ref, DW_AT_location, l);
29701 equate_decl_number_to_die (decl, ref);
29702 }
29703 return rtl;
29704 }
29705
29706 /* Helper function of resolve_addr_in_expr. LOC is
29707 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29708 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29709 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29710 with DW_OP_implicit_pointer if possible and return true;
29711 if unsuccessful, return false. */
29712
29713 static bool
29714 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29715 {
29716 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29717 HOST_WIDE_INT offset = 0;
29718 dw_die_ref ref = NULL;
29719 tree decl;
29720
29721 if (GET_CODE (rtl) == CONST
29722 && GET_CODE (XEXP (rtl, 0)) == PLUS
29723 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29724 {
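/* Peel a constant offset off (const (plus (symbol_ref ...) (const_int ...))).  */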
29725 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29726 rtl = XEXP (XEXP (rtl, 0), 0);
29727 }
29728 if (GET_CODE (rtl) == CONST_STRING)
29729 {
29730 size_t len = strlen (XSTR (rtl, 0)) + 1;
29731 tree t = build_string (len, XSTR (rtl, 0));
29732 tree tlen = size_int (len - 1);
29733
29734 TREE_TYPE (t)
29735 = build_array_type (char_type_node, build_index_type (tlen));
29736 rtl = string_cst_pool_decl (t);
29737 if (!rtl)
29738 return false;
29739 }
29740 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29741 {
29742 decl = SYMBOL_REF_DECL (rtl);
29743 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29744 {
29745 ref = lookup_decl_die (decl);
29746 if (ref && (get_AT (ref, DW_AT_location)
29747 || get_AT (ref, DW_AT_const_value)))
29748 {
29749 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29750 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29751 loc->dw_loc_oprnd1.val_entry = NULL;
29752 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29753 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29754 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29755 loc->dw_loc_oprnd2.v.val_int = offset;
29756 return true;
29757 }
29758 }
29759 }
29760 return false;
29761 }
29762
29763 /* Helper function for resolve_addr. Handle one location
29764 expression; return false if at least one CONST_STRING or SYMBOL_REF in
29765 the location list couldn't be resolved. */
29766
29767 static bool
29768 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29769 {
29770 dw_loc_descr_ref keep = NULL;
29771 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29772 switch (loc->dw_loc_opc)
29773 {
29774 case DW_OP_addr:
29775 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29776 {
29777 if ((prev == NULL
29778 || prev->dw_loc_opc == DW_OP_piece
29779 || prev->dw_loc_opc == DW_OP_bit_piece)
29780 && loc->dw_loc_next
29781 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29782 && (!dwarf_strict || dwarf_version >= 5)
29783 && optimize_one_addr_into_implicit_ptr (loc))
29784 break;
29785 return false;
29786 }
29787 break;
29788 case DW_OP_GNU_addr_index:
29789 case DW_OP_addrx:
29790 case DW_OP_GNU_const_index:
29791 case DW_OP_constx:
29792 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29793 || loc->dw_loc_opc == DW_OP_addrx)
29794 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29795 || loc->dw_loc_opc == DW_OP_constx)
29796 && loc->dtprel))
29797 {
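/* Resolve the RTL behind the address table entry and re-enter it, since
   resolve_one_addr may have replaced the rtx.  */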
29798 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29799 if (!resolve_one_addr (&rtl))
29800 return false;
29801 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29802 loc->dw_loc_oprnd1.val_entry
29803 = add_addr_table_entry (rtl, ate_kind_rtx);
29804 }
29805 break;
29806 case DW_OP_const4u:
29807 case DW_OP_const8u:
29808 if (loc->dtprel
29809 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29810 return false;
29811 break;
29812 case DW_OP_plus_uconst:
29813 if (size_of_loc_descr (loc)
29814 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29815 + 1
29816 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29817 {
29818 dw_loc_descr_ref repl
29819 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29820 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29821 add_loc_descr (&repl, loc->dw_loc_next);
29822 *loc = *repl;
29823 }
29824 break;
29825 case DW_OP_implicit_value:
29826 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29827 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29828 return false;
29829 break;
29830 case DW_OP_implicit_pointer:
29831 case DW_OP_GNU_implicit_pointer:
29832 case DW_OP_GNU_parameter_ref:
29833 case DW_OP_GNU_variable_value:
29834 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29835 {
29836 dw_die_ref ref
29837 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29838 if (ref == NULL)
29839 return false;
29840 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29841 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29842 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29843 }
29844 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29845 {
29846 if (prev == NULL
29847 && loc->dw_loc_next == NULL
29848 && AT_class (a) == dw_val_class_loc)
29849 switch (a->dw_attr)
29850 {
29851 /* The following attributes allow both exprloc and reference forms,
29852 so if the whole expression is a lone DW_OP_GNU_variable_value
29853 we can transform it into a reference. */
29854 case DW_AT_byte_size:
29855 case DW_AT_bit_size:
29856 case DW_AT_lower_bound:
29857 case DW_AT_upper_bound:
29858 case DW_AT_bit_stride:
29859 case DW_AT_count:
29860 case DW_AT_allocated:
29861 case DW_AT_associated:
29862 case DW_AT_byte_stride:
29863 a->dw_attr_val.val_class = dw_val_class_die_ref;
29864 a->dw_attr_val.val_entry = NULL;
29865 a->dw_attr_val.v.val_die_ref.die
29866 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29867 a->dw_attr_val.v.val_die_ref.external = 0;
29868 return true;
29869 default:
29870 break;
29871 }
29872 if (dwarf_strict)
29873 return false;
29874 }
29875 break;
29876 case DW_OP_const_type:
29877 case DW_OP_regval_type:
29878 case DW_OP_deref_type:
29879 case DW_OP_convert:
29880 case DW_OP_reinterpret:
29881 case DW_OP_GNU_const_type:
29882 case DW_OP_GNU_regval_type:
29883 case DW_OP_GNU_deref_type:
29884 case DW_OP_GNU_convert:
29885 case DW_OP_GNU_reinterpret:
29886 while (loc->dw_loc_next
29887 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29888 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29889 {
29890 dw_die_ref base1, base2;
29891 unsigned enc1, enc2, size1, size2;
29892 if (loc->dw_loc_opc == DW_OP_regval_type
29893 || loc->dw_loc_opc == DW_OP_deref_type
29894 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29895 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29896 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29897 else if (loc->dw_loc_oprnd1.val_class
29898 == dw_val_class_unsigned_const)
29899 break;
29900 else
29901 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29902 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29903 == dw_val_class_unsigned_const)
29904 break;
29905 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29906 gcc_assert (base1->die_tag == DW_TAG_base_type
29907 && base2->die_tag == DW_TAG_base_type);
29908 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29909 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29910 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29911 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29912 if (size1 == size2
29913 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29914 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29915 && loc != keep)
29916 || enc1 == enc2))
29917 {
29918 /* Optimize away next DW_OP_convert after
29919 adjusting LOC's base type die reference. */
29920 if (loc->dw_loc_opc == DW_OP_regval_type
29921 || loc->dw_loc_opc == DW_OP_deref_type
29922 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29923 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29924 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29925 else
29926 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29927 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29928 continue;
29929 }
29930 /* Don't change integer DW_OP_convert after e.g. floating
29931 point typed stack entry. */
29932 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29933 keep = loc->dw_loc_next;
29934 break;
29935 }
29936 break;
29937 default:
29938 break;
29939 }
29940 return true;
29941 }
29942
29943 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
29944 of DW_OP_addr alone, whose operand referred to DECL, and that
29945 DW_OP_addr couldn't be resolved. resolve_addr has already
29946 removed the DW_AT_location attribute. This function attempts to
29947 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29948 or a DW_AT_const_value attribute, if possible. */
29949
29950 static void
29951 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29952 {
29953 if (!VAR_P (decl)
29954 || lookup_decl_die (decl) != die
29955 || DECL_EXTERNAL (decl)
29956 || !TREE_STATIC (decl)
29957 || DECL_INITIAL (decl) == NULL_TREE
29958 || DECL_P (DECL_INITIAL (decl))
29959 || get_AT (die, DW_AT_const_value))
29960 return;
29961
29962 tree init = DECL_INITIAL (decl);
29963 HOST_WIDE_INT offset = 0;
29964 /* For variables that have been optimized away and thus
29965 don't have a memory location, see if we can emit
29966 DW_AT_const_value instead. */
29967 if (tree_add_const_value_attribute (die, init))
29968 return;
29969 if (dwarf_strict && dwarf_version < 5)
29970 return;
29971 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29972 and ADDR_EXPR refers to a decl that has DW_AT_location or
29973 DW_AT_const_value (but isn't addressable, otherwise
29974 resolving the original DW_OP_addr wouldn't fail), see if
29975 we can add DW_OP_implicit_pointer. */
29976 STRIP_NOPS (init);
29977 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29978 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29979 {
29980 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29981 init = TREE_OPERAND (init, 0);
29982 STRIP_NOPS (init);
29983 }
29984 if (TREE_CODE (init) != ADDR_EXPR)
29985 return;
29986 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29987 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29988 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29989 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29990 && TREE_OPERAND (init, 0) != decl))
29991 {
29992 dw_die_ref ref;
29993 dw_loc_descr_ref l;
29994
29995 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29996 {
29997 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29998 if (!rtl)
29999 return;
30000 decl = SYMBOL_REF_DECL (rtl);
30001 }
30002 else
30003 decl = TREE_OPERAND (init, 0);
30004 ref = lookup_decl_die (decl);
30005 if (ref == NULL
30006 || (!get_AT (ref, DW_AT_location)
30007 && !get_AT (ref, DW_AT_const_value)))
30008 return;
30009 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30010 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30011 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30012 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30013 add_AT_loc (die, DW_AT_location, l);
30014 }
30015 }
30016
30017 /* Return NULL if L is a valid DWARF expression, or the first op
30018 that is not a valid DWARF expression otherwise. */
30019
30020 static dw_loc_descr_ref
30021 non_dwarf_expression (dw_loc_descr_ref l)
30022 {
30023 while (l)
30024 {
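/* DW_OP_reg<n> and the opcodes listed below are location descriptions or
   expression terminators rather than ops of a DWARF expression proper.  */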
30025 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30026 return l;
30027 switch (l->dw_loc_opc)
30028 {
30029 case DW_OP_regx:
30030 case DW_OP_implicit_value:
30031 case DW_OP_stack_value:
30032 case DW_OP_implicit_pointer:
30033 case DW_OP_GNU_implicit_pointer:
30034 case DW_OP_GNU_parameter_ref:
30035 case DW_OP_piece:
30036 case DW_OP_bit_piece:
30037 return l;
30038 default:
30039 break;
30040 }
30041 l = l->dw_loc_next;
30042 }
30043 return NULL;
30044 }
30045
30046 /* Return an adjusted copy of EXPR:
30047 If it is an empty DWARF expression, return it.
30048 If it is a valid non-empty DWARF expression,
30049 return a copy of EXPR with DW_OP_deref appended to it.
30050 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30051 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30052 If it is a DWARF expression followed by DW_OP_stack_value, return a
30053 copy of the DWARF expression without anything appended.
30054 Otherwise, return NULL. */
30055
30056 static dw_loc_descr_ref
30057 copy_deref_exprloc (dw_loc_descr_ref expr)
30058 {
30059 dw_loc_descr_ref tail = NULL;
30060
30061 if (expr == NULL)
30062 return NULL;
30063
30064 dw_loc_descr_ref l = non_dwarf_expression (expr);
30065 if (l && l->dw_loc_next)
30066 return NULL;
30067
30068 if (l)
30069 {
30070 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30071 tail = new_loc_descr ((enum dwarf_location_atom)
30072 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30073 0, 0);
30074 else
30075 switch (l->dw_loc_opc)
30076 {
30077 case DW_OP_regx:
30078 tail = new_loc_descr (DW_OP_bregx,
30079 l->dw_loc_oprnd1.v.val_unsigned, 0);
30080 break;
30081 case DW_OP_stack_value:
30082 break;
30083 default:
30084 return NULL;
30085 }
30086 }
30087 else
30088 tail = new_loc_descr (DW_OP_deref, 0, 0);
30089
30090 dw_loc_descr_ref ret = NULL, *p = &ret;
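/* Copy every op up to (but not including) the terminating non-expression
   op, then finish the copy with TAIL (which is NULL after
   DW_OP_stack_value, so nothing is appended in that case).  */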
30091 while (expr != l)
30092 {
30093 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30094 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30095 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30096 p = &(*p)->dw_loc_next;
30097 expr = expr->dw_loc_next;
30098 }
30099 *p = tail;
30100 return ret;
30101 }
30102
30103 /* For a DW_AT_string_length attribute whose value is a DW_OP_GNU_variable_value
30104 reference to a variable or argument, adjust it if needed and return:
30105 -1 if the DW_AT_string_length attribute (and the DW_AT_{string_length_,}byte_size
30106 attribute, if present) should be removed;
30107 0 if the attribute should be kept, perhaps with minor modifications, and no rescan is needed;
30108 1 if the attribute has been successfully adjusted. */
30109
30110 static int
30111 optimize_string_length (dw_attr_node *a)
30112 {
30113 dw_loc_descr_ref l = AT_loc (a), lv;
30114 dw_die_ref die;
30115 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30116 {
30117 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30118 die = lookup_decl_die (decl);
30119 if (die)
30120 {
30121 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30122 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30123 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30124 }
30125 else
30126 return -1;
30127 }
30128 else
30129 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30130
30131 /* DWARF5 allows reference class, so we can then reference the DIE.
30132 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30133 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30134 {
30135 a->dw_attr_val.val_class = dw_val_class_die_ref;
30136 a->dw_attr_val.val_entry = NULL;
30137 a->dw_attr_val.v.val_die_ref.die = die;
30138 a->dw_attr_val.v.val_die_ref.external = 0;
30139 return 0;
30140 }
30141
30142 dw_attr_node *av = get_AT (die, DW_AT_location);
30143 dw_loc_list_ref d;
30144 bool non_dwarf_expr = false;
30145
30146 if (av == NULL)
30147 return dwarf_strict ? -1 : 0;
30148 switch (AT_class (av))
30149 {
30150 case dw_val_class_loc_list:
30151 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30152 if (d->expr && non_dwarf_expression (d->expr))
30153 non_dwarf_expr = true;
30154 break;
30155 case dw_val_class_view_list:
30156 gcc_unreachable ();
30157 case dw_val_class_loc:
30158 lv = AT_loc (av);
30159 if (lv == NULL)
30160 return dwarf_strict ? -1 : 0;
30161 if (non_dwarf_expression (lv))
30162 non_dwarf_expr = true;
30163 break;
30164 default:
30165 return dwarf_strict ? -1 : 0;
30166 }
30167
30168 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30169 into DW_OP_call4 or DW_OP_GNU_variable_value into
30170 DW_OP_call4 DW_OP_deref, do so. */
30171 if (!non_dwarf_expr
30172 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30173 {
30174 l->dw_loc_opc = DW_OP_call4;
30175 if (l->dw_loc_next)
30176 l->dw_loc_next = NULL;
30177 else
30178 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30179 return 0;
30180 }
30181
30182 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30183 copy over the DW_AT_location attribute from die to a. */
30184 if (l->dw_loc_next != NULL)
30185 {
30186 a->dw_attr_val = av->dw_attr_val;
30187 return 1;
30188 }
30189
30190 dw_loc_list_ref list, *p;
30191 switch (AT_class (av))
30192 {
30193 case dw_val_class_loc_list:
30194 p = &list;
30195 list = NULL;
30196 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30197 {
30198 lv = copy_deref_exprloc (d->expr);
30199 if (lv)
30200 {
30201 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30202 p = &(*p)->dw_loc_next;
30203 }
30204 else if (!dwarf_strict && d->expr)
30205 return 0;
30206 }
30207 if (list == NULL)
30208 return dwarf_strict ? -1 : 0;
30209 a->dw_attr_val.val_class = dw_val_class_loc_list;
30210 gen_llsym (list);
30211 *AT_loc_list_ptr (a) = list;
30212 return 1;
30213 case dw_val_class_loc:
30214 lv = copy_deref_exprloc (AT_loc (av));
30215 if (lv == NULL)
30216 return dwarf_strict ? -1 : 0;
30217 a->dw_attr_val.v.val_loc = lv;
30218 return 1;
30219 default:
30220 gcc_unreachable ();
30221 }
30222 }
30223
30224 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30225 an address in .rodata section if the string literal is emitted there,
30226 or remove the containing location list or replace DW_AT_const_value
30227 with DW_AT_location and empty location expression, if it isn't found
30228 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30229 to something that has been emitted in the current CU. */
30230
30231 static void
30232 resolve_addr (dw_die_ref die)
30233 {
30234 dw_die_ref c;
30235 dw_attr_node *a;
30236 dw_loc_list_ref *curr, *start, loc;
30237 unsigned ix;
30238 bool remove_AT_byte_size = false;
30239
30240 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30241 switch (AT_class (a))
30242 {
30243 case dw_val_class_loc_list:
30244 start = curr = AT_loc_list_ptr (a);
30245 loc = *curr;
30246 gcc_assert (loc);
30247 /* The same list can be referenced more than once. See if we have
30248 already recorded the result from a previous pass. */
30249 if (loc->replaced)
30250 *curr = loc->dw_loc_next;
30251 else if (!loc->resolved_addr)
30252 {
30253 /* As things stand, we do not expect or allow one die to
30254 reference a suffix of another die's location list chain.
30255 References must be identical or completely separate.
30256 There is therefore no need to cache the result of this
30257 pass on any list other than the first; doing so
30258 would lead to unnecessary writes. */
30259 while (*curr)
30260 {
30261 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30262 if (!resolve_addr_in_expr (a, (*curr)->expr))
30263 {
30264 dw_loc_list_ref next = (*curr)->dw_loc_next;
30265 dw_loc_descr_ref l = (*curr)->expr;
30266
30267 if (next && (*curr)->ll_symbol)
30268 {
30269 gcc_assert (!next->ll_symbol);
30270 next->ll_symbol = (*curr)->ll_symbol;
30271 next->vl_symbol = (*curr)->vl_symbol;
30272 }
30273 if (dwarf_split_debug_info)
30274 remove_loc_list_addr_table_entries (l);
30275 *curr = next;
30276 }
30277 else
30278 {
30279 mark_base_types ((*curr)->expr);
30280 curr = &(*curr)->dw_loc_next;
30281 }
30282 }
30283 if (loc == *start)
30284 loc->resolved_addr = 1;
30285 else
30286 {
30287 loc->replaced = 1;
30288 loc->dw_loc_next = *start;
30289 }
30290 }
30291 if (!*start)
30292 {
30293 remove_AT (die, a->dw_attr);
30294 ix--;
30295 }
30296 break;
30297 case dw_val_class_view_list:
30298 {
30299 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30300 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30301 dw_val_node *llnode
30302 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30303 /* If we no longer have a loclist, or it no longer needs
30304 views, drop this attribute. */
30305 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30306 {
30307 remove_AT (die, a->dw_attr);
30308 ix--;
30309 }
30310 break;
30311 }
30312 case dw_val_class_loc:
30313 {
30314 dw_loc_descr_ref l = AT_loc (a);
30315 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30316 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30317 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30318 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30319 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30320 with DW_FORM_ref referencing the same DIE as
30321 DW_OP_GNU_variable_value used to reference. */
30322 if (a->dw_attr == DW_AT_string_length
30323 && l
30324 && l->dw_loc_opc == DW_OP_GNU_variable_value
30325 && (l->dw_loc_next == NULL
30326 || (l->dw_loc_next->dw_loc_next == NULL
30327 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30328 {
30329 switch (optimize_string_length (a))
30330 {
30331 case -1:
30332 remove_AT (die, a->dw_attr);
30333 ix--;
30334 /* If we drop DW_AT_string_length, we need to drop also
30335 DW_AT_{string_length_,}byte_size. */
30336 remove_AT_byte_size = true;
30337 continue;
30338 default:
30339 break;
30340 case 1:
30341 /* Even if we keep the optimized DW_AT_string_length,
30342 it might have changed AT_class, so process it again. */
30343 ix--;
30344 continue;
30345 }
30346 }
30347 /* For -gdwarf-2 don't attempt to optimize
30348 DW_AT_data_member_location containing
30349 DW_OP_plus_uconst - older consumers might
30350 rely on it being that op instead of a more complex,
30351 but shorter, location description. */
30352 if ((dwarf_version > 2
30353 || a->dw_attr != DW_AT_data_member_location
30354 || l == NULL
30355 || l->dw_loc_opc != DW_OP_plus_uconst
30356 || l->dw_loc_next != NULL)
30357 && !resolve_addr_in_expr (a, l))
30358 {
30359 if (dwarf_split_debug_info)
30360 remove_loc_list_addr_table_entries (l);
30361 if (l != NULL
30362 && l->dw_loc_next == NULL
30363 && l->dw_loc_opc == DW_OP_addr
30364 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30365 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30366 && a->dw_attr == DW_AT_location)
30367 {
30368 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30369 remove_AT (die, a->dw_attr);
30370 ix--;
30371 optimize_location_into_implicit_ptr (die, decl);
30372 break;
30373 }
30374 if (a->dw_attr == DW_AT_string_length)
30375 /* If we drop DW_AT_string_length, we need to drop also
30376 DW_AT_{string_length_,}byte_size. */
30377 remove_AT_byte_size = true;
30378 remove_AT (die, a->dw_attr);
30379 ix--;
30380 }
30381 else
30382 mark_base_types (l);
30383 }
30384 break;
30385 case dw_val_class_addr:
30386 if (a->dw_attr == DW_AT_const_value
30387 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30388 {
30389 if (AT_index (a) != NOT_INDEXED)
30390 remove_addr_table_entry (a->dw_attr_val.val_entry);
30391 remove_AT (die, a->dw_attr);
30392 ix--;
30393 }
30394 if ((die->die_tag == DW_TAG_call_site
30395 && a->dw_attr == DW_AT_call_origin)
30396 || (die->die_tag == DW_TAG_GNU_call_site
30397 && a->dw_attr == DW_AT_abstract_origin))
30398 {
30399 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30400 dw_die_ref tdie = lookup_decl_die (tdecl);
30401 dw_die_ref cdie;
30402 if (tdie == NULL
30403 && DECL_EXTERNAL (tdecl)
30404 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30405 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30406 {
30407 dw_die_ref pdie = cdie;
30408 /* Make sure we don't add these DIEs into type units.
30409 We could emit skeleton DIEs for context (namespaces,
30410 outer structs/classes) and a skeleton DIE for the
30411 innermost context with DW_AT_signature pointing to the
30412 type unit. See PR78835. */
30413 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30414 pdie = pdie->die_parent;
30415 if (pdie == NULL)
30416 {
30417 /* Creating a full DIE for tdecl is overly expensive and
30418 at this point even wrong when in the LTO phase
30419 as it can end up generating new type DIEs we didn't
30420 output and thus optimize_external_refs will crash. */
30421 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30422 add_AT_flag (tdie, DW_AT_external, 1);
30423 add_AT_flag (tdie, DW_AT_declaration, 1);
30424 add_linkage_attr (tdie, tdecl);
30425 add_name_and_src_coords_attributes (tdie, tdecl, true);
30426 equate_decl_number_to_die (tdecl, tdie);
30427 }
30428 }
30429 if (tdie)
30430 {
30431 a->dw_attr_val.val_class = dw_val_class_die_ref;
30432 a->dw_attr_val.v.val_die_ref.die = tdie;
30433 a->dw_attr_val.v.val_die_ref.external = 0;
30434 }
30435 else
30436 {
30437 if (AT_index (a) != NOT_INDEXED)
30438 remove_addr_table_entry (a->dw_attr_val.val_entry);
30439 remove_AT (die, a->dw_attr);
30440 ix--;
30441 }
30442 }
30443 break;
30444 default:
30445 break;
30446 }
30447
30448 if (remove_AT_byte_size)
30449 remove_AT (die, dwarf_version >= 5
30450 ? DW_AT_string_length_byte_size
30451 : DW_AT_byte_size);
30452
30453 FOR_EACH_CHILD (die, c, resolve_addr (c));
30454 }
30455 \f
30456 /* Helper routines for optimize_location_lists.
30457 This pass tries to share identical local lists in .debug_loc
30458 section. */
30459
30460 /* Iteratively hash operands of LOC opcode into HSTATE. */
30461
30462 static void
30463 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30464 {
30465 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30466 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30467
30468 switch (loc->dw_loc_opc)
30469 {
30470 case DW_OP_const4u:
30471 case DW_OP_const8u:
30472 if (loc->dtprel)
30473 goto hash_addr;
30474 /* FALLTHRU */
30475 case DW_OP_const1u:
30476 case DW_OP_const1s:
30477 case DW_OP_const2u:
30478 case DW_OP_const2s:
30479 case DW_OP_const4s:
30480 case DW_OP_const8s:
30481 case DW_OP_constu:
30482 case DW_OP_consts:
30483 case DW_OP_pick:
30484 case DW_OP_plus_uconst:
30485 case DW_OP_breg0:
30486 case DW_OP_breg1:
30487 case DW_OP_breg2:
30488 case DW_OP_breg3:
30489 case DW_OP_breg4:
30490 case DW_OP_breg5:
30491 case DW_OP_breg6:
30492 case DW_OP_breg7:
30493 case DW_OP_breg8:
30494 case DW_OP_breg9:
30495 case DW_OP_breg10:
30496 case DW_OP_breg11:
30497 case DW_OP_breg12:
30498 case DW_OP_breg13:
30499 case DW_OP_breg14:
30500 case DW_OP_breg15:
30501 case DW_OP_breg16:
30502 case DW_OP_breg17:
30503 case DW_OP_breg18:
30504 case DW_OP_breg19:
30505 case DW_OP_breg20:
30506 case DW_OP_breg21:
30507 case DW_OP_breg22:
30508 case DW_OP_breg23:
30509 case DW_OP_breg24:
30510 case DW_OP_breg25:
30511 case DW_OP_breg26:
30512 case DW_OP_breg27:
30513 case DW_OP_breg28:
30514 case DW_OP_breg29:
30515 case DW_OP_breg30:
30516 case DW_OP_breg31:
30517 case DW_OP_regx:
30518 case DW_OP_fbreg:
30519 case DW_OP_piece:
30520 case DW_OP_deref_size:
30521 case DW_OP_xderef_size:
30522 hstate.add_object (val1->v.val_int);
30523 break;
30524 case DW_OP_skip:
30525 case DW_OP_bra:
30526 {
30527 int offset;
30528
30529 gcc_assert (val1->val_class == dw_val_class_loc);
30530 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30531 hstate.add_object (offset);
30532 }
30533 break;
30534 case DW_OP_implicit_value:
30535 hstate.add_object (val1->v.val_unsigned);
30536 switch (val2->val_class)
30537 {
30538 case dw_val_class_const:
30539 hstate.add_object (val2->v.val_int);
30540 break;
30541 case dw_val_class_vec:
30542 {
30543 unsigned int elt_size = val2->v.val_vec.elt_size;
30544 unsigned int len = val2->v.val_vec.length;
30545
30546 hstate.add_int (elt_size);
30547 hstate.add_int (len);
30548 hstate.add (val2->v.val_vec.array, len * elt_size);
30549 }
30550 break;
30551 case dw_val_class_const_double:
30552 hstate.add_object (val2->v.val_double.low);
30553 hstate.add_object (val2->v.val_double.high);
30554 break;
30555 case dw_val_class_wide_int:
30556 hstate.add (val2->v.val_wide->get_val (),
30557 get_full_len (*val2->v.val_wide)
30558 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30559 break;
30560 case dw_val_class_addr:
30561 inchash::add_rtx (val2->v.val_addr, hstate);
30562 break;
30563 default:
30564 gcc_unreachable ();
30565 }
30566 break;
30567 case DW_OP_bregx:
30568 case DW_OP_bit_piece:
30569 hstate.add_object (val1->v.val_int);
30570 hstate.add_object (val2->v.val_int);
30571 break;
30572 case DW_OP_addr:
30573 hash_addr:
30574 if (loc->dtprel)
30575 {
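/* Fold in a marker byte so that dtprel addresses don't hash the same as
   ordinary addresses.  */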
30576 unsigned char dtprel = 0xd1;
30577 hstate.add_object (dtprel);
30578 }
30579 inchash::add_rtx (val1->v.val_addr, hstate);
30580 break;
30581 case DW_OP_GNU_addr_index:
30582 case DW_OP_addrx:
30583 case DW_OP_GNU_const_index:
30584 case DW_OP_constx:
30585 {
30586 if (loc->dtprel)
30587 {
30588 unsigned char dtprel = 0xd1;
30589 hstate.add_object (dtprel);
30590 }
30591 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30592 }
30593 break;
30594 case DW_OP_implicit_pointer:
30595 case DW_OP_GNU_implicit_pointer:
30596 hstate.add_int (val2->v.val_int);
30597 break;
30598 case DW_OP_entry_value:
30599 case DW_OP_GNU_entry_value:
30600 hstate.add_object (val1->v.val_loc);
30601 break;
30602 case DW_OP_regval_type:
30603 case DW_OP_deref_type:
30604 case DW_OP_GNU_regval_type:
30605 case DW_OP_GNU_deref_type:
30606 {
30607 unsigned int byte_size
30608 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30609 unsigned int encoding
30610 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30611 hstate.add_object (val1->v.val_int);
30612 hstate.add_object (byte_size);
30613 hstate.add_object (encoding);
30614 }
30615 break;
30616 case DW_OP_convert:
30617 case DW_OP_reinterpret:
30618 case DW_OP_GNU_convert:
30619 case DW_OP_GNU_reinterpret:
30620 if (val1->val_class == dw_val_class_unsigned_const)
30621 {
30622 hstate.add_object (val1->v.val_unsigned);
30623 break;
30624 }
30625 /* FALLTHRU */
30626 case DW_OP_const_type:
30627 case DW_OP_GNU_const_type:
30628 {
30629 unsigned int byte_size
30630 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30631 unsigned int encoding
30632 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30633 hstate.add_object (byte_size);
30634 hstate.add_object (encoding);
30635 if (loc->dw_loc_opc != DW_OP_const_type
30636 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30637 break;
30638 hstate.add_object (val2->val_class);
30639 switch (val2->val_class)
30640 {
30641 case dw_val_class_const:
30642 hstate.add_object (val2->v.val_int);
30643 break;
30644 case dw_val_class_vec:
30645 {
30646 unsigned int elt_size = val2->v.val_vec.elt_size;
30647 unsigned int len = val2->v.val_vec.length;
30648
30649 hstate.add_object (elt_size);
30650 hstate.add_object (len);
30651 hstate.add (val2->v.val_vec.array, len * elt_size);
30652 }
30653 break;
30654 case dw_val_class_const_double:
30655 hstate.add_object (val2->v.val_double.low);
30656 hstate.add_object (val2->v.val_double.high);
30657 break;
30658 case dw_val_class_wide_int:
30659 hstate.add (val2->v.val_wide->get_val (),
30660 get_full_len (*val2->v.val_wide)
30661 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30662 break;
30663 default:
30664 gcc_unreachable ();
30665 }
30666 }
30667 break;
30668
30669 default:
30670 /* Other codes have no operands. */
30671 break;
30672 }
30673 }
30674
30675 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30676
30677 static inline void
30678 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30679 {
30680 dw_loc_descr_ref l;
30681 bool sizes_computed = false;
30682 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30683 size_of_locs (loc);
30684
30685 for (l = loc; l != NULL; l = l->dw_loc_next)
30686 {
30687 enum dwarf_location_atom opc = l->dw_loc_opc;
30688 hstate.add_object (opc);
30689 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30690 {
30691 size_of_locs (loc);
30692 sizes_computed = true;
30693 }
30694 hash_loc_operands (l, hstate);
30695 }
30696 }
30697
30698 /* Compute hash of the whole location list LIST_HEAD. */
30699
30700 static inline void
30701 hash_loc_list (dw_loc_list_ref list_head)
30702 {
30703 dw_loc_list_ref curr = list_head;
30704 inchash::hash hstate;
30705
30706 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30707 {
30708 hstate.add (curr->begin, strlen (curr->begin) + 1);
30709 hstate.add (curr->end, strlen (curr->end) + 1);
30710 hstate.add_object (curr->vbegin);
30711 hstate.add_object (curr->vend);
30712 if (curr->section)
30713 hstate.add (curr->section, strlen (curr->section) + 1);
30714 hash_locs (curr->expr, hstate);
30715 }
30716 list_head->hash = hstate.end ();
30717 }
30718
30719 /* Return true if X and Y opcodes have the same operands. */
30720
30721 static inline bool
30722 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30723 {
30724 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30725 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30726 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30727 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30728
30729 switch (x->dw_loc_opc)
30730 {
30731 case DW_OP_const4u:
30732 case DW_OP_const8u:
30733 if (x->dtprel)
30734 goto hash_addr;
30735 /* FALLTHRU */
30736 case DW_OP_const1u:
30737 case DW_OP_const1s:
30738 case DW_OP_const2u:
30739 case DW_OP_const2s:
30740 case DW_OP_const4s:
30741 case DW_OP_const8s:
30742 case DW_OP_constu:
30743 case DW_OP_consts:
30744 case DW_OP_pick:
30745 case DW_OP_plus_uconst:
30746 case DW_OP_breg0:
30747 case DW_OP_breg1:
30748 case DW_OP_breg2:
30749 case DW_OP_breg3:
30750 case DW_OP_breg4:
30751 case DW_OP_breg5:
30752 case DW_OP_breg6:
30753 case DW_OP_breg7:
30754 case DW_OP_breg8:
30755 case DW_OP_breg9:
30756 case DW_OP_breg10:
30757 case DW_OP_breg11:
30758 case DW_OP_breg12:
30759 case DW_OP_breg13:
30760 case DW_OP_breg14:
30761 case DW_OP_breg15:
30762 case DW_OP_breg16:
30763 case DW_OP_breg17:
30764 case DW_OP_breg18:
30765 case DW_OP_breg19:
30766 case DW_OP_breg20:
30767 case DW_OP_breg21:
30768 case DW_OP_breg22:
30769 case DW_OP_breg23:
30770 case DW_OP_breg24:
30771 case DW_OP_breg25:
30772 case DW_OP_breg26:
30773 case DW_OP_breg27:
30774 case DW_OP_breg28:
30775 case DW_OP_breg29:
30776 case DW_OP_breg30:
30777 case DW_OP_breg31:
30778 case DW_OP_regx:
30779 case DW_OP_fbreg:
30780 case DW_OP_piece:
30781 case DW_OP_deref_size:
30782 case DW_OP_xderef_size:
30783 return valx1->v.val_int == valy1->v.val_int;
30784 case DW_OP_skip:
30785 case DW_OP_bra:
30786 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30787 can cause irrelevant differences in dw_loc_addr. */
30788 gcc_assert (valx1->val_class == dw_val_class_loc
30789 && valy1->val_class == dw_val_class_loc
30790 && (dwarf_split_debug_info
30791 || x->dw_loc_addr == y->dw_loc_addr));
30792 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30793 case DW_OP_implicit_value:
30794 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30795 || valx2->val_class != valy2->val_class)
30796 return false;
30797 switch (valx2->val_class)
30798 {
30799 case dw_val_class_const:
30800 return valx2->v.val_int == valy2->v.val_int;
30801 case dw_val_class_vec:
30802 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30803 && valx2->v.val_vec.length == valy2->v.val_vec.length
30804 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30805 valx2->v.val_vec.elt_size
30806 * valx2->v.val_vec.length) == 0;
30807 case dw_val_class_const_double:
30808 return valx2->v.val_double.low == valy2->v.val_double.low
30809 && valx2->v.val_double.high == valy2->v.val_double.high;
30810 case dw_val_class_wide_int:
30811 return *valx2->v.val_wide == *valy2->v.val_wide;
30812 case dw_val_class_addr:
30813 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30814 default:
30815 gcc_unreachable ();
30816 }
30817 case DW_OP_bregx:
30818 case DW_OP_bit_piece:
30819 return valx1->v.val_int == valy1->v.val_int
30820 && valx2->v.val_int == valy2->v.val_int;
30821 case DW_OP_addr:
30822 hash_addr:
30823 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30824 case DW_OP_GNU_addr_index:
30825 case DW_OP_addrx:
30826 case DW_OP_GNU_const_index:
30827 case DW_OP_constx:
30828 {
30829 rtx ax1 = valx1->val_entry->addr.rtl;
30830 rtx ay1 = valy1->val_entry->addr.rtl;
30831 return rtx_equal_p (ax1, ay1);
30832 }
30833 case DW_OP_implicit_pointer:
30834 case DW_OP_GNU_implicit_pointer:
30835 return valx1->val_class == dw_val_class_die_ref
30836 && valx1->val_class == valy1->val_class
30837 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30838 && valx2->v.val_int == valy2->v.val_int;
30839 case DW_OP_entry_value:
30840 case DW_OP_GNU_entry_value:
30841 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30842 case DW_OP_const_type:
30843 case DW_OP_GNU_const_type:
30844 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30845 || valx2->val_class != valy2->val_class)
30846 return false;
30847 switch (valx2->val_class)
30848 {
30849 case dw_val_class_const:
30850 return valx2->v.val_int == valy2->v.val_int;
30851 case dw_val_class_vec:
30852 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30853 && valx2->v.val_vec.length == valy2->v.val_vec.length
30854 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30855 valx2->v.val_vec.elt_size
30856 * valx2->v.val_vec.length) == 0;
30857 case dw_val_class_const_double:
30858 return valx2->v.val_double.low == valy2->v.val_double.low
30859 && valx2->v.val_double.high == valy2->v.val_double.high;
30860 case dw_val_class_wide_int:
30861 return *valx2->v.val_wide == *valy2->v.val_wide;
30862 default:
30863 gcc_unreachable ();
30864 }
30865 case DW_OP_regval_type:
30866 case DW_OP_deref_type:
30867 case DW_OP_GNU_regval_type:
30868 case DW_OP_GNU_deref_type:
30869 return valx1->v.val_int == valy1->v.val_int
30870 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30871 case DW_OP_convert:
30872 case DW_OP_reinterpret:
30873 case DW_OP_GNU_convert:
30874 case DW_OP_GNU_reinterpret:
30875 if (valx1->val_class != valy1->val_class)
30876 return false;
30877 if (valx1->val_class == dw_val_class_unsigned_const)
30878 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30879 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30880 case DW_OP_GNU_parameter_ref:
30881 return valx1->val_class == dw_val_class_die_ref
30882 && valx1->val_class == valy1->val_class
30883 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30884 default:
30885 /* Other codes have no operands. */
30886 return true;
30887 }
30888 }
30889
30890 /* Return true if DWARF location expressions X and Y are the same. */
30891
30892 static inline bool
30893 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30894 {
30895 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30896 if (x->dw_loc_opc != y->dw_loc_opc
30897 || x->dtprel != y->dtprel
30898 || !compare_loc_operands (x, y))
30899 break;
30900 return x == NULL && y == NULL;
30901 }
30902
30903 /* Hashtable helpers. */
30904
30905 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30906 {
30907 static inline hashval_t hash (const dw_loc_list_struct *);
30908 static inline bool equal (const dw_loc_list_struct *,
30909 const dw_loc_list_struct *);
30910 };
30911
30912 /* Return precomputed hash of location list X. */
30913
30914 inline hashval_t
30915 loc_list_hasher::hash (const dw_loc_list_struct *x)
30916 {
30917 return x->hash;
30918 }
30919
30920 /* Return true if location lists A and B are the same. */
30921
30922 inline bool
30923 loc_list_hasher::equal (const dw_loc_list_struct *a,
30924 const dw_loc_list_struct *b)
30925 {
30926 if (a == b)
30927 return 1;
30928 if (a->hash != b->hash)
30929 return 0;
30930 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30931 if (strcmp (a->begin, b->begin) != 0
30932 || strcmp (a->end, b->end) != 0
30933 || (a->section == NULL) != (b->section == NULL)
30934 || (a->section && strcmp (a->section, b->section) != 0)
30935 || a->vbegin != b->vbegin || a->vend != b->vend
30936 || !compare_locs (a->expr, b->expr))
30937 break;
30938 return a == NULL && b == NULL;
30939 }
30940
30941 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30942
30943
30944 /* Recursively optimize location lists referenced from DIE
30945 children and share them whenever possible. */
30946
30947 static void
30948 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30949 {
30950 dw_die_ref c;
30951 dw_attr_node *a;
30952 unsigned ix;
30953 dw_loc_list_struct **slot;
30954 bool drop_locviews = false;
30955 bool has_locviews = false;
30956
30957 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30958 if (AT_class (a) == dw_val_class_loc_list)
30959 {
30960 dw_loc_list_ref list = AT_loc_list (a);
30961 /* TODO: perform some optimizations here, before hashing
30962 it and storing it into the hash table. */
30963 hash_loc_list (list);
30964 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30965 if (*slot == NULL)
30966 {
30967 *slot = list;
30968 if (loc_list_has_views (list))
30969 gcc_assert (list->vl_symbol);
30970 else if (list->vl_symbol)
30971 {
30972 drop_locviews = true;
30973 list->vl_symbol = NULL;
30974 }
30975 }
30976 else
30977 {
30978 if (list->vl_symbol && !(*slot)->vl_symbol)
30979 drop_locviews = true;
30980 a->dw_attr_val.v.val_loc_list = *slot;
30981 }
30982 }
30983 else if (AT_class (a) == dw_val_class_view_list)
30984 {
30985 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30986 has_locviews = true;
30987 }
30988
30989
30990 if (drop_locviews && has_locviews)
30991 remove_AT (die, DW_AT_GNU_locviews);
30992
30993 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30994 }
30995
30996
30997 /* Recursively assign each location list a unique index into the debug_addr
30998 section. */
30999
31000 static void
31001 index_location_lists (dw_die_ref die)
31002 {
31003 dw_die_ref c;
31004 dw_attr_node *a;
31005 unsigned ix;
31006
31007 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31008 if (AT_class (a) == dw_val_class_loc_list)
31009 {
31010 dw_loc_list_ref list = AT_loc_list (a);
31011 dw_loc_list_ref curr;
31012 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31013 {
31014 /* Don't index an entry that has already been indexed
31015 or won't be output. Make sure skip_loc_list_entry doesn't
31016 call size_of_locs, because that might cause a circular
31017 dependency: index_location_lists requires the address table
31018 indexes to be computed, while computing those indexes requires
31019 that no new entries be added through add_addr_table_entry
31020 afterwards. In the rare case of a DWARF[234] location
31021 expression >= 64KB, we'll just waste an unused address table
31022 entry for it. */
31023 if (curr->begin_entry != NULL
31024 || skip_loc_list_entry (curr))
31025 continue;
31026
31027 curr->begin_entry
31028 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31029 }
31030 }
31031
31032 FOR_EACH_CHILD (die, c, index_location_lists (c));
31033 }
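/* Note (a summary of the flow, not additional behavior): the begin_entry
   recorded above is a .debug_addr table entry; with -gsplit-dwarf the
   location list output later refers to each range start through that
   entry's index instead of through a relocation against the label itself.  */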
31034
31035 /* Optimize location lists referenced from DIE
31036 children and share them whenever possible. */
31037
31038 static void
31039 optimize_location_lists (dw_die_ref die)
31040 {
31041 loc_list_hash_type htab (500);
31042 optimize_location_lists_1 (die, &htab);
31043 }
31044 \f
31045 /* Traverse the limbo die list, and add parent/child links. The only
31046 dies without parents that should be here are concrete instances of
31047 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31048 For concrete instances, we can get the parent die from the abstract
31049 instance. */
31050
31051 static void
31052 flush_limbo_die_list (void)
31053 {
31054 limbo_die_node *node;
31055
31056 /* get_context_die calls force_decl_die, which can put new DIEs on the
31057 limbo list in LTO mode when nested functions are put in a different
31058 partition than that of their parent function. */
31059 while ((node = limbo_die_list))
31060 {
31061 dw_die_ref die = node->die;
31062 limbo_die_list = node->next;
31063
31064 if (die->die_parent == NULL)
31065 {
31066 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31067
31068 if (origin && origin->die_parent)
31069 add_child_die (origin->die_parent, die);
31070 else if (is_cu_die (die))
31071 ;
31072 else if (seen_error ())
31073 /* It's OK to be confused by errors in the input. */
31074 add_child_die (comp_unit_die (), die);
31075 else
31076 {
31077 /* In certain situations, the lexical block containing a
31078 nested function can be optimized away, which results
31079 in the nested function die being orphaned. Likewise
31080 with the return type of that nested function. Force
31081 this to be a child of the containing function.
31082
31083 It may happen that even the containing function got fully
31084 inlined and optimized out. In that case we are lost and
31085 attach the orphaned DIE to the compilation unit DIE. This should
31086 not be a big issue as the function is likely unreachable too. */
31087 gcc_assert (node->created_for);
31088
31089 if (DECL_P (node->created_for))
31090 origin = get_context_die (DECL_CONTEXT (node->created_for));
31091 else if (TYPE_P (node->created_for))
31092 origin = scope_die_for (node->created_for, comp_unit_die ());
31093 else
31094 origin = comp_unit_die ();
31095
31096 add_child_die (origin, die);
31097 }
31098 }
31099 }
31100 }
31101
31102 /* Reset DIEs so we can output them again. */
31103
31104 static void
31105 reset_dies (dw_die_ref die)
31106 {
31107 dw_die_ref c;
31108
31109 /* Remove stuff we re-generate. */
31110 die->die_mark = 0;
31111 die->die_offset = 0;
31112 die->die_abbrev = 0;
31113 remove_AT (die, DW_AT_sibling);
31114
31115 FOR_EACH_CHILD (die, c, reset_dies (c));
31116 }
31117
31118 /* Output stuff that dwarf requires at the end of every file,
31119 and generate the DWARF-2 debugging info. */
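/* Overview of the steps performed below: flush the limbo DIE list, resolve
   addresses, add sibling attributes, set up the skeleton unit when
   -gsplit-dwarf is in use, record text/ranges and line-table attributes,
   index and optimize location lists, output the compilation units and
   comdat type units, and finally emit the abbrev, loc/loclists, aranges,
   ranges/rnglists, macinfo, line and string sections.  */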
31120
31121 static void
31122 dwarf2out_finish (const char *filename)
31123 {
31124 comdat_type_node *ctnode;
31125 dw_die_ref main_comp_unit_die;
31126 unsigned char checksum[16];
31127 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31128
31129 /* Flush out any latecomers to the limbo party. */
31130 flush_limbo_die_list ();
31131
31132 if (inline_entry_data_table)
31133 gcc_assert (inline_entry_data_table->elements () == 0);
31134
31135 if (flag_checking)
31136 {
31137 verify_die (comp_unit_die ());
31138 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31139 verify_die (node->die);
31140 }
31141
31142 /* We shouldn't have any symbols with delayed asm names for
31143 DIEs generated after early finish. */
31144 gcc_assert (deferred_asm_name == NULL);
31145
31146 gen_remaining_tmpl_value_param_die_attribute ();
31147
31148 if (flag_generate_lto || flag_generate_offload)
31149 {
31150 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31151
31152 /* Prune stuff so that dwarf2out_finish runs successfully
31153 for the fat part of the object. */
31154 reset_dies (comp_unit_die ());
31155 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31156 reset_dies (node->die);
31157
31158 hash_table<comdat_type_hasher> comdat_type_table (100);
31159 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31160 {
31161 comdat_type_node **slot
31162 = comdat_type_table.find_slot (ctnode, INSERT);
31163
31164 /* Don't reset types twice. */
31165 if (*slot != HTAB_EMPTY_ENTRY)
31166 continue;
31167
31168 /* Remove the pointer to the line table. */
31169 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31170
31171 if (debug_info_level >= DINFO_LEVEL_TERSE)
31172 reset_dies (ctnode->root_die);
31173
31174 *slot = ctnode;
31175 }
31176
31177 /* Reset the CU DIE symbol so we don't output it twice. */
31178 comp_unit_die ()->die_id.die_symbol = NULL;
31179
31180 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31181 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31182 if (have_macinfo)
31183 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31184
31185 /* Remove indirect string decisions. */
31186 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31187 if (debug_line_str_hash)
31188 {
31189 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31190 debug_line_str_hash = NULL;
31191 }
31192 }
31193
31194 #if ENABLE_ASSERT_CHECKING
31195 {
31196 dw_die_ref die = comp_unit_die (), c;
31197 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31198 }
31199 #endif
31200 resolve_addr (comp_unit_die ());
31201 move_marked_base_types ();
31202
31203 if (dump_file)
31204 {
31205 fprintf (dump_file, "DWARF for %s\n", filename);
31206 print_die (comp_unit_die (), dump_file);
31207 }
31208
31209 /* Initialize sections and labels used for actual assembler output. */
31210 unsigned generation = init_sections_and_labels (false);
31211
31212 /* Traverse the DIE's and add sibling attributes to those DIE's that
31213 have children. */
31214 add_sibling_attributes (comp_unit_die ());
31215 limbo_die_node *node;
31216 for (node = cu_die_list; node; node = node->next)
31217 add_sibling_attributes (node->die);
31218 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31219 add_sibling_attributes (ctnode->root_die);
31220
31221 /* When splitting DWARF info, we put some attributes in the
31222 skeleton compile_unit DIE that remains in the .o, while
31223 most attributes go in the DWO compile_unit_die. */
31224 if (dwarf_split_debug_info)
31225 {
31226 limbo_die_node *cu;
31227 main_comp_unit_die = gen_compile_unit_die (NULL);
31228 if (dwarf_version >= 5)
31229 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31230 cu = limbo_die_list;
31231 gcc_assert (cu->die == main_comp_unit_die);
31232 limbo_die_list = limbo_die_list->next;
31233 cu->next = cu_die_list;
31234 cu_die_list = cu;
31235 }
31236 else
31237 main_comp_unit_die = comp_unit_die ();
31238
31239 /* Output a terminator label for the .text section. */
31240 switch_to_section (text_section);
31241 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31242 if (cold_text_section)
31243 {
31244 switch_to_section (cold_text_section);
31245 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31246 }
31247
31248 /* We can only use the low/high_pc attributes if all of the code was
31249 in .text. */
31250 if (!have_multiple_function_sections
31251 || (dwarf_version < 3 && dwarf_strict))
31252 {
31253 /* Don't add if the CU has no associated code. */
31254 if (text_section_used)
31255 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31256 text_end_label, true);
31257 }
31258 else
31259 {
31260 unsigned fde_idx;
31261 dw_fde_ref fde;
31262 bool range_list_added = false;
31263
31264 if (text_section_used)
31265 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31266 text_end_label, &range_list_added, true);
31267 if (cold_text_section_used)
31268 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31269 cold_end_label, &range_list_added, true);
31270
31271 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31272 {
31273 if (DECL_IGNORED_P (fde->decl))
31274 continue;
31275 if (!fde->in_std_section)
31276 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31277 fde->dw_fde_end, &range_list_added,
31278 true);
31279 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31280 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31281 fde->dw_fde_second_end, &range_list_added,
31282 true);
31283 }
31284
31285 if (range_list_added)
31286 {
31287 /* We need to give .debug_loc and .debug_ranges an appropriate
31288 "base address". Use zero so that these addresses become
31289 absolute. Historically, we've emitted the unexpected
31290 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31291 Emit both to give time for other tools to adapt. */
31292 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31293 if (! dwarf_strict && dwarf_version < 4)
31294 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31295
31296 add_ranges (NULL);
31297 }
31298 }
31299
31300 /* AIX Assembler inserts the length, so adjust the reference to match the
31301 offset expected by debuggers. */
31302 strcpy (dl_section_ref, debug_line_section_label);
31303 if (XCOFF_DEBUGGING_INFO)
31304 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31305
31306 if (debug_info_level >= DINFO_LEVEL_TERSE)
31307 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31308 dl_section_ref);
31309
31310 if (have_macinfo)
31311 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31312 macinfo_section_label);
31313
31314 if (dwarf_split_debug_info)
31315 {
31316 if (have_location_lists)
31317 {
31318 /* Since we generate the loclists in the split DWARF .dwo
31319 file itself, we don't need to generate a loclists_base
31320 attribute for the split compile unit DIE. That attribute
31321 (and using relocatable sec_offset FORMs) isn't allowed
31322 for a split compile unit. Only if the .debug_loclists
31323 section was in the main file, would we need to generate a
31324 loclists_base attribute here (for the full or skeleton
31325 unit DIE). */
31326
31327 /* optimize_location_lists calculates the size of the lists,
31328 so index them first, and assign indices to the entries.
31329 Although optimize_location_lists will remove entries from
31330 the table, it only does so for duplicates, and therefore
31331 only reduces ref_counts to 1. */
31332 index_location_lists (comp_unit_die ());
31333 }
31334
31335 if (addr_index_table != NULL)
31336 {
31337 unsigned int index = 0;
31338 addr_index_table
31339 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31340 (&index);
31341 }
31342 }
31343
31344 loc_list_idx = 0;
31345 if (have_location_lists)
31346 {
31347 optimize_location_lists (comp_unit_die ());
31348 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31349 if (dwarf_version >= 5 && dwarf_split_debug_info)
31350 assign_location_list_indexes (comp_unit_die ());
31351 }
31352
31353 save_macinfo_strings ();
31354
31355 if (dwarf_split_debug_info)
31356 {
31357 unsigned int index = 0;
31358
31359 /* Add attributes common to skeleton compile_units and
31360 type_units. Because these attributes include strings, it
31361 must be done before freezing the string table. Top-level
31362 skeleton die attrs are added when the skeleton type unit is
31363 created, so ensure it is created by this point. */
31364 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31365 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31366 }
31367
31368 /* Output all of the compilation units. We put the main one last so that
31369 the offsets are available to output_pubnames. */
31370 for (node = cu_die_list; node; node = node->next)
31371 output_comp_unit (node->die, 0, NULL);
31372
31373 hash_table<comdat_type_hasher> comdat_type_table (100);
31374 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31375 {
31376 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31377
31378 /* Don't output duplicate types. */
31379 if (*slot != HTAB_EMPTY_ENTRY)
31380 continue;
31381
31382 /* Add a pointer to the line table for the main compilation unit
31383 so that the debugger can make sense of DW_AT_decl_file
31384 attributes. */
31385 if (debug_info_level >= DINFO_LEVEL_TERSE)
31386 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31387 (!dwarf_split_debug_info
31388 ? dl_section_ref
31389 : debug_skeleton_line_section_label));
31390
31391 output_comdat_type_unit (ctnode);
31392 *slot = ctnode;
31393 }
31394
31395 if (dwarf_split_debug_info)
31396 {
31397 int mark;
31398 struct md5_ctx ctx;
31399
31400 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31401 index_rnglists ();
31402
31403 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31404 md5_init_ctx (&ctx);
31405 mark = 0;
31406 die_checksum (comp_unit_die (), &ctx, &mark);
31407 unmark_all_dies (comp_unit_die ());
31408 md5_finish_ctx (&ctx, checksum);
31409
31410 if (dwarf_version < 5)
31411 {
31412 /* Use the first 8 bytes of the checksum as the dwo_id,
31413 and add it to both comp-unit DIEs. */
31414 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31415 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31416 }
31417
31418 /* Add the base offset of the ranges table to the skeleton
31419 comp-unit DIE. */
31420 if (!vec_safe_is_empty (ranges_table))
31421 {
31422 if (dwarf_version >= 5)
31423 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31424 ranges_base_label);
31425 else
31426 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31427 ranges_section_label);
31428 }
31429
31430 switch_to_section (debug_addr_section);
31431 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31432 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31433 before DWARF5, didn't have a header for .debug_addr units.
31434 DWARF5 specifies a small header when address tables are used. */
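/* For reference, with 32-bit DWARF, 8-byte addresses and x86-style gas
   directives, the header emitted below would assemble to roughly:
       .long   addrs_length    # unit_length
       .value  0x5             # version
       .byte   0x8             # address_size
       .byte   0               # segment_selector_size
   (an illustrative sketch only; the actual directives and their comments
   are produced by dw2_asm_output_data).  */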
31435 if (dwarf_version >= 5)
31436 {
31437 unsigned int last_idx = 0;
31438 unsigned long addrs_length;
31439
31440 addr_index_table->traverse_noresize
31441 <unsigned int *, count_index_addrs> (&last_idx);
31442 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31443
31444 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31445 dw2_asm_output_data (4, 0xffffffff,
31446 "Escape value for 64-bit DWARF extension");
31447 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31448 "Length of Address Unit");
31449 dw2_asm_output_data (2, 5, "DWARF addr version");
31450 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31451 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31452 }
31453 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31454 output_addr_table ();
31455 }
31456
31457 /* Output the main compilation unit if non-empty or if .debug_macinfo
31458 or .debug_macro will be emitted. */
31459 output_comp_unit (comp_unit_die (), have_macinfo,
31460 dwarf_split_debug_info ? checksum : NULL);
31461
31462 if (dwarf_split_debug_info && info_section_emitted)
31463 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31464
31465 /* Output the abbreviation table. */
31466 if (vec_safe_length (abbrev_die_table) != 1)
31467 {
31468 switch_to_section (debug_abbrev_section);
31469 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31470 output_abbrev_section ();
31471 }
31472
31473 /* Output location list section if necessary. */
31474 if (have_location_lists)
31475 {
31476 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31477 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31478 /* Output the location lists info. */
31479 switch_to_section (debug_loc_section);
31480 if (dwarf_version >= 5)
31481 {
31482 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31483 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31484 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31485 dw2_asm_output_data (4, 0xffffffff,
31486 "Initial length escape value indicating "
31487 "64-bit DWARF extension");
31488 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31489 "Length of Location Lists");
31490 ASM_OUTPUT_LABEL (asm_out_file, l1);
31491 output_dwarf_version ();
31492 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31493 dw2_asm_output_data (1, 0, "Segment Size");
31494 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31495 "Offset Entry Count");
31496 }
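/* The fields emitted above form the DWARF5 .debug_loclists header:
   unit_length, version (5), address_size, segment_selector_size and
   offset_entry_count; with -gsplit-dwarf the offset entries themselves
   are written right below by output_loclists_offsets.  */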
31497 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31498 if (dwarf_version >= 5 && dwarf_split_debug_info)
31499 {
31500 unsigned int save_loc_list_idx = loc_list_idx;
31501 loc_list_idx = 0;
31502 output_loclists_offsets (comp_unit_die ());
31503 gcc_assert (save_loc_list_idx == loc_list_idx);
31504 }
31505 output_location_lists (comp_unit_die ());
31506 if (dwarf_version >= 5)
31507 ASM_OUTPUT_LABEL (asm_out_file, l2);
31508 }
31509
31510 output_pubtables ();
31511
31512 /* Output the address range information if a CU (.debug_info section)
31513 was emitted. We output an empty table even if we had no functions
31514 to put in it. This is because the consumer has no way to tell the
31515 difference between an empty table that we omitted and failure to
31516 generate a table that would have contained data. */
31517 if (info_section_emitted)
31518 {
31519 switch_to_section (debug_aranges_section);
31520 output_aranges ();
31521 }
31522
31523 /* Output ranges section if necessary. */
31524 if (!vec_safe_is_empty (ranges_table))
31525 {
31526 if (dwarf_version >= 5)
31527 output_rnglists (generation);
31528 else
31529 output_ranges ();
31530 }
31531
31532 /* Have to end the macro section. */
31533 if (have_macinfo)
31534 {
31535 switch_to_section (debug_macinfo_section);
31536 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31537 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31538 : debug_skeleton_line_section_label, false);
31539 dw2_asm_output_data (1, 0, "End compilation unit");
31540 }
31541
31542 /* Output the source line correspondence table. We must do this
31543 even if there is no line information. Otherwise, on an empty
31544 translation unit, we will generate a present, but empty,
31545 .debug_info section. IRIX 6.5 `nm' will then complain when
31546 examining the file. This is done late so that any filenames
31547 used by the debug_info section are marked as 'used'. */
31548 switch_to_section (debug_line_section);
31549 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31550 if (! output_asm_line_debug_info ())
31551 output_line_info (false);
31552
31553 if (dwarf_split_debug_info && info_section_emitted)
31554 {
31555 switch_to_section (debug_skeleton_line_section);
31556 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31557 output_line_info (true);
31558 }
31559
31560 /* If we emitted any indirect strings, output the string table too. */
31561 if (debug_str_hash || skeleton_debug_str_hash)
31562 output_indirect_strings ();
31563 if (debug_line_str_hash)
31564 {
31565 switch_to_section (debug_line_str_section);
31566 const enum dwarf_form form = DW_FORM_line_strp;
31567 debug_line_str_hash->traverse<enum dwarf_form,
31568 output_indirect_string> (form);
31569 }
31570
31571 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31572 symview_upper_bound = 0;
31573 if (zero_view_p)
31574 bitmap_clear (zero_view_p);
31575 }
31576
31577 /* Returns a hash value for X (which really is a variable_value_struct). */
31578
31579 inline hashval_t
31580 variable_value_hasher::hash (variable_value_struct *x)
31581 {
31582 return (hashval_t) x->decl_id;
31583 }
31584
31585 /* Return nonzero if decl_id of variable_value_struct X is the same as
31586 UID of decl Y. */
31587
31588 inline bool
31589 variable_value_hasher::equal (variable_value_struct *x, tree y)
31590 {
31591 return x->decl_id == DECL_UID (y);
31592 }
31593
31594 /* Helper function for resolve_variable_value; handle
31595 DW_OP_GNU_variable_value in one location expression.
31596 Return true if the exprloc has been changed into a loclist. */
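/* In outline (a summary of the cases handled below): if the referenced decl
   already has a DIE, the operand becomes a DIE reference; if its location
   expands to a single expression, that expression is spliced in place of the
   DW_OP_GNU_variable_value operation; if it expands to a location list, the
   whole attribute is turned into a loclist where the attribute permits it,
   otherwise a DW_TAG_variable DIE is created so the operand can refer to it.  */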
31597
31598 static bool
31599 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31600 {
31601 dw_loc_descr_ref next;
31602 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31603 {
31604 next = loc->dw_loc_next;
31605 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31606 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31607 continue;
31608
31609 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31610 if (DECL_CONTEXT (decl) != current_function_decl)
31611 continue;
31612
31613 dw_die_ref ref = lookup_decl_die (decl);
31614 if (ref)
31615 {
31616 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31617 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31618 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31619 continue;
31620 }
31621 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31622 if (l == NULL)
31623 continue;
31624 if (l->dw_loc_next)
31625 {
31626 if (AT_class (a) != dw_val_class_loc)
31627 continue;
31628 switch (a->dw_attr)
31629 {
31630 /* The following attributes allow both exprloc and loclist
31631 classes, so we can change them into a loclist. */
31632 case DW_AT_location:
31633 case DW_AT_string_length:
31634 case DW_AT_return_addr:
31635 case DW_AT_data_member_location:
31636 case DW_AT_frame_base:
31637 case DW_AT_segment:
31638 case DW_AT_static_link:
31639 case DW_AT_use_location:
31640 case DW_AT_vtable_elem_location:
31641 if (prev)
31642 {
31643 prev->dw_loc_next = NULL;
31644 prepend_loc_descr_to_each (l, AT_loc (a));
31645 }
31646 if (next)
31647 add_loc_descr_to_each (l, next);
31648 a->dw_attr_val.val_class = dw_val_class_loc_list;
31649 a->dw_attr_val.val_entry = NULL;
31650 a->dw_attr_val.v.val_loc_list = l;
31651 have_location_lists = true;
31652 return true;
31653 /* The following attributes allow both exprloc and reference,
31654 so if the whole expression is DW_OP_GNU_variable_value alone
31655 we could transform it into a reference. */
31656 case DW_AT_byte_size:
31657 case DW_AT_bit_size:
31658 case DW_AT_lower_bound:
31659 case DW_AT_upper_bound:
31660 case DW_AT_bit_stride:
31661 case DW_AT_count:
31662 case DW_AT_allocated:
31663 case DW_AT_associated:
31664 case DW_AT_byte_stride:
31665 if (prev == NULL && next == NULL)
31666 break;
31667 /* FALLTHRU */
31668 default:
31669 if (dwarf_strict)
31670 continue;
31671 break;
31672 }
31673 /* Create DW_TAG_variable that we can refer to. */
31674 gen_decl_die (decl, NULL_TREE, NULL,
31675 lookup_decl_die (current_function_decl));
31676 ref = lookup_decl_die (decl);
31677 if (ref)
31678 {
31679 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31680 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31681 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31682 }
31683 continue;
31684 }
31685 if (prev)
31686 {
31687 prev->dw_loc_next = l->expr;
31688 add_loc_descr (&prev->dw_loc_next, next);
31689 free_loc_descr (loc, NULL);
31690 next = prev->dw_loc_next;
31691 }
31692 else
31693 {
31694 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31695 add_loc_descr (&loc, next);
31696 next = loc;
31697 }
31698 loc = prev;
31699 }
31700 return false;
31701 }
31702
31703 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31704
31705 static void
31706 resolve_variable_value (dw_die_ref die)
31707 {
31708 dw_attr_node *a;
31709 dw_loc_list_ref loc;
31710 unsigned ix;
31711
31712 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31713 switch (AT_class (a))
31714 {
31715 case dw_val_class_loc:
31716 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31717 break;
31718 /* FALLTHRU */
31719 case dw_val_class_loc_list:
31720 loc = AT_loc_list (a);
31721 gcc_assert (loc);
31722 for (; loc; loc = loc->dw_loc_next)
31723 resolve_variable_value_in_expr (a, loc->expr);
31724 break;
31725 default:
31726 break;
31727 }
31728 }
31729
31730 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31731 temporaries in the current function. */
31732
31733 static void
31734 resolve_variable_values (void)
31735 {
31736 if (!variable_value_hash || !current_function_decl)
31737 return;
31738
31739 struct variable_value_struct *node
31740 = variable_value_hash->find_with_hash (current_function_decl,
31741 DECL_UID (current_function_decl));
31742
31743 if (node == NULL)
31744 return;
31745
31746 unsigned int i;
31747 dw_die_ref die;
31748 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31749 resolve_variable_value (die);
31750 }
31751
31752 /* Helper function for note_variable_value, handle one location
31753 expression. */
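/* In short (a summary of the loop below): operands that can already be
   resolved get rewritten to DIE references here, creating the referenced
   DIE eagerly when generating LTO or offload output; unresolved references
   to another function's local variables are recorded in variable_value_hash,
   keyed by that function, so resolve_variable_values can retry when the
   function itself is compiled.  */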
31754
31755 static void
31756 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31757 {
31758 for (; loc; loc = loc->dw_loc_next)
31759 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31760 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31761 {
31762 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31763 dw_die_ref ref = lookup_decl_die (decl);
31764 if (! ref && (flag_generate_lto || flag_generate_offload))
31765 {
31766 /* ??? This is somewhat of a hack because we do not create DIEs
31767 for variables not in BLOCK trees early, but when generating
31768 early LTO output we need the dw_val_class_decl_ref to be
31769 fully resolved. For fat LTO objects we'd also like to
31770 undo this after LTO dwarf output. */
31771 gcc_assert (DECL_CONTEXT (decl));
31772 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31773 gcc_assert (ctx != NULL);
31774 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31775 ref = lookup_decl_die (decl);
31776 gcc_assert (ref != NULL);
31777 }
31778 if (ref)
31779 {
31780 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31781 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31782 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31783 continue;
31784 }
31785 if (VAR_P (decl)
31786 && DECL_CONTEXT (decl)
31787 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31788 && lookup_decl_die (DECL_CONTEXT (decl)))
31789 {
31790 if (!variable_value_hash)
31791 variable_value_hash
31792 = hash_table<variable_value_hasher>::create_ggc (10);
31793
31794 tree fndecl = DECL_CONTEXT (decl);
31795 struct variable_value_struct *node;
31796 struct variable_value_struct **slot
31797 = variable_value_hash->find_slot_with_hash (fndecl,
31798 DECL_UID (fndecl),
31799 INSERT);
31800 if (*slot == NULL)
31801 {
31802 node = ggc_cleared_alloc<variable_value_struct> ();
31803 node->decl_id = DECL_UID (fndecl);
31804 *slot = node;
31805 }
31806 else
31807 node = *slot;
31808
31809 vec_safe_push (node->dies, die);
31810 }
31811 }
31812 }
31813
31814 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31815 with dw_val_class_decl_ref operand. */
31816
31817 static void
31818 note_variable_value (dw_die_ref die)
31819 {
31820 dw_die_ref c;
31821 dw_attr_node *a;
31822 dw_loc_list_ref loc;
31823 unsigned ix;
31824
31825 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31826 switch (AT_class (a))
31827 {
31828 case dw_val_class_loc_list:
31829 loc = AT_loc_list (a);
31830 gcc_assert (loc);
31831 if (!loc->noted_variable_value)
31832 {
31833 loc->noted_variable_value = 1;
31834 for (; loc; loc = loc->dw_loc_next)
31835 note_variable_value_in_expr (die, loc->expr);
31836 }
31837 break;
31838 case dw_val_class_loc:
31839 note_variable_value_in_expr (die, AT_loc (a));
31840 break;
31841 default:
31842 break;
31843 }
31844
31845 /* Mark children. */
31846 FOR_EACH_CHILD (die, c, note_variable_value (c));
31847 }
31848
31849 /* Perform any cleanups needed after the early debug generation pass
31850 has run. */
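/* Besides finalizing the early DIEs (producer string, file names, incomplete
   types, comdat type units, pruning of unused types), this is also where the
   early LTO debug sections are emitted when generating LTO bytecode or
   offload code, using a reduced variant of the dwarf2out_finish output
   sequence with the location-related output removed.  */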
31851
31852 static void
31853 dwarf2out_early_finish (const char *filename)
31854 {
31855 set_early_dwarf s;
31856 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31857
31858 /* PCH might result in the DW_AT_producer string being restored from the
31859 header compilation, so always fill it with an empty string initially
31860 and overwrite it only here. */
31861 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31862 producer_string = gen_producer_string ();
31863 producer->dw_attr_val.v.val_str->refcount--;
31864 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31865
31866 /* Add the name for the main input file now. We delayed this from
31867 dwarf2out_init to avoid complications with PCH. */
31868 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31869 add_comp_dir_attribute (comp_unit_die ());
31870
31871 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31872 DW_AT_comp_dir into .debug_line_str section. */
31873 if (!output_asm_line_debug_info ()
31874 && dwarf_version >= 5
31875 && DWARF5_USE_DEBUG_LINE_STR)
31876 {
31877 for (int i = 0; i < 2; i++)
31878 {
31879 dw_attr_node *a = get_AT (comp_unit_die (),
31880 i ? DW_AT_comp_dir : DW_AT_name);
31881 if (a == NULL
31882 || AT_class (a) != dw_val_class_str
31883 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31884 continue;
31885
31886 if (! debug_line_str_hash)
31887 debug_line_str_hash
31888 = hash_table<indirect_string_hasher>::create_ggc (10);
31889
31890 struct indirect_string_node *node
31891 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31892 set_indirect_string (node);
31893 node->form = DW_FORM_line_strp;
31894 a->dw_attr_val.v.val_str->refcount--;
31895 a->dw_attr_val.v.val_str = node;
31896 }
31897 }
31898
31899 /* With LTO early dwarf was really finished at compile-time, so make
31900 sure to adjust the phase after annotating the LTRANS CU DIE. */
31901 if (in_lto_p)
31902 {
31903 /* Force DW_TAG_imported_unit to be created now, otherwise
31904 we might end up without it, or with it ordered after a
31905 DW_TAG_inlined_subroutine referencing DIEs from it. */
31906 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
31907 {
31908 unsigned i;
31909 tree tu;
31910 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
31911 maybe_create_die_with_external_ref (tu);
31912 }
31913
31914 early_dwarf_finished = true;
31915 if (dump_file)
31916 {
31917 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31918 print_die (comp_unit_die (), dump_file);
31919 }
31920 return;
31921 }
31922
31923 /* Walk through the list of incomplete types again, trying once more to
31924 emit full debugging info for them. */
31925 retry_incomplete_types ();
31926
31927 /* The point here is to flush out the limbo list so that it is empty
31928 and we don't need to stream it for LTO. */
31929 flush_limbo_die_list ();
31930
31931 gen_scheduled_generic_parms_dies ();
31932 gen_remaining_tmpl_value_param_die_attribute ();
31933
31934 /* Add DW_AT_linkage_name for all deferred DIEs. */
31935 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31936 {
31937 tree decl = node->created_for;
31938 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31939 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31940 ended up in deferred_asm_name before we knew it was
31941 constant and never written to disk. */
31942 && DECL_ASSEMBLER_NAME (decl))
31943 {
31944 add_linkage_attr (node->die, decl);
31945 move_linkage_attr (node->die);
31946 }
31947 }
31948 deferred_asm_name = NULL;
31949
31950 if (flag_eliminate_unused_debug_types)
31951 prune_unused_types ();
31952
31953 /* Generate separate COMDAT sections for type DIEs. */
31954 if (use_debug_types)
31955 {
31956 break_out_comdat_types (comp_unit_die ());
31957
31958 /* Each new type_unit DIE was added to the limbo die list when created.
31959 Since these have all been added to comdat_type_list, clear the
31960 limbo die list. */
31961 limbo_die_list = NULL;
31962
31963 /* For each new comdat type unit, copy declarations for incomplete
31964 types to make the new unit self-contained (i.e., no direct
31965 references to the main compile unit). */
31966 for (comdat_type_node *ctnode = comdat_type_list;
31967 ctnode != NULL; ctnode = ctnode->next)
31968 copy_decls_for_unworthy_types (ctnode->root_die);
31969 copy_decls_for_unworthy_types (comp_unit_die ());
31970
31971 /* In the process of copying declarations from one unit to another,
31972 we may have left some declarations behind that are no longer
31973 referenced. Prune them. */
31974 prune_unused_types ();
31975 }
31976
31977 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31978 with dw_val_class_decl_ref operand. */
31979 note_variable_value (comp_unit_die ());
31980 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31981 note_variable_value (node->die);
31982 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31983 ctnode = ctnode->next)
31984 note_variable_value (ctnode->root_die);
31985 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31986 note_variable_value (node->die);
31987
31988 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31989 both the main_cu and all skeleton TUs. Making this call unconditional
31990 would end up either adding a second copy of the AT_pubnames attribute, or
31991 requiring a special case in add_top_level_skeleton_die_attrs. */
31992 if (!dwarf_split_debug_info)
31993 add_AT_pubnames (comp_unit_die ());
31994
31995 /* The early debug phase is now finished. */
31996 early_dwarf_finished = true;
31997 if (dump_file)
31998 {
31999 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32000 print_die (comp_unit_die (), dump_file);
32001 }
32002
32003 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32004 if ((!flag_generate_lto && !flag_generate_offload)
32005 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32006 copy_lto_debug_sections operation of the simple object support in
32007 libiberty is not implemented for them yet. */
32008 || TARGET_PECOFF || TARGET_COFF)
32009 return;
32010
32011 /* Now that we are going to output for LTO, initialize sections and labels
32012 to the LTO variants. We don't need a random-seed postfix as other
32013 LTO sections do, since linking the LTO debug sections into one in a
32014 partial link is fine. */
32015 init_sections_and_labels (true);
32016
32017 /* The output below is modeled after dwarf2out_finish with all
32018 location related output removed and some LTO specific changes.
32019 Some refactoring might make both smaller and easier to match up. */
32020
32021 /* Traverse the DIE's and add sibling attributes to those DIE's
32022 that have children. */
32023 add_sibling_attributes (comp_unit_die ());
32024 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32025 add_sibling_attributes (node->die);
32026 for (comdat_type_node *ctnode = comdat_type_list;
32027 ctnode != NULL; ctnode = ctnode->next)
32028 add_sibling_attributes (ctnode->root_die);
32029
32030 /* AIX Assembler inserts the length, so adjust the reference to match the
32031 offset expected by debuggers. */
32032 strcpy (dl_section_ref, debug_line_section_label);
32033 if (XCOFF_DEBUGGING_INFO)
32034 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32035
32036 if (debug_info_level >= DINFO_LEVEL_TERSE)
32037 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32038
32039 if (have_macinfo)
32040 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32041 macinfo_section_label);
32042
32043 save_macinfo_strings ();
32044
32045 if (dwarf_split_debug_info)
32046 {
32047 unsigned int index = 0;
32048 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32049 }
32050
32051 /* Output all of the compilation units. We put the main one last so that
32052 the offsets are available to output_pubnames. */
32053 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32054 output_comp_unit (node->die, 0, NULL);
32055
32056 hash_table<comdat_type_hasher> comdat_type_table (100);
32057 for (comdat_type_node *ctnode = comdat_type_list;
32058 ctnode != NULL; ctnode = ctnode->next)
32059 {
32060 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32061
32062 /* Don't output duplicate types. */
32063 if (*slot != HTAB_EMPTY_ENTRY)
32064 continue;
32065
32066 /* Add a pointer to the line table for the main compilation unit
32067 so that the debugger can make sense of DW_AT_decl_file
32068 attributes. */
32069 if (debug_info_level >= DINFO_LEVEL_TERSE)
32070 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32071 (!dwarf_split_debug_info
32072 ? debug_line_section_label
32073 : debug_skeleton_line_section_label));
32074
32075 output_comdat_type_unit (ctnode);
32076 *slot = ctnode;
32077 }
32078
32079 /* Attach a unique symbol to the main debuginfo section. */
32080 compute_comp_unit_symbol (comp_unit_die ());
32081
32082 /* Output the main compilation unit. We always need it if only for
32083 the CU symbol. */
32084 output_comp_unit (comp_unit_die (), true, NULL);
32085
32086 /* Output the abbreviation table. */
32087 if (vec_safe_length (abbrev_die_table) != 1)
32088 {
32089 switch_to_section (debug_abbrev_section);
32090 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32091 output_abbrev_section ();
32092 }
32093
32094 /* Have to end the macro section. */
32095 if (have_macinfo)
32096 {
32097 /* We have to save macinfo state if we need to output it again
32098 for the FAT part of the object. */
32099 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32100 if (flag_fat_lto_objects)
32101 macinfo_table = macinfo_table->copy ();
32102
32103 switch_to_section (debug_macinfo_section);
32104 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32105 output_macinfo (debug_line_section_label, true);
32106 dw2_asm_output_data (1, 0, "End compilation unit");
32107
32108 if (flag_fat_lto_objects)
32109 {
32110 vec_free (macinfo_table);
32111 macinfo_table = saved_macinfo_table;
32112 }
32113 }
32114
32115 /* Emit a skeleton debug_line section. */
32116 switch_to_section (debug_line_section);
32117 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32118 output_line_info (true);
32119
32120 /* If we emitted any indirect strings, output the string table too. */
32121 if (debug_str_hash || skeleton_debug_str_hash)
32122 output_indirect_strings ();
32123 if (debug_line_str_hash)
32124 {
32125 switch_to_section (debug_line_str_section);
32126 const enum dwarf_form form = DW_FORM_line_strp;
32127 debug_line_str_hash->traverse<enum dwarf_form,
32128 output_indirect_string> (form);
32129 }
32130
32131 /* Switch back to the text section. */
32132 switch_to_section (text_section);
32133 }
32134
32135 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32136 within the same process. For use by toplev::finalize. */
32137
32138 void
32139 dwarf2out_c_finalize (void)
32140 {
32141 last_var_location_insn = NULL;
32142 cached_next_real_insn = NULL;
32143 used_rtx_array = NULL;
32144 incomplete_types = NULL;
32145 debug_info_section = NULL;
32146 debug_skeleton_info_section = NULL;
32147 debug_abbrev_section = NULL;
32148 debug_skeleton_abbrev_section = NULL;
32149 debug_aranges_section = NULL;
32150 debug_addr_section = NULL;
32151 debug_macinfo_section = NULL;
32152 debug_line_section = NULL;
32153 debug_skeleton_line_section = NULL;
32154 debug_loc_section = NULL;
32155 debug_pubnames_section = NULL;
32156 debug_pubtypes_section = NULL;
32157 debug_str_section = NULL;
32158 debug_line_str_section = NULL;
32159 debug_str_dwo_section = NULL;
32160 debug_str_offsets_section = NULL;
32161 debug_ranges_section = NULL;
32162 debug_frame_section = NULL;
32163 fde_vec = NULL;
32164 debug_str_hash = NULL;
32165 debug_line_str_hash = NULL;
32166 skeleton_debug_str_hash = NULL;
32167 dw2_string_counter = 0;
32168 have_multiple_function_sections = false;
32169 text_section_used = false;
32170 cold_text_section_used = false;
32171 cold_text_section = NULL;
32172 current_unit_personality = NULL;
32173
32174 early_dwarf = false;
32175 early_dwarf_finished = false;
32176
32177 next_die_offset = 0;
32178 single_comp_unit_die = NULL;
32179 comdat_type_list = NULL;
32180 limbo_die_list = NULL;
32181 file_table = NULL;
32182 decl_die_table = NULL;
32183 common_block_die_table = NULL;
32184 decl_loc_table = NULL;
32185 call_arg_locations = NULL;
32186 call_arg_loc_last = NULL;
32187 call_site_count = -1;
32188 tail_call_site_count = -1;
32189 cached_dw_loc_list_table = NULL;
32190 abbrev_die_table = NULL;
32191 delete dwarf_proc_stack_usage_map;
32192 dwarf_proc_stack_usage_map = NULL;
32193 line_info_label_num = 0;
32194 cur_line_info_table = NULL;
32195 text_section_line_info = NULL;
32196 cold_text_section_line_info = NULL;
32197 separate_line_info = NULL;
32198 info_section_emitted = false;
32199 pubname_table = NULL;
32200 pubtype_table = NULL;
32201 macinfo_table = NULL;
32202 ranges_table = NULL;
32203 ranges_by_label = NULL;
32204 rnglist_idx = 0;
32205 have_location_lists = false;
32206 loclabel_num = 0;
32207 poc_label_num = 0;
32208 last_emitted_file = NULL;
32209 label_num = 0;
32210 tmpl_value_parm_die_table = NULL;
32211 generic_type_instances = NULL;
32212 frame_pointer_fb_offset = 0;
32213 frame_pointer_fb_offset_valid = false;
32214 base_types.release ();
32215 XDELETEVEC (producer_string);
32216 producer_string = NULL;
32217 }
32218
32219 #include "gt-dwarf2out.h"