1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
 47 	 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
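/* Editorial sketch (not part of the original source): in the emitted
   frame section, one CIE typically carries the rules shared by many
   functions, and each function contributes an FDE that refers back to
   that CIE:

       CIE   code/data alignment, return-address column, initial CFIs
       FDE   for function A: [begin, end) range plus its own CFIs
       FDE   for function B: likewise
       ...

   output_call_frame_info below emits exactly this layout: one CIE
   followed by a loop over fde_vec.  */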
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
 148    completed at some later time. incomplete_types needs to be a
 149    vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* Pointers to various DWARF2 sections. */
154 static GTY(()) section *debug_info_section;
155 static GTY(()) section *debug_skeleton_info_section;
156 static GTY(()) section *debug_abbrev_section;
157 static GTY(()) section *debug_skeleton_abbrev_section;
158 static GTY(()) section *debug_aranges_section;
159 static GTY(()) section *debug_addr_section;
160 static GTY(()) section *debug_macinfo_section;
161 static const char *debug_macinfo_section_name;
162 static unsigned macinfo_label_base = 1;
163 static GTY(()) section *debug_line_section;
164 static GTY(()) section *debug_skeleton_line_section;
165 static GTY(()) section *debug_loc_section;
166 static GTY(()) section *debug_pubnames_section;
167 static GTY(()) section *debug_pubtypes_section;
168 static GTY(()) section *debug_str_section;
169 static GTY(()) section *debug_line_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 40
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
189
190 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
191 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
192 #endif
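/* Editorial illustration (not part of the original source): the two
   initial-length layouts described above are

       32-bit DWARF:  <length : 4 bytes>
       64-bit DWARF:  0xffffffff <length : 8 bytes>

   which is why the CIE/FDE output code below first emits a 0xffffffff
   escape word whenever DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4,
   i.e. whenever the 12-byte form is in use.  */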
193
194 /* Round SIZE up to the nearest BOUNDARY. */
195 #define DWARF_ROUND(SIZE,BOUNDARY) \
196 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
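/* Editorial illustration (not part of the original source): DWARF_ROUND
   rounds up using integer division, so for example

       DWARF_ROUND (10, 4) == (((10 + 4 - 1) / 4) * 4) == 12
       DWARF_ROUND (12, 4) == (((12 + 4 - 1) / 4) * 4) == 12

   i.e. a size already on the boundary is left unchanged.  */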
197
198 /* CIE identifier. */
199 #if HOST_BITS_PER_WIDE_INT >= 64
200 #define DWARF_CIE_ID \
201 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
202 #else
203 #define DWARF_CIE_ID DW_CIE_ID
204 #endif
205
206
207 /* A vector for a table that contains frame description
208 information for each routine. */
209 #define NOT_INDEXED (-1U)
210 #define NO_INDEX_ASSIGNED (-2U)
211
212 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
213
214 struct GTY((for_user)) indirect_string_node {
215 const char *str;
216 unsigned int refcount;
217 enum dwarf_form form;
218 char *label;
219 unsigned int index;
220 };
221
222 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
223 {
224 typedef const char *compare_type;
225
226 static hashval_t hash (indirect_string_node *);
227 static bool equal (indirect_string_node *, const char *);
228 };
229
230 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
231
232 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
233
234 /* With split_debug_info, both the comp_dir and dwo_name go in the
235 main object file, rather than the dwo, similar to the force_direct
236 parameter elsewhere but with additional complications:
237
238 1) The string is needed in both the main object file and the dwo.
239 That is, the comp_dir and dwo_name will appear in both places.
240
241 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
242 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
243
244 3) GCC chooses the form to use late, depending on the size and
245 reference count.
246
 247    Rather than forcing all the debug string handling functions and
248 callers to deal with these complications, simply use a separate,
249 special-cased string table for any attribute that should go in the
250 main object file. This limits the complexity to just the places
251 that need it. */
252
253 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
254
255 static GTY(()) int dw2_string_counter;
256
257 /* True if the compilation unit places functions in more than one section. */
258 static GTY(()) bool have_multiple_function_sections = false;
259
260 /* Whether the default text and cold text sections have been used at all. */
261 static GTY(()) bool text_section_used = false;
262 static GTY(()) bool cold_text_section_used = false;
263
264 /* The default cold text section. */
265 static GTY(()) section *cold_text_section;
266
267 /* The DIE for C++14 'auto' in a function return type. */
268 static GTY(()) dw_die_ref auto_die;
269
270 /* The DIE for C++14 'decltype(auto)' in a function return type. */
271 static GTY(()) dw_die_ref decltype_auto_die;
272
273 /* Forward declarations for functions defined in this file. */
274
275 static void output_call_frame_info (int);
276 static void dwarf2out_note_section_used (void);
277
278 /* Personality decl of current unit. Used only when assembler does not support
279 personality CFI. */
280 static GTY(()) rtx current_unit_personality;
281
282 /* Whether an eh_frame section is required. */
283 static GTY(()) bool do_eh_frame = false;
284
285 /* .debug_rnglists next index. */
286 static unsigned int rnglist_idx;
287
288 /* Data and reference forms for relocatable data. */
289 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
290 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
291
292 #ifndef DEBUG_FRAME_SECTION
293 #define DEBUG_FRAME_SECTION ".debug_frame"
294 #endif
295
296 #ifndef FUNC_BEGIN_LABEL
297 #define FUNC_BEGIN_LABEL "LFB"
298 #endif
299
300 #ifndef FUNC_END_LABEL
301 #define FUNC_END_LABEL "LFE"
302 #endif
303
304 #ifndef PROLOGUE_END_LABEL
305 #define PROLOGUE_END_LABEL "LPE"
306 #endif
307
308 #ifndef EPILOGUE_BEGIN_LABEL
309 #define EPILOGUE_BEGIN_LABEL "LEB"
310 #endif
311
312 #ifndef FRAME_BEGIN_LABEL
313 #define FRAME_BEGIN_LABEL "Lframe"
314 #endif
315 #define CIE_AFTER_SIZE_LABEL "LSCIE"
316 #define CIE_END_LABEL "LECIE"
317 #define FDE_LABEL "LSFDE"
318 #define FDE_AFTER_SIZE_LABEL "LASFDE"
319 #define FDE_END_LABEL "LEFDE"
320 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
321 #define LINE_NUMBER_END_LABEL "LELT"
322 #define LN_PROLOG_AS_LABEL "LASLTP"
323 #define LN_PROLOG_END_LABEL "LELTP"
324 #define DIE_LABEL_PREFIX "DW"
325 \f
326 /* Match the base name of a file to the base name of a compilation unit. */
327
328 static int
329 matches_main_base (const char *path)
330 {
331 /* Cache the last query. */
332 static const char *last_path = NULL;
333 static int last_match = 0;
334 if (path != last_path)
335 {
336 const char *base;
337 int length = base_of_path (path, &base);
338 last_path = path;
339 last_match = (length == main_input_baselength
340 && memcmp (base, main_input_basename, length) == 0);
341 }
342 return last_match;
343 }
344
345 #ifdef DEBUG_DEBUG_STRUCT
346
347 static int
348 dump_struct_debug (tree type, enum debug_info_usage usage,
349 enum debug_struct_file criterion, int generic,
350 int matches, int result)
351 {
352 /* Find the type name. */
353 tree type_decl = TYPE_STUB_DECL (type);
354 tree t = type_decl;
355 const char *name = 0;
356 if (TREE_CODE (t) == TYPE_DECL)
357 t = DECL_NAME (t);
358 if (t)
359 name = IDENTIFIER_POINTER (t);
360
361 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
362 criterion,
363 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
364 matches ? "bas" : "hdr",
365 generic ? "gen" : "ord",
366 usage == DINFO_USAGE_DFN ? ";" :
367 usage == DINFO_USAGE_DIR_USE ? "." : "*",
368 result,
369 (void*) type_decl, name);
370 return result;
371 }
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 dump_struct_debug (type, usage, criterion, generic, matches, result)
374
375 #else
376
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 (result)
379
380 #endif
381
382 /* Get the number of HOST_WIDE_INTs needed to represent the precision
383 of the number. Some constants have a large uniform precision, so
384 we get the precision needed for the actual value of the number. */
385
386 static unsigned int
387 get_full_len (const wide_int &op)
388 {
389 int prec = wi::min_precision (op, UNSIGNED);
390 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
391 / HOST_BITS_PER_WIDE_INT);
392 }
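/* Editorial illustration (not part of the original source): assuming a
   64-bit HOST_WIDE_INT, a constant whose minimum precision is 70 bits
   needs (70 + 64 - 1) / 64 == 2 HOST_WIDE_INTs, while anything that
   fits in 64 bits or fewer needs just one.  */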
393
394 static bool
395 should_emit_struct_debug (tree type, enum debug_info_usage usage)
396 {
397 enum debug_struct_file criterion;
398 tree type_decl;
399 bool generic = lang_hooks.types.generic_p (type);
400
401 if (generic)
402 criterion = debug_struct_generic[usage];
403 else
404 criterion = debug_struct_ordinary[usage];
405
406 if (criterion == DINFO_STRUCT_FILE_NONE)
407 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
408 if (criterion == DINFO_STRUCT_FILE_ANY)
409 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
410
411 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
412
413 if (type_decl != NULL)
414 {
415 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
420 }
421
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
423 }
424 \f
425 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
426 switch to the data section instead, and write out a synthetic start label
427 for collect2 the first time around. */
428
429 static void
430 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
431 {
432 if (eh_frame_section == 0)
433 {
434 int flags;
435
436 if (EH_TABLES_CAN_BE_READ_ONLY)
437 {
438 int fde_encoding;
439 int per_encoding;
440 int lsda_encoding;
441
442 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
443 /*global=*/0);
444 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
445 /*global=*/1);
446 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
447 /*global=*/0);
448 flags = ((! flag_pic
449 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
450 && (fde_encoding & 0x70) != DW_EH_PE_aligned
451 && (per_encoding & 0x70) != DW_EH_PE_absptr
452 && (per_encoding & 0x70) != DW_EH_PE_aligned
453 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
454 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
455 ? 0 : SECTION_WRITE);
456 }
457 else
458 flags = SECTION_WRITE;
459
460 #ifdef EH_FRAME_SECTION_NAME
461 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
462 #else
463 eh_frame_section = ((flags == SECTION_WRITE)
464 ? data_section : readonly_data_section);
465 #endif /* EH_FRAME_SECTION_NAME */
466 }
467
468 switch_to_section (eh_frame_section);
469
470 #ifdef EH_FRAME_THROUGH_COLLECT2
471 /* We have no special eh_frame section. Emit special labels to guide
472 collect2. */
473 if (!back)
474 {
475 tree label = get_file_function_name ("F");
476 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
477 targetm.asm_out.globalize_label (asm_out_file,
478 IDENTIFIER_POINTER (label));
479 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
480 }
481 #endif
482 }
483
484 /* Switch [BACK] to the eh or debug frame table section, depending on
485 FOR_EH. */
486
487 static void
488 switch_to_frame_table_section (int for_eh, bool back)
489 {
490 if (for_eh)
491 switch_to_eh_frame_section (back);
492 else
493 {
494 if (!debug_frame_section)
495 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
496 SECTION_DEBUG, NULL);
497 switch_to_section (debug_frame_section);
498 }
499 }
500
501 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
502
503 enum dw_cfi_oprnd_type
504 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
505 {
506 switch (cfi)
507 {
508 case DW_CFA_nop:
509 case DW_CFA_GNU_window_save:
510 case DW_CFA_remember_state:
511 case DW_CFA_restore_state:
512 return dw_cfi_oprnd_unused;
513
514 case DW_CFA_set_loc:
515 case DW_CFA_advance_loc1:
516 case DW_CFA_advance_loc2:
517 case DW_CFA_advance_loc4:
518 case DW_CFA_MIPS_advance_loc8:
519 return dw_cfi_oprnd_addr;
520
521 case DW_CFA_offset:
522 case DW_CFA_offset_extended:
523 case DW_CFA_def_cfa:
524 case DW_CFA_offset_extended_sf:
525 case DW_CFA_def_cfa_sf:
526 case DW_CFA_restore:
527 case DW_CFA_restore_extended:
528 case DW_CFA_undefined:
529 case DW_CFA_same_value:
530 case DW_CFA_def_cfa_register:
531 case DW_CFA_register:
532 case DW_CFA_expression:
533 case DW_CFA_val_expression:
534 return dw_cfi_oprnd_reg_num;
535
536 case DW_CFA_def_cfa_offset:
537 case DW_CFA_GNU_args_size:
538 case DW_CFA_def_cfa_offset_sf:
539 return dw_cfi_oprnd_offset;
540
541 case DW_CFA_def_cfa_expression:
542 return dw_cfi_oprnd_loc;
543
544 default:
545 gcc_unreachable ();
546 }
547 }
548
549 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
550
551 enum dw_cfi_oprnd_type
552 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
553 {
554 switch (cfi)
555 {
556 case DW_CFA_def_cfa:
557 case DW_CFA_def_cfa_sf:
558 case DW_CFA_offset:
559 case DW_CFA_offset_extended_sf:
560 case DW_CFA_offset_extended:
561 return dw_cfi_oprnd_offset;
562
563 case DW_CFA_register:
564 return dw_cfi_oprnd_reg_num;
565
566 case DW_CFA_expression:
567 case DW_CFA_val_expression:
568 return dw_cfi_oprnd_loc;
569
570 case DW_CFA_def_cfa_expression:
571 return dw_cfi_oprnd_cfa_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
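	  /* Editorial illustration (not part of the original source): the
	     "-offset & (PTR_SIZE - 1)" idiom above yields the number of
	     padding bytes needed to reach the next multiple of PTR_SIZE;
	     e.g. with PTR_SIZE == 8 and offset == 19, -19 & 7 == 5 and
	     19 + 5 == 24.  */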
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically performed to make sure that tables
 691      associated with functions are dragged with them and not discarded by
 692      link-time garbage collection. We need to do this on a per-function basis
 693      to cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
731
 732 /* Output the call frame information, which records the information
 733    needed to calculate the frame pointer and the locations of the
 734    saved registers. */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
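      /* Editorial illustration (not part of the original source): the code
	 below appends 'P', 'L' and 'R' in that order, so a unit with a
	 personality routine, LSDA uses and a non-default FDE encoding gets
	 the augmentation string "zPLR"; the augmentation data that follows
	 the RA column then holds the personality encoding and pointer, the
	 LSDA encoding and the FDE encoding, in the same order.  */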
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
 935   /* Loop through all of the FDEs. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964
965 fprintf (asm_out_file, "\t.cfi_startproc\n");
966
967 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
968 eh unwinders. */
969 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
970 return;
971
972 rtx personality = get_personality_function (current_function_decl);
973
974 if (personality)
975 {
976 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
977 ref = personality;
978
979 /* ??? The GAS support isn't entirely consistent. We have to
980 handle indirect support ourselves, but PC-relative is done
981 in the assembler. Further, the assembler can't handle any
982 of the weirder relocation types. */
983 if (enc & DW_EH_PE_indirect)
984 ref = dw2_force_const_mem (ref, true);
985
986 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
987 output_addr_const (asm_out_file, ref);
988 fputc ('\n', asm_out_file);
989 }
990
991 if (crtl->uses_eh_lsda)
992 {
993 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
994
995 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
996 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
997 current_function_funcdef_no);
998 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
999 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1000
1001 if (enc & DW_EH_PE_indirect)
1002 ref = dw2_force_const_mem (ref, true);
1003
1004 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1005 output_addr_const (asm_out_file, ref);
1006 fputc ('\n', asm_out_file);
1007 }
1008 }
1009
1010 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1011 this allocation may be done before pass_final. */
1012
1013 dw_fde_ref
1014 dwarf2out_alloc_current_fde (void)
1015 {
1016 dw_fde_ref fde;
1017
1018 fde = ggc_cleared_alloc<dw_fde_node> ();
1019 fde->decl = current_function_decl;
1020 fde->funcdef_number = current_function_funcdef_no;
1021 fde->fde_index = vec_safe_length (fde_vec);
1022 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1023 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1024 fde->nothrow = crtl->nothrow;
1025 fde->drap_reg = INVALID_REGNUM;
1026 fde->vdrap_reg = INVALID_REGNUM;
1027
1028 /* Record the FDE associated with this function. */
1029 cfun->fde = fde;
1030 vec_safe_push (fde_vec, fde);
1031
1032 return fde;
1033 }
1034
1035 /* Output a marker (i.e. a label) for the beginning of a function, before
1036 the prologue. */
1037
1038 void
1039 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1040 unsigned int column ATTRIBUTE_UNUSED,
1041 const char *file ATTRIBUTE_UNUSED)
1042 {
1043 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1044 char * dup_label;
1045 dw_fde_ref fde;
1046 section *fnsec;
1047 bool do_frame;
1048
1049 current_function_func_begin_label = NULL;
1050
1051 do_frame = dwarf2out_do_frame ();
1052
1053 /* ??? current_function_func_begin_label is also used by except.c for
1054 call-site information. We must emit this label if it might be used. */
1055 if (!do_frame
1056 && (!flag_exceptions
1057 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1058 return;
1059
1060 fnsec = function_section (current_function_decl);
1061 switch_to_section (fnsec);
1062 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1063 current_function_funcdef_no);
1064 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1065 current_function_funcdef_no);
1066 dup_label = xstrdup (label);
1067 current_function_func_begin_label = dup_label;
1068
1069 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1070 if (!do_frame)
1071 return;
1072
1073 /* Unlike the debug version, the EH version of frame unwind info is a per-
1074 function setting so we need to record whether we need it for the unit. */
1075 do_eh_frame |= dwarf2out_do_eh_frame ();
1076
1077 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1078 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1079 would include pass_dwarf2_frame. If we've not created the FDE yet,
1080 do so now. */
1081 fde = cfun->fde;
1082 if (fde == NULL)
1083 fde = dwarf2out_alloc_current_fde ();
1084
1085 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1086 fde->dw_fde_begin = dup_label;
1087 fde->dw_fde_current_label = dup_label;
1088 fde->in_std_section = (fnsec == text_section
1089 || (cold_text_section && fnsec == cold_text_section));
1090
1091 /* We only want to output line number information for the genuine dwarf2
1092 prologue case, not the eh frame case. */
1093 #ifdef DWARF2_DEBUGGING_INFO
1094 if (file)
1095 dwarf2out_source_line (line, column, file, 0, true);
1096 #endif
1097
1098 if (dwarf2out_do_cfi_asm ())
1099 dwarf2out_do_cfi_startproc (false);
1100 else
1101 {
1102 rtx personality = get_personality_function (current_function_decl);
1103 if (!current_unit_personality)
1104 current_unit_personality = personality;
1105
1106       /* We cannot keep a current personality per function because, without
1107 	 CFI asm, there is no current function anymore at the point where we
1108 	 emit the CFI data. */
1109 if (personality && current_unit_personality != personality)
1110 sorry ("multiple EH personalities are supported only with assemblers "
1111 "supporting .cfi_personality directive");
1112 }
1113 }
1114
1115 /* Output a marker (i.e. a label) for the end of the generated code
1116 for a function prologue. This gets called *after* the prologue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1124
1125   /* Output a label to mark the end of the code generated for the prologue
1126      of this function. */
1127 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1128 current_function_funcdef_no);
1129 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1130 current_function_funcdef_no);
1131 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1132 }
1133
1134 /* Output a marker (i.e. a label) for the beginning of the generated code
1135    for a function epilogue. This gets called *before* the epilogue code has
1136 been generated. */
1137
1138 void
1139 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1140 const char *file ATTRIBUTE_UNUSED)
1141 {
1142 dw_fde_ref fde = cfun->fde;
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 if (fde->dw_fde_vms_begin_epilogue)
1146 return;
1147
1148   /* Output a label to mark the beginning of the code generated for the
1149      epilogue of this function. */
1150 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1151 current_function_funcdef_no);
1152 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1153 current_function_funcdef_no);
1154 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1155 }
1156
1157 /* Output a marker (i.e. a label) for the absolute end of the generated code
1158 for a function definition. This gets called *after* the epilogue code has
1159 been generated. */
1160
1161 void
1162 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1163 const char *file ATTRIBUTE_UNUSED)
1164 {
1165 dw_fde_ref fde;
1166 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1167
1168 last_var_location_insn = NULL;
1169 cached_next_real_insn = NULL;
1170
1171 if (dwarf2out_do_cfi_asm ())
1172 fprintf (asm_out_file, "\t.cfi_endproc\n");
1173
1174 /* Output a label to mark the endpoint of the code generated for this
1175 function. */
1176 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1177 current_function_funcdef_no);
1178 ASM_OUTPUT_LABEL (asm_out_file, label);
1179 fde = cfun->fde;
1180 gcc_assert (fde != NULL);
1181 if (fde->dw_fde_second_begin == NULL)
1182 fde->dw_fde_end = xstrdup (label);
1183 }
1184
1185 void
1186 dwarf2out_frame_finish (void)
1187 {
1188 /* Output call frame information. */
1189 if (targetm.debug_unwind_info () == UI_DWARF2)
1190 output_call_frame_info (0);
1191
1192 /* Output another copy for the unwinder. */
1193 if (do_eh_frame)
1194 output_call_frame_info (1);
1195 }
1196
1197 /* Note that the current function section is being used for code. */
1198
1199 static void
1200 dwarf2out_note_section_used (void)
1201 {
1202 section *sec = current_function_section ();
1203 if (sec == text_section)
1204 text_section_used = true;
1205 else if (sec == cold_text_section)
1206 cold_text_section_used = true;
1207 }
1208
1209 static void var_location_switch_text_section (void);
1210 static void set_cur_line_info_table (section *);
1211
1212 void
1213 dwarf2out_switch_text_section (void)
1214 {
1215 section *sect;
1216 dw_fde_ref fde = cfun->fde;
1217
1218 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1219
1220 if (!in_cold_section_p)
1221 {
1222 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1223 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1224 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1225 }
1226 else
1227 {
1228 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1231 }
1232 have_multiple_function_sections = true;
1233
1234 /* There is no need to mark used sections when not debugging. */
1235 if (cold_text_section != NULL)
1236 dwarf2out_note_section_used ();
1237
1238 if (dwarf2out_do_cfi_asm ())
1239 fprintf (asm_out_file, "\t.cfi_endproc\n");
1240
1241 /* Now do the real section switch. */
1242 sect = current_function_section ();
1243 switch_to_section (sect);
1244
1245 fde->second_in_std_section
1246 = (sect == text_section
1247 || (cold_text_section && sect == cold_text_section));
1248
1249 if (dwarf2out_do_cfi_asm ())
1250 dwarf2out_do_cfi_startproc (true);
1251
1252 var_location_switch_text_section ();
1253
1254 if (cold_text_section != NULL)
1255 set_cur_line_info_table (sect);
1256 }
1257 \f
1258 /* And now, the subset of the debugging information support code necessary
1259 for emitting location expressions. */
1260
1261 /* Data about a single source file. */
1262 struct GTY((for_user)) dwarf_file_data {
1263 const char * filename;
1264 int emitted_number;
1265 };
1266
1267 /* Describe an entry into the .debug_addr section. */
1268
1269 enum ate_kind {
1270 ate_kind_rtx,
1271 ate_kind_rtx_dtprel,
1272 ate_kind_label
1273 };
1274
1275 struct GTY((for_user)) addr_table_entry {
1276 enum ate_kind kind;
1277 unsigned int refcount;
1278 unsigned int index;
1279 union addr_table_entry_struct_union
1280 {
1281 rtx GTY ((tag ("0"))) rtl;
1282 char * GTY ((tag ("1"))) label;
1283 }
1284 GTY ((desc ("%1.kind"))) addr;
1285 };
1286
1287 typedef unsigned int var_loc_view;
1288
1289 /* Location lists are ranges + location descriptions for those ranges,
1290 so you can track variables that are in different places over
1291 their entire life. */
1292 typedef struct GTY(()) dw_loc_list_struct {
1293 dw_loc_list_ref dw_loc_next;
1294 const char *begin; /* Label and addr_entry for start of range */
1295 addr_table_entry *begin_entry;
1296 const char *end; /* Label for end of range */
1297 char *ll_symbol; /* Label for beginning of location list.
1298 Only on head of list. */
1299 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 var_loc_view vbegin, vend;
1303 hashval_t hash;
1304 /* True if all addresses in this and subsequent lists are known to be
1305 resolved. */
1306 bool resolved_addr;
1307 /* True if this list has been replaced by dw_loc_next. */
1308 bool replaced;
1309 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1310 section. */
1311 unsigned char emitted : 1;
1312 /* True if hash field is index rather than hash value. */
1313 unsigned char num_assigned : 1;
1314 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1315 unsigned char offset_emitted : 1;
1316 /* True if note_variable_value_in_expr has been called on it. */
1317 unsigned char noted_variable_value : 1;
1318 /* True if the range should be emitted even if begin and end
1319 are the same. */
1320 bool force;
1321 } dw_loc_list_node;
1322
1323 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1324 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1325
1326 /* Convert a DWARF stack opcode into its string name. */
1327
1328 static const char *
1329 dwarf_stack_op_name (unsigned int op)
1330 {
1331 const char *name = get_DW_OP_name (op);
1332
1333 if (name != NULL)
1334 return name;
1335
1336 return "OP_<unknown>";
1337 }
1338
1339 /* Return TRUE iff we're to output location view lists as a separate
1340 attribute next to the location lists, as an extension compatible
1341 with DWARF 2 and above. */
1342
1343 static inline bool
1344 dwarf2out_locviews_in_attribute ()
1345 {
1346 return debug_variable_location_views == 1;
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as part of the
1350 location lists, as proposed for standardization after DWARF 5. */
1351
1352 static inline bool
1353 dwarf2out_locviews_in_loclist ()
1354 {
1355 #ifndef DW_LLE_view_pair
1356 return false;
1357 #else
1358 return debug_variable_location_views == -1;
1359 #endif
1360 }
1361
1362 /* Return a pointer to a newly allocated location description. Location
1363 descriptions are simple expression terms that can be strung
1364 together to form more complicated location (address) descriptions. */
1365
1366 static inline dw_loc_descr_ref
1367 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1368 unsigned HOST_WIDE_INT oprnd2)
1369 {
1370 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1371
1372 descr->dw_loc_opc = op;
1373 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1374 descr->dw_loc_oprnd1.val_entry = NULL;
1375 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1376 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1377 descr->dw_loc_oprnd2.val_entry = NULL;
1378 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1379
1380 return descr;
1381 }
1382
1383 /* Add a location description term to a location description expression. */
1384
1385 static inline void
1386 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1387 {
1388 dw_loc_descr_ref *d;
1389
1390 /* Find the end of the chain. */
1391 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1392 ;
1393
1394 *d = descr;
1395 }
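/* Editorial sketch (not part of the original source): location description
   terms are chained exactly as the comments above describe; for example, an
   expression that pushes reg6 + 8 and then dereferences it could be built as

       dw_loc_descr_ref loc = new_loc_descr (DW_OP_breg6, 8, 0);
       add_loc_descr (&loc, new_loc_descr (DW_OP_deref, 0, 0));

   mirroring how new_reg_loc_descr and loc_descr_plus_const below use these
   two helpers.  */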
1396
1397 /* Compare two location operands for exact equality. */
1398
1399 static bool
1400 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1401 {
1402 if (a->val_class != b->val_class)
1403 return false;
1404 switch (a->val_class)
1405 {
1406 case dw_val_class_none:
1407 return true;
1408 case dw_val_class_addr:
1409 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1410
1411 case dw_val_class_offset:
1412 case dw_val_class_unsigned_const:
1413 case dw_val_class_const:
1414 case dw_val_class_unsigned_const_implicit:
1415 case dw_val_class_const_implicit:
1416 case dw_val_class_range_list:
1417 /* These are all HOST_WIDE_INT, signed or unsigned. */
1418 return a->v.val_unsigned == b->v.val_unsigned;
1419
1420 case dw_val_class_loc:
1421 return a->v.val_loc == b->v.val_loc;
1422 case dw_val_class_loc_list:
1423 return a->v.val_loc_list == b->v.val_loc_list;
1424 case dw_val_class_view_list:
1425 return a->v.val_view_list == b->v.val_view_list;
1426 case dw_val_class_die_ref:
1427 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1428 case dw_val_class_fde_ref:
1429 return a->v.val_fde_index == b->v.val_fde_index;
1430 case dw_val_class_symview:
1431 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1432 case dw_val_class_lbl_id:
1433 case dw_val_class_lineptr:
1434 case dw_val_class_macptr:
1435 case dw_val_class_loclistsptr:
1436 case dw_val_class_high_pc:
1437 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1438 case dw_val_class_str:
1439 return a->v.val_str == b->v.val_str;
1440 case dw_val_class_flag:
1441 return a->v.val_flag == b->v.val_flag;
1442 case dw_val_class_file:
1443 case dw_val_class_file_implicit:
1444 return a->v.val_file == b->v.val_file;
1445 case dw_val_class_decl_ref:
1446 return a->v.val_decl_ref == b->v.val_decl_ref;
1447
1448 case dw_val_class_const_double:
1449 return (a->v.val_double.high == b->v.val_double.high
1450 && a->v.val_double.low == b->v.val_double.low);
1451
1452 case dw_val_class_wide_int:
1453 return *a->v.val_wide == *b->v.val_wide;
1454
1455 case dw_val_class_vec:
1456 {
1457 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1458 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1459
1460 return (a_len == b_len
1461 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1462 }
1463
1464 case dw_val_class_data8:
1465 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1466
1467 case dw_val_class_vms_delta:
1468 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1469 	      && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1470
1471 case dw_val_class_discr_value:
1472 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1473 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1474 case dw_val_class_discr_list:
1475       /* It makes no sense to compare two discriminant value lists. */
1476 return false;
1477 }
1478 gcc_unreachable ();
1479 }
1480
1481 /* Compare two location atoms for exact equality. */
1482
1483 static bool
1484 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1485 {
1486 if (a->dw_loc_opc != b->dw_loc_opc)
1487 return false;
1488
1489 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1490 address size, but since we always allocate cleared storage it
1491 should be zero for other types of locations. */
1492 if (a->dtprel != b->dtprel)
1493 return false;
1494
1495 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1496 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1497 }
1498
1499 /* Compare two complete location expressions for exact equality. */
1500
1501 bool
1502 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1503 {
1504 while (1)
1505 {
1506 if (a == b)
1507 return true;
1508 if (a == NULL || b == NULL)
1509 return false;
1510 if (!loc_descr_equal_p_1 (a, b))
1511 return false;
1512
1513 a = a->dw_loc_next;
1514 b = b->dw_loc_next;
1515 }
1516 }
1517
1518
1519 /* Add a constant POLY_OFFSET to a location expression. */
1520
1521 static void
1522 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1523 {
1524 dw_loc_descr_ref loc;
1525 HOST_WIDE_INT *p;
1526
1527 gcc_assert (*list_head != NULL);
1528
1529 if (known_eq (poly_offset, 0))
1530 return;
1531
1532 /* Find the end of the chain. */
1533 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1534 ;
1535
1536 HOST_WIDE_INT offset;
1537 if (!poly_offset.is_constant (&offset))
1538 {
1539 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1541 return;
1542 }
1543
1544 p = NULL;
1545 if (loc->dw_loc_opc == DW_OP_fbreg
1546 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1547 p = &loc->dw_loc_oprnd1.v.val_int;
1548 else if (loc->dw_loc_opc == DW_OP_bregx)
1549 p = &loc->dw_loc_oprnd2.v.val_int;
1550
1551 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1552      offset. Don't optimize if a signed integer overflow would happen. */
1553 if (p != NULL
1554 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1555 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1556 *p += offset;
1557
1558 else if (offset > 0)
1559 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1560
1561 else
1562 {
1563 loc->dw_loc_next
1564 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1565 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1566 }
1567 }
1568
1569 /* Return a pointer to a newly allocated location description for
1570 REG and OFFSET. */
1571
1572 static inline dw_loc_descr_ref
1573 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1574 {
1575 HOST_WIDE_INT const_offset;
1576 if (offset.is_constant (&const_offset))
1577 {
1578 if (reg <= 31)
1579 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1580 const_offset, 0);
1581 else
1582 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1583 }
1584 else
1585 {
1586 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1587 loc_descr_plus_const (&ret, offset);
1588 return ret;
1589 }
1590 }
1591
1592 /* Add a constant OFFSET to a location list. */
1593
1594 static void
1595 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1596 {
1597 dw_loc_list_ref d;
1598 for (d = list_head; d != NULL; d = d->dw_loc_next)
1599 loc_descr_plus_const (&d->expr, offset);
1600 }
1601
1602 #define DWARF_REF_SIZE \
1603 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1604
1605 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1606 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1607 DW_FORM_data16 with 128 bits. */
1608 #define DWARF_LARGEST_DATA_FORM_BITS \
1609 (dwarf_version >= 5 ? 128 : 64)
1610
1611 /* Utility inline function for construction of ops that were GNU extensions
1612 before DWARF 5. */
1613 static inline enum dwarf_location_atom
1614 dwarf_OP (enum dwarf_location_atom op)
1615 {
1616 switch (op)
1617 {
1618 case DW_OP_implicit_pointer:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_implicit_pointer;
1621 break;
1622
1623 case DW_OP_entry_value:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_entry_value;
1626 break;
1627
1628 case DW_OP_const_type:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_const_type;
1631 break;
1632
1633 case DW_OP_regval_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_regval_type;
1636 break;
1637
1638 case DW_OP_deref_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_deref_type;
1641 break;
1642
1643 case DW_OP_convert:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_convert;
1646 break;
1647
1648 case DW_OP_reinterpret:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_reinterpret;
1651 break;
1652
1653 case DW_OP_addrx:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_addr_index;
1656 break;
1657
1658 case DW_OP_constx:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_const_index;
1661 break;
1662
1663 default:
1664 break;
1665 }
1666 return op;
1667 }
1668
1669 /* Similarly for attributes. */
1670 static inline enum dwarf_attribute
1671 dwarf_AT (enum dwarf_attribute at)
1672 {
1673 switch (at)
1674 {
1675 case DW_AT_call_return_pc:
1676 if (dwarf_version < 5)
1677 return DW_AT_low_pc;
1678 break;
1679
1680 case DW_AT_call_tail_call:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_tail_call;
1683 break;
1684
1685 case DW_AT_call_origin:
1686 if (dwarf_version < 5)
1687 return DW_AT_abstract_origin;
1688 break;
1689
1690 case DW_AT_call_target:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target;
1693 break;
1694
1695 case DW_AT_call_target_clobbered:
1696 if (dwarf_version < 5)
1697 return DW_AT_GNU_call_site_target_clobbered;
1698 break;
1699
1700 case DW_AT_call_parameter:
1701 if (dwarf_version < 5)
1702 return DW_AT_abstract_origin;
1703 break;
1704
1705 case DW_AT_call_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_value;
1708 break;
1709
1710 case DW_AT_call_data_value:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_call_site_data_value;
1713 break;
1714
1715 case DW_AT_call_all_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_call_sites;
1718 break;
1719
1720 case DW_AT_call_all_tail_calls:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_all_tail_call_sites;
1723 break;
1724
1725 case DW_AT_dwo_name:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_dwo_name;
1728 break;
1729
1730 case DW_AT_addr_base:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_addr_base;
1733 break;
1734
1735 default:
1736 break;
1737 }
1738 return at;
1739 }
1740
1741 /* And similarly for tags. */
1742 static inline enum dwarf_tag
1743 dwarf_TAG (enum dwarf_tag tag)
1744 {
1745 switch (tag)
1746 {
1747 case DW_TAG_call_site:
1748 if (dwarf_version < 5)
1749 return DW_TAG_GNU_call_site;
1750 break;
1751
1752 case DW_TAG_call_site_parameter:
1753 if (dwarf_version < 5)
1754 return DW_TAG_GNU_call_site_parameter;
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 return tag;
1761 }
1762
1763 /* And similarly for forms. */
1764 static inline enum dwarf_form
1765 dwarf_FORM (enum dwarf_form form)
1766 {
1767 switch (form)
1768 {
1769 case DW_FORM_addrx:
1770 if (dwarf_version < 5)
1771 return DW_FORM_GNU_addr_index;
1772 break;
1773
1774 case DW_FORM_strx:
1775 if (dwarf_version < 5)
1776 return DW_FORM_GNU_str_index;
1777 break;
1778
1779 default:
1780 break;
1781 }
1782 return form;
1783 }
1784
1785 static unsigned long int get_base_type_offset (dw_die_ref);
1786
1787 /* Return the size of a location descriptor. */
1788
1789 static unsigned long
1790 size_of_loc_descr (dw_loc_descr_ref loc)
1791 {
1792 unsigned long size = 1;
1793
1794 switch (loc->dw_loc_opc)
1795 {
1796 case DW_OP_addr:
1797 size += DWARF2_ADDR_SIZE;
1798 break;
1799 case DW_OP_GNU_addr_index:
1800 case DW_OP_addrx:
1801 case DW_OP_GNU_const_index:
1802 case DW_OP_constx:
1803 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1804 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1805 break;
1806 case DW_OP_const1u:
1807 case DW_OP_const1s:
1808 size += 1;
1809 break;
1810 case DW_OP_const2u:
1811 case DW_OP_const2s:
1812 size += 2;
1813 break;
1814 case DW_OP_const4u:
1815 case DW_OP_const4s:
1816 size += 4;
1817 break;
1818 case DW_OP_const8u:
1819 case DW_OP_const8s:
1820 size += 8;
1821 break;
1822 case DW_OP_constu:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 break;
1825 case DW_OP_consts:
1826 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1827 break;
1828 case DW_OP_pick:
1829 size += 1;
1830 break;
1831 case DW_OP_plus_uconst:
1832 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1833 break;
1834 case DW_OP_skip:
1835 case DW_OP_bra:
1836 size += 2;
1837 break;
1838 case DW_OP_breg0:
1839 case DW_OP_breg1:
1840 case DW_OP_breg2:
1841 case DW_OP_breg3:
1842 case DW_OP_breg4:
1843 case DW_OP_breg5:
1844 case DW_OP_breg6:
1845 case DW_OP_breg7:
1846 case DW_OP_breg8:
1847 case DW_OP_breg9:
1848 case DW_OP_breg10:
1849 case DW_OP_breg11:
1850 case DW_OP_breg12:
1851 case DW_OP_breg13:
1852 case DW_OP_breg14:
1853 case DW_OP_breg15:
1854 case DW_OP_breg16:
1855 case DW_OP_breg17:
1856 case DW_OP_breg18:
1857 case DW_OP_breg19:
1858 case DW_OP_breg20:
1859 case DW_OP_breg21:
1860 case DW_OP_breg22:
1861 case DW_OP_breg23:
1862 case DW_OP_breg24:
1863 case DW_OP_breg25:
1864 case DW_OP_breg26:
1865 case DW_OP_breg27:
1866 case DW_OP_breg28:
1867 case DW_OP_breg29:
1868 case DW_OP_breg30:
1869 case DW_OP_breg31:
1870 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1871 break;
1872 case DW_OP_regx:
1873 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1874 break;
1875 case DW_OP_fbreg:
1876 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1877 break;
1878 case DW_OP_bregx:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1880 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1881 break;
1882 case DW_OP_piece:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_bit_piece:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1888 break;
1889 case DW_OP_deref_size:
1890 case DW_OP_xderef_size:
1891 size += 1;
1892 break;
1893 case DW_OP_call2:
1894 size += 2;
1895 break;
1896 case DW_OP_call4:
1897 size += 4;
1898 break;
1899 case DW_OP_call_ref:
1900 case DW_OP_GNU_variable_value:
1901 size += DWARF_REF_SIZE;
1902 break;
1903 case DW_OP_implicit_value:
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1905 + loc->dw_loc_oprnd1.v.val_unsigned;
1906 break;
1907 case DW_OP_implicit_pointer:
1908 case DW_OP_GNU_implicit_pointer:
1909 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1910 break;
1911 case DW_OP_entry_value:
1912 case DW_OP_GNU_entry_value:
1913 {
1914 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1915 size += size_of_uleb128 (op_size) + op_size;
1916 break;
1917 }
1918 case DW_OP_const_type:
1919 case DW_OP_GNU_const_type:
1920 {
1921 unsigned long o
1922 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1923 size += size_of_uleb128 (o) + 1;
1924 switch (loc->dw_loc_oprnd2.val_class)
1925 {
1926 case dw_val_class_vec:
1927 size += loc->dw_loc_oprnd2.v.val_vec.length
1928 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1929 break;
1930 case dw_val_class_const:
1931 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1932 break;
1933 case dw_val_class_const_double:
1934 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1935 break;
1936 case dw_val_class_wide_int:
1937 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1938 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1939 break;
1940 default:
1941 gcc_unreachable ();
1942 }
1943 break;
1944 }
1945 case DW_OP_regval_type:
1946 case DW_OP_GNU_regval_type:
1947 {
1948 unsigned long o
1949 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1950 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1951 + size_of_uleb128 (o);
1952 }
1953 break;
1954 case DW_OP_deref_type:
1955 case DW_OP_GNU_deref_type:
1956 {
1957 unsigned long o
1958 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1959 size += 1 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_convert:
1963 case DW_OP_reinterpret:
1964 case DW_OP_GNU_convert:
1965 case DW_OP_GNU_reinterpret:
1966 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1967 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1968 else
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1972 size += size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_GNU_parameter_ref:
1976 size += 4;
1977 break;
1978 default:
1979 break;
1980 }
1981
1982 return size;
1983 }
1984
1985 /* Return the size of a series of location descriptors. */
1986
1987 unsigned long
1988 size_of_locs (dw_loc_descr_ref loc)
1989 {
1990 dw_loc_descr_ref l;
1991 unsigned long size;
1992
1993 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1994 field, to avoid writing to a PCH file. */
1995 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1996 {
1997 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1998 break;
1999 size += size_of_loc_descr (l);
2000 }
2001 if (! l)
2002 return size;
2003
2004 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2005 {
2006 l->dw_loc_addr = size;
2007 size += size_of_loc_descr (l);
2008 }
2009
2010 return size;
2011 }
2012
2013 /* Return the size of the value in a DW_AT_discr_value attribute. */
2014
2015 static int
2016 size_of_discr_value (dw_discr_value *discr_value)
2017 {
2018 if (discr_value->pos)
2019 return size_of_uleb128 (discr_value->v.uval);
2020 else
2021 return size_of_sleb128 (discr_value->v.sval);
2022 }
2023
2024 /* Return the size of the value in a DW_AT_discr_list attribute. */
2025
2026 static int
2027 size_of_discr_list (dw_discr_list_ref discr_list)
2028 {
2029 int size = 0;
2030
2031 for (dw_discr_list_ref list = discr_list;
2032 list != NULL;
2033 list = list->dw_discr_next)
2034 {
2035 /* One byte for the discriminant value descriptor, and then one or two
2036 LEB128 numbers, depending on whether it's a single case label or a
2037 range label. */
2038 size += 1;
2039 size += size_of_discr_value (&list->dw_discr_lower_bound);
2040 if (list->dw_discr_range != 0)
2041 size += size_of_discr_value (&list->dw_discr_upper_bound);
2042 }
2043 return size;
2044 }
2045
2046 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2047 static void get_ref_die_offset_label (char *, dw_die_ref);
2048 static unsigned long int get_ref_die_offset (dw_die_ref);
2049
2050 /* Output location description stack opcode's operands (if any).
2051 The for_eh_or_skip parameter controls whether register numbers are
2052 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2053 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2054 info). This should be suppressed for the cases that have not been converted
2055 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2056
2057 static void
2058 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2059 {
2060 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2061 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2062
2063 switch (loc->dw_loc_opc)
2064 {
2065 #ifdef DWARF2_DEBUGGING_INFO
2066 case DW_OP_const2u:
2067 case DW_OP_const2s:
2068 dw2_asm_output_data (2, val1->v.val_int, NULL);
2069 break;
2070 case DW_OP_const4u:
2071 if (loc->dtprel)
2072 {
2073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2074 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2075 val1->v.val_addr);
2076 fputc ('\n', asm_out_file);
2077 break;
2078 }
2079 /* FALLTHRU */
2080 case DW_OP_const4s:
2081 dw2_asm_output_data (4, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const8u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const8s:
2094 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2095 dw2_asm_output_data (8, val1->v.val_int, NULL);
2096 break;
2097 case DW_OP_skip:
2098 case DW_OP_bra:
2099 {
2100 int offset;
2101
2102 gcc_assert (val1->val_class == dw_val_class_loc);
2103 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
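/* The branch operand is relative to the end of this operation:
   1 byte of opcode plus the 2-byte operand itself, hence the "+ 3".
   E.g. a DW_OP_bra at expression offset 10 whose target op sits at
   offset 20 is emitted with operand 20 - (10 + 3) = 7.  */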
2104
2105 dw2_asm_output_data (2, offset, NULL);
2106 }
2107 break;
2108 case DW_OP_implicit_value:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 switch (val2->val_class)
2111 {
2112 case dw_val_class_const:
2113 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2114 break;
2115 case dw_val_class_vec:
2116 {
2117 unsigned int elt_size = val2->v.val_vec.elt_size;
2118 unsigned int len = val2->v.val_vec.length;
2119 unsigned int i;
2120 unsigned char *p;
2121
2122 if (elt_size > sizeof (HOST_WIDE_INT))
2123 {
2124 elt_size /= 2;
2125 len *= 2;
2126 }
2127 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2128 i < len;
2129 i++, p += elt_size)
2130 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2131 "fp or vector constant word %u", i);
2132 }
2133 break;
2134 case dw_val_class_const_double:
2135 {
2136 unsigned HOST_WIDE_INT first, second;
2137
2138 if (WORDS_BIG_ENDIAN)
2139 {
2140 first = val2->v.val_double.high;
2141 second = val2->v.val_double.low;
2142 }
2143 else
2144 {
2145 first = val2->v.val_double.low;
2146 second = val2->v.val_double.high;
2147 }
2148 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2149 first, NULL);
2150 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2151 second, NULL);
2152 }
2153 break;
2154 case dw_val_class_wide_int:
2155 {
2156 int i;
2157 int len = get_full_len (*val2->v.val_wide);
2158 if (WORDS_BIG_ENDIAN)
2159 for (i = len - 1; i >= 0; --i)
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 val2->v.val_wide->elt (i), NULL);
2162 else
2163 for (i = 0; i < len; ++i)
2164 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2165 val2->v.val_wide->elt (i), NULL);
2166 }
2167 break;
2168 case dw_val_class_addr:
2169 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2170 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175 break;
2176 #else
2177 case DW_OP_const2u:
2178 case DW_OP_const2s:
2179 case DW_OP_const4u:
2180 case DW_OP_const4s:
2181 case DW_OP_const8u:
2182 case DW_OP_const8s:
2183 case DW_OP_skip:
2184 case DW_OP_bra:
2185 case DW_OP_implicit_value:
2186 /* We currently don't make any attempt to make sure these are
2187 aligned properly like we do for the main unwind info, so
2188 don't support emitting things larger than a byte if we're
2189 only doing unwinding. */
2190 gcc_unreachable ();
2191 #endif
2192 case DW_OP_const1u:
2193 case DW_OP_const1s:
2194 dw2_asm_output_data (1, val1->v.val_int, NULL);
2195 break;
2196 case DW_OP_constu:
2197 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2198 break;
2199 case DW_OP_consts:
2200 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2201 break;
2202 case DW_OP_pick:
2203 dw2_asm_output_data (1, val1->v.val_int, NULL);
2204 break;
2205 case DW_OP_plus_uconst:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 break;
2208 case DW_OP_breg0:
2209 case DW_OP_breg1:
2210 case DW_OP_breg2:
2211 case DW_OP_breg3:
2212 case DW_OP_breg4:
2213 case DW_OP_breg5:
2214 case DW_OP_breg6:
2215 case DW_OP_breg7:
2216 case DW_OP_breg8:
2217 case DW_OP_breg9:
2218 case DW_OP_breg10:
2219 case DW_OP_breg11:
2220 case DW_OP_breg12:
2221 case DW_OP_breg13:
2222 case DW_OP_breg14:
2223 case DW_OP_breg15:
2224 case DW_OP_breg16:
2225 case DW_OP_breg17:
2226 case DW_OP_breg18:
2227 case DW_OP_breg19:
2228 case DW_OP_breg20:
2229 case DW_OP_breg21:
2230 case DW_OP_breg22:
2231 case DW_OP_breg23:
2232 case DW_OP_breg24:
2233 case DW_OP_breg25:
2234 case DW_OP_breg26:
2235 case DW_OP_breg27:
2236 case DW_OP_breg28:
2237 case DW_OP_breg29:
2238 case DW_OP_breg30:
2239 case DW_OP_breg31:
2240 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2241 break;
2242 case DW_OP_regx:
2243 {
2244 unsigned r = val1->v.val_unsigned;
2245 if (for_eh_or_skip >= 0)
2246 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2247 gcc_assert (size_of_uleb128 (r)
2248 == size_of_uleb128 (val1->v.val_unsigned));
2249 dw2_asm_output_data_uleb128 (r, NULL);
2250 }
2251 break;
2252 case DW_OP_fbreg:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_bregx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2264 }
2265 break;
2266 case DW_OP_piece:
2267 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2268 break;
2269 case DW_OP_bit_piece:
2270 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2271 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2272 break;
2273 case DW_OP_deref_size:
2274 case DW_OP_xderef_size:
2275 dw2_asm_output_data (1, val1->v.val_int, NULL);
2276 break;
2277
2278 case DW_OP_addr:
2279 if (loc->dtprel)
2280 {
2281 if (targetm.asm_out.output_dwarf_dtprel)
2282 {
2283 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2284 DWARF2_ADDR_SIZE,
2285 val1->v.val_addr);
2286 fputc ('\n', asm_out_file);
2287 }
2288 else
2289 gcc_unreachable ();
2290 }
2291 else
2292 {
2293 #ifdef DWARF2_DEBUGGING_INFO
2294 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2295 #else
2296 gcc_unreachable ();
2297 #endif
2298 }
2299 break;
2300
2301 case DW_OP_GNU_addr_index:
2302 case DW_OP_addrx:
2303 case DW_OP_GNU_const_index:
2304 case DW_OP_constx:
2305 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2306 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2307 "(index into .debug_addr)");
2308 break;
2309
2310 case DW_OP_call2:
2311 case DW_OP_call4:
2312 {
2313 unsigned long die_offset
2314 = get_ref_die_offset (val1->v.val_die_ref.die);
2315 /* Make sure the offset has been computed and that we can encode it as
2316 an operand. */
2317 gcc_assert (die_offset > 0
2318 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2319 ? 0xffff
2320 : 0xffffffff));
2321 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2322 die_offset, NULL);
2323 }
2324 break;
2325
2326 case DW_OP_call_ref:
2327 case DW_OP_GNU_variable_value:
2328 {
2329 char label[MAX_ARTIFICIAL_LABEL_BYTES
2330 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2331 gcc_assert (val1->val_class == dw_val_class_die_ref);
2332 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2333 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2334 }
2335 break;
2336
2337 case DW_OP_implicit_pointer:
2338 case DW_OP_GNU_implicit_pointer:
2339 {
2340 char label[MAX_ARTIFICIAL_LABEL_BYTES
2341 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2342 gcc_assert (val1->val_class == dw_val_class_die_ref);
2343 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2344 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2345 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_entry_value:
2350 case DW_OP_GNU_entry_value:
2351 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2352 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2353 break;
2354
2355 case DW_OP_const_type:
2356 case DW_OP_GNU_const_type:
2357 {
2358 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2359 gcc_assert (o);
2360 dw2_asm_output_data_uleb128 (o, NULL);
2361 switch (val2->val_class)
2362 {
2363 case dw_val_class_const:
2364 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2365 dw2_asm_output_data (1, l, NULL);
2366 dw2_asm_output_data (l, val2->v.val_int, NULL);
2367 break;
2368 case dw_val_class_vec:
2369 {
2370 unsigned int elt_size = val2->v.val_vec.elt_size;
2371 unsigned int len = val2->v.val_vec.length;
2372 unsigned int i;
2373 unsigned char *p;
2374
2375 l = len * elt_size;
2376 dw2_asm_output_data (1, l, NULL);
2377 if (elt_size > sizeof (HOST_WIDE_INT))
2378 {
2379 elt_size /= 2;
2380 len *= 2;
2381 }
2382 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2383 i < len;
2384 i++, p += elt_size)
2385 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2386 "fp or vector constant word %u", i);
2387 }
2388 break;
2389 case dw_val_class_const_double:
2390 {
2391 unsigned HOST_WIDE_INT first, second;
2392 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2393
2394 dw2_asm_output_data (1, 2 * l, NULL);
2395 if (WORDS_BIG_ENDIAN)
2396 {
2397 first = val2->v.val_double.high;
2398 second = val2->v.val_double.low;
2399 }
2400 else
2401 {
2402 first = val2->v.val_double.low;
2403 second = val2->v.val_double.high;
2404 }
2405 dw2_asm_output_data (l, first, NULL);
2406 dw2_asm_output_data (l, second, NULL);
2407 }
2408 break;
2409 case dw_val_class_wide_int:
2410 {
2411 int i;
2412 int len = get_full_len (*val2->v.val_wide);
2413 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2414
2415 dw2_asm_output_data (1, len * l, NULL);
2416 if (WORDS_BIG_ENDIAN)
2417 for (i = len - 1; i >= 0; --i)
2418 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2419 else
2420 for (i = 0; i < len; ++i)
2421 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2422 }
2423 break;
2424 default:
2425 gcc_unreachable ();
2426 }
2427 }
2428 break;
2429 case DW_OP_regval_type:
2430 case DW_OP_GNU_regval_type:
2431 {
2432 unsigned r = val1->v.val_unsigned;
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 if (for_eh_or_skip >= 0)
2436 {
2437 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2438 gcc_assert (size_of_uleb128 (r)
2439 == size_of_uleb128 (val1->v.val_unsigned));
2440 }
2441 dw2_asm_output_data_uleb128 (r, NULL);
2442 dw2_asm_output_data_uleb128 (o, NULL);
2443 }
2444 break;
2445 case DW_OP_deref_type:
2446 case DW_OP_GNU_deref_type:
2447 {
2448 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2449 gcc_assert (o);
2450 dw2_asm_output_data (1, val1->v.val_int, NULL);
2451 dw2_asm_output_data_uleb128 (o, NULL);
2452 }
2453 break;
2454 case DW_OP_convert:
2455 case DW_OP_reinterpret:
2456 case DW_OP_GNU_convert:
2457 case DW_OP_GNU_reinterpret:
2458 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2459 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2460 else
2461 {
2462 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2463 gcc_assert (o);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467
2468 case DW_OP_GNU_parameter_ref:
2469 {
2470 unsigned long o;
2471 gcc_assert (val1->val_class == dw_val_class_die_ref);
2472 o = get_ref_die_offset (val1->v.val_die_ref.die);
2473 dw2_asm_output_data (4, o, NULL);
2474 }
2475 break;
2476
2477 default:
2478 /* Other codes have no operands. */
2479 break;
2480 }
2481 }
2482
2483 /* Output a sequence of location operations.
2484 The for_eh_or_skip parameter controls whether register numbers are
2485 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2486 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2487 info). This should be suppressed for the cases that have not been converted
2488 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
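/* Note that for DW_OP_reg0..31 and DW_OP_breg0..31 the register number is
   part of the opcode itself, so the EH remapping below rewrites the opcode:
   e.g. if hard register 3 were mapped to DWARF column 5 for unwind output,
   DW_OP_breg3 would be emitted as DW_OP_breg5 (illustrative mapping).  */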
2489
2490 void
2491 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2492 {
2493 for (; loc != NULL; loc = loc->dw_loc_next)
2494 {
2495 enum dwarf_location_atom opc = loc->dw_loc_opc;
2496 /* Output the opcode. */
2497 if (for_eh_or_skip >= 0
2498 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2499 {
2500 unsigned r = (opc - DW_OP_breg0);
2501 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2502 gcc_assert (r <= 31);
2503 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2504 }
2505 else if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2507 {
2508 unsigned r = (opc - DW_OP_reg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2512 }
2513
2514 dw2_asm_output_data (1, opc,
2515 "%s", dwarf_stack_op_name (opc));
2516
2517 /* Output the operand(s) (if any). */
2518 output_loc_operands (loc, for_eh_or_skip);
2519 }
2520 }
2521
2522 /* Output location description stack opcode's operands (if any).
2523 The output is single bytes on a line, suitable for .cfi_escape. */
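/* For illustration only, assuming a target whose unwind register numbering
   is the identity mapping: a DW_OP_breg6 with offset -8 comes out as the
   raw byte pair 0x76,0x78 (0x70 + 6, then -8 as SLEB128), ready to be
   appended to a .cfi_escape directive.  */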
2524
2525 static void
2526 output_loc_operands_raw (dw_loc_descr_ref loc)
2527 {
2528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2530
2531 switch (loc->dw_loc_opc)
2532 {
2533 case DW_OP_addr:
2534 case DW_OP_GNU_addr_index:
2535 case DW_OP_addrx:
2536 case DW_OP_GNU_const_index:
2537 case DW_OP_constx:
2538 case DW_OP_implicit_value:
2539 /* We cannot output addresses in .cfi_escape, only bytes. */
2540 gcc_unreachable ();
2541
2542 case DW_OP_const1u:
2543 case DW_OP_const1s:
2544 case DW_OP_pick:
2545 case DW_OP_deref_size:
2546 case DW_OP_xderef_size:
2547 fputc (',', asm_out_file);
2548 dw2_asm_output_data_raw (1, val1->v.val_int);
2549 break;
2550
2551 case DW_OP_const2u:
2552 case DW_OP_const2s:
2553 fputc (',', asm_out_file);
2554 dw2_asm_output_data_raw (2, val1->v.val_int);
2555 break;
2556
2557 case DW_OP_const4u:
2558 case DW_OP_const4s:
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_raw (4, val1->v.val_int);
2561 break;
2562
2563 case DW_OP_const8u:
2564 case DW_OP_const8s:
2565 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (8, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_skip:
2571 case DW_OP_bra:
2572 {
2573 int offset;
2574
2575 gcc_assert (val1->val_class == dw_val_class_loc);
2576 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2577
2578 fputc (',', asm_out_file);
2579 dw2_asm_output_data_raw (2, offset);
2580 }
2581 break;
2582
2583 case DW_OP_regx:
2584 {
2585 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2586 gcc_assert (size_of_uleb128 (r)
2587 == size_of_uleb128 (val1->v.val_unsigned));
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_uleb128_raw (r);
2590 }
2591 break;
2592
2593 case DW_OP_constu:
2594 case DW_OP_plus_uconst:
2595 case DW_OP_piece:
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2598 break;
2599
2600 case DW_OP_bit_piece:
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2603 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2604 break;
2605
2606 case DW_OP_consts:
2607 case DW_OP_breg0:
2608 case DW_OP_breg1:
2609 case DW_OP_breg2:
2610 case DW_OP_breg3:
2611 case DW_OP_breg4:
2612 case DW_OP_breg5:
2613 case DW_OP_breg6:
2614 case DW_OP_breg7:
2615 case DW_OP_breg8:
2616 case DW_OP_breg9:
2617 case DW_OP_breg10:
2618 case DW_OP_breg11:
2619 case DW_OP_breg12:
2620 case DW_OP_breg13:
2621 case DW_OP_breg14:
2622 case DW_OP_breg15:
2623 case DW_OP_breg16:
2624 case DW_OP_breg17:
2625 case DW_OP_breg18:
2626 case DW_OP_breg19:
2627 case DW_OP_breg20:
2628 case DW_OP_breg21:
2629 case DW_OP_breg22:
2630 case DW_OP_breg23:
2631 case DW_OP_breg24:
2632 case DW_OP_breg25:
2633 case DW_OP_breg26:
2634 case DW_OP_breg27:
2635 case DW_OP_breg28:
2636 case DW_OP_breg29:
2637 case DW_OP_breg30:
2638 case DW_OP_breg31:
2639 case DW_OP_fbreg:
2640 fputc (',', asm_out_file);
2641 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2642 break;
2643
2644 case DW_OP_bregx:
2645 {
2646 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2647 gcc_assert (size_of_uleb128 (r)
2648 == size_of_uleb128 (val1->v.val_unsigned));
2649 fputc (',', asm_out_file);
2650 dw2_asm_output_data_uleb128_raw (r);
2651 fputc (',', asm_out_file);
2652 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2653 }
2654 break;
2655
2656 case DW_OP_implicit_pointer:
2657 case DW_OP_entry_value:
2658 case DW_OP_const_type:
2659 case DW_OP_regval_type:
2660 case DW_OP_deref_type:
2661 case DW_OP_convert:
2662 case DW_OP_reinterpret:
2663 case DW_OP_GNU_implicit_pointer:
2664 case DW_OP_GNU_entry_value:
2665 case DW_OP_GNU_const_type:
2666 case DW_OP_GNU_regval_type:
2667 case DW_OP_GNU_deref_type:
2668 case DW_OP_GNU_convert:
2669 case DW_OP_GNU_reinterpret:
2670 case DW_OP_GNU_parameter_ref:
2671 gcc_unreachable ();
2672 break;
2673
2674 default:
2675 /* Other codes have no operands. */
2676 break;
2677 }
2678 }
2679
2680 void
2681 output_loc_sequence_raw (dw_loc_descr_ref loc)
2682 {
2683 while (1)
2684 {
2685 enum dwarf_location_atom opc = loc->dw_loc_opc;
2686 /* Output the opcode. */
2687 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2688 {
2689 unsigned r = (opc - DW_OP_breg0);
2690 r = DWARF2_FRAME_REG_OUT (r, 1);
2691 gcc_assert (r <= 31);
2692 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2693 }
2694 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2695 {
2696 unsigned r = (opc - DW_OP_reg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2700 }
2701 /* Output the opcode. */
2702 fprintf (asm_out_file, "%#x", opc);
2703 output_loc_operands_raw (loc);
2704
2705 if (!loc->dw_loc_next)
2706 break;
2707 loc = loc->dw_loc_next;
2708
2709 fputc (',', asm_out_file);
2710 }
2711 }
2712
2713 /* This function builds a dwarf location descriptor sequence from a
2714 dw_cfa_location, adding the given OFFSET to the result of the
2715 expression. */
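/* A hedged example of the shape of the result: with an indirect CFA
   described as the word at reg 6 + 16 (cfa->offset == 0) and OFFSET == 8,
   this builds roughly DW_OP_breg6 16; DW_OP_deref; DW_OP_plus_uconst 8,
   while a non-indirect CFA simply yields a single register+offset op.  */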
2716
2717 struct dw_loc_descr_node *
2718 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2719 {
2720 struct dw_loc_descr_node *head, *tmp;
2721
2722 offset += cfa->offset;
2723
2724 if (cfa->indirect)
2725 {
2726 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2727 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2728 head->dw_loc_oprnd1.val_entry = NULL;
2729 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2730 add_loc_descr (&head, tmp);
2731 loc_descr_plus_const (&head, offset);
2732 }
2733 else
2734 head = new_reg_loc_descr (cfa->reg, offset);
2735
2736 return head;
2737 }
2738
2739 /* This function builds a dwarf location descriptor sequence for
2740 the address at OFFSET from the CFA when the stack is aligned to
2741 ALIGNMENT bytes. */
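/* A sketch of the output when the CFA is currently FP + OFFSET:
   DW_OP_breg<fp> 0; <ALIGNMENT pushed as a constant>; DW_OP_and;
   DW_OP_plus_uconst <offset> -- i.e. ALIGNMENT is applied as the AND
   mask before the displacement is added.  */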
2742
2743 struct dw_loc_descr_node *
2744 build_cfa_aligned_loc (dw_cfa_location *cfa,
2745 poly_int64 offset, HOST_WIDE_INT alignment)
2746 {
2747 struct dw_loc_descr_node *head;
2748 unsigned int dwarf_fp
2749 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2750
2751 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2752 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2753 {
2754 head = new_reg_loc_descr (dwarf_fp, 0);
2755 add_loc_descr (&head, int_loc_descriptor (alignment));
2756 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2757 loc_descr_plus_const (&head, offset);
2758 }
2759 else
2760 head = new_reg_loc_descr (dwarf_fp, offset);
2761 return head;
2762 }
2763 \f
2764 /* And now, the support for symbolic debugging information. */
2765
2766 /* .debug_str support. */
2767
2768 static void dwarf2out_init (const char *);
2769 static void dwarf2out_finish (const char *);
2770 static void dwarf2out_early_finish (const char *);
2771 static void dwarf2out_assembly_start (void);
2772 static void dwarf2out_define (unsigned int, const char *);
2773 static void dwarf2out_undef (unsigned int, const char *);
2774 static void dwarf2out_start_source_file (unsigned, const char *);
2775 static void dwarf2out_end_source_file (unsigned);
2776 static void dwarf2out_function_decl (tree);
2777 static void dwarf2out_begin_block (unsigned, unsigned);
2778 static void dwarf2out_end_block (unsigned, unsigned);
2779 static bool dwarf2out_ignore_block (const_tree);
2780 static void dwarf2out_early_global_decl (tree);
2781 static void dwarf2out_late_global_decl (tree);
2782 static void dwarf2out_type_decl (tree, int);
2783 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2784 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2785 dw_die_ref);
2786 static void dwarf2out_abstract_function (tree);
2787 static void dwarf2out_var_location (rtx_insn *);
2788 static void dwarf2out_inline_entry (tree);
2789 static void dwarf2out_size_function (tree);
2790 static void dwarf2out_begin_function (tree);
2791 static void dwarf2out_end_function (unsigned int);
2792 static void dwarf2out_register_main_translation_unit (tree unit);
2793 static void dwarf2out_set_name (tree, tree);
2794 static void dwarf2out_register_external_die (tree decl, const char *sym,
2795 unsigned HOST_WIDE_INT off);
2796 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2797 unsigned HOST_WIDE_INT *off);
2798
2799 /* The debug hooks structure. */
2800
2801 const struct gcc_debug_hooks dwarf2_debug_hooks =
2802 {
2803 dwarf2out_init,
2804 dwarf2out_finish,
2805 dwarf2out_early_finish,
2806 dwarf2out_assembly_start,
2807 dwarf2out_define,
2808 dwarf2out_undef,
2809 dwarf2out_start_source_file,
2810 dwarf2out_end_source_file,
2811 dwarf2out_begin_block,
2812 dwarf2out_end_block,
2813 dwarf2out_ignore_block,
2814 dwarf2out_source_line,
2815 dwarf2out_begin_prologue,
2816 #if VMS_DEBUGGING_INFO
2817 dwarf2out_vms_end_prologue,
2818 dwarf2out_vms_begin_epilogue,
2819 #else
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 #endif
2823 dwarf2out_end_epilogue,
2824 dwarf2out_begin_function,
2825 dwarf2out_end_function, /* end_function */
2826 dwarf2out_register_main_translation_unit,
2827 dwarf2out_function_decl, /* function_decl */
2828 dwarf2out_early_global_decl,
2829 dwarf2out_late_global_decl,
2830 dwarf2out_type_decl, /* type_decl */
2831 dwarf2out_imported_module_or_decl,
2832 dwarf2out_die_ref_for_decl,
2833 dwarf2out_register_external_die,
2834 debug_nothing_tree, /* deferred_inline_function */
2835 /* The DWARF 2 backend tries to reduce debugging bloat by not
2836 emitting the abstract description of inline functions until
2837 something tries to reference them. */
2838 dwarf2out_abstract_function, /* outlining_inline_function */
2839 debug_nothing_rtx_code_label, /* label */
2840 debug_nothing_int, /* handle_pch */
2841 dwarf2out_var_location,
2842 dwarf2out_inline_entry, /* inline_entry */
2843 dwarf2out_size_function, /* size_function */
2844 dwarf2out_switch_text_section,
2845 dwarf2out_set_name,
2846 1, /* start_end_main_source_file */
2847 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2848 };
2849
2850 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2851 {
2852 dwarf2out_init,
2853 debug_nothing_charstar,
2854 debug_nothing_charstar,
2855 dwarf2out_assembly_start,
2856 debug_nothing_int_charstar,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int,
2860 debug_nothing_int_int, /* begin_block */
2861 debug_nothing_int_int, /* end_block */
2862 debug_true_const_tree, /* ignore_block */
2863 dwarf2out_source_line, /* source_line */
2864 debug_nothing_int_int_charstar, /* begin_prologue */
2865 debug_nothing_int_charstar, /* end_prologue */
2866 debug_nothing_int_charstar, /* begin_epilogue */
2867 debug_nothing_int_charstar, /* end_epilogue */
2868 debug_nothing_tree, /* begin_function */
2869 debug_nothing_int, /* end_function */
2870 debug_nothing_tree, /* register_main_translation_unit */
2871 debug_nothing_tree, /* function_decl */
2872 debug_nothing_tree, /* early_global_decl */
2873 debug_nothing_tree, /* late_global_decl */
2874 debug_nothing_tree_int, /* type_decl */
2875 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2876 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2877 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2878 debug_nothing_tree, /* deferred_inline_function */
2879 debug_nothing_tree, /* outlining_inline_function */
2880 debug_nothing_rtx_code_label, /* label */
2881 debug_nothing_int, /* handle_pch */
2882 debug_nothing_rtx_insn, /* var_location */
2883 debug_nothing_tree, /* inline_entry */
2884 debug_nothing_tree, /* size_function */
2885 debug_nothing_void, /* switch_text_section */
2886 debug_nothing_tree_tree, /* set_name */
2887 0, /* start_end_main_source_file */
2888 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2889 };
2890 \f
2891 /* NOTE: In the comments in this file, many references are made to
2892 "Debugging Information Entries". This term is abbreviated as `DIE'
2893 throughout the remainder of this file. */
2894
2895 /* An internal representation of the DWARF output is built, and then
2896 walked to generate the DWARF debugging info. The walk of the internal
2897 representation is done after the entire program has been compiled.
2898 The types below are used to describe the internal representation. */
2899
2900 /* Whether to put type DIEs into their own .debug_types section instead
2901 of making them part of the .debug_info section. This is only done for
2902 Dwarf V4 or higher, and only if the user didn't disable it through
2903 -fno-debug-types-section. It is more efficient to put them in
2904 separate comdat sections since the linker will then be able to
2905 remove duplicates. But not all tools support .debug_types sections
2906 yet. For Dwarf V5 or higher .debug_types no longer exists; such
2907 units instead use the DW_UT_type unit type in the .debug_info section. */
2908
2909 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2910
2911 /* Various DIE's use offsets relative to the beginning of the
2912 .debug_info section to refer to each other. */
2913
2914 typedef long int dw_offset;
2915
2916 struct comdat_type_node;
2917
2918 /* The entries in the line_info table more-or-less mirror the opcodes
2919 that are used in the real dwarf line table. Arrays of these entries
2920 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2921 supported. */
2922
2923 enum dw_line_info_opcode {
2924 /* Emit DW_LNE_set_address; the operand is the label index. */
2925 LI_set_address,
2926
2927 /* Emit a row to the matrix with the given line. This may be done
2928 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2929 special opcodes. */
2930 LI_set_line,
2931
2932 /* Emit a DW_LNS_set_file. */
2933 LI_set_file,
2934
2935 /* Emit a DW_LNS_set_column. */
2936 LI_set_column,
2937
2938 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2939 LI_negate_stmt,
2940
2941 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2942 LI_set_prologue_end,
2943 LI_set_epilogue_begin,
2944
2945 /* Emit a DW_LNE_set_discriminator. */
2946 LI_set_discriminator,
2947
2948 /* Output a Fixed Advance PC; the target PC is the label index; the
2949 base PC is the previous LI_adv_address or LI_set_address entry.
2950 We only use this when emitting debug views without assembler
2951 support, at explicit user request. Ideally, we should only use
2952 it when the offset might be zero but we can't tell: it's the only
2953 way to maybe change the PC without resetting the view number. */
2954 LI_adv_address
2955 };
2956
2957 typedef struct GTY(()) dw_line_info_struct {
2958 enum dw_line_info_opcode opcode;
2959 unsigned int val;
2960 } dw_line_info_entry;
2961
2962
2963 struct GTY(()) dw_line_info_table {
2964 /* The label that marks the end of this section. */
2965 const char *end_label;
2966
2967 /* The values for the last row of the matrix, as collected in the table.
2968 These are used to minimize the changes to the next row. */
2969 unsigned int file_num;
2970 unsigned int line_num;
2971 unsigned int column_num;
2972 int discrim_num;
2973 bool is_stmt;
2974 bool in_use;
2975
2976 /* This denotes the NEXT view number.
2977
2978 If it is 0, it is known that the NEXT view will be the first view
2979 at the given PC.
2980
2981 If it is -1, we're forcing the view number to be reset, e.g. at a
2982 function entry.
2983
2984 The meaning of other nonzero values depends on whether we're
2985 computing views internally or leaving it for the assembler to do
2986 so. If we're emitting them internally, view denotes the view
2987 number since the last known advance of PC. If we're leaving it
2988 for the assembler, it denotes the LVU label number that we're
2989 going to ask the assembler to assign. */
2990 var_loc_view view;
2991
2992 /* This counts the number of symbolic views emitted in this table
2993 since the latest view reset. Its max value, over all tables,
2994 sets symview_upper_bound. */
2995 var_loc_view symviews_since_reset;
2996
2997 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2998 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2999 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3000 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3001
3002 vec<dw_line_info_entry, va_gc> *entries;
3003 };
3004
3005 /* This is an upper bound for view numbers that the assembler may
3006 assign to symbolic views output in this translation. It is used to
3007 decide how big a field to use to represent view numbers in
3008 symview-classed attributes. */
3009
3010 static var_loc_view symview_upper_bound;
3011
3012 /* If we're keeping track of location views and their reset points, and
3013 INSN is a reset point (i.e., it necessarily advances the PC), mark
3014 the next view in TABLE as reset. */
3015
3016 static void
3017 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3018 {
3019 if (!debug_internal_reset_location_views)
3020 return;
3021
3022 /* Maybe turn (part of?) this test into a default target hook. */
3023 int reset = 0;
3024
3025 if (targetm.reset_location_view)
3026 reset = targetm.reset_location_view (insn);
3027
3028 if (reset)
3029 ;
3030 else if (JUMP_TABLE_DATA_P (insn))
3031 reset = 1;
3032 else if (GET_CODE (insn) == USE
3033 || GET_CODE (insn) == CLOBBER
3034 || GET_CODE (insn) == ASM_INPUT
3035 || asm_noperands (insn) >= 0)
3036 ;
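/* Anything else with a nonzero minimum length necessarily occupies
   address space, i.e. it advances the PC.  */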
3037 else if (get_attr_min_length (insn) > 0)
3038 reset = 1;
3039
3040 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3041 RESET_NEXT_VIEW (table->view);
3042 }
3043
3044 /* Each DIE attribute has a field specifying the attribute kind,
3045 a link to the next attribute in the chain, and an attribute value.
3046 Attributes are typically linked below the DIE they modify. */
3047
3048 typedef struct GTY(()) dw_attr_struct {
3049 enum dwarf_attribute dw_attr;
3050 dw_val_node dw_attr_val;
3051 }
3052 dw_attr_node;
3053
3054
3055 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3056 The children of each node form a circular list linked by
3057 die_sib. die_child points to the node *before* the "first" child node. */
3058
3059 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3060 union die_symbol_or_type_node
3061 {
3062 const char * GTY ((tag ("0"))) die_symbol;
3063 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3064 }
3065 GTY ((desc ("%0.comdat_type_p"))) die_id;
3066 vec<dw_attr_node, va_gc> *die_attr;
3067 dw_die_ref die_parent;
3068 dw_die_ref die_child;
3069 dw_die_ref die_sib;
3070 dw_die_ref die_definition; /* ref from a specification to its definition */
3071 dw_offset die_offset;
3072 unsigned long die_abbrev;
3073 int die_mark;
3074 unsigned int decl_id;
3075 enum dwarf_tag die_tag;
3076 /* Die is used and must not be pruned as unused. */
3077 BOOL_BITFIELD die_perennial_p : 1;
3078 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3079 /* For an external ref to die_symbol if die_offset contains an extra
3080 offset to that symbol. */
3081 BOOL_BITFIELD with_offset : 1;
3082 /* Whether this DIE was removed from the DIE tree, for example via
3083 prune_unused_types. We don't consider those present in the
3084 DIE lookup routines. */
3085 BOOL_BITFIELD removed : 1;
3086 /* Lots of spare bits. */
3087 }
3088 die_node;
3089
3090 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3091 static bool early_dwarf;
3092 static bool early_dwarf_finished;
3093 struct set_early_dwarf {
3094 bool saved;
3095 set_early_dwarf () : saved(early_dwarf)
3096 {
3097 gcc_assert (! early_dwarf_finished);
3098 early_dwarf = true;
3099 }
3100 ~set_early_dwarf () { early_dwarf = saved; }
3101 };
3102
3103 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3104 #define FOR_EACH_CHILD(die, c, expr) do { \
3105 c = die->die_child; \
3106 if (c) do { \
3107 c = c->die_sib; \
3108 expr; \
3109 } while (c != die->die_child); \
3110 } while (0)
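/* Illustrative use (iterations of this shape appear later in the file):
   since die_child points at the *last* child, the loop starts at
   die_child->die_sib, i.e. the first child, and stops after visiting
   die_child itself, e.g.
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, print_die (c, outfile));  */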
3111
3112 /* The pubname structure */
3113
3114 typedef struct GTY(()) pubname_struct {
3115 dw_die_ref die;
3116 const char *name;
3117 }
3118 pubname_entry;
3119
3120
3121 struct GTY(()) dw_ranges {
3122 const char *label;
3123 /* If this is positive, it's a block number, otherwise it's a
3124 bitwise-negated index into dw_ranges_by_label. */
3125 int num;
3126 /* Index for the range list for DW_FORM_rnglistx. */
3127 unsigned int idx : 31;
3128 /* True if this range might be in a different section
3129 from the previous entry. */
3130 unsigned int maybe_new_sec : 1;
3131 };
3132
3133 /* A structure to hold a macinfo entry. */
3134
3135 typedef struct GTY(()) macinfo_struct {
3136 unsigned char code;
3137 unsigned HOST_WIDE_INT lineno;
3138 const char *info;
3139 }
3140 macinfo_entry;
3141
3142
3143 struct GTY(()) dw_ranges_by_label {
3144 const char *begin;
3145 const char *end;
3146 };
3147
3148 /* The comdat type node structure. */
3149 struct GTY(()) comdat_type_node
3150 {
3151 dw_die_ref root_die;
3152 dw_die_ref type_die;
3153 dw_die_ref skeleton_die;
3154 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3155 comdat_type_node *next;
3156 };
3157
3158 /* A list of DIEs for which we can't determine ancestry (parent_die
3159 field) just yet. Later in dwarf2out_finish we will fill in the
3160 missing bits. */
3161 typedef struct GTY(()) limbo_die_struct {
3162 dw_die_ref die;
3163 /* The tree for which this DIE was created. We use this to
3164 determine ancestry later. */
3165 tree created_for;
3166 struct limbo_die_struct *next;
3167 }
3168 limbo_die_node;
3169
3170 typedef struct skeleton_chain_struct
3171 {
3172 dw_die_ref old_die;
3173 dw_die_ref new_die;
3174 struct skeleton_chain_struct *parent;
3175 }
3176 skeleton_chain_node;
3177
3178 /* Define a macro which returns nonzero for a TYPE_DECL which was
3179 implicitly generated for a type.
3180
3181 Note that, unlike the C front-end (which generates a NULL named
3182 TYPE_DECL node for each complete tagged type, each array type,
3183 and each function type node created) the C++ front-end generates
3184 a _named_ TYPE_DECL node for each tagged type node created.
3185 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3186 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3187 front-end, but for each type, tagged or not. */
3188
3189 #define TYPE_DECL_IS_STUB(decl) \
3190 (DECL_NAME (decl) == NULL_TREE \
3191 || (DECL_ARTIFICIAL (decl) \
3192 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3193 /* This is necessary for stub decls that \
3194 appear in nested inline functions. */ \
3195 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3196 && (decl_ultimate_origin (decl) \
3197 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3198
3199 /* Information concerning the compilation unit's programming
3200 language, and compiler version. */
3201
3202 /* Fixed size portion of the DWARF compilation unit header. */
3203 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3204 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3205 + (dwarf_version >= 5 ? 4 : 3))
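/* E.g. with 32-bit DWARF (4-byte initial length and section offsets) this
   evaluates to 4 + 4 + 3 = 11 bytes for DWARF 2-4 (version and address
   size besides the abbrev offset) and to 4 + 4 + 4 = 12 bytes for DWARF 5,
   which inserts an extra unit_type byte.  */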
3206
3207 /* Fixed size portion of the DWARF comdat type unit header. */
3208 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3209 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3210 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3211
3212 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3214 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3215
3216 /* Fixed size portion of public names info. */
3217 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3218
3219 /* Fixed size portion of the address range info. */
3220 #define DWARF_ARANGES_HEADER_SIZE \
3221 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3222 DWARF2_ADDR_SIZE * 2) \
3223 - DWARF_INITIAL_LENGTH_SIZE)
3224
3225 /* Size of padding portion in the address range info. It must be
3226 aligned to twice the pointer size. */
3227 #define DWARF_ARANGES_PAD_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
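/* For instance, with 32-bit DWARF and 8-byte addresses the fixed fields
   occupy 4 + 4 + 4 = 12 bytes (length, version, .debug_info offset,
   address size, segment size), so 4 bytes of padding round the start of
   the address/length pairs up to the 16-byte (2 * address size) boundary.  */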
3231
3232 /* Use assembler line directives if available. */
3233 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3234 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3235 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3236 #else
3237 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3238 #endif
3239 #endif
3240
3241 /* Use assembler views in line directives if available. */
3242 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3243 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3244 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3245 #else
3246 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3247 #endif
3248 #endif
3249
3250 /* Return true if GCC configure detected assembler support for .loc. */
3251
3252 bool
3253 dwarf2out_default_as_loc_support (void)
3254 {
3255 return DWARF2_ASM_LINE_DEBUG_INFO;
3256 #if (GCC_VERSION >= 3000)
3257 # undef DWARF2_ASM_LINE_DEBUG_INFO
3258 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3259 #endif
3260 }
3261
3262 /* Return true if GCC configure detected assembler support for views
3263 in .loc directives. */
3264
3265 bool
3266 dwarf2out_default_as_locview_support (void)
3267 {
3268 return DWARF2_ASM_VIEW_DEBUG_INFO;
3269 #if (GCC_VERSION >= 3000)
3270 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3271 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3272 #endif
3273 }
3274
3275 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3276 view computation, and it refers to a view identifier for which we
3277 will not emit a label because it is known to map to a view number
3278 zero. We won't allocate the bitmap if we're not using assembler
3279 support for location views, but we have to make the variable
3280 visible for GGC and for code that will be optimized out for lack of
3281 support but that's still parsed and compiled. We could abstract it
3282 out with macros, but it's not worth it. */
3283 static GTY(()) bitmap zero_view_p;
3284
3285 /* Evaluate to TRUE iff N is known to identify the first location view
3286 at its PC. When not using assembler location view computation,
3287 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3288 and view label numbers recorded in it are the ones known to be
3289 zero. */
3290 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3291 || (N) == (var_loc_view)-1 \
3292 || (zero_view_p \
3293 && bitmap_bit_p (zero_view_p, (N))))
3294
3295 /* Return true iff we're to emit .loc directives for the assembler to
3296 generate line number sections.
3297
3298 When we're not emitting views, all we need from the assembler is
3299 support for .loc directives.
3300
3301 If we are emitting views, we can only use the assembler's .loc
3302 support if it also supports views.
3303
3304 When the compiler is emitting the line number programs and
3305 computing view numbers itself, it resets view numbers at known PC
3306 changes and counts from that, and then it emits view numbers as
3307 literal constants in locviewlists. There are cases in which the
3308 compiler is not sure about PC changes, e.g. when extra alignment is
3309 requested for a label. In these cases, the compiler may not reset
3310 the view counter, and the potential PC advance in the line number
3311 program will use an opcode that does not reset the view counter
3312 even if the PC actually changes, so that compiler and debug info
3313 consumer can keep view numbers in sync.
3314
3315 When the compiler defers view computation to the assembler, it
3316 emits symbolic view numbers in locviewlists, with the exception of
3317 views known to be zero (forced resets, or reset after
3318 compiler-visible PC changes): instead of emitting symbols for
3319 these, we emit literal zero and assert the assembler agrees with
3320 the compiler's assessment. We could use symbolic views everywhere,
3321 instead of special-casing zero views, but then we'd be unable to
3322 optimize out locviewlists that contain only zeros. */
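/* Assembler-dependent sketch of the deferred case (assuming GAS-style view
   support): a directive such as ".loc 1 42 0 view .LVU3" asks the assembler
   to assign the view number and bind it to the .LVU3 symbol, while views
   the compiler already knows to be zero are emitted as a literal zero.  */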
3323
3324 static bool
3325 output_asm_line_debug_info (void)
3326 {
3327 return (dwarf2out_as_loc_support
3328 && (dwarf2out_as_locview_support
3329 || !debug_variable_location_views));
3330 }
3331
3332 /* Minimum line offset in a special line info opcode.
3333 This value was chosen to give a reasonable range of values. */
3334 #define DWARF_LINE_BASE -10
3335
3336 /* First special line opcode - leave room for the standard opcodes. */
3337 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3338
3339 /* Range of line offsets in a special line info opcode. */
3340 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
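/* As a reminder of the standard line-program formula (see the DWARF spec):
   a special opcode encodes a (line delta, operation advance) pair as
     opcode = (line_delta - DWARF_LINE_BASE)
	      + (DWARF_LINE_RANGE * op_advance) + DWARF_LINE_OPCODE_BASE
   and is usable only when the result fits in a byte, e.g. line += 2 with
   no address advance gives (2 + 10) + 0 + 13 = 25 with the values above.  */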
3341
3342 /* Flag that indicates the initial value of the is_stmt_start flag.
3343 In the present implementation, we do not mark any lines as
3344 the beginning of a source statement, because that information
3345 is not made available by the GCC front-end. */
3346 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3347
3348 /* Maximum number of operations per instruction bundle. */
3349 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3350 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3351 #endif
3352
3353 /* This location is used by calc_die_sizes() to keep track of
3354 the offset of each DIE within the .debug_info section. */
3355 static unsigned long next_die_offset;
3356
3357 /* Record the root of the DIE's built for the current compilation unit. */
3358 static GTY(()) dw_die_ref single_comp_unit_die;
3359
3360 /* A list of type DIEs that have been separated into comdat sections. */
3361 static GTY(()) comdat_type_node *comdat_type_list;
3362
3363 /* A list of CU DIEs that have been separated. */
3364 static GTY(()) limbo_die_node *cu_die_list;
3365
3366 /* A list of DIEs with a NULL parent waiting to be relocated. */
3367 static GTY(()) limbo_die_node *limbo_die_list;
3368
3369 /* A list of DIEs for which we may have to generate
3370 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3371 static GTY(()) limbo_die_node *deferred_asm_name;
3372
3373 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3374 {
3375 typedef const char *compare_type;
3376
3377 static hashval_t hash (dwarf_file_data *);
3378 static bool equal (dwarf_file_data *, const char *);
3379 };
3380
3381 /* Filenames referenced by this compilation unit. */
3382 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3383
3384 struct decl_die_hasher : ggc_ptr_hash<die_node>
3385 {
3386 typedef tree compare_type;
3387
3388 static hashval_t hash (die_node *);
3389 static bool equal (die_node *, tree);
3390 };
3391 /* A hash table of references to DIE's that describe declarations.
3392 The key is a DECL_UID() which is a unique number identifying each decl. */
3393 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3394
3395 struct GTY ((for_user)) variable_value_struct {
3396 unsigned int decl_id;
3397 vec<dw_die_ref, va_gc> *dies;
3398 };
3399
3400 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3401 {
3402 typedef tree compare_type;
3403
3404 static hashval_t hash (variable_value_struct *);
3405 static bool equal (variable_value_struct *, tree);
3406 };
3407 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3408 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are the
3409 DECL_CONTEXT of the referenced VAR_DECLs. */
3410 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3411
3412 struct block_die_hasher : ggc_ptr_hash<die_struct>
3413 {
3414 static hashval_t hash (die_struct *);
3415 static bool equal (die_struct *, die_struct *);
3416 };
3417
3418 /* A hash table of references to DIE's that describe COMMON blocks.
3419 The key is DECL_UID() ^ die_parent. */
3420 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3421
3422 typedef struct GTY(()) die_arg_entry_struct {
3423 dw_die_ref die;
3424 tree arg;
3425 } die_arg_entry;
3426
3427
3428 /* Node of the variable location list. */
3429 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3430 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3431 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3432 in mode of the EXPR_LIST node and first EXPR_LIST operand
3433 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3434 location or NULL for padding. For larger bitsizes,
3435 mode is 0 and first operand is a CONCAT with bitsize
3436 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3437 NULL as second operand. */
3438 rtx GTY (()) loc;
3439 const char * GTY (()) label;
3440 struct var_loc_node * GTY (()) next;
3441 var_loc_view view;
3442 };
3443
3444 /* Variable location list. */
3445 struct GTY ((for_user)) var_loc_list_def {
3446 struct var_loc_node * GTY (()) first;
3447
3448 /* Pointer to the last or last-but-one element of the
3449 chained list. If the list is empty, both first and
3450 last are NULL. If the list contains just one node,
3451 or if the last node is certainly not redundant, this points
3452 to the last node; otherwise it points to the last but one.
3453 Do not mark it for GC because it is marked through the chain. */
3454 struct var_loc_node * GTY ((skip ("%h"))) last;
3455
3456 /* Pointer to the last element before section switch,
3457 if NULL, either sections weren't switched or first
3458 is after section switch. */
3459 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3460
3461 /* DECL_UID of the variable decl. */
3462 unsigned int decl_id;
3463 };
3464 typedef struct var_loc_list_def var_loc_list;
3465
3466 /* Call argument location list. */
3467 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3468 rtx GTY (()) call_arg_loc_note;
3469 const char * GTY (()) label;
3470 tree GTY (()) block;
3471 bool tail_call_p;
3472 rtx GTY (()) symbol_ref;
3473 struct call_arg_loc_node * GTY (()) next;
3474 };
3475
3476
3477 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3478 {
3479 typedef const_tree compare_type;
3480
3481 static hashval_t hash (var_loc_list *);
3482 static bool equal (var_loc_list *, const_tree);
3483 };
3484
3485 /* Table of decl location linked lists. */
3486 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3487
3488 /* Head and tail of call_arg_loc chain. */
3489 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3490 static struct call_arg_loc_node *call_arg_loc_last;
3491
3492 /* Number of call sites in the current function. */
3493 static int call_site_count = -1;
3494 /* Number of tail call sites in the current function. */
3495 static int tail_call_site_count = -1;
3496
3497 /* A cached location list. */
3498 struct GTY ((for_user)) cached_dw_loc_list_def {
3499 /* The DECL_UID of the decl that this entry describes. */
3500 unsigned int decl_id;
3501
3502 /* The cached location list. */
3503 dw_loc_list_ref loc_list;
3504 };
3505 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3506
3507 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3508 {
3509
3510 typedef const_tree compare_type;
3511
3512 static hashval_t hash (cached_dw_loc_list *);
3513 static bool equal (cached_dw_loc_list *, const_tree);
3514 };
3515
3516 /* Table of cached location lists. */
3517 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3518
3519 /* A vector of references to DIE's that are uniquely identified by their tag,
3520 presence/absence of children DIE's, and list of attribute/value pairs. */
3521 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3522
3523 /* A hash map to remember the stack usage for DWARF procedures. The value
3524 stored is the stack size difference between before the DWARF procedure
3525 invocation and after it returned. In other words, for a DWARF procedure
3526 that consumes N stack slots and that pushes M ones, this stores M - N. */
3527 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3528
3529 /* A global counter for generating labels for line number data. */
3530 static unsigned int line_info_label_num;
3531
3532 /* The current table to which we should emit line number information
3533 for the current function. This will be set up at the beginning of
3534 assembly for the function. */
3535 static GTY(()) dw_line_info_table *cur_line_info_table;
3536
3537 /* The two default tables of line number info. */
3538 static GTY(()) dw_line_info_table *text_section_line_info;
3539 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3540
3541 /* The set of all non-default tables of line number info. */
3542 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3543
3544 /* A flag telling the pubnames/pubtypes export whether there is an info
3545 section to refer to. */
3546 static bool info_section_emitted;
3547
3548 /* A pointer to the base of a table that contains a list of publicly
3549 accessible names. */
3550 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3551
3552 /* A pointer to the base of a table that contains a list of publicly
3553 accessible types. */
3554 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3555
3556 /* A pointer to the base of a table that contains a list of macro
3557 defines/undefines (and file start/end markers). */
3558 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3559
3560 /* True if .debug_macinfo or .debug_macros section is going to be
3561 emitted. */
3562 #define have_macinfo \
3563 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3564 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3565 && !macinfo_table->is_empty ())
3566
3567 /* Vector of dies for which we should generate .debug_ranges info. */
3568 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3569
3570 /* Vector of pairs of labels referenced in ranges_table. */
3571 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3572
3573 /* Whether we have location lists that need outputting. */
3574 static GTY(()) bool have_location_lists;
3575
3576 /* Unique label counter. */
3577 static GTY(()) unsigned int loclabel_num;
3578
3579 /* Unique label counter for point-of-call tables. */
3580 static GTY(()) unsigned int poc_label_num;
3581
3582 /* The last file entry emitted by maybe_emit_file(). */
3583 static GTY(()) struct dwarf_file_data * last_emitted_file;
3584
3585 /* Number of internal labels generated by gen_internal_sym(). */
3586 static GTY(()) int label_num;
3587
3588 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3589
3590 /* Instances of generic types for which we need to generate debug
3591 info that describes their generic parameters and arguments. That
3592 generation needs to happen once all types are properly laid out, so
3593 we do it at the end of compilation. */
3594 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3595
3596 /* Offset from the "steady-state frame pointer" to the frame base,
3597 within the current function. */
3598 static poly_int64 frame_pointer_fb_offset;
3599 static bool frame_pointer_fb_offset_valid;
3600
3601 static vec<dw_die_ref> base_types;
3602
3603 /* Flags to represent a set of attribute classes for attributes that represent
3604 a scalar value (bounds, pointers, ...). */
3605 enum dw_scalar_form
3606 {
3607 dw_scalar_form_constant = 0x01,
3608 dw_scalar_form_exprloc = 0x02,
3609 dw_scalar_form_reference = 0x04
3610 };
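
/* These are bit flags, so callers OR them together to say which DWARF
   representations they can accept. A hypothetical call asking for either
   a constant or a DWARF expression for an array bound might look like

     add_scalar_info (die, DW_AT_upper_bound, value,
                      dw_scalar_form_constant | dw_scalar_form_exprloc,
                      context);

   (illustrative only; the exact arguments depend on the caller). */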
3611
3612 /* Forward declarations for functions defined in this file. */
3613
3614 static int is_pseudo_reg (const_rtx);
3615 static tree type_main_variant (tree);
3616 static int is_tagged_type (const_tree);
3617 static const char *dwarf_tag_name (unsigned);
3618 static const char *dwarf_attr_name (unsigned);
3619 static const char *dwarf_form_name (unsigned);
3620 static tree decl_ultimate_origin (const_tree);
3621 static tree decl_class_context (tree);
3622 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3623 static inline enum dw_val_class AT_class (dw_attr_node *);
3624 static inline unsigned int AT_index (dw_attr_node *);
3625 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3626 static inline unsigned AT_flag (dw_attr_node *);
3627 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3628 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3629 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3630 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3631 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3632 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3633 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3634 unsigned int, unsigned char *);
3635 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3636 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3637 static inline const char *AT_string (dw_attr_node *);
3638 static enum dwarf_form AT_string_form (dw_attr_node *);
3639 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3640 static void add_AT_specification (dw_die_ref, dw_die_ref);
3641 static inline dw_die_ref AT_ref (dw_attr_node *);
3642 static inline int AT_ref_external (dw_attr_node *);
3643 static inline void set_AT_ref_external (dw_attr_node *, int);
3644 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3645 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3646 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3647 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3648 dw_loc_list_ref);
3649 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3650 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3652 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3653 static void remove_addr_table_entry (addr_table_entry *);
3654 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3655 static inline rtx AT_addr (dw_attr_node *);
3656 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3657 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3661 const char *);
3662 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3663 unsigned HOST_WIDE_INT);
3664 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3665 unsigned long, bool);
3666 static inline const char *AT_lbl (dw_attr_node *);
3667 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3668 static const char *get_AT_low_pc (dw_die_ref);
3669 static const char *get_AT_hi_pc (dw_die_ref);
3670 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3671 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3672 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3673 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3674 static bool is_cxx (void);
3675 static bool is_cxx (const_tree);
3676 static bool is_fortran (void);
3677 static bool is_ada (void);
3678 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3679 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3680 static void add_child_die (dw_die_ref, dw_die_ref);
3681 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3682 static dw_die_ref lookup_type_die (tree);
3683 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3684 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3685 static void equate_type_number_to_die (tree, dw_die_ref);
3686 static dw_die_ref lookup_decl_die (tree);
3687 static var_loc_list *lookup_decl_loc (const_tree);
3688 static void equate_decl_number_to_die (tree, dw_die_ref);
3689 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3690 static void print_spaces (FILE *);
3691 static void print_die (dw_die_ref, FILE *);
3692 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3693 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3694 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3695 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3696 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3697 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3699 struct md5_ctx *, int *);
3700 struct checksum_attributes;
3701 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3702 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3704 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3705 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3706 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3707 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3708 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3709 static int is_type_die (dw_die_ref);
3710 static int is_comdat_die (dw_die_ref);
3711 static inline bool is_template_instantiation (dw_die_ref);
3712 static int is_declaration_die (dw_die_ref);
3713 static int should_move_die_to_comdat (dw_die_ref);
3714 static dw_die_ref clone_as_declaration (dw_die_ref);
3715 static dw_die_ref clone_die (dw_die_ref);
3716 static dw_die_ref clone_tree (dw_die_ref);
3717 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3718 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3719 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3720 static dw_die_ref generate_skeleton (dw_die_ref);
3721 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3722 dw_die_ref,
3723 dw_die_ref);
3724 static void break_out_comdat_types (dw_die_ref);
3725 static void copy_decls_for_unworthy_types (dw_die_ref);
3726
3727 static void add_sibling_attributes (dw_die_ref);
3728 static void output_location_lists (dw_die_ref);
3729 static int constant_size (unsigned HOST_WIDE_INT);
3730 static unsigned long size_of_die (dw_die_ref);
3731 static void calc_die_sizes (dw_die_ref);
3732 static void calc_base_type_die_sizes (void);
3733 static void mark_dies (dw_die_ref);
3734 static void unmark_dies (dw_die_ref);
3735 static void unmark_all_dies (dw_die_ref);
3736 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3737 static unsigned long size_of_aranges (void);
3738 static enum dwarf_form value_format (dw_attr_node *);
3739 static void output_value_format (dw_attr_node *);
3740 static void output_abbrev_section (void);
3741 static void output_die_abbrevs (unsigned long, dw_die_ref);
3742 static void output_die (dw_die_ref);
3743 static void output_compilation_unit_header (enum dwarf_unit_type);
3744 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3745 static void output_comdat_type_unit (comdat_type_node *);
3746 static const char *dwarf2_name (tree, int);
3747 static void add_pubname (tree, dw_die_ref);
3748 static void add_enumerator_pubname (const char *, dw_die_ref);
3749 static void add_pubname_string (const char *, dw_die_ref);
3750 static void add_pubtype (tree, dw_die_ref);
3751 static void output_pubnames (vec<pubname_entry, va_gc> *);
3752 static void output_aranges (void);
3753 static unsigned int add_ranges (const_tree, bool = false);
3754 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3755 bool *, bool);
3756 static void output_ranges (void);
3757 static dw_line_info_table *new_line_info_table (void);
3758 static void output_line_info (bool);
3759 static void output_file_names (void);
3760 static dw_die_ref base_type_die (tree, bool);
3761 static int is_base_type (tree);
3762 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3763 static int decl_quals (const_tree);
3764 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3765 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3766 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3767 static int type_is_enum (const_tree);
3768 static unsigned int dbx_reg_number (const_rtx);
3769 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3770 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3771 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3772 enum var_init_status);
3773 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3774 enum var_init_status);
3775 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3776 enum var_init_status);
3777 static int is_based_loc (const_rtx);
3778 static bool resolve_one_addr (rtx *);
3779 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3780 enum var_init_status);
3781 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3782 enum var_init_status);
3783 struct loc_descr_context;
3784 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3785 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3786 static dw_loc_list_ref loc_list_from_tree (tree, int,
3787 struct loc_descr_context *);
3788 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3789 struct loc_descr_context *);
3790 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3812 static void add_comp_dir_attribute (dw_die_ref);
3813 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3814 struct loc_descr_context *);
3815 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3816 struct loc_descr_context *);
3817 static void add_subscript_info (dw_die_ref, tree, bool);
3818 static void add_byte_size_attribute (dw_die_ref, tree);
3819 static void add_alignment_attribute (dw_die_ref, tree);
3820 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3821 struct vlr_context *);
3822 static void add_bit_size_attribute (dw_die_ref, tree);
3823 static void add_prototyped_attribute (dw_die_ref, tree);
3824 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3825 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3826 static void add_src_coords_attributes (dw_die_ref, tree);
3827 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3828 static void add_discr_value (dw_die_ref, dw_discr_value *);
3829 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3830 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3831 static dw_die_ref scope_die_for (tree, dw_die_ref);
3832 static inline int local_scope_p (dw_die_ref);
3833 static inline int class_scope_p (dw_die_ref);
3834 static inline int class_or_namespace_scope_p (dw_die_ref);
3835 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3836 static void add_calling_convention_attribute (dw_die_ref, tree);
3837 static const char *type_tag (const_tree);
3838 static tree member_declared_type (const_tree);
3839 #if 0
3840 static const char *decl_start_label (tree);
3841 #endif
3842 static void gen_array_type_die (tree, dw_die_ref);
3843 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3844 #if 0
3845 static void gen_entry_point_die (tree, dw_die_ref);
3846 #endif
3847 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3848 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3850 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3851 static void gen_formal_types_die (tree, dw_die_ref);
3852 static void gen_subprogram_die (tree, dw_die_ref);
3853 static void gen_variable_die (tree, tree, dw_die_ref);
3854 static void gen_const_die (tree, dw_die_ref);
3855 static void gen_label_die (tree, dw_die_ref);
3856 static void gen_lexical_block_die (tree, dw_die_ref);
3857 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3858 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3859 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3860 static dw_die_ref gen_compile_unit_die (const char *);
3861 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3862 static void gen_member_die (tree, dw_die_ref);
3863 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3864 enum debug_info_usage);
3865 static void gen_subroutine_type_die (tree, dw_die_ref);
3866 static void gen_typedef_die (tree, dw_die_ref);
3867 static void gen_type_die (tree, dw_die_ref);
3868 static void gen_block_die (tree, dw_die_ref);
3869 static void decls_for_scope (tree, dw_die_ref);
3870 static bool is_naming_typedef_decl (const_tree);
3871 static inline dw_die_ref get_context_die (tree);
3872 static void gen_namespace_die (tree, dw_die_ref);
3873 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3874 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3875 static dw_die_ref force_decl_die (tree);
3876 static dw_die_ref force_type_die (tree);
3877 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3878 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3879 static struct dwarf_file_data * lookup_filename (const char *);
3880 static void retry_incomplete_types (void);
3881 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3882 static void gen_generic_params_dies (tree);
3883 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3884 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3885 static void splice_child_die (dw_die_ref, dw_die_ref);
3886 static int file_info_cmp (const void *, const void *);
3887 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3888 const char *, var_loc_view, const char *);
3889 static void output_loc_list (dw_loc_list_ref);
3890 static char *gen_internal_sym (const char *);
3891 static bool want_pubnames (void);
3892
3893 static void prune_unmark_dies (dw_die_ref);
3894 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3895 static void prune_unused_types_mark (dw_die_ref, int);
3896 static void prune_unused_types_walk (dw_die_ref);
3897 static void prune_unused_types_walk_attribs (dw_die_ref);
3898 static void prune_unused_types_prune (dw_die_ref);
3899 static void prune_unused_types (void);
3900 static int maybe_emit_file (struct dwarf_file_data *fd);
3901 static inline const char *AT_vms_delta1 (dw_attr_node *);
3902 static inline const char *AT_vms_delta2 (dw_attr_node *);
3903 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3904 const char *, const char *);
3905 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3906 static void gen_remaining_tmpl_value_param_die_attribute (void);
3907 static bool generic_type_p (tree);
3908 static void schedule_generic_params_dies_gen (tree t);
3909 static void gen_scheduled_generic_parms_dies (void);
3910 static void resolve_variable_values (void);
3911
3912 static const char *comp_dir_string (void);
3913
3914 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3915
3916 /* enum for tracking thread-local variables whose address is really an offset
3917 relative to the TLS pointer, which will need link-time relocation, but will
3918 not need relocation by the DWARF consumer. */
3919
3920 enum dtprel_bool
3921 {
3922 dtprel_false = 0,
3923 dtprel_true = 1
3924 };
3925
3926 /* Return the operator to use for an address of a variable. For dtprel_true, we
3927 use DW_OP_const*. For regular variables, which need both link-time
3928 relocation and consumer-level relocation (e.g., to account for shared objects
3929 loaded at a random address), we use DW_OP_addr*. */
3930
3931 static inline enum dwarf_location_atom
3932 dw_addr_op (enum dtprel_bool dtprel)
3933 {
3934 if (dtprel == dtprel_true)
3935 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3936 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3937 else
3938 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3939 }
3940
3941 /* Return a pointer to a newly allocated address location description. If
3942 dwarf_split_debug_info is true, then record the address with the appropriate
3943 relocation. */
3944 static inline dw_loc_descr_ref
3945 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3946 {
3947 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3948
3949 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3950 ref->dw_loc_oprnd1.v.val_addr = addr;
3951 ref->dtprel = dtprel;
3952 if (dwarf_split_debug_info)
3953 ref->dw_loc_oprnd1.val_entry
3954 = add_addr_table_entry (addr,
3955 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3956 else
3957 ref->dw_loc_oprnd1.val_entry = NULL;
3958
3959 return ref;
3960 }
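
/* For illustration: for an ordinary variable whose address is the rtx ADDR,

     dw_loc_descr_ref ref = new_addr_loc_descr (addr, dtprel_false);

   builds a DW_OP_addr operand carrying a link-time relocation, or a
   DW_OP_addrx operand referencing a .debug_addr slot when split debug
   info is enabled.  For a thread-local variable whose "address" is really
   a DTP-relative offset, passing dtprel_true instead selects DW_OP_const4u,
   DW_OP_const8u or DW_OP_constx, since the consumer must not relocate
   that value. */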
3961
3962 /* Section names used to hold DWARF debugging information. */
3963
3964 #ifndef DEBUG_INFO_SECTION
3965 #define DEBUG_INFO_SECTION ".debug_info"
3966 #endif
3967 #ifndef DEBUG_DWO_INFO_SECTION
3968 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3969 #endif
3970 #ifndef DEBUG_LTO_INFO_SECTION
3971 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3972 #endif
3973 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3974 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3975 #endif
3976 #ifndef DEBUG_ABBREV_SECTION
3977 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3978 #endif
3979 #ifndef DEBUG_LTO_ABBREV_SECTION
3980 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3981 #endif
3982 #ifndef DEBUG_DWO_ABBREV_SECTION
3983 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3984 #endif
3985 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3986 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3987 #endif
3988 #ifndef DEBUG_ARANGES_SECTION
3989 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3990 #endif
3991 #ifndef DEBUG_ADDR_SECTION
3992 #define DEBUG_ADDR_SECTION ".debug_addr"
3993 #endif
3994 #ifndef DEBUG_MACINFO_SECTION
3995 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3996 #endif
3997 #ifndef DEBUG_LTO_MACINFO_SECTION
3998 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3999 #endif
4000 #ifndef DEBUG_DWO_MACINFO_SECTION
4001 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4002 #endif
4003 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4004 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4005 #endif
4006 #ifndef DEBUG_MACRO_SECTION
4007 #define DEBUG_MACRO_SECTION ".debug_macro"
4008 #endif
4009 #ifndef DEBUG_LTO_MACRO_SECTION
4010 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4011 #endif
4012 #ifndef DEBUG_DWO_MACRO_SECTION
4013 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4014 #endif
4015 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4016 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4017 #endif
4018 #ifndef DEBUG_LINE_SECTION
4019 #define DEBUG_LINE_SECTION ".debug_line"
4020 #endif
4021 #ifndef DEBUG_LTO_LINE_SECTION
4022 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4023 #endif
4024 #ifndef DEBUG_DWO_LINE_SECTION
4025 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4026 #endif
4027 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4028 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4029 #endif
4030 #ifndef DEBUG_LOC_SECTION
4031 #define DEBUG_LOC_SECTION ".debug_loc"
4032 #endif
4033 #ifndef DEBUG_DWO_LOC_SECTION
4034 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4035 #endif
4036 #ifndef DEBUG_LOCLISTS_SECTION
4037 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4038 #endif
4039 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4040 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4041 #endif
4042 #ifndef DEBUG_PUBNAMES_SECTION
4043 #define DEBUG_PUBNAMES_SECTION \
4044 ((debug_generate_pub_sections == 2) \
4045 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4046 #endif
4047 #ifndef DEBUG_PUBTYPES_SECTION
4048 #define DEBUG_PUBTYPES_SECTION \
4049 ((debug_generate_pub_sections == 2) \
4050 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4051 #endif
4052 #ifndef DEBUG_STR_OFFSETS_SECTION
4053 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4054 #endif
4055 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4056 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4057 #endif
4058 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4059 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4060 #endif
4061 #ifndef DEBUG_STR_SECTION
4062 #define DEBUG_STR_SECTION ".debug_str"
4063 #endif
4064 #ifndef DEBUG_LTO_STR_SECTION
4065 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4066 #endif
4067 #ifndef DEBUG_STR_DWO_SECTION
4068 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4069 #endif
4070 #ifndef DEBUG_LTO_STR_DWO_SECTION
4071 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4072 #endif
4073 #ifndef DEBUG_RANGES_SECTION
4074 #define DEBUG_RANGES_SECTION ".debug_ranges"
4075 #endif
4076 #ifndef DEBUG_RNGLISTS_SECTION
4077 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4078 #endif
4079 #ifndef DEBUG_LINE_STR_SECTION
4080 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4081 #endif
4082 #ifndef DEBUG_LTO_LINE_STR_SECTION
4083 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4084 #endif
4085
4086 /* Standard ELF section names for compiled code and data. */
4087 #ifndef TEXT_SECTION_NAME
4088 #define TEXT_SECTION_NAME ".text"
4089 #endif
4090
4091 /* Section flags for .debug_str section. */
4092 #define DEBUG_STR_SECTION_FLAGS \
4093 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4094 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4095 : SECTION_DEBUG)
4096
4097 /* Section flags for .debug_str.dwo section. */
4098 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4099
4100 /* Attribute used to refer to the macro section. */
4101 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4102 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
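
/* Informally, the choice above pairs up as follows:

     dwarf_version >= 5                  DW_AT_macros     (.debug_macro)
     dwarf_version < 5 && dwarf_strict   DW_AT_macro_info (.debug_macinfo)
     otherwise                           DW_AT_GNU_macros (GNU .debug_macro
                                                           extension)  */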
4103
4104 /* Labels we insert at the beginning of sections so we can reference them
4105 instead of the section names themselves. */
4106
4107 #ifndef TEXT_SECTION_LABEL
4108 #define TEXT_SECTION_LABEL "Ltext"
4109 #endif
4110 #ifndef COLD_TEXT_SECTION_LABEL
4111 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4112 #endif
4113 #ifndef DEBUG_LINE_SECTION_LABEL
4114 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4115 #endif
4116 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4117 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4118 #endif
4119 #ifndef DEBUG_INFO_SECTION_LABEL
4120 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4121 #endif
4122 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4123 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4124 #endif
4125 #ifndef DEBUG_ABBREV_SECTION_LABEL
4126 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4127 #endif
4128 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4129 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4130 #endif
4131 #ifndef DEBUG_ADDR_SECTION_LABEL
4132 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4133 #endif
4134 #ifndef DEBUG_LOC_SECTION_LABEL
4135 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4136 #endif
4137 #ifndef DEBUG_RANGES_SECTION_LABEL
4138 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4139 #endif
4140 #ifndef DEBUG_MACINFO_SECTION_LABEL
4141 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4142 #endif
4143 #ifndef DEBUG_MACRO_SECTION_LABEL
4144 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4145 #endif
4146 #define SKELETON_COMP_DIE_ABBREV 1
4147 #define SKELETON_TYPE_DIE_ABBREV 2
4148
4149 /* Definitions of defaults for formats and names of various special
4150 (artificial) labels which may be generated within this file (when the -g
4151 option is used and DWARF2_DEBUGGING_INFO is in effect).
4152 If necessary, these may be overridden from within the tm.h file, but
4153 typically, overriding these defaults is unnecessary. */
4154
4155 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170
4171 #ifndef TEXT_END_LABEL
4172 #define TEXT_END_LABEL "Letext"
4173 #endif
4174 #ifndef COLD_END_LABEL
4175 #define COLD_END_LABEL "Letext_cold"
4176 #endif
4177 #ifndef BLOCK_BEGIN_LABEL
4178 #define BLOCK_BEGIN_LABEL "LBB"
4179 #endif
4180 #ifndef BLOCK_INLINE_ENTRY_LABEL
4181 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4182 #endif
4183 #ifndef BLOCK_END_LABEL
4184 #define BLOCK_END_LABEL "LBE"
4185 #endif
4186 #ifndef LINE_CODE_LABEL
4187 #define LINE_CODE_LABEL "LM"
4188 #endif
4189
4190 \f
4191 /* Return the root of the DIEs built for the current compilation unit. */
4192 static dw_die_ref
4193 comp_unit_die (void)
4194 {
4195 if (!single_comp_unit_die)
4196 single_comp_unit_die = gen_compile_unit_die (NULL);
4197 return single_comp_unit_die;
4198 }
4199
4200 /* We allow a language front-end to designate a function that is to be
4201 called to "demangle" any name before it is put into a DIE. */
4202
4203 static const char *(*demangle_name_func) (const char *);
4204
4205 void
4206 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4207 {
4208 demangle_name_func = func;
4209 }
4210
4211 /* Test if rtl node points to a pseudo register. */
4212
4213 static inline int
4214 is_pseudo_reg (const_rtx rtl)
4215 {
4216 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4217 || (GET_CODE (rtl) == SUBREG
4218 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4219 }
4220
4221 /* Return a reference to a type, with its const and volatile qualifiers
4222 removed. */
4223
4224 static inline tree
4225 type_main_variant (tree type)
4226 {
4227 type = TYPE_MAIN_VARIANT (type);
4228
4229 /* ??? There really should be only one main variant among any group of
4230 variants of a given type (and all of the MAIN_VARIANT values for all
4231 members of the group should point to that one type) but sometimes the C
4232 front-end messes this up for array types, so we work around that bug
4233 here. */
4234 if (TREE_CODE (type) == ARRAY_TYPE)
4235 while (type != TYPE_MAIN_VARIANT (type))
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 return type;
4239 }
4240
4241 /* Return nonzero if the given type node represents a tagged type. */
4242
4243 static inline int
4244 is_tagged_type (const_tree type)
4245 {
4246 enum tree_code code = TREE_CODE (type);
4247
4248 return (code == RECORD_TYPE || code == UNION_TYPE
4249 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4250 }
4251
4252 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4253
4254 static void
4255 get_ref_die_offset_label (char *label, dw_die_ref ref)
4256 {
4257 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4258 }
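
/* So for a DIE at offset 0x2d this produces a label expression along the
   lines of "Ldebug_info0+45"; the exact spelling of the section label is
   target-dependent, since it comes from ASM_GENERATE_INTERNAL_LABEL. */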
4259
4260 /* Return die_offset of a DIE reference to a base type. */
4261
4262 static unsigned long int
4263 get_base_type_offset (dw_die_ref ref)
4264 {
4265 if (ref->die_offset)
4266 return ref->die_offset;
4267 if (comp_unit_die ()->die_abbrev)
4268 {
4269 calc_base_type_die_sizes ();
4270 gcc_assert (ref->die_offset);
4271 }
4272 return ref->die_offset;
4273 }
4274
4275 /* Return die_offset of a DIE reference other than base type. */
4276
4277 static unsigned long int
4278 get_ref_die_offset (dw_die_ref ref)
4279 {
4280 gcc_assert (ref->die_offset);
4281 return ref->die_offset;
4282 }
4283
4284 /* Convert a DIE tag into its string name. */
4285
4286 static const char *
4287 dwarf_tag_name (unsigned int tag)
4288 {
4289 const char *name = get_DW_TAG_name (tag);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_TAG_<unknown>";
4295 }
4296
4297 /* Convert a DWARF attribute code into its string name. */
4298
4299 static const char *
4300 dwarf_attr_name (unsigned int attr)
4301 {
4302 const char *name;
4303
4304 switch (attr)
4305 {
4306 #if VMS_DEBUGGING_INFO
4307 case DW_AT_HP_prologue:
4308 return "DW_AT_HP_prologue";
4309 #else
4310 case DW_AT_MIPS_loop_unroll_factor:
4311 return "DW_AT_MIPS_loop_unroll_factor";
4312 #endif
4313
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_epilogue:
4316 return "DW_AT_HP_epilogue";
4317 #else
4318 case DW_AT_MIPS_stride:
4319 return "DW_AT_MIPS_stride";
4320 #endif
4321 }
4322
4323 name = get_DW_AT_name (attr);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_AT_<unknown>";
4329 }
4330
4331 /* Convert a DWARF value form code into its string name. */
4332
4333 static const char *
4334 dwarf_form_name (unsigned int form)
4335 {
4336 const char *name = get_DW_FORM_name (form);
4337
4338 if (name != NULL)
4339 return name;
4340
4341 return "DW_FORM_<unknown>";
4342 }
4343 \f
4344 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4345 instance of an inlined instance of a decl which is local to an inline
4346 function, so we have to trace all of the way back through the origin chain
4347 to find out what sort of node actually served as the original seed for the
4348 given block. */
4349
4350 static tree
4351 decl_ultimate_origin (const_tree decl)
4352 {
4353 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4354 return NULL_TREE;
4355
4356 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4357 we're trying to output the abstract instance of this function. */
4358 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4359 return NULL_TREE;
4360
4361 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4362 most distant ancestor, this should never happen. */
4363 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4364
4365 return DECL_ABSTRACT_ORIGIN (decl);
4366 }
4367
4368 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4369 of a virtual function may refer to a base class, so we check the 'this'
4370 parameter. */
4371
4372 static tree
4373 decl_class_context (tree decl)
4374 {
4375 tree context = NULL_TREE;
4376
4377 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4378 context = DECL_CONTEXT (decl);
4379 else
4380 context = TYPE_MAIN_VARIANT
4381 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4382
4383 if (context && !TYPE_P (context))
4384 context = NULL_TREE;
4385
4386 return context;
4387 }
4388 \f
4389 /* Add an attribute/value pair to a DIE. */
4390
4391 static inline void
4392 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4393 {
4394 /* Maybe this should be an assert? */
4395 if (die == NULL)
4396 return;
4397
4398 if (flag_checking)
4399 {
4400 /* Check we do not add duplicate attrs. Can't use get_AT here
4401 because that recurses to the specification/abstract origin DIE. */
4402 dw_attr_node *a;
4403 unsigned ix;
4404 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4405 gcc_assert (a->dw_attr != attr->dw_attr);
4406 }
4407
4408 vec_safe_reserve (die->die_attr, 1);
4409 vec_safe_push (die->die_attr, *attr);
4410 }
4411
4412 static inline enum dw_val_class
4413 AT_class (dw_attr_node *a)
4414 {
4415 return a->dw_attr_val.val_class;
4416 }
4417
4418 /* Return the index for any attribute that will be referenced with a
4419 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4420 indices are stored in dw_attr_val.v.val_str for reference counting
4421 pruning. */
4422
4423 static inline unsigned int
4424 AT_index (dw_attr_node *a)
4425 {
4426 if (AT_class (a) == dw_val_class_str)
4427 return a->dw_attr_val.v.val_str->index;
4428 else if (a->dw_attr_val.val_entry != NULL)
4429 return a->dw_attr_val.val_entry->index;
4430 return NOT_INDEXED;
4431 }
4432
4433 /* Add a flag value attribute to a DIE. */
4434
4435 static inline void
4436 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4437 {
4438 dw_attr_node attr;
4439
4440 attr.dw_attr = attr_kind;
4441 attr.dw_attr_val.val_class = dw_val_class_flag;
4442 attr.dw_attr_val.val_entry = NULL;
4443 attr.dw_attr_val.v.val_flag = flag;
4444 add_dwarf_attr (die, &attr);
4445 }
4446
4447 static inline unsigned
4448 AT_flag (dw_attr_node *a)
4449 {
4450 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4451 return a->dw_attr_val.v.val_flag;
4452 }
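
/* Typical usage of the pair above, e.g. to mark a declaration as
   externally visible:

     add_AT_flag (die, DW_AT_external, 1);

   and AT_flag on the attribute node fetched back with get_AT recovers
   the value. */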
4453
4454 /* Add a signed integer attribute value to a DIE. */
4455
4456 static inline void
4457 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_int = int_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline HOST_WIDE_INT
4469 AT_int (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_const
4472 || AT_class (a) == dw_val_class_const_implicit));
4473 return a->dw_attr_val.v.val_int;
4474 }
4475
4476 /* Add an unsigned integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 unsigned HOST_WIDE_INT unsigned_val)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4488 add_dwarf_attr (die, &attr);
4489 }
4490
4491 static inline unsigned HOST_WIDE_INT
4492 AT_unsigned (dw_attr_node *a)
4493 {
4494 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4495 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4496 return a->dw_attr_val.v.val_unsigned;
4497 }
4498
4499 /* Add an unsigned wide integer attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 const wide_int& w)
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4509 attr.dw_attr_val.val_entry = NULL;
4510 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4511 *attr.dw_attr_val.v.val_wide = w;
4512 add_dwarf_attr (die, &attr);
4513 }
4514
4515 /* Add an unsigned double integer attribute value to a DIE. */
4516
4517 static inline void
4518 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4519 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4520 {
4521 dw_attr_node attr;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_const_double;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_double.high = high;
4527 attr.dw_attr_val.v.val_double.low = low;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Add a vector (block of bytes) attribute value, e.g. a floating-point constant, to a DIE. */
4532
4533 static inline void
4534 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4535 unsigned int length, unsigned int elt_size, unsigned char *array)
4536 {
4537 dw_attr_node attr;
4538
4539 attr.dw_attr = attr_kind;
4540 attr.dw_attr_val.val_class = dw_val_class_vec;
4541 attr.dw_attr_val.val_entry = NULL;
4542 attr.dw_attr_val.v.val_vec.length = length;
4543 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4544 attr.dw_attr_val.v.val_vec.array = array;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Add an 8-byte data attribute value to a DIE. */
4549
4550 static inline void
4551 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4552 unsigned char data8[8])
4553 {
4554 dw_attr_node attr;
4555
4556 attr.dw_attr = attr_kind;
4557 attr.dw_attr_val.val_class = dw_val_class_data8;
4558 attr.dw_attr_val.val_entry = NULL;
4559 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4560 add_dwarf_attr (die, &attr);
4561 }
4562
4563 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4564 dwarf_split_debug_info, address attributes in dies destined for the
4565 final executable have force_direct set to avoid using indexed
4566 references. */
4567
4568 static inline void
4569 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4570 bool force_direct)
4571 {
4572 dw_attr_node attr;
4573 char * lbl_id;
4574
4575 lbl_id = xstrdup (lbl_low);
4576 attr.dw_attr = DW_AT_low_pc;
4577 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4578 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4579 if (dwarf_split_debug_info && !force_direct)
4580 attr.dw_attr_val.val_entry
4581 = add_addr_table_entry (lbl_id, ate_kind_label);
4582 else
4583 attr.dw_attr_val.val_entry = NULL;
4584 add_dwarf_attr (die, &attr);
4585
4586 attr.dw_attr = DW_AT_high_pc;
4587 if (dwarf_version < 4)
4588 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4589 else
4590 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4591 lbl_id = xstrdup (lbl_high);
4592 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4593 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4594 && dwarf_split_debug_info && !force_direct)
4595 attr.dw_attr_val.val_entry
4596 = add_addr_table_entry (lbl_id, ate_kind_label);
4597 else
4598 attr.dw_attr_val.val_entry = NULL;
4599 add_dwarf_attr (die, &attr);
4600 }
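
/* Note the asymmetry above: DW_AT_low_pc is always a label (address)
   value, while for DWARF 4 and later DW_AT_high_pc is given class
   dw_val_class_high_pc so that it can later be emitted as a constant
   offset from the low pc instead of a second relocated address. */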
4601
4602 /* Hash and equality functions for debug_str_hash. */
4603
4604 hashval_t
4605 indirect_string_hasher::hash (indirect_string_node *x)
4606 {
4607 return htab_hash_string (x->str);
4608 }
4609
4610 bool
4611 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4612 {
4613 return strcmp (x1->str, x2) == 0;
4614 }
4615
4616 /* Add STR to the given string hash table. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string_in_table (const char *str,
4620 hash_table<indirect_string_hasher> *table)
4621 {
4622 struct indirect_string_node *node;
4623
4624 indirect_string_node **slot
4625 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4626 if (*slot == NULL)
4627 {
4628 node = ggc_cleared_alloc<indirect_string_node> ();
4629 node->str = ggc_strdup (str);
4630 *slot = node;
4631 }
4632 else
4633 node = *slot;
4634
4635 node->refcount++;
4636 return node;
4637 }
4638
4639 /* Add STR to the indirect string hash table. */
4640
4641 static struct indirect_string_node *
4642 find_AT_string (const char *str)
4643 {
4644 if (! debug_str_hash)
4645 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4646
4647 return find_AT_string_in_table (str, debug_str_hash);
4648 }
4649
4650 /* Add a string attribute value to a DIE. */
4651
4652 static inline void
4653 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4654 {
4655 dw_attr_node attr;
4656 struct indirect_string_node *node;
4657
4658 node = find_AT_string (str);
4659
4660 attr.dw_attr = attr_kind;
4661 attr.dw_attr_val.val_class = dw_val_class_str;
4662 attr.dw_attr_val.val_entry = NULL;
4663 attr.dw_attr_val.v.val_str = node;
4664 add_dwarf_attr (die, &attr);
4665 }
4666
4667 static inline const char *
4668 AT_string (dw_attr_node *a)
4669 {
4670 gcc_assert (a && AT_class (a) == dw_val_class_str);
4671 return a->dw_attr_val.v.val_str->str;
4672 }
4673
4674 /* Call this function directly to bypass AT_string_form's logic to put
4675 the string inline in the die. */
4676
4677 static void
4678 set_indirect_string (struct indirect_string_node *node)
4679 {
4680 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4681 /* Already indirect is a no op. */
4682 if (node->form == DW_FORM_strp
4683 || node->form == DW_FORM_line_strp
4684 || node->form == dwarf_FORM (DW_FORM_strx))
4685 {
4686 gcc_assert (node->label);
4687 return;
4688 }
4689 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4690 ++dw2_string_counter;
4691 node->label = xstrdup (label);
4692
4693 if (!dwarf_split_debug_info)
4694 {
4695 node->form = DW_FORM_strp;
4696 node->index = NOT_INDEXED;
4697 }
4698 else
4699 {
4700 node->form = dwarf_FORM (DW_FORM_strx);
4701 node->index = NO_INDEX_ASSIGNED;
4702 }
4703 }
4704
4705 /* A helper function for dwarf2out_finish, called to reset indirect
4706 string decisions done for early LTO dwarf output before fat object
4707 dwarf output. */
4708
4709 int
4710 reset_indirect_string (indirect_string_node **h, void *)
4711 {
4712 struct indirect_string_node *node = *h;
4713 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4714 {
4715 free (node->label);
4716 node->label = NULL;
4717 node->form = (dwarf_form) 0;
4718 node->index = 0;
4719 }
4720 return 1;
4721 }
4722
4723 /* Find out whether a string should be output inline in DIE
4724 or out-of-line in .debug_str section. */
4725
4726 static enum dwarf_form
4727 find_string_form (struct indirect_string_node *node)
4728 {
4729 unsigned int len;
4730
4731 if (node->form)
4732 return node->form;
4733
4734 len = strlen (node->str) + 1;
4735
4736 /* If the string is no longer than the size of a reference to it, it is
4737 always better to put it inline. */
4738 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4739 return node->form = DW_FORM_string;
4740
4741 /* If we cannot expect the linker to merge strings in the .debug_str
4742 section, only put the string into .debug_str if doing so pays off
4743 even within this single module. */
4744 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4745 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4746 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4747 return node->form = DW_FORM_string;
4748
4749 set_indirect_string (node);
4750
4751 return node->form;
4752 }
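
/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE is 4:
   the 3-byte string "ab" always stays inline as DW_FORM_string, because an
   offset into .debug_str would be at least as large.  A 40-byte string
   referenced from four DIEs costs 4 * 40 = 160 bytes inline but roughly
   40 + 4 * 4 = 56 bytes via DW_FORM_strp, so it is made indirect (provided
   the target supports indirect strings at all). */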
4753
4754 /* Find out whether the string referenced from the attribute should be
4755 output inline in DIE or out-of-line in .debug_str section. */
4756
4757 static enum dwarf_form
4758 AT_string_form (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_str);
4761 return find_string_form (a->dw_attr_val.v.val_str);
4762 }
4763
4764 /* Add a DIE reference attribute value to a DIE. */
4765
4766 static inline void
4767 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4768 {
4769 dw_attr_node attr;
4770 gcc_checking_assert (targ_die != NULL);
4771
4772 /* With LTO we can end up trying to reference something we didn't create
4773 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4774 if (targ_die == NULL)
4775 return;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4781 attr.dw_attr_val.v.val_die_ref.external = 0;
4782 add_dwarf_attr (die, &attr);
4783 }
4784
4785 /* Change DIE reference REF to point to NEW_DIE instead. */
4786
4787 static inline void
4788 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4789 {
4790 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4791 ref->dw_attr_val.v.val_die_ref.die = new_die;
4792 ref->dw_attr_val.v.val_die_ref.external = 0;
4793 }
4794
4795 /* Add an AT_specification attribute to a DIE, and also make the back
4796 pointer from the specification to the definition. */
4797
4798 static inline void
4799 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4800 {
4801 add_AT_die_ref (die, DW_AT_specification, targ_die);
4802 gcc_assert (!targ_die->die_definition);
4803 targ_die->die_definition = die;
4804 }
4805
4806 static inline dw_die_ref
4807 AT_ref (dw_attr_node *a)
4808 {
4809 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4810 return a->dw_attr_val.v.val_die_ref.die;
4811 }
4812
4813 static inline int
4814 AT_ref_external (dw_attr_node *a)
4815 {
4816 if (a && AT_class (a) == dw_val_class_die_ref)
4817 return a->dw_attr_val.v.val_die_ref.external;
4818
4819 return 0;
4820 }
4821
4822 static inline void
4823 set_AT_ref_external (dw_attr_node *a, int i)
4824 {
4825 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4826 a->dw_attr_val.v.val_die_ref.external = i;
4827 }
4828
4829 /* Add an FDE reference attribute value to a DIE. */
4830
4831 static inline void
4832 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4833 {
4834 dw_attr_node attr;
4835
4836 attr.dw_attr = attr_kind;
4837 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4838 attr.dw_attr_val.val_entry = NULL;
4839 attr.dw_attr_val.v.val_fde_index = targ_fde;
4840 add_dwarf_attr (die, &attr);
4841 }
4842
4843 /* Add a location description attribute value to a DIE. */
4844
4845 static inline void
4846 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4847 {
4848 dw_attr_node attr;
4849
4850 attr.dw_attr = attr_kind;
4851 attr.dw_attr_val.val_class = dw_val_class_loc;
4852 attr.dw_attr_val.val_entry = NULL;
4853 attr.dw_attr_val.v.val_loc = loc;
4854 add_dwarf_attr (die, &attr);
4855 }
4856
4857 static inline dw_loc_descr_ref
4858 AT_loc (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4861 return a->dw_attr_val.v.val_loc;
4862 }
4863
4864 static inline void
4865 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4866 {
4867 dw_attr_node attr;
4868
4869 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4870 return;
4871
4872 attr.dw_attr = attr_kind;
4873 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4874 attr.dw_attr_val.val_entry = NULL;
4875 attr.dw_attr_val.v.val_loc_list = loc_list;
4876 add_dwarf_attr (die, &attr);
4877 have_location_lists = true;
4878 }
4879
4880 static inline dw_loc_list_ref
4881 AT_loc_list (dw_attr_node *a)
4882 {
4883 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4884 return a->dw_attr_val.v.val_loc_list;
4885 }
4886
4887 /* Add a view list attribute to DIE. It must have a DW_AT_location
4888 attribute, because the view list complements the location list. */
4889
4890 static inline void
4891 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4892 {
4893 dw_attr_node attr;
4894
4895 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4896 return;
4897
4898 attr.dw_attr = attr_kind;
4899 attr.dw_attr_val.val_class = dw_val_class_view_list;
4900 attr.dw_attr_val.val_entry = NULL;
4901 attr.dw_attr_val.v.val_view_list = die;
4902 add_dwarf_attr (die, &attr);
4903 gcc_checking_assert (get_AT (die, DW_AT_location));
4904 gcc_assert (have_location_lists);
4905 }
4906
4907 /* Return a pointer to the location list referenced by the attribute.
4908 If the named attribute is a view list, look up the corresponding
4909 DW_AT_location attribute and return its location list. */
4910
4911 static inline dw_loc_list_ref *
4912 AT_loc_list_ptr (dw_attr_node *a)
4913 {
4914 gcc_assert (a);
4915 switch (AT_class (a))
4916 {
4917 case dw_val_class_loc_list:
4918 return &a->dw_attr_val.v.val_loc_list;
4919 case dw_val_class_view_list:
4920 {
4921 dw_attr_node *l;
4922 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4923 if (!l)
4924 return NULL;
4925 gcc_checking_assert (l + 1 == a);
4926 return AT_loc_list_ptr (l);
4927 }
4928 default:
4929 gcc_unreachable ();
4930 }
4931 }
4932
4933 /* Return the location attribute value associated with a view list
4934 attribute value. */
4935
4936 static inline dw_val_node *
4937 view_list_to_loc_list_val_node (dw_val_node *val)
4938 {
4939 gcc_assert (val->val_class == dw_val_class_view_list);
4940 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4941 if (!loc)
4942 return NULL;
4943 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4944 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4945 return &loc->dw_attr_val;
4946 }
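
/* The gcc_checking_asserts in the two functions above rely on an ordering
   invariant: a view-list attribute is added immediately after the
   DW_AT_location attribute it complements, so the location can be
   recovered from the view list simply by looking at the adjacent slot in
   the DIE's attribute vector (hence the "l + 1 == a" style checks). */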
4947
4948 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4949 {
4950 static hashval_t hash (addr_table_entry *);
4951 static bool equal (addr_table_entry *, addr_table_entry *);
4952 };
4953
4954 /* Table of entries into the .debug_addr section. */
4955
4956 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4957
4958 /* Hash an address_table_entry. */
4959
4960 hashval_t
4961 addr_hasher::hash (addr_table_entry *a)
4962 {
4963 inchash::hash hstate;
4964 switch (a->kind)
4965 {
4966 case ate_kind_rtx:
4967 hstate.add_int (0);
4968 break;
4969 case ate_kind_rtx_dtprel:
4970 hstate.add_int (1);
4971 break;
4972 case ate_kind_label:
4973 return htab_hash_string (a->addr.label);
4974 default:
4975 gcc_unreachable ();
4976 }
4977 inchash::add_rtx (a->addr.rtl, hstate);
4978 return hstate.end ();
4979 }
4980
4981 /* Determine equality for two address_table_entries. */
4982
4983 bool
4984 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4985 {
4986 if (a1->kind != a2->kind)
4987 return 0;
4988 switch (a1->kind)
4989 {
4990 case ate_kind_rtx:
4991 case ate_kind_rtx_dtprel:
4992 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4993 case ate_kind_label:
4994 return strcmp (a1->addr.label, a2->addr.label) == 0;
4995 default:
4996 gcc_unreachable ();
4997 }
4998 }
4999
5000 /* Initialize an addr_table_entry. */
5001
5002 void
5003 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5004 {
5005 e->kind = kind;
5006 switch (kind)
5007 {
5008 case ate_kind_rtx:
5009 case ate_kind_rtx_dtprel:
5010 e->addr.rtl = (rtx) addr;
5011 break;
5012 case ate_kind_label:
5013 e->addr.label = (char *) addr;
5014 break;
5015 }
5016 e->refcount = 0;
5017 e->index = NO_INDEX_ASSIGNED;
5018 }
5019
5020 /* Look up or create the address table entry for ADDR of kind KIND and
5021 bump its reference count. Defer setting an index until output time. */
5022
5023 static addr_table_entry *
5024 add_addr_table_entry (void *addr, enum ate_kind kind)
5025 {
5026 addr_table_entry *node;
5027 addr_table_entry finder;
5028
5029 gcc_assert (dwarf_split_debug_info);
5030 if (! addr_index_table)
5031 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5032 init_addr_table_entry (&finder, kind, addr);
5033 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5034
5035 if (*slot == HTAB_EMPTY_ENTRY)
5036 {
5037 node = ggc_cleared_alloc<addr_table_entry> ();
5038 init_addr_table_entry (node, kind, addr);
5039 *slot = node;
5040 }
5041 else
5042 node = *slot;
5043
5044 node->refcount++;
5045 return node;
5046 }
5047
5048 /* Remove an entry from the addr table by decrementing its refcount.
5049 Strictly, decrementing the refcount would be enough, but the
5050 assertion that the entry is actually in the table has found
5051 bugs. */
5052
5053 static void
5054 remove_addr_table_entry (addr_table_entry *entry)
5055 {
5056 gcc_assert (dwarf_split_debug_info && addr_index_table);
5057 /* After an index is assigned, the table is frozen. */
5058 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5059 entry->refcount--;
5060 }
5061
5062 /* Given a location list, remove all addresses it refers to from the
5063 address_table. */
5064
5065 static void
5066 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5067 {
5068 for (; descr; descr = descr->dw_loc_next)
5069 if (descr->dw_loc_oprnd1.val_entry != NULL)
5070 {
5071 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5072 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5073 }
5074 }
5075
5076 /* A helper function for dwarf2out_finish called through
5077 htab_traverse. Assign an addr_table_entry its index. All entries
5078 must be collected into the table when this function is called,
5079 because the indexing code relies on htab_traverse to traverse nodes
5080 in the same order for each run. */
5081
5082 int
5083 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5084 {
5085 addr_table_entry *node = *h;
5086
5087 /* Don't index unreferenced nodes. */
5088 if (node->refcount == 0)
5089 return 1;
5090
5091 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5092 node->index = *index;
5093 *index += 1;
5094
5095 return 1;
5096 }
5097
5098 /* Add an address constant attribute value to a DIE. When using
5099 dwarf_split_debug_info, address attributes in dies destined for the
5100 final executable should be direct references--setting the parameter
5101 force_direct ensures this behavior. */
5102
5103 static inline void
5104 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5105 bool force_direct)
5106 {
5107 dw_attr_node attr;
5108
5109 attr.dw_attr = attr_kind;
5110 attr.dw_attr_val.val_class = dw_val_class_addr;
5111 attr.dw_attr_val.v.val_addr = addr;
5112 if (dwarf_split_debug_info && !force_direct)
5113 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5114 else
5115 attr.dw_attr_val.val_entry = NULL;
5116 add_dwarf_attr (die, &attr);
5117 }
5118
5121 5119 /* Get the RTX from an address DIE attribute. */
5120
5121 static inline rtx
5122 AT_addr (dw_attr_node *a)
5123 {
5124 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5125 return a->dw_attr_val.v.val_addr;
5126 }
5127
5128 /* Add a file attribute value to a DIE. */
5129
5130 static inline void
5131 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5132 struct dwarf_file_data *fd)
5133 {
5134 dw_attr_node attr;
5135
5136 attr.dw_attr = attr_kind;
5137 attr.dw_attr_val.val_class = dw_val_class_file;
5138 attr.dw_attr_val.val_entry = NULL;
5139 attr.dw_attr_val.v.val_file = fd;
5140 add_dwarf_attr (die, &attr);
5141 }
5142
5143 /* Get the dwarf_file_data from a file DIE attribute. */
5144
5145 static inline struct dwarf_file_data *
5146 AT_file (dw_attr_node *a)
5147 {
5148 gcc_assert (a && (AT_class (a) == dw_val_class_file
5149 || AT_class (a) == dw_val_class_file_implicit));
5150 return a->dw_attr_val.v.val_file;
5151 }
5152
5153 /* Add a vms delta attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl1, const char *lbl2)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5165 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a symbolic view identifier attribute value to a DIE. */
5170
5171 static inline void
5172 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5173 const char *view_label)
5174 {
5175 dw_attr_node attr;
5176
5177 attr.dw_attr = attr_kind;
5178 attr.dw_attr_val.val_class = dw_val_class_symview;
5179 attr.dw_attr_val.val_entry = NULL;
5180 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5181 add_dwarf_attr (die, &attr);
5182 }
5183
5184 /* Add a label identifier attribute value to a DIE. */
5185
5186 static inline void
5187 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5188 const char *lbl_id)
5189 {
5190 dw_attr_node attr;
5191
5192 attr.dw_attr = attr_kind;
5193 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5194 attr.dw_attr_val.val_entry = NULL;
5195 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5196 if (dwarf_split_debug_info)
5197 attr.dw_attr_val.val_entry
5198 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5199 ate_kind_label);
5200 add_dwarf_attr (die, &attr);
5201 }
5202
5203 /* Add a section offset attribute value to a DIE, an offset into the
5204 debug_line section. */
5205
5206 static inline void
5207 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5208 const char *label)
5209 {
5210 dw_attr_node attr;
5211
5212 attr.dw_attr = attr_kind;
5213 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5214 attr.dw_attr_val.val_entry = NULL;
5215 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5216 add_dwarf_attr (die, &attr);
5217 }
5218
5219 /* Add a section offset attribute value to a DIE, an offset into the
5220 debug_loclists section. */
5221
5222 static inline void
5223 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5224 const char *label)
5225 {
5226 dw_attr_node attr;
5227
5228 attr.dw_attr = attr_kind;
5229 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5230 attr.dw_attr_val.val_entry = NULL;
5231 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5232 add_dwarf_attr (die, &attr);
5233 }
5234
5235 /* Add a section offset attribute value to a DIE, an offset into the
5236 debug_macinfo section. */
5237
5238 static inline void
5239 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 const char *label)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_macptr;
5246 attr.dw_attr_val.val_entry = NULL;
5247 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5248 add_dwarf_attr (die, &attr);
5249 }
5250
5251 /* Add an offset attribute value to a DIE. */
5252
5253 static inline void
5254 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5255 unsigned HOST_WIDE_INT offset)
5256 {
5257 dw_attr_node attr;
5258
5259 attr.dw_attr = attr_kind;
5260 attr.dw_attr_val.val_class = dw_val_class_offset;
5261 attr.dw_attr_val.val_entry = NULL;
5262 attr.dw_attr_val.v.val_offset = offset;
5263 add_dwarf_attr (die, &attr);
5264 }
5265
5266 /* Add a range_list attribute value to a DIE. When using
5267 dwarf_split_debug_info, address attributes in dies destined for the
5268 final executable should be direct references--setting the parameter
5269 force_direct ensures this behavior. */
5270
5271 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5272 #define RELOCATED_OFFSET (NULL)
5273
5274 static void
5275 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5276 long unsigned int offset, bool force_direct)
5277 {
5278 dw_attr_node attr;
5279
5280 attr.dw_attr = attr_kind;
5281 attr.dw_attr_val.val_class = dw_val_class_range_list;
5282 /* For the range_list attribute, use val_entry to store whether the
5283 offset should follow split-debug-info or normal semantics. This
5284 value is read in output_range_list_offset. */
5285 if (dwarf_split_debug_info && !force_direct)
5286 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5287 else
5288 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5289 attr.dw_attr_val.v.val_offset = offset;
5290 add_dwarf_attr (die, &attr);
5291 }
5292
5293 /* Return the start label of a delta attribute. */
5294
5295 static inline const char *
5296 AT_vms_delta1 (dw_attr_node *a)
5297 {
5298 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5299 return a->dw_attr_val.v.val_vms_delta.lbl1;
5300 }
5301
5302 /* Return the end label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta2 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl2;
5309 }
5310
5311 static inline const char *
5312 AT_lbl (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5315 || AT_class (a) == dw_val_class_lineptr
5316 || AT_class (a) == dw_val_class_macptr
5317 || AT_class (a) == dw_val_class_loclistsptr
5318 || AT_class (a) == dw_val_class_high_pc));
5319 return a->dw_attr_val.v.val_lbl_id;
5320 }
5321
5322 /* Get the attribute of type attr_kind. */
5323
5324 static dw_attr_node *
5325 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5326 {
5327 dw_attr_node *a;
5328 unsigned ix;
5329 dw_die_ref spec = NULL;
5330
5331 if (! die)
5332 return NULL;
5333
5334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5335 if (a->dw_attr == attr_kind)
5336 return a;
5337 else if (a->dw_attr == DW_AT_specification
5338 || a->dw_attr == DW_AT_abstract_origin)
5339 spec = AT_ref (a);
5340
5341 if (spec)
5342 return get_AT (spec, attr_kind);
5343
5344 return NULL;
5345 }
5346
5347 /* Returns the parent of the declaration of DIE. */
5348
5349 static dw_die_ref
5350 get_die_parent (dw_die_ref die)
5351 {
5352 dw_die_ref t;
5353
5354 if (!die)
5355 return NULL;
5356
5357 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5358 || (t = get_AT_ref (die, DW_AT_specification)))
5359 die = t;
5360
5361 return die->die_parent;
5362 }
5363
5364 /* Return the "low pc" attribute value, typically associated with a subprogram
5365 DIE. Return null if the "low pc" attribute is either not present, or if it
5366 cannot be represented as an assembler label identifier. */
5367
5368 static inline const char *
5369 get_AT_low_pc (dw_die_ref die)
5370 {
5371 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5372
5373 return a ? AT_lbl (a) : NULL;
5374 }
5375
5376 /* Return the "high pc" attribute value, typically associated with a subprogram
5377 DIE. Return null if the "high pc" attribute is either not present, or if it
5378 cannot be represented as an assembler label identifier. */
5379
5380 static inline const char *
5381 get_AT_hi_pc (dw_die_ref die)
5382 {
5383 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5384
5385 return a ? AT_lbl (a) : NULL;
5386 }
5387
5388 /* Return the value of the string attribute designated by ATTR_KIND, or
5389 NULL if it is not present. */
5390
5391 static inline const char *
5392 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5393 {
5394 dw_attr_node *a = get_AT (die, attr_kind);
5395
5396 return a ? AT_string (a) : NULL;
5397 }
5398
5401 5399 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5402 5400 if it is not present. */
5401
5402 static inline int
5403 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5404 {
5405 dw_attr_node *a = get_AT (die, attr_kind);
5406
5407 return a ? AT_flag (a) : 0;
5408 }
5409
5410 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5411 if it is not present. */
5412
5413 static inline unsigned
5414 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5415 {
5416 dw_attr_node *a = get_AT (die, attr_kind);
5417
5418 return a ? AT_unsigned (a) : 0;
5419 }
5420
5421 static inline dw_die_ref
5422 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5423 {
5424 dw_attr_node *a = get_AT (die, attr_kind);
5425
5426 return a ? AT_ref (a) : NULL;
5427 }
5428
5429 static inline struct dwarf_file_data *
5430 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5431 {
5432 dw_attr_node *a = get_AT (die, attr_kind);
5433
5434 return a ? AT_file (a) : NULL;
5435 }
5436
5437 /* Return TRUE if the language is C++. */
5438
5439 static inline bool
5440 is_cxx (void)
5441 {
5442 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5443
5444 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5445 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5446 }
5447
5448 /* Return TRUE if DECL was created by the C++ frontend. */
5449
5450 static bool
5451 is_cxx (const_tree decl)
5452 {
5453 if (in_lto_p)
5454 {
5455 const_tree context = get_ultimate_context (decl);
5456 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5457 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5458 }
5459 return is_cxx ();
5460 }
5461
5462 /* Return TRUE if the language is Fortran. */
5463
5464 static inline bool
5465 is_fortran (void)
5466 {
5467 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5468
5469 return (lang == DW_LANG_Fortran77
5470 || lang == DW_LANG_Fortran90
5471 || lang == DW_LANG_Fortran95
5472 || lang == DW_LANG_Fortran03
5473 || lang == DW_LANG_Fortran08);
5474 }
5475
5476 static inline bool
5477 is_fortran (const_tree decl)
5478 {
5479 if (in_lto_p)
5480 {
5481 const_tree context = get_ultimate_context (decl);
5482 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5483 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5484 "GNU Fortran", 11) == 0
5485 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5486 "GNU F77") == 0);
5487 }
5488 return is_fortran ();
5489 }
5490
5491 /* Return TRUE if the language is Ada. */
5492
5493 static inline bool
5494 is_ada (void)
5495 {
5496 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5497
5498 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5499 }
5500
5501 /* Remove the specified attribute if present. Return TRUE if removal
5502 was successful. */
5503
5504 static bool
5505 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5506 {
5507 dw_attr_node *a;
5508 unsigned ix;
5509
5510 if (! die)
5511 return false;
5512
5513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5514 if (a->dw_attr == attr_kind)
5515 {
5516 if (AT_class (a) == dw_val_class_str)
5517 if (a->dw_attr_val.v.val_str->refcount)
5518 a->dw_attr_val.v.val_str->refcount--;
5519
5520 /* vec::ordered_remove should help reduce the number of abbrevs
5521 that are needed. */
5522 die->die_attr->ordered_remove (ix);
5523 return true;
5524 }
5525 return false;
5526 }
5527
5528 /* Remove CHILD from its parent. PREV must have the property that
5529 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5530
5531 static void
5532 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5533 {
5534 gcc_assert (child->die_parent == prev->die_parent);
5535 gcc_assert (prev->die_sib == child);
5536 if (prev == child)
5537 {
5538 gcc_assert (child->die_parent->die_child == child);
5539 prev = NULL;
5540 }
5541 else
5542 prev->die_sib = child->die_sib;
5543 if (child->die_parent->die_child == child)
5544 child->die_parent->die_child = prev;
5545 child->die_sib = NULL;
5546 }
5547
5548 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5549 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5550
5551 static void
5552 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5553 {
5554 dw_die_ref parent = old_child->die_parent;
5555
5556 gcc_assert (parent == prev->die_parent);
5557 gcc_assert (prev->die_sib == old_child);
5558
5559 new_child->die_parent = parent;
5560 if (prev == old_child)
5561 {
5562 gcc_assert (parent->die_child == old_child);
5563 new_child->die_sib = new_child;
5564 }
5565 else
5566 {
5567 prev->die_sib = new_child;
5568 new_child->die_sib = old_child->die_sib;
5569 }
5570 if (old_child->die_parent->die_child == old_child)
5571 old_child->die_parent->die_child = new_child;
5572 old_child->die_sib = NULL;
5573 }
5574
5575 /* Move all children from OLD_PARENT to NEW_PARENT. */
5576
5577 static void
5578 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5579 {
5580 dw_die_ref c;
5581 new_parent->die_child = old_parent->die_child;
5582 old_parent->die_child = NULL;
5583 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5584 }
5585
5588 5586 /* Remove all child DIEs of DIE whose die_tag is TAG. Do nothing if no
5589 5587 child matches TAG. */
5588
5589 static void
5590 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5591 {
5592 dw_die_ref c;
5593
5594 c = die->die_child;
5595 if (c) do {
5596 dw_die_ref prev = c;
5597 c = c->die_sib;
5598 while (c->die_tag == tag)
5599 {
5600 remove_child_with_prev (c, prev);
5601 c->die_parent = NULL;
5602 /* Might have removed every child. */
5603 if (die->die_child == NULL)
5604 return;
5605 c = prev->die_sib;
5606 }
5607 } while (c != die->die_child);
5608 }
5609
5610 /* Add a CHILD_DIE as the last child of DIE. */
5611
5612 static void
5613 add_child_die (dw_die_ref die, dw_die_ref child_die)
5614 {
5615 /* FIXME this should probably be an assert. */
5616 if (! die || ! child_die)
5617 return;
5618 gcc_assert (die != child_die);
5619
5620 child_die->die_parent = die;
5621 if (die->die_child)
5622 {
5623 child_die->die_sib = die->die_child->die_sib;
5624 die->die_child->die_sib = child_die;
5625 }
5626 else
5627 child_die->die_sib = child_die;
5628 die->die_child = child_die;
5629 }
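
/* For illustration: children of a DIE form a circular, singly linked
   list through die_sib, and die->die_child points at the *last* child
   added.  After adding hypothetical children A, B and C to parent P in
   that order:

     P->die_child == C,  C->die_sib == A,  A->die_sib == B,  B->die_sib == C

   so walking from P->die_child->die_sib visits the children in
   insertion order.  */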
5630
5631 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5632
5633 static void
5634 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5635 dw_die_ref after_die)
5636 {
5637 gcc_assert (die
5638 && child_die
5639 && after_die
5640 && die->die_child
5641 && die != child_die);
5642
5643 child_die->die_parent = die;
5644 child_die->die_sib = after_die->die_sib;
5645 after_die->die_sib = child_die;
5646 if (die->die_child == after_die)
5647 die->die_child = child_die;
5648 }
5649
5650 /* Unassociate CHILD from its parent, and make its parent be
5651 NEW_PARENT. */
5652
5653 static void
5654 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5655 {
5656 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5657 if (p->die_sib == child)
5658 {
5659 remove_child_with_prev (child, p);
5660 break;
5661 }
5662 add_child_die (new_parent, child);
5663 }
5664
5665 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5666 is the specification, to the end of PARENT's list of children.
5667 This is done by removing and re-adding it. */
5668
5669 static void
5670 splice_child_die (dw_die_ref parent, dw_die_ref child)
5671 {
5672 /* We want the declaration DIE from inside the class, not the
5673 specification DIE at toplevel. */
5674 if (child->die_parent != parent)
5675 {
5676 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5677
5678 if (tmp)
5679 child = tmp;
5680 }
5681
5682 gcc_assert (child->die_parent == parent
5683 || (child->die_parent
5684 == get_AT_ref (parent, DW_AT_specification)));
5685
5686 reparent_child (child, parent);
5687 }
5688
5689 /* Create and return a new die with TAG_VALUE as tag. */
5690
5691 static inline dw_die_ref
5692 new_die_raw (enum dwarf_tag tag_value)
5693 {
5694 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5695 die->die_tag = tag_value;
5696 return die;
5697 }
5698
5699 /* Create and return a new die with a parent of PARENT_DIE. If
5700 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5701 associated tree T must be supplied to determine parenthood
5702 later. */
5703
5704 static inline dw_die_ref
5705 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5706 {
5707 dw_die_ref die = new_die_raw (tag_value);
5708
5709 if (parent_die != NULL)
5710 add_child_die (parent_die, die);
5711 else
5712 {
5713 limbo_die_node *limbo_node;
5714
5715 /* No DIEs created after early dwarf should end up in limbo,
5716 because the limbo list should not persist past LTO
5717 streaming. */
5718 if (tag_value != DW_TAG_compile_unit
5719 /* These are allowed because they're generated while
5720 breaking out COMDAT units late. */
5721 && tag_value != DW_TAG_type_unit
5722 && tag_value != DW_TAG_skeleton_unit
5723 && !early_dwarf
5724 /* Allow nested functions to live in limbo because they will
5725 only temporarily live there, as decls_for_scope will fix
5726 them up. */
5727 && (TREE_CODE (t) != FUNCTION_DECL
5728 || !decl_function_context (t))
5729 /* Same as nested functions above but for types. Types that
5730 are local to a function will be fixed in
5731 decls_for_scope. */
5732 && (!RECORD_OR_UNION_TYPE_P (t)
5733 || !TYPE_CONTEXT (t)
5734 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5735 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5736 especially in the ltrans stage, but once we implement LTO
5737 dwarf streaming, we should remove this exception. */
5738 && !in_lto_p)
5739 {
5740 fprintf (stderr, "symbol ended up in limbo too late:");
5741 debug_generic_stmt (t);
5742 gcc_unreachable ();
5743 }
5744
5745 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5746 limbo_node->die = die;
5747 limbo_node->created_for = t;
5748 limbo_node->next = limbo_die_list;
5749 limbo_die_list = limbo_node;
5750 }
5751
5752 return die;
5753 }
5754
5755 /* Return the DIE associated with the given type specifier. */
5756
5757 static inline dw_die_ref
5758 lookup_type_die (tree type)
5759 {
5760 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5761 if (die && die->removed)
5762 {
5763 TYPE_SYMTAB_DIE (type) = NULL;
5764 return NULL;
5765 }
5766 return die;
5767 }
5768
5769 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5770 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5773 5771 anonymous type instead of the one of the naming typedef. */
5772
5773 static inline dw_die_ref
5774 strip_naming_typedef (tree type, dw_die_ref type_die)
5775 {
5776 if (type
5777 && TREE_CODE (type) == RECORD_TYPE
5778 && type_die
5779 && type_die->die_tag == DW_TAG_typedef
5780 && is_naming_typedef_decl (TYPE_NAME (type)))
5781 type_die = get_AT_ref (type_die, DW_AT_type);
5782 return type_die;
5783 }
5784
5785 /* Like lookup_type_die, but if type is an anonymous type named by a
5788 5786 typedef[1], return the DIE of the anonymous type instead of the one
5789 5787 of the naming typedef. This is because in gen_typedef_die, we
5790 5788 equated the anonymous struct named by the typedef with the DIE of
5789 the naming typedef. So by default, lookup_type_die on an anonymous
5790 struct yields the DIE of the naming typedef.
5791
5792 [1]: Read the comment of is_naming_typedef_decl to learn about what
5793 a naming typedef is. */
5794
5795 static inline dw_die_ref
5796 lookup_type_die_strip_naming_typedef (tree type)
5797 {
5798 dw_die_ref die = lookup_type_die (type);
5799 return strip_naming_typedef (type, die);
5800 }
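
/* For example (assuming the usual C notion of a naming typedef; see
   is_naming_typedef_decl for the precise definition):

     typedef struct { int x; } foo;

   Here the struct is anonymous and "foo" is its naming typedef; the
   routines above return the DIE of the anonymous struct rather than the
   DW_TAG_typedef DIE it was equated to.  */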
5801
5802 /* Equate a DIE to a given type specifier. */
5803
5804 static inline void
5805 equate_type_number_to_die (tree type, dw_die_ref type_die)
5806 {
5807 TYPE_SYMTAB_DIE (type) = type_die;
5808 }
5809
5810 /* Returns a hash value for X (which really is a die_struct). */
5811
5812 inline hashval_t
5813 decl_die_hasher::hash (die_node *x)
5814 {
5815 return (hashval_t) x->decl_id;
5816 }
5817
5818 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5819
5820 inline bool
5821 decl_die_hasher::equal (die_node *x, tree y)
5822 {
5823 return (x->decl_id == DECL_UID (y));
5824 }
5825
5826 /* Return the DIE associated with a given declaration. */
5827
5828 static inline dw_die_ref
5829 lookup_decl_die (tree decl)
5830 {
5831 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5832 NO_INSERT);
5833 if (!die)
5834 return NULL;
5835 if ((*die)->removed)
5836 {
5837 decl_die_table->clear_slot (die);
5838 return NULL;
5839 }
5840 return *die;
5841 }
5842
5843
5846 5844 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5847 5845 style reference. Return true if we found one referring to a DIE for
5848 5846 DECL, otherwise return false. */
5847
5848 static bool
5849 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5850 unsigned HOST_WIDE_INT *off)
5851 {
5852 dw_die_ref die;
5853
5854 if (in_lto_p && !decl_die_table)
5855 return false;
5856
5857 if (TREE_CODE (decl) == BLOCK)
5858 die = BLOCK_DIE (decl);
5859 else
5860 die = lookup_decl_die (decl);
5861 if (!die)
5862 return false;
5863
5864 /* During WPA stage and incremental linking we currently use DIEs
5865 to store the decl <-> label + offset map. That's quite inefficient
5866 but it works for now. */
5867 if (in_lto_p)
5868 {
5869 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5870 if (!ref)
5871 {
5872 gcc_assert (die == comp_unit_die ());
5873 return false;
5874 }
5875 *off = ref->die_offset;
5876 *sym = ref->die_id.die_symbol;
5877 return true;
5878 }
5879
5880 /* Similar to get_ref_die_offset_label, but using the "correct"
5881 label. */
5882 *off = die->die_offset;
5883 while (die->die_parent)
5884 die = die->die_parent;
5885 /* For the containing CU DIE we compute a die_symbol in
5886 compute_comp_unit_symbol. */
5887 gcc_assert (die->die_tag == DW_TAG_compile_unit
5888 && die->die_id.die_symbol != NULL);
5889 *sym = die->die_id.die_symbol;
5890 return true;
5891 }
5892
5893 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5894
5895 static void
5896 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5897 const char *symbol, HOST_WIDE_INT offset)
5898 {
5899 /* Create a fake DIE that contains the reference. Don't use
5900 new_die because we don't want to end up in the limbo list. */
5901 dw_die_ref ref = new_die_raw (die->die_tag);
5902 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5903 ref->die_offset = offset;
5904 ref->with_offset = 1;
5905 add_AT_die_ref (die, attr_kind, ref);
5906 }
5907
5908 /* Create a DIE for DECL if required and add a reference to a DIE
5909 at SYMBOL + OFFSET which contains attributes dumped early. */
5910
5911 static void
5912 dwarf2out_register_external_die (tree decl, const char *sym,
5913 unsigned HOST_WIDE_INT off)
5914 {
5915 if (debug_info_level == DINFO_LEVEL_NONE)
5916 return;
5917
5918 if ((flag_wpa
5919 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5920 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5921
5922 dw_die_ref die
5923 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5924 gcc_assert (!die);
5925
5926 tree ctx;
5927 dw_die_ref parent = NULL;
5930 5928 /* Need to look up a DIE for the decl's context - the containing
5931 5929 function or translation unit. */
5930 if (TREE_CODE (decl) == BLOCK)
5931 {
5932 ctx = BLOCK_SUPERCONTEXT (decl);
5935 5933 /* ??? We do not output DIEs for all scopes, so skip as
5936 5934 many DIEs as needed. */
5935 while (TREE_CODE (ctx) == BLOCK
5936 && !BLOCK_DIE (ctx))
5937 ctx = BLOCK_SUPERCONTEXT (ctx);
5938 }
5939 else
5940 ctx = DECL_CONTEXT (decl);
5941 /* Peel types in the context stack. */
5942 while (ctx && TYPE_P (ctx))
5943 ctx = TYPE_CONTEXT (ctx);
5944 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5945 if (debug_info_level <= DINFO_LEVEL_TERSE)
5946 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5947 ctx = DECL_CONTEXT (ctx);
5948 if (ctx)
5949 {
5950 if (TREE_CODE (ctx) == BLOCK)
5951 parent = BLOCK_DIE (ctx);
5952 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5953 /* Keep the 1:1 association during WPA. */
5954 && !flag_wpa
5955 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5956 /* Otherwise all late annotations go to the main CU which
5957 imports the original CUs. */
5958 parent = comp_unit_die ();
5959 else if (TREE_CODE (ctx) == FUNCTION_DECL
5960 && TREE_CODE (decl) != FUNCTION_DECL
5961 && TREE_CODE (decl) != PARM_DECL
5962 && TREE_CODE (decl) != RESULT_DECL
5963 && TREE_CODE (decl) != BLOCK)
5964 /* Leave function local entities parent determination to when
5965 we process scope vars. */
5966 ;
5967 else
5968 parent = lookup_decl_die (ctx);
5969 }
5970 else
5971 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5972 Handle this case gracefully by globalizing stuff. */
5973 parent = comp_unit_die ();
5974 /* Create a DIE "stub". */
5975 switch (TREE_CODE (decl))
5976 {
5977 case TRANSLATION_UNIT_DECL:
5978 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5979 {
5980 die = comp_unit_die ();
5981 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5982 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5983 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5984 to create a DIE for the original CUs. */
5985 return;
5986 }
5987 /* Keep the 1:1 association during WPA. */
5988 die = new_die (DW_TAG_compile_unit, NULL, decl);
5989 break;
5990 case NAMESPACE_DECL:
5991 if (is_fortran (decl))
5992 die = new_die (DW_TAG_module, parent, decl);
5993 else
5994 die = new_die (DW_TAG_namespace, parent, decl);
5995 break;
5996 case FUNCTION_DECL:
5997 die = new_die (DW_TAG_subprogram, parent, decl);
5998 break;
5999 case VAR_DECL:
6000 die = new_die (DW_TAG_variable, parent, decl);
6001 break;
6002 case RESULT_DECL:
6003 die = new_die (DW_TAG_variable, parent, decl);
6004 break;
6005 case PARM_DECL:
6006 die = new_die (DW_TAG_formal_parameter, parent, decl);
6007 break;
6008 case CONST_DECL:
6009 die = new_die (DW_TAG_constant, parent, decl);
6010 break;
6011 case LABEL_DECL:
6012 die = new_die (DW_TAG_label, parent, decl);
6013 break;
6014 case BLOCK:
6015 die = new_die (DW_TAG_lexical_block, parent, decl);
6016 break;
6017 default:
6018 gcc_unreachable ();
6019 }
6020 if (TREE_CODE (decl) == BLOCK)
6021 BLOCK_DIE (decl) = die;
6022 else
6023 equate_decl_number_to_die (decl, die);
6024
6025 /* Add a reference to the DIE providing early debug at $sym + off. */
6026 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6058 6056 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6086 6084 /* Return how many bits the PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6099 6097 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6111 6109 NEXT is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
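
/* A sketch of the piece encoding used here: when BITSIZE is positive and
   no larger than MAX_MACHINE_MODE, it is stored directly in the
   EXPR_LIST's mode field and XEXP (piece, 0) is the location note
   itself; otherwise the mode is 0 and XEXP (piece, 0) is a CONCAT of a
   CONST_INT holding the bitsize and the location note.
   decl_piece_bitsize and decl_piece_varloc_ptr above decode both
   forms.  */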
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
6137
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER is NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. Location BITPOS is modified
6141 to contain LOC_NOTE, any pieces overlapping it are removed resp.
6142 not copied and if needed some padding around it is added.
6143 When modifying in place, DEST should point to EXPR_LIST where
6144 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6145 to the start of the whole list and INNER points to the EXPR_LIST
6146 where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6179 6177 /* A piece with the correct bitpos and bitsize already exists;
6180 6178 just update its location and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
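
/* A worked example of the in-place case (purely illustrative): suppose
   the list holds pieces for bits [0,32), [32,48) and [48,64), and the
   function is called with SRC and INNER NULL, DEST pointing at the
   piece that starts at bit 32, BITPOS 32, PIECE_BITPOS 32, BITSIZE 32
   and a new LOC_NOTE.  The new 32-bit piece is spliced in at bit 32 and
   the two old pieces it overlaps are removed, leaving pieces for
   [0,32) and [32,64).  */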
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6312 6310 /* TEMP->LAST here points either to the last-but-one or the
6313 6311 last element in the chained list; LAST points to the last
6314 6312 element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6317 6315 /* For SRA-optimized variables, if there weren't any real
6318 6316 insns since the last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6365 6363 /* Add LOC to the end of the list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
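
/* Roughly, the net effect of add_var_loc_to_decl: each tracked DECL
   gets a var_loc_list in decl_loc_table whose nodes are chained through
   ->next, ->first being the oldest note and ->last pointing at either
   the last or the last-but-one node, so the most recent, possibly
   superseded note can be replaced or dropped cheaply.  Piece lists
   (EXPR_LISTs built by construct_piece_list) are used when only part of
   the variable receives a new location.  */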
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6427 6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6428 6426 else
6429 6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated to the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6459 break;
6460 case dw_val_class_loc_list:
6461 fprintf (outfile, "location list -> label:%s",
6462 val->v.val_loc_list->ll_symbol);
6463 break;
6464 case dw_val_class_view_list:
6465 val = view_list_to_loc_list_val_node (val);
6466 fprintf (outfile, "location list with views -> labels:%s and %s",
6467 val->v.val_loc_list->ll_symbol,
6468 val->v.val_loc_list->vl_symbol);
6469 break;
6470 case dw_val_class_range_list:
6471 fprintf (outfile, "range list");
6472 break;
6473 case dw_val_class_const:
6474 case dw_val_class_const_implicit:
6475 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6476 break;
6477 case dw_val_class_unsigned_const:
6478 case dw_val_class_unsigned_const_implicit:
6479 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6480 break;
6481 case dw_val_class_const_double:
6482 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6483 HOST_WIDE_INT_PRINT_UNSIGNED")",
6484 val->v.val_double.high,
6485 val->v.val_double.low);
6486 break;
6487 case dw_val_class_wide_int:
6488 {
6489 int i = val->v.val_wide->get_len ();
6490 fprintf (outfile, "constant (");
6491 gcc_assert (i > 0);
6492 if (val->v.val_wide->elt (i - 1) == 0)
6493 fprintf (outfile, "0x");
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6495 val->v.val_wide->elt (--i));
6496 while (--i >= 0)
6497 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6498 val->v.val_wide->elt (i));
6499 fprintf (outfile, ")");
6500 break;
6501 }
6502 case dw_val_class_vec:
6503 fprintf (outfile, "floating-point or vector constant");
6504 break;
6505 case dw_val_class_flag:
6506 fprintf (outfile, "%u", val->v.val_flag);
6507 break;
6508 case dw_val_class_die_ref:
6509 if (val->v.val_die_ref.die != NULL)
6510 {
6511 dw_die_ref die = val->v.val_die_ref.die;
6512
6513 if (die->comdat_type_p)
6514 {
6515 fprintf (outfile, "die -> signature: ");
6516 print_signature (outfile,
6517 die->die_id.die_type_node->signature);
6518 }
6519 else if (die->die_id.die_symbol)
6520 {
6521 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6522 if (die->with_offset)
6523 fprintf (outfile, " + %ld", die->die_offset);
6524 }
6525 else
6526 fprintf (outfile, "die -> %ld", die->die_offset);
6527 fprintf (outfile, " (%p)", (void *) die);
6528 }
6529 else
6530 fprintf (outfile, "die -> <null>");
6531 break;
6532 case dw_val_class_vms_delta:
6533 fprintf (outfile, "delta: @slotcount(%s-%s)",
6534 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6535 break;
6536 case dw_val_class_symview:
6537 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6538 break;
6539 case dw_val_class_lbl_id:
6540 case dw_val_class_lineptr:
6541 case dw_val_class_macptr:
6542 case dw_val_class_loclistsptr:
6543 case dw_val_class_high_pc:
6544 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6545 break;
6546 case dw_val_class_str:
6547 if (val->v.val_str->str != NULL)
6548 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6549 else
6550 fprintf (outfile, "<null>");
6551 break;
6552 case dw_val_class_file:
6553 case dw_val_class_file_implicit:
6554 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6555 val->v.val_file->emitted_number);
6556 break;
6557 case dw_val_class_data8:
6558 {
6559 int i;
6560
6561 for (i = 0; i < 8; i++)
6562 fprintf (outfile, "%02x", val->v.val_data8[i]);
6563 break;
6564 }
6565 case dw_val_class_discr_value:
6566 print_discr_value (outfile, &val->v.val_discr_value);
6567 break;
6568 case dw_val_class_discr_list:
6569 for (dw_discr_list_ref node = val->v.val_discr_list;
6570 node != NULL;
6571 node = node->dw_discr_next)
6572 {
6573 if (node->dw_discr_range)
6574 {
6577 6575 print_discr_value (outfile, &node->dw_discr_lower_bound);
6578 6576 fprintf (outfile, " .. ");
6579 6577 print_discr_value (outfile, &node->dw_discr_upper_bound);
6578 }
6579 else
6580 print_discr_value (outfile, &node->dw_discr_lower_bound);
6581
6582 if (node->dw_discr_next != NULL)
6583 fprintf (outfile, " | ");
6584 }
6585 default:
6586 break;
6587 }
6588 }
6589
6590 /* Likewise, for a DIE attribute. */
6591
6592 static void
6593 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6594 {
6595 print_dw_val (&a->dw_attr_val, recurse, outfile);
6596 }
6597
6598
6599 /* Print the list of operands in the LOC location description to OUTFILE. This
6600 routine is a debugging aid only. */
6601
6602 static void
6603 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6604 {
6605 dw_loc_descr_ref l = loc;
6606
6607 if (loc == NULL)
6608 {
6609 print_spaces (outfile);
6610 fprintf (outfile, "<null>\n");
6611 return;
6612 }
6613
6614 for (l = loc; l != NULL; l = l->dw_loc_next)
6615 {
6616 print_spaces (outfile);
6617 fprintf (outfile, "(%p) %s",
6618 (void *) l,
6619 dwarf_stack_op_name (l->dw_loc_opc));
6620 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6621 {
6622 fprintf (outfile, " ");
6623 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6624 }
6625 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6626 {
6627 fprintf (outfile, ", ");
6628 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6629 }
6630 fprintf (outfile, "\n");
6631 }
6632 }
6633
6634 /* Print the information associated with a given DIE, and its children.
6635 This routine is a debugging aid only. */
6636
6637 static void
6638 print_die (dw_die_ref die, FILE *outfile)
6639 {
6640 dw_attr_node *a;
6641 dw_die_ref c;
6642 unsigned ix;
6643
6644 print_spaces (outfile);
6645 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6646 die->die_offset, dwarf_tag_name (die->die_tag),
6647 (void*) die);
6648 print_spaces (outfile);
6649 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6650 fprintf (outfile, " offset: %ld", die->die_offset);
6651 fprintf (outfile, " mark: %d\n", die->die_mark);
6652
6653 if (die->comdat_type_p)
6654 {
6655 print_spaces (outfile);
6656 fprintf (outfile, " signature: ");
6657 print_signature (outfile, die->die_id.die_type_node->signature);
6658 fprintf (outfile, "\n");
6659 }
6660
6661 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6662 {
6663 print_spaces (outfile);
6664 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6665
6666 print_attribute (a, true, outfile);
6667 fprintf (outfile, "\n");
6668 }
6669
6670 if (die->die_child != NULL)
6671 {
6672 print_indent += 4;
6673 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6674 print_indent -= 4;
6675 }
6676 if (print_indent == 0)
6677 fprintf (outfile, "\n");
6678 }
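
/* Roughly what print_die output looks like (illustrative only; offsets,
   pointers and attribute values are fabricated):

     DIE   42: DW_TAG_variable (0x2af3c40)
       abbrev id: 3 offset: 42 mark: 0
       DW_AT_name: "foo"
       DW_AT_type: die -> 57 (0x2af3c90)

   Children, if any, follow with the indentation increased by four
   spaces.  */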
6679
6680 /* Print the list of operations in the LOC location description. */
6681
6682 DEBUG_FUNCTION void
6683 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6684 {
6685 print_loc_descr (loc, stderr);
6686 }
6687
6688 /* Print the information collected for a given DIE. */
6689
6690 DEBUG_FUNCTION void
6691 debug_dwarf_die (dw_die_ref die)
6692 {
6693 print_die (die, stderr);
6694 }
6695
6696 DEBUG_FUNCTION void
6697 debug (die_struct &ref)
6698 {
6699 print_die (&ref, stderr);
6700 }
6701
6702 DEBUG_FUNCTION void
6703 debug (die_struct *ptr)
6704 {
6705 if (ptr)
6706 debug (*ptr);
6707 else
6708 fprintf (stderr, "<nil>\n");
6709 }
6710
6711
6712 /* Print all DWARF information collected for the compilation unit.
6713 This routine is a debugging aid only. */
6714
6715 DEBUG_FUNCTION void
6716 debug_dwarf (void)
6717 {
6718 print_indent = 0;
6719 print_die (comp_unit_die (), stderr);
6720 }
6721
6722 /* Verify the DIE tree structure. */
6723
6724 DEBUG_FUNCTION void
6725 verify_die (dw_die_ref die)
6726 {
6727 gcc_assert (!die->die_mark);
6728 if (die->die_parent == NULL
6729 && die->die_sib == NULL)
6730 return;
6731 /* Verify the die_sib list is cyclic. */
6732 dw_die_ref x = die;
6733 do
6734 {
6735 x->die_mark = 1;
6736 x = x->die_sib;
6737 }
6738 while (x && !x->die_mark);
6739 gcc_assert (x == die);
6740 x = die;
6741 do
6742 {
6743 /* Verify all dies have the same parent. */
6744 gcc_assert (x->die_parent == die->die_parent);
6745 if (x->die_child)
6746 {
6747 /* Verify the child has the proper parent and recurse. */
6748 gcc_assert (x->die_child->die_parent == x);
6749 verify_die (x->die_child);
6750 }
6751 x->die_mark = 0;
6752 x = x->die_sib;
6753 }
6754 while (x && x->die_mark);
6755 }
6756
6757 /* Sanity checks on DIEs. */
6758
6759 static void
6760 check_die (dw_die_ref die)
6761 {
6762 unsigned ix;
6763 dw_attr_node *a;
6764 bool inline_found = false;
6765 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6766 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6768 {
6769 switch (a->dw_attr)
6770 {
6771 case DW_AT_inline:
6772 if (a->dw_attr_val.v.val_unsigned)
6773 inline_found = true;
6774 break;
6775 case DW_AT_location:
6776 ++n_location;
6777 break;
6778 case DW_AT_low_pc:
6779 ++n_low_pc;
6780 break;
6781 case DW_AT_high_pc:
6782 ++n_high_pc;
6783 break;
6784 case DW_AT_artificial:
6785 ++n_artificial;
6786 break;
6787 case DW_AT_decl_column:
6788 ++n_decl_column;
6789 break;
6790 case DW_AT_decl_line:
6791 ++n_decl_line;
6792 break;
6793 case DW_AT_decl_file:
6794 ++n_decl_file;
6795 break;
6796 default:
6797 break;
6798 }
6799 }
6800 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6801 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6802 {
6803 fprintf (stderr, "Duplicate attributes in DIE:\n");
6804 debug_dwarf_die (die);
6805 gcc_unreachable ();
6806 }
6807 if (inline_found)
6808 {
6809 /* A debugging information entry that is a member of an abstract
6810 instance tree [that has DW_AT_inline] should not contain any
6811 attributes which describe aspects of the subroutine which vary
6812 between distinct inlined expansions or distinct out-of-line
6813 expansions. */
6814 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6815 gcc_assert (a->dw_attr != DW_AT_low_pc
6816 && a->dw_attr != DW_AT_high_pc
6817 && a->dw_attr != DW_AT_location
6818 && a->dw_attr != DW_AT_frame_base
6819 && a->dw_attr != DW_AT_call_all_calls
6820 && a->dw_attr != DW_AT_GNU_all_call_sites);
6821 }
6822 }
6823 \f
6824 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6825 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6826 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6827
6828 /* Calculate the checksum of a location expression. */
6829
6830 static inline void
6831 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6832 {
6833 int tem;
6834 inchash::hash hstate;
6835 hashval_t hash;
6836
6837 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6838 CHECKSUM (tem);
6839 hash_loc_operands (loc, hstate);
6840 hash = hstate.end();
6841 CHECKSUM (hash);
6842 }
6843
6844 /* Calculate the checksum of an attribute. */
6845
6846 static void
6847 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6848 {
6849 dw_loc_descr_ref loc;
6850 rtx r;
6851
6852 CHECKSUM (at->dw_attr);
6853
6854 /* We don't care that this was compiled with a different compiler
6855 snapshot; if the output is the same, that's what matters. */
6856 if (at->dw_attr == DW_AT_producer)
6857 return;
6858
6859 switch (AT_class (at))
6860 {
6861 case dw_val_class_const:
6862 case dw_val_class_const_implicit:
6863 CHECKSUM (at->dw_attr_val.v.val_int);
6864 break;
6865 case dw_val_class_unsigned_const:
6866 case dw_val_class_unsigned_const_implicit:
6867 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6868 break;
6869 case dw_val_class_const_double:
6870 CHECKSUM (at->dw_attr_val.v.val_double);
6871 break;
6872 case dw_val_class_wide_int:
6873 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6874 get_full_len (*at->dw_attr_val.v.val_wide)
6875 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6876 break;
6877 case dw_val_class_vec:
6878 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6879 (at->dw_attr_val.v.val_vec.length
6880 * at->dw_attr_val.v.val_vec.elt_size));
6881 break;
6882 case dw_val_class_flag:
6883 CHECKSUM (at->dw_attr_val.v.val_flag);
6884 break;
6885 case dw_val_class_str:
6886 CHECKSUM_STRING (AT_string (at));
6887 break;
6888
6889 case dw_val_class_addr:
6890 r = AT_addr (at);
6891 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6892 CHECKSUM_STRING (XSTR (r, 0));
6893 break;
6894
6895 case dw_val_class_offset:
6896 CHECKSUM (at->dw_attr_val.v.val_offset);
6897 break;
6898
6899 case dw_val_class_loc:
6900 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6901 loc_checksum (loc, ctx);
6902 break;
6903
6904 case dw_val_class_die_ref:
6905 die_checksum (AT_ref (at), ctx, mark);
6906 break;
6907
6908 case dw_val_class_fde_ref:
6909 case dw_val_class_vms_delta:
6910 case dw_val_class_symview:
6911 case dw_val_class_lbl_id:
6912 case dw_val_class_lineptr:
6913 case dw_val_class_macptr:
6914 case dw_val_class_loclistsptr:
6915 case dw_val_class_high_pc:
6916 break;
6917
6918 case dw_val_class_file:
6919 case dw_val_class_file_implicit:
6920 CHECKSUM_STRING (AT_file (at)->filename);
6921 break;
6922
6923 case dw_val_class_data8:
6924 CHECKSUM (at->dw_attr_val.v.val_data8);
6925 break;
6926
6927 default:
6928 break;
6929 }
6930 }
6931
6932 /* Calculate the checksum of a DIE. */
6933
6934 static void
6935 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6936 {
6937 dw_die_ref c;
6938 dw_attr_node *a;
6939 unsigned ix;
6940
6941 /* To avoid infinite recursion. */
6942 if (die->die_mark)
6943 {
6944 CHECKSUM (die->die_mark);
6945 return;
6946 }
6947 die->die_mark = ++(*mark);
6948
6949 CHECKSUM (die->die_tag);
6950
6951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6952 attr_checksum (a, ctx, mark);
6953
6954 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6955 }
6956
6957 #undef CHECKSUM
6958 #undef CHECKSUM_BLOCK
6959 #undef CHECKSUM_STRING
6960
6961 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6962 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6963 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6964 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6965 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6966 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6967 #define CHECKSUM_ATTR(FOO) \
6968 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6969
6970 /* Calculate the checksum of a number in signed LEB128 format. */
6971
6972 static void
6973 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6974 {
6975 unsigned char byte;
6976 bool more;
6977
6978 while (1)
6979 {
6980 byte = (value & 0x7f);
6981 value >>= 7;
6982 more = !((value == 0 && (byte & 0x40) == 0)
6983 || (value == -1 && (byte & 0x40) != 0));
6984 if (more)
6985 byte |= 0x80;
6986 CHECKSUM (byte);
6987 if (!more)
6988 break;
6989 }
6990 }
6991
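/* A few worked encodings, following the loop above (these match the
   signed LEB128 examples in the DWARF standard):

     value    2  ->  0x02
     value   -2  ->  0x7e
     value  127  ->  0xff 0x00
     value -127  ->  0x81 0x7f

   Each byte is fed to the MD5 context through CHECKSUM as it is produced,
   so no intermediate buffer is needed.  */
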
6992 /* Calculate the checksum of a number in unsigned LEB128 format. */
6993
6994 static void
6995 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6996 {
6997 while (1)
6998 {
6999 unsigned char byte = (value & 0x7f);
7000 value >>= 7;
7001 if (value != 0)
7002 /* More bytes to follow. */
7003 byte |= 0x80;
7004 CHECKSUM (byte);
7005 if (value == 0)
7006 break;
7007 }
7008 }
7009
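/* For reference, a buffer-based variant of the unsigned encoder above.
   This is only an illustrative sketch (it is not used by this file, and
   BUF is assumed large enough); it shows the same 7-bits-per-byte,
   low-order-first layout that checksum_uleb128 streams into the MD5
   context:

     static size_t
     encode_uleb128 (unsigned HOST_WIDE_INT value, unsigned char *buf)
     {
       size_t n = 0;
       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value != 0)
             byte |= 0x80;
           buf[n++] = byte;
         }
       while (value != 0);
       return n;
     }

   Encoding 624485, for example, produces the three bytes 0xe5 0x8e 0x26,
   exactly the sequence checksum_uleb128 would feed to md5_process_bytes.  */
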
7010 /* Checksum the context of the DIE. This adds the names of any
7011 surrounding namespaces or structures to the checksum. */
7012
7013 static void
7014 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7015 {
7016 const char *name;
7017 dw_die_ref spec;
7018 int tag = die->die_tag;
7019
7020 if (tag != DW_TAG_namespace
7021 && tag != DW_TAG_structure_type
7022 && tag != DW_TAG_class_type)
7023 return;
7024
7025 name = get_AT_string (die, DW_AT_name);
7026
7027 spec = get_AT_ref (die, DW_AT_specification);
7028 if (spec != NULL)
7029 die = spec;
7030
7031 if (die->die_parent != NULL)
7032 checksum_die_context (die->die_parent, ctx);
7033
7034 CHECKSUM_ULEB128 ('C');
7035 CHECKSUM_ULEB128 (tag);
7036 if (name != NULL)
7037 CHECKSUM_STRING (name);
7038 }
7039
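/* Worked example for the context walk above: for a nested type

     namespace N { struct S { struct T; }; }

   checksumming T's context first recurses to N (whose own parent, the
   compile unit, is skipped because its tag is not a namespace, class or
   structure) and then handles S, producing

     'C', DW_TAG_namespace, "N", 'C', DW_TAG_structure_type, "S"

   where the letters and tags go through CHECKSUM_ULEB128 and the names
   through CHECKSUM_STRING (including the trailing NUL).  */
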
7040 /* Calculate the checksum of a location expression. */
7041
7042 static inline void
7043 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7044 {
7045 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7046 were emitted as a DW_FORM_sdata instead of a location expression. */
7047 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7048 {
7049 CHECKSUM_ULEB128 (DW_FORM_sdata);
7050 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7051 return;
7052 }
7053
7054 /* Otherwise, just checksum the raw location expression. */
7055 while (loc != NULL)
7056 {
7057 inchash::hash hstate;
7058 hashval_t hash;
7059
7060 CHECKSUM_ULEB128 (loc->dtprel);
7061 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7062 hash_loc_operands (loc, hstate);
7063 hash = hstate.end ();
7064 CHECKSUM (hash);
7065 loc = loc->dw_loc_next;
7066 }
7067 }
7068
7069 /* Calculate the checksum of an attribute. */
7070
7071 static void
7072 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7073 struct md5_ctx *ctx, int *mark)
7074 {
7075 dw_loc_descr_ref loc;
7076 rtx r;
7077
7078 if (AT_class (at) == dw_val_class_die_ref)
7079 {
7080 dw_die_ref target_die = AT_ref (at);
7081
7082 /* For pointer and reference types, we checksum only the (qualified)
7083 name of the target type (if there is a name). For friend entries,
7084 we checksum only the (qualified) name of the target type or function.
7085 This allows the checksum to remain the same whether the target type
7086 is complete or not. */
7087 if ((at->dw_attr == DW_AT_type
7088 && (tag == DW_TAG_pointer_type
7089 || tag == DW_TAG_reference_type
7090 || tag == DW_TAG_rvalue_reference_type
7091 || tag == DW_TAG_ptr_to_member_type))
7092 || (at->dw_attr == DW_AT_friend
7093 && tag == DW_TAG_friend))
7094 {
7095 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7096
7097 if (name_attr != NULL)
7098 {
7099 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7100
7101 if (decl == NULL)
7102 decl = target_die;
7103 CHECKSUM_ULEB128 ('N');
7104 CHECKSUM_ULEB128 (at->dw_attr);
7105 if (decl->die_parent != NULL)
7106 checksum_die_context (decl->die_parent, ctx);
7107 CHECKSUM_ULEB128 ('E');
7108 CHECKSUM_STRING (AT_string (name_attr));
7109 return;
7110 }
7111 }
7112
7113 /* For all other references to another DIE, we check to see if the
7114 target DIE has already been visited. If it has, we emit a
7115 backward reference; if not, we descend recursively. */
7116 if (target_die->die_mark > 0)
7117 {
7118 CHECKSUM_ULEB128 ('R');
7119 CHECKSUM_ULEB128 (at->dw_attr);
7120 CHECKSUM_ULEB128 (target_die->die_mark);
7121 }
7122 else
7123 {
7124 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7125
7126 if (decl == NULL)
7127 decl = target_die;
7128 target_die->die_mark = ++(*mark);
7129 CHECKSUM_ULEB128 ('T');
7130 CHECKSUM_ULEB128 (at->dw_attr);
7131 if (decl->die_parent != NULL)
7132 checksum_die_context (decl->die_parent, ctx);
7133 die_checksum_ordered (target_die, ctx, mark);
7134 }
7135 return;
7136 }
7137
7138 CHECKSUM_ULEB128 ('A');
7139 CHECKSUM_ULEB128 (at->dw_attr);
7140
7141 switch (AT_class (at))
7142 {
7143 case dw_val_class_const:
7144 case dw_val_class_const_implicit:
7145 CHECKSUM_ULEB128 (DW_FORM_sdata);
7146 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7147 break;
7148
7149 case dw_val_class_unsigned_const:
7150 case dw_val_class_unsigned_const_implicit:
7151 CHECKSUM_ULEB128 (DW_FORM_sdata);
7152 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7153 break;
7154
7155 case dw_val_class_const_double:
7156 CHECKSUM_ULEB128 (DW_FORM_block);
7157 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7158 CHECKSUM (at->dw_attr_val.v.val_double);
7159 break;
7160
7161 case dw_val_class_wide_int:
7162 CHECKSUM_ULEB128 (DW_FORM_block);
7163 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7164 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7165 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7166 get_full_len (*at->dw_attr_val.v.val_wide)
7167 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7168 break;
7169
7170 case dw_val_class_vec:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7173 * at->dw_attr_val.v.val_vec.elt_size);
7174 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7175 (at->dw_attr_val.v.val_vec.length
7176 * at->dw_attr_val.v.val_vec.elt_size));
7177 break;
7178
7179 case dw_val_class_flag:
7180 CHECKSUM_ULEB128 (DW_FORM_flag);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7182 break;
7183
7184 case dw_val_class_str:
7185 CHECKSUM_ULEB128 (DW_FORM_string);
7186 CHECKSUM_STRING (AT_string (at));
7187 break;
7188
7189 case dw_val_class_addr:
7190 r = AT_addr (at);
7191 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7192 CHECKSUM_ULEB128 (DW_FORM_string);
7193 CHECKSUM_STRING (XSTR (r, 0));
7194 break;
7195
7196 case dw_val_class_offset:
7197 CHECKSUM_ULEB128 (DW_FORM_sdata);
7198 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7199 break;
7200
7201 case dw_val_class_loc:
7202 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7203 loc_checksum_ordered (loc, ctx);
7204 break;
7205
7206 case dw_val_class_fde_ref:
7207 case dw_val_class_symview:
7208 case dw_val_class_lbl_id:
7209 case dw_val_class_lineptr:
7210 case dw_val_class_macptr:
7211 case dw_val_class_loclistsptr:
7212 case dw_val_class_high_pc:
7213 break;
7214
7215 case dw_val_class_file:
7216 case dw_val_class_file_implicit:
7217 CHECKSUM_ULEB128 (DW_FORM_string);
7218 CHECKSUM_STRING (AT_file (at)->filename);
7219 break;
7220
7221 case dw_val_class_data8:
7222 CHECKSUM (at->dw_attr_val.v.val_data8);
7223 break;
7224
7225 default:
7226 break;
7227 }
7228 }
7229
7230 struct checksum_attributes
7231 {
7232 dw_attr_node *at_name;
7233 dw_attr_node *at_type;
7234 dw_attr_node *at_friend;
7235 dw_attr_node *at_accessibility;
7236 dw_attr_node *at_address_class;
7237 dw_attr_node *at_alignment;
7238 dw_attr_node *at_allocated;
7239 dw_attr_node *at_artificial;
7240 dw_attr_node *at_associated;
7241 dw_attr_node *at_binary_scale;
7242 dw_attr_node *at_bit_offset;
7243 dw_attr_node *at_bit_size;
7244 dw_attr_node *at_bit_stride;
7245 dw_attr_node *at_byte_size;
7246 dw_attr_node *at_byte_stride;
7247 dw_attr_node *at_const_value;
7248 dw_attr_node *at_containing_type;
7249 dw_attr_node *at_count;
7250 dw_attr_node *at_data_location;
7251 dw_attr_node *at_data_member_location;
7252 dw_attr_node *at_decimal_scale;
7253 dw_attr_node *at_decimal_sign;
7254 dw_attr_node *at_default_value;
7255 dw_attr_node *at_digit_count;
7256 dw_attr_node *at_discr;
7257 dw_attr_node *at_discr_list;
7258 dw_attr_node *at_discr_value;
7259 dw_attr_node *at_encoding;
7260 dw_attr_node *at_endianity;
7261 dw_attr_node *at_explicit;
7262 dw_attr_node *at_is_optional;
7263 dw_attr_node *at_location;
7264 dw_attr_node *at_lower_bound;
7265 dw_attr_node *at_mutable;
7266 dw_attr_node *at_ordering;
7267 dw_attr_node *at_picture_string;
7268 dw_attr_node *at_prototyped;
7269 dw_attr_node *at_small;
7270 dw_attr_node *at_segment;
7271 dw_attr_node *at_string_length;
7272 dw_attr_node *at_string_length_bit_size;
7273 dw_attr_node *at_string_length_byte_size;
7274 dw_attr_node *at_threads_scaled;
7275 dw_attr_node *at_upper_bound;
7276 dw_attr_node *at_use_location;
7277 dw_attr_node *at_use_UTF8;
7278 dw_attr_node *at_variable_parameter;
7279 dw_attr_node *at_virtuality;
7280 dw_attr_node *at_visibility;
7281 dw_attr_node *at_vtable_elem_location;
7282 };
7283
7284 /* Collect the attributes that we will want to use for the checksum. */
7285
7286 static void
7287 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7288 {
7289 dw_attr_node *a;
7290 unsigned ix;
7291
7292 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7293 {
7294 switch (a->dw_attr)
7295 {
7296 case DW_AT_name:
7297 attrs->at_name = a;
7298 break;
7299 case DW_AT_type:
7300 attrs->at_type = a;
7301 break;
7302 case DW_AT_friend:
7303 attrs->at_friend = a;
7304 break;
7305 case DW_AT_accessibility:
7306 attrs->at_accessibility = a;
7307 break;
7308 case DW_AT_address_class:
7309 attrs->at_address_class = a;
7310 break;
7311 case DW_AT_alignment:
7312 attrs->at_alignment = a;
7313 break;
7314 case DW_AT_allocated:
7315 attrs->at_allocated = a;
7316 break;
7317 case DW_AT_artificial:
7318 attrs->at_artificial = a;
7319 break;
7320 case DW_AT_associated:
7321 attrs->at_associated = a;
7322 break;
7323 case DW_AT_binary_scale:
7324 attrs->at_binary_scale = a;
7325 break;
7326 case DW_AT_bit_offset:
7327 attrs->at_bit_offset = a;
7328 break;
7329 case DW_AT_bit_size:
7330 attrs->at_bit_size = a;
7331 break;
7332 case DW_AT_bit_stride:
7333 attrs->at_bit_stride = a;
7334 break;
7335 case DW_AT_byte_size:
7336 attrs->at_byte_size = a;
7337 break;
7338 case DW_AT_byte_stride:
7339 attrs->at_byte_stride = a;
7340 break;
7341 case DW_AT_const_value:
7342 attrs->at_const_value = a;
7343 break;
7344 case DW_AT_containing_type:
7345 attrs->at_containing_type = a;
7346 break;
7347 case DW_AT_count:
7348 attrs->at_count = a;
7349 break;
7350 case DW_AT_data_location:
7351 attrs->at_data_location = a;
7352 break;
7353 case DW_AT_data_member_location:
7354 attrs->at_data_member_location = a;
7355 break;
7356 case DW_AT_decimal_scale:
7357 attrs->at_decimal_scale = a;
7358 break;
7359 case DW_AT_decimal_sign:
7360 attrs->at_decimal_sign = a;
7361 break;
7362 case DW_AT_default_value:
7363 attrs->at_default_value = a;
7364 break;
7365 case DW_AT_digit_count:
7366 attrs->at_digit_count = a;
7367 break;
7368 case DW_AT_discr:
7369 attrs->at_discr = a;
7370 break;
7371 case DW_AT_discr_list:
7372 attrs->at_discr_list = a;
7373 break;
7374 case DW_AT_discr_value:
7375 attrs->at_discr_value = a;
7376 break;
7377 case DW_AT_encoding:
7378 attrs->at_encoding = a;
7379 break;
7380 case DW_AT_endianity:
7381 attrs->at_endianity = a;
7382 break;
7383 case DW_AT_explicit:
7384 attrs->at_explicit = a;
7385 break;
7386 case DW_AT_is_optional:
7387 attrs->at_is_optional = a;
7388 break;
7389 case DW_AT_location:
7390 attrs->at_location = a;
7391 break;
7392 case DW_AT_lower_bound:
7393 attrs->at_lower_bound = a;
7394 break;
7395 case DW_AT_mutable:
7396 attrs->at_mutable = a;
7397 break;
7398 case DW_AT_ordering:
7399 attrs->at_ordering = a;
7400 break;
7401 case DW_AT_picture_string:
7402 attrs->at_picture_string = a;
7403 break;
7404 case DW_AT_prototyped:
7405 attrs->at_prototyped = a;
7406 break;
7407 case DW_AT_small:
7408 attrs->at_small = a;
7409 break;
7410 case DW_AT_segment:
7411 attrs->at_segment = a;
7412 break;
7413 case DW_AT_string_length:
7414 attrs->at_string_length = a;
7415 break;
7416 case DW_AT_string_length_bit_size:
7417 attrs->at_string_length_bit_size = a;
7418 break;
7419 case DW_AT_string_length_byte_size:
7420 attrs->at_string_length_byte_size = a;
7421 break;
7422 case DW_AT_threads_scaled:
7423 attrs->at_threads_scaled = a;
7424 break;
7425 case DW_AT_upper_bound:
7426 attrs->at_upper_bound = a;
7427 break;
7428 case DW_AT_use_location:
7429 attrs->at_use_location = a;
7430 break;
7431 case DW_AT_use_UTF8:
7432 attrs->at_use_UTF8 = a;
7433 break;
7434 case DW_AT_variable_parameter:
7435 attrs->at_variable_parameter = a;
7436 break;
7437 case DW_AT_virtuality:
7438 attrs->at_virtuality = a;
7439 break;
7440 case DW_AT_visibility:
7441 attrs->at_visibility = a;
7442 break;
7443 case DW_AT_vtable_elem_location:
7444 attrs->at_vtable_elem_location = a;
7445 break;
7446 default:
7447 break;
7448 }
7449 }
7450 }
7451
7452 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7453
7454 static void
7455 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7456 {
7457 dw_die_ref c;
7458 dw_die_ref decl;
7459 struct checksum_attributes attrs;
7460
7461 CHECKSUM_ULEB128 ('D');
7462 CHECKSUM_ULEB128 (die->die_tag);
7463
7464 memset (&attrs, 0, sizeof (attrs));
7465
7466 decl = get_AT_ref (die, DW_AT_specification);
7467 if (decl != NULL)
7468 collect_checksum_attributes (&attrs, decl);
7469 collect_checksum_attributes (&attrs, die);
7470
7471 CHECKSUM_ATTR (attrs.at_name);
7472 CHECKSUM_ATTR (attrs.at_accessibility);
7473 CHECKSUM_ATTR (attrs.at_address_class);
7474 CHECKSUM_ATTR (attrs.at_allocated);
7475 CHECKSUM_ATTR (attrs.at_artificial);
7476 CHECKSUM_ATTR (attrs.at_associated);
7477 CHECKSUM_ATTR (attrs.at_binary_scale);
7478 CHECKSUM_ATTR (attrs.at_bit_offset);
7479 CHECKSUM_ATTR (attrs.at_bit_size);
7480 CHECKSUM_ATTR (attrs.at_bit_stride);
7481 CHECKSUM_ATTR (attrs.at_byte_size);
7482 CHECKSUM_ATTR (attrs.at_byte_stride);
7483 CHECKSUM_ATTR (attrs.at_const_value);
7484 CHECKSUM_ATTR (attrs.at_containing_type);
7485 CHECKSUM_ATTR (attrs.at_count);
7486 CHECKSUM_ATTR (attrs.at_data_location);
7487 CHECKSUM_ATTR (attrs.at_data_member_location);
7488 CHECKSUM_ATTR (attrs.at_decimal_scale);
7489 CHECKSUM_ATTR (attrs.at_decimal_sign);
7490 CHECKSUM_ATTR (attrs.at_default_value);
7491 CHECKSUM_ATTR (attrs.at_digit_count);
7492 CHECKSUM_ATTR (attrs.at_discr);
7493 CHECKSUM_ATTR (attrs.at_discr_list);
7494 CHECKSUM_ATTR (attrs.at_discr_value);
7495 CHECKSUM_ATTR (attrs.at_encoding);
7496 CHECKSUM_ATTR (attrs.at_endianity);
7497 CHECKSUM_ATTR (attrs.at_explicit);
7498 CHECKSUM_ATTR (attrs.at_is_optional);
7499 CHECKSUM_ATTR (attrs.at_location);
7500 CHECKSUM_ATTR (attrs.at_lower_bound);
7501 CHECKSUM_ATTR (attrs.at_mutable);
7502 CHECKSUM_ATTR (attrs.at_ordering);
7503 CHECKSUM_ATTR (attrs.at_picture_string);
7504 CHECKSUM_ATTR (attrs.at_prototyped);
7505 CHECKSUM_ATTR (attrs.at_small);
7506 CHECKSUM_ATTR (attrs.at_segment);
7507 CHECKSUM_ATTR (attrs.at_string_length);
7508 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7509 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7510 CHECKSUM_ATTR (attrs.at_threads_scaled);
7511 CHECKSUM_ATTR (attrs.at_upper_bound);
7512 CHECKSUM_ATTR (attrs.at_use_location);
7513 CHECKSUM_ATTR (attrs.at_use_UTF8);
7514 CHECKSUM_ATTR (attrs.at_variable_parameter);
7515 CHECKSUM_ATTR (attrs.at_virtuality);
7516 CHECKSUM_ATTR (attrs.at_visibility);
7517 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7518 CHECKSUM_ATTR (attrs.at_type);
7519 CHECKSUM_ATTR (attrs.at_friend);
7520 CHECKSUM_ATTR (attrs.at_alignment);
7521
7522 /* Checksum the child DIEs. */
7523 c = die->die_child;
7524 if (c) do {
7525 dw_attr_node *name_attr;
7526
7527 c = c->die_sib;
7528 name_attr = get_AT (c, DW_AT_name);
7529 if (is_template_instantiation (c))
7530 {
7531 /* Ignore instantiations of member type and function templates. */
7532 }
7533 else if (name_attr != NULL
7534 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7535 {
7536 /* Use a shallow checksum for named nested types and member
7537 functions. */
7538 CHECKSUM_ULEB128 ('S');
7539 CHECKSUM_ULEB128 (c->die_tag);
7540 CHECKSUM_STRING (AT_string (name_attr));
7541 }
7542 else
7543 {
7544 /* Use a deep checksum for other children. */
7545 /* Mark this DIE so it gets processed when unmarking. */
7546 if (c->die_mark == 0)
7547 c->die_mark = -1;
7548 die_checksum_ordered (c, ctx, mark);
7549 }
7550 } while (c != die->die_child);
7551
7552 CHECKSUM_ULEB128 (0);
7553 }
7554
7555 /* Add a type name and tag to a hash. */
7556 static void
7557 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7558 {
7559 CHECKSUM_ULEB128 (tag);
7560 CHECKSUM_STRING (name);
7561 }
7562
7563 #undef CHECKSUM
7564 #undef CHECKSUM_STRING
7565 #undef CHECKSUM_ATTR
7566 #undef CHECKSUM_SLEB128
7567 #undef CHECKSUM_ULEB128
7568
7569 /* Generate the type signature for DIE. This is computed by generating an
7570 MD5 checksum over the DIE's tag, its relevant attributes, and its
7571 children. Attributes that are references to other DIEs are processed
7572 by recursion, using the MARK field to prevent infinite recursion.
7573 If the DIE is nested inside a namespace or another type, we also
7574 need to include that context in the signature. The lower 64 bits
7575 of the resulting MD5 checksum comprise the signature. */
7576
7577 static void
7578 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7579 {
7580 int mark;
7581 const char *name;
7582 unsigned char checksum[16];
7583 struct md5_ctx ctx;
7584 dw_die_ref decl;
7585 dw_die_ref parent;
7586
7587 name = get_AT_string (die, DW_AT_name);
7588 decl = get_AT_ref (die, DW_AT_specification);
7589 parent = get_die_parent (die);
7590
7591 /* First, compute a signature for just the type name (and its surrounding
7592 context, if any). This is stored in the type unit DIE for link-time
7593 ODR (one-definition rule) checking. */
7594
7595 if (is_cxx () && name != NULL)
7596 {
7597 md5_init_ctx (&ctx);
7598
7599 /* Checksum the names of surrounding namespaces and structures. */
7600 if (parent != NULL)
7601 checksum_die_context (parent, &ctx);
7602
7603 /* Checksum the current DIE. */
7604 die_odr_checksum (die->die_tag, name, &ctx);
7605 md5_finish_ctx (&ctx, checksum);
7606
7607 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7608 }
7609
7610 /* Next, compute the complete type signature. */
7611
7612 md5_init_ctx (&ctx);
7613 mark = 1;
7614 die->die_mark = mark;
7615
7616 /* Checksum the names of surrounding namespaces and structures. */
7617 if (parent != NULL)
7618 checksum_die_context (parent, &ctx);
7619
7620 /* Checksum the DIE and its children. */
7621 die_checksum_ordered (die, &ctx, &mark);
7622 unmark_all_dies (die);
7623 md5_finish_ctx (&ctx, checksum);
7624
7625 /* Store the signature in the type node and link the type DIE and the
7626 type node together. */
7627 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7628 DWARF_TYPE_SIGNATURE_SIZE);
7629 die->comdat_type_p = true;
7630 die->die_id.die_type_node = type_node;
7631 type_node->type_die = die;
7632
7633 /* If the DIE is a specification, link its declaration to the type node
7634 as well. */
7635 if (decl != NULL)
7636 {
7637 decl->comdat_type_p = true;
7638 decl->die_id.die_type_node = type_node;
7639 }
7640 }
7641
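/* A minimal sketch of the digest-to-signature step above, assuming
   DWARF_TYPE_SIGNATURE_SIZE is 8 (the size of a DWARF "sig8" reference);
   the SIG variable below is hypothetical and only for illustration, and
   CTX is assumed to hold the finished checksum state:

     unsigned char checksum[16];
     uint64_t sig;

     md5_finish_ctx (&ctx, checksum);
     memcpy (&sig, &checksum[16 - 8], 8);

   i.e. the last eight bytes of the 16-byte MD5 digest become the comdat
   type signature, while the ODR signature stored above uses bytes 8..15
   of a separate, name-only digest.  */
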
7642 /* Do the location expressions look the same? */
7643 static inline int
7644 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7645 {
7646 return loc1->dw_loc_opc == loc2->dw_loc_opc
7647 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7648 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7649 }
7650
7651 /* Do the values look the same? */
7652 static int
7653 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7654 {
7655 dw_loc_descr_ref loc1, loc2;
7656 rtx r1, r2;
7657
7658 if (v1->val_class != v2->val_class)
7659 return 0;
7660
7661 switch (v1->val_class)
7662 {
7663 case dw_val_class_const:
7664 case dw_val_class_const_implicit:
7665 return v1->v.val_int == v2->v.val_int;
7666 case dw_val_class_unsigned_const:
7667 case dw_val_class_unsigned_const_implicit:
7668 return v1->v.val_unsigned == v2->v.val_unsigned;
7669 case dw_val_class_const_double:
7670 return v1->v.val_double.high == v2->v.val_double.high
7671 && v1->v.val_double.low == v2->v.val_double.low;
7672 case dw_val_class_wide_int:
7673 return *v1->v.val_wide == *v2->v.val_wide;
7674 case dw_val_class_vec:
7675 if (v1->v.val_vec.length != v2->v.val_vec.length
7676 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7677 return 0;
7678 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7679 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7680 return 0;
7681 return 1;
7682 case dw_val_class_flag:
7683 return v1->v.val_flag == v2->v.val_flag;
7684 case dw_val_class_str:
7685 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7686
7687 case dw_val_class_addr:
7688 r1 = v1->v.val_addr;
7689 r2 = v2->v.val_addr;
7690 if (GET_CODE (r1) != GET_CODE (r2))
7691 return 0;
7692 return rtx_equal_p (r1, r2);
7693
7694 case dw_val_class_offset:
7695 return v1->v.val_offset == v2->v.val_offset;
7696
7697 case dw_val_class_loc:
7698 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7699 loc1 && loc2;
7700 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7701 if (!same_loc_p (loc1, loc2, mark))
7702 return 0;
7703 return !loc1 && !loc2;
7704
7705 case dw_val_class_die_ref:
7706 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7707
7708 case dw_val_class_symview:
7709 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7710
7711 case dw_val_class_fde_ref:
7712 case dw_val_class_vms_delta:
7713 case dw_val_class_lbl_id:
7714 case dw_val_class_lineptr:
7715 case dw_val_class_macptr:
7716 case dw_val_class_loclistsptr:
7717 case dw_val_class_high_pc:
7718 return 1;
7719
7720 case dw_val_class_file:
7721 case dw_val_class_file_implicit:
7722 return v1->v.val_file == v2->v.val_file;
7723
7724 case dw_val_class_data8:
7725 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7726
7727 default:
7728 return 1;
7729 }
7730 }
7731
7732 /* Do the attributes look the same? */
7733
7734 static int
7735 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7736 {
7737 if (at1->dw_attr != at2->dw_attr)
7738 return 0;
7739
7740 /* We don't care that this was compiled with a different compiler
7741 snapshot; if the output is the same, that's what matters. */
7742 if (at1->dw_attr == DW_AT_producer)
7743 return 1;
7744
7745 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7746 }
7747
7748 /* Do the dies look the same? */
7749
7750 static int
7751 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7752 {
7753 dw_die_ref c1, c2;
7754 dw_attr_node *a1;
7755 unsigned ix;
7756
7757 /* To avoid infinite recursion. */
7758 if (die1->die_mark)
7759 return die1->die_mark == die2->die_mark;
7760 die1->die_mark = die2->die_mark = ++(*mark);
7761
7762 if (die1->die_tag != die2->die_tag)
7763 return 0;
7764
7765 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7766 return 0;
7767
7768 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7769 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7770 return 0;
7771
7772 c1 = die1->die_child;
7773 c2 = die2->die_child;
7774 if (! c1)
7775 {
7776 if (c2)
7777 return 0;
7778 }
7779 else
7780 for (;;)
7781 {
7782 if (!same_die_p (c1, c2, mark))
7783 return 0;
7784 c1 = c1->die_sib;
7785 c2 = c2->die_sib;
7786 if (c1 == die1->die_child)
7787 {
7788 if (c2 == die2->die_child)
7789 break;
7790 else
7791 return 0;
7792 }
7793 }
7794
7795 return 1;
7796 }
7797
7798 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7799 children, and set die_symbol. */
7800
7801 static void
7802 compute_comp_unit_symbol (dw_die_ref unit_die)
7803 {
7804 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7805 const char *base = die_name ? lbasename (die_name) : "anonymous";
7806 char *name = XALLOCAVEC (char, strlen (base) + 64);
7807 char *p;
7808 int i, mark;
7809 unsigned char checksum[16];
7810 struct md5_ctx ctx;
7811
7812 /* Compute the checksum of the DIE, then append part of it as hex digits to
7813 the filename of the unit. */
7814
7815 md5_init_ctx (&ctx);
7816 mark = 0;
7817 die_checksum (unit_die, &ctx, &mark);
7818 unmark_all_dies (unit_die);
7819 md5_finish_ctx (&ctx, checksum);
7820
7821 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7822 not start with a letter but with anything valid for filenames and
7823 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7824 character is not a letter. */
7825 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7826 clean_symbol_name (name);
7827
7828 p = name + strlen (name);
7829 for (i = 0; i < 4; i++)
7830 {
7831 sprintf (p, "%.2x", checksum[i]);
7832 p += 2;
7833 }
7834
7835 unit_die->die_id.die_symbol = xstrdup (name);
7836 }
7837
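/* Sketch of the symbol construction above, with a hypothetical basename
   and digest (the actual hex digits depend on the DIE contents):

     base "foo.c"
       -> "foo.c."        ('g' is prepended only when the first character
                           is not a letter)
       -> clean_symbol_name fixes up any characters invalid in symbols
       -> the first four checksum bytes are appended as eight hex digits,
          e.g. "...deadbeef"

   Only eight hex characters are used, which keeps the symbol short while
   still making collisions between different units unlikely.  */
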
7838 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7839
7840 static int
7841 is_type_die (dw_die_ref die)
7842 {
7843 switch (die->die_tag)
7844 {
7845 case DW_TAG_array_type:
7846 case DW_TAG_class_type:
7847 case DW_TAG_interface_type:
7848 case DW_TAG_enumeration_type:
7849 case DW_TAG_pointer_type:
7850 case DW_TAG_reference_type:
7851 case DW_TAG_rvalue_reference_type:
7852 case DW_TAG_string_type:
7853 case DW_TAG_structure_type:
7854 case DW_TAG_subroutine_type:
7855 case DW_TAG_union_type:
7856 case DW_TAG_ptr_to_member_type:
7857 case DW_TAG_set_type:
7858 case DW_TAG_subrange_type:
7859 case DW_TAG_base_type:
7860 case DW_TAG_const_type:
7861 case DW_TAG_file_type:
7862 case DW_TAG_packed_type:
7863 case DW_TAG_volatile_type:
7864 case DW_TAG_typedef:
7865 return 1;
7866 default:
7867 return 0;
7868 }
7869 }
7870
7871 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7872 Basically, we want to choose the bits that are likely to be shared between
7873 compilations (types) and leave out the bits that are specific to individual
7874 compilations (functions). */
7875
7876 static int
7877 is_comdat_die (dw_die_ref c)
7878 {
7879 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7880 we do for stabs. The advantage is a greater likelihood of sharing between
7881 objects that don't include headers in the same order (and therefore would
7882 put the base types in a different comdat). jason 8/28/00 */
7883
7884 if (c->die_tag == DW_TAG_base_type)
7885 return 0;
7886
7887 if (c->die_tag == DW_TAG_pointer_type
7888 || c->die_tag == DW_TAG_reference_type
7889 || c->die_tag == DW_TAG_rvalue_reference_type
7890 || c->die_tag == DW_TAG_const_type
7891 || c->die_tag == DW_TAG_volatile_type)
7892 {
7893 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7894
7895 return t ? is_comdat_die (t) : 0;
7896 }
7897
7898 return is_type_die (c);
7899 }
7900
7901 /* Returns true iff C is a compile-unit DIE. */
7902
7903 static inline bool
7904 is_cu_die (dw_die_ref c)
7905 {
7906 return c && (c->die_tag == DW_TAG_compile_unit
7907 || c->die_tag == DW_TAG_skeleton_unit);
7908 }
7909
7910 /* Returns true iff C is a unit DIE of some sort. */
7911
7912 static inline bool
7913 is_unit_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_partial_unit
7917 || c->die_tag == DW_TAG_type_unit
7918 || c->die_tag == DW_TAG_skeleton_unit);
7919 }
7920
7921 /* Returns true iff C is a namespace DIE. */
7922
7923 static inline bool
7924 is_namespace_die (dw_die_ref c)
7925 {
7926 return c && c->die_tag == DW_TAG_namespace;
7927 }
7928
7929 /* Returns true iff C is a class or structure DIE. */
7930
7931 static inline bool
7932 is_class_die (dw_die_ref c)
7933 {
7934 return c && (c->die_tag == DW_TAG_class_type
7935 || c->die_tag == DW_TAG_structure_type);
7936 }
7937
7938 /* Return non-zero if this DIE is a template parameter. */
7939
7940 static inline bool
7941 is_template_parameter (dw_die_ref die)
7942 {
7943 switch (die->die_tag)
7944 {
7945 case DW_TAG_template_type_param:
7946 case DW_TAG_template_value_param:
7947 case DW_TAG_GNU_template_template_param:
7948 case DW_TAG_GNU_template_parameter_pack:
7949 return true;
7950 default:
7951 return false;
7952 }
7953 }
7954
7955 /* Return non-zero if this DIE represents a template instantiation. */
7956
7957 static inline bool
7958 is_template_instantiation (dw_die_ref die)
7959 {
7960 dw_die_ref c;
7961
7962 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7963 return false;
7964 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7965 return false;
7966 }
7967
7968 static char *
7969 gen_internal_sym (const char *prefix)
7970 {
7971 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7972
7973 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7974 return xstrdup (buf);
7975 }
7976
7977 /* Return non-zero if this DIE is a declaration. */
7978
7979 static int
7980 is_declaration_die (dw_die_ref die)
7981 {
7982 dw_attr_node *a;
7983 unsigned ix;
7984
7985 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7986 if (a->dw_attr == DW_AT_declaration)
7987 return 1;
7988
7989 return 0;
7990 }
7991
7992 /* Return non-zero if this DIE is nested inside a subprogram. */
7993
7994 static int
7995 is_nested_in_subprogram (dw_die_ref die)
7996 {
7997 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7998
7999 if (decl == NULL)
8000 decl = die;
8001 return local_scope_p (decl);
8002 }
8003
8004 /* Return non-zero if this DIE contains a defining declaration of a
8005 subprogram. */
8006
8007 static int
8008 contains_subprogram_definition (dw_die_ref die)
8009 {
8010 dw_die_ref c;
8011
8012 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8013 return 1;
8014 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8015 return 0;
8016 }
8017
8018 /* Return non-zero if this is a type DIE that should be moved to a
8019 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8020 unit type. */
8021
8022 static int
8023 should_move_die_to_comdat (dw_die_ref die)
8024 {
8025 switch (die->die_tag)
8026 {
8027 case DW_TAG_class_type:
8028 case DW_TAG_structure_type:
8029 case DW_TAG_enumeration_type:
8030 case DW_TAG_union_type:
8031 /* Don't move declarations, inlined instances, types nested in a
8032 subprogram, or types that contain subprogram definitions. */
8033 if (is_declaration_die (die)
8034 || get_AT (die, DW_AT_abstract_origin)
8035 || is_nested_in_subprogram (die)
8036 || contains_subprogram_definition (die))
8037 return 0;
8038 return 1;
8039 case DW_TAG_array_type:
8040 case DW_TAG_interface_type:
8041 case DW_TAG_pointer_type:
8042 case DW_TAG_reference_type:
8043 case DW_TAG_rvalue_reference_type:
8044 case DW_TAG_string_type:
8045 case DW_TAG_subroutine_type:
8046 case DW_TAG_ptr_to_member_type:
8047 case DW_TAG_set_type:
8048 case DW_TAG_subrange_type:
8049 case DW_TAG_base_type:
8050 case DW_TAG_const_type:
8051 case DW_TAG_file_type:
8052 case DW_TAG_packed_type:
8053 case DW_TAG_volatile_type:
8054 case DW_TAG_typedef:
8055 default:
8056 return 0;
8057 }
8058 }
8059
8060 /* Make a clone of DIE. */
8061
8062 static dw_die_ref
8063 clone_die (dw_die_ref die)
8064 {
8065 dw_die_ref clone = new_die_raw (die->die_tag);
8066 dw_attr_node *a;
8067 unsigned ix;
8068
8069 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8070 add_dwarf_attr (clone, a);
8071
8072 return clone;
8073 }
8074
8075 /* Make a clone of the tree rooted at DIE. */
8076
8077 static dw_die_ref
8078 clone_tree (dw_die_ref die)
8079 {
8080 dw_die_ref c;
8081 dw_die_ref clone = clone_die (die);
8082
8083 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8084
8085 return clone;
8086 }
8087
8088 /* Make a clone of DIE as a declaration. */
8089
8090 static dw_die_ref
8091 clone_as_declaration (dw_die_ref die)
8092 {
8093 dw_die_ref clone;
8094 dw_die_ref decl;
8095 dw_attr_node *a;
8096 unsigned ix;
8097
8098 /* If the DIE is already a declaration, just clone it. */
8099 if (is_declaration_die (die))
8100 return clone_die (die);
8101
8102 /* If the DIE is a specification, just clone its declaration DIE. */
8103 decl = get_AT_ref (die, DW_AT_specification);
8104 if (decl != NULL)
8105 {
8106 clone = clone_die (decl);
8107 if (die->comdat_type_p)
8108 add_AT_die_ref (clone, DW_AT_signature, die);
8109 return clone;
8110 }
8111
8112 clone = new_die_raw (die->die_tag);
8113
8114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8115 {
8116 /* We don't want to copy over all attributes.
8117 For example we don't want DW_AT_byte_size because otherwise we will no
8118 longer have a declaration and GDB will treat it as a definition. */
8119
8120 switch (a->dw_attr)
8121 {
8122 case DW_AT_abstract_origin:
8123 case DW_AT_artificial:
8124 case DW_AT_containing_type:
8125 case DW_AT_external:
8126 case DW_AT_name:
8127 case DW_AT_type:
8128 case DW_AT_virtuality:
8129 case DW_AT_linkage_name:
8130 case DW_AT_MIPS_linkage_name:
8131 add_dwarf_attr (clone, a);
8132 break;
8133 case DW_AT_byte_size:
8134 case DW_AT_alignment:
8135 default:
8136 break;
8137 }
8138 }
8139
8140 if (die->comdat_type_p)
8141 add_AT_die_ref (clone, DW_AT_signature, die);
8142
8143 add_AT_flag (clone, DW_AT_declaration, 1);
8144 return clone;
8145 }
8146
8147
8148 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8149
8150 struct decl_table_entry
8151 {
8152 dw_die_ref orig;
8153 dw_die_ref copy;
8154 };
8155
8156 /* Helpers to manipulate hash table of copied declarations. */
8157
8158 /* Hashtable helpers. */
8159
8160 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8161 {
8162 typedef die_struct *compare_type;
8163 static inline hashval_t hash (const decl_table_entry *);
8164 static inline bool equal (const decl_table_entry *, const die_struct *);
8165 };
8166
8167 inline hashval_t
8168 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8169 {
8170 return htab_hash_pointer (entry->orig);
8171 }
8172
8173 inline bool
8174 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8175 const die_struct *entry2)
8176 {
8177 return entry1->orig == entry2;
8178 }
8179
8180 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8181
8182 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8183 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8184 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8185 to check if the ancestor has already been copied into UNIT. */
8186
8187 static dw_die_ref
8188 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8189 decl_hash_type *decl_table)
8190 {
8191 dw_die_ref parent = die->die_parent;
8192 dw_die_ref new_parent = unit;
8193 dw_die_ref copy;
8194 decl_table_entry **slot = NULL;
8195 struct decl_table_entry *entry = NULL;
8196
8197 if (decl_table)
8198 {
8199 /* Check if the entry has already been copied to UNIT. */
8200 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8201 INSERT);
8202 if (*slot != HTAB_EMPTY_ENTRY)
8203 {
8204 entry = *slot;
8205 return entry->copy;
8206 }
8207
8208 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8209 entry = XCNEW (struct decl_table_entry);
8210 entry->orig = die;
8211 entry->copy = NULL;
8212 *slot = entry;
8213 }
8214
8215 if (parent != NULL)
8216 {
8217 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8218 if (spec != NULL)
8219 parent = spec;
8220 if (!is_unit_die (parent))
8221 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8222 }
8223
8224 copy = clone_as_declaration (die);
8225 add_child_die (new_parent, copy);
8226
8227 if (decl_table)
8228 {
8229 /* Record the pointer to the copy. */
8230 entry->copy = copy;
8231 }
8232
8233 return copy;
8234 }
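
/* For example (illustrative only): if DIE is a member declaration of

     namespace N { class C { ... }; }

   then copying it into a type unit UNIT produces declaration-only clones
   of N and C (and of DIE itself, via clone_as_declaration), nested under
   UNIT in the same order, so the fully qualified context is preserved
   without copying the rest of N and C.  DECL_TABLE keeps each ancestor
   from being cloned more than once per unit.  */
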
8235 /* Copy the declaration context to the new type unit DIE. This includes
8236 any surrounding namespace or type declarations. If the DIE has an
8237 AT_specification attribute, it also includes attributes and children
8238 attached to the specification, and returns a pointer to the original
8239 parent of the declaration DIE. Returns NULL otherwise. */
8240
8241 static dw_die_ref
8242 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8243 {
8244 dw_die_ref decl;
8245 dw_die_ref new_decl;
8246 dw_die_ref orig_parent = NULL;
8247
8248 decl = get_AT_ref (die, DW_AT_specification);
8249 if (decl == NULL)
8250 decl = die;
8251 else
8252 {
8253 unsigned ix;
8254 dw_die_ref c;
8255 dw_attr_node *a;
8256
8257 /* The original DIE will be changed to a declaration, and must
8258 be moved to be a child of the original declaration DIE. */
8259 orig_parent = decl->die_parent;
8260
8261 /* Copy the type node pointer from the new DIE to the original
8262 declaration DIE so we can forward references later. */
8263 decl->comdat_type_p = true;
8264 decl->die_id.die_type_node = die->die_id.die_type_node;
8265
8266 remove_AT (die, DW_AT_specification);
8267
8268 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8269 {
8270 if (a->dw_attr != DW_AT_name
8271 && a->dw_attr != DW_AT_declaration
8272 && a->dw_attr != DW_AT_external)
8273 add_dwarf_attr (die, a);
8274 }
8275
8276 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8277 }
8278
8279 if (decl->die_parent != NULL
8280 && !is_unit_die (decl->die_parent))
8281 {
8282 new_decl = copy_ancestor_tree (unit, decl, NULL);
8283 if (new_decl != NULL)
8284 {
8285 remove_AT (new_decl, DW_AT_signature);
8286 add_AT_specification (die, new_decl);
8287 }
8288 }
8289
8290 return orig_parent;
8291 }
8292
8293 /* Generate the skeleton ancestor tree for the given NODE, then clone
8294 the DIE and add the clone into the tree. */
8295
8296 static void
8297 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8298 {
8299 if (node->new_die != NULL)
8300 return;
8301
8302 node->new_die = clone_as_declaration (node->old_die);
8303
8304 if (node->parent != NULL)
8305 {
8306 generate_skeleton_ancestor_tree (node->parent);
8307 add_child_die (node->parent->new_die, node->new_die);
8308 }
8309 }
8310
8311 /* Generate a skeleton tree of DIEs containing any declarations that are
8312 found in the original tree. We traverse the tree looking for declaration
8313 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8314
8315 static void
8316 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8317 {
8318 skeleton_chain_node node;
8319 dw_die_ref c;
8320 dw_die_ref first;
8321 dw_die_ref prev = NULL;
8322 dw_die_ref next = NULL;
8323
8324 node.parent = parent;
8325
8326 first = c = parent->old_die->die_child;
8327 if (c)
8328 next = c->die_sib;
8329 if (c) do {
8330 if (prev == NULL || prev->die_sib == c)
8331 prev = c;
8332 c = next;
8333 next = (c == first ? NULL : c->die_sib);
8334 node.old_die = c;
8335 node.new_die = NULL;
8336 if (is_declaration_die (c))
8337 {
8338 if (is_template_instantiation (c))
8339 {
8340 /* Instantiated templates do not need to be cloned into the
8341 type unit. Just move the DIE and its children back to
8342 the skeleton tree (in the main CU). */
8343 remove_child_with_prev (c, prev);
8344 add_child_die (parent->new_die, c);
8345 c = prev;
8346 }
8347 else if (c->comdat_type_p)
8348 {
8349 /* This is the skeleton of a type broken out earlier by
8350 break_out_comdat_types. Clone the existing DIE, but keep the children
8351 under the original (which is in the main CU). */
8352 dw_die_ref clone = clone_die (c);
8353
8354 replace_child (c, clone, prev);
8355 generate_skeleton_ancestor_tree (parent);
8356 add_child_die (parent->new_die, c);
8357 c = clone;
8358 continue;
8359 }
8360 else
8361 {
8362 /* Clone the existing DIE, move the original to the skeleton
8363 tree (which is in the main CU), and put the clone, with
8364 all the original's children, where the original came from
8365 (which is about to be moved to the type unit). */
8366 dw_die_ref clone = clone_die (c);
8367 move_all_children (c, clone);
8368
8369 /* If the original has a DW_AT_object_pointer attribute,
8370 it would now point to a child DIE just moved to the
8371 cloned tree, so we need to remove that attribute from
8372 the original. */
8373 remove_AT (c, DW_AT_object_pointer);
8374
8375 replace_child (c, clone, prev);
8376 generate_skeleton_ancestor_tree (parent);
8377 add_child_die (parent->new_die, c);
8378 node.old_die = clone;
8379 node.new_die = c;
8380 c = clone;
8381 }
8382 }
8383 generate_skeleton_bottom_up (&node);
8384 } while (next != NULL);
8385 }
8386
8387 /* Wrapper function for generate_skeleton_bottom_up. */
8388
8389 static dw_die_ref
8390 generate_skeleton (dw_die_ref die)
8391 {
8392 skeleton_chain_node node;
8393
8394 node.old_die = die;
8395 node.new_die = NULL;
8396 node.parent = NULL;
8397
8398 /* If this type definition is nested inside another type,
8399 and is not an instantiation of a template, always leave
8400 at least a declaration in its place. */
8401 if (die->die_parent != NULL
8402 && is_type_die (die->die_parent)
8403 && !is_template_instantiation (die))
8404 node.new_die = clone_as_declaration (die);
8405
8406 generate_skeleton_bottom_up (&node);
8407 return node.new_die;
8408 }
8409
8410 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8411 declaration. The original DIE is moved to a new compile unit so that
8412 existing references to it follow it to the new location. If any of the
8413 original DIE's descendants is a declaration, we need to replace the
8414 original DIE with a skeleton tree and move the declarations back into the
8415 skeleton tree. */
8416
8417 static dw_die_ref
8418 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8419 dw_die_ref prev)
8420 {
8421 dw_die_ref skeleton, orig_parent;
8422
8423 /* Copy the declaration context to the type unit DIE. If the returned
8424 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8425 that DIE. */
8426 orig_parent = copy_declaration_context (unit, child);
8427
8428 skeleton = generate_skeleton (child);
8429 if (skeleton == NULL)
8430 remove_child_with_prev (child, prev);
8431 else
8432 {
8433 skeleton->comdat_type_p = true;
8434 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8435
8436 /* If the original DIE was a specification, we need to put
8437 the skeleton under the parent DIE of the declaration.
8438 This leaves the original declaration in the tree, but
8439 it will be pruned later since there are no longer any
8440 references to it. */
8441 if (orig_parent != NULL)
8442 {
8443 remove_child_with_prev (child, prev);
8444 add_child_die (orig_parent, skeleton);
8445 }
8446 else
8447 replace_child (child, skeleton, prev);
8448 }
8449
8450 return skeleton;
8451 }
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8457
8458 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8459 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8460 DWARF procedure references in the DW_AT_location attribute. */
8461
8462 static dw_die_ref
8463 copy_dwarf_procedure (dw_die_ref die,
8464 comdat_type_node *type_node,
8465 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8466 {
8467 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8468
8469 /* DWARF procedures are not supposed to have children... */
8470 gcc_assert (die->die_child == NULL);
8471
8472 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8473 gcc_assert (vec_safe_length (die->die_attr) == 1
8474 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8475
8476 /* Do not copy DWARF procedures more than once. */
8477 bool existed;
8478 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8479 if (existed)
8480 return die_copy;
8481
8482 die_copy = clone_die (die);
8483 add_child_die (type_node->root_die, die_copy);
8484 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8485 return die_copy;
8486 }
8487
8488 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8489 procedures in DIE's attributes. */
8490
8491 static void
8492 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8493 comdat_type_node *type_node,
8494 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8495 {
8496 dw_attr_node *a;
8497 unsigned i;
8498
8499 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8500 {
8501 dw_loc_descr_ref loc;
8502
8503 if (a->dw_attr_val.val_class != dw_val_class_loc)
8504 continue;
8505
8506 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8507 {
8508 switch (loc->dw_loc_opc)
8509 {
8510 case DW_OP_call2:
8511 case DW_OP_call4:
8512 case DW_OP_call_ref:
8513 gcc_assert (loc->dw_loc_oprnd1.val_class
8514 == dw_val_class_die_ref);
8515 loc->dw_loc_oprnd1.v.val_die_ref.die
8516 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8517 type_node,
8518 copied_dwarf_procs);
8519
8520 default:
8521 break;
8522 }
8523 }
8524 }
8525 }
8526
8527 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8528 rewrite references to point to the copies.
8529
8530 References are looked for in DIE's attributes and recursively in all its
8531 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8532 mapping from old DWARF procedures to their copies. It is used to avoid
8533 copying the same DWARF procedure twice under TYPE_NODE. */
8534
8535 static void
8536 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8537 comdat_type_node *type_node,
8538 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8539 {
8540 dw_die_ref c;
8541
8542 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8543 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8544 type_node,
8545 copied_dwarf_procs));
8546 }
8547
8548 /* Traverse the DIE and set up additional .debug_types or .debug_info
8549 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8550 section. */
8551
8552 static void
8553 break_out_comdat_types (dw_die_ref die)
8554 {
8555 dw_die_ref c;
8556 dw_die_ref first;
8557 dw_die_ref prev = NULL;
8558 dw_die_ref next = NULL;
8559 dw_die_ref unit = NULL;
8560
8561 first = c = die->die_child;
8562 if (c)
8563 next = c->die_sib;
8564 if (c) do {
8565 if (prev == NULL || prev->die_sib == c)
8566 prev = c;
8567 c = next;
8568 next = (c == first ? NULL : c->die_sib);
8569 if (should_move_die_to_comdat (c))
8570 {
8571 dw_die_ref replacement;
8572 comdat_type_node *type_node;
8573
8574 /* Break out nested types into their own type units. */
8575 break_out_comdat_types (c);
8576
8577 /* Create a new type unit DIE as the root for the new tree, and
8578 add it to the list of comdat types. */
8579 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8580 add_AT_unsigned (unit, DW_AT_language,
8581 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8582 type_node = ggc_cleared_alloc<comdat_type_node> ();
8583 type_node->root_die = unit;
8584 type_node->next = comdat_type_list;
8585 comdat_type_list = type_node;
8586
8587 /* Generate the type signature. */
8588 generate_type_signature (c, type_node);
8589
8590 /* Copy the declaration context, attributes, and children of the
8591 declaration into the new type unit DIE, then remove this DIE
8592 from the main CU (or replace it with a skeleton if necessary). */
8593 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8594 type_node->skeleton_die = replacement;
8595
8596 /* Add the DIE to the new compunit. */
8597 add_child_die (unit, c);
8598
8599 /* Types can reference DWARF procedures for type size or data location
8600 expressions. Calls in DWARF expressions cannot target procedures
8601 that are not in the same section. So we must copy DWARF procedures
8602 along with this type and then rewrite references to them. */
8603 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8604 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8605
8606 if (replacement != NULL)
8607 c = replacement;
8608 }
8609 else if (c->die_tag == DW_TAG_namespace
8610 || c->die_tag == DW_TAG_class_type
8611 || c->die_tag == DW_TAG_structure_type
8612 || c->die_tag == DW_TAG_union_type)
8613 {
8614 /* Look for nested types that can be broken out. */
8615 break_out_comdat_types (c);
8616 }
8617 } while (next != NULL);
8618 }
8619
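/* Schematically (illustrative only), breaking out a type turns

     DW_TAG_compile_unit
       DW_TAG_structure_type "Foo"        full definition
         ... members ...

   into

     DW_TAG_compile_unit
       [skeleton declaration of "Foo", only if nested declarations or
        member functions must stay behind; otherwise nothing]
     DW_TAG_type_unit
       DW_TAG_structure_type "Foo"        full definition, COMDAT
         ... members ...

   with the two halves tied together by the signature computed in
   generate_type_signature.  */
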
8620 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8621 Enter all the cloned children into the hash table decl_table. */
8622
8623 static dw_die_ref
8624 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8625 {
8626 dw_die_ref c;
8627 dw_die_ref clone;
8628 struct decl_table_entry *entry;
8629 decl_table_entry **slot;
8630
8631 if (die->die_tag == DW_TAG_subprogram)
8632 clone = clone_as_declaration (die);
8633 else
8634 clone = clone_die (die);
8635
8636 slot = decl_table->find_slot_with_hash (die,
8637 htab_hash_pointer (die), INSERT);
8638
8639 /* Assert that DIE isn't in the hash table yet. If it were already
8640 there, its ancestors would necessarily be there as well, and
8641 clone_tree_partial wouldn't have been called. */
8642 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8643
8644 entry = XCNEW (struct decl_table_entry);
8645 entry->orig = die;
8646 entry->copy = clone;
8647 *slot = entry;
8648
8649 if (die->die_tag != DW_TAG_subprogram)
8650 FOR_EACH_CHILD (die, c,
8651 add_child_die (clone, clone_tree_partial (c, decl_table)));
8652
8653 return clone;
8654 }
8655
8656 /* Walk the DIE and its children, looking for references to incomplete
8657 or trivial types that are unmarked (i.e., that are not in the current
8658 type_unit). */
8659
8660 static void
8661 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8662 {
8663 dw_die_ref c;
8664 dw_attr_node *a;
8665 unsigned ix;
8666
8667 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8668 {
8669 if (AT_class (a) == dw_val_class_die_ref)
8670 {
8671 dw_die_ref targ = AT_ref (a);
8672 decl_table_entry **slot;
8673 struct decl_table_entry *entry;
8674
8675 if (targ->die_mark != 0 || targ->comdat_type_p)
8676 continue;
8677
8678 slot = decl_table->find_slot_with_hash (targ,
8679 htab_hash_pointer (targ),
8680 INSERT);
8681
8682 if (*slot != HTAB_EMPTY_ENTRY)
8683 {
8684 /* TARG has already been copied, so we just need to
8685 modify the reference to point to the copy. */
8686 entry = *slot;
8687 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8688 }
8689 else
8690 {
8691 dw_die_ref parent = unit;
8692 dw_die_ref copy = clone_die (targ);
8693
8694 /* Record in DECL_TABLE that TARG has been copied.
8695 Need to do this now, before the recursive call,
8696 because DECL_TABLE may be expanded and SLOT
8697 would no longer be a valid pointer. */
8698 entry = XCNEW (struct decl_table_entry);
8699 entry->orig = targ;
8700 entry->copy = copy;
8701 *slot = entry;
8702
8703 /* If TARG is not a declaration DIE, we need to copy its
8704 children. */
8705 if (!is_declaration_die (targ))
8706 {
8707 FOR_EACH_CHILD (
8708 targ, c,
8709 add_child_die (copy,
8710 clone_tree_partial (c, decl_table)));
8711 }
8712
8713 /* Make sure the cloned tree is marked as part of the
8714 type unit. */
8715 mark_dies (copy);
8716
8717 /* If TARG has surrounding context, copy its ancestor tree
8718 into the new type unit. */
8719 if (targ->die_parent != NULL
8720 && !is_unit_die (targ->die_parent))
8721 parent = copy_ancestor_tree (unit, targ->die_parent,
8722 decl_table);
8723
8724 add_child_die (parent, copy);
8725 a->dw_attr_val.v.val_die_ref.die = copy;
8726
8727 /* Make sure the newly-copied DIE is walked. If it was
8728 installed in a previously-added context, it won't
8729 get visited otherwise. */
8730 if (parent != unit)
8731 {
8732 /* Find the highest point of the newly-added tree,
8733 mark each node along the way, and walk from there. */
8734 parent->die_mark = 1;
8735 while (parent->die_parent
8736 && parent->die_parent->die_mark == 0)
8737 {
8738 parent = parent->die_parent;
8739 parent->die_mark = 1;
8740 }
8741 copy_decls_walk (unit, parent, decl_table);
8742 }
8743 }
8744 }
8745 }
8746
8747 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8748 }
8749
8750 /* Copy declarations for "unworthy" types into the new comdat section.
8751 Incomplete types, modified types, and certain other types aren't broken
8752 out into comdat sections of their own, so they don't have a signature,
8753 and we need to copy the declaration into the same section so that we
8754 don't have an external reference. */
8755
8756 static void
8757 copy_decls_for_unworthy_types (dw_die_ref unit)
8758 {
8759 mark_dies (unit);
8760 decl_hash_type decl_table (10);
8761 copy_decls_walk (unit, unit, &decl_table);
8762 unmark_dies (unit);
8763 }
8764
8765 /* Traverse the DIE and add a sibling attribute if it may have the
8766 effect of speeding up access to siblings. To save some space,
8767 avoid generating sibling attributes for DIEs without children. */
8768
8769 static void
8770 add_sibling_attributes (dw_die_ref die)
8771 {
8772 dw_die_ref c;
8773
8774 if (! die->die_child)
8775 return;
8776
8777 if (die->die_parent && die != die->die_parent->die_child)
8778 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8779
8780 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8781 }
8782
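/* For instance, in a layout like

     DW_TAG_subprogram "f"        gets DW_AT_sibling -> "g"
       ... many formal parameters and lexical blocks ...
     DW_TAG_subprogram "g"

   a consumer that only wants to enumerate top-level DIEs can follow the
   sibling reference and skip f's children without decoding them.  Only
   DIEs that have children, and that are not the last child of their
   parent (whose sibling link wraps back to the first child), receive the
   attribute.  */
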
8783 /* Output all location lists for the DIE and its children. */
8784
8785 static void
8786 output_location_lists (dw_die_ref die)
8787 {
8788 dw_die_ref c;
8789 dw_attr_node *a;
8790 unsigned ix;
8791
8792 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8793 if (AT_class (a) == dw_val_class_loc_list)
8794 output_loc_list (AT_loc_list (a));
8795
8796 FOR_EACH_CHILD (die, c, output_location_lists (c));
8797 }
8798
8799 /* During assign_location_list_indexes and output_loclists_offsets this
8800 holds the current index; afterwards it holds the number of assigned
8801 indexes (i.e. how large the .debug_loclists* offset table should be). */
8802 static unsigned int loc_list_idx;
8803
8804 /* Output all location list offsets for the DIE and its children. */
8805
8806 static void
8807 output_loclists_offsets (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref l = AT_loc_list (a);
8817 if (l->offset_emitted)
8818 continue;
8819 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8820 loc_section_label, NULL);
8821 gcc_assert (l->hash == loc_list_idx);
8822 loc_list_idx++;
8823 l->offset_emitted = true;
8824 }
8825
8826 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8827 }
8828
8829 /* Recursively set indexes of location lists. */
8830
8831 static void
8832 assign_location_list_indexes (dw_die_ref die)
8833 {
8834 dw_die_ref c;
8835 dw_attr_node *a;
8836 unsigned ix;
8837
8838 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8839 if (AT_class (a) == dw_val_class_loc_list)
8840 {
8841 dw_loc_list_ref list = AT_loc_list (a);
8842 if (!list->num_assigned)
8843 {
8844 list->num_assigned = true;
8845 list->hash = loc_list_idx++;
8846 }
8847 }
8848
8849 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8850 }
8851
8852 /* We want to limit the number of external references, because they are
8853 larger than local references: a relocation takes multiple words, and
8854 even a sig8 reference is always eight bytes, whereas a local reference
8855 can be as small as one byte (though GCC usually emits the four-byte DW_FORM_ref4).
8856 So if we encounter multiple external references to the same type DIE, we
8857 make a local typedef stub for it and redirect all references there.
8858
8859 This is the element of the hash table for keeping track of these
8860 references. */
8861
8862 struct external_ref
8863 {
8864 dw_die_ref type;
8865 dw_die_ref stub;
8866 unsigned n_refs;
8867 };
8868
8869 /* Hashtable helpers. */
8870
8871 struct external_ref_hasher : free_ptr_hash <external_ref>
8872 {
8873 static inline hashval_t hash (const external_ref *);
8874 static inline bool equal (const external_ref *, const external_ref *);
8875 };
8876
8877 inline hashval_t
8878 external_ref_hasher::hash (const external_ref *r)
8879 {
8880 dw_die_ref die = r->type;
8881 hashval_t h = 0;
8882
8883 /* We can't use the address of the DIE for hashing, because
8884 that will make the order of the stub DIEs non-deterministic. */
8885 if (! die->comdat_type_p)
8886 /* We have a symbol; use it to compute a hash. */
8887 h = htab_hash_string (die->die_id.die_symbol);
8888 else
8889 {
8890 /* We have a type signature; use a subset of the bits as the hash.
8891 The 8-byte signature is at least as large as hashval_t. */
8892 comdat_type_node *type_node = die->die_id.die_type_node;
8893 memcpy (&h, type_node->signature, sizeof (h));
8894 }
8895 return h;
8896 }
8897
8898 inline bool
8899 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8900 {
8901 return r1->type == r2->type;
8902 }
8903
8904 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8905
8906 /* Return a pointer to the external_ref for references to DIE. */
8907
8908 static struct external_ref *
8909 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8910 {
8911 struct external_ref ref, *ref_p;
8912 external_ref **slot;
8913
8914 ref.type = die;
8915 slot = map->find_slot (&ref, INSERT);
8916 if (*slot != HTAB_EMPTY_ENTRY)
8917 return *slot;
8918
8919 ref_p = XCNEW (struct external_ref);
8920 ref_p->type = die;
8921 *slot = ref_p;
8922 return ref_p;
8923 }
8924
8925 /* Subroutine of optimize_external_refs, below.
8926
8927 If we see a type skeleton, record it as our stub. If we see external
8928 references, remember how many we've seen. */
8929
8930 static void
8931 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8932 {
8933 dw_die_ref c;
8934 dw_attr_node *a;
8935 unsigned ix;
8936 struct external_ref *ref_p;
8937
8938 if (is_type_die (die)
8939 && (c = get_AT_ref (die, DW_AT_signature)))
8940 {
8941 /* This is a local skeleton; use it for local references. */
8942 ref_p = lookup_external_ref (map, c);
8943 ref_p->stub = die;
8944 }
8945
8946 /* Scan the DIE references, and remember any that refer to DIEs from
8947 other CUs (i.e. those which are not marked). */
8948 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8949 if (AT_class (a) == dw_val_class_die_ref
8950 && (c = AT_ref (a))->die_mark == 0
8951 && is_type_die (c))
8952 {
8953 ref_p = lookup_external_ref (map, c);
8954 ref_p->n_refs++;
8955 }
8956
8957 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8958 }
8959
8960 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8961 points to an external_ref, DATA is the CU we're processing. If we don't
8962 already have a local stub, and we have multiple refs, build a stub. */
8963
8964 int
8965 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8966 {
8967 struct external_ref *ref_p = *slot;
8968
8969 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8970 {
8971 /* We have multiple references to this type, so build a small stub.
8972 Both of these forms are a bit dodgy from the perspective of the
8973 DWARF standard, since technically they should have names. */
8974 dw_die_ref cu = data;
8975 dw_die_ref type = ref_p->type;
8976 dw_die_ref stub = NULL;
8977
8978 if (type->comdat_type_p)
8979 {
8980 /* If we refer to this type via sig8, use AT_signature. */
8981 stub = new_die (type->die_tag, cu, NULL_TREE);
8982 add_AT_die_ref (stub, DW_AT_signature, type);
8983 }
8984 else
8985 {
8986 /* Otherwise, use a typedef with no name. */
8987 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8988 add_AT_die_ref (stub, DW_AT_type, type);
8989 }
8990
8991 stub->die_mark++;
8992 ref_p->stub = stub;
8993 }
8994 return 1;
8995 }
8996
8997 /* DIE is a unit; look through all the DIE references to see if there are
8998 any external references to types, and if so, create local stubs for
8999 them which will be applied in build_abbrev_table. This is useful because
9000 references to local DIEs are smaller. */
9001
9002 static external_ref_hash_type *
9003 optimize_external_refs (dw_die_ref die)
9004 {
9005 external_ref_hash_type *map = new external_ref_hash_type (10);
9006 optimize_external_refs_1 (die, map);
9007 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9008 return map;
9009 }
9010
9011 /* The following variables are temporaries that are computed only during the
9012 build_abbrev_table call and used and released during the following
9013 optimize_abbrev_table call. */
9014
9015 /* First abbrev_id that can be optimized based on usage. */
9016 static unsigned int abbrev_opt_start;
9017
9018 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9019 abbrev_id smaller than this, because they must be already sized
9020 during build_abbrev_table). */
9021 static unsigned int abbrev_opt_base_type_end;
9022
9023 /* Vector of usage counts during build_abbrev_table. Indexed by
9024 abbrev_id - abbrev_opt_start. */
9025 static vec<unsigned int> abbrev_usage_count;
9026
9027 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9028 static vec<dw_die_ref> sorted_abbrev_dies;
9029
9030 /* The format of each DIE (and its attribute value pairs) is encoded in an
9031 abbreviation table. This routine builds the abbreviation table and assigns
9032 a unique abbreviation id for each abbreviation entry. The children of each
9033 die are visited recursively. */
9034
9035 static void
9036 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9037 {
9038 unsigned int abbrev_id = 0;
9039 dw_die_ref c;
9040 dw_attr_node *a;
9041 unsigned ix;
9042 dw_die_ref abbrev;
9043
9044 /* Scan the DIE references, and replace any that refer to
9045 DIEs from other CUs (i.e. those which are not marked) with
9046 the local stubs we built in optimize_external_refs. */
9047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9048 if (AT_class (a) == dw_val_class_die_ref
9049 && (c = AT_ref (a))->die_mark == 0)
9050 {
9051 struct external_ref *ref_p;
9052 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9053
9054 ref_p = lookup_external_ref (extern_map, c);
9055 if (ref_p->stub && ref_p->stub != die)
9056 change_AT_die_ref (a, ref_p->stub);
9057 else
9058 /* We aren't changing this reference, so mark it external. */
9059 set_AT_ref_external (a, 1);
9060 }
9061
9062 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9063 {
9064 dw_attr_node *die_a, *abbrev_a;
9065 unsigned ix;
9066 bool ok = true;
9067
9068 if (abbrev_id == 0)
9069 continue;
9070 if (abbrev->die_tag != die->die_tag)
9071 continue;
9072 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9073 continue;
9074
9075 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9076 continue;
9077
9078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9079 {
9080 abbrev_a = &(*abbrev->die_attr)[ix];
9081 if ((abbrev_a->dw_attr != die_a->dw_attr)
9082 || (value_format (abbrev_a) != value_format (die_a)))
9083 {
9084 ok = false;
9085 break;
9086 }
9087 }
9088 if (ok)
9089 break;
9090 }
9091
9092 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9093 {
9094 vec_safe_push (abbrev_die_table, die);
9095 if (abbrev_opt_start)
9096 abbrev_usage_count.safe_push (0);
9097 }
9098 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9099 {
9100 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9101 sorted_abbrev_dies.safe_push (die);
9102 }
9103
9104 die->die_abbrev = abbrev_id;
9105 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9106 }
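/* A hypothetical illustration of the matching loop above: two
   DW_TAG_variable DIEs that both carry DW_AT_name and DW_AT_type with
   identical forms and have no children can share one abbreviation entry,
   while a third variable that additionally has DW_AT_external fails the
   attribute-count check and is pushed as a new entry.  */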
9107
9108 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9109 by die_abbrev's usage count, from the most commonly used
9110 abbreviation to the least. */
9111
9112 static int
9113 die_abbrev_cmp (const void *p1, const void *p2)
9114 {
9115 dw_die_ref die1 = *(const dw_die_ref *) p1;
9116 dw_die_ref die2 = *(const dw_die_ref *) p2;
9117
9118 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9119 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9120
9121 if (die1->die_abbrev >= abbrev_opt_base_type_end
9122 && die2->die_abbrev >= abbrev_opt_base_type_end)
9123 {
9124 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9125 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9126 return -1;
9127 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9128 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9129 return 1;
9130 }
9131
9132 /* Stabilize the sort. */
9133 if (die1->die_abbrev < die2->die_abbrev)
9134 return -1;
9135 if (die1->die_abbrev > die2->die_abbrev)
9136 return 1;
9137
9138 return 0;
9139 }
9140
9141 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9142 class attributes of DIEs between sorted_abbrev_dies[first_id] and
9143 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9144 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
9145
9146 static void
9147 optimize_implicit_const (unsigned int first_id, unsigned int end,
9148 vec<bool> &implicit_consts)
9149 {
9150 /* It never makes sense if there is just one DIE using the abbreviation. */
9151 if (end < first_id + 2)
9152 return;
9153
9154 dw_attr_node *a;
9155 unsigned ix, i;
9156 dw_die_ref die = sorted_abbrev_dies[first_id];
9157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9158 if (implicit_consts[ix])
9159 {
9160 enum dw_val_class new_class = dw_val_class_none;
9161 switch (AT_class (a))
9162 {
9163 case dw_val_class_unsigned_const:
9164 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9165 continue;
9166
9167 /* The .debug_abbrev section will grow by
9168 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9169 in all the DIEs using that abbreviation. */
9170 if (constant_size (AT_unsigned (a)) * (end - first_id)
9171 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9172 continue;
9173
9174 new_class = dw_val_class_unsigned_const_implicit;
9175 break;
9176
9177 case dw_val_class_const:
9178 new_class = dw_val_class_const_implicit;
9179 break;
9180
9181 case dw_val_class_file:
9182 new_class = dw_val_class_file_implicit;
9183 break;
9184
9185 default:
9186 continue;
9187 }
9188 for (i = first_id; i < end; i++)
9189 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9190 = new_class;
9191 }
9192 }
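/* A worked example of the size heuristic above (values are illustrative):
   an unsigned value such as 0x1234 costs constant_size (0x1234) == 2 bytes
   in every DIE but only size_of_sleb128 (0x1234) == 2 bytes once in
   .debug_abbrev, so with three DIEs sharing the abbreviation 2 * 3 > 2 and
   the attribute becomes DW_FORM_implicit_const; a value such as 0xc0 shared
   by just two DIEs costs 1 byte in each DIE (2 in total) versus a 2-byte
   sleb128, so it is left unchanged.  */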
9193
9194 /* Attempt to optimize the abbreviation table for abbreviations numbered
9195 abbrev_opt_start and above. */
9196
9197 static void
9198 optimize_abbrev_table (void)
9199 {
9200 if (abbrev_opt_start
9201 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9202 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9203 {
9204 auto_vec<bool, 32> implicit_consts;
9205 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9206
9207 unsigned int abbrev_id = abbrev_opt_start - 1;
9208 unsigned int first_id = ~0U;
9209 unsigned int last_abbrev_id = 0;
9210 unsigned int i;
9211 dw_die_ref die;
9212 if (abbrev_opt_base_type_end > abbrev_opt_start)
9213 abbrev_id = abbrev_opt_base_type_end - 1;
9214 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9215 most commonly used abbreviations come first. */
9216 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9217 {
9218 dw_attr_node *a;
9219 unsigned ix;
9220
9221 /* If calc_base_type_die_sizes has been called, the CU and
9222 base types after it can't be optimized, because we've already
9223 calculated their DIE offsets. We've sorted them first. */
9224 if (die->die_abbrev < abbrev_opt_base_type_end)
9225 continue;
9226 if (die->die_abbrev != last_abbrev_id)
9227 {
9228 last_abbrev_id = die->die_abbrev;
9229 if (dwarf_version >= 5 && first_id != ~0U)
9230 optimize_implicit_const (first_id, i, implicit_consts);
9231 abbrev_id++;
9232 (*abbrev_die_table)[abbrev_id] = die;
9233 if (dwarf_version >= 5)
9234 {
9235 first_id = i;
9236 implicit_consts.truncate (0);
9237
9238 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9239 switch (AT_class (a))
9240 {
9241 case dw_val_class_const:
9242 case dw_val_class_unsigned_const:
9243 case dw_val_class_file:
9244 implicit_consts.safe_push (true);
9245 break;
9246 default:
9247 implicit_consts.safe_push (false);
9248 break;
9249 }
9250 }
9251 }
9252 else if (dwarf_version >= 5)
9253 {
9254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9255 if (!implicit_consts[ix])
9256 continue;
9257 else
9258 {
9259 dw_attr_node *other_a
9260 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9261 if (!dw_val_equal_p (&a->dw_attr_val,
9262 &other_a->dw_attr_val))
9263 implicit_consts[ix] = false;
9264 }
9265 }
9266 die->die_abbrev = abbrev_id;
9267 }
9268 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9269 if (dwarf_version >= 5 && first_id != ~0U)
9270 optimize_implicit_const (first_id, i, implicit_consts);
9271 }
9272
9273 abbrev_opt_start = 0;
9274 abbrev_opt_base_type_end = 0;
9275 abbrev_usage_count.release ();
9276 sorted_abbrev_dies.release ();
9277 }
9278 \f
9279 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9280
9281 static int
9282 constant_size (unsigned HOST_WIDE_INT value)
9283 {
9284 int log;
9285
9286 if (value == 0)
9287 log = 0;
9288 else
9289 log = floor_log2 (value);
9290
9291 log = log / 8;
9292 log = 1 << (floor_log2 (log) + 1);
9293
9294 return log;
9295 }
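/* Worked examples of the computation above: constant_size (0) and
   constant_size (0xff) are 1, constant_size (0x100) is 2,
   constant_size (0x10000) is 4, and anything wider than 32 bits yields 8,
   matching the DW_FORM_data1/2/4/8 sizes chosen in value_format.  */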
9296
9297 /* Return the size of a DIE as it is represented in the
9298 .debug_info section. */
9299
9300 static unsigned long
9301 size_of_die (dw_die_ref die)
9302 {
9303 unsigned long size = 0;
9304 dw_attr_node *a;
9305 unsigned ix;
9306 enum dwarf_form form;
9307
9308 size += size_of_uleb128 (die->die_abbrev);
9309 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9310 {
9311 switch (AT_class (a))
9312 {
9313 case dw_val_class_addr:
9314 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9315 {
9316 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9317 size += size_of_uleb128 (AT_index (a));
9318 }
9319 else
9320 size += DWARF2_ADDR_SIZE;
9321 break;
9322 case dw_val_class_offset:
9323 size += DWARF_OFFSET_SIZE;
9324 break;
9325 case dw_val_class_loc:
9326 {
9327 unsigned long lsize = size_of_locs (AT_loc (a));
9328
9329 /* Block length. */
9330 if (dwarf_version >= 4)
9331 size += size_of_uleb128 (lsize);
9332 else
9333 size += constant_size (lsize);
9334 size += lsize;
9335 }
9336 break;
9337 case dw_val_class_loc_list:
9338 case dw_val_class_view_list:
9339 if (dwarf_split_debug_info && dwarf_version >= 5)
9340 {
9341 gcc_assert (AT_loc_list (a)->num_assigned);
9342 size += size_of_uleb128 (AT_loc_list (a)->hash);
9343 }
9344 else
9345 size += DWARF_OFFSET_SIZE;
9346 break;
9347 case dw_val_class_range_list:
9348 if (value_format (a) == DW_FORM_rnglistx)
9349 {
9350 gcc_assert (rnglist_idx);
9351 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9352 size += size_of_uleb128 (r->idx);
9353 }
9354 else
9355 size += DWARF_OFFSET_SIZE;
9356 break;
9357 case dw_val_class_const:
9358 size += size_of_sleb128 (AT_int (a));
9359 break;
9360 case dw_val_class_unsigned_const:
9361 {
9362 int csize = constant_size (AT_unsigned (a));
9363 if (dwarf_version == 3
9364 && a->dw_attr == DW_AT_data_member_location
9365 && csize >= 4)
9366 size += size_of_uleb128 (AT_unsigned (a));
9367 else
9368 size += csize;
9369 }
9370 break;
9371 case dw_val_class_symview:
9372 if (symview_upper_bound <= 0xff)
9373 size += 1;
9374 else if (symview_upper_bound <= 0xffff)
9375 size += 2;
9376 else if (symview_upper_bound <= 0xffffffff)
9377 size += 4;
9378 else
9379 size += 8;
9380 break;
9381 case dw_val_class_const_implicit:
9382 case dw_val_class_unsigned_const_implicit:
9383 case dw_val_class_file_implicit:
9384 /* These occupy no size in the DIE, just an extra sleb128 in
9385 .debug_abbrev. */
9386 break;
9387 case dw_val_class_const_double:
9388 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9389 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9390 size++; /* block */
9391 break;
9392 case dw_val_class_wide_int:
9393 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9394 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9395 if (get_full_len (*a->dw_attr_val.v.val_wide)
9396 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9397 size++; /* block */
9398 break;
9399 case dw_val_class_vec:
9400 size += constant_size (a->dw_attr_val.v.val_vec.length
9401 * a->dw_attr_val.v.val_vec.elt_size)
9402 + a->dw_attr_val.v.val_vec.length
9403 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9404 break;
9405 case dw_val_class_flag:
9406 if (dwarf_version >= 4)
9407 /* Currently all add_AT_flag calls pass in 1 as last argument,
9408 so DW_FORM_flag_present can be used. If that ever changes,
9409 we'll need to use DW_FORM_flag and have some optimization
9410 in build_abbrev_table that will change those to
9411 DW_FORM_flag_present if it is set to 1 in all DIEs using
9412 the same abbrev entry. */
9413 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9414 else
9415 size += 1;
9416 break;
9417 case dw_val_class_die_ref:
9418 if (AT_ref_external (a))
9419 {
9420 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9421 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9422 is sized by target address length, whereas in DWARF3
9423 it's always sized as an offset. */
9424 if (use_debug_types)
9425 size += DWARF_TYPE_SIGNATURE_SIZE;
9426 else if (dwarf_version == 2)
9427 size += DWARF2_ADDR_SIZE;
9428 else
9429 size += DWARF_OFFSET_SIZE;
9430 }
9431 else
9432 size += DWARF_OFFSET_SIZE;
9433 break;
9434 case dw_val_class_fde_ref:
9435 size += DWARF_OFFSET_SIZE;
9436 break;
9437 case dw_val_class_lbl_id:
9438 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9439 {
9440 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9441 size += size_of_uleb128 (AT_index (a));
9442 }
9443 else
9444 size += DWARF2_ADDR_SIZE;
9445 break;
9446 case dw_val_class_lineptr:
9447 case dw_val_class_macptr:
9448 case dw_val_class_loclistsptr:
9449 size += DWARF_OFFSET_SIZE;
9450 break;
9451 case dw_val_class_str:
9452 form = AT_string_form (a);
9453 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9454 size += DWARF_OFFSET_SIZE;
9455 else if (form == dwarf_FORM (DW_FORM_strx))
9456 size += size_of_uleb128 (AT_index (a));
9457 else
9458 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9459 break;
9460 case dw_val_class_file:
9461 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9462 break;
9463 case dw_val_class_data8:
9464 size += 8;
9465 break;
9466 case dw_val_class_vms_delta:
9467 size += DWARF_OFFSET_SIZE;
9468 break;
9469 case dw_val_class_high_pc:
9470 size += DWARF2_ADDR_SIZE;
9471 break;
9472 case dw_val_class_discr_value:
9473 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9474 break;
9475 case dw_val_class_discr_list:
9476 {
9477 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9478
9479 /* This is a block, so we have the block length and then its
9480 data. */
9481 size += constant_size (block_size) + block_size;
9482 }
9483 break;
9484 default:
9485 gcc_unreachable ();
9486 }
9487 }
9488
9489 return size;
9490 }
9491
9492 /* Size the debugging information associated with a given DIE. Visits the
9493 DIE's children recursively. Updates the global variable next_die_offset
9494 on each pass. Uses the current value of next_die_offset to update the
9495 die_offset field in each DIE. */
9496
9497 static void
9498 calc_die_sizes (dw_die_ref die)
9499 {
9500 dw_die_ref c;
9501
9502 gcc_assert (die->die_offset == 0
9503 || (unsigned long int) die->die_offset == next_die_offset);
9504 die->die_offset = next_die_offset;
9505 next_die_offset += size_of_die (die);
9506
9507 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9508
9509 if (die->die_child != NULL)
9510 /* Count the null byte used to terminate sibling lists. */
9511 next_die_offset += 1;
9512 }
9513
9514 /* Size just the base type children at the start of the CU.
9515 This is needed because build_abbrev_table needs to size locs,
9516 and sizing of type-based stack ops needs to know die_offset
9517 values for the base types. */
9518
9519 static void
9520 calc_base_type_die_sizes (void)
9521 {
9522 unsigned long die_offset = (dwarf_split_debug_info
9523 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9524 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9525 unsigned int i;
9526 dw_die_ref base_type;
9527 #if ENABLE_ASSERT_CHECKING
9528 dw_die_ref prev = comp_unit_die ()->die_child;
9529 #endif
9530
9531 die_offset += size_of_die (comp_unit_die ());
9532 for (i = 0; base_types.iterate (i, &base_type); i++)
9533 {
9534 #if ENABLE_ASSERT_CHECKING
9535 gcc_assert (base_type->die_offset == 0
9536 && prev->die_sib == base_type
9537 && base_type->die_child == NULL
9538 && base_type->die_abbrev);
9539 prev = base_type;
9540 #endif
9541 if (abbrev_opt_start
9542 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9543 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9544 base_type->die_offset = die_offset;
9545 die_offset += size_of_die (base_type);
9546 }
9547 }
9548
9549 /* Set the marks for a die and its children. We do this so
9550 that we know whether or not a reference needs to use FORM_ref_addr; only
9551 DIEs in the same CU will be marked. We used to clear out the offset
9552 and use that as the flag, but ran into ordering problems. */
9553
9554 static void
9555 mark_dies (dw_die_ref die)
9556 {
9557 dw_die_ref c;
9558
9559 gcc_assert (!die->die_mark);
9560
9561 die->die_mark = 1;
9562 FOR_EACH_CHILD (die, c, mark_dies (c));
9563 }
9564
9565 /* Clear the marks for a die and its children. */
9566
9567 static void
9568 unmark_dies (dw_die_ref die)
9569 {
9570 dw_die_ref c;
9571
9572 if (! use_debug_types)
9573 gcc_assert (die->die_mark);
9574
9575 die->die_mark = 0;
9576 FOR_EACH_CHILD (die, c, unmark_dies (c));
9577 }
9578
9579 /* Clear the marks for a die, its children and referred dies. */
9580
9581 static void
9582 unmark_all_dies (dw_die_ref die)
9583 {
9584 dw_die_ref c;
9585 dw_attr_node *a;
9586 unsigned ix;
9587
9588 if (!die->die_mark)
9589 return;
9590 die->die_mark = 0;
9591
9592 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9593
9594 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9595 if (AT_class (a) == dw_val_class_die_ref)
9596 unmark_all_dies (AT_ref (a));
9597 }
9598
9599 /* Calculate whether the entry should appear in the final output file. It may
9600 be from a pruned type. */
9601
9602 static bool
9603 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9604 {
9605 /* By limiting gnu pubnames to definitions only, gold can generate a
9606 gdb index without entries for declarations, which don't include
9607 enough information to be useful. */
9608 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9609 return false;
9610
9611 if (table == pubname_table)
9612 {
9613 /* Enumerator names are part of the pubname table, but the
9614 parent DW_TAG_enumeration_type die may have been pruned.
9615 Don't output them if that is the case. */
9616 if (p->die->die_tag == DW_TAG_enumerator &&
9617 (p->die->die_parent == NULL
9618 || !p->die->die_parent->die_perennial_p))
9619 return false;
9620
9621 /* Everything else in the pubname table is included. */
9622 return true;
9623 }
9624
9625 /* The pubtypes table shouldn't include types that have been
9626 pruned. */
9627 return (p->die->die_offset != 0
9628 || !flag_eliminate_unused_debug_types);
9629 }
9630
9631 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9632 generated for the compilation unit. */
9633
9634 static unsigned long
9635 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9636 {
9637 unsigned long size;
9638 unsigned i;
9639 pubname_entry *p;
9640 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9641
9642 size = DWARF_PUBNAMES_HEADER_SIZE;
9643 FOR_EACH_VEC_ELT (*names, i, p)
9644 if (include_pubname_in_output (names, p))
9645 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9646
9647 size += DWARF_OFFSET_SIZE;
9648 return size;
9649 }
9650
9651 /* Return the size of the information in the .debug_aranges section. */
9652
9653 static unsigned long
9654 size_of_aranges (void)
9655 {
9656 unsigned long size;
9657
9658 size = DWARF_ARANGES_HEADER_SIZE;
9659
9660 /* Count the address/length pair for this compilation unit. */
9661 if (text_section_used)
9662 size += 2 * DWARF2_ADDR_SIZE;
9663 if (cold_text_section_used)
9664 size += 2 * DWARF2_ADDR_SIZE;
9665 if (have_multiple_function_sections)
9666 {
9667 unsigned fde_idx;
9668 dw_fde_ref fde;
9669
9670 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9671 {
9672 if (DECL_IGNORED_P (fde->decl))
9673 continue;
9674 if (!fde->in_std_section)
9675 size += 2 * DWARF2_ADDR_SIZE;
9676 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9677 size += 2 * DWARF2_ADDR_SIZE;
9678 }
9679 }
9680
9681 /* Count the two zero words used to terminate the address range table. */
9682 size += 2 * DWARF2_ADDR_SIZE;
9683 return size;
9684 }
9685 \f
9686 /* Select the encoding of an attribute value. */
9687
9688 static enum dwarf_form
9689 value_format (dw_attr_node *a)
9690 {
9691 switch (AT_class (a))
9692 {
9693 case dw_val_class_addr:
9694 /* Only very few attributes allow DW_FORM_addr. */
9695 switch (a->dw_attr)
9696 {
9697 case DW_AT_low_pc:
9698 case DW_AT_high_pc:
9699 case DW_AT_entry_pc:
9700 case DW_AT_trampoline:
9701 return (AT_index (a) == NOT_INDEXED
9702 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9703 default:
9704 break;
9705 }
9706 switch (DWARF2_ADDR_SIZE)
9707 {
9708 case 1:
9709 return DW_FORM_data1;
9710 case 2:
9711 return DW_FORM_data2;
9712 case 4:
9713 return DW_FORM_data4;
9714 case 8:
9715 return DW_FORM_data8;
9716 default:
9717 gcc_unreachable ();
9718 }
9719 case dw_val_class_loc_list:
9720 case dw_val_class_view_list:
9721 if (dwarf_split_debug_info
9722 && dwarf_version >= 5
9723 && AT_loc_list (a)->num_assigned)
9724 return DW_FORM_loclistx;
9725 /* FALLTHRU */
9726 case dw_val_class_range_list:
9727 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
9728 but in .debug_info use DW_FORM_sec_offset, which is shorter if we care
9729 about the sizes of .debug* sections in shared libraries and executables
9730 and don't take into account relocations that affect just relocatable
9731 objects; for DW_FORM_rnglistx we'd have to emit an offset table in the
9732 .debug_rnglists section.
9733 if (dwarf_split_debug_info
9734 && dwarf_version >= 5
9735 && AT_class (a) == dw_val_class_range_list
9736 && rnglist_idx
9737 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9738 return DW_FORM_rnglistx;
9739 if (dwarf_version >= 4)
9740 return DW_FORM_sec_offset;
9741 /* FALLTHRU */
9742 case dw_val_class_vms_delta:
9743 case dw_val_class_offset:
9744 switch (DWARF_OFFSET_SIZE)
9745 {
9746 case 4:
9747 return DW_FORM_data4;
9748 case 8:
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_loc:
9754 if (dwarf_version >= 4)
9755 return DW_FORM_exprloc;
9756 switch (constant_size (size_of_locs (AT_loc (a))))
9757 {
9758 case 1:
9759 return DW_FORM_block1;
9760 case 2:
9761 return DW_FORM_block2;
9762 case 4:
9763 return DW_FORM_block4;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const:
9768 return DW_FORM_sdata;
9769 case dw_val_class_unsigned_const:
9770 switch (constant_size (AT_unsigned (a)))
9771 {
9772 case 1:
9773 return DW_FORM_data1;
9774 case 2:
9775 return DW_FORM_data2;
9776 case 4:
9777 /* In DWARF3 DW_AT_data_member_location with
9778 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9779 constant, so we need to use DW_FORM_udata if we need
9780 a large constant. */
9781 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9782 return DW_FORM_udata;
9783 return DW_FORM_data4;
9784 case 8:
9785 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9786 return DW_FORM_udata;
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_const_implicit:
9792 case dw_val_class_unsigned_const_implicit:
9793 case dw_val_class_file_implicit:
9794 return DW_FORM_implicit_const;
9795 case dw_val_class_const_double:
9796 switch (HOST_BITS_PER_WIDE_INT)
9797 {
9798 case 8:
9799 return DW_FORM_data2;
9800 case 16:
9801 return DW_FORM_data4;
9802 case 32:
9803 return DW_FORM_data8;
9804 case 64:
9805 if (dwarf_version >= 5)
9806 return DW_FORM_data16;
9807 /* FALLTHRU */
9808 default:
9809 return DW_FORM_block1;
9810 }
9811 case dw_val_class_wide_int:
9812 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9813 {
9814 case 8:
9815 return DW_FORM_data1;
9816 case 16:
9817 return DW_FORM_data2;
9818 case 32:
9819 return DW_FORM_data4;
9820 case 64:
9821 return DW_FORM_data8;
9822 case 128:
9823 if (dwarf_version >= 5)
9824 return DW_FORM_data16;
9825 /* FALLTHRU */
9826 default:
9827 return DW_FORM_block1;
9828 }
9829 case dw_val_class_symview:
9830 /* ??? We might use uleb128, but then we'd have to compute
9831 .debug_info offsets in the assembler. */
9832 if (symview_upper_bound <= 0xff)
9833 return DW_FORM_data1;
9834 else if (symview_upper_bound <= 0xffff)
9835 return DW_FORM_data2;
9836 else if (symview_upper_bound <= 0xffffffff)
9837 return DW_FORM_data4;
9838 else
9839 return DW_FORM_data8;
9840 case dw_val_class_vec:
9841 switch (constant_size (a->dw_attr_val.v.val_vec.length
9842 * a->dw_attr_val.v.val_vec.elt_size))
9843 {
9844 case 1:
9845 return DW_FORM_block1;
9846 case 2:
9847 return DW_FORM_block2;
9848 case 4:
9849 return DW_FORM_block4;
9850 default:
9851 gcc_unreachable ();
9852 }
9853 case dw_val_class_flag:
9854 if (dwarf_version >= 4)
9855 {
9856 /* Currently all add_AT_flag calls pass in 1 as last argument,
9857 so DW_FORM_flag_present can be used. If that ever changes,
9858 we'll need to use DW_FORM_flag and have some optimization
9859 in build_abbrev_table that will change those to
9860 DW_FORM_flag_present if it is set to 1 in all DIEs using
9861 the same abbrev entry. */
9862 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9863 return DW_FORM_flag_present;
9864 }
9865 return DW_FORM_flag;
9866 case dw_val_class_die_ref:
9867 if (AT_ref_external (a))
9868 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9869 else
9870 return DW_FORM_ref;
9871 case dw_val_class_fde_ref:
9872 return DW_FORM_data;
9873 case dw_val_class_lbl_id:
9874 return (AT_index (a) == NOT_INDEXED
9875 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9876 case dw_val_class_lineptr:
9877 case dw_val_class_macptr:
9878 case dw_val_class_loclistsptr:
9879 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9880 case dw_val_class_str:
9881 return AT_string_form (a);
9882 case dw_val_class_file:
9883 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9884 {
9885 case 1:
9886 return DW_FORM_data1;
9887 case 2:
9888 return DW_FORM_data2;
9889 case 4:
9890 return DW_FORM_data4;
9891 default:
9892 gcc_unreachable ();
9893 }
9894
9895 case dw_val_class_data8:
9896 return DW_FORM_data8;
9897
9898 case dw_val_class_high_pc:
9899 switch (DWARF2_ADDR_SIZE)
9900 {
9901 case 1:
9902 return DW_FORM_data1;
9903 case 2:
9904 return DW_FORM_data2;
9905 case 4:
9906 return DW_FORM_data4;
9907 case 8:
9908 return DW_FORM_data8;
9909 default:
9910 gcc_unreachable ();
9911 }
9912
9913 case dw_val_class_discr_value:
9914 return (a->dw_attr_val.v.val_discr_value.pos
9915 ? DW_FORM_udata
9916 : DW_FORM_sdata);
9917 case dw_val_class_discr_list:
9918 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9919 {
9920 case 1:
9921 return DW_FORM_block1;
9922 case 2:
9923 return DW_FORM_block2;
9924 case 4:
9925 return DW_FORM_block4;
9926 default:
9927 gcc_unreachable ();
9928 }
9929
9930 default:
9931 gcc_unreachable ();
9932 }
9933 }
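/* An example of the DW_AT_data_member_location special case handled above
   and in size_of_die (the offset value is hypothetical): in DWARF 3 a member
   offset of 0x11170 is emitted as a 3-byte uleb128 under DW_FORM_udata
   rather than DW_FORM_data4, because a data4/data8 value for that attribute
   would be read as a loclistptr instead of a constant.  */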
9934
9935 /* Output the encoding of an attribute value. */
9936
9937 static void
9938 output_value_format (dw_attr_node *a)
9939 {
9940 enum dwarf_form form = value_format (a);
9941
9942 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9943 }
9944
9945 /* Given a die and id, produce the appropriate abbreviations. */
9946
9947 static void
9948 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9949 {
9950 unsigned ix;
9951 dw_attr_node *a_attr;
9952
9953 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9954 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9955 dwarf_tag_name (abbrev->die_tag));
9956
9957 if (abbrev->die_child != NULL)
9958 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9959 else
9960 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9961
9962 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9963 {
9964 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9965 dwarf_attr_name (a_attr->dw_attr));
9966 output_value_format (a_attr);
9967 if (value_format (a_attr) == DW_FORM_implicit_const)
9968 {
9969 if (AT_class (a_attr) == dw_val_class_file_implicit)
9970 {
9971 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9972 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9973 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9974 }
9975 else
9976 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9977 }
9978 }
9979
9980 dw2_asm_output_data (1, 0, NULL);
9981 dw2_asm_output_data (1, 0, NULL);
9982 }
9983
9984
9985 /* Output the .debug_abbrev section which defines the DIE abbreviation
9986 table. */
9987
9988 static void
9989 output_abbrev_section (void)
9990 {
9991 unsigned int abbrev_id;
9992 dw_die_ref abbrev;
9993
9994 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9995 if (abbrev_id != 0)
9996 output_die_abbrevs (abbrev_id, abbrev);
9997
9998 /* Terminate the table. */
9999 dw2_asm_output_data (1, 0, NULL);
10000 }
10001
10002 /* Return a new location list, given the begin and end range, and the
10003 expression. */
10004
10005 static inline dw_loc_list_ref
10006 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10007 const char *end, var_loc_view vend,
10008 const char *section)
10009 {
10010 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10011
10012 retlist->begin = begin;
10013 retlist->begin_entry = NULL;
10014 retlist->end = end;
10015 retlist->expr = expr;
10016 retlist->section = section;
10017 retlist->vbegin = vbegin;
10018 retlist->vend = vend;
10019
10020 return retlist;
10021 }
10022
10023 /* Return true iff there's any nonzero view number in the loc list.
10024
10025 ??? When views are not enabled, we'll often extend a single range
10026 to the entire function, so that we emit a single location
10027 expression rather than a location list. With views, even with a
10028 single range, we'll output a list if start or end have a nonzero
10029 view. If we change this, we may want to stop splitting a single
10030 range in dw_loc_list just because of a nonzero view, even if it
10031 straddles across hot/cold partitions. */
10032
10033 static bool
10034 loc_list_has_views (dw_loc_list_ref list)
10035 {
10036 if (!debug_variable_location_views)
10037 return false;
10038
10039 for (dw_loc_list_ref loc = list;
10040 loc != NULL; loc = loc->dw_loc_next)
10041 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10042 return true;
10043
10044 return false;
10045 }
10046
10047 /* Generate a new internal symbol for this location list node, if it
10048 hasn't got one yet. */
10049
10050 static inline void
10051 gen_llsym (dw_loc_list_ref list)
10052 {
10053 gcc_assert (!list->ll_symbol);
10054 list->ll_symbol = gen_internal_sym ("LLST");
10055
10056 if (!loc_list_has_views (list))
10057 return;
10058
10059 if (dwarf2out_locviews_in_attribute ())
10060 {
10061 /* Use the same label_num for the view list. */
10062 label_num--;
10063 list->vl_symbol = gen_internal_sym ("LVUS");
10064 }
10065 else
10066 list->vl_symbol = list->ll_symbol;
10067 }
10068
10069 /* Generate a symbol for the list, but only if we really want to emit
10070 it as a list. */
10071
10072 static inline void
10073 maybe_gen_llsym (dw_loc_list_ref list)
10074 {
10075 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10076 return;
10077
10078 gen_llsym (list);
10079 }
10080
10081 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10082 NULL, don't consider size of the location expression. If we're not
10083 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10084 representation in *SIZEP. */
10085
10086 static bool
10087 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10088 {
10089 /* Don't output an entry that starts and ends at the same address. */
10090 if (strcmp (curr->begin, curr->end) == 0
10091 && curr->vbegin == curr->vend && !curr->force)
10092 return true;
10093
10094 if (!sizep)
10095 return false;
10096
10097 unsigned long size = size_of_locs (curr->expr);
10098
10099 /* If the expression is too large, drop it on the floor. We could
10100 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10101 in the expression, but >= 64KB expressions for a single value
10102 in a single range are unlikely to be very useful. */
10103 if (dwarf_version < 5 && size > 0xffff)
10104 return true;
10105
10106 *sizep = size;
10107
10108 return false;
10109 }
10110
10111 /* Output a view pair loclist entry for CURR, if it requires one. */
10112
10113 static void
10114 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10115 {
10116 if (!dwarf2out_locviews_in_loclist ())
10117 return;
10118
10119 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10120 return;
10121
10122 #ifdef DW_LLE_view_pair
10123 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10124
10125 if (dwarf2out_as_locview_support)
10126 {
10127 if (ZERO_VIEW_P (curr->vbegin))
10128 dw2_asm_output_data_uleb128 (0, "Location view begin");
10129 else
10130 {
10131 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10132 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10133 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10134 }
10135
10136 if (ZERO_VIEW_P (curr->vend))
10137 dw2_asm_output_data_uleb128 (0, "Location view end");
10138 else
10139 {
10140 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10142 dw2_asm_output_symname_uleb128 (label, "Location view end");
10143 }
10144 }
10145 else
10146 {
10147 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10148 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10149 }
10150 #endif /* DW_LLE_view_pair */
10151
10152 return;
10153 }
10154
10155 /* Output the location list given to us. */
10156
10157 static void
10158 output_loc_list (dw_loc_list_ref list_head)
10159 {
10160 int vcount = 0, lcount = 0;
10161
10162 if (list_head->emitted)
10163 return;
10164 list_head->emitted = true;
10165
10166 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10167 {
10168 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10169
10170 for (dw_loc_list_ref curr = list_head; curr != NULL;
10171 curr = curr->dw_loc_next)
10172 {
10173 unsigned long size;
10174
10175 if (skip_loc_list_entry (curr, &size))
10176 continue;
10177
10178 vcount++;
10179
10180 /* ?? dwarf_split_debug_info? */
10181 if (dwarf2out_as_locview_support)
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184
10185 if (!ZERO_VIEW_P (curr->vbegin))
10186 {
10187 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10188 dw2_asm_output_symname_uleb128 (label,
10189 "View list begin (%s)",
10190 list_head->vl_symbol);
10191 }
10192 else
10193 dw2_asm_output_data_uleb128 (0,
10194 "View list begin (%s)",
10195 list_head->vl_symbol);
10196
10197 if (!ZERO_VIEW_P (curr->vend))
10198 {
10199 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10200 dw2_asm_output_symname_uleb128 (label,
10201 "View list end (%s)",
10202 list_head->vl_symbol);
10203 }
10204 else
10205 dw2_asm_output_data_uleb128 (0,
10206 "View list end (%s)",
10207 list_head->vl_symbol);
10208 }
10209 else
10210 {
10211 dw2_asm_output_data_uleb128 (curr->vbegin,
10212 "View list begin (%s)",
10213 list_head->vl_symbol);
10214 dw2_asm_output_data_uleb128 (curr->vend,
10215 "View list end (%s)",
10216 list_head->vl_symbol);
10217 }
10218 }
10219 }
10220
10221 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10222
10223 const char *last_section = NULL;
10224 const char *base_label = NULL;
10225
10226 /* Walk the location list, and output each range + expression. */
10227 for (dw_loc_list_ref curr = list_head; curr != NULL;
10228 curr = curr->dw_loc_next)
10229 {
10230 unsigned long size;
10231
10232 /* Skip this entry? If we skip it here, we must skip it in the
10233 view list above as well. */
10234 if (skip_loc_list_entry (curr, &size))
10235 continue;
10236
10237 lcount++;
10238
10239 if (dwarf_version >= 5)
10240 {
10241 if (dwarf_split_debug_info)
10242 {
10243 dwarf2out_maybe_output_loclist_view_pair (curr);
10244 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10245 uleb128 index into .debug_addr and a uleb128 length. */
10246 dw2_asm_output_data (1, DW_LLE_startx_length,
10247 "DW_LLE_startx_length (%s)",
10248 list_head->ll_symbol);
10249 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10250 "Location list range start index "
10251 "(%s)", curr->begin);
10252 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10253 For that case we probably need to emit DW_LLE_startx_endx,
10254 but we'd need 2 .debug_addr entries rather than just one. */
10255 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10256 "Location list length (%s)",
10257 list_head->ll_symbol);
10258 }
10259 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10260 {
10261 dwarf2out_maybe_output_loclist_view_pair (curr);
10262 /* If all code is in the .text section, the base address is
10263 already provided by the CU attributes. Use
10264 DW_LLE_offset_pair, where both addresses are uleb128-encoded
10265 offsets against that base. */
10266 dw2_asm_output_data (1, DW_LLE_offset_pair,
10267 "DW_LLE_offset_pair (%s)",
10268 list_head->ll_symbol);
10269 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10270 "Location list begin address (%s)",
10271 list_head->ll_symbol);
10272 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10273 "Location list end address (%s)",
10274 list_head->ll_symbol);
10275 }
10276 else if (HAVE_AS_LEB128)
10277 {
10278 /* Otherwise, find out how many consecutive entries could share
10279 the same base entry. If just one, emit DW_LLE_start_length,
10280 otherwise emit DW_LLE_base_address for the base address
10281 followed by a series of DW_LLE_offset_pair. */
10282 if (last_section == NULL || curr->section != last_section)
10283 {
10284 dw_loc_list_ref curr2;
10285 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10286 curr2 = curr2->dw_loc_next)
10287 {
10288 if (strcmp (curr2->begin, curr2->end) == 0
10289 && !curr2->force)
10290 continue;
10291 break;
10292 }
10293 if (curr2 == NULL || curr->section != curr2->section)
10294 last_section = NULL;
10295 else
10296 {
10297 last_section = curr->section;
10298 base_label = curr->begin;
10299 dw2_asm_output_data (1, DW_LLE_base_address,
10300 "DW_LLE_base_address (%s)",
10301 list_head->ll_symbol);
10302 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10303 "Base address (%s)",
10304 list_head->ll_symbol);
10305 }
10306 }
10307 /* Only one entry with the same base address. Use
10308 DW_LLE_start_length with absolute address and uleb128
10309 length. */
10310 if (last_section == NULL)
10311 {
10312 dwarf2out_maybe_output_loclist_view_pair (curr);
10313 dw2_asm_output_data (1, DW_LLE_start_length,
10314 "DW_LLE_start_length (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10317 "Location list begin address (%s)",
10318 list_head->ll_symbol);
10319 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10320 "Location list length "
10321 "(%s)", list_head->ll_symbol);
10322 }
10323 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10324 DW_LLE_base_address. */
10325 else
10326 {
10327 dwarf2out_maybe_output_loclist_view_pair (curr);
10328 dw2_asm_output_data (1, DW_LLE_offset_pair,
10329 "DW_LLE_offset_pair (%s)",
10330 list_head->ll_symbol);
10331 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10332 "Location list begin address "
10333 "(%s)", list_head->ll_symbol);
10334 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10335 "Location list end address "
10336 "(%s)", list_head->ll_symbol);
10337 }
10338 }
10339 /* The assembler does not support the .uleb128 directive. Emit
10340 DW_LLE_start_end with a pair of absolute addresses. */
10341 else
10342 {
10343 dwarf2out_maybe_output_loclist_view_pair (curr);
10344 dw2_asm_output_data (1, DW_LLE_start_end,
10345 "DW_LLE_start_end (%s)",
10346 list_head->ll_symbol);
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10348 "Location list begin address (%s)",
10349 list_head->ll_symbol);
10350 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10351 "Location list end address (%s)",
10352 list_head->ll_symbol);
10353 }
10354 }
10355 else if (dwarf_split_debug_info)
10356 {
10357 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10358 and a 4-byte length. */
10359 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10360 "Location list start/length entry (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10363 "Location list range start index (%s)",
10364 curr->begin);
10365 /* The length field is 4 bytes. If we ever need to support
10366 an 8-byte length, we can add a new DW_LLE code or fall back
10367 to DW_LLE_GNU_start_end_entry. */
10368 dw2_asm_output_delta (4, curr->end, curr->begin,
10369 "Location list range length (%s)",
10370 list_head->ll_symbol);
10371 }
10372 else if (!have_multiple_function_sections)
10373 {
10374 /* Pair of relative addresses against start of text section. */
10375 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10376 "Location list begin address (%s)",
10377 list_head->ll_symbol);
10378 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10379 "Location list end address (%s)",
10380 list_head->ll_symbol);
10381 }
10382 else
10383 {
10384 /* Pair of absolute addresses. */
10385 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10386 "Location list begin address (%s)",
10387 list_head->ll_symbol);
10388 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10389 "Location list end address (%s)",
10390 list_head->ll_symbol);
10391 }
10392
10393 /* Output the block length for this list of location operations. */
10394 if (dwarf_version >= 5)
10395 dw2_asm_output_data_uleb128 (size, "Location expression size");
10396 else
10397 {
10398 gcc_assert (size <= 0xffff);
10399 dw2_asm_output_data (2, size, "Location expression size");
10400 }
10401
10402 output_loc_sequence (curr->expr, -1);
10403 }
10404
10405 /* And finally list termination. */
10406 if (dwarf_version >= 5)
10407 dw2_asm_output_data (1, DW_LLE_end_of_list,
10408 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10409 else if (dwarf_split_debug_info)
10410 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10411 "Location list terminator (%s)",
10412 list_head->ll_symbol);
10413 else
10414 {
10415 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10416 "Location list terminator begin (%s)",
10417 list_head->ll_symbol);
10418 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10419 "Location list terminator end (%s)",
10420 list_head->ll_symbol);
10421 }
10422
10423 gcc_assert (!list_head->vl_symbol
10424 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10425 }
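/* To illustrate the DWARF 5 entry-kind selection in output_loc_list above:
   with everything in a single .text section and an assembler that supports
   .uleb128, a range is emitted as DW_LLE_offset_pair (two uleb128 deltas
   against the section's base); under -gsplit-dwarf it becomes
   DW_LLE_startx_length (a uleb128 .debug_addr index plus a uleb128 length);
   and without .uleb128 support we fall back to DW_LLE_start_end with two
   absolute addresses.  */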
10426
10427 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10428 section. Emit a relocated reference if val_entry is NULL, otherwise,
10429 emit an indirect reference. */
10430
10431 static void
10432 output_range_list_offset (dw_attr_node *a)
10433 {
10434 const char *name = dwarf_attr_name (a->dw_attr);
10435
10436 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10437 {
10438 if (dwarf_version >= 5)
10439 {
10440 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10441 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10442 debug_ranges_section, "%s", name);
10443 }
10444 else
10445 {
10446 char *p = strchr (ranges_section_label, '\0');
10447 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10448 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10449 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10450 debug_ranges_section, "%s", name);
10451 *p = '\0';
10452 }
10453 }
10454 else if (dwarf_version >= 5)
10455 {
10456 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10457 gcc_assert (rnglist_idx);
10458 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10459 }
10460 else
10461 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10462 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10463 "%s (offset from %s)", name, ranges_section_label);
10464 }
10465
10466 /* Output the offset into the debug_loc section. */
10467
10468 static void
10469 output_loc_list_offset (dw_attr_node *a)
10470 {
10471 char *sym = AT_loc_list (a)->ll_symbol;
10472
10473 gcc_assert (sym);
10474 if (!dwarf_split_debug_info)
10475 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10476 "%s", dwarf_attr_name (a->dw_attr));
10477 else if (dwarf_version >= 5)
10478 {
10479 gcc_assert (AT_loc_list (a)->num_assigned);
10480 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10481 dwarf_attr_name (a->dw_attr),
10482 sym);
10483 }
10484 else
10485 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10486 "%s", dwarf_attr_name (a->dw_attr));
10487 }
10488
10489 /* Output the view list offset into the debug_loc section. */
10490
10491 static void
10492 output_view_list_offset (dw_attr_node *a)
10493 {
10494 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10495
10496 gcc_assert (sym);
10497 if (dwarf_split_debug_info)
10498 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10499 "%s", dwarf_attr_name (a->dw_attr));
10500 else
10501 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10502 "%s", dwarf_attr_name (a->dw_attr));
10503 }
10504
10505 /* Output an attribute's index or value appropriately. */
10506
10507 static void
10508 output_attr_index_or_value (dw_attr_node *a)
10509 {
10510 const char *name = dwarf_attr_name (a->dw_attr);
10511
10512 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10513 {
10514 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10515 return;
10516 }
10517 switch (AT_class (a))
10518 {
10519 case dw_val_class_addr:
10520 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10521 break;
10522 case dw_val_class_high_pc:
10523 case dw_val_class_lbl_id:
10524 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10525 break;
10526 default:
10527 gcc_unreachable ();
10528 }
10529 }
10530
10531 /* Output a type signature. */
10532
10533 static inline void
10534 output_signature (const char *sig, const char *name)
10535 {
10536 int i;
10537
10538 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10539 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10540 }
10541
10542 /* Output a discriminant value. */
10543
10544 static inline void
10545 output_discr_value (dw_discr_value *discr_value, const char *name)
10546 {
10547 if (discr_value->pos)
10548 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10549 else
10550 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10551 }
10552
10553 /* Output the DIE and its attributes. Called recursively to generate
10554 the definitions of each child DIE. */
10555
10556 static void
10557 output_die (dw_die_ref die)
10558 {
10559 dw_attr_node *a;
10560 dw_die_ref c;
10561 unsigned long size;
10562 unsigned ix;
10563
10564 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10565 (unsigned long)die->die_offset,
10566 dwarf_tag_name (die->die_tag));
10567
10568 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10569 {
10570 const char *name = dwarf_attr_name (a->dw_attr);
10571
10572 switch (AT_class (a))
10573 {
10574 case dw_val_class_addr:
10575 output_attr_index_or_value (a);
10576 break;
10577
10578 case dw_val_class_offset:
10579 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10580 "%s", name);
10581 break;
10582
10583 case dw_val_class_range_list:
10584 output_range_list_offset (a);
10585 break;
10586
10587 case dw_val_class_loc:
10588 size = size_of_locs (AT_loc (a));
10589
10590 /* Output the block length for this list of location operations. */
10591 if (dwarf_version >= 4)
10592 dw2_asm_output_data_uleb128 (size, "%s", name);
10593 else
10594 dw2_asm_output_data (constant_size (size), size, "%s", name);
10595
10596 output_loc_sequence (AT_loc (a), -1);
10597 break;
10598
10599 case dw_val_class_const:
10600 /* ??? It would be slightly more efficient to use a scheme like the one
10601 used for unsigned constants below, but gdb 4.x does not sign
10602 extend. Gdb 5.x does sign extend. */
10603 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10604 break;
10605
10606 case dw_val_class_unsigned_const:
10607 {
10608 int csize = constant_size (AT_unsigned (a));
10609 if (dwarf_version == 3
10610 && a->dw_attr == DW_AT_data_member_location
10611 && csize >= 4)
10612 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10613 else
10614 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10615 }
10616 break;
10617
10618 case dw_val_class_symview:
10619 {
10620 int vsize;
10621 if (symview_upper_bound <= 0xff)
10622 vsize = 1;
10623 else if (symview_upper_bound <= 0xffff)
10624 vsize = 2;
10625 else if (symview_upper_bound <= 0xffffffff)
10626 vsize = 4;
10627 else
10628 vsize = 8;
10629 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10630 "%s", name);
10631 }
10632 break;
10633
10634 case dw_val_class_const_implicit:
10635 if (flag_debug_asm)
10636 fprintf (asm_out_file, "\t\t\t%s %s ("
10637 HOST_WIDE_INT_PRINT_DEC ")\n",
10638 ASM_COMMENT_START, name, AT_int (a));
10639 break;
10640
10641 case dw_val_class_unsigned_const_implicit:
10642 if (flag_debug_asm)
10643 fprintf (asm_out_file, "\t\t\t%s %s ("
10644 HOST_WIDE_INT_PRINT_HEX ")\n",
10645 ASM_COMMENT_START, name, AT_unsigned (a));
10646 break;
10647
10648 case dw_val_class_const_double:
10649 {
10650 unsigned HOST_WIDE_INT first, second;
10651
10652 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10653 dw2_asm_output_data (1,
10654 HOST_BITS_PER_DOUBLE_INT
10655 / HOST_BITS_PER_CHAR,
10656 NULL);
10657
10658 if (WORDS_BIG_ENDIAN)
10659 {
10660 first = a->dw_attr_val.v.val_double.high;
10661 second = a->dw_attr_val.v.val_double.low;
10662 }
10663 else
10664 {
10665 first = a->dw_attr_val.v.val_double.low;
10666 second = a->dw_attr_val.v.val_double.high;
10667 }
10668
10669 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10670 first, "%s", name);
10671 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10672 second, NULL);
10673 }
10674 break;
10675
10676 case dw_val_class_wide_int:
10677 {
10678 int i;
10679 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10680 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10681 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10682 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10683 * l, NULL);
10684
10685 if (WORDS_BIG_ENDIAN)
10686 for (i = len - 1; i >= 0; --i)
10687 {
10688 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10689 "%s", name);
10690 name = "";
10691 }
10692 else
10693 for (i = 0; i < len; ++i)
10694 {
10695 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10696 "%s", name);
10697 name = "";
10698 }
10699 }
10700 break;
10701
10702 case dw_val_class_vec:
10703 {
10704 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10705 unsigned int len = a->dw_attr_val.v.val_vec.length;
10706 unsigned int i;
10707 unsigned char *p;
10708
10709 dw2_asm_output_data (constant_size (len * elt_size),
10710 len * elt_size, "%s", name);
10711 if (elt_size > sizeof (HOST_WIDE_INT))
10712 {
10713 elt_size /= 2;
10714 len *= 2;
10715 }
10716 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10717 i < len;
10718 i++, p += elt_size)
10719 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10720 "fp or vector constant word %u", i);
10721 break;
10722 }
10723
10724 case dw_val_class_flag:
10725 if (dwarf_version >= 4)
10726 {
10727 /* Currently all add_AT_flag calls pass in 1 as last argument,
10728 so DW_FORM_flag_present can be used. If that ever changes,
10729 we'll need to use DW_FORM_flag and have some optimization
10730 in build_abbrev_table that will change those to
10731 DW_FORM_flag_present if it is set to 1 in all DIEs using
10732 the same abbrev entry. */
10733 gcc_assert (AT_flag (a) == 1);
10734 if (flag_debug_asm)
10735 fprintf (asm_out_file, "\t\t\t%s %s\n",
10736 ASM_COMMENT_START, name);
10737 break;
10738 }
10739 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10740 break;
10741
10742 case dw_val_class_loc_list:
10743 output_loc_list_offset (a);
10744 break;
10745
10746 case dw_val_class_view_list:
10747 output_view_list_offset (a);
10748 break;
10749
10750 case dw_val_class_die_ref:
10751 if (AT_ref_external (a))
10752 {
10753 if (AT_ref (a)->comdat_type_p)
10754 {
10755 comdat_type_node *type_node
10756 = AT_ref (a)->die_id.die_type_node;
10757
10758 gcc_assert (type_node);
10759 output_signature (type_node->signature, name);
10760 }
10761 else
10762 {
10763 const char *sym = AT_ref (a)->die_id.die_symbol;
10764 int size;
10765
10766 gcc_assert (sym);
10767 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10768 length, whereas in DWARF3 it's always sized as an
10769 offset. */
10770 if (dwarf_version == 2)
10771 size = DWARF2_ADDR_SIZE;
10772 else
10773 size = DWARF_OFFSET_SIZE;
10774 		 /* ??? We cannot unconditionally output die_offset whenever it
10775 		    is non-zero - others might create references to those
10776 		    DIEs via symbols.
10777 		    And we do not clear the DIE offset after outputting it
10778 		    (the label refers to the actual DIE, not to the DWARF
10779 		    CU unit header, which is where using label + offset
10780 		    would be the correct thing to do).
10781 		    ??? This is the reason for the with_offset flag.  */
10782 if (AT_ref (a)->with_offset)
10783 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10784 debug_info_section, "%s", name);
10785 else
10786 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10787 name);
10788 }
10789 }
10790 else
10791 {
10792 gcc_assert (AT_ref (a)->die_offset);
10793 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10794 "%s", name);
10795 }
10796 break;
10797
10798 case dw_val_class_fde_ref:
10799 {
10800 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10801
10802 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10803 a->dw_attr_val.v.val_fde_index * 2);
10804 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10805 "%s", name);
10806 }
10807 break;
10808
10809 case dw_val_class_vms_delta:
10810 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10811 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10812 AT_vms_delta2 (a), AT_vms_delta1 (a),
10813 "%s", name);
10814 #else
10815 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10816 AT_vms_delta2 (a), AT_vms_delta1 (a),
10817 "%s", name);
10818 #endif
10819 break;
10820
10821 case dw_val_class_lbl_id:
10822 output_attr_index_or_value (a);
10823 break;
10824
10825 case dw_val_class_lineptr:
10826 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10827 debug_line_section, "%s", name);
10828 break;
10829
10830 case dw_val_class_macptr:
10831 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10832 debug_macinfo_section, "%s", name);
10833 break;
10834
10835 case dw_val_class_loclistsptr:
10836 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10837 debug_loc_section, "%s", name);
10838 break;
10839
10840 case dw_val_class_str:
10841 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10842 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10843 a->dw_attr_val.v.val_str->label,
10844 debug_str_section,
10845 "%s: \"%s\"", name, AT_string (a));
10846 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10848 a->dw_attr_val.v.val_str->label,
10849 debug_line_str_section,
10850 "%s: \"%s\"", name, AT_string (a));
10851 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10852 dw2_asm_output_data_uleb128 (AT_index (a),
10853 "%s: \"%s\"", name, AT_string (a));
10854 else
10855 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10856 break;
10857
10858 case dw_val_class_file:
10859 {
10860 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10861
10862 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10863 a->dw_attr_val.v.val_file->filename);
10864 break;
10865 }
10866
10867 case dw_val_class_file_implicit:
10868 if (flag_debug_asm)
10869 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10870 ASM_COMMENT_START, name,
10871 maybe_emit_file (a->dw_attr_val.v.val_file),
10872 a->dw_attr_val.v.val_file->filename);
10873 break;
10874
10875 case dw_val_class_data8:
10876 {
10877 int i;
10878
10879 for (i = 0; i < 8; i++)
10880 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10881 i == 0 ? "%s" : NULL, name);
10882 break;
10883 }
10884
10885 case dw_val_class_high_pc:
10886 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10887 get_AT_low_pc (die), "DW_AT_high_pc");
10888 break;
10889
10890 case dw_val_class_discr_value:
10891 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10892 break;
10893
10894 case dw_val_class_discr_list:
10895 {
10896 dw_discr_list_ref list = AT_discr_list (a);
10897 const int size = size_of_discr_list (list);
10898
10899 /* This is a block, so output its length first. */
10900 dw2_asm_output_data (constant_size (size), size,
10901 "%s: block size", name);
10902
10903 for (; list != NULL; list = list->dw_discr_next)
10904 {
10905 /* One byte for the discriminant value descriptor, and then as
10906 many LEB128 numbers as required. */
10907 if (list->dw_discr_range)
10908 dw2_asm_output_data (1, DW_DSC_range,
10909 "%s: DW_DSC_range", name);
10910 else
10911 dw2_asm_output_data (1, DW_DSC_label,
10912 "%s: DW_DSC_label", name);
10913
10914 output_discr_value (&list->dw_discr_lower_bound, name);
10915 if (list->dw_discr_range)
10916 output_discr_value (&list->dw_discr_upper_bound, name);
10917 }
10918 break;
10919 }
10920
10921 default:
10922 gcc_unreachable ();
10923 }
10924 }
10925
10926 FOR_EACH_CHILD (die, c, output_die (c));
10927
10928 /* Add null byte to terminate sibling list. */
10929 if (die->die_child != NULL)
10930 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10931 (unsigned long) die->die_offset);
10932 }
10933
10934 /* Output the dwarf version number. */
10935
10936 static void
10937 output_dwarf_version ()
10938 {
10939 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10940 views in loclist. That will change eventually. */
10941 if (dwarf_version == 6)
10942 {
10943 static bool once;
10944 if (!once)
10945 {
10946 warning (0,
10947 "-gdwarf-6 is output as version 5 with incompatibilities");
10948 once = true;
10949 }
10950 dw2_asm_output_data (2, 5, "DWARF version number");
10951 }
10952 else
10953 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10954 }
10955
10956 /* Output the compilation unit that appears at the beginning of the
10957 .debug_info section, and precedes the DIE descriptions. */
10958
10959 static void
10960 output_compilation_unit_header (enum dwarf_unit_type ut)
10961 {
10962 if (!XCOFF_DEBUGGING_INFO)
10963 {
10964 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10965 dw2_asm_output_data (4, 0xffffffff,
10966 "Initial length escape value indicating 64-bit DWARF extension");
10967 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10968 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10969 "Length of Compilation Unit Info");
10970 }
10971
10972 output_dwarf_version ();
10973 if (dwarf_version >= 5)
10974 {
10975 const char *name;
10976 switch (ut)
10977 {
10978 case DW_UT_compile: name = "DW_UT_compile"; break;
10979 case DW_UT_type: name = "DW_UT_type"; break;
10980 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10981 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10982 default: gcc_unreachable ();
10983 }
10984 dw2_asm_output_data (1, ut, "%s", name);
10985 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10986 }
10987 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10988 debug_abbrev_section,
10989 "Offset Into Abbrev. Section");
10990 if (dwarf_version < 5)
10991 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10992 }
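
/* Editor's sketch, not part of GCC: for 32-bit DWARF 5 the header emitted by
   output_compilation_unit_header above corresponds to the field order shown
   below.  The struct (and its name) is purely illustrative and assumes
   <stdint.h> types; GCC emits each field individually with dw2_asm_output_*,
   and for DWARF < 5 the address size follows the abbrev offset instead.  */
#if 0
struct dwarf5_cu_header_32		/* hypothetical, illustration only */
{
  uint32_t unit_length;			/* next_die_offset - initial length size */
  uint16_t version;			/* see output_dwarf_version */
  uint8_t  unit_type;			/* DW_UT_compile or DW_UT_split_compile */
  uint8_t  address_size;		/* DWARF2_ADDR_SIZE */
  uint32_t debug_abbrev_offset;		/* offset of abbrev_section_label */
};
#endif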
10993
10994 /* Output the compilation unit DIE and its children. */
10995
10996 static void
10997 output_comp_unit (dw_die_ref die, int output_if_empty,
10998 const unsigned char *dwo_id)
10999 {
11000 const char *secname, *oldsym;
11001 char *tmp;
11002
11003   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11004 if (!output_if_empty && die->die_child == NULL)
11005 return;
11006
11007 /* Even if there are no children of this DIE, we must output the information
11008 about the compilation unit. Otherwise, on an empty translation unit, we
11009 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11010 will then complain when examining the file. First mark all the DIEs in
11011 this CU so we know which get local refs. */
11012 mark_dies (die);
11013
11014 external_ref_hash_type *extern_map = optimize_external_refs (die);
11015
11016 /* For now, optimize only the main CU, in order to optimize the rest
11017 we'd need to see all of them earlier. Leave the rest for post-linking
11018 tools like DWZ. */
11019 if (die == comp_unit_die ())
11020 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11021
11022 build_abbrev_table (die, extern_map);
11023
11024 optimize_abbrev_table ();
11025
11026 delete extern_map;
11027
11028 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11029 next_die_offset = (dwo_id
11030 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11031 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11032 calc_die_sizes (die);
11033
11034 oldsym = die->die_id.die_symbol;
11035 if (oldsym && die->comdat_type_p)
11036 {
11037 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11038
11039 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11040 secname = tmp;
11041 die->die_id.die_symbol = NULL;
11042 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11043 }
11044 else
11045 {
11046 switch_to_section (debug_info_section);
11047 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11048 info_section_emitted = true;
11049 }
11050
11051 /* For LTO cross unit DIE refs we want a symbol on the start of the
11052 debuginfo section, not on the CU DIE. */
11053 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11054 {
11055 /* ??? No way to get visibility assembled without a decl. */
11056 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11057 get_identifier (oldsym), char_type_node);
11058 TREE_PUBLIC (decl) = true;
11059 TREE_STATIC (decl) = true;
11060 DECL_ARTIFICIAL (decl) = true;
11061 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11062 DECL_VISIBILITY_SPECIFIED (decl) = true;
11063 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11064 #ifdef ASM_WEAKEN_LABEL
11065 /* We prefer a .weak because that handles duplicates from duplicate
11066 archive members in a graceful way. */
11067 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11068 #else
11069 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11070 #endif
11071 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11072 }
11073
11074 /* Output debugging information. */
11075 output_compilation_unit_header (dwo_id
11076 ? DW_UT_split_compile : DW_UT_compile);
11077 if (dwarf_version >= 5)
11078 {
11079 if (dwo_id != NULL)
11080 for (int i = 0; i < 8; i++)
11081 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11082 }
11083 output_die (die);
11084
11085 /* Leave the marks on the main CU, so we can check them in
11086 output_pubnames. */
11087 if (oldsym)
11088 {
11089 unmark_dies (die);
11090 die->die_id.die_symbol = oldsym;
11091 }
11092 }
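
/* Editor's sketch (hypothetical symbol name and offset): with -flto the weak
   hidden symbol emitted above marks the start of this CU's debug info, so a
   cross-unit reference from another CU can later be assembled as
   "symbol + DIE offset" through the with_offset path in output_die.  */
#if 0
const char *sym = "cu_debug_sym";	/* hypothetical section-start symbol */
unsigned long off = 0x2a;		/* hypothetical offset of the DIE */
dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, off,
		       debug_info_section, "%s", "DW_AT_type");
#endif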
11093
11094 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11095 and .debug_pubtypes. This is configured per-target, but can be
11096 overridden by the -gpubnames or -gno-pubnames options. */
11097
11098 static inline bool
11099 want_pubnames (void)
11100 {
11101 if (debug_info_level <= DINFO_LEVEL_TERSE)
11102 return false;
11103 if (debug_generate_pub_sections != -1)
11104 return debug_generate_pub_sections;
11105 return targetm.want_debug_pub_sections;
11106 }
11107
11108 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11109
11110 static void
11111 add_AT_pubnames (dw_die_ref die)
11112 {
11113 if (want_pubnames ())
11114 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11115 }
11116
11117 /* Add a string attribute value to a skeleton DIE. */
11118
11119 static inline void
11120 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11121 const char *str)
11122 {
11123 dw_attr_node attr;
11124 struct indirect_string_node *node;
11125
11126 if (! skeleton_debug_str_hash)
11127 skeleton_debug_str_hash
11128 = hash_table<indirect_string_hasher>::create_ggc (10);
11129
11130 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11131 find_string_form (node);
11132 if (node->form == dwarf_FORM (DW_FORM_strx))
11133 node->form = DW_FORM_strp;
11134
11135 attr.dw_attr = attr_kind;
11136 attr.dw_attr_val.val_class = dw_val_class_str;
11137 attr.dw_attr_val.val_entry = NULL;
11138 attr.dw_attr_val.v.val_str = node;
11139 add_dwarf_attr (die, &attr);
11140 }
11141
11142 /* Helper function to generate top-level dies for skeleton debug_info and
11143 debug_types. */
11144
11145 static void
11146 add_top_level_skeleton_die_attrs (dw_die_ref die)
11147 {
11148 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11149 const char *comp_dir = comp_dir_string ();
11150
11151 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11152 if (comp_dir != NULL)
11153 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11154 add_AT_pubnames (die);
11155 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11156 }
11157
11158 /* Output skeleton debug sections that point to the dwo file. */
11159
11160 static void
11161 output_skeleton_debug_sections (dw_die_ref comp_unit,
11162 const unsigned char *dwo_id)
11163 {
11164 /* These attributes will be found in the full debug_info section. */
11165 remove_AT (comp_unit, DW_AT_producer);
11166 remove_AT (comp_unit, DW_AT_language);
11167
11168 switch_to_section (debug_skeleton_info_section);
11169 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11170
11171   /* Produce the skeleton compilation-unit header.  This one differs enough
11172      from a normal CU header that it's better not to call
11173      output_compilation_unit_header.  */
11174 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11175 dw2_asm_output_data (4, 0xffffffff,
11176 "Initial length escape value indicating 64-bit "
11177 "DWARF extension");
11178
11179 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11180 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11181 - DWARF_INITIAL_LENGTH_SIZE
11182 + size_of_die (comp_unit),
11183 "Length of Compilation Unit Info");
11184 output_dwarf_version ();
11185 if (dwarf_version >= 5)
11186 {
11187 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11188 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11189 }
11190 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11191 debug_skeleton_abbrev_section,
11192 "Offset Into Abbrev. Section");
11193 if (dwarf_version < 5)
11194 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11195 else
11196 for (int i = 0; i < 8; i++)
11197 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11198
11199 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11200 output_die (comp_unit);
11201
11202 /* Build the skeleton debug_abbrev section. */
11203 switch_to_section (debug_skeleton_abbrev_section);
11204 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11205
11206 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11207
11208 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11209 }
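
/* Editor's illustration only (assumes <stdint.h> types, hypothetical struct
   name): shape of the 32-bit DWARF 5 skeleton unit header emitted above.  It
   matches the normal CU header except for the unit type and the trailing
   8-byte DWO id that ties the skeleton to the .dwo file.  */
#if 0
struct dwarf5_skeleton_header_32
{
  uint32_t unit_length;
  uint16_t version;			/* 5 */
  uint8_t  unit_type;			/* DW_UT_skeleton */
  uint8_t  address_size;		/* DWARF2_ADDR_SIZE */
  uint32_t debug_abbrev_offset;		/* skeleton .debug_abbrev */
  uint8_t  dwo_id[8];			/* matches the id in the .dwo CU */
};
#endif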
11210
11211 /* Output a comdat type unit DIE and its children. */
11212
11213 static void
11214 output_comdat_type_unit (comdat_type_node *node)
11215 {
11216 const char *secname;
11217 char *tmp;
11218 int i;
11219 #if defined (OBJECT_FORMAT_ELF)
11220 tree comdat_key;
11221 #endif
11222
11223 /* First mark all the DIEs in this CU so we know which get local refs. */
11224 mark_dies (node->root_die);
11225
11226 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11227
11228 build_abbrev_table (node->root_die, extern_map);
11229
11230 delete extern_map;
11231 extern_map = NULL;
11232
11233 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11234 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11235 calc_die_sizes (node->root_die);
11236
11237 #if defined (OBJECT_FORMAT_ELF)
11238 if (dwarf_version >= 5)
11239 {
11240 if (!dwarf_split_debug_info)
11241 secname = ".debug_info";
11242 else
11243 secname = ".debug_info.dwo";
11244 }
11245 else if (!dwarf_split_debug_info)
11246 secname = ".debug_types";
11247 else
11248 secname = ".debug_types.dwo";
11249
11250 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11251 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11252 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11253 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11254 comdat_key = get_identifier (tmp);
11255 targetm.asm_out.named_section (secname,
11256 SECTION_DEBUG | SECTION_LINKONCE,
11257 comdat_key);
11258 #else
11259 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11260 sprintf (tmp, (dwarf_version >= 5
11261 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11262 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11263 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11264 secname = tmp;
11265 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11266 #endif
11267
11268 /* Output debugging information. */
11269 output_compilation_unit_header (dwarf_split_debug_info
11270 ? DW_UT_split_type : DW_UT_type);
11271 output_signature (node->signature, "Type Signature");
11272 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11273 "Offset to Type DIE");
11274 output_die (node->root_die);
11275
11276 unmark_dies (node->root_die);
11277 }
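
/* Editor's sketch with a made-up signature: how the comdat key (ELF) or the
   ".gnu.linkonce" section name above is derived from the 8-byte type
   signature.  The signature bytes here are hypothetical.  */
#if 0
unsigned char sig[DWARF_TYPE_SIGNATURE_SIZE]
  = { 0xde, 0xad, 0xbe, 0xef, 0x01, 0x02, 0x03, 0x04 };
char buf[3 + DWARF_TYPE_SIGNATURE_SIZE * 2 + 1];
sprintf (buf, "wi.");				/* "wt." for DWARF < 5 */
for (int i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
  sprintf (buf + 3 + i * 2, "%02x", sig[i] & 0xff);
/* buf == "wi.deadbeef01020304"; on ELF this is the COMDAT group key for
   ".debug_info" (DWARF 5) or ".debug_types" (DWARF < 5), elsewhere it is
   appended to ".gnu.linkonce." to form the section name.  */
#endif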
11278
11279 /* Return the DWARF2/3 pubname associated with a decl. */
11280
11281 static const char *
11282 dwarf2_name (tree decl, int scope)
11283 {
11284 if (DECL_NAMELESS (decl))
11285 return NULL;
11286 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11287 }
11288
11289 /* Add a new entry to .debug_pubnames if appropriate. */
11290
11291 static void
11292 add_pubname_string (const char *str, dw_die_ref die)
11293 {
11294 pubname_entry e;
11295
11296 e.die = die;
11297 e.name = xstrdup (str);
11298 vec_safe_push (pubname_table, e);
11299 }
11300
11301 static void
11302 add_pubname (tree decl, dw_die_ref die)
11303 {
11304 if (!want_pubnames ())
11305 return;
11306
11307 /* Don't add items to the table when we expect that the consumer will have
11308 just read the enclosing die. For example, if the consumer is looking at a
11309 class_member, it will either be inside the class already, or will have just
11310 looked up the class to find the member. Either way, searching the class is
11311 faster than searching the index. */
11312 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11313 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11314 {
11315 const char *name = dwarf2_name (decl, 1);
11316
11317 if (name)
11318 add_pubname_string (name, die);
11319 }
11320 }
11321
11322 /* Add an enumerator to the pubnames section. */
11323
11324 static void
11325 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11326 {
11327 pubname_entry e;
11328
11329 gcc_assert (scope_name);
11330 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11331 e.die = die;
11332 vec_safe_push (pubname_table, e);
11333 }
11334
11335 /* Add a new entry to .debug_pubtypes if appropriate. */
11336
11337 static void
11338 add_pubtype (tree decl, dw_die_ref die)
11339 {
11340 pubname_entry e;
11341
11342 if (!want_pubnames ())
11343 return;
11344
11345 if ((TREE_PUBLIC (decl)
11346 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11347 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11348 {
11349 tree scope = NULL;
11350 const char *scope_name = "";
11351 const char *sep = is_cxx () ? "::" : ".";
11352 const char *name;
11353
11354 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11355 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11356 {
11357 scope_name = lang_hooks.dwarf_name (scope, 1);
11358 if (scope_name != NULL && scope_name[0] != '\0')
11359 scope_name = concat (scope_name, sep, NULL);
11360 else
11361 scope_name = "";
11362 }
11363
11364 if (TYPE_P (decl))
11365 name = type_tag (decl);
11366 else
11367 name = lang_hooks.dwarf_name (decl, 1);
11368
11369 /* If we don't have a name for the type, there's no point in adding
11370 it to the table. */
11371 if (name != NULL && name[0] != '\0')
11372 {
11373 e.die = die;
11374 e.name = concat (scope_name, name, NULL);
11375 vec_safe_push (pubtype_table, e);
11376 }
11377
11378 /* Although it might be more consistent to add the pubinfo for the
11379 enumerators as their dies are created, they should only be added if the
11380 enum type meets the criteria above. So rather than re-check the parent
11381 enum type whenever an enumerator die is created, just output them all
11382 here. This isn't protected by the name conditional because anonymous
11383 enums don't have names. */
11384 if (die->die_tag == DW_TAG_enumeration_type)
11385 {
11386 dw_die_ref c;
11387
11388 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11389 }
11390 }
11391 }
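
/* Editor's example (hypothetical names): a C++ type ns::Widget whose DIE sits
   under a namespace DIE yields the pubtypes entry "ns::Widget"; a non-C++
   language would use "." as the separator, giving "ns.Widget".  */
#if 0
const char *scope_name = "ns::";		/* from lang_hooks.dwarf_name */
const char *name = "Widget";			/* from type_tag */
char *entry = concat (scope_name, name, NULL);	/* "ns::Widget" */
#endif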
11392
11393 /* Output a single entry in the pubnames table. */
11394
11395 static void
11396 output_pubname (dw_offset die_offset, pubname_entry *entry)
11397 {
11398 dw_die_ref die = entry->die;
11399 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11400
11401 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11402
11403 if (debug_generate_pub_sections == 2)
11404 {
11405 /* This logic follows gdb's method for determining the value of the flag
11406 byte. */
11407 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11408 switch (die->die_tag)
11409 {
11410 case DW_TAG_typedef:
11411 case DW_TAG_base_type:
11412 case DW_TAG_subrange_type:
11413 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11414 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11415 break;
11416 case DW_TAG_enumerator:
11417 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11418 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11419 if (!is_cxx ())
11420 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11421 break;
11422 case DW_TAG_subprogram:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11424 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11425 if (!is_ada ())
11426 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11427 break;
11428 case DW_TAG_constant:
11429 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11430 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11431 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11432 break;
11433 case DW_TAG_variable:
11434 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11435 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11436 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11437 break;
11438 case DW_TAG_namespace:
11439 case DW_TAG_imported_declaration:
11440 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11441 break;
11442 case DW_TAG_class_type:
11443 case DW_TAG_interface_type:
11444 case DW_TAG_structure_type:
11445 case DW_TAG_union_type:
11446 case DW_TAG_enumeration_type:
11447 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11448 if (!is_cxx ())
11449 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11450 break;
11451 default:
11452 /* An unusual tag. Leave the flag-byte empty. */
11453 break;
11454 }
11455 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11456 "GDB-index flags");
11457 }
11458
11459 dw2_asm_output_nstring (entry->name, -1, "external name");
11460 }
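
/* Editor's sketch: with -gpubnames level 2 (GDB index flavour), a file-static
   function gets kind FUNCTION with the static bit set, and only the byte
   above the CU-index bits is emitted.  The macro calls mirror the ones used
   above; the resulting value depends on GDB_INDEX_CU_BITSIZE.  */
#if 0
uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
GDB_INDEX_SYMBOL_KIND_SET_VALUE (flags, GDB_INDEX_SYMBOL_KIND_FUNCTION);
GDB_INDEX_SYMBOL_STATIC_SET_VALUE (flags, 1);	/* no DW_AT_external */
unsigned char flag_byte = flags >> GDB_INDEX_CU_BITSIZE;
#endif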
11461
11462
11463 /* Output the public names table used to speed up access to externally
11464 visible names; or the public types table used to find type definitions. */
11465
11466 static void
11467 output_pubnames (vec<pubname_entry, va_gc> *names)
11468 {
11469 unsigned i;
11470 unsigned long pubnames_length = size_of_pubnames (names);
11471 pubname_entry *pub;
11472
11473 if (!XCOFF_DEBUGGING_INFO)
11474 {
11475 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11476 dw2_asm_output_data (4, 0xffffffff,
11477 "Initial length escape value indicating 64-bit DWARF extension");
11478 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11479 "Pub Info Length");
11480 }
11481
11482 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11483 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11484
11485 if (dwarf_split_debug_info)
11486 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11487 debug_skeleton_info_section,
11488 "Offset of Compilation Unit Info");
11489 else
11490 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11491 debug_info_section,
11492 "Offset of Compilation Unit Info");
11493 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11494 "Compilation Unit Length");
11495
11496 FOR_EACH_VEC_ELT (*names, i, pub)
11497 {
11498 if (include_pubname_in_output (names, pub))
11499 {
11500 dw_offset die_offset = pub->die->die_offset;
11501
11502 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11503 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11504 gcc_assert (pub->die->die_mark);
11505
11506 /* If we're putting types in their own .debug_types sections,
11507 the .debug_pubtypes table will still point to the compile
11508 unit (not the type unit), so we want to use the offset of
11509 the skeleton DIE (if there is one). */
11510 if (pub->die->comdat_type_p && names == pubtype_table)
11511 {
11512 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11513
11514 if (type_node != NULL)
11515 die_offset = (type_node->skeleton_die != NULL
11516 ? type_node->skeleton_die->die_offset
11517 : comp_unit_die ()->die_offset);
11518 }
11519
11520 output_pubname (die_offset, pub);
11521 }
11522 }
11523
11524 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11525 }
11526
11527 /* Output public names and types tables if necessary. */
11528
11529 static void
11530 output_pubtables (void)
11531 {
11532 if (!want_pubnames () || !info_section_emitted)
11533 return;
11534
11535 switch_to_section (debug_pubnames_section);
11536 output_pubnames (pubname_table);
11537 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11538 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11539 simply won't look for the section. */
11540 switch_to_section (debug_pubtypes_section);
11541 output_pubnames (pubtype_table);
11542 }
11543
11544
11545 /* Output the information that goes into the .debug_aranges table.
11546 Namely, define the beginning and ending address range of the
11547 text section generated for this compilation unit. */
11548
11549 static void
11550 output_aranges (void)
11551 {
11552 unsigned i;
11553 unsigned long aranges_length = size_of_aranges ();
11554
11555 if (!XCOFF_DEBUGGING_INFO)
11556 {
11557 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11558 dw2_asm_output_data (4, 0xffffffff,
11559 "Initial length escape value indicating 64-bit DWARF extension");
11560 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11561 "Length of Address Ranges Info");
11562 }
11563
11564 /* Version number for aranges is still 2, even up to DWARF5. */
11565 dw2_asm_output_data (2, 2, "DWARF aranges version");
11566 if (dwarf_split_debug_info)
11567 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11568 debug_skeleton_info_section,
11569 "Offset of Compilation Unit Info");
11570 else
11571 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11572 debug_info_section,
11573 "Offset of Compilation Unit Info");
11574 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11575 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11576
11577 /* We need to align to twice the pointer size here. */
11578 if (DWARF_ARANGES_PAD_SIZE)
11579 {
11580       /* Pad using 2-byte words so that the padding is correct for any
11581 	 pointer size.  */
11582 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11583 2 * DWARF2_ADDR_SIZE);
11584 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11585 dw2_asm_output_data (2, 0, NULL);
11586 }
11587
11588   /* We must not output these entries if the sections were not used;
11589      for an unused section the length will be 0 and the address may
11590      end up as 0 if the section is discarded by ld --gc-sections,
11591      leaving an invalid (0, 0) entry that can be confused with the
11592      terminator.  */
11593 if (text_section_used)
11594 {
11595 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11597 text_section_label, "Length");
11598 }
11599 if (cold_text_section_used)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11604 cold_text_section_label, "Length");
11605 }
11606
11607 if (have_multiple_function_sections)
11608 {
11609 unsigned fde_idx;
11610 dw_fde_ref fde;
11611
11612 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11613 {
11614 if (DECL_IGNORED_P (fde->decl))
11615 continue;
11616 if (!fde->in_std_section)
11617 {
11618 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11619 "Address");
11620 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11621 fde->dw_fde_begin, "Length");
11622 }
11623 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11624 {
11625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11626 "Address");
11627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11628 fde->dw_fde_second_begin, "Length");
11629 }
11630 }
11631 }
11632
11633 /* Output the terminator words. */
11634 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11635 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11636 }
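
/* Editor's illustration only (assumes <stdint.h> types): on a 64-bit target
   each aranges entry emitted above is a (start address, length) pair, the
   header is padded so the first pair starts on a 2 * DWARF2_ADDR_SIZE
   boundary, and a (0, 0) pair terminates the list - which is why unused
   (garbage-collected) sections must be skipped above.  */
#if 0
struct aranges_tuple_64
{
  uint64_t start;	/* e.g. text_section_label */
  uint64_t length;	/* e.g. text_end_label - text_section_label */
};
#endif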
11637
11638 /* Add a new entry to .debug_ranges. Return its index into
11639 ranges_table vector. */
11640
11641 static unsigned int
11642 add_ranges_num (int num, bool maybe_new_sec)
11643 {
11644 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11645 vec_safe_push (ranges_table, r);
11646 return vec_safe_length (ranges_table) - 1;
11647 }
11648
11649 /* Add a new entry to .debug_ranges corresponding to a block, or a
11650 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11651 this entry might be in a different section from previous range. */
11652
11653 static unsigned int
11654 add_ranges (const_tree block, bool maybe_new_sec)
11655 {
11656 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11657 }
11658
11659 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11660    chain, or a middle entry of a chain that will be directly referred to.  */
11661
11662 static void
11663 note_rnglist_head (unsigned int offset)
11664 {
11665 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11666 return;
11667 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11668 }
11669
11670 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11671 When using dwarf_split_debug_info, address attributes in dies destined
11672 for the final executable should be direct references--setting the
11673 parameter force_direct ensures this behavior. */
11674
11675 static void
11676 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11677 bool *added, bool force_direct)
11678 {
11679 unsigned int in_use = vec_safe_length (ranges_by_label);
11680 unsigned int offset;
11681 dw_ranges_by_label rbl = { begin, end };
11682 vec_safe_push (ranges_by_label, rbl);
11683 offset = add_ranges_num (-(int)in_use - 1, true);
11684 if (!*added)
11685 {
11686 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11687 *added = true;
11688 note_rnglist_head (offset);
11689 }
11690 }
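
/* Editor's note, illustration only: label-pair entries are stored in
   ranges_table with a negative num, so that output_ranges / output_rnglists
   can tell them apart from block numbers and recover the ranges_by_label
   index.  */
#if 0
unsigned int in_use = 3;		/* hypothetical vector length */
int num = -(int) in_use - 1;		/* stored by add_ranges_by_labels: -4 */
int lab_idx = -num - 1;			/* recovered by the output routines: 3 */
#endif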
11691
11692 /* Emit .debug_ranges section. */
11693
11694 static void
11695 output_ranges (void)
11696 {
11697 unsigned i;
11698 static const char *const start_fmt = "Offset %#x";
11699 const char *fmt = start_fmt;
11700 dw_ranges *r;
11701
11702 switch_to_section (debug_ranges_section);
11703 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11704 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11705 {
11706 int block_num = r->num;
11707
11708 if (block_num > 0)
11709 {
11710 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11711 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11712
11713 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11714 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11715
11716 /* If all code is in the text section, then the compilation
11717 unit base address defaults to DW_AT_low_pc, which is the
11718 base of the text section. */
11719 if (!have_multiple_function_sections)
11720 {
11721 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11722 text_section_label,
11723 fmt, i * 2 * DWARF2_ADDR_SIZE);
11724 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11725 text_section_label, NULL);
11726 }
11727
11728 /* Otherwise, the compilation unit base address is zero,
11729 which allows us to use absolute addresses, and not worry
11730 about whether the target supports cross-section
11731 arithmetic. */
11732 else
11733 {
11734 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11735 fmt, i * 2 * DWARF2_ADDR_SIZE);
11736 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11737 }
11738
11739 fmt = NULL;
11740 }
11741
11742 /* Negative block_num stands for an index into ranges_by_label. */
11743 else if (block_num < 0)
11744 {
11745 int lab_idx = - block_num - 1;
11746
11747 if (!have_multiple_function_sections)
11748 {
11749 gcc_unreachable ();
11750 #if 0
11751 /* If we ever use add_ranges_by_labels () for a single
11752 function section, all we have to do is to take out
11753 the #if 0 above. */
11754 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11755 (*ranges_by_label)[lab_idx].begin,
11756 text_section_label,
11757 fmt, i * 2 * DWARF2_ADDR_SIZE);
11758 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11759 (*ranges_by_label)[lab_idx].end,
11760 text_section_label, NULL);
11761 #endif
11762 }
11763 else
11764 {
11765 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11766 (*ranges_by_label)[lab_idx].begin,
11767 fmt, i * 2 * DWARF2_ADDR_SIZE);
11768 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11769 (*ranges_by_label)[lab_idx].end,
11770 NULL);
11771 }
11772 }
11773 else
11774 {
11775 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11776 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11777 fmt = start_fmt;
11778 }
11779 }
11780 }
11781
11782 /* Non-zero if .debug_line_str should be used for .debug_line section
11783 strings or strings that are likely shareable with those. */
11784 #define DWARF5_USE_DEBUG_LINE_STR \
11785 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11786 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11787 /* FIXME: there is no .debug_line_str.dwo section, \
11788 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11789 && !dwarf_split_debug_info)
11790
11791 /* Assign .debug_rnglists indexes. */
11792
11793 static void
11794 index_rnglists (void)
11795 {
11796 unsigned i;
11797 dw_ranges *r;
11798
11799 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11800 if (r->label)
11801 r->idx = rnglist_idx++;
11802 }
11803
11804 /* Emit .debug_rnglists section. */
11805
11806 static void
11807 output_rnglists (unsigned generation)
11808 {
11809 unsigned i;
11810 dw_ranges *r;
11811 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11812 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11813 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11814
11815 switch_to_section (debug_ranges_section);
11816 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11817 /* There are up to 4 unique ranges labels per generation.
11818 See also init_sections_and_labels. */
11819 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11820 2 + generation * 4);
11821 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11822 3 + generation * 4);
11823 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11824 dw2_asm_output_data (4, 0xffffffff,
11825 "Initial length escape value indicating "
11826 "64-bit DWARF extension");
11827 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11828 "Length of Range Lists");
11829 ASM_OUTPUT_LABEL (asm_out_file, l1);
11830 output_dwarf_version ();
11831 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11832 dw2_asm_output_data (1, 0, "Segment Size");
11833 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11834 about relocation sizes and primarily care about the size of .debug*
11835 sections in linked shared libraries and executables, then
11836 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11837 into it are usually larger than just DW_FORM_sec_offset offsets
11838 into the .debug_rnglists section. */
11839 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11840 "Offset Entry Count");
11841 if (dwarf_split_debug_info)
11842 {
11843 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11844 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11845 if (r->label)
11846 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11847 ranges_base_label, NULL);
11848 }
11849
11850 const char *lab = "";
11851 unsigned int len = vec_safe_length (ranges_table);
11852 const char *base = NULL;
11853 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11854 {
11855 int block_num = r->num;
11856
11857 if (r->label)
11858 {
11859 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11860 lab = r->label;
11861 }
11862 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11863 base = NULL;
11864 if (block_num > 0)
11865 {
11866 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11867 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11868
11869 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11870 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11871
11872 if (HAVE_AS_LEB128)
11873 {
11874 /* If all code is in the text section, then the compilation
11875 unit base address defaults to DW_AT_low_pc, which is the
11876 base of the text section. */
11877 if (!have_multiple_function_sections)
11878 {
11879 dw2_asm_output_data (1, DW_RLE_offset_pair,
11880 "DW_RLE_offset_pair (%s)", lab);
11881 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11882 "Range begin address (%s)", lab);
11883 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11884 "Range end address (%s)", lab);
11885 continue;
11886 }
11887 if (base == NULL)
11888 {
11889 dw_ranges *r2 = NULL;
11890 if (i < len - 1)
11891 r2 = &(*ranges_table)[i + 1];
11892 if (r2
11893 && r2->num != 0
11894 && r2->label == NULL
11895 && !r2->maybe_new_sec)
11896 {
11897 dw2_asm_output_data (1, DW_RLE_base_address,
11898 "DW_RLE_base_address (%s)", lab);
11899 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11900 "Base address (%s)", lab);
11901 strcpy (basebuf, blabel);
11902 base = basebuf;
11903 }
11904 }
11905 if (base)
11906 {
11907 dw2_asm_output_data (1, DW_RLE_offset_pair,
11908 "DW_RLE_offset_pair (%s)", lab);
11909 dw2_asm_output_delta_uleb128 (blabel, base,
11910 "Range begin address (%s)", lab);
11911 dw2_asm_output_delta_uleb128 (elabel, base,
11912 "Range end address (%s)", lab);
11913 continue;
11914 }
11915 dw2_asm_output_data (1, DW_RLE_start_length,
11916 "DW_RLE_start_length (%s)", lab);
11917 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11918 "Range begin address (%s)", lab);
11919 dw2_asm_output_delta_uleb128 (elabel, blabel,
11920 "Range length (%s)", lab);
11921 }
11922 else
11923 {
11924 dw2_asm_output_data (1, DW_RLE_start_end,
11925 "DW_RLE_start_end (%s)", lab);
11926 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11927 "Range begin address (%s)", lab);
11928 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11929 "Range end address (%s)", lab);
11930 }
11931 }
11932
11933 /* Negative block_num stands for an index into ranges_by_label. */
11934 else if (block_num < 0)
11935 {
11936 int lab_idx = - block_num - 1;
11937 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11938 const char *elabel = (*ranges_by_label)[lab_idx].end;
11939
11940 if (!have_multiple_function_sections)
11941 gcc_unreachable ();
11942 if (HAVE_AS_LEB128)
11943 {
11944 dw2_asm_output_data (1, DW_RLE_start_length,
11945 "DW_RLE_start_length (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11947 "Range begin address (%s)", lab);
11948 dw2_asm_output_delta_uleb128 (elabel, blabel,
11949 "Range length (%s)", lab);
11950 }
11951 else
11952 {
11953 dw2_asm_output_data (1, DW_RLE_start_end,
11954 "DW_RLE_start_end (%s)", lab);
11955 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11956 "Range begin address (%s)", lab);
11957 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11958 "Range end address (%s)", lab);
11959 }
11960 }
11961 else
11962 dw2_asm_output_data (1, DW_RLE_end_of_list,
11963 "DW_RLE_end_of_list (%s)", lab);
11964 }
11965 ASM_OUTPUT_LABEL (asm_out_file, l2);
11966 }
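
/* Editor's sketch (hypothetical labels) of the rnglist encodings chosen
   above: with a single text section a range becomes a DW_RLE_offset_pair
   relative to the text label; otherwise a DW_RLE_base_address is established
   when profitable, with DW_RLE_start_length / DW_RLE_start_end as the
   fallbacks, and DW_RLE_end_of_list closes each list.  */
#if 0
dw2_asm_output_data (1, DW_RLE_offset_pair, "DW_RLE_offset_pair");
dw2_asm_output_delta_uleb128 ("LBB5", "Ltext0", "Range begin address");
dw2_asm_output_delta_uleb128 ("LBE5", "Ltext0", "Range end address");
dw2_asm_output_data (1, DW_RLE_end_of_list, "DW_RLE_end_of_list");
#endif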
11967
11968 /* Data structure containing information about input files. */
11969 struct file_info
11970 {
11971 const char *path; /* Complete file name. */
11972 const char *fname; /* File name part. */
11973 int length; /* Length of entire string. */
11974 struct dwarf_file_data * file_idx; /* Index in input file table. */
11975 int dir_idx; /* Index in directory table. */
11976 };
11977
11978 /* Data structure containing information about directories with source
11979 files. */
11980 struct dir_info
11981 {
11982 const char *path; /* Path including directory name. */
11983 int length; /* Path length. */
11984 int prefix; /* Index of directory entry which is a prefix. */
11985 int count; /* Number of files in this directory. */
11986 int dir_idx; /* Index of directory used as base. */
11987 };
11988
11989 /* Callback function for file_info comparison. We sort by looking at
11990 the directories in the path. */
11991
11992 static int
11993 file_info_cmp (const void *p1, const void *p2)
11994 {
11995 const struct file_info *const s1 = (const struct file_info *) p1;
11996 const struct file_info *const s2 = (const struct file_info *) p2;
11997 const unsigned char *cp1;
11998 const unsigned char *cp2;
11999
12000   /* Take care of file names without directories.  We need to return
12001      consistent values to qsort, since some implementations get confused
12002      if we return the same value when identical operands are passed in
12003      opposite orders.  So if neither has a directory, return 0; otherwise
12004      return 1 or -1 depending on which one has the directory.  We want the
12005      one with a directory to sort after the one without, so all files
12006      without a directory are at the start (normally just the CU file).  */
12007 if ((s1->path == s1->fname || s2->path == s2->fname))
12008 return (s2->path == s2->fname) - (s1->path == s1->fname);
12009
12010 cp1 = (const unsigned char *) s1->path;
12011 cp2 = (const unsigned char *) s2->path;
12012
12013 while (1)
12014 {
12015 ++cp1;
12016 ++cp2;
12017 /* Reached the end of the first path? If so, handle like above,
12018 but now we want longer directory prefixes before shorter ones. */
12019 if ((cp1 == (const unsigned char *) s1->fname)
12020 || (cp2 == (const unsigned char *) s2->fname))
12021 return ((cp1 == (const unsigned char *) s1->fname)
12022 - (cp2 == (const unsigned char *) s2->fname));
12023
12024 /* Character of current path component the same? */
12025 else if (*cp1 != *cp2)
12026 return *cp1 - *cp2;
12027 }
12028 }
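
/* Editor's example (hypothetical paths) of the order file_info_cmp produces
   under qsort: directory-less files come first, then paths compared byte by
   byte up to their file-name parts, with a longer directory prefix sorting
   before a shorter one that it extends.  */
#if 0
const char *sorted[] = {
  "main.c",		/* no directory: always first */
  "src/util/a.c",	/* longer directory prefix sorts before ...  */
  "src/b.c",		/* ... a shorter directory sharing that prefix */
};
#endif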
12029
12030 struct file_name_acquire_data
12031 {
12032 struct file_info *files;
12033 int used_files;
12034 int max_files;
12035 };
12036
12037 /* Traversal function for the hash table. */
12038
12039 int
12040 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12041 {
12042 struct dwarf_file_data *d = *slot;
12043 struct file_info *fi;
12044 const char *f;
12045
12046 gcc_assert (fnad->max_files >= d->emitted_number);
12047
12048 if (! d->emitted_number)
12049 return 1;
12050
12051 gcc_assert (fnad->max_files != fnad->used_files);
12052
12053 fi = fnad->files + fnad->used_files++;
12054
12055 /* Skip all leading "./". */
12056 f = d->filename;
12057 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12058 f += 2;
12059
12060 /* Create a new array entry. */
12061 fi->path = f;
12062 fi->length = strlen (f);
12063 fi->file_idx = d;
12064
12065 /* Search for the file name part. */
12066 f = strrchr (f, DIR_SEPARATOR);
12067 #if defined (DIR_SEPARATOR_2)
12068 {
12069 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12070
12071 if (g != NULL)
12072 {
12073 if (f == NULL || f < g)
12074 f = g;
12075 }
12076 }
12077 #endif
12078
12079 fi->fname = f == NULL ? fi->path : f + 1;
12080 return 1;
12081 }
12082
12083 /* Helper function for output_file_names. Emit a FORM encoded
12084 string STR, with assembly comment start ENTRY_KIND and
12085    index IDX.  */
12086
12087 static void
12088 output_line_string (enum dwarf_form form, const char *str,
12089 const char *entry_kind, unsigned int idx)
12090 {
12091 switch (form)
12092 {
12093 case DW_FORM_string:
12094 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12095 break;
12096 case DW_FORM_line_strp:
12097 if (!debug_line_str_hash)
12098 debug_line_str_hash
12099 = hash_table<indirect_string_hasher>::create_ggc (10);
12100
12101 struct indirect_string_node *node;
12102 node = find_AT_string_in_table (str, debug_line_str_hash);
12103 set_indirect_string (node);
12104 node->form = form;
12105 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12106 debug_line_str_section, "%s: %#x: \"%s\"",
12107 entry_kind, 0, node->str);
12108 break;
12109 default:
12110 gcc_unreachable ();
12111 }
12112 }
12113
12114 /* Output the directory table and the file name table. We try to minimize
12115 the total amount of memory needed. A heuristic is used to avoid large
12116 slowdowns with many input files. */
12117
12118 static void
12119 output_file_names (void)
12120 {
12121 struct file_name_acquire_data fnad;
12122 int numfiles;
12123 struct file_info *files;
12124 struct dir_info *dirs;
12125 int *saved;
12126 int *savehere;
12127 int *backmap;
12128 int ndirs;
12129 int idx_offset;
12130 int i;
12131
12132 if (!last_emitted_file)
12133 {
12134 if (dwarf_version >= 5)
12135 {
12136 dw2_asm_output_data (1, 0, "Directory entry format count");
12137 dw2_asm_output_data_uleb128 (0, "Directories count");
12138 dw2_asm_output_data (1, 0, "File name entry format count");
12139 dw2_asm_output_data_uleb128 (0, "File names count");
12140 }
12141 else
12142 {
12143 dw2_asm_output_data (1, 0, "End directory table");
12144 dw2_asm_output_data (1, 0, "End file name table");
12145 }
12146 return;
12147 }
12148
12149 numfiles = last_emitted_file->emitted_number;
12150
12151 /* Allocate the various arrays we need. */
12152 files = XALLOCAVEC (struct file_info, numfiles);
12153 dirs = XALLOCAVEC (struct dir_info, numfiles);
12154
12155 fnad.files = files;
12156 fnad.used_files = 0;
12157 fnad.max_files = numfiles;
12158 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12159 gcc_assert (fnad.used_files == fnad.max_files);
12160
12161 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12162
12163 /* Find all the different directories used. */
12164 dirs[0].path = files[0].path;
12165 dirs[0].length = files[0].fname - files[0].path;
12166 dirs[0].prefix = -1;
12167 dirs[0].count = 1;
12168 dirs[0].dir_idx = 0;
12169 files[0].dir_idx = 0;
12170 ndirs = 1;
12171
12172 for (i = 1; i < numfiles; i++)
12173 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12174 && memcmp (dirs[ndirs - 1].path, files[i].path,
12175 dirs[ndirs - 1].length) == 0)
12176 {
12177 /* Same directory as last entry. */
12178 files[i].dir_idx = ndirs - 1;
12179 ++dirs[ndirs - 1].count;
12180 }
12181 else
12182 {
12183 int j;
12184
12185 /* This is a new directory. */
12186 dirs[ndirs].path = files[i].path;
12187 dirs[ndirs].length = files[i].fname - files[i].path;
12188 dirs[ndirs].count = 1;
12189 dirs[ndirs].dir_idx = ndirs;
12190 files[i].dir_idx = ndirs;
12191
12192 /* Search for a prefix. */
12193 dirs[ndirs].prefix = -1;
12194 for (j = 0; j < ndirs; j++)
12195 if (dirs[j].length < dirs[ndirs].length
12196 && dirs[j].length > 1
12197 && (dirs[ndirs].prefix == -1
12198 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12199 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12200 dirs[ndirs].prefix = j;
12201
12202 ++ndirs;
12203 }
12204
12205   /* Now to the actual work.  We have to find a subset of the directories
12206      which allows expressing the file names using references to the
12207      directory table with the least number of characters.  We do not do an
12208      exhaustive search over every combination of every possible prefix.
12209      Instead we use a heuristic which provides nearly optimal results in
12210      most cases and is never far off.  */
12211 saved = XALLOCAVEC (int, ndirs);
12212 savehere = XALLOCAVEC (int, ndirs);
12213
12214 memset (saved, '\0', ndirs * sizeof (saved[0]));
12215 for (i = 0; i < ndirs; i++)
12216 {
12217 int j;
12218 int total;
12219
12220 /* We can always save some space for the current directory. But this
12221 does not mean it will be enough to justify adding the directory. */
12222 savehere[i] = dirs[i].length;
12223 total = (savehere[i] - saved[i]) * dirs[i].count;
12224
12225 for (j = i + 1; j < ndirs; j++)
12226 {
12227 savehere[j] = 0;
12228 if (saved[j] < dirs[i].length)
12229 {
12230 /* Determine whether the dirs[i] path is a prefix of the
12231 dirs[j] path. */
12232 int k;
12233
12234 k = dirs[j].prefix;
12235 while (k != -1 && k != (int) i)
12236 k = dirs[k].prefix;
12237
12238 if (k == (int) i)
12239 {
12240 /* Yes it is. We can possibly save some memory by
12241 writing the filenames in dirs[j] relative to
12242 dirs[i]. */
12243 savehere[j] = dirs[i].length;
12244 total += (savehere[j] - saved[j]) * dirs[j].count;
12245 }
12246 }
12247 }
12248
12249 /* Check whether we can save enough to justify adding the dirs[i]
12250 directory. */
12251 if (total > dirs[i].length + 1)
12252 {
12253 /* It's worthwhile adding. */
12254 for (j = i; j < ndirs; j++)
12255 if (savehere[j] > 0)
12256 {
12257 /* Remember how much we saved for this directory so far. */
12258 saved[j] = savehere[j];
12259
12260 /* Remember the prefix directory. */
12261 dirs[j].dir_idx = i;
12262 }
12263 }
12264 }
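
  /* Editor's worked example of the heuristic above, with made-up numbers:
     if dirs[i] = "/usr/include/" (length 13) holds 4 files, and a
     sub-directory dirs[j] = "/usr/include/sys/" holds 2 more files that have
     saved nothing so far, the candidate saving is
	total = 13 * 4 + 13 * 2 = 78
     which exceeds the 13 + 1 bytes needed to emit "/usr/include/" itself, so
     dirs[i] is kept and recorded as the base (dir_idx) of both directories.  */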
12265
12266 /* Emit the directory name table. */
12267 idx_offset = dirs[0].length > 0 ? 1 : 0;
12268 enum dwarf_form str_form = DW_FORM_string;
12269 enum dwarf_form idx_form = DW_FORM_udata;
12270 if (dwarf_version >= 5)
12271 {
12272 const char *comp_dir = comp_dir_string ();
12273 if (comp_dir == NULL)
12274 comp_dir = "";
12275 dw2_asm_output_data (1, 1, "Directory entry format count");
12276 if (DWARF5_USE_DEBUG_LINE_STR)
12277 str_form = DW_FORM_line_strp;
12278 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12279 dw2_asm_output_data_uleb128 (str_form, "%s",
12280 get_DW_FORM_name (str_form));
12281 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12282 if (str_form == DW_FORM_string)
12283 {
12284 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12285 for (i = 1 - idx_offset; i < ndirs; i++)
12286 dw2_asm_output_nstring (dirs[i].path,
12287 dirs[i].length
12288 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12289 "Directory Entry: %#x", i + idx_offset);
12290 }
12291 else
12292 {
12293 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12294 for (i = 1 - idx_offset; i < ndirs; i++)
12295 {
12296 const char *str
12297 = ggc_alloc_string (dirs[i].path,
12298 dirs[i].length
12299 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12300 output_line_string (str_form, str, "Directory Entry",
12301 (unsigned) i + idx_offset);
12302 }
12303 }
12304 }
12305 else
12306 {
12307 for (i = 1 - idx_offset; i < ndirs; i++)
12308 dw2_asm_output_nstring (dirs[i].path,
12309 dirs[i].length
12310 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12311 "Directory Entry: %#x", i + idx_offset);
12312
12313 dw2_asm_output_data (1, 0, "End directory table");
12314 }
12315
12316 /* We have to emit them in the order of emitted_number since that's
12317 used in the debug info generation. To do this efficiently we
12318 generate a back-mapping of the indices first. */
12319 backmap = XALLOCAVEC (int, numfiles);
12320 for (i = 0; i < numfiles; i++)
12321 backmap[files[i].file_idx->emitted_number - 1] = i;
12322
12323 if (dwarf_version >= 5)
12324 {
12325 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12326 if (filename0 == NULL)
12327 filename0 = "";
12328       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12329 	 DW_FORM_data2.  Choose one based on the number of directories
12330 	 and how much space they would occupy in each encoding.
12331 	 If we have at most 256 directories, all indexes fit into
12332 	 a single byte, so DW_FORM_data1 is most compact (with at most
12333 	 128 directories, DW_FORM_udata would be just as compact, but
12334 	 no shorter and slower to decode).  */
12335 if (ndirs + idx_offset <= 256)
12336 idx_form = DW_FORM_data1;
12337       /* If there are more than 65536 directories, we have to use
12338 	 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12339 	 Otherwise, compute how much space the indexes would occupy if
12340 	 they all used DW_FORM_udata - sum - and compare that to the size
12341 	 of the DW_FORM_data2 encoding, and pick the more efficient one.  */
12342 else if (ndirs + idx_offset <= 65536)
12343 {
12344 unsigned HOST_WIDE_INT sum = 1;
12345 for (i = 0; i < numfiles; i++)
12346 {
12347 int file_idx = backmap[i];
12348 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12349 sum += size_of_uleb128 (dir_idx);
12350 }
12351 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12352 idx_form = DW_FORM_data2;
12353 }
12354 #ifdef VMS_DEBUGGING_INFO
12355 dw2_asm_output_data (1, 4, "File name entry format count");
12356 #else
12357 dw2_asm_output_data (1, 2, "File name entry format count");
12358 #endif
12359 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12360 dw2_asm_output_data_uleb128 (str_form, "%s",
12361 get_DW_FORM_name (str_form));
12362 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12363 "DW_LNCT_directory_index");
12364 dw2_asm_output_data_uleb128 (idx_form, "%s",
12365 get_DW_FORM_name (idx_form));
12366 #ifdef VMS_DEBUGGING_INFO
12367 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12368 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12369 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12370 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12371 #endif
12372 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12373
12374 output_line_string (str_form, filename0, "File Entry", 0);
12375
12376 /* Include directory index. */
12377 if (idx_form != DW_FORM_udata)
12378 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12379 0, NULL);
12380 else
12381 dw2_asm_output_data_uleb128 (0, NULL);
12382
12383 #ifdef VMS_DEBUGGING_INFO
12384 dw2_asm_output_data_uleb128 (0, NULL);
12385 dw2_asm_output_data_uleb128 (0, NULL);
12386 #endif
12387 }
12388
12389 /* Now write all the file names. */
12390 for (i = 0; i < numfiles; i++)
12391 {
12392 int file_idx = backmap[i];
12393 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12394
12395 #ifdef VMS_DEBUGGING_INFO
12396 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12397
12398 /* Setting these fields can lead to debugger miscomparisons,
12399 but VMS Debug requires them to be set correctly. */
12400
12401 int ver;
12402 long long cdt;
12403 long siz;
12404 int maxfilelen = (strlen (files[file_idx].path)
12405 + dirs[dir_idx].length
12406 + MAX_VMS_VERSION_LEN + 1);
12407 char *filebuf = XALLOCAVEC (char, maxfilelen);
12408
12409 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12410 snprintf (filebuf, maxfilelen, "%s;%d",
12411 files[file_idx].path + dirs[dir_idx].length, ver);
12412
12413 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12414
12415 /* Include directory index. */
12416 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12417 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12418 dir_idx + idx_offset, NULL);
12419 else
12420 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12424 &cdt, 0, 0, 0) == 0)
12425 ? cdt : 0, NULL);
12426
12427 /* File length in bytes. */
12428 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12429 0, &siz, 0, 0) == 0)
12430 ? siz : 0, NULL);
12431 #else
12432 output_line_string (str_form,
12433 files[file_idx].path + dirs[dir_idx].length,
12434 "File Entry", (unsigned) i + 1);
12435
12436 /* Include directory index. */
12437 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12438 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12439 dir_idx + idx_offset, NULL);
12440 else
12441 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12442
12443 if (dwarf_version >= 5)
12444 continue;
12445
12446 /* Modification time. */
12447 dw2_asm_output_data_uleb128 (0, NULL);
12448
12449 /* File length in bytes. */
12450 dw2_asm_output_data_uleb128 (0, NULL);
12451 #endif /* VMS_DEBUGGING_INFO */
12452 }
12453
12454 if (dwarf_version < 5)
12455 dw2_asm_output_data (1, 0, "End file name table");
12456 }
12457
12458
12459 /* Output one line number table into the .debug_line section. */
12460
12461 static void
12462 output_one_line_info_table (dw_line_info_table *table)
12463 {
12464 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12465 unsigned int current_line = 1;
12466 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12467 dw_line_info_entry *ent, *prev_addr;
12468 size_t i;
12469 unsigned int view;
12470
12471 view = 0;
12472
12473 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12474 {
12475 switch (ent->opcode)
12476 {
12477 case LI_set_address:
12478 /* ??? Unfortunately, we have little choice here currently, and
12479 must always use the most general form. GCC does not know the
12480 address delta itself, so we can't use DW_LNS_advance_pc. Many
12481 ports do have length attributes which will give an upper bound
12482 on the address range. We could perhaps use length attributes
12483 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12484 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12485
12486 view = 0;
12487
12488 /* This can handle any delta. This takes
12489 3 + DWARF2_ADDR_SIZE bytes. */
12490 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12491 debug_variable_location_views
12492 ? ", reset view to 0" : "");
12493 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12494 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12495 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12496
12497 prev_addr = ent;
12498 break;
12499
12500 case LI_adv_address:
12501 {
12502 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12503 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12504 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12505
12506 view++;
12507
12508 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12509 dw2_asm_output_delta (2, line_label, prev_label,
12510 "from %s to %s", prev_label, line_label);
12511
12512 prev_addr = ent;
12513 break;
12514 }
12515
12516 case LI_set_line:
12517 if (ent->val == current_line)
12518 {
12519 /* We still need to start a new row, so output a copy insn. */
12520 dw2_asm_output_data (1, DW_LNS_copy,
12521 "copy line %u", current_line);
12522 }
12523 else
12524 {
12525 int line_offset = ent->val - current_line;
12526 int line_delta = line_offset - DWARF_LINE_BASE;
12527
12528 current_line = ent->val;
12529 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12530 {
12531 /* This can handle deltas from -10 to 234, using the current
12532 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12533 This takes 1 byte. */
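	      /* Concretely (with the default DWARF_LINE_BASE of -10, and
		 assuming a DWARF_LINE_OPCODE_BASE of 13): advancing by one
		 source line gives line_delta == 11, so the single special
		 opcode byte emitted below is 13 + 11 == 24.  */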
12534 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12535 "line %u", current_line);
12536 }
12537 else
12538 {
12539 /* This can handle any delta. This takes at least 3 bytes,
12540 depending on the value being encoded. */
12541 dw2_asm_output_data (1, DW_LNS_advance_line,
12542 "advance to line %u", current_line);
12543 dw2_asm_output_data_sleb128 (line_offset, NULL);
12544 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12545 }
12546 }
12547 break;
12548
12549 case LI_set_file:
12550 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12551 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12552 break;
12553
12554 case LI_set_column:
12555 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12556 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12557 break;
12558
12559 case LI_negate_stmt:
12560 current_is_stmt = !current_is_stmt;
12561 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12562 "is_stmt %d", current_is_stmt);
12563 break;
12564
12565 case LI_set_prologue_end:
12566 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12567 "set prologue end");
12568 break;
12569
12570 case LI_set_epilogue_begin:
12571 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12572 "set epilogue begin");
12573 break;
12574
12575 case LI_set_discriminator:
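	  /* Extended opcode layout: a zero escape byte, a ULEB128 length
	     covering the sub-opcode and its operand, the
	     DW_LNE_set_discriminator sub-opcode, then the ULEB128
	     discriminator value.  */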
12576 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12577 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12578 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12579 dw2_asm_output_data_uleb128 (ent->val, NULL);
12580 break;
12581 }
12582 }
12583
12584 /* Emit debug info for the address of the end of the table. */
12585 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12586 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12587 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12588 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12589
12590 dw2_asm_output_data (1, 0, "end sequence");
12591 dw2_asm_output_data_uleb128 (1, NULL);
12592 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12593 }
12594
12595 /* Output the source line number correspondence information. This
12596 information goes into the .debug_line section. */
12597
12598 static void
12599 output_line_info (bool prologue_only)
12600 {
12601 static unsigned int generation;
12602 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12603 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12604 bool saw_one = false;
12605 int opc;
12606
12607 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12608 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12609 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12610 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12611
12612 if (!XCOFF_DEBUGGING_INFO)
12613 {
12614 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12615 dw2_asm_output_data (4, 0xffffffff,
12616 "Initial length escape value indicating 64-bit DWARF extension");
12617 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12618 "Length of Source Line Info");
12619 }
12620
12621 ASM_OUTPUT_LABEL (asm_out_file, l1);
12622
12623 output_dwarf_version ();
12624 if (dwarf_version >= 5)
12625 {
12626 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12627 dw2_asm_output_data (1, 0, "Segment Size");
12628 }
12629 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12630 ASM_OUTPUT_LABEL (asm_out_file, p1);
12631
12632 /* Define the architecture-dependent minimum instruction length (in bytes).
12633 In this implementation of DWARF, this field is used for information
12634 purposes only. Since GCC generates assembly language, we have no
12635 a priori knowledge of how many instruction bytes are generated for each
12636 source line, and therefore can use only the DW_LNE_set_address and
12637 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12638 this as '1', which is "correct enough" for all architectures,
12639 and don't let the target override. */
12640 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12641
12642 if (dwarf_version >= 4)
12643 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12644 "Maximum Operations Per Instruction");
12645 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12646 "Default is_stmt_start flag");
12647 dw2_asm_output_data (1, DWARF_LINE_BASE,
12648 "Line Base Value (Special Opcodes)");
12649 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12650 "Line Range Value (Special Opcodes)");
12651 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12652 "Special Opcode Base");
12653
12654 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12655 {
12656 int n_op_args;
12657 switch (opc)
12658 {
12659 case DW_LNS_advance_pc:
12660 case DW_LNS_advance_line:
12661 case DW_LNS_set_file:
12662 case DW_LNS_set_column:
12663 case DW_LNS_fixed_advance_pc:
12664 case DW_LNS_set_isa:
12665 n_op_args = 1;
12666 break;
12667 default:
12668 n_op_args = 0;
12669 break;
12670 }
12671
12672 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12673 opc, n_op_args);
12674 }
12675
12676 /* Write out the information about the files we use. */
12677 output_file_names ();
12678 ASM_OUTPUT_LABEL (asm_out_file, p2);
12679 if (prologue_only)
12680 {
12681 /* Output the marker for the end of the line number info. */
12682 ASM_OUTPUT_LABEL (asm_out_file, l2);
12683 return;
12684 }
12685
12686 if (separate_line_info)
12687 {
12688 dw_line_info_table *table;
12689 size_t i;
12690
12691 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12692 if (table->in_use)
12693 {
12694 output_one_line_info_table (table);
12695 saw_one = true;
12696 }
12697 }
12698 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12699 {
12700 output_one_line_info_table (cold_text_section_line_info);
12701 saw_one = true;
12702 }
12703
12704 /* ??? Some Darwin linkers crash on a .debug_line section with no
12705 sequences. Further, merely a DW_LNE_end_sequence entry is not
12706 sufficient -- the address column must also be initialized.
12707 Make sure to output at least one set_address/end_sequence pair,
12708 choosing .text since that section is always present. */
12709 if (text_section_line_info->in_use || !saw_one)
12710 output_one_line_info_table (text_section_line_info);
12711
12712 /* Output the marker for the end of the line number info. */
12713 ASM_OUTPUT_LABEL (asm_out_file, l2);
12714 }
12715 \f
12716 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12717
12718 static inline bool
12719 need_endianity_attribute_p (bool reverse)
12720 {
12721 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12722 }
12723
12724 /* Given a pointer to a tree node for some base type, return a pointer to
12725 a DIE that describes the given type. REVERSE is true if the type is
12726 to be interpreted in the reverse storage order wrt the target order.
12727
12728 This routine must only be called for GCC type nodes that correspond to
12729 Dwarf base (fundamental) types. */
12730
12731 static dw_die_ref
12732 base_type_die (tree type, bool reverse)
12733 {
12734 dw_die_ref base_type_result;
12735 enum dwarf_type encoding;
12736 bool fpt_used = false;
12737 struct fixed_point_type_info fpt_info;
12738 tree type_bias = NULL_TREE;
12739
12740 /* If this is a subtype that should not be emitted as a subrange type,
12741 use the base type. See subrange_type_for_debug_p. */
12742 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12743 type = TREE_TYPE (type);
12744
12745 switch (TREE_CODE (type))
12746 {
12747 case INTEGER_TYPE:
12748 if ((dwarf_version >= 4 || !dwarf_strict)
12749 && TYPE_NAME (type)
12750 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12751 && DECL_IS_BUILTIN (TYPE_NAME (type))
12752 && DECL_NAME (TYPE_NAME (type)))
12753 {
12754 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12755 if (strcmp (name, "char16_t") == 0
12756 || strcmp (name, "char32_t") == 0)
12757 {
12758 encoding = DW_ATE_UTF;
12759 break;
12760 }
12761 }
12762 if ((dwarf_version >= 3 || !dwarf_strict)
12763 && lang_hooks.types.get_fixed_point_type_info)
12764 {
12765 memset (&fpt_info, 0, sizeof (fpt_info));
12766 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12767 {
12768 fpt_used = true;
12769 encoding = ((TYPE_UNSIGNED (type))
12770 ? DW_ATE_unsigned_fixed
12771 : DW_ATE_signed_fixed);
12772 break;
12773 }
12774 }
12775 if (TYPE_STRING_FLAG (type))
12776 {
12777 if (TYPE_UNSIGNED (type))
12778 encoding = DW_ATE_unsigned_char;
12779 else
12780 encoding = DW_ATE_signed_char;
12781 }
12782 else if (TYPE_UNSIGNED (type))
12783 encoding = DW_ATE_unsigned;
12784 else
12785 encoding = DW_ATE_signed;
12786
12787 if (!dwarf_strict
12788 && lang_hooks.types.get_type_bias)
12789 type_bias = lang_hooks.types.get_type_bias (type);
12790 break;
12791
12792 case REAL_TYPE:
12793 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12794 {
12795 if (dwarf_version >= 3 || !dwarf_strict)
12796 encoding = DW_ATE_decimal_float;
12797 else
12798 encoding = DW_ATE_lo_user;
12799 }
12800 else
12801 encoding = DW_ATE_float;
12802 break;
12803
12804 case FIXED_POINT_TYPE:
12805 if (!(dwarf_version >= 3 || !dwarf_strict))
12806 encoding = DW_ATE_lo_user;
12807 else if (TYPE_UNSIGNED (type))
12808 encoding = DW_ATE_unsigned_fixed;
12809 else
12810 encoding = DW_ATE_signed_fixed;
12811 break;
12812
12813 /* Dwarf2 doesn't know anything about complex ints, so use
12814 a user defined type for it. */
12815 case COMPLEX_TYPE:
12816 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12817 encoding = DW_ATE_complex_float;
12818 else
12819 encoding = DW_ATE_lo_user;
12820 break;
12821
12822 case BOOLEAN_TYPE:
12823 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12824 encoding = DW_ATE_boolean;
12825 break;
12826
12827 default:
12828 /* No other TREE_CODEs are Dwarf fundamental types. */
12829 gcc_unreachable ();
12830 }
12831
12832 base_type_result = new_die_raw (DW_TAG_base_type);
12833
12834 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12835 int_size_in_bytes (type));
12836 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12837
12838 if (need_endianity_attribute_p (reverse))
12839 add_AT_unsigned (base_type_result, DW_AT_endianity,
12840 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12841
12842 add_alignment_attribute (base_type_result, type);
12843
12844 if (fpt_used)
12845 {
12846 switch (fpt_info.scale_factor_kind)
12847 {
12848 case fixed_point_scale_factor_binary:
12849 add_AT_int (base_type_result, DW_AT_binary_scale,
12850 fpt_info.scale_factor.binary);
12851 break;
12852
12853 case fixed_point_scale_factor_decimal:
12854 add_AT_int (base_type_result, DW_AT_decimal_scale,
12855 fpt_info.scale_factor.decimal);
12856 break;
12857
12858 case fixed_point_scale_factor_arbitrary:
12859 /* Arbitrary scale factors cannot be described in standard DWARF,
12860 yet. */
12861 if (!dwarf_strict)
12862 {
12863 /* Describe the scale factor as a rational constant. */
12864 const dw_die_ref scale_factor
12865 = new_die (DW_TAG_constant, comp_unit_die (), type);
12866
12867 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12868 fpt_info.scale_factor.arbitrary.numerator);
12869 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12870 fpt_info.scale_factor.arbitrary.denominator);
12871
12872 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12873 }
12874 break;
12875
12876 default:
12877 gcc_unreachable ();
12878 }
12879 }
12880
12881 if (type_bias)
12882 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12883 dw_scalar_form_constant
12884 | dw_scalar_form_exprloc
12885 | dw_scalar_form_reference,
12886 NULL);
12887
12888 return base_type_result;
12889 }
12890
12891 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12892 named 'auto' in its type: return true for it, false otherwise. */
12893
12894 static inline bool
12895 is_cxx_auto (tree type)
12896 {
12897 if (is_cxx ())
12898 {
12899 tree name = TYPE_IDENTIFIER (type);
12900 if (name == get_identifier ("auto")
12901 || name == get_identifier ("decltype(auto)"))
12902 return true;
12903 }
12904 return false;
12905 }
12906
12907 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12908 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12909
12910 static inline int
12911 is_base_type (tree type)
12912 {
12913 switch (TREE_CODE (type))
12914 {
12915 case INTEGER_TYPE:
12916 case REAL_TYPE:
12917 case FIXED_POINT_TYPE:
12918 case COMPLEX_TYPE:
12919 case BOOLEAN_TYPE:
12920 return 1;
12921
12922 case VOID_TYPE:
12923 case ARRAY_TYPE:
12924 case RECORD_TYPE:
12925 case UNION_TYPE:
12926 case QUAL_UNION_TYPE:
12927 case ENUMERAL_TYPE:
12928 case FUNCTION_TYPE:
12929 case METHOD_TYPE:
12930 case POINTER_TYPE:
12931 case REFERENCE_TYPE:
12932 case NULLPTR_TYPE:
12933 case OFFSET_TYPE:
12934 case LANG_TYPE:
12935 case VECTOR_TYPE:
12936 return 0;
12937
12938 default:
12939 if (is_cxx_auto (type))
12940 return 0;
12941 gcc_unreachable ();
12942 }
12943
12944 return 0;
12945 }
12946
12947 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12948 node, return the size in bits for the type if it is a constant, or else
12949 return the alignment for the type if the type's size is not constant, or
12950 else return BITS_PER_WORD if the type actually turns out to be an
12951 ERROR_MARK node. */
12952
12953 static inline unsigned HOST_WIDE_INT
12954 simple_type_size_in_bits (const_tree type)
12955 {
12956 if (TREE_CODE (type) == ERROR_MARK)
12957 return BITS_PER_WORD;
12958 else if (TYPE_SIZE (type) == NULL_TREE)
12959 return 0;
12960 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12961 return tree_to_uhwi (TYPE_SIZE (type));
12962 else
12963 return TYPE_ALIGN (type);
12964 }
12965
12966 /* Similarly, but return an offset_int instead of UHWI. */
12967
12968 static inline offset_int
12969 offset_int_type_size_in_bits (const_tree type)
12970 {
12971 if (TREE_CODE (type) == ERROR_MARK)
12972 return BITS_PER_WORD;
12973 else if (TYPE_SIZE (type) == NULL_TREE)
12974 return 0;
12975 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12976 return wi::to_offset (TYPE_SIZE (type));
12977 else
12978 return TYPE_ALIGN (type);
12979 }
12980
12981 /* Given a pointer to a tree node for a subrange type, return a pointer
12982 to a DIE that describes the given type. */
12983
12984 static dw_die_ref
12985 subrange_type_die (tree type, tree low, tree high, tree bias,
12986 dw_die_ref context_die)
12987 {
12988 dw_die_ref subrange_die;
12989 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12990
12991 if (context_die == NULL)
12992 context_die = comp_unit_die ();
12993
12994 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12995
12996 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12997 {
12998 /* The size of the subrange type and its base type do not match,
12999 so we need to generate a size attribute for the subrange type. */
13000 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13001 }
13002
13003 add_alignment_attribute (subrange_die, type);
13004
13005 if (low)
13006 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13007 if (high)
13008 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13009 if (bias && !dwarf_strict)
13010 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13011 dw_scalar_form_constant
13012 | dw_scalar_form_exprloc
13013 | dw_scalar_form_reference,
13014 NULL);
13015
13016 return subrange_die;
13017 }
13018
13019 /* Returns the (const and/or volatile) cv_qualifiers associated with
13020 the decl node. This will normally be augmented with the
13021 cv_qualifiers of the underlying type in add_type_attribute. */
13022
13023 static int
13024 decl_quals (const_tree decl)
13025 {
13026 return ((TREE_READONLY (decl)
13027 /* The C++ front-end correctly marks reference-typed
13028 variables as readonly, but from a language (and debug
13029 info) standpoint they are not const-qualified. */
13030 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13031 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13032 | (TREE_THIS_VOLATILE (decl)
13033 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13034 }
13035
13036 /* Determine the TYPE whose qualifiers match the largest strict subset
13037 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13038 qualifiers outside QUAL_MASK. */
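/* For example: if TYPE_QUALS is const|volatile and TYPE has a variant
   qualified with const only, the const qualifier is returned, and the
   caller (modified_type_die) then only has to add the missing
   DW_TAG_volatile_type wrapper.  */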
13039
13040 static int
13041 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13042 {
13043 tree t;
13044 int best_rank = 0, best_qual = 0, max_rank;
13045
13046 type_quals &= qual_mask;
13047 max_rank = popcount_hwi (type_quals) - 1;
13048
13049 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13050 t = TYPE_NEXT_VARIANT (t))
13051 {
13052 int q = TYPE_QUALS (t) & qual_mask;
13053
13054 if ((q & type_quals) == q && q != type_quals
13055 && check_base_type (t, type))
13056 {
13057 int rank = popcount_hwi (q);
13058
13059 if (rank > best_rank)
13060 {
13061 best_rank = rank;
13062 best_qual = q;
13063 }
13064 }
13065 }
13066
13067 return best_qual;
13068 }
13069
13070 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13071 static const dwarf_qual_info_t dwarf_qual_info[] =
13072 {
13073 { TYPE_QUAL_CONST, DW_TAG_const_type },
13074 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13075 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13076 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13077 };
13078 static const unsigned int dwarf_qual_info_size
13079 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13080
13081 /* If DIE is a qualified DIE of some base DIE with the same parent,
13082 return the base DIE, otherwise return NULL. Set MASK to the
13083 qualifiers added compared to the returned DIE. */
13084
13085 static dw_die_ref
13086 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13087 {
13088 unsigned int i;
13089 for (i = 0; i < dwarf_qual_info_size; i++)
13090 if (die->die_tag == dwarf_qual_info[i].t)
13091 break;
13092 if (i == dwarf_qual_info_size)
13093 return NULL;
13094 if (vec_safe_length (die->die_attr) != 1)
13095 return NULL;
13096 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13097 if (type == NULL || type->die_parent != die->die_parent)
13098 return NULL;
13099 *mask |= dwarf_qual_info[i].q;
13100 if (depth)
13101 {
13102 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13103 if (ret)
13104 return ret;
13105 }
13106 return type;
13107 }
13108
13109 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13110 entry that chains the modifiers specified by CV_QUALS in front of the
13111 given type. REVERSE is true if the type is to be interpreted in the
13112 reverse storage order wrt the target order. */
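/* For illustration: requesting 'int' with CV_QUALS = const|volatile yields
   a chain DW_TAG_volatile_type -> DW_TAG_const_type -> DW_TAG_base_type,
   built innermost first by the dwarf_qual_info loop below (assuming no
   already-emitted qualified variant is found and reused).  */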
13113
13114 static dw_die_ref
13115 modified_type_die (tree type, int cv_quals, bool reverse,
13116 dw_die_ref context_die)
13117 {
13118 enum tree_code code = TREE_CODE (type);
13119 dw_die_ref mod_type_die;
13120 dw_die_ref sub_die = NULL;
13121 tree item_type = NULL;
13122 tree qualified_type;
13123 tree name, low, high;
13124 dw_die_ref mod_scope;
13125 /* Only these cv-qualifiers are currently handled. */
13126 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13127 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13128 ENCODE_QUAL_ADDR_SPACE(~0U));
13129 const bool reverse_base_type
13130 = need_endianity_attribute_p (reverse) && is_base_type (type);
13131
13132 if (code == ERROR_MARK)
13133 return NULL;
13134
13135 if (lang_hooks.types.get_debug_type)
13136 {
13137 tree debug_type = lang_hooks.types.get_debug_type (type);
13138
13139 if (debug_type != NULL_TREE && debug_type != type)
13140 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13141 }
13142
13143 cv_quals &= cv_qual_mask;
13144
13145 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13146 tag modifier (and not an attribute) that old consumers won't be
13147 able to handle. */
13148 if (dwarf_version < 3)
13149 cv_quals &= ~TYPE_QUAL_RESTRICT;
13150
13151 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13152 if (dwarf_version < 5)
13153 cv_quals &= ~TYPE_QUAL_ATOMIC;
13154
13155 /* See if we already have the appropriately qualified variant of
13156 this type. */
13157 qualified_type = get_qualified_type (type, cv_quals);
13158
13159 if (qualified_type == sizetype)
13160 {
13161 /* Try not to expose the internal sizetype type's name. */
13162 if (TYPE_NAME (qualified_type)
13163 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13164 {
13165 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13166
13167 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13168 && (TYPE_PRECISION (t)
13169 == TYPE_PRECISION (qualified_type))
13170 && (TYPE_UNSIGNED (t)
13171 == TYPE_UNSIGNED (qualified_type)));
13172 qualified_type = t;
13173 }
13174 else if (qualified_type == sizetype
13175 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13176 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13177 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13178 qualified_type = size_type_node;
13179 }
13180
13181 /* If we do, then we can just use its DIE, if it exists. */
13182 if (qualified_type)
13183 {
13184 mod_type_die = lookup_type_die (qualified_type);
13185
13186 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13187 dealt with specially: the DIE with the attribute, if it exists, is
13188 placed immediately after the regular DIE for the same base type. */
13189 if (mod_type_die
13190 && (!reverse_base_type
13191 || ((mod_type_die = mod_type_die->die_sib) != NULL
13192 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13193 return mod_type_die;
13194 }
13195
13196 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13197
13198 /* Handle C typedef types. */
13199 if (name
13200 && TREE_CODE (name) == TYPE_DECL
13201 && DECL_ORIGINAL_TYPE (name)
13202 && !DECL_ARTIFICIAL (name))
13203 {
13204 tree dtype = TREE_TYPE (name);
13205
13206 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13207 if (qualified_type == dtype && !reverse_base_type)
13208 {
13209 tree origin = decl_ultimate_origin (name);
13210
13211 /* Typedef variants that have an abstract origin don't get their own
13212 type DIE (see gen_typedef_die), so fall back on the ultimate
13213 abstract origin instead. */
13214 if (origin != NULL && origin != name)
13215 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13216 context_die);
13217
13218 /* For a named type, use the typedef. */
13219 gen_type_die (qualified_type, context_die);
13220 return lookup_type_die (qualified_type);
13221 }
13222 else
13223 {
13224 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13225 dquals &= cv_qual_mask;
13226 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13227 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13228 /* cv-unqualified version of named type. Just use
13229 the unnamed type to which it refers. */
13230 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13231 reverse, context_die);
13232 /* Else cv-qualified version of named type; fall through. */
13233 }
13234 }
13235
13236 mod_scope = scope_die_for (type, context_die);
13237
13238 if (cv_quals)
13239 {
13240 int sub_quals = 0, first_quals = 0;
13241 unsigned i;
13242 dw_die_ref first = NULL, last = NULL;
13243
13244 /* Determine a lesser qualified type that most closely matches
13245 this one. Then generate DW_TAG_* entries for the remaining
13246 qualifiers. */
13247 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13248 cv_qual_mask);
13249 if (sub_quals && use_debug_types)
13250 {
13251 bool needed = false;
13252 /* If emitting type units, make sure the order of qualifiers
13253 is canonical. Thus, start from unqualified type if
13254 an earlier qualifier is missing in sub_quals, but some later
13255 one is present there. */
13256 for (i = 0; i < dwarf_qual_info_size; i++)
13257 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13258 needed = true;
13259 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13260 {
13261 sub_quals = 0;
13262 break;
13263 }
13264 }
13265 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13266 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13267 {
13268 /* As not all intermediate qualified DIEs have corresponding
13269 tree types, ensure that qualified DIEs in the same scope
13270 as their DW_AT_type are emitted after their DW_AT_type,
13271 only with other qualified DIEs for the same type possibly
13272 in between them. Determine the range of such qualified
13273 DIEs now (first being the base type, last being corresponding
13274 last qualified DIE for it). */
13275 unsigned int count = 0;
13276 first = qualified_die_p (mod_type_die, &first_quals,
13277 dwarf_qual_info_size);
13278 if (first == NULL)
13279 first = mod_type_die;
13280 gcc_assert ((first_quals & ~sub_quals) == 0);
13281 for (count = 0, last = first;
13282 count < (1U << dwarf_qual_info_size);
13283 count++, last = last->die_sib)
13284 {
13285 int quals = 0;
13286 if (last == mod_scope->die_child)
13287 break;
13288 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13289 != first)
13290 break;
13291 }
13292 }
13293
13294 for (i = 0; i < dwarf_qual_info_size; i++)
13295 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13296 {
13297 dw_die_ref d;
13298 if (first && first != last)
13299 {
13300 for (d = first->die_sib; ; d = d->die_sib)
13301 {
13302 int quals = 0;
13303 qualified_die_p (d, &quals, dwarf_qual_info_size);
13304 if (quals == (first_quals | dwarf_qual_info[i].q))
13305 break;
13306 if (d == last)
13307 {
13308 d = NULL;
13309 break;
13310 }
13311 }
13312 if (d)
13313 {
13314 mod_type_die = d;
13315 continue;
13316 }
13317 }
13318 if (first)
13319 {
13320 d = new_die_raw (dwarf_qual_info[i].t);
13321 add_child_die_after (mod_scope, d, last);
13322 last = d;
13323 }
13324 else
13325 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13326 if (mod_type_die)
13327 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13328 mod_type_die = d;
13329 first_quals |= dwarf_qual_info[i].q;
13330 }
13331 }
13332 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13333 {
13334 dwarf_tag tag = DW_TAG_pointer_type;
13335 if (code == REFERENCE_TYPE)
13336 {
13337 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13338 tag = DW_TAG_rvalue_reference_type;
13339 else
13340 tag = DW_TAG_reference_type;
13341 }
13342 mod_type_die = new_die (tag, mod_scope, type);
13343
13344 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13345 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13346 add_alignment_attribute (mod_type_die, type);
13347 item_type = TREE_TYPE (type);
13348
13349 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13350 if (!ADDR_SPACE_GENERIC_P (as))
13351 {
13352 int action = targetm.addr_space.debug (as);
13353 if (action >= 0)
13354 {
13355 /* Positive values indicate an address_class. */
13356 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13357 }
13358 else
13359 {
13360 /* Negative values indicate an (inverted) segment base reg. */
13361 dw_loc_descr_ref d
13362 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13363 add_AT_loc (mod_type_die, DW_AT_segment, d);
13364 }
13365 }
13366 }
13367 else if (code == INTEGER_TYPE
13368 && TREE_TYPE (type) != NULL_TREE
13369 && subrange_type_for_debug_p (type, &low, &high))
13370 {
13371 tree bias = NULL_TREE;
13372 if (lang_hooks.types.get_type_bias)
13373 bias = lang_hooks.types.get_type_bias (type);
13374 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13375 item_type = TREE_TYPE (type);
13376 }
13377 else if (is_base_type (type))
13378 {
13379 mod_type_die = base_type_die (type, reverse);
13380
13381 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13382 if (reverse_base_type)
13383 {
13384 dw_die_ref after_die
13385 = modified_type_die (type, cv_quals, false, context_die);
13386 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13387 }
13388 else
13389 add_child_die (comp_unit_die (), mod_type_die);
13390
13391 add_pubtype (type, mod_type_die);
13392 }
13393 else
13394 {
13395 gen_type_die (type, context_die);
13396
13397 /* We have to get the type_main_variant here (and pass that to the
13398 `lookup_type_die' routine) because the ..._TYPE node we have
13399 might simply be a *copy* of some original type node (where the
13400 copy was created to help us keep track of typedef names) and
13401 that copy might have a different TYPE_UID from the original
13402 ..._TYPE node. */
13403 if (TREE_CODE (type) == FUNCTION_TYPE
13404 || TREE_CODE (type) == METHOD_TYPE)
13405 {
13406 /* For function/method types, can't just use type_main_variant here,
13407 because that can have different ref-qualifiers for C++,
13408 but try to canonicalize. */
13409 tree main = TYPE_MAIN_VARIANT (type);
13410 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13411 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13412 && check_base_type (t, main)
13413 && check_lang_type (t, type))
13414 return lookup_type_die (t);
13415 return lookup_type_die (type);
13416 }
13417 else if (TREE_CODE (type) != VECTOR_TYPE
13418 && TREE_CODE (type) != ARRAY_TYPE)
13419 return lookup_type_die (type_main_variant (type));
13420 else
13421 /* Vectors have the debugging information in the type,
13422 not the main variant. */
13423 return lookup_type_die (type);
13424 }
13425
13426 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13427 don't output a DW_TAG_typedef, since there isn't one in the
13428 user's program; just attach a DW_AT_name to the type.
13429 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13430 if the base type already has the same name. */
13431 if (name
13432 && ((TREE_CODE (name) != TYPE_DECL
13433 && (qualified_type == TYPE_MAIN_VARIANT (type)
13434 || (cv_quals == TYPE_UNQUALIFIED)))
13435 || (TREE_CODE (name) == TYPE_DECL
13436 && TREE_TYPE (name) == qualified_type
13437 && DECL_NAME (name))))
13438 {
13439 if (TREE_CODE (name) == TYPE_DECL)
13440 /* Could just call add_name_and_src_coords_attributes here,
13441 but since this is a builtin type it doesn't have any
13442 useful source coordinates anyway. */
13443 name = DECL_NAME (name);
13444 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13445 }
13446 /* This probably indicates a bug. */
13447 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13448 {
13449 name = TYPE_IDENTIFIER (type);
13450 add_name_attribute (mod_type_die,
13451 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13452 }
13453
13454 if (qualified_type && !reverse_base_type)
13455 equate_type_number_to_die (qualified_type, mod_type_die);
13456
13457 if (item_type)
13458 /* We must do this after the equate_type_number_to_die call, in case
13459 this is a recursive type. This ensures that the modified_type_die
13460 recursion will terminate even if the type is recursive. Recursive
13461 types are possible in Ada. */
13462 sub_die = modified_type_die (item_type,
13463 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13464 reverse,
13465 context_die);
13466
13467 if (sub_die != NULL)
13468 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13469
13470 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13471 if (TYPE_ARTIFICIAL (type))
13472 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13473
13474 return mod_type_die;
13475 }
13476
13477 /* Generate DIEs for the generic parameters of T.
13478 T must be either a generic type or a generic function.
13479 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13480
13481 static void
13482 gen_generic_params_dies (tree t)
13483 {
13484 tree parms, args;
13485 int parms_num, i;
13486 dw_die_ref die = NULL;
13487 int non_default;
13488
13489 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13490 return;
13491
13492 if (TYPE_P (t))
13493 die = lookup_type_die (t);
13494 else if (DECL_P (t))
13495 die = lookup_decl_die (t);
13496
13497 gcc_assert (die);
13498
13499 parms = lang_hooks.get_innermost_generic_parms (t);
13500 if (!parms)
13501 /* T has no generic parameter. It means T is neither a generic type
13502 nor a generic function. End of story. */
13503 return;
13504
13505 parms_num = TREE_VEC_LENGTH (parms);
13506 args = lang_hooks.get_innermost_generic_args (t);
13507 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13508 non_default = int_cst_value (TREE_CHAIN (args));
13509 else
13510 non_default = TREE_VEC_LENGTH (args);
13511 for (i = 0; i < parms_num; i++)
13512 {
13513 tree parm, arg, arg_pack_elems;
13514 dw_die_ref parm_die;
13515
13516 parm = TREE_VEC_ELT (parms, i);
13517 arg = TREE_VEC_ELT (args, i);
13518 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13519 gcc_assert (parm && TREE_VALUE (parm) && arg);
13520
13521 if (parm && TREE_VALUE (parm) && arg)
13522 {
13523 /* If PARM represents a template parameter pack,
13524 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13525 by DW_TAG_template_*_parameter DIEs for the argument
13526 pack elements of ARG. Note that ARG would then be
13527 an argument pack. */
13528 if (arg_pack_elems)
13529 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13530 arg_pack_elems,
13531 die);
13532 else
13533 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13534 true /* emit name */, die);
13535 if (i >= non_default)
13536 add_AT_flag (parm_die, DW_AT_default_value, 1);
13537 }
13538 }
13539 }
13540
13541 /* Create and return a DIE for PARM which should be
13542 the representation of a generic type parameter.
13543 For instance, in the C++ front end, PARM would be a template parameter.
13544 ARG is the argument to PARM.
13545 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute
13546 set to the name of PARM.
13547 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13548 as a child node. */
13549
13550 static dw_die_ref
13551 generic_parameter_die (tree parm, tree arg,
13552 bool emit_name_p,
13553 dw_die_ref parent_die)
13554 {
13555 dw_die_ref tmpl_die = NULL;
13556 const char *name = NULL;
13557
13558 if (!parm || !DECL_NAME (parm) || !arg)
13559 return NULL;
13560
13561 /* We support non-type generic parameters and arguments,
13562 type generic parameters and arguments, as well as
13563 generic generic parameters (a.k.a. template template parameters in C++)
13564 and arguments. */
13565 if (TREE_CODE (parm) == PARM_DECL)
13566 /* PARM is a nontype generic parameter */
13567 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13568 else if (TREE_CODE (parm) == TYPE_DECL)
13569 /* PARM is a type generic parameter. */
13570 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13571 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13572 /* PARM is a generic generic parameter.
13573 Its DIE is a GNU extension. It shall have a
13574 DW_AT_name attribute to represent the name of the template template
13575 parameter, and a DW_AT_GNU_template_name attribute to represent the
13576 name of the template template argument. */
13577 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13578 parent_die, parm);
13579 else
13580 gcc_unreachable ();
13581
13582 if (tmpl_die)
13583 {
13584 tree tmpl_type;
13585
13586 /* If PARM is a generic parameter pack, it means we are
13587 emitting debug info for a template argument pack element.
13588 In other terms, ARG is a template argument pack element.
13589 In that case, we don't emit any DW_AT_name attribute for
13590 the die. */
13591 if (emit_name_p)
13592 {
13593 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13594 gcc_assert (name);
13595 add_AT_string (tmpl_die, DW_AT_name, name);
13596 }
13597
13598 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13599 {
13600 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13601 TMPL_DIE should have a child DW_AT_type attribute that is set
13602 to the type of the argument to PARM, which is ARG.
13603 If PARM is a type generic parameter, TMPL_DIE should have a
13604 child DW_AT_type that is set to ARG. */
13605 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13606 add_type_attribute (tmpl_die, tmpl_type,
13607 (TREE_THIS_VOLATILE (tmpl_type)
13608 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13609 false, parent_die);
13610 }
13611 else
13612 {
13613 /* So TMPL_DIE is a DIE representing a generic generic template
13614 parameter, a.k.a. a template template parameter in C++, and ARG
13615 is a template. */
13616
13617 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13618 to the name of the argument. */
13619 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13620 if (name)
13621 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13622 }
13623
13624 if (TREE_CODE (parm) == PARM_DECL)
13625 /* So PARM is a non-type generic parameter.
13626 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13627 attribute of TMPL_DIE whose value represents the value
13628 of ARG.
13629 We must be careful here:
13630 the value of ARG might reference some function decls.
13631 We might currently be emitting debug info for a generic
13632 type, and since types are emitted before function decls,
13633 we don't know whether the function decls referenced by ARG
13634 will actually be emitted once the cgraph computations are
13635 done. So we must defer generating the DW_AT_const_value
13636 until cgraph is ready. */
13637 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13638 }
13639
13640 return tmpl_die;
13641 }
13642
13643 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13644 PARM_PACK. PARM_PACK must be a template parameter pack. The returned DIE
13645 will be a child DIE of PARENT_DIE. */
13646
13647 static dw_die_ref
13648 template_parameter_pack_die (tree parm_pack,
13649 tree parm_pack_args,
13650 dw_die_ref parent_die)
13651 {
13652 dw_die_ref die;
13653 int j;
13654
13655 gcc_assert (parent_die && parm_pack);
13656
13657 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13658 add_name_and_src_coords_attributes (die, parm_pack);
13659 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13660 generic_parameter_die (parm_pack,
13661 TREE_VEC_ELT (parm_pack_args, j),
13662 false /* Don't emit DW_AT_name */,
13663 die);
13664 return die;
13665 }
13666
13667 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13668 an enumerated type. */
13669
13670 static inline int
13671 type_is_enum (const_tree type)
13672 {
13673 return TREE_CODE (type) == ENUMERAL_TYPE;
13674 }
13675
13676 /* Return the DBX register number described by a given RTL node. */
13677
13678 static unsigned int
13679 dbx_reg_number (const_rtx rtl)
13680 {
13681 unsigned regno = REGNO (rtl);
13682
13683 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13684
13685 #ifdef LEAF_REG_REMAP
13686 if (crtl->uses_only_leaf_regs)
13687 {
13688 int leaf_reg = LEAF_REG_REMAP (regno);
13689 if (leaf_reg != -1)
13690 regno = (unsigned) leaf_reg;
13691 }
13692 #endif
13693
13694 regno = DBX_REGISTER_NUMBER (regno);
13695 gcc_assert (regno != INVALID_REGNUM);
13696 return regno;
13697 }
13698
13699 /* Optionally add a DW_OP_piece term to a location description expression.
13700 DW_OP_piece is only added if the location description expression does
13701 not already end with DW_OP_piece. */
13702
13703 static void
13704 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13705 {
13706 dw_loc_descr_ref loc;
13707
13708 if (*list_head != NULL)
13709 {
13710 /* Find the end of the chain. */
13711 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13712 ;
13713
13714 if (loc->dw_loc_opc != DW_OP_piece)
13715 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13716 }
13717 }
13718
13719 /* Return a location descriptor that designates a machine register or
13720 zero if there is none. */
13721
13722 static dw_loc_descr_ref
13723 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13724 {
13725 rtx regs;
13726
13727 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13728 return 0;
13729
13730 /* We only use "frame base" when we're sure we're talking about the
13731 post-prologue local stack frame. We do this by *not* running
13732 register elimination until this point, and recognizing the special
13733 argument pointer and soft frame pointer rtx's.
13734 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13735 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13736 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13737 {
13738 dw_loc_descr_ref result = NULL;
13739
13740 if (dwarf_version >= 4 || !dwarf_strict)
13741 {
13742 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13743 initialized);
13744 if (result)
13745 add_loc_descr (&result,
13746 new_loc_descr (DW_OP_stack_value, 0, 0));
13747 }
13748 return result;
13749 }
13750
13751 regs = targetm.dwarf_register_span (rtl);
13752
13753 if (REG_NREGS (rtl) > 1 || regs)
13754 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13755 else
13756 {
13757 unsigned int dbx_regnum = dbx_reg_number (rtl);
13758 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13759 return 0;
13760 return one_reg_loc_descriptor (dbx_regnum, initialized);
13761 }
13762 }
13763
13764 /* Return a location descriptor that designates a machine register for
13765 a given hard register number. */
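/* For illustration: DWARF register 3 is encoded as the single byte
   DW_OP_reg3, whereas register 40 takes DW_OP_regx followed by a ULEB128
   operand of 40.  */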
13766
13767 static dw_loc_descr_ref
13768 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13769 {
13770 dw_loc_descr_ref reg_loc_descr;
13771
13772 if (regno <= 31)
13773 reg_loc_descr
13774 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13775 else
13776 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13777
13778 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13779 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13780
13781 return reg_loc_descr;
13782 }
13783
13784 /* Given an RTL of a register, return a location descriptor that
13785 designates a value that spans more than one register. */
13786
13787 static dw_loc_descr_ref
13788 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13789 enum var_init_status initialized)
13790 {
13791 int size, i;
13792 dw_loc_descr_ref loc_result = NULL;
13793
13794 /* Simple, contiguous registers. */
13795 if (regs == NULL_RTX)
13796 {
13797 unsigned reg = REGNO (rtl);
13798 int nregs;
13799
13800 #ifdef LEAF_REG_REMAP
13801 if (crtl->uses_only_leaf_regs)
13802 {
13803 int leaf_reg = LEAF_REG_REMAP (reg);
13804 if (leaf_reg != -1)
13805 reg = (unsigned) leaf_reg;
13806 }
13807 #endif
13808
13809 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13810 nregs = REG_NREGS (rtl);
13811
13812 /* At present we only track constant-sized pieces. */
13813 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13814 return NULL;
13815 size /= nregs;
13816
13817 loc_result = NULL;
13818 while (nregs--)
13819 {
13820 dw_loc_descr_ref t;
13821
13822 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13823 VAR_INIT_STATUS_INITIALIZED);
13824 add_loc_descr (&loc_result, t);
13825 add_loc_descr_op_piece (&loc_result, size);
13826 ++reg;
13827 }
13828 return loc_result;
13829 }
13830
13831 /* Now onto stupid register sets in non contiguous locations. */
13832
13833 gcc_assert (GET_CODE (regs) == PARALLEL);
13834
13835 /* At present we only track constant-sized pieces. */
13836 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13837 return NULL;
13838 loc_result = NULL;
13839
13840 for (i = 0; i < XVECLEN (regs, 0); ++i)
13841 {
13842 dw_loc_descr_ref t;
13843
13844 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13845 VAR_INIT_STATUS_INITIALIZED);
13846 add_loc_descr (&loc_result, t);
13847 add_loc_descr_op_piece (&loc_result, size);
13848 }
13849
13850 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13851 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13852 return loc_result;
13853 }
13854
13855 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13856
13857 /* Return a location descriptor that designates a constant i,
13858 as a compound operation from constant (i >> shift), constant shift
13859 and DW_OP_shl. */
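/* For illustration: the constant 31 << 32 can be emitted as DW_OP_lit31,
   DW_OP_const1u 32, DW_OP_shl (4 bytes), whereas DW_OP_constu would need a
   6-byte ULEB128 operand (7 bytes in total).  */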
13860
13861 static dw_loc_descr_ref
13862 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13863 {
13864 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13865 add_loc_descr (&ret, int_loc_descriptor (shift));
13866 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13867 return ret;
13868 }
13869
13870 /* Return a location descriptor that designates constant POLY_I. */
13871
13872 static dw_loc_descr_ref
13873 int_loc_descriptor (poly_int64 poly_i)
13874 {
13875 enum dwarf_location_atom op;
13876
13877 HOST_WIDE_INT i;
13878 if (!poly_i.is_constant (&i))
13879 {
13880 /* Create location descriptions for the non-constant part and
13881 add any constant offset at the end. */
13882 dw_loc_descr_ref ret = NULL;
13883 HOST_WIDE_INT constant = poly_i.coeffs[0];
13884 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13885 {
13886 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13887 if (coeff != 0)
13888 {
13889 dw_loc_descr_ref start = ret;
13890 unsigned int factor;
13891 int bias;
13892 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13893 (j, &factor, &bias);
13894
13895 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13896 add COEFF * (REGNO / FACTOR) now and subtract
13897 COEFF * BIAS from the final constant part. */
13898 constant -= coeff * bias;
13899 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13900 if (coeff % factor == 0)
13901 coeff /= factor;
13902 else
13903 {
13904 int amount = exact_log2 (factor);
13905 gcc_assert (amount >= 0);
13906 add_loc_descr (&ret, int_loc_descriptor (amount));
13907 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13908 }
13909 if (coeff != 1)
13910 {
13911 add_loc_descr (&ret, int_loc_descriptor (coeff));
13912 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13913 }
13914 if (start)
13915 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13916 }
13917 }
13918 loc_descr_plus_const (&ret, constant);
13919 return ret;
13920 }
13921
13922 /* Pick the smallest representation of a constant, rather than just
13923 defaulting to the LEB encoding. */
13924 if (i >= 0)
13925 {
13926 int clz = clz_hwi (i);
13927 int ctz = ctz_hwi (i);
13928 if (i <= 31)
13929 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13930 else if (i <= 0xff)
13931 op = DW_OP_const1u;
13932 else if (i <= 0xffff)
13933 op = DW_OP_const2u;
13934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13935 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13936 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13937 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13938 while DW_OP_const4u is 5 bytes. */
13939 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13940 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13941 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13942 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13943 while DW_OP_const4u is 5 bytes. */
13944 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13945
13946 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13947 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13948 <= 4)
13949 {
13950 /* As i >= 2**31, the double cast above will yield a negative number.
13951 Since wrapping is defined in DWARF expressions we can output big
13952 positive integers as small negative ones, regardless of the size
13953 of host wide ints.
13954
13955 Here, since the evaluator will handle 32-bit values and since i >=
13956 2**31, we know it's going to be interpreted as a negative literal:
13957 store it this way if we can do better than 5 bytes this way. */
13958 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13959 }
13960 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13961 op = DW_OP_const4u;
13962
13963 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13964 least 6 bytes: see if we can do better before falling back to it. */
13965 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13966 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13967 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13968 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13969 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13970 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13971 >= HOST_BITS_PER_WIDE_INT)
13972 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13973 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13974 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13975 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13976 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13977 && size_of_uleb128 (i) > 6)
13978 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13979 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13980 else
13981 op = DW_OP_constu;
13982 }
13983 else
13984 {
13985 if (i >= -0x80)
13986 op = DW_OP_const1s;
13987 else if (i >= -0x8000)
13988 op = DW_OP_const2s;
13989 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13990 {
13991 if (size_of_int_loc_descriptor (i) < 5)
13992 {
13993 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13994 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13995 return ret;
13996 }
13997 op = DW_OP_const4s;
13998 }
13999 else
14000 {
14001 if (size_of_int_loc_descriptor (i)
14002 < (unsigned long) 1 + size_of_sleb128 (i))
14003 {
14004 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14005 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14006 return ret;
14007 }
14008 op = DW_OP_consts;
14009 }
14010 }
14011
14012 return new_loc_descr (op, i, 0);
14013 }
14014
14015 /* Likewise, for unsigned constants. */
14016
14017 static dw_loc_descr_ref
14018 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14019 {
14020 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14021 const unsigned HOST_WIDE_INT max_uint
14022 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14023
14024 /* If possible, use the clever signed constants handling. */
14025 if (i <= max_int)
14026 return int_loc_descriptor ((HOST_WIDE_INT) i);
14027
14028 /* Here, we are left with positive numbers that cannot be represented as
14029 HOST_WIDE_INT, i.e.:
14030 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14031
14032 Using a DW_OP_const4u/const8u operation to encode them consumes a lot
14033 of bytes, whereas it may be better to output a negative integer: thanks
14034 to integer wrapping, we know that:
14035 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
14036 = x - 2 * (max (HOST_WIDE_INT) + 1)
14037 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14038 small negative integers. Let's try that in cases where it will clearly
14039 improve the encoding: there is no gain in turning DW_OP_const4u into
14040 DW_OP_const4s. */
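  /* For illustration: with DWARF2_ADDR_SIZE == 8 and a 64-bit
     HOST_WIDE_INT, i == 0xfffffffffffffffe maps to second_shift == -2
     below and is emitted as DW_OP_const1s -2 (2 bytes) rather than
     DW_OP_const8u (9 bytes).  */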
14041 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14042 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14043 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14044 {
14045 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14046
14047 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14048 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14049 const HOST_WIDE_INT second_shift
14050 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14051
14052 /* So we finally have:
14053 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14054 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14055 return int_loc_descriptor (second_shift);
14056 }
14057
14058 /* Last chance: fallback to a simple constant operation. */
14059 return new_loc_descr
14060 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14061 ? DW_OP_const4u
14062 : DW_OP_const8u,
14063 i, 0);
14064 }
14065
14066 /* Generate and return a location description that computes the unsigned
14067 comparison of the two stack top entries (a OP b where b is the top-most
14068 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14069 LE_EXPR, GT_EXPR or GE_EXPR. */
14070
14071 static dw_loc_descr_ref
14072 uint_comparison_loc_list (enum tree_code kind)
14073 {
14074 enum dwarf_location_atom op, flip_op;
14075 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14076
14077 switch (kind)
14078 {
14079 case LT_EXPR:
14080 op = DW_OP_lt;
14081 break;
14082 case LE_EXPR:
14083 op = DW_OP_le;
14084 break;
14085 case GT_EXPR:
14086 op = DW_OP_gt;
14087 break;
14088 case GE_EXPR:
14089 op = DW_OP_ge;
14090 break;
14091 default:
14092 gcc_unreachable ();
14093 }
14094
14095 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14096 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14097
14098 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14099 possible to perform unsigned comparisons: we just have to distinguish
14100 two cases:
14101
14102 1. when a and b have the same sign (as signed integers); then we should
14103 return: a OP(signed) b;
14104
14105 2. when a is a negative signed integer while b is a positive one, then a
14106 is a greater unsigned integer than b; likewise when a and b's roles
14107 are flipped.
14108
14109 So first, compare the sign of the two operands. */
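  /* Schematically, the expression built below is:
       DW_OP_over, DW_OP_over, DW_OP_xor, DW_OP_lit0, DW_OP_lt, DW_OP_bra L1,
       <op>, DW_OP_skip L2, L1: <flip_op>, L2: DW_OP_nop
     where L1 and L2 stand for the two branch targets.  */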
14110 ret = new_loc_descr (DW_OP_over, 0, 0);
14111 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14112 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14113 /* If they have different signs (i.e. different sign bits), then the stack
14114 top value now has its sign bit set and is thus smaller than zero when
14115 interpreted as a signed integer. */
14116 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14117 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14118 add_loc_descr (&ret, bra_node);
14119
14120 /* We are in case 1. At this point, we know both operands have the same
14121 sign, so it's safe to use the built-in signed comparison. */
14122 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14123 add_loc_descr (&ret, jmp_node);
14124
14125 /* We are in case 2. Here, we know both operands do not have the same sign,
14126 so we have to flip the signed comparison. */
14127 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14128 tmp = new_loc_descr (flip_op, 0, 0);
14129 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14130 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14131 add_loc_descr (&ret, tmp);
14132
14133 /* This dummy operation is necessary to make the two branches join. */
14134 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14135 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14136 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14137 add_loc_descr (&ret, tmp);
14138
14139 return ret;
14140 }
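/* As a sketch of what the code above builds, the sequence for KIND == LT_EXPR
   is (using the same notation as the CLZ/POPCOUNT comments further below):
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     DW_OP_lt DW_OP_skip <L2>
   L1: DW_OP_gt
   L2: DW_OP_nop  */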
14141
14142 /* Likewise, but takes the location description lists (might be destructive on
14143 them). Return NULL if either is NULL or if concatenation fails. */
14144
14145 static dw_loc_list_ref
14146 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14147 enum tree_code kind)
14148 {
14149 if (left == NULL || right == NULL)
14150 return NULL;
14151
14152 add_loc_list (&left, right);
14153 if (left == NULL)
14154 return NULL;
14155
14156 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14157 return left;
14158 }
14159
14160 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14161 without actually allocating it. */
14162
14163 static unsigned long
14164 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14165 {
14166 return size_of_int_loc_descriptor (i >> shift)
14167 + size_of_int_loc_descriptor (shift)
14168 + 1;
14169 }
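/* For instance, for I = 0x1f000000 and SHIFT = 24 the shifted form
   DW_OP_lit31 DW_OP_lit24 DW_OP_shl takes 1 + 1 + 1 = 3 bytes, against the
   5 bytes of a plain DW_OP_const4u <0x1f000000>.  */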
14170
14171 /* Return size_of_locs (int_loc_descriptor (i)) without
14172 actually allocating it. */
14173
14174 static unsigned long
14175 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14176 {
14177 unsigned long s;
14178
14179 if (i >= 0)
14180 {
14181 int clz, ctz;
14182 if (i <= 31)
14183 return 1;
14184 else if (i <= 0xff)
14185 return 2;
14186 else if (i <= 0xffff)
14187 return 3;
14188 clz = clz_hwi (i);
14189 ctz = ctz_hwi (i);
14190 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14191 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14192 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14193 - clz - 5);
14194 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14195 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14196 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14197 - clz - 8);
14198 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14199 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14200 <= 4)
14201 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14202 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14203 return 5;
14204 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14205 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14206 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14207 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14208 - clz - 8);
14209 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14210 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14211 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14212 - clz - 16);
14213 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14214 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14215 && s > 6)
14216 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14217 - clz - 32);
14218 else
14219 return 1 + s;
14220 }
14221 else
14222 {
14223 if (i >= -0x80)
14224 return 2;
14225 else if (i >= -0x8000)
14226 return 3;
14227 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14228 {
14229 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14230 {
14231 s = size_of_int_loc_descriptor (-i) + 1;
14232 if (s < 5)
14233 return s;
14234 }
14235 return 5;
14236 }
14237 else
14238 {
14239 unsigned long r = 1 + size_of_sleb128 (i);
14240 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14241 {
14242 s = size_of_int_loc_descriptor (-i) + 1;
14243 if (s < r)
14244 return s;
14245 }
14246 return r;
14247 }
14248 }
14249 }
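/* The sizes returned above follow directly from the DWARF constant encodings:
   DW_OP_lit0..DW_OP_lit31 take 1 byte, DW_OP_const1u/1s 2 bytes,
   DW_OP_const2u/2s 3 bytes, DW_OP_const4u/4s 5 bytes, DW_OP_const8u 9 bytes,
   and DW_OP_constu/consts 1 byte plus the uleb128/sleb128 encoding of the
   value.  E.g. size_of_int_loc_descriptor (300) is 3 (DW_OP_const2u) and
   size_of_int_loc_descriptor (-100) is 2 (DW_OP_const1s).  */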
14250
14251 /* Return a location description representing the "address" of an integer
14252 value. This can appear only as a top-level expression. */
14253
14254 static dw_loc_descr_ref
14255 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14256 {
14257 int litsize;
14258 dw_loc_descr_ref loc_result = NULL;
14259
14260 if (!(dwarf_version >= 4 || !dwarf_strict))
14261 return NULL;
14262
14263 litsize = size_of_int_loc_descriptor (i);
14264 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14265 is more compact. For DW_OP_stack_value we need:
14266 litsize + 1 (DW_OP_stack_value)
14267 and for DW_OP_implicit_value:
14268 1 (DW_OP_implicit_value) + 1 (length) + size. */
14269 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14270 {
14271 loc_result = int_loc_descriptor (i);
14272 add_loc_descr (&loc_result,
14273 new_loc_descr (DW_OP_stack_value, 0, 0));
14274 return loc_result;
14275 }
14276
14277 loc_result = new_loc_descr (DW_OP_implicit_value,
14278 size, 0);
14279 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14280 loc_result->dw_loc_oprnd2.v.val_int = i;
14281 return loc_result;
14282 }
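/* For example, with SIZE == 4, I == 5 and DWARF2_ADDR_SIZE >= 4, the
   DW_OP_stack_value form wins: DW_OP_lit5 DW_OP_stack_value is 2 bytes
   against the 6 bytes of DW_OP_implicit_value 4 <5>.  Conversely, when SIZE
   exceeds DWARF2_ADDR_SIZE the value cannot live on the DWARF stack, so the
   DW_OP_implicit_value form is used instead.  */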
14283
14284 /* Return a location descriptor that designates a base+offset location. */
14285
14286 static dw_loc_descr_ref
14287 based_loc_descr (rtx reg, poly_int64 offset,
14288 enum var_init_status initialized)
14289 {
14290 unsigned int regno;
14291 dw_loc_descr_ref result;
14292 dw_fde_ref fde = cfun->fde;
14293
14294 /* We only use "frame base" when we're sure we're talking about the
14295 post-prologue local stack frame. We do this by *not* running
14296 register elimination until this point, and recognizing the special
14297 argument pointer and soft frame pointer rtx's. */
14298 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14299 {
14300 rtx elim = (ira_use_lra_p
14301 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14302 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14303
14304 if (elim != reg)
14305 {
14306 elim = strip_offset_and_add (elim, &offset);
14307 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14308 && (elim == hard_frame_pointer_rtx
14309 || elim == stack_pointer_rtx))
14310 || elim == (frame_pointer_needed
14311 ? hard_frame_pointer_rtx
14312 : stack_pointer_rtx));
14313
14314 /* If the drap register is used to align the stack, use frame
14315 pointer + offset to access stack variables. If the stack
14316 is aligned without drap, use stack pointer + offset to
14317 access stack variables. */
14318 if (crtl->stack_realign_tried
14319 && reg == frame_pointer_rtx)
14320 {
14321 int base_reg
14322 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14323 ? HARD_FRAME_POINTER_REGNUM
14324 : REGNO (elim));
14325 return new_reg_loc_descr (base_reg, offset);
14326 }
14327
14328 gcc_assert (frame_pointer_fb_offset_valid);
14329 offset += frame_pointer_fb_offset;
14330 HOST_WIDE_INT const_offset;
14331 if (offset.is_constant (&const_offset))
14332 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14333 else
14334 {
14335 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14336 loc_descr_plus_const (&ret, offset);
14337 return ret;
14338 }
14339 }
14340 }
14341
14342 regno = REGNO (reg);
14343 #ifdef LEAF_REG_REMAP
14344 if (crtl->uses_only_leaf_regs)
14345 {
14346 int leaf_reg = LEAF_REG_REMAP (regno);
14347 if (leaf_reg != -1)
14348 regno = (unsigned) leaf_reg;
14349 }
14350 #endif
14351 regno = DWARF_FRAME_REGNUM (regno);
14352
14353 HOST_WIDE_INT const_offset;
14354 if (!optimize && fde
14355 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14356 && offset.is_constant (&const_offset))
14357 {
14358 /* Use cfa+offset to represent the location of arguments passed
14359 on the stack when drap is used to align the stack.
14360 Only do this when not optimizing; for optimized code var-tracking
14361 is supposed to track where the arguments live, and the register
14362 used as vdrap or drap in one spot might be used for something
14363 else in another part of the routine. */
14364 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14365 }
14366
14367 result = new_reg_loc_descr (regno, offset);
14368
14369 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14370 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14371
14372 return result;
14373 }
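/* As a rough sketch of the common cases handled above: a frame_pointer_rtx or
   arg_pointer_rtx based address with a constant offset is normally emitted as
   DW_OP_fbreg <offset + frame_pointer_fb_offset>, while an ordinary hard
   register base goes through new_reg_loc_descr, i.e. a register + offset
   operation in the DW_OP_breg<n>/DW_OP_bregx style.  */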
14374
14375 /* Return true if this RTL expression describes a base+offset calculation. */
14376
14377 static inline int
14378 is_based_loc (const_rtx rtl)
14379 {
14380 return (GET_CODE (rtl) == PLUS
14381 && ((REG_P (XEXP (rtl, 0))
14382 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14383 && CONST_INT_P (XEXP (rtl, 1)))));
14384 }
14385
14386 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14387 failed. */
14388
14389 static dw_loc_descr_ref
14390 tls_mem_loc_descriptor (rtx mem)
14391 {
14392 tree base;
14393 dw_loc_descr_ref loc_result;
14394
14395 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14396 return NULL;
14397
14398 base = get_base_address (MEM_EXPR (mem));
14399 if (base == NULL
14400 || !VAR_P (base)
14401 || !DECL_THREAD_LOCAL_P (base))
14402 return NULL;
14403
14404 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14405 if (loc_result == NULL)
14406 return NULL;
14407
14408 if (maybe_ne (MEM_OFFSET (mem), 0))
14409 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14410
14411 return loc_result;
14412 }
14413
14414 /* Output debug info about the reason why we failed to expand an expression
14415 as a DWARF expression. */
14416
14417 static void
14418 expansion_failed (tree expr, rtx rtl, char const *reason)
14419 {
14420 if (dump_file && (dump_flags & TDF_DETAILS))
14421 {
14422 fprintf (dump_file, "Failed to expand as dwarf: ");
14423 if (expr)
14424 print_generic_expr (dump_file, expr, dump_flags);
14425 if (rtl)
14426 {
14427 fprintf (dump_file, "\n");
14428 print_rtl (dump_file, rtl);
14429 }
14430 fprintf (dump_file, "\nReason: %s\n", reason);
14431 }
14432 }
14433
14434 /* Helper function for const_ok_for_output. */
14435
14436 static bool
14437 const_ok_for_output_1 (rtx rtl)
14438 {
14439 if (targetm.const_not_ok_for_debug_p (rtl))
14440 {
14441 if (GET_CODE (rtl) != UNSPEC)
14442 {
14443 expansion_failed (NULL_TREE, rtl,
14444 "Expression rejected for debug by the backend.\n");
14445 return false;
14446 }
14447
14448 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14449 the target hook doesn't explicitly allow it in debug info, assume
14450 we can't express it in the debug info. */
14451 /* Don't complain about TLS UNSPECs, those are just too hard to
14452 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14453 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14454 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14455 if (flag_checking
14456 && (XVECLEN (rtl, 0) == 0
14457 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14458 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14459 inform (current_function_decl
14460 ? DECL_SOURCE_LOCATION (current_function_decl)
14461 : UNKNOWN_LOCATION,
14462 #if NUM_UNSPEC_VALUES > 0
14463 "non-delegitimized UNSPEC %s (%d) found in variable location",
14464 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14465 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14466 XINT (rtl, 1));
14467 #else
14468 "non-delegitimized UNSPEC %d found in variable location",
14469 XINT (rtl, 1));
14470 #endif
14471 expansion_failed (NULL_TREE, rtl,
14472 "UNSPEC hasn't been delegitimized.\n");
14473 return false;
14474 }
14475
14476 if (CONST_POLY_INT_P (rtl))
14477 return false;
14478
14479 if (targetm.const_not_ok_for_debug_p (rtl))
14480 {
14481 expansion_failed (NULL_TREE, rtl,
14482 "Expression rejected for debug by the backend.\n");
14483 return false;
14484 }
14485
14486 /* FIXME: Refer to PR60655. It is possible for simplification
14487 of rtl expressions in var tracking to produce such expressions.
14488 We should really identify / validate expressions
14489 enclosed in CONST that can be handled by assemblers on various
14490 targets and only handle legitimate cases here. */
14491 switch (GET_CODE (rtl))
14492 {
14493 case SYMBOL_REF:
14494 break;
14495 case NOT:
14496 case NEG:
14497 return false;
14498 default:
14499 return true;
14500 }
14501
14502 if (CONSTANT_POOL_ADDRESS_P (rtl))
14503 {
14504 bool marked;
14505 get_pool_constant_mark (rtl, &marked);
14506 /* If all references to this pool constant were optimized away,
14507 it was not output and thus we can't represent it. */
14508 if (!marked)
14509 {
14510 expansion_failed (NULL_TREE, rtl,
14511 "Constant was removed from constant pool.\n");
14512 return false;
14513 }
14514 }
14515
14516 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14517 return false;
14518
14519 /* Avoid references to external symbols in debug info: on several targets
14520 the linker might even refuse to link when linking a shared library,
14521 and in many other cases the relocations for .debug_info/.debug_loc are
14522 dropped, so the address becomes zero anyway. Hidden symbols, which are
14523 guaranteed to be defined within the same shared library or executable, are fine. */
14524 if (SYMBOL_REF_EXTERNAL_P (rtl))
14525 {
14526 tree decl = SYMBOL_REF_DECL (rtl);
14527
14528 if (decl == NULL || !targetm.binds_local_p (decl))
14529 {
14530 expansion_failed (NULL_TREE, rtl,
14531 "Symbol not defined in current TU.\n");
14532 return false;
14533 }
14534 }
14535
14536 return true;
14537 }
14538
14539 /* Return true if constant RTL can be emitted in DW_OP_addr or
14540 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14541 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14542
14543 static bool
14544 const_ok_for_output (rtx rtl)
14545 {
14546 if (GET_CODE (rtl) == SYMBOL_REF)
14547 return const_ok_for_output_1 (rtl);
14548
14549 if (GET_CODE (rtl) == CONST)
14550 {
14551 subrtx_var_iterator::array_type array;
14552 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14553 if (!const_ok_for_output_1 (*iter))
14554 return false;
14555 return true;
14556 }
14557
14558 return true;
14559 }
14560
14561 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14562 if possible, NULL otherwise. */
14563
14564 static dw_die_ref
14565 base_type_for_mode (machine_mode mode, bool unsignedp)
14566 {
14567 dw_die_ref type_die;
14568 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14569
14570 if (type == NULL)
14571 return NULL;
14572 switch (TREE_CODE (type))
14573 {
14574 case INTEGER_TYPE:
14575 case REAL_TYPE:
14576 break;
14577 default:
14578 return NULL;
14579 }
14580 type_die = lookup_type_die (type);
14581 if (!type_die)
14582 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14583 comp_unit_die ());
14584 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14585 return NULL;
14586 return type_die;
14587 }
14588
14589 /* For the OP descriptor, assumed to be in unsigned MODE, convert it to an
14590 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14591 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14592 not possible. */
14593
14594 static dw_loc_descr_ref
14595 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14596 {
14597 machine_mode outer_mode = mode;
14598 dw_die_ref type_die;
14599 dw_loc_descr_ref cvt;
14600
14601 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14602 {
14603 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14604 return op;
14605 }
14606 type_die = base_type_for_mode (outer_mode, 1);
14607 if (type_die == NULL)
14608 return NULL;
14609 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14610 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14611 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14612 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14613 add_loc_descr (&op, cvt);
14614 return op;
14615 }
14616
14617 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14618
14619 static dw_loc_descr_ref
14620 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14621 dw_loc_descr_ref op1)
14622 {
14623 dw_loc_descr_ref ret = op0;
14624 add_loc_descr (&ret, op1);
14625 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14626 if (STORE_FLAG_VALUE != 1)
14627 {
14628 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14629 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14630 }
14631 return ret;
14632 }
14633
14634 /* Subroutine of scompare_loc_descriptor for the case in which we're
14635 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14636 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14637
14638 static dw_loc_descr_ref
14639 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14640 scalar_int_mode op_mode,
14641 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14642 {
14643 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14644 dw_loc_descr_ref cvt;
14645
14646 if (type_die == NULL)
14647 return NULL;
14648 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14649 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14650 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14651 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14652 add_loc_descr (&op0, cvt);
14653 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14654 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14655 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14656 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14657 add_loc_descr (&op1, cvt);
14658 return compare_loc_descriptor (op, op0, op1);
14659 }
14660
14661 /* Subroutine of scompare_loc_descriptor for the case in which we're
14662 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14663 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14664
14665 static dw_loc_descr_ref
14666 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14667 scalar_int_mode op_mode,
14668 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14669 {
14670 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14671 /* For eq/ne, if the operands are known to be zero-extended,
14672 there is no need to do the fancy shifting up. */
14673 if (op == DW_OP_eq || op == DW_OP_ne)
14674 {
14675 dw_loc_descr_ref last0, last1;
14676 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14677 ;
14678 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14679 ;
14680 /* deref_size zero extends, and for constants we can check
14681 whether they are zero extended or not. */
14682 if (((last0->dw_loc_opc == DW_OP_deref_size
14683 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14684 || (CONST_INT_P (XEXP (rtl, 0))
14685 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14686 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14687 && ((last1->dw_loc_opc == DW_OP_deref_size
14688 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14689 || (CONST_INT_P (XEXP (rtl, 1))
14690 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14691 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14692 return compare_loc_descriptor (op, op0, op1);
14693
14694 /* EQ/NE comparison against constant in narrower type than
14695 DWARF2_ADDR_SIZE can be performed either as
14696 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14697 DW_OP_{eq,ne}
14698 or
14699 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14700 DW_OP_{eq,ne}. Pick whatever is shorter. */
14701 if (CONST_INT_P (XEXP (rtl, 1))
14702 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14703 && (size_of_int_loc_descriptor (shift) + 1
14704 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14705 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14706 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14707 & GET_MODE_MASK (op_mode))))
14708 {
14709 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14710 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14711 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14712 & GET_MODE_MASK (op_mode));
14713 return compare_loc_descriptor (op, op0, op1);
14714 }
14715 }
14716 add_loc_descr (&op0, int_loc_descriptor (shift));
14717 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14718 if (CONST_INT_P (XEXP (rtl, 1)))
14719 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14720 else
14721 {
14722 add_loc_descr (&op1, int_loc_descriptor (shift));
14723 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14724 }
14725 return compare_loc_descriptor (op, op0, op1);
14726 }
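/* A concrete instance of the generic fall-through path above, assuming
   DWARF2_ADDR_SIZE == 4 and a 2-byte OP_MODE, so SHIFT == 16:
     <op0> DW_OP_lit16 DW_OP_shl <op1> DW_OP_lit16 DW_OP_shl <op>
   Shifting both operands left by 16 moves their sign bits into the sign bit
   of the 4-byte DWARF stack slot, so the signed comparison then gives the
   right answer for the narrow values.  */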
14727
14728 /* Return location descriptor for signed comparison OP RTL. */
14729
14730 static dw_loc_descr_ref
14731 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14732 machine_mode mem_mode)
14733 {
14734 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14735 dw_loc_descr_ref op0, op1;
14736
14737 if (op_mode == VOIDmode)
14738 op_mode = GET_MODE (XEXP (rtl, 1));
14739 if (op_mode == VOIDmode)
14740 return NULL;
14741
14742 scalar_int_mode int_op_mode;
14743 if (dwarf_strict
14744 && dwarf_version < 5
14745 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14746 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14747 return NULL;
14748
14749 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14750 VAR_INIT_STATUS_INITIALIZED);
14751 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14752 VAR_INIT_STATUS_INITIALIZED);
14753
14754 if (op0 == NULL || op1 == NULL)
14755 return NULL;
14756
14757 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14758 {
14759 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14760 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14761
14762 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14763 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14764 }
14765 return compare_loc_descriptor (op, op0, op1);
14766 }
14767
14768 /* Return location descriptor for unsigned comparison OP RTL. */
14769
14770 static dw_loc_descr_ref
14771 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14772 machine_mode mem_mode)
14773 {
14774 dw_loc_descr_ref op0, op1;
14775
14776 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14777 if (test_op_mode == VOIDmode)
14778 test_op_mode = GET_MODE (XEXP (rtl, 1));
14779
14780 scalar_int_mode op_mode;
14781 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14782 return NULL;
14783
14784 if (dwarf_strict
14785 && dwarf_version < 5
14786 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14787 return NULL;
14788
14789 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14790 VAR_INIT_STATUS_INITIALIZED);
14791 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14792 VAR_INIT_STATUS_INITIALIZED);
14793
14794 if (op0 == NULL || op1 == NULL)
14795 return NULL;
14796
14797 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14798 {
14799 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14800 dw_loc_descr_ref last0, last1;
14801 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14802 ;
14803 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14804 ;
14805 if (CONST_INT_P (XEXP (rtl, 0)))
14806 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14807 /* deref_size zero extends, so no need to mask it again. */
14808 else if (last0->dw_loc_opc != DW_OP_deref_size
14809 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14810 {
14811 add_loc_descr (&op0, int_loc_descriptor (mask));
14812 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14813 }
14814 if (CONST_INT_P (XEXP (rtl, 1)))
14815 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14816 /* deref_size zero extends, so no need to mask it again. */
14817 else if (last1->dw_loc_opc != DW_OP_deref_size
14818 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14819 {
14820 add_loc_descr (&op1, int_loc_descriptor (mask));
14821 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14822 }
14823 }
14824 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14825 {
14826 HOST_WIDE_INT bias = 1;
14827 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14828 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14829 if (CONST_INT_P (XEXP (rtl, 1)))
14830 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14831 + INTVAL (XEXP (rtl, 1)));
14832 else
14833 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14834 bias, 0));
14835 }
14836 return compare_loc_descriptor (op, op0, op1);
14837 }
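/* For OP_MODE exactly as wide as DWARF2_ADDR_SIZE, the bias trick above adds
   2**(DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1) to both operands (modulo the
   stack width), which flips their sign bits; the signed comparison then
   orders them the same way an unsigned comparison would.  E.g. with 4-byte
   addresses, a = 0xfffffff0 and b = 1 become 0x7ffffff0 and 0x80000001, and
   the signed DW_OP_gt correctly reports a > b.  */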
14838
14839 /* Return location descriptor for {U,S}{MIN,MAX}. */
14840
14841 static dw_loc_descr_ref
14842 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14843 machine_mode mem_mode)
14844 {
14845 enum dwarf_location_atom op;
14846 dw_loc_descr_ref op0, op1, ret;
14847 dw_loc_descr_ref bra_node, drop_node;
14848
14849 scalar_int_mode int_mode;
14850 if (dwarf_strict
14851 && dwarf_version < 5
14852 && (!is_a <scalar_int_mode> (mode, &int_mode)
14853 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14854 return NULL;
14855
14856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14857 VAR_INIT_STATUS_INITIALIZED);
14858 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14859 VAR_INIT_STATUS_INITIALIZED);
14860
14861 if (op0 == NULL || op1 == NULL)
14862 return NULL;
14863
14864 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14865 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14866 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14867 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14868 {
14869 /* Checked by the caller. */
14870 int_mode = as_a <scalar_int_mode> (mode);
14871 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14874 add_loc_descr (&op0, int_loc_descriptor (mask));
14875 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14876 add_loc_descr (&op1, int_loc_descriptor (mask));
14877 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14878 }
14879 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14880 {
14881 HOST_WIDE_INT bias = 1;
14882 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14883 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14884 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14885 }
14886 }
14887 else if (is_a <scalar_int_mode> (mode, &int_mode)
14888 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14889 {
14890 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14891 add_loc_descr (&op0, int_loc_descriptor (shift));
14892 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14893 add_loc_descr (&op1, int_loc_descriptor (shift));
14894 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14895 }
14896 else if (is_a <scalar_int_mode> (mode, &int_mode)
14897 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14898 {
14899 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14900 dw_loc_descr_ref cvt;
14901 if (type_die == NULL)
14902 return NULL;
14903 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14904 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14905 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14906 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14907 add_loc_descr (&op0, cvt);
14908 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14909 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14910 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14911 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14912 add_loc_descr (&op1, cvt);
14913 }
14914
14915 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14916 op = DW_OP_lt;
14917 else
14918 op = DW_OP_gt;
14919 ret = op0;
14920 add_loc_descr (&ret, op1);
14921 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14922 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14923 add_loc_descr (&ret, bra_node);
14924 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14925 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14926 add_loc_descr (&ret, drop_node);
14927 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14928 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14929 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14930 && is_a <scalar_int_mode> (mode, &int_mode)
14931 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14932 ret = convert_descriptor_to_mode (int_mode, ret);
14933 return ret;
14934 }
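/* Ignoring the mode-dependent masking/bias/convert fixups above, the core
   sequence built for MIN (a, b) is:
     <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L1>
     DW_OP_swap
   L1: DW_OP_drop
   i.e. both values stay on the stack and the comparison decides which copy
   survives the final DW_OP_drop; MAX uses DW_OP_gt instead of DW_OP_lt.  */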
14935
14936 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14937 after converting both arguments to TYPE_DIE, then convert the result back
14938 to MODE (untyped if MODE is no wider than DWARF2_ADDR_SIZE). */
14939
14940 static dw_loc_descr_ref
14941 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14942 scalar_int_mode mode, machine_mode mem_mode)
14943 {
14944 dw_loc_descr_ref cvt, op0, op1;
14945
14946 if (type_die == NULL)
14947 return NULL;
14948 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14949 VAR_INIT_STATUS_INITIALIZED);
14950 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14951 VAR_INIT_STATUS_INITIALIZED);
14952 if (op0 == NULL || op1 == NULL)
14953 return NULL;
14954 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14955 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14956 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14957 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14958 add_loc_descr (&op0, cvt);
14959 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14960 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14961 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14962 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14963 add_loc_descr (&op1, cvt);
14964 add_loc_descr (&op0, op1);
14965 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14966 return convert_descriptor_to_mode (mode, op0);
14967 }
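/* The shape of the result is thus:
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
   followed by whatever convert_descriptor_to_mode appends, i.e. a final
   DW_OP_convert back to an untyped value when MODE fits in DWARF2_ADDR_SIZE,
   or to the unsigned base type of MODE otherwise.  */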
14968
14969 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14970 const0 is DW_OP_lit0 or corresponding typed constant,
14971 const1 is DW_OP_lit1 or corresponding typed constant
14972 and constMSB is constant with just the MSB bit set
14973 for the mode):
14974 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14975 L1: const0 DW_OP_swap
14976 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14977 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14978 L3: DW_OP_drop
14979 L4: DW_OP_nop
14980
14981 CTZ is similar:
14982 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14983 L1: const0 DW_OP_swap
14984 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14985 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14986 L3: DW_OP_drop
14987 L4: DW_OP_nop
14988
14989 FFS is similar:
14990 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14991 L1: const1 DW_OP_swap
14992 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14993 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14994 L3: DW_OP_drop
14995 L4: DW_OP_nop */
14996
14997 static dw_loc_descr_ref
14998 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14999 machine_mode mem_mode)
15000 {
15001 dw_loc_descr_ref op0, ret, tmp;
15002 HOST_WIDE_INT valv;
15003 dw_loc_descr_ref l1jump, l1label;
15004 dw_loc_descr_ref l2jump, l2label;
15005 dw_loc_descr_ref l3jump, l3label;
15006 dw_loc_descr_ref l4jump, l4label;
15007 rtx msb;
15008
15009 if (GET_MODE (XEXP (rtl, 0)) != mode)
15010 return NULL;
15011
15012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15013 VAR_INIT_STATUS_INITIALIZED);
15014 if (op0 == NULL)
15015 return NULL;
15016 ret = op0;
15017 if (GET_CODE (rtl) == CLZ)
15018 {
15019 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15020 valv = GET_MODE_BITSIZE (mode);
15021 }
15022 else if (GET_CODE (rtl) == FFS)
15023 valv = 0;
15024 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15025 valv = GET_MODE_BITSIZE (mode);
15026 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15027 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15028 add_loc_descr (&ret, l1jump);
15029 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15030 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15031 VAR_INIT_STATUS_INITIALIZED);
15032 if (tmp == NULL)
15033 return NULL;
15034 add_loc_descr (&ret, tmp);
15035 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15036 add_loc_descr (&ret, l4jump);
15037 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15038 ? const1_rtx : const0_rtx,
15039 mode, mem_mode,
15040 VAR_INIT_STATUS_INITIALIZED);
15041 if (l1label == NULL)
15042 return NULL;
15043 add_loc_descr (&ret, l1label);
15044 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15045 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15046 add_loc_descr (&ret, l2label);
15047 if (GET_CODE (rtl) != CLZ)
15048 msb = const1_rtx;
15049 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15050 msb = GEN_INT (HOST_WIDE_INT_1U
15051 << (GET_MODE_BITSIZE (mode) - 1));
15052 else
15053 msb = immed_wide_int_const
15054 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15055 GET_MODE_PRECISION (mode)), mode);
15056 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15057 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15058 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15059 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15060 else
15061 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15062 VAR_INIT_STATUS_INITIALIZED);
15063 if (tmp == NULL)
15064 return NULL;
15065 add_loc_descr (&ret, tmp);
15066 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15067 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15068 add_loc_descr (&ret, l3jump);
15069 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15070 VAR_INIT_STATUS_INITIALIZED);
15071 if (tmp == NULL)
15072 return NULL;
15073 add_loc_descr (&ret, tmp);
15074 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15075 ? DW_OP_shl : DW_OP_shr, 0, 0));
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15077 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15079 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15080 add_loc_descr (&ret, l2jump);
15081 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15082 add_loc_descr (&ret, l3label);
15083 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15084 add_loc_descr (&ret, l4label);
15085 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15086 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15087 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15089 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15091 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15092 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15093 return ret;
15094 }
15095
15096 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15097 const1 is DW_OP_lit1 or corresponding typed constant):
15098 const0 DW_OP_swap
15099 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15100 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15101 L2: DW_OP_drop
15102
15103 PARITY is similar:
15104 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15105 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15106 L2: DW_OP_drop */
15107
15108 static dw_loc_descr_ref
15109 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15110 machine_mode mem_mode)
15111 {
15112 dw_loc_descr_ref op0, ret, tmp;
15113 dw_loc_descr_ref l1jump, l1label;
15114 dw_loc_descr_ref l2jump, l2label;
15115
15116 if (GET_MODE (XEXP (rtl, 0)) != mode)
15117 return NULL;
15118
15119 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15120 VAR_INIT_STATUS_INITIALIZED);
15121 if (op0 == NULL)
15122 return NULL;
15123 ret = op0;
15124 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15125 VAR_INIT_STATUS_INITIALIZED);
15126 if (tmp == NULL)
15127 return NULL;
15128 add_loc_descr (&ret, tmp);
15129 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15130 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15131 add_loc_descr (&ret, l1label);
15132 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15133 add_loc_descr (&ret, l2jump);
15134 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15135 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15136 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15137 VAR_INIT_STATUS_INITIALIZED);
15138 if (tmp == NULL)
15139 return NULL;
15140 add_loc_descr (&ret, tmp);
15141 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15142 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15143 ? DW_OP_plus : DW_OP_xor, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15145 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15149 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15150 add_loc_descr (&ret, l1jump);
15151 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15152 add_loc_descr (&ret, l2label);
15153 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15154 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15155 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15156 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15157 return ret;
15158 }
15159
15160 /* BSWAP (constS is initial shift count, either 56 or 24):
15161 constS const0
15162 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15163 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15164 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15165 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15166 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15167
15168 static dw_loc_descr_ref
15169 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15170 machine_mode mem_mode)
15171 {
15172 dw_loc_descr_ref op0, ret, tmp;
15173 dw_loc_descr_ref l1jump, l1label;
15174 dw_loc_descr_ref l2jump, l2label;
15175
15176 if (BITS_PER_UNIT != 8
15177 || (GET_MODE_BITSIZE (mode) != 32
15178 && GET_MODE_BITSIZE (mode) != 64))
15179 return NULL;
15180
15181 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15182 VAR_INIT_STATUS_INITIALIZED);
15183 if (op0 == NULL)
15184 return NULL;
15185
15186 ret = op0;
15187 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15188 mode, mem_mode,
15189 VAR_INIT_STATUS_INITIALIZED);
15190 if (tmp == NULL)
15191 return NULL;
15192 add_loc_descr (&ret, tmp);
15193 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (tmp == NULL)
15196 return NULL;
15197 add_loc_descr (&ret, tmp);
15198 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15199 add_loc_descr (&ret, l1label);
15200 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15201 mode, mem_mode,
15202 VAR_INIT_STATUS_INITIALIZED);
15203 add_loc_descr (&ret, tmp);
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15205 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15207 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15208 VAR_INIT_STATUS_INITIALIZED);
15209 if (tmp == NULL)
15210 return NULL;
15211 add_loc_descr (&ret, tmp);
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15218 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15219 VAR_INIT_STATUS_INITIALIZED);
15220 add_loc_descr (&ret, tmp);
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15222 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15223 add_loc_descr (&ret, l2jump);
15224 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 add_loc_descr (&ret, tmp);
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15228 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15229 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15230 add_loc_descr (&ret, l1jump);
15231 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15232 add_loc_descr (&ret, l2label);
15233 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15234 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15235 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15237 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15239 return ret;
15240 }
15241
15242 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15243 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15244 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15245 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15246
15247 ROTATERT is similar:
15248 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15249 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15250 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15251
15252 static dw_loc_descr_ref
15253 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15254 machine_mode mem_mode)
15255 {
15256 rtx rtlop1 = XEXP (rtl, 1);
15257 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15258 int i;
15259
15260 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15261 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15262 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 if (op0 == NULL || op1 == NULL)
15267 return NULL;
15268 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15269 for (i = 0; i < 2; i++)
15270 {
15271 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15272 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15273 mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15276 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15277 ? DW_OP_const4u
15278 : HOST_BITS_PER_WIDE_INT == 64
15279 ? DW_OP_const8u : DW_OP_constu,
15280 GET_MODE_MASK (mode), 0);
15281 else
15282 mask[i] = NULL;
15283 if (mask[i] == NULL)
15284 return NULL;
15285 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15286 }
15287 ret = op0;
15288 add_loc_descr (&ret, op1);
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15291 if (GET_CODE (rtl) == ROTATERT)
15292 {
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15295 GET_MODE_BITSIZE (mode), 0));
15296 }
15297 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15298 if (mask[0] != NULL)
15299 add_loc_descr (&ret, mask[0]);
15300 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15301 if (mask[1] != NULL)
15302 {
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15304 add_loc_descr (&ret, mask[1]);
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 }
15307 if (GET_CODE (rtl) == ROTATE)
15308 {
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15311 GET_MODE_BITSIZE (mode), 0));
15312 }
15313 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15315 return ret;
15316 }
15317
15318 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15319 for DEBUG_PARAMETER_REF RTL. */
15320
15321 static dw_loc_descr_ref
15322 parameter_ref_descriptor (rtx rtl)
15323 {
15324 dw_loc_descr_ref ret;
15325 dw_die_ref ref;
15326
15327 if (dwarf_strict)
15328 return NULL;
15329 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15330 /* With LTO during LTRANS we get the late DIE that refers to the early
15331 DIE, thus we add another indirection here. This seems to confuse
15332 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15333 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15334 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15335 if (ref)
15336 {
15337 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15338 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15339 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15340 }
15341 else
15342 {
15343 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15344 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15345 }
15346 return ret;
15347 }
15348
15349 /* The following routine converts the RTL for a variable or parameter
15350 (resident in memory) into an equivalent Dwarf representation of a
15351 mechanism for getting the address of that same variable onto the top of a
15352 hypothetical "address evaluation" stack.
15353
15354 When creating memory location descriptors, we are effectively transforming
15355 the RTL for a memory-resident object into its Dwarf postfix expression
15356 equivalent. This routine recursively descends an RTL tree, turning
15357 it into Dwarf postfix code as it goes.
15358
15359 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15360
15361 MEM_MODE is the mode of the memory reference, needed to handle some
15362 autoincrement addressing modes.
15363
15364 Return 0 if we can't represent the location. */
15365
15366 dw_loc_descr_ref
15367 mem_loc_descriptor (rtx rtl, machine_mode mode,
15368 machine_mode mem_mode,
15369 enum var_init_status initialized)
15370 {
15371 dw_loc_descr_ref mem_loc_result = NULL;
15372 enum dwarf_location_atom op;
15373 dw_loc_descr_ref op0, op1;
15374 rtx inner = NULL_RTX;
15375 poly_int64 offset;
15376
15377 if (mode == VOIDmode)
15378 mode = GET_MODE (rtl);
15379
15380 /* Note that for a dynamically sized array, the location we generate a
15381 description of here is the lowest-addressed location that is actually
15382 within the array. That's *not* necessarily the same as the location of
15383 the zeroth element of the array. */
15384
15385 rtl = targetm.delegitimize_address (rtl);
15386
15387 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15388 return NULL;
15389
15390 scalar_int_mode int_mode, inner_mode, op1_mode;
15391 switch (GET_CODE (rtl))
15392 {
15393 case POST_INC:
15394 case POST_DEC:
15395 case POST_MODIFY:
15396 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15397
15398 case SUBREG:
15399 /* The case of a subreg may arise when we have a local (register)
15400 variable or a formal (register) parameter which doesn't quite fill
15401 up an entire register. For now, just assume that it is
15402 legitimate to make the Dwarf info refer to the whole register which
15403 contains the given subreg. */
15404 if (!subreg_lowpart_p (rtl))
15405 break;
15406 inner = SUBREG_REG (rtl);
15407 /* FALLTHRU */
15408 case TRUNCATE:
15409 if (inner == NULL_RTX)
15410 inner = XEXP (rtl, 0);
15411 if (is_a <scalar_int_mode> (mode, &int_mode)
15412 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15413 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15414 #ifdef POINTERS_EXTEND_UNSIGNED
15415 || (int_mode == Pmode && mem_mode != VOIDmode)
15416 #endif
15417 )
15418 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15419 {
15420 mem_loc_result = mem_loc_descriptor (inner,
15421 inner_mode,
15422 mem_mode, initialized);
15423 break;
15424 }
15425 if (dwarf_strict && dwarf_version < 5)
15426 break;
15427 if (is_a <scalar_int_mode> (mode, &int_mode)
15428 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15429 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15430 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15431 {
15432 dw_die_ref type_die;
15433 dw_loc_descr_ref cvt;
15434
15435 mem_loc_result = mem_loc_descriptor (inner,
15436 GET_MODE (inner),
15437 mem_mode, initialized);
15438 if (mem_loc_result == NULL)
15439 break;
15440 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15441 if (type_die == NULL)
15442 {
15443 mem_loc_result = NULL;
15444 break;
15445 }
15446 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15447 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15448 else
15449 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15450 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15451 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15452 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15453 add_loc_descr (&mem_loc_result, cvt);
15454 if (is_a <scalar_int_mode> (mode, &int_mode)
15455 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15456 {
15457 /* Convert it to untyped afterwards. */
15458 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15459 add_loc_descr (&mem_loc_result, cvt);
15460 }
15461 }
15462 break;
15463
15464 case REG:
15465 if (!is_a <scalar_int_mode> (mode, &int_mode)
15466 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15467 && rtl != arg_pointer_rtx
15468 && rtl != frame_pointer_rtx
15469 #ifdef POINTERS_EXTEND_UNSIGNED
15470 && (int_mode != Pmode || mem_mode == VOIDmode)
15471 #endif
15472 ))
15473 {
15474 dw_die_ref type_die;
15475 unsigned int dbx_regnum;
15476
15477 if (dwarf_strict && dwarf_version < 5)
15478 break;
15479 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15480 break;
15481 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15482 if (type_die == NULL)
15483 break;
15484
15485 dbx_regnum = dbx_reg_number (rtl);
15486 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15487 break;
15488 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15489 dbx_regnum, 0);
15490 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15491 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15492 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15493 break;
15494 }
15495 /* Whenever a register number forms a part of the description of the
15496 method for calculating the (dynamic) address of a memory resident
15497 object, DWARF rules require the register number be referred to as
15498 a "base register". This distinction is not based in any way upon
15499 what category of register the hardware believes the given register
15500 belongs to. This is strictly DWARF terminology we're dealing with
15501 here. Note that in cases where the location of a memory-resident
15502 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15503 OP_CONST (0)) the actual DWARF location descriptor that we generate
15504 may just be OP_BASEREG (basereg). This may look deceptively like
15505 the object in question was allocated to a register (rather than in
15506 memory) so DWARF consumers need to be aware of the subtle
15507 distinction between OP_REG and OP_BASEREG. */
15508 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15509 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15510 else if (stack_realign_drap
15511 && crtl->drap_reg
15512 && crtl->args.internal_arg_pointer == rtl
15513 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15514 {
15515 /* If RTL is internal_arg_pointer, which has been optimized
15516 out, use DRAP instead. */
15517 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15518 VAR_INIT_STATUS_INITIALIZED);
15519 }
15520 break;
15521
15522 case SIGN_EXTEND:
15523 case ZERO_EXTEND:
15524 if (!is_a <scalar_int_mode> (mode, &int_mode)
15525 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15526 break;
15527 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15528 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15529 if (op0 == 0)
15530 break;
15531 else if (GET_CODE (rtl) == ZERO_EXTEND
15532 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15533 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15534 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15535 to expand zero extend as two shifts instead of
15536 masking. */
15537 && GET_MODE_SIZE (inner_mode) <= 4)
15538 {
15539 mem_loc_result = op0;
15540 add_loc_descr (&mem_loc_result,
15541 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15542 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15543 }
15544 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15545 {
15546 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15547 shift *= BITS_PER_UNIT;
15548 if (GET_CODE (rtl) == SIGN_EXTEND)
15549 op = DW_OP_shra;
15550 else
15551 op = DW_OP_shr;
15552 mem_loc_result = op0;
15553 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15554 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15555 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15557 }
15558 else if (!dwarf_strict || dwarf_version >= 5)
15559 {
15560 dw_die_ref type_die1, type_die2;
15561 dw_loc_descr_ref cvt;
15562
15563 type_die1 = base_type_for_mode (inner_mode,
15564 GET_CODE (rtl) == ZERO_EXTEND);
15565 if (type_die1 == NULL)
15566 break;
15567 type_die2 = base_type_for_mode (int_mode, 1);
15568 if (type_die2 == NULL)
15569 break;
15570 mem_loc_result = op0;
15571 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15572 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15573 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15574 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15575 add_loc_descr (&mem_loc_result, cvt);
15576 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15577 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15578 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15579 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15580 add_loc_descr (&mem_loc_result, cvt);
15581 }
15582 break;
15583
15584 case MEM:
15585 {
15586 rtx new_rtl = avoid_constant_pool_reference (rtl);
15587 if (new_rtl != rtl)
15588 {
15589 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15590 initialized);
15591 if (mem_loc_result != NULL)
15592 return mem_loc_result;
15593 }
15594 }
15595 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15596 get_address_mode (rtl), mode,
15597 VAR_INIT_STATUS_INITIALIZED);
15598 if (mem_loc_result == NULL)
15599 mem_loc_result = tls_mem_loc_descriptor (rtl);
15600 if (mem_loc_result != NULL)
15601 {
15602 if (!is_a <scalar_int_mode> (mode, &int_mode)
15603 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15604 {
15605 dw_die_ref type_die;
15606 dw_loc_descr_ref deref;
15607 HOST_WIDE_INT size;
15608
15609 if (dwarf_strict && dwarf_version < 5)
15610 return NULL;
15611 if (!GET_MODE_SIZE (mode).is_constant (&size))
15612 return NULL;
15613 type_die
15614 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15615 if (type_die == NULL)
15616 return NULL;
15617 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15618 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15619 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15620 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15621 add_loc_descr (&mem_loc_result, deref);
15622 }
15623 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15624 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15625 else
15626 add_loc_descr (&mem_loc_result,
15627 new_loc_descr (DW_OP_deref_size,
15628 GET_MODE_SIZE (int_mode), 0));
15629 }
15630 break;
15631
15632 case LO_SUM:
15633 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15634
15635 case LABEL_REF:
15636 /* Some ports can transform a symbol ref into a label ref, because
15637 the symbol ref is too far away and has to be dumped into a constant
15638 pool. */
15639 case CONST:
15640 case SYMBOL_REF:
15641 if (!is_a <scalar_int_mode> (mode, &int_mode)
15642 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15643 #ifdef POINTERS_EXTEND_UNSIGNED
15644 && (int_mode != Pmode || mem_mode == VOIDmode)
15645 #endif
15646 ))
15647 break;
15648 if (GET_CODE (rtl) == SYMBOL_REF
15649 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15650 {
15651 dw_loc_descr_ref temp;
15652
15653 /* If this is not defined, we have no way to emit the data. */
15654 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15655 break;
15656
15657 temp = new_addr_loc_descr (rtl, dtprel_true);
15658
15659 /* We check for DWARF 5 here because gdb did not implement
15660 DW_OP_form_tls_address until after 7.12. */
15661 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15662 ? DW_OP_form_tls_address
15663 : DW_OP_GNU_push_tls_address),
15664 0, 0);
15665 add_loc_descr (&mem_loc_result, temp);
15666
15667 break;
15668 }
15669
15670 if (!const_ok_for_output (rtl))
15671 {
15672 if (GET_CODE (rtl) == CONST)
15673 switch (GET_CODE (XEXP (rtl, 0)))
15674 {
15675 case NOT:
15676 op = DW_OP_not;
15677 goto try_const_unop;
15678 case NEG:
15679 op = DW_OP_neg;
15680 goto try_const_unop;
15681 try_const_unop:
15682 rtx arg;
15683 arg = XEXP (XEXP (rtl, 0), 0);
15684 if (!CONSTANT_P (arg))
15685 arg = gen_rtx_CONST (int_mode, arg);
15686 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15687 initialized);
15688 if (op0)
15689 {
15690 mem_loc_result = op0;
15691 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15692 }
15693 break;
15694 default:
15695 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15696 mem_mode, initialized);
15697 break;
15698 }
15699 break;
15700 }
15701
15702 symref:
15703 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15704 vec_safe_push (used_rtx_array, rtl);
15705 break;
15706
15707 case CONCAT:
15708 case CONCATN:
15709 case VAR_LOCATION:
15710 case DEBUG_IMPLICIT_PTR:
15711 expansion_failed (NULL_TREE, rtl,
15712 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15713 return 0;
15714
15715 case ENTRY_VALUE:
15716 if (dwarf_strict && dwarf_version < 5)
15717 return NULL;
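/* DW_OP_entry_value wraps a nested DWARF expression (here a register, or
   a register-based memory location) describing where the value lived on
   entry to the current function.  */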
15718 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15719 {
15720 if (!is_a <scalar_int_mode> (mode, &int_mode)
15721 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15722 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15723 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15724 else
15725 {
15726 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15727 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15728 return NULL;
15729 op0 = one_reg_loc_descriptor (dbx_regnum,
15730 VAR_INIT_STATUS_INITIALIZED);
15731 }
15732 }
15733 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15734 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15735 {
15736 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15737 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15738 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15739 return NULL;
15740 }
15741 else
15742 gcc_unreachable ();
15743 if (op0 == NULL)
15744 return NULL;
15745 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15746 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15747 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15748 break;
15749
15750 case DEBUG_PARAMETER_REF:
15751 mem_loc_result = parameter_ref_descriptor (rtl);
15752 break;
15753
15754 case PRE_MODIFY:
15755 /* Extract the PLUS expression nested inside and fall into the
15756 PLUS code below. */
15757 rtl = XEXP (rtl, 1);
15758 goto plus;
15759
15760 case PRE_INC:
15761 case PRE_DEC:
15762 /* Turn these into a PLUS expression and fall into the PLUS code
15763 below. */
15764 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15765 gen_int_mode (GET_CODE (rtl) == PRE_INC
15766 ? GET_MODE_UNIT_SIZE (mem_mode)
15767 : -GET_MODE_UNIT_SIZE (mem_mode),
15768 mode));
15769
15770 /* fall through */
15771
15772 case PLUS:
15773 plus:
15774 if (is_based_loc (rtl)
15775 && is_a <scalar_int_mode> (mode, &int_mode)
15776 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15777 || XEXP (rtl, 0) == arg_pointer_rtx
15778 || XEXP (rtl, 0) == frame_pointer_rtx))
15779 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15780 INTVAL (XEXP (rtl, 1)),
15781 VAR_INIT_STATUS_INITIALIZED);
15782 else
15783 {
15784 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15785 VAR_INIT_STATUS_INITIALIZED);
15786 if (mem_loc_result == 0)
15787 break;
15788
15789 if (CONST_INT_P (XEXP (rtl, 1))
15790 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15791 <= DWARF2_ADDR_SIZE))
15792 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15793 else
15794 {
15795 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15796 VAR_INIT_STATUS_INITIALIZED);
15797 if (op1 == 0)
15798 return NULL;
15799 add_loc_descr (&mem_loc_result, op1);
15800 add_loc_descr (&mem_loc_result,
15801 new_loc_descr (DW_OP_plus, 0, 0));
15802 }
15803 }
15804 break;
15805
15806 /* If a pseudo-reg is optimized away, it is possible for it to
15807 be replaced with a MEM containing a multiply or shift. */
15808 case MINUS:
15809 op = DW_OP_minus;
15810 goto do_binop;
15811
15812 case MULT:
15813 op = DW_OP_mul;
15814 goto do_binop;
15815
15816 case DIV:
15817 if ((!dwarf_strict || dwarf_version >= 5)
15818 && is_a <scalar_int_mode> (mode, &int_mode)
15819 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15820 {
15821 mem_loc_result = typed_binop (DW_OP_div, rtl,
15822 base_type_for_mode (mode, 0),
15823 int_mode, mem_mode);
15824 break;
15825 }
15826 op = DW_OP_div;
15827 goto do_binop;
15828
15829 case UMOD:
15830 op = DW_OP_mod;
15831 goto do_binop;
15832
15833 case ASHIFT:
15834 op = DW_OP_shl;
15835 goto do_shift;
15836
15837 case ASHIFTRT:
15838 op = DW_OP_shra;
15839 goto do_shift;
15840
15841 case LSHIFTRT:
15842 op = DW_OP_shr;
15843 goto do_shift;
15844
15845 do_shift:
15846 if (!is_a <scalar_int_mode> (mode, &int_mode))
15847 break;
15848 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15849 VAR_INIT_STATUS_INITIALIZED);
15850 {
15851 rtx rtlop1 = XEXP (rtl, 1);
15852 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15853 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15854 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15855 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15856 VAR_INIT_STATUS_INITIALIZED);
15857 }
15858
15859 if (op0 == 0 || op1 == 0)
15860 break;
15861
15862 mem_loc_result = op0;
15863 add_loc_descr (&mem_loc_result, op1);
15864 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15865 break;
15866
15867 case AND:
15868 op = DW_OP_and;
15869 goto do_binop;
15870
15871 case IOR:
15872 op = DW_OP_or;
15873 goto do_binop;
15874
15875 case XOR:
15876 op = DW_OP_xor;
15877 goto do_binop;
15878
15879 do_binop:
15880 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884
15885 if (op0 == 0 || op1 == 0)
15886 break;
15887
15888 mem_loc_result = op0;
15889 add_loc_descr (&mem_loc_result, op1);
15890 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15891 break;
15892
15893 case MOD:
15894 if ((!dwarf_strict || dwarf_version >= 5)
15895 && is_a <scalar_int_mode> (mode, &int_mode)
15896 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15897 {
15898 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15899 base_type_for_mode (mode, 0),
15900 int_mode, mem_mode);
15901 break;
15902 }
15903
15904 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908
15909 if (op0 == 0 || op1 == 0)
15910 break;
15911
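/* Compute the remainder as op0 - (op0 / op1) * op1 using only stack
   operations: with op0 and op1 on the stack, DW_OP_over twice copies
   both operands, DW_OP_div and DW_OP_mul form (op0 / op1) * op1, and
   DW_OP_minus subtracts that from the original op0.  */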
15912 mem_loc_result = op0;
15913 add_loc_descr (&mem_loc_result, op1);
15914 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15915 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15917 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15919 break;
15920
15921 case UDIV:
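/* The generic DW_OP_div operation is a signed division.  When the
   operands fit in the DWARF address size, typed_binop wraps the
   division in conversions to an unsigned base type so that an unsigned
   division is performed.  */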
15922 if ((!dwarf_strict || dwarf_version >= 5)
15923 && is_a <scalar_int_mode> (mode, &int_mode))
15924 {
15925 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15926 {
15927 op = DW_OP_div;
15928 goto do_binop;
15929 }
15930 mem_loc_result = typed_binop (DW_OP_div, rtl,
15931 base_type_for_mode (int_mode, 1),
15932 int_mode, mem_mode);
15933 }
15934 break;
15935
15936 case NOT:
15937 op = DW_OP_not;
15938 goto do_unop;
15939
15940 case ABS:
15941 op = DW_OP_abs;
15942 goto do_unop;
15943
15944 case NEG:
15945 op = DW_OP_neg;
15946 goto do_unop;
15947
15948 do_unop:
15949 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15950 VAR_INIT_STATUS_INITIALIZED);
15951
15952 if (op0 == 0)
15953 break;
15954
15955 mem_loc_result = op0;
15956 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15957 break;
15958
15959 case CONST_INT:
15960 if (!is_a <scalar_int_mode> (mode, &int_mode)
15961 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15962 #ifdef POINTERS_EXTEND_UNSIGNED
15963 || (int_mode == Pmode
15964 && mem_mode != VOIDmode
15965 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15966 #endif
15967 )
15968 {
15969 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15970 break;
15971 }
15972 if ((!dwarf_strict || dwarf_version >= 5)
15973 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15974 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15975 {
15976 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15977 scalar_int_mode amode;
15978 if (type_die == NULL)
15979 return NULL;
15980 if (INTVAL (rtl) >= 0
15981 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15982 .exists (&amode))
15983 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15984 /* const DW_OP_convert <XXX> vs.
15985 DW_OP_const_type <XXX, 1, const>. */
15986 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15987 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15988 {
15989 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15990 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15991 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15992 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15993 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15994 add_loc_descr (&mem_loc_result, op0);
15995 return mem_loc_result;
15996 }
15997 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15998 INTVAL (rtl));
15999 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16000 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16001 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16002 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16003 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16004 else
16005 {
16006 mem_loc_result->dw_loc_oprnd2.val_class
16007 = dw_val_class_const_double;
16008 mem_loc_result->dw_loc_oprnd2.v.val_double
16009 = double_int::from_shwi (INTVAL (rtl));
16010 }
16011 }
16012 break;
16013
16014 case CONST_DOUBLE:
16015 if (!dwarf_strict || dwarf_version >= 5)
16016 {
16017 dw_die_ref type_die;
16018
16019 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16020 CONST_DOUBLE rtx could represent either a large integer
16021 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16022 the value is always a floating point constant.
16023
16024 When it is an integer, a CONST_DOUBLE is used whenever
16025 the constant requires 2 HWIs to be adequately represented.
16026 We output CONST_DOUBLEs as blocks. */
16027 if (mode == VOIDmode
16028 || (GET_MODE (rtl) == VOIDmode
16029 && maybe_ne (GET_MODE_BITSIZE (mode),
16030 HOST_BITS_PER_DOUBLE_INT)))
16031 break;
16032 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16033 if (type_die == NULL)
16034 return NULL;
16035 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16036 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16037 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16038 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16039 #if TARGET_SUPPORTS_WIDE_INT == 0
16040 if (!SCALAR_FLOAT_MODE_P (mode))
16041 {
16042 mem_loc_result->dw_loc_oprnd2.val_class
16043 = dw_val_class_const_double;
16044 mem_loc_result->dw_loc_oprnd2.v.val_double
16045 = rtx_to_double_int (rtl);
16046 }
16047 else
16048 #endif
16049 {
16050 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16051 unsigned int length = GET_MODE_SIZE (float_mode);
16052 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16053
16054 insert_float (rtl, array);
16055 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16056 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16057 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16058 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16059 }
16060 }
16061 break;
16062
16063 case CONST_WIDE_INT:
16064 if (!dwarf_strict || dwarf_version >= 5)
16065 {
16066 dw_die_ref type_die;
16067
16068 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16069 if (type_die == NULL)
16070 return NULL;
16071 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16072 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16073 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16074 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16075 mem_loc_result->dw_loc_oprnd2.val_class
16076 = dw_val_class_wide_int;
16077 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16078 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16079 }
16080 break;
16081
16082 case CONST_POLY_INT:
16083 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16084 break;
16085
16086 case EQ:
16087 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16088 break;
16089
16090 case GE:
16091 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16092 break;
16093
16094 case GT:
16095 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16096 break;
16097
16098 case LE:
16099 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16100 break;
16101
16102 case LT:
16103 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16104 break;
16105
16106 case NE:
16107 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16108 break;
16109
16110 case GEU:
16111 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16112 break;
16113
16114 case GTU:
16115 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16116 break;
16117
16118 case LEU:
16119 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16120 break;
16121
16122 case LTU:
16123 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16124 break;
16125
16126 case UMIN:
16127 case UMAX:
16128 if (!SCALAR_INT_MODE_P (mode))
16129 break;
16130 /* FALLTHRU */
16131 case SMIN:
16132 case SMAX:
16133 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16134 break;
16135
16136 case ZERO_EXTRACT:
16137 case SIGN_EXTRACT:
16138 if (CONST_INT_P (XEXP (rtl, 1))
16139 && CONST_INT_P (XEXP (rtl, 2))
16140 && is_a <scalar_int_mode> (mode, &int_mode)
16141 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16142 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16143 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16144 && ((unsigned) INTVAL (XEXP (rtl, 1))
16145 + (unsigned) INTVAL (XEXP (rtl, 2))
16146 <= GET_MODE_BITSIZE (int_mode)))
16147 {
16148 int shift, size;
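/* Extract the bit-field by shifting it up to the top of the value with
   DW_OP_shl and then back down with DW_OP_shra (SIGN_EXTRACT) or
   DW_OP_shr (ZERO_EXTRACT), so the result is sign- or zero-extended as
   appropriate.  */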
16149 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16150 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16151 if (op0 == 0)
16152 break;
16153 if (GET_CODE (rtl) == SIGN_EXTRACT)
16154 op = DW_OP_shra;
16155 else
16156 op = DW_OP_shr;
16157 mem_loc_result = op0;
16158 size = INTVAL (XEXP (rtl, 1));
16159 shift = INTVAL (XEXP (rtl, 2));
16160 if (BITS_BIG_ENDIAN)
16161 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16162 if (shift + size != (int) DWARF2_ADDR_SIZE)
16163 {
16164 add_loc_descr (&mem_loc_result,
16165 int_loc_descriptor (DWARF2_ADDR_SIZE
16166 - shift - size));
16167 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16168 }
16169 if (size != (int) DWARF2_ADDR_SIZE)
16170 {
16171 add_loc_descr (&mem_loc_result,
16172 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16173 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16174 }
16175 }
16176 break;
16177
16178 case IF_THEN_ELSE:
16179 {
16180 dw_loc_descr_ref op2, bra_node, drop_node;
16181 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16182 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16183 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16184 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16185 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16186 VAR_INIT_STATUS_INITIALIZED);
16187 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16188 VAR_INIT_STATUS_INITIALIZED);
16189 if (op0 == NULL || op1 == NULL || op2 == NULL)
16190 break;
16191
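/* Push op1 (the "then" value), op2 (the "else" value) and the condition,
   then branch: DW_OP_bra jumps to the final DW_OP_drop when the condition
   is nonzero, so op2 is dropped and op1 remains; otherwise DW_OP_swap
   followed by DW_OP_drop discards op1 and leaves op2.  */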
16192 mem_loc_result = op1;
16193 add_loc_descr (&mem_loc_result, op2);
16194 add_loc_descr (&mem_loc_result, op0);
16195 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16196 add_loc_descr (&mem_loc_result, bra_node);
16197 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16198 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16199 add_loc_descr (&mem_loc_result, drop_node);
16200 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16201 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16202 }
16203 break;
16204
16205 case FLOAT_EXTEND:
16206 case FLOAT_TRUNCATE:
16207 case FLOAT:
16208 case UNSIGNED_FLOAT:
16209 case FIX:
16210 case UNSIGNED_FIX:
16211 if (!dwarf_strict || dwarf_version >= 5)
16212 {
16213 dw_die_ref type_die;
16214 dw_loc_descr_ref cvt;
16215
16216 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16217 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16218 if (op0 == NULL)
16219 break;
16220 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16221 && (GET_CODE (rtl) == FLOAT
16222 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16223 {
16224 type_die = base_type_for_mode (int_mode,
16225 GET_CODE (rtl) == UNSIGNED_FLOAT);
16226 if (type_die == NULL)
16227 break;
16228 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16229 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16230 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16231 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16232 add_loc_descr (&op0, cvt);
16233 }
16234 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16235 if (type_die == NULL)
16236 break;
16237 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16238 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16239 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16240 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16241 add_loc_descr (&op0, cvt);
16242 if (is_a <scalar_int_mode> (mode, &int_mode)
16243 && (GET_CODE (rtl) == FIX
16244 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16245 {
16246 op0 = convert_descriptor_to_mode (int_mode, op0);
16247 if (op0 == NULL)
16248 break;
16249 }
16250 mem_loc_result = op0;
16251 }
16252 break;
16253
16254 case CLZ:
16255 case CTZ:
16256 case FFS:
16257 if (is_a <scalar_int_mode> (mode, &int_mode))
16258 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16259 break;
16260
16261 case POPCOUNT:
16262 case PARITY:
16263 if (is_a <scalar_int_mode> (mode, &int_mode))
16264 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16265 break;
16266
16267 case BSWAP:
16268 if (is_a <scalar_int_mode> (mode, &int_mode))
16269 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16270 break;
16271
16272 case ROTATE:
16273 case ROTATERT:
16274 if (is_a <scalar_int_mode> (mode, &int_mode))
16275 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16276 break;
16277
16278 case COMPARE:
16279 /* In theory, we could implement the above. */
16280 /* DWARF cannot represent the unsigned compare operations
16281 natively. */
16282 case SS_MULT:
16283 case US_MULT:
16284 case SS_DIV:
16285 case US_DIV:
16286 case SS_PLUS:
16287 case US_PLUS:
16288 case SS_MINUS:
16289 case US_MINUS:
16290 case SS_NEG:
16291 case US_NEG:
16292 case SS_ABS:
16293 case SS_ASHIFT:
16294 case US_ASHIFT:
16295 case SS_TRUNCATE:
16296 case US_TRUNCATE:
16297 case UNORDERED:
16298 case ORDERED:
16299 case UNEQ:
16300 case UNGE:
16301 case UNGT:
16302 case UNLE:
16303 case UNLT:
16304 case LTGT:
16305 case FRACT_CONVERT:
16306 case UNSIGNED_FRACT_CONVERT:
16307 case SAT_FRACT:
16308 case UNSIGNED_SAT_FRACT:
16309 case SQRT:
16310 case ASM_OPERANDS:
16311 case VEC_MERGE:
16312 case VEC_SELECT:
16313 case VEC_CONCAT:
16314 case VEC_DUPLICATE:
16315 case VEC_SERIES:
16316 case UNSPEC:
16317 case HIGH:
16318 case FMA:
16319 case STRICT_LOW_PART:
16320 case CONST_VECTOR:
16321 case CONST_FIXED:
16322 case CLRSB:
16323 case CLOBBER:
16324 case CLOBBER_HIGH:
16325 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16326 can't express it in the debug info. This can happen e.g. with some
16327 TLS UNSPECs. */
16328 break;
16329
16330 case CONST_STRING:
16331 resolve_one_addr (&rtl);
16332 goto symref;
16333
16334 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16335 the expression. An UNSPEC rtx represents a raw DWARF operation;
16336 new_loc_descr is called for it to build the operation directly.
16337 Otherwise mem_loc_descriptor is called recursively. */
16338 case PARALLEL:
16339 {
16340 int index = 0;
16341 dw_loc_descr_ref exp_result = NULL;
16342
16343 for (; index < XVECLEN (rtl, 0); index++)
16344 {
16345 rtx elem = XVECEXP (rtl, 0, index);
16346 if (GET_CODE (elem) == UNSPEC)
16347 {
16348 /* Each DWARF operation UNSPEC contains two operands; if
16349 one operand is not used for the operation, const0_rtx is
16350 passed. */
16351 gcc_assert (XVECLEN (elem, 0) == 2);
16352
16353 HOST_WIDE_INT dw_op = XINT (elem, 1);
16354 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16355 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16356 exp_result
16357 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16358 oprnd2);
16359 }
16360 else
16361 exp_result
16362 = mem_loc_descriptor (elem, mode, mem_mode,
16363 VAR_INIT_STATUS_INITIALIZED);
16364
16365 if (!mem_loc_result)
16366 mem_loc_result = exp_result;
16367 else
16368 add_loc_descr (&mem_loc_result, exp_result);
16369 }
16370
16371 break;
16372 }
16373
16374 default:
16375 if (flag_checking)
16376 {
16377 print_rtl (stderr, rtl);
16378 gcc_unreachable ();
16379 }
16380 break;
16381 }
16382
16383 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16384 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16385
16386 return mem_loc_result;
16387 }
16388
16389 /* Return a descriptor that describes the concatenation of two locations.
16390 This is typically a complex variable. */
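/* For example, a complex value living in two registers ends up described
   as:  <loc of x0> DW_OP_piece size0  <loc of x1> DW_OP_piece size1.  */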
16391
16392 static dw_loc_descr_ref
16393 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16394 {
16395 /* At present we only track constant-sized pieces. */
16396 unsigned int size0, size1;
16397 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16398 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16399 return 0;
16400
16401 dw_loc_descr_ref cc_loc_result = NULL;
16402 dw_loc_descr_ref x0_ref
16403 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16404 dw_loc_descr_ref x1_ref
16405 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16406
16407 if (x0_ref == 0 || x1_ref == 0)
16408 return 0;
16409
16410 cc_loc_result = x0_ref;
16411 add_loc_descr_op_piece (&cc_loc_result, size0);
16412
16413 add_loc_descr (&cc_loc_result, x1_ref);
16414 add_loc_descr_op_piece (&cc_loc_result, size1);
16415
16416 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16417 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16418
16419 return cc_loc_result;
16420 }
16421
16422 /* Return a descriptor that describes the concatenation of N
16423 locations. */
16424
16425 static dw_loc_descr_ref
16426 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16427 {
16428 unsigned int i;
16429 dw_loc_descr_ref cc_loc_result = NULL;
16430 unsigned int n = XVECLEN (concatn, 0);
16431 unsigned int size;
16432
16433 for (i = 0; i < n; ++i)
16434 {
16435 dw_loc_descr_ref ref;
16436 rtx x = XVECEXP (concatn, 0, i);
16437
16438 /* At present we only track constant-sized pieces. */
16439 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16440 return NULL;
16441
16442 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16443 if (ref == NULL)
16444 return NULL;
16445
16446 add_loc_descr (&cc_loc_result, ref);
16447 add_loc_descr_op_piece (&cc_loc_result, size);
16448 }
16449
16450 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16451 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16452
16453 return cc_loc_result;
16454 }
16455
16456 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16457 for DEBUG_IMPLICIT_PTR RTL. */
16458
16459 static dw_loc_descr_ref
16460 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16461 {
16462 dw_loc_descr_ref ret;
16463 dw_die_ref ref;
16464
16465 if (dwarf_strict && dwarf_version < 5)
16466 return NULL;
16467 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16468 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16469 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16470 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16471 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16472 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16473 if (ref)
16474 {
16475 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16476 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16477 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16478 }
16479 else
16480 {
16481 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16482 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16483 }
16484 return ret;
16485 }
16486
16487 /* Output a proper Dwarf location descriptor for a variable or parameter
16488 which is either allocated in a register or in a memory location. For a
16489 register, we just generate an OP_REG and the register number. For a
16490 memory location we provide a Dwarf postfix expression describing how to
16491 generate the (dynamic) address of the object onto the address stack.
16492
16493 MODE is mode of the decl if this loc_descriptor is going to be used in
16494 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16495 allowed, VOIDmode otherwise.
16496
16497 If we don't know how to describe it, return 0. */
16498
16499 static dw_loc_descr_ref
16500 loc_descriptor (rtx rtl, machine_mode mode,
16501 enum var_init_status initialized)
16502 {
16503 dw_loc_descr_ref loc_result = NULL;
16504 scalar_int_mode int_mode;
16505
16506 switch (GET_CODE (rtl))
16507 {
16508 case SUBREG:
16509 /* The case of a subreg may arise when we have a local (register)
16510 variable or a formal (register) parameter which doesn't quite fill
16511 up an entire register. For now, just assume that it is
16512 legitimate to make the Dwarf info refer to the whole register which
16513 contains the given subreg. */
16514 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16515 loc_result = loc_descriptor (SUBREG_REG (rtl),
16516 GET_MODE (SUBREG_REG (rtl)), initialized);
16517 else
16518 goto do_default;
16519 break;
16520
16521 case REG:
16522 loc_result = reg_loc_descriptor (rtl, initialized);
16523 break;
16524
16525 case MEM:
16526 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16527 GET_MODE (rtl), initialized);
16528 if (loc_result == NULL)
16529 loc_result = tls_mem_loc_descriptor (rtl);
16530 if (loc_result == NULL)
16531 {
16532 rtx new_rtl = avoid_constant_pool_reference (rtl);
16533 if (new_rtl != rtl)
16534 loc_result = loc_descriptor (new_rtl, mode, initialized);
16535 }
16536 break;
16537
16538 case CONCAT:
16539 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16540 initialized);
16541 break;
16542
16543 case CONCATN:
16544 loc_result = concatn_loc_descriptor (rtl, initialized);
16545 break;
16546
16547 case VAR_LOCATION:
16548 /* Single part. */
16549 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16550 {
16551 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16552 if (GET_CODE (loc) == EXPR_LIST)
16553 loc = XEXP (loc, 0);
16554 loc_result = loc_descriptor (loc, mode, initialized);
16555 break;
16556 }
16557
16558 rtl = XEXP (rtl, 1);
16559 /* FALLTHRU */
16560
16561 case PARALLEL:
16562 {
16563 rtvec par_elems = XVEC (rtl, 0);
16564 int num_elem = GET_NUM_ELEM (par_elems);
16565 machine_mode mode;
16566 int i, size;
16567
16568 /* Create the first one, so we have something to add to. */
16569 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16570 VOIDmode, initialized);
16571 if (loc_result == NULL)
16572 return NULL;
16573 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16574 /* At present we only track constant-sized pieces. */
16575 if (!GET_MODE_SIZE (mode).is_constant (&size))
16576 return NULL;
16577 add_loc_descr_op_piece (&loc_result, size);
16578 for (i = 1; i < num_elem; i++)
16579 {
16580 dw_loc_descr_ref temp;
16581
16582 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16583 VOIDmode, initialized);
16584 if (temp == NULL)
16585 return NULL;
16586 add_loc_descr (&loc_result, temp);
16587 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16588 /* At present we only track constant-sized pieces. */
16589 if (!GET_MODE_SIZE (mode).is_constant (&size))
16590 return NULL;
16591 add_loc_descr_op_piece (&loc_result, size);
16592 }
16593 }
16594 break;
16595
16596 case CONST_INT:
16597 if (mode != VOIDmode && mode != BLKmode)
16598 {
16599 int_mode = as_a <scalar_int_mode> (mode);
16600 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16601 INTVAL (rtl));
16602 }
16603 break;
16604
16605 case CONST_DOUBLE:
16606 if (mode == VOIDmode)
16607 mode = GET_MODE (rtl);
16608
16609 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16610 {
16611 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16612
16613 /* Note that a CONST_DOUBLE rtx could represent either an integer
16614 or a floating-point constant. A CONST_DOUBLE is used whenever
16615 the constant requires more than one word in order to be
16616 adequately represented. We output CONST_DOUBLEs as blocks. */
16617 scalar_mode smode = as_a <scalar_mode> (mode);
16618 loc_result = new_loc_descr (DW_OP_implicit_value,
16619 GET_MODE_SIZE (smode), 0);
16620 #if TARGET_SUPPORTS_WIDE_INT == 0
16621 if (!SCALAR_FLOAT_MODE_P (smode))
16622 {
16623 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16624 loc_result->dw_loc_oprnd2.v.val_double
16625 = rtx_to_double_int (rtl);
16626 }
16627 else
16628 #endif
16629 {
16630 unsigned int length = GET_MODE_SIZE (smode);
16631 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16632
16633 insert_float (rtl, array);
16634 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16635 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16636 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16637 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16638 }
16639 }
16640 break;
16641
16642 case CONST_WIDE_INT:
16643 if (mode == VOIDmode)
16644 mode = GET_MODE (rtl);
16645
16646 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16647 {
16648 int_mode = as_a <scalar_int_mode> (mode);
16649 loc_result = new_loc_descr (DW_OP_implicit_value,
16650 GET_MODE_SIZE (int_mode), 0);
16651 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16652 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16653 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16654 }
16655 break;
16656
16657 case CONST_VECTOR:
16658 if (mode == VOIDmode)
16659 mode = GET_MODE (rtl);
16660
16661 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16662 {
16663 unsigned int length;
16664 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16665 return NULL;
16666
16667 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16668 unsigned char *array
16669 = ggc_vec_alloc<unsigned char> (length * elt_size);
16670 unsigned int i;
16671 unsigned char *p;
16672 machine_mode imode = GET_MODE_INNER (mode);
16673
16674 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16675 switch (GET_MODE_CLASS (mode))
16676 {
16677 case MODE_VECTOR_INT:
16678 for (i = 0, p = array; i < length; i++, p += elt_size)
16679 {
16680 rtx elt = CONST_VECTOR_ELT (rtl, i);
16681 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16682 }
16683 break;
16684
16685 case MODE_VECTOR_FLOAT:
16686 for (i = 0, p = array; i < length; i++, p += elt_size)
16687 {
16688 rtx elt = CONST_VECTOR_ELT (rtl, i);
16689 insert_float (elt, p);
16690 }
16691 break;
16692
16693 default:
16694 gcc_unreachable ();
16695 }
16696
16697 loc_result = new_loc_descr (DW_OP_implicit_value,
16698 length * elt_size, 0);
16699 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16700 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16701 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16702 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16703 }
16704 break;
16705
16706 case CONST:
16707 if (mode == VOIDmode
16708 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16709 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16710 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16711 {
16712 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16713 break;
16714 }
16715 /* FALLTHROUGH */
16716 case SYMBOL_REF:
16717 if (!const_ok_for_output (rtl))
16718 break;
16719 /* FALLTHROUGH */
16720 case LABEL_REF:
16721 if (is_a <scalar_int_mode> (mode, &int_mode)
16722 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16723 && (dwarf_version >= 4 || !dwarf_strict))
16724 {
16725 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16726 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16727 vec_safe_push (used_rtx_array, rtl);
16728 }
16729 break;
16730
16731 case DEBUG_IMPLICIT_PTR:
16732 loc_result = implicit_ptr_descriptor (rtl, 0);
16733 break;
16734
16735 case PLUS:
16736 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16737 && CONST_INT_P (XEXP (rtl, 1)))
16738 {
16739 loc_result
16740 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16741 break;
16742 }
16743 /* FALLTHRU */
16744 do_default:
16745 default:
16746 if ((is_a <scalar_int_mode> (mode, &int_mode)
16747 && GET_MODE (rtl) == int_mode
16748 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16749 && dwarf_version >= 4)
16750 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16751 {
16752 /* Value expression. */
16753 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16754 if (loc_result)
16755 add_loc_descr (&loc_result,
16756 new_loc_descr (DW_OP_stack_value, 0, 0));
16757 }
16758 break;
16759 }
16760
16761 return loc_result;
16762 }
16763
16764 /* We need to figure out what section we should use as the base for the
16765 address ranges where a given location is valid.
16766 1. If this particular DECL has a section associated with it, use that.
16767 2. If this function has a section associated with it, use that.
16768 3. Otherwise, use the text section.
16769 XXX: If you split a variable across multiple sections, we won't notice. */
16770
16771 static const char *
16772 secname_for_decl (const_tree decl)
16773 {
16774 const char *secname;
16775
16776 if (VAR_OR_FUNCTION_DECL_P (decl)
16777 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16778 && DECL_SECTION_NAME (decl))
16779 secname = DECL_SECTION_NAME (decl);
16780 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16781 secname = DECL_SECTION_NAME (current_function_decl);
16782 else if (cfun && in_cold_section_p)
16783 secname = crtl->subsections.cold_section_label;
16784 else
16785 secname = text_section_label;
16786
16787 return secname;
16788 }
16789
16790 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16791
16792 static bool
16793 decl_by_reference_p (tree decl)
16794 {
16795 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16796 || VAR_P (decl))
16797 && DECL_BY_REFERENCE (decl));
16798 }
16799
16800 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16801 for VARLOC. */
16802
16803 static dw_loc_descr_ref
16804 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16805 enum var_init_status initialized)
16806 {
16807 int have_address = 0;
16808 dw_loc_descr_ref descr;
16809 machine_mode mode;
16810
16811 if (want_address != 2)
16812 {
16813 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16814 /* Single part. */
16815 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16816 {
16817 varloc = PAT_VAR_LOCATION_LOC (varloc);
16818 if (GET_CODE (varloc) == EXPR_LIST)
16819 varloc = XEXP (varloc, 0);
16820 mode = GET_MODE (varloc);
16821 if (MEM_P (varloc))
16822 {
16823 rtx addr = XEXP (varloc, 0);
16824 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16825 mode, initialized);
16826 if (descr)
16827 have_address = 1;
16828 else
16829 {
16830 rtx x = avoid_constant_pool_reference (varloc);
16831 if (x != varloc)
16832 descr = mem_loc_descriptor (x, mode, VOIDmode,
16833 initialized);
16834 }
16835 }
16836 else
16837 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16838 }
16839 else
16840 return 0;
16841 }
16842 else
16843 {
16844 if (GET_CODE (varloc) == VAR_LOCATION)
16845 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16846 else
16847 mode = DECL_MODE (loc);
16848 descr = loc_descriptor (varloc, mode, initialized);
16849 have_address = 1;
16850 }
16851
16852 if (!descr)
16853 return 0;
16854
16855 if (want_address == 2 && !have_address
16856 && (dwarf_version >= 4 || !dwarf_strict))
16857 {
16858 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16859 {
16860 expansion_failed (loc, NULL_RTX,
16861 "DWARF address size mismatch");
16862 return 0;
16863 }
16864 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16865 have_address = 1;
16866 }
16867 /* Report failure if we can't fill the request for an address. */
16868 if (want_address && !have_address)
16869 {
16870 expansion_failed (loc, NULL_RTX,
16871 "Want address and only have value");
16872 return 0;
16873 }
16874
16875 /* If we've got an address and don't want one, dereference. */
16876 if (!want_address && have_address)
16877 {
16878 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16879 enum dwarf_location_atom op;
16880
16881 if (size > DWARF2_ADDR_SIZE || size == -1)
16882 {
16883 expansion_failed (loc, NULL_RTX,
16884 "DWARF address size mismatch");
16885 return 0;
16886 }
16887 else if (size == DWARF2_ADDR_SIZE)
16888 op = DW_OP_deref;
16889 else
16890 op = DW_OP_deref_size;
16891
16892 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16893 }
16894
16895 return descr;
16896 }
16897
16898 /* Create a DW_OP_piece or DW_OP_bit_piece for BITSIZE at bit OFFSET, or
16899 return NULL if it is not possible. */
16900
16901 static dw_loc_descr_ref
16902 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16903 {
16904 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16905 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16906 else if (dwarf_version >= 3 || !dwarf_strict)
16907 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16908 else
16909 return NULL;
16910 }
16911
16912 /* Helper function for dw_loc_list. Compute a proper Dwarf location descriptor
16913 for the EXPR_LIST of var-location notes LOC of variable DECL that has been
      optimized by SRA. */
16914
16915 static dw_loc_descr_ref
16916 dw_sra_loc_expr (tree decl, rtx loc)
16917 {
16918 rtx p;
16919 unsigned HOST_WIDE_INT padsize = 0;
16920 dw_loc_descr_ref descr, *descr_tail;
16921 unsigned HOST_WIDE_INT decl_size;
16922 rtx varloc;
16923 enum var_init_status initialized;
16924
16925 if (DECL_SIZE (decl) == NULL
16926 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16927 return NULL;
16928
16929 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16930 descr = NULL;
16931 descr_tail = &descr;
16932
16933 for (p = loc; p; p = XEXP (p, 1))
16934 {
16935 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16936 rtx loc_note = *decl_piece_varloc_ptr (p);
16937 dw_loc_descr_ref cur_descr;
16938 dw_loc_descr_ref *tail, last = NULL;
16939 unsigned HOST_WIDE_INT opsize = 0;
16940
16941 if (loc_note == NULL_RTX
16942 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16943 {
16944 padsize += bitsize;
16945 continue;
16946 }
16947 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16948 varloc = NOTE_VAR_LOCATION (loc_note);
16949 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16950 if (cur_descr == NULL)
16951 {
16952 padsize += bitsize;
16953 continue;
16954 }
16955
16956 /* Check that cur_descr either doesn't use
16957 DW_OP_*piece operations, or their sum is equal
16958 to bitsize. Otherwise we can't embed it. */
16959 for (tail = &cur_descr; *tail != NULL;
16960 tail = &(*tail)->dw_loc_next)
16961 if ((*tail)->dw_loc_opc == DW_OP_piece)
16962 {
16963 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16964 * BITS_PER_UNIT;
16965 last = *tail;
16966 }
16967 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16968 {
16969 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16970 last = *tail;
16971 }
16972
16973 if (last != NULL && opsize != bitsize)
16974 {
16975 padsize += bitsize;
16976 /* Discard the current piece of the descriptor and release any
16977 addr_table entries it uses. */
16978 remove_loc_list_addr_table_entries (cur_descr);
16979 continue;
16980 }
16981
16982 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16983 expression, which means that those bits are optimized out. */
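/* For example, a lone DW_OP_piece covering 4 bytes, with no preceding
   location operations, tells the consumer that those 4 bytes have no
   location available.  */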
16984 if (padsize)
16985 {
16986 if (padsize > decl_size)
16987 {
16988 remove_loc_list_addr_table_entries (cur_descr);
16989 goto discard_descr;
16990 }
16991 decl_size -= padsize;
16992 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16993 if (*descr_tail == NULL)
16994 {
16995 remove_loc_list_addr_table_entries (cur_descr);
16996 goto discard_descr;
16997 }
16998 descr_tail = &(*descr_tail)->dw_loc_next;
16999 padsize = 0;
17000 }
17001 *descr_tail = cur_descr;
17002 descr_tail = tail;
17003 if (bitsize > decl_size)
17004 goto discard_descr;
17005 decl_size -= bitsize;
17006 if (last == NULL)
17007 {
17008 HOST_WIDE_INT offset = 0;
17009 if (GET_CODE (varloc) == VAR_LOCATION
17010 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17011 {
17012 varloc = PAT_VAR_LOCATION_LOC (varloc);
17013 if (GET_CODE (varloc) == EXPR_LIST)
17014 varloc = XEXP (varloc, 0);
17015 }
17016 do
17017 {
17018 if (GET_CODE (varloc) == CONST
17019 || GET_CODE (varloc) == SIGN_EXTEND
17020 || GET_CODE (varloc) == ZERO_EXTEND)
17021 varloc = XEXP (varloc, 0);
17022 else if (GET_CODE (varloc) == SUBREG)
17023 varloc = SUBREG_REG (varloc);
17024 else
17025 break;
17026 }
17027 while (1);
17028 /* The DW_OP_bit_piece offset should be zero for register
17029 or implicit location descriptions and for empty location
17030 descriptions, but for memory addresses it needs big-endian
17031 adjustment. */
17032 if (MEM_P (varloc))
17033 {
17034 unsigned HOST_WIDE_INT memsize;
17035 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17036 goto discard_descr;
17037 memsize *= BITS_PER_UNIT;
17038 if (memsize != bitsize)
17039 {
17040 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17041 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17042 goto discard_descr;
17043 if (memsize < bitsize)
17044 goto discard_descr;
17045 if (BITS_BIG_ENDIAN)
17046 offset = memsize - bitsize;
17047 }
17048 }
17049
17050 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17051 if (*descr_tail == NULL)
17052 goto discard_descr;
17053 descr_tail = &(*descr_tail)->dw_loc_next;
17054 }
17055 }
17056
17057 /* If there were any non-empty expressions, add padding till the end of
17058 the decl. */
17059 if (descr != NULL && decl_size != 0)
17060 {
17061 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17062 if (*descr_tail == NULL)
17063 goto discard_descr;
17064 }
17065 return descr;
17066
17067 discard_descr:
17068 /* Discard the descriptor and release any addr_table entries it uses. */
17069 remove_loc_list_addr_table_entries (descr);
17070 return NULL;
17071 }
17072
17073 /* Return the DWARF representation of the location list LOC_LIST of
17074 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17075 function. */
17076
17077 static dw_loc_list_ref
17078 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17079 {
17080 const char *endname, *secname;
17081 var_loc_view endview;
17082 rtx varloc;
17083 enum var_init_status initialized;
17084 struct var_loc_node *node;
17085 dw_loc_descr_ref descr;
17086 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17087 dw_loc_list_ref list = NULL;
17088 dw_loc_list_ref *listp = &list;
17089
17090 /* Now that we know what section we are using for a base,
17091 actually construct the list of locations.
17092 The first location information is what is passed to the
17093 function that creates the location list, and the remaining
17094 locations just get added on to that list.
17095 Note that we only know the start address for a location
17096 (i.e. where the location changes), so to build the range, we use
17097 the range [current location start, next location start].
17098 This means we have to special case the last node, and generate
17099 a range of [last location start, end of function label]. */
17100
17101 if (cfun && crtl->has_bb_partition)
17102 {
17103 bool save_in_cold_section_p = in_cold_section_p;
17104 in_cold_section_p = first_function_block_is_cold;
17105 if (loc_list->last_before_switch == NULL)
17106 in_cold_section_p = !in_cold_section_p;
17107 secname = secname_for_decl (decl);
17108 in_cold_section_p = save_in_cold_section_p;
17109 }
17110 else
17111 secname = secname_for_decl (decl);
17112
17113 for (node = loc_list->first; node; node = node->next)
17114 {
17115 bool range_across_switch = false;
17116 if (GET_CODE (node->loc) == EXPR_LIST
17117 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17118 {
17119 if (GET_CODE (node->loc) == EXPR_LIST)
17120 {
17121 descr = NULL;
17122 /* This requires DW_OP_{,bit_}piece, which is not usable
17123 inside DWARF expressions. */
17124 if (want_address == 2)
17125 descr = dw_sra_loc_expr (decl, node->loc);
17126 }
17127 else
17128 {
17129 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17130 varloc = NOTE_VAR_LOCATION (node->loc);
17131 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17132 }
17133 if (descr)
17134 {
17135 /* If section switch happens in between node->label
17136 and node->next->label (or end of function) and
17137 we can't emit it as a single entry list,
17138 emit two ranges, first one ending at the end
17139 of first partition and second one starting at the
17140 beginning of second partition. */
17141 if (node == loc_list->last_before_switch
17142 && (node != loc_list->first || loc_list->first->next
17143 /* If we are to emit a view number, we will emit
17144 a loclist rather than a single location
17145 expression for the entire function (see
17146 loc_list_has_views), so we have to split the
17147 range that straddles across partitions. */
17148 || !ZERO_VIEW_P (node->view))
17149 && current_function_decl)
17150 {
17151 endname = cfun->fde->dw_fde_end;
17152 endview = 0;
17153 range_across_switch = true;
17154 }
17155 /* The variable has a location between NODE->LABEL and
17156 NODE->NEXT->LABEL. */
17157 else if (node->next)
17158 endname = node->next->label, endview = node->next->view;
17159 /* If the variable has a location at the last label
17160 it keeps its location until the end of function. */
17161 else if (!current_function_decl)
17162 endname = text_end_label, endview = 0;
17163 else
17164 {
17165 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17166 current_function_funcdef_no);
17167 endname = ggc_strdup (label_id);
17168 endview = 0;
17169 }
17170
17171 *listp = new_loc_list (descr, node->label, node->view,
17172 endname, endview, secname);
17173 if (TREE_CODE (decl) == PARM_DECL
17174 && node == loc_list->first
17175 && NOTE_P (node->loc)
17176 && strcmp (node->label, endname) == 0)
17177 (*listp)->force = true;
17178 listp = &(*listp)->dw_loc_next;
17179 }
17180 }
17181
17182 if (cfun
17183 && crtl->has_bb_partition
17184 && node == loc_list->last_before_switch)
17185 {
17186 bool save_in_cold_section_p = in_cold_section_p;
17187 in_cold_section_p = !first_function_block_is_cold;
17188 secname = secname_for_decl (decl);
17189 in_cold_section_p = save_in_cold_section_p;
17190 }
17191
17192 if (range_across_switch)
17193 {
17194 if (GET_CODE (node->loc) == EXPR_LIST)
17195 descr = dw_sra_loc_expr (decl, node->loc);
17196 else
17197 {
17198 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17199 varloc = NOTE_VAR_LOCATION (node->loc);
17200 descr = dw_loc_list_1 (decl, varloc, want_address,
17201 initialized);
17202 }
17203 gcc_assert (descr);
17204 /* The variable has a location between NODE->LABEL and
17205 NODE->NEXT->LABEL. */
17206 if (node->next)
17207 endname = node->next->label, endview = node->next->view;
17208 else
17209 endname = cfun->fde->dw_fde_second_end, endview = 0;
17210 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17211 endname, endview, secname);
17212 listp = &(*listp)->dw_loc_next;
17213 }
17214 }
17215
17216 /* Try to avoid the overhead of a location list by emitting a location
17217 expression instead, but only if we didn't have more than one
17218 location entry in the first place. If some entries were not
17219 representable, we don't want to pretend that the single entry that
17220 was representable applies to the entire scope in which the variable
17221 is available. */
17222 if (list && loc_list->first->next)
17223 gen_llsym (list);
17224 else
17225 maybe_gen_llsym (list);
17226
17227 return list;
17228 }
17229
17230 /* Return true if the loc_list has only a single element and thus can be
17231 represented as a location description. */
17232
17233 static bool
17234 single_element_loc_list_p (dw_loc_list_ref list)
17235 {
17236 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17237 return !list->ll_symbol;
17238 }
17239
17240 /* Duplicate a single element of a location list. */
17241
17242 static inline dw_loc_descr_ref
17243 copy_loc_descr (dw_loc_descr_ref ref)
17244 {
17245 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17246 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17247 return copy;
17248 }
17249
17250 /* To each location in list LIST append loc descr REF. */
17251
17252 static void
17253 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17254 {
17255 dw_loc_descr_ref copy;
17256 add_loc_descr (&list->expr, ref);
17257 list = list->dw_loc_next;
17258 while (list)
17259 {
17260 copy = copy_loc_descr (ref);
17261 add_loc_descr (&list->expr, copy);
17262 while (copy->dw_loc_next)
17263 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17264 list = list->dw_loc_next;
17265 }
17266 }
17267
17268 /* To each location in list LIST prepend loc descr REF. */
17269
17270 static void
17271 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17272 {
17273 dw_loc_descr_ref copy;
17274 dw_loc_descr_ref ref_end = list->expr;
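/* Remember where the original expression of the first element starts;
   when REF is copied for the remaining list elements below, the copy
   stops at this point and is then spliced onto each element's own
   expression.  */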
17275 add_loc_descr (&ref, list->expr);
17276 list->expr = ref;
17277 list = list->dw_loc_next;
17278 while (list)
17279 {
17280 dw_loc_descr_ref end = list->expr;
17281 list->expr = copy = copy_loc_descr (ref);
17282 while (copy->dw_loc_next != ref_end)
17283 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17284 copy->dw_loc_next = end;
17285 list = list->dw_loc_next;
17286 }
17287 }
17288
17289 /* Given two lists RET and LIST, produce a location list that is the result
17290 of adding the expression in LIST to the expression in RET at each
17291 position in the program.
17292 Might be destructive on both RET and LIST.
17293
17294 TODO: We handle only the simple cases of RET or LIST having at most one
17295 element. The general case would involve sorting the lists in program order
17296 and merging them, which will need some additional work.
17297 Adding that will improve the quality of debug info, especially for SRA-ed
17298 structures. */
17299
17300 static void
17301 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17302 {
17303 if (!list)
17304 return;
17305 if (!*ret)
17306 {
17307 *ret = list;
17308 return;
17309 }
17310 if (!list->dw_loc_next)
17311 {
17312 add_loc_descr_to_each (*ret, list->expr);
17313 return;
17314 }
17315 if (!(*ret)->dw_loc_next)
17316 {
17317 prepend_loc_descr_to_each (list, (*ret)->expr);
17318 *ret = list;
17319 return;
17320 }
17321 expansion_failed (NULL_TREE, NULL_RTX,
17322 "Don't know how to merge two non-trivial"
17323 " location lists.\n");
17324 *ret = NULL;
17325 return;
17326 }
17327
17328 /* LOC is a constant expression. Try our luck: look it up in the constant
17329 pool and return a loc_descr for its address. */
17330
17331 static dw_loc_descr_ref
17332 cst_pool_loc_descr (tree loc)
17333 {
17334 /* Get an RTL for this, if something has been emitted. */
17335 rtx rtl = lookup_constant_def (loc);
17336
17337 if (!rtl || !MEM_P (rtl))
17338 {
17339 gcc_assert (!rtl);
17340 return 0;
17341 }
17342 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17343
17344 /* TODO: We might get more coverage if we were actually delaying expansion
17345 of all expressions until the end of compilation, when constant pools are
17346 fully populated. */
17347 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17348 {
17349 expansion_failed (loc, NULL_RTX,
17350 "CST value in contant pool but not marked.");
17351 return 0;
17352 }
17353 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17354 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17355 }
17356
17357 /* Return a dw_loc_list representing the address of ADDR_EXPR LOC
17358 by looking for an inner INDIRECT_REF expression and turning
17359 it into simple arithmetic.
17360
17361 See loc_list_from_tree for the meaning of CONTEXT. */
17362
17363 static dw_loc_list_ref
17364 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17365 loc_descr_context *context)
17366 {
17367 tree obj, offset;
17368 poly_int64 bitsize, bitpos, bytepos;
17369 machine_mode mode;
17370 int unsignedp, reversep, volatilep = 0;
17371 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17372
17373 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17374 &bitsize, &bitpos, &offset, &mode,
17375 &unsignedp, &reversep, &volatilep);
17376 STRIP_NOPS (obj);
17377 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17378 {
17379 expansion_failed (loc, NULL_RTX, "bitfield access");
17380 return 0;
17381 }
17382 if (!INDIRECT_REF_P (obj))
17383 {
17384 expansion_failed (obj,
17385 NULL_RTX, "no indirect ref in inner refrence");
17386 return 0;
17387 }
17388 if (!offset && known_eq (bitpos, 0))
17389 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17390 context);
17391 else if (toplev
17392 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17393 && (dwarf_version >= 4 || !dwarf_strict))
17394 {
17395 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17396 if (!list_ret)
17397 return 0;
17398 if (offset)
17399 {
17400 /* Variable offset. */
17401 list_ret1 = loc_list_from_tree (offset, 0, context);
17402 if (list_ret1 == 0)
17403 return 0;
17404 add_loc_list (&list_ret, list_ret1);
17405 if (!list_ret)
17406 return 0;
17407 add_loc_descr_to_each (list_ret,
17408 new_loc_descr (DW_OP_plus, 0, 0));
17409 }
17410 HOST_WIDE_INT value;
17411 if (bytepos.is_constant (&value) && value > 0)
17412 add_loc_descr_to_each (list_ret,
17413 new_loc_descr (DW_OP_plus_uconst, value, 0));
17414 else if (maybe_ne (bytepos, 0))
17415 loc_list_plus_const (list_ret, bytepos);
17416 add_loc_descr_to_each (list_ret,
17417 new_loc_descr (DW_OP_stack_value, 0, 0));
17418 }
17419 return list_ret;
17420 }
17421
17422 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17423 all operations from LOC are nops, move to the last one. Insert into NOPS all
17424 operations that are skipped. */
17425
17426 static void
17427 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17428 hash_set<dw_loc_descr_ref> &nops)
17429 {
17430 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17431 {
17432 nops.add (loc);
17433 loc = loc->dw_loc_next;
17434 }
17435 }
17436
17437 /* Helper for loc_descr_without_nops: free the location description operation
17438 LOC. */
17439
17440 bool
17441 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17442 {
17443 ggc_free (loc);
17444 return true;
17445 }
17446
17447 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17448 finishes LOC. */
17449
17450 static void
17451 loc_descr_without_nops (dw_loc_descr_ref &loc)
17452 {
17453 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17454 return;
17455
17456 /* Set of all DW_OP_nop operations we remove. */
17457 hash_set<dw_loc_descr_ref> nops;
17458
17459 /* First, strip all prefix NOP operations in order to keep the head of the
17460 operations list. */
17461 loc_descr_to_next_no_nop (loc, nops);
17462
17463 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17464 {
17465 /* For control flow operations: strip "prefix" nops in destination
17466 labels. */
17467 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17468 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17469 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17470 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17471
17472 /* Do the same for the operations that follow, then move to the next
17473 iteration. */
17474 if (cur->dw_loc_next != NULL)
17475 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17476 cur = cur->dw_loc_next;
17477 }
17478
17479 nops.traverse<void *, free_loc_descr> (NULL);
17480 }
17481
17482
17483 struct dwarf_procedure_info;
17484
17485 /* Helper structure for location descriptions generation. */
17486 struct loc_descr_context
17487 {
17488 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17489 NULL_TREE if DW_OP_push_object_address is invalid for this location
17490 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17491 tree context_type;
17492 /* The ..._DECL node that should be translated as a
17493 DW_OP_push_object_address operation. */
17494 tree base_decl;
17495 /* Information about the DWARF procedure we are currently generating. NULL if
17496 we are not generating a DWARF procedure. */
17497 struct dwarf_procedure_info *dpi;
17498 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17499 by consumer. Used for DW_TAG_generic_subrange attributes. */
17500 bool placeholder_arg;
17501 /* True if PLACEHOLDER_EXPR has been seen. */
17502 bool placeholder_seen;
17503 };
17504
17505 /* DWARF procedures generation
17506
17507 DWARF expressions (aka. location descriptions) are used to encode variable
17508 quantities such as sizes or offsets. Such computations can have redundant parts
17509 that can be factorized in order to reduce the size of the output debug
17510 information. This is the whole point of DWARF procedures.
17511
17512 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17513 already factorized into functions ("size functions") in order to handle very
17514 big and complex types. Such functions are quite simple: they have integral
17515 arguments, they return an integral result and their body contains only a
17516 return statement with arithmetic expressions. This is the only kind of
17517 function we are interested in translating into DWARF procedures, here.
17518
17519 DWARF expressions and DWARF procedures are executed using a stack, so we have
17520 to define some calling convention for them to interact. Let's say that:
17521
17522 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17523 all arguments in reverse order (right-to-left) so that when the DWARF
17524 procedure execution starts, the first argument is the top of the stack.
17525
17526 - Then, when returning, the DWARF procedure must have consumed all arguments
17527 on the stack, must have pushed the result and touched nothing else.
17528
17529 - Each argument and the result have integral types that can be held in a
17530 single stack slot.
17531
17532 - We call "frame offset" the number of stack slots that are "under DWARF
17533 procedure control": it includes the arguments slots, the temporaries and
17534 the result slot. Thus, it is equal to the number of arguments when the
17535 procedure execution starts and must be equal to one (the result) when it
17536 returns. */
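/* As an illustration of this convention (the two-argument size function
   SZ and its arguments A and B are hypothetical), a caller evaluating
   SZ (A, B) pushes B, then A, then emits a DW_OP_call4 referencing SZ's
   DIE.  The generated procedure body and epilogue would then roughly
   behave as follows (stack shown with its top on the right):

     on entry (frame offset 2)                  B A
     DW_OP_dup        (fetch first argument)    B A A
     DW_OP_pick 2     (fetch second argument)   B A A B
     DW_OP_plus                                 B A A+B
     DW_OP_swap  DW_OP_drop                     B A+B
     DW_OP_swap  DW_OP_drop                     A+B

   On return the frame offset is 1 (just the result), so the net stack
   effect recorded for callers is 1 - args_count = -1.  */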
17537
17538 /* Helper structure used when generating operations for a DWARF procedure. */
17539 struct dwarf_procedure_info
17540 {
17541 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17542 currently translated. */
17543 tree fndecl;
17544 /* The number of arguments FNDECL takes. */
17545 unsigned args_count;
17546 };
17547
17548 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17549 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17550 equate it to this DIE. */
17551
17552 static dw_die_ref
17553 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17554 dw_die_ref parent_die)
17555 {
17556 dw_die_ref dwarf_proc_die;
17557
17558 if ((dwarf_version < 3 && dwarf_strict)
17559 || location == NULL)
17560 return NULL;
17561
17562 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17563 if (fndecl)
17564 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17565 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17566 return dwarf_proc_die;
17567 }
17568
17569 /* Return whether TYPE is a supported type as a DWARF procedure argument
17570 type or return type (we handle only scalar types and pointer types that
17571 aren't wider than the DWARF expression evaluation stack). */
17572
17573 static bool
17574 is_handled_procedure_type (tree type)
17575 {
17576 return ((INTEGRAL_TYPE_P (type)
17577 || TREE_CODE (type) == OFFSET_TYPE
17578 || TREE_CODE (type) == POINTER_TYPE)
17579 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17580 }
17581
17582 /* Helper for resolve_args_picking: do the same but stop when coming across
17583 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17584 offset *before* evaluating the corresponding operation. */
17585
17586 static bool
17587 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17588 struct dwarf_procedure_info *dpi,
17589 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17590 {
17591 /* The "frame_offset" identifier is already used to name a macro... */
17592 unsigned frame_offset_ = initial_frame_offset;
17593 dw_loc_descr_ref l;
17594
17595 for (l = loc; l != NULL;)
17596 {
17597 bool existed;
17598 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17599
17600 /* If we already met this node, there is nothing to compute anymore. */
17601 if (existed)
17602 {
17603 /* Make sure that the stack size is consistent wherever the execution
17604 flow comes from. */
17605 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17606 break;
17607 }
17608 l_frame_offset = frame_offset_;
17609
17610 /* If needed, relocate the picking offset with respect to the frame
17611 offset. */
17612 if (l->frame_offset_rel)
17613 {
17614 unsigned HOST_WIDE_INT off;
17615 switch (l->dw_loc_opc)
17616 {
17617 case DW_OP_pick:
17618 off = l->dw_loc_oprnd1.v.val_unsigned;
17619 break;
17620 case DW_OP_dup:
17621 off = 0;
17622 break;
17623 case DW_OP_over:
17624 off = 1;
17625 break;
17626 default:
17627 gcc_unreachable ();
17628 }
17629 /* frame_offset_ is the size of the current stack frame, including
17630 incoming arguments. Besides, the arguments are pushed
17631 right-to-left. Thus, in order to access the Nth argument from
17632 this operation node, the picking has to skip temporaries *plus*
17633 one stack slot per argument (0 for the first one, 1 for the second
17634 one, etc.).
17635
17636 The targeted argument number (N) is already set as the operand,
17637 and the number of temporaries can be computed with:
17638 frame_offset_ - dpi->args_count */
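/* For instance (purely hypothetical numbers): with args_count == 2, a
   DW_OP_pick 1 denoting the second argument that is reached while
   frame_offset_ == 3 (the two argument slots plus one temporary) is
   rewritten below as DW_OP_pick (1 + 3 - 2) == DW_OP_pick 2, so that it
   also skips the temporary sitting on top of the stack.  */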
17639 off += frame_offset_ - dpi->args_count;
17640
17641 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17642 if (off > 255)
17643 return false;
17644
17645 if (off == 0)
17646 {
17647 l->dw_loc_opc = DW_OP_dup;
17648 l->dw_loc_oprnd1.v.val_unsigned = 0;
17649 }
17650 else if (off == 1)
17651 {
17652 l->dw_loc_opc = DW_OP_over;
17653 l->dw_loc_oprnd1.v.val_unsigned = 0;
17654 }
17655 else
17656 {
17657 l->dw_loc_opc = DW_OP_pick;
17658 l->dw_loc_oprnd1.v.val_unsigned = off;
17659 }
17660 }
17661
17662 /* Update frame_offset according to the effect the current operation has
17663 on the stack. */
17664 switch (l->dw_loc_opc)
17665 {
17666 case DW_OP_deref:
17667 case DW_OP_swap:
17668 case DW_OP_rot:
17669 case DW_OP_abs:
17670 case DW_OP_neg:
17671 case DW_OP_not:
17672 case DW_OP_plus_uconst:
17673 case DW_OP_skip:
17674 case DW_OP_reg0:
17675 case DW_OP_reg1:
17676 case DW_OP_reg2:
17677 case DW_OP_reg3:
17678 case DW_OP_reg4:
17679 case DW_OP_reg5:
17680 case DW_OP_reg6:
17681 case DW_OP_reg7:
17682 case DW_OP_reg8:
17683 case DW_OP_reg9:
17684 case DW_OP_reg10:
17685 case DW_OP_reg11:
17686 case DW_OP_reg12:
17687 case DW_OP_reg13:
17688 case DW_OP_reg14:
17689 case DW_OP_reg15:
17690 case DW_OP_reg16:
17691 case DW_OP_reg17:
17692 case DW_OP_reg18:
17693 case DW_OP_reg19:
17694 case DW_OP_reg20:
17695 case DW_OP_reg21:
17696 case DW_OP_reg22:
17697 case DW_OP_reg23:
17698 case DW_OP_reg24:
17699 case DW_OP_reg25:
17700 case DW_OP_reg26:
17701 case DW_OP_reg27:
17702 case DW_OP_reg28:
17703 case DW_OP_reg29:
17704 case DW_OP_reg30:
17705 case DW_OP_reg31:
17706 case DW_OP_bregx:
17707 case DW_OP_piece:
17708 case DW_OP_deref_size:
17709 case DW_OP_nop:
17710 case DW_OP_bit_piece:
17711 case DW_OP_implicit_value:
17712 case DW_OP_stack_value:
17713 break;
17714
17715 case DW_OP_addr:
17716 case DW_OP_const1u:
17717 case DW_OP_const1s:
17718 case DW_OP_const2u:
17719 case DW_OP_const2s:
17720 case DW_OP_const4u:
17721 case DW_OP_const4s:
17722 case DW_OP_const8u:
17723 case DW_OP_const8s:
17724 case DW_OP_constu:
17725 case DW_OP_consts:
17726 case DW_OP_dup:
17727 case DW_OP_over:
17728 case DW_OP_pick:
17729 case DW_OP_lit0:
17730 case DW_OP_lit1:
17731 case DW_OP_lit2:
17732 case DW_OP_lit3:
17733 case DW_OP_lit4:
17734 case DW_OP_lit5:
17735 case DW_OP_lit6:
17736 case DW_OP_lit7:
17737 case DW_OP_lit8:
17738 case DW_OP_lit9:
17739 case DW_OP_lit10:
17740 case DW_OP_lit11:
17741 case DW_OP_lit12:
17742 case DW_OP_lit13:
17743 case DW_OP_lit14:
17744 case DW_OP_lit15:
17745 case DW_OP_lit16:
17746 case DW_OP_lit17:
17747 case DW_OP_lit18:
17748 case DW_OP_lit19:
17749 case DW_OP_lit20:
17750 case DW_OP_lit21:
17751 case DW_OP_lit22:
17752 case DW_OP_lit23:
17753 case DW_OP_lit24:
17754 case DW_OP_lit25:
17755 case DW_OP_lit26:
17756 case DW_OP_lit27:
17757 case DW_OP_lit28:
17758 case DW_OP_lit29:
17759 case DW_OP_lit30:
17760 case DW_OP_lit31:
17761 case DW_OP_breg0:
17762 case DW_OP_breg1:
17763 case DW_OP_breg2:
17764 case DW_OP_breg3:
17765 case DW_OP_breg4:
17766 case DW_OP_breg5:
17767 case DW_OP_breg6:
17768 case DW_OP_breg7:
17769 case DW_OP_breg8:
17770 case DW_OP_breg9:
17771 case DW_OP_breg10:
17772 case DW_OP_breg11:
17773 case DW_OP_breg12:
17774 case DW_OP_breg13:
17775 case DW_OP_breg14:
17776 case DW_OP_breg15:
17777 case DW_OP_breg16:
17778 case DW_OP_breg17:
17779 case DW_OP_breg18:
17780 case DW_OP_breg19:
17781 case DW_OP_breg20:
17782 case DW_OP_breg21:
17783 case DW_OP_breg22:
17784 case DW_OP_breg23:
17785 case DW_OP_breg24:
17786 case DW_OP_breg25:
17787 case DW_OP_breg26:
17788 case DW_OP_breg27:
17789 case DW_OP_breg28:
17790 case DW_OP_breg29:
17791 case DW_OP_breg30:
17792 case DW_OP_breg31:
17793 case DW_OP_fbreg:
17794 case DW_OP_push_object_address:
17795 case DW_OP_call_frame_cfa:
17796 case DW_OP_GNU_variable_value:
17797 ++frame_offset_;
17798 break;
17799
17800 case DW_OP_drop:
17801 case DW_OP_xderef:
17802 case DW_OP_and:
17803 case DW_OP_div:
17804 case DW_OP_minus:
17805 case DW_OP_mod:
17806 case DW_OP_mul:
17807 case DW_OP_or:
17808 case DW_OP_plus:
17809 case DW_OP_shl:
17810 case DW_OP_shr:
17811 case DW_OP_shra:
17812 case DW_OP_xor:
17813 case DW_OP_bra:
17814 case DW_OP_eq:
17815 case DW_OP_ge:
17816 case DW_OP_gt:
17817 case DW_OP_le:
17818 case DW_OP_lt:
17819 case DW_OP_ne:
17820 case DW_OP_regx:
17821 case DW_OP_xderef_size:
17822 --frame_offset_;
17823 break;
17824
17825 case DW_OP_call2:
17826 case DW_OP_call4:
17827 case DW_OP_call_ref:
17828 {
17829 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17830 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17831
17832 if (stack_usage == NULL)
17833 return false;
17834 frame_offset_ += *stack_usage;
17835 break;
17836 }
17837
17838 case DW_OP_implicit_pointer:
17839 case DW_OP_entry_value:
17840 case DW_OP_const_type:
17841 case DW_OP_regval_type:
17842 case DW_OP_deref_type:
17843 case DW_OP_convert:
17844 case DW_OP_reinterpret:
17845 case DW_OP_form_tls_address:
17846 case DW_OP_GNU_push_tls_address:
17847 case DW_OP_GNU_uninit:
17848 case DW_OP_GNU_encoded_addr:
17849 case DW_OP_GNU_implicit_pointer:
17850 case DW_OP_GNU_entry_value:
17851 case DW_OP_GNU_const_type:
17852 case DW_OP_GNU_regval_type:
17853 case DW_OP_GNU_deref_type:
17854 case DW_OP_GNU_convert:
17855 case DW_OP_GNU_reinterpret:
17856 case DW_OP_GNU_parameter_ref:
17857 /* loc_list_from_tree will probably not output these operations for
17858 size functions, so assume they will not appear here. */
17859 /* Fall through... */
17860
17861 default:
17862 gcc_unreachable ();
17863 }
17864
17865 /* Now, follow the control flow (except subroutine calls). */
17866 switch (l->dw_loc_opc)
17867 {
17868 case DW_OP_bra:
17869 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17870 frame_offsets))
17871 return false;
17872 /* Fall through. */
17873
17874 case DW_OP_skip:
17875 l = l->dw_loc_oprnd1.v.val_loc;
17876 break;
17877
17878 case DW_OP_stack_value:
17879 return true;
17880
17881 default:
17882 l = l->dw_loc_next;
17883 break;
17884 }
17885 }
17886
17887 return true;
17888 }
17889
17890 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17891 operations) in order to resolve the operand of DW_OP_pick operations that
17892 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17893 offset *before* LOC is executed.  Return whether all relocations were
17894 successful. */
17895
17896 static bool
17897 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17898 struct dwarf_procedure_info *dpi)
17899 {
17900 /* Associate to all visited operations the frame offset *before* evaluating
17901 this operation. */
17902 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17903
17904 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17905 frame_offsets);
17906 }
17907
17908 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17909 Return NULL if it is not possible. */
17910
17911 static dw_die_ref
17912 function_to_dwarf_procedure (tree fndecl)
17913 {
17914 struct loc_descr_context ctx;
17915 struct dwarf_procedure_info dpi;
17916 dw_die_ref dwarf_proc_die;
17917 tree tree_body = DECL_SAVED_TREE (fndecl);
17918 dw_loc_descr_ref loc_body, epilogue;
17919
17920 tree cursor;
17921 unsigned i;
17922
17923 /* Do not generate multiple DWARF procedures for the same function
17924 declaration. */
17925 dwarf_proc_die = lookup_decl_die (fndecl);
17926 if (dwarf_proc_die != NULL)
17927 return dwarf_proc_die;
17928
17929 /* DWARF procedures are available starting with the DWARFv3 standard. */
17930 if (dwarf_version < 3 && dwarf_strict)
17931 return NULL;
17932
17933 /* We handle only functions for which we still have a body, that return a
17934 supported type and that take arguments with supported types.  Note that
17935 there is no point translating functions that return nothing. */
17936 if (tree_body == NULL_TREE
17937 || DECL_RESULT (fndecl) == NULL_TREE
17938 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17939 return NULL;
17940
17941 for (cursor = DECL_ARGUMENTS (fndecl);
17942 cursor != NULL_TREE;
17943 cursor = TREE_CHAIN (cursor))
17944 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17945 return NULL;
17946
17947 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17948 if (TREE_CODE (tree_body) != RETURN_EXPR)
17949 return NULL;
17950 tree_body = TREE_OPERAND (tree_body, 0);
17951 if (TREE_CODE (tree_body) != MODIFY_EXPR
17952 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17953 return NULL;
17954 tree_body = TREE_OPERAND (tree_body, 1);
17955
17956 /* Try to translate the body expression itself. Note that this will probably
17957 cause an infinite recursion if its call graph has a cycle. This is very
17958 unlikely for size functions, however, so don't bother with such things at
17959 the moment. */
17960 ctx.context_type = NULL_TREE;
17961 ctx.base_decl = NULL_TREE;
17962 ctx.dpi = &dpi;
17963 ctx.placeholder_arg = false;
17964 ctx.placeholder_seen = false;
17965 dpi.fndecl = fndecl;
17966 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17967 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17968 if (!loc_body)
17969 return NULL;
17970
17971 /* After evaluating all operands in "loc_body", we should still have on the
17972 stack all arguments plus the desired function result (top of the stack).
17973 Generate code in order to keep only the result in our stack frame. */
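  /* For instance (hypothetical two-argument case, stack top on the right,
     a0 being the first argument and R the result pushed by the body):

	a1 a0 R  --swap-->  a1 R a0  --drop-->  a1 R
	a1 R     --swap-->  R a1     --drop-->  R			*/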
17974 epilogue = NULL;
17975 for (i = 0; i < dpi.args_count; ++i)
17976 {
17977 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17978 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17979 op_couple->dw_loc_next->dw_loc_next = epilogue;
17980 epilogue = op_couple;
17981 }
17982 add_loc_descr (&loc_body, epilogue);
17983 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17984 return NULL;
17985
17986 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17987 because they are considered useful.  Now that there is an epilogue, they are
17988 not useful anymore, so give it another try. */
17989 loc_descr_without_nops (loc_body);
17990
17991 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17992 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17993 though, given that size functions do not come from source, so they should
17994 not have a dedicated DW_TAG_subprogram DIE. */
17995 dwarf_proc_die
17996 = new_dwarf_proc_die (loc_body, fndecl,
17997 get_context_die (DECL_CONTEXT (fndecl)));
17998
17999 /* The called DWARF procedure consumes one stack slot per argument and
18000 returns one stack slot. */
18001 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18002
18003 return dwarf_proc_die;
18004 }
18005
18006
18007 /* Generate Dwarf location list representing LOC.
18008 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18009 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18010 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18011 description is returned (i.e. DW_OP_reg can be used
18012 to refer to register values).
18013
18014 CONTEXT provides information to customize the location descriptions
18015 generation. Its context_type field specifies what type is implicitly
18016 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18017 will not be generated.
18018
18019 Its DPI field determines whether we are generating a DWARF expression for a
18020 DWARF procedure, so PARM_DECL references are processed specifically.
18021
18022 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18023 and dpi fields were null. */
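/* As a rough illustration (V is a hypothetical variable stored at frame
   offset OFF): WANT_ADDRESS == 1 may yield "DW_OP_fbreg OFF";
   WANT_ADDRESS == 0 may yield its value, e.g. "DW_OP_fbreg OFF; DW_OP_deref";
   WANT_ADDRESS == 2 may in addition yield register locations such as
   DW_OP_reg<n> when V lives in a register.  */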
18024
18025 static dw_loc_list_ref
18026 loc_list_from_tree_1 (tree loc, int want_address,
18027 struct loc_descr_context *context)
18028 {
18029 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18030 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18031 int have_address = 0;
18032 enum dwarf_location_atom op;
18033
18034 /* ??? Most of the time we do not take proper care of sign/zero
18035 extending the values.  Hopefully this won't be a real
18036 problem... */
18037
18038 if (context != NULL
18039 && context->base_decl == loc
18040 && want_address == 0)
18041 {
18042 if (dwarf_version >= 3 || !dwarf_strict)
18043 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18044 NULL, 0, NULL, 0, NULL);
18045 else
18046 return NULL;
18047 }
18048
18049 switch (TREE_CODE (loc))
18050 {
18051 case ERROR_MARK:
18052 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18053 return 0;
18054
18055 case PLACEHOLDER_EXPR:
18056 /* This case involves extracting fields from an object to determine the
18057 position of other fields. It is supposed to appear only as the first
18058 operand of COMPONENT_REF nodes and to reference precisely the type
18059 that the context allows. */
18060 if (context != NULL
18061 && TREE_TYPE (loc) == context->context_type
18062 && want_address >= 1)
18063 {
18064 if (dwarf_version >= 3 || !dwarf_strict)
18065 {
18066 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18067 have_address = 1;
18068 break;
18069 }
18070 else
18071 return NULL;
18072 }
18073 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18074 the single argument passed by consumer. */
18075 else if (context != NULL
18076 && context->placeholder_arg
18077 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18078 && want_address == 0)
18079 {
18080 ret = new_loc_descr (DW_OP_pick, 0, 0);
18081 ret->frame_offset_rel = 1;
18082 context->placeholder_seen = true;
18083 break;
18084 }
18085 else
18086 expansion_failed (loc, NULL_RTX,
18087 "PLACEHOLDER_EXPR for an unexpected type");
18088 break;
18089
18090 case CALL_EXPR:
18091 {
18092 const int nargs = call_expr_nargs (loc);
18093 tree callee = get_callee_fndecl (loc);
18094 int i;
18095 dw_die_ref dwarf_proc;
18096
18097 if (callee == NULL_TREE)
18098 goto call_expansion_failed;
18099
18100 /* We handle only functions that return an integer. */
18101 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18102 goto call_expansion_failed;
18103
18104 dwarf_proc = function_to_dwarf_procedure (callee);
18105 if (dwarf_proc == NULL)
18106 goto call_expansion_failed;
18107
18108 /* Evaluate arguments right-to-left so that the first argument will
18109 be the top-most one on the stack. */
18110 for (i = nargs - 1; i >= 0; --i)
18111 {
18112 dw_loc_descr_ref loc_descr
18113 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18114 context);
18115
18116 if (loc_descr == NULL)
18117 goto call_expansion_failed;
18118
18119 add_loc_descr (&ret, loc_descr);
18120 }
18121
18122 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18123 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18124 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18125 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18126 add_loc_descr (&ret, ret1);
18127 break;
18128
18129 call_expansion_failed:
18130 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18131 /* There are no opcodes for these operations. */
18132 return 0;
18133 }
18134
18135 case PREINCREMENT_EXPR:
18136 case PREDECREMENT_EXPR:
18137 case POSTINCREMENT_EXPR:
18138 case POSTDECREMENT_EXPR:
18139 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18140 /* There are no opcodes for these operations. */
18141 return 0;
18142
18143 case ADDR_EXPR:
18144 /* If we already want an address, see if there is INDIRECT_REF inside
18145 e.g. for &this->field. */
18146 if (want_address)
18147 {
18148 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18149 (loc, want_address == 2, context);
18150 if (list_ret)
18151 have_address = 1;
18152 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18153 && (ret = cst_pool_loc_descr (loc)))
18154 have_address = 1;
18155 }
18156 /* Otherwise, process the argument and look for the address. */
18157 if (!list_ret && !ret)
18158 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18159 else
18160 {
18161 if (want_address)
18162 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18163 return NULL;
18164 }
18165 break;
18166
18167 case VAR_DECL:
18168 if (DECL_THREAD_LOCAL_P (loc))
18169 {
18170 rtx rtl;
18171 enum dwarf_location_atom tls_op;
18172 enum dtprel_bool dtprel = dtprel_false;
18173
18174 if (targetm.have_tls)
18175 {
18176 /* If this is not defined, we have no way to emit the
18177 data. */
18178 if (!targetm.asm_out.output_dwarf_dtprel)
18179 return 0;
18180
18181 /* The way DW_OP_GNU_push_tls_address is specified, we
18182 can only look up addresses of objects in the current
18183 module. We used DW_OP_addr as first op, but that's
18184 wrong, because DW_OP_addr is relocated by the debug
18185 info consumer, while DW_OP_GNU_push_tls_address
18186 operand shouldn't be. */
18187 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18188 return 0;
18189 dtprel = dtprel_true;
18190 /* We check for DWARF 5 here because gdb did not implement
18191 DW_OP_form_tls_address until after 7.12. */
18192 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18193 : DW_OP_GNU_push_tls_address);
18194 }
18195 else
18196 {
18197 if (!targetm.emutls.debug_form_tls_address
18198 || !(dwarf_version >= 3 || !dwarf_strict))
18199 return 0;
18200 /* We stuffed the control variable into the DECL_VALUE_EXPR
18201 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18202 no longer appear in gimple code. We used the control
18203 variable specifically so that we could pick it up here. */
18204 loc = DECL_VALUE_EXPR (loc);
18205 tls_op = DW_OP_form_tls_address;
18206 }
18207
18208 rtl = rtl_for_decl_location (loc);
18209 if (rtl == NULL_RTX)
18210 return 0;
18211
18212 if (!MEM_P (rtl))
18213 return 0;
18214 rtl = XEXP (rtl, 0);
18215 if (! CONSTANT_P (rtl))
18216 return 0;
18217
18218 ret = new_addr_loc_descr (rtl, dtprel);
18219 ret1 = new_loc_descr (tls_op, 0, 0);
18220 add_loc_descr (&ret, ret1);
18221
18222 have_address = 1;
18223 break;
18224 }
18225 /* FALLTHRU */
18226
18227 case PARM_DECL:
18228 if (context != NULL && context->dpi != NULL
18229 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18230 {
18231 /* We are generating code for a DWARF procedure and we want to access
18232 one of its arguments: find the appropriate argument offset and let
18233 the resolve_args_picking pass compute the offset that complies
18234 with the stack frame size. */
18235 unsigned i = 0;
18236 tree cursor;
18237
18238 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18239 cursor != NULL_TREE && cursor != loc;
18240 cursor = TREE_CHAIN (cursor), ++i)
18241 ;
18242 /* If we are translating a DWARF procedure, all referenced parameters
18243 must belong to the current function. */
18244 gcc_assert (cursor != NULL_TREE);
18245
18246 ret = new_loc_descr (DW_OP_pick, i, 0);
18247 ret->frame_offset_rel = 1;
18248 break;
18249 }
18250 /* FALLTHRU */
18251
18252 case RESULT_DECL:
18253 if (DECL_HAS_VALUE_EXPR_P (loc))
18254 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18255 want_address, context);
18256 /* FALLTHRU */
18257
18258 case FUNCTION_DECL:
18259 {
18260 rtx rtl;
18261 var_loc_list *loc_list = lookup_decl_loc (loc);
18262
18263 if (loc_list && loc_list->first)
18264 {
18265 list_ret = dw_loc_list (loc_list, loc, want_address);
18266 have_address = want_address != 0;
18267 break;
18268 }
18269 rtl = rtl_for_decl_location (loc);
18270 if (rtl == NULL_RTX)
18271 {
18272 if (TREE_CODE (loc) != FUNCTION_DECL
18273 && early_dwarf
18274 && current_function_decl
18275 && want_address != 1
18276 && ! DECL_IGNORED_P (loc)
18277 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18278 || POINTER_TYPE_P (TREE_TYPE (loc)))
18279 && DECL_CONTEXT (loc) == current_function_decl
18280 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18281 <= DWARF2_ADDR_SIZE))
18282 {
18283 dw_die_ref ref = lookup_decl_die (loc);
18284 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18285 if (ref)
18286 {
18287 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18288 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18289 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18290 }
18291 else
18292 {
18293 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18294 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18295 }
18296 break;
18297 }
18298 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18299 return 0;
18300 }
18301 else if (CONST_INT_P (rtl))
18302 {
18303 HOST_WIDE_INT val = INTVAL (rtl);
18304 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18305 val &= GET_MODE_MASK (DECL_MODE (loc));
18306 ret = int_loc_descriptor (val);
18307 }
18308 else if (GET_CODE (rtl) == CONST_STRING)
18309 {
18310 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18311 return 0;
18312 }
18313 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18314 ret = new_addr_loc_descr (rtl, dtprel_false);
18315 else
18316 {
18317 machine_mode mode, mem_mode;
18318
18319 /* Certain constructs can only be represented at top-level. */
18320 if (want_address == 2)
18321 {
18322 ret = loc_descriptor (rtl, VOIDmode,
18323 VAR_INIT_STATUS_INITIALIZED);
18324 have_address = 1;
18325 }
18326 else
18327 {
18328 mode = GET_MODE (rtl);
18329 mem_mode = VOIDmode;
18330 if (MEM_P (rtl))
18331 {
18332 mem_mode = mode;
18333 mode = get_address_mode (rtl);
18334 rtl = XEXP (rtl, 0);
18335 have_address = 1;
18336 }
18337 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18338 VAR_INIT_STATUS_INITIALIZED);
18339 }
18340 if (!ret)
18341 expansion_failed (loc, rtl,
18342 "failed to produce loc descriptor for rtl");
18343 }
18344 }
18345 break;
18346
18347 case MEM_REF:
18348 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18349 {
18350 have_address = 1;
18351 goto do_plus;
18352 }
18353 /* Fallthru. */
18354 case INDIRECT_REF:
18355 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18356 have_address = 1;
18357 break;
18358
18359 case TARGET_MEM_REF:
18360 case SSA_NAME:
18361 case DEBUG_EXPR_DECL:
18362 return NULL;
18363
18364 case COMPOUND_EXPR:
18365 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18366 context);
18367
18368 CASE_CONVERT:
18369 case VIEW_CONVERT_EXPR:
18370 case SAVE_EXPR:
18371 case MODIFY_EXPR:
18372 case NON_LVALUE_EXPR:
18373 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18374 context);
18375
18376 case COMPONENT_REF:
18377 case BIT_FIELD_REF:
18378 case ARRAY_REF:
18379 case ARRAY_RANGE_REF:
18380 case REALPART_EXPR:
18381 case IMAGPART_EXPR:
18382 {
18383 tree obj, offset;
18384 poly_int64 bitsize, bitpos, bytepos;
18385 machine_mode mode;
18386 int unsignedp, reversep, volatilep = 0;
18387
18388 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18389 &unsignedp, &reversep, &volatilep);
18390
18391 gcc_assert (obj != loc);
18392
18393 list_ret = loc_list_from_tree_1 (obj,
18394 want_address == 2
18395 && known_eq (bitpos, 0)
18396 && !offset ? 2 : 1,
18397 context);
18398 /* TODO: We can extract the value of the small expression via shifting even
18399 for nonzero bitpos. */
18400 if (list_ret == 0)
18401 return 0;
18402 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18403 || !multiple_p (bitsize, BITS_PER_UNIT))
18404 {
18405 expansion_failed (loc, NULL_RTX,
18406 "bitfield access");
18407 return 0;
18408 }
18409
18410 if (offset != NULL_TREE)
18411 {
18412 /* Variable offset. */
18413 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18414 if (list_ret1 == 0)
18415 return 0;
18416 add_loc_list (&list_ret, list_ret1);
18417 if (!list_ret)
18418 return 0;
18419 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18420 }
18421
18422 HOST_WIDE_INT value;
18423 if (bytepos.is_constant (&value) && value > 0)
18424 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18425 value, 0));
18426 else if (maybe_ne (bytepos, 0))
18427 loc_list_plus_const (list_ret, bytepos);
18428
18429 have_address = 1;
18430 break;
18431 }
18432
18433 case INTEGER_CST:
18434 if ((want_address || !tree_fits_shwi_p (loc))
18435 && (ret = cst_pool_loc_descr (loc)))
18436 have_address = 1;
18437 else if (want_address == 2
18438 && tree_fits_shwi_p (loc)
18439 && (ret = address_of_int_loc_descriptor
18440 (int_size_in_bytes (TREE_TYPE (loc)),
18441 tree_to_shwi (loc))))
18442 have_address = 1;
18443 else if (tree_fits_shwi_p (loc))
18444 ret = int_loc_descriptor (tree_to_shwi (loc));
18445 else if (tree_fits_uhwi_p (loc))
18446 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18447 else
18448 {
18449 expansion_failed (loc, NULL_RTX,
18450 "Integer operand is not host integer");
18451 return 0;
18452 }
18453 break;
18454
18455 case CONSTRUCTOR:
18456 case REAL_CST:
18457 case STRING_CST:
18458 case COMPLEX_CST:
18459 if ((ret = cst_pool_loc_descr (loc)))
18460 have_address = 1;
18461 else if (TREE_CODE (loc) == CONSTRUCTOR)
18462 {
18463 tree type = TREE_TYPE (loc);
18464 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18465 unsigned HOST_WIDE_INT offset = 0;
18466 unsigned HOST_WIDE_INT cnt;
18467 constructor_elt *ce;
18468
18469 if (TREE_CODE (type) == RECORD_TYPE)
18470 {
18471 /* This is very limited, but it's enough to output
18472 pointers to member functions, as long as the
18473 referenced function is defined in the current
18474 translation unit. */
18475 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18476 {
18477 tree val = ce->value;
18478
18479 tree field = ce->index;
18480
18481 if (val)
18482 STRIP_NOPS (val);
18483
18484 if (!field || DECL_BIT_FIELD (field))
18485 {
18486 expansion_failed (loc, NULL_RTX,
18487 "bitfield in record type constructor");
18488 size = offset = (unsigned HOST_WIDE_INT)-1;
18489 ret = NULL;
18490 break;
18491 }
18492
18493 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18494 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18495 gcc_assert (pos + fieldsize <= size);
18496 if (pos < offset)
18497 {
18498 expansion_failed (loc, NULL_RTX,
18499 "out-of-order fields in record constructor");
18500 size = offset = (unsigned HOST_WIDE_INT)-1;
18501 ret = NULL;
18502 break;
18503 }
18504 if (pos > offset)
18505 {
18506 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18507 add_loc_descr (&ret, ret1);
18508 offset = pos;
18509 }
18510 if (val && fieldsize != 0)
18511 {
18512 ret1 = loc_descriptor_from_tree (val, want_address, context);
18513 if (!ret1)
18514 {
18515 expansion_failed (loc, NULL_RTX,
18516 "unsupported expression in field");
18517 size = offset = (unsigned HOST_WIDE_INT)-1;
18518 ret = NULL;
18519 break;
18520 }
18521 add_loc_descr (&ret, ret1);
18522 }
18523 if (fieldsize)
18524 {
18525 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18526 add_loc_descr (&ret, ret1);
18527 offset = pos + fieldsize;
18528 }
18529 }
18530
18531 if (offset != size)
18532 {
18533 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18534 add_loc_descr (&ret, ret1);
18535 offset = size;
18536 }
18537
18538 have_address = !!want_address;
18539 }
18540 else
18541 expansion_failed (loc, NULL_RTX,
18542 "constructor of non-record type");
18543 }
18544 else
18545 /* We can construct small constants here using int_loc_descriptor. */
18546 expansion_failed (loc, NULL_RTX,
18547 "constructor or constant not in constant pool");
18548 break;
18549
18550 case TRUTH_AND_EXPR:
18551 case TRUTH_ANDIF_EXPR:
18552 case BIT_AND_EXPR:
18553 op = DW_OP_and;
18554 goto do_binop;
18555
18556 case TRUTH_XOR_EXPR:
18557 case BIT_XOR_EXPR:
18558 op = DW_OP_xor;
18559 goto do_binop;
18560
18561 case TRUTH_OR_EXPR:
18562 case TRUTH_ORIF_EXPR:
18563 case BIT_IOR_EXPR:
18564 op = DW_OP_or;
18565 goto do_binop;
18566
18567 case FLOOR_DIV_EXPR:
18568 case CEIL_DIV_EXPR:
18569 case ROUND_DIV_EXPR:
18570 case TRUNC_DIV_EXPR:
18571 case EXACT_DIV_EXPR:
18572 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18573 return 0;
18574 op = DW_OP_div;
18575 goto do_binop;
18576
18577 case MINUS_EXPR:
18578 op = DW_OP_minus;
18579 goto do_binop;
18580
18581 case FLOOR_MOD_EXPR:
18582 case CEIL_MOD_EXPR:
18583 case ROUND_MOD_EXPR:
18584 case TRUNC_MOD_EXPR:
18585 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18586 {
18587 op = DW_OP_mod;
18588 goto do_binop;
18589 }
18590 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18591 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18592 if (list_ret == 0 || list_ret1 == 0)
18593 return 0;
18594
18595 add_loc_list (&list_ret, list_ret1);
18596 if (list_ret == 0)
18597 return 0;
18598 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18599 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18600 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18601 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18602 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18603 break;
18604
18605 case MULT_EXPR:
18606 op = DW_OP_mul;
18607 goto do_binop;
18608
18609 case LSHIFT_EXPR:
18610 op = DW_OP_shl;
18611 goto do_binop;
18612
18613 case RSHIFT_EXPR:
18614 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18615 goto do_binop;
18616
18617 case POINTER_PLUS_EXPR:
18618 case PLUS_EXPR:
18619 do_plus:
18620 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18621 {
18622 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18623 smarter to encode their opposite. The DW_OP_plus_uconst operation
18624 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18625 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18626 bytes, Y being the size of the operation that pushes the opposite
18627 of the addend. So let's choose the smallest representation. */
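	  /* For example (assuming a 64-bit address size): an addend of -3 is
	     the unsigned value 0xfffffffffffffffd, so DW_OP_plus_uconst would
	     need a 10-byte ULEB128 operand (11 bytes in total), whereas the
	     alternative "DW_OP_lit3; DW_OP_minus" pattern costs only 2
	     bytes.  */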
18628 const tree tree_addend = TREE_OPERAND (loc, 1);
18629 offset_int wi_addend;
18630 HOST_WIDE_INT shwi_addend;
18631 dw_loc_descr_ref loc_naddend;
18632
18633 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18634 if (list_ret == 0)
18635 return 0;
18636
18637 /* Try to get the literal to push. It is the opposite of the addend,
18638 so as we rely on wrapping during DWARF evaluation, first decode
18639 the literal as a "DWARF-sized" signed number. */
18640 wi_addend = wi::to_offset (tree_addend);
18641 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18642 shwi_addend = wi_addend.to_shwi ();
18643 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18644 ? int_loc_descriptor (-shwi_addend)
18645 : NULL;
18646
18647 if (loc_naddend != NULL
18648 && ((unsigned) size_of_uleb128 (shwi_addend)
18649 > size_of_loc_descr (loc_naddend)))
18650 {
18651 add_loc_descr_to_each (list_ret, loc_naddend);
18652 add_loc_descr_to_each (list_ret,
18653 new_loc_descr (DW_OP_minus, 0, 0));
18654 }
18655 else
18656 {
18657 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18658 {
18659 loc_naddend = loc_cur;
18660 loc_cur = loc_cur->dw_loc_next;
18661 ggc_free (loc_naddend);
18662 }
18663 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18664 }
18665 break;
18666 }
18667
18668 op = DW_OP_plus;
18669 goto do_binop;
18670
18671 case LE_EXPR:
18672 op = DW_OP_le;
18673 goto do_comp_binop;
18674
18675 case GE_EXPR:
18676 op = DW_OP_ge;
18677 goto do_comp_binop;
18678
18679 case LT_EXPR:
18680 op = DW_OP_lt;
18681 goto do_comp_binop;
18682
18683 case GT_EXPR:
18684 op = DW_OP_gt;
18685 goto do_comp_binop;
18686
18687 do_comp_binop:
18688 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18689 {
18690 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18691 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18692 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18693 TREE_CODE (loc));
18694 break;
18695 }
18696 else
18697 goto do_binop;
18698
18699 case EQ_EXPR:
18700 op = DW_OP_eq;
18701 goto do_binop;
18702
18703 case NE_EXPR:
18704 op = DW_OP_ne;
18705 goto do_binop;
18706
18707 do_binop:
18708 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18709 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18710 if (list_ret == 0 || list_ret1 == 0)
18711 return 0;
18712
18713 add_loc_list (&list_ret, list_ret1);
18714 if (list_ret == 0)
18715 return 0;
18716 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18717 break;
18718
18719 case TRUTH_NOT_EXPR:
18720 case BIT_NOT_EXPR:
18721 op = DW_OP_not;
18722 goto do_unop;
18723
18724 case ABS_EXPR:
18725 op = DW_OP_abs;
18726 goto do_unop;
18727
18728 case NEGATE_EXPR:
18729 op = DW_OP_neg;
18730 goto do_unop;
18731
18732 do_unop:
18733 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18734 if (list_ret == 0)
18735 return 0;
18736
18737 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18738 break;
18739
18740 case MIN_EXPR:
18741 case MAX_EXPR:
18742 {
18743 const enum tree_code code =
18744 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18745
18746 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18747 build2 (code, integer_type_node,
18748 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18749 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18750 }
18751
18752 /* fall through */
18753
18754 case COND_EXPR:
18755 {
18756 dw_loc_descr_ref lhs
18757 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18758 dw_loc_list_ref rhs
18759 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18760 dw_loc_descr_ref bra_node, jump_node, tmp;
18761
18762 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18763 if (list_ret == 0 || lhs == 0 || rhs == 0)
18764 return 0;
18765
18766 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18767 add_loc_descr_to_each (list_ret, bra_node);
18768
18769 add_loc_list (&list_ret, rhs);
18770 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18771 add_loc_descr_to_each (list_ret, jump_node);
18772
18773 add_loc_descr_to_each (list_ret, lhs);
18774 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18775 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18776
18777 /* ??? Need a node to point the skip at. Use a nop. */
18778 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18779 add_loc_descr_to_each (list_ret, tmp);
18780 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18781 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18782 }
18783 break;
18784
18785 case FIX_TRUNC_EXPR:
18786 return 0;
18787
18788 default:
18789 /* Leave front-end specific codes as simply unknown. This comes
18790 up, for instance, with the C STMT_EXPR. */
18791 if ((unsigned int) TREE_CODE (loc)
18792 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18793 {
18794 expansion_failed (loc, NULL_RTX,
18795 "language specific tree node");
18796 return 0;
18797 }
18798
18799 /* Otherwise this is generic code; we should just list all of
18800 these explicitly. We forgot one. */
18801 if (flag_checking)
18802 gcc_unreachable ();
18803
18804 /* In a release build, we want to degrade gracefully: better to
18805 generate incomplete debugging information than to crash. */
18806 return NULL;
18807 }
18808
18809 if (!ret && !list_ret)
18810 return 0;
18811
18812 if (want_address == 2 && !have_address
18813 && (dwarf_version >= 4 || !dwarf_strict))
18814 {
18815 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18816 {
18817 expansion_failed (loc, NULL_RTX,
18818 "DWARF address size mismatch");
18819 return 0;
18820 }
18821 if (ret)
18822 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18823 else
18824 add_loc_descr_to_each (list_ret,
18825 new_loc_descr (DW_OP_stack_value, 0, 0));
18826 have_address = 1;
18827 }
18828 /* Show if we can't fill the request for an address. */
18829 if (want_address && !have_address)
18830 {
18831 expansion_failed (loc, NULL_RTX,
18832 "Want address and only have value");
18833 return 0;
18834 }
18835
18836 gcc_assert (!ret || !list_ret);
18837
18838 /* If we've got an address and don't want one, dereference. */
18839 if (!want_address && have_address)
18840 {
18841 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18842
18843 if (size > DWARF2_ADDR_SIZE || size == -1)
18844 {
18845 expansion_failed (loc, NULL_RTX,
18846 "DWARF address size mismatch");
18847 return 0;
18848 }
18849 else if (size == DWARF2_ADDR_SIZE)
18850 op = DW_OP_deref;
18851 else
18852 op = DW_OP_deref_size;
18853
18854 if (ret)
18855 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18856 else
18857 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18858 }
18859 if (ret)
18860 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18861
18862 return list_ret;
18863 }
18864
18865 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18866 expressions. */
18867
18868 static dw_loc_list_ref
18869 loc_list_from_tree (tree loc, int want_address,
18870 struct loc_descr_context *context)
18871 {
18872 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18873
18874 for (dw_loc_list_ref loc_cur = result;
18875 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18876 loc_descr_without_nops (loc_cur->expr);
18877 return result;
18878 }
18879
18880 /* Same as above but return only a single location expression. */
18881 static dw_loc_descr_ref
18882 loc_descriptor_from_tree (tree loc, int want_address,
18883 struct loc_descr_context *context)
18884 {
18885 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18886 if (!ret)
18887 return NULL;
18888 if (ret->dw_loc_next)
18889 {
18890 expansion_failed (loc, NULL_RTX,
18891 "Location list where only loc descriptor needed");
18892 return NULL;
18893 }
18894 return ret->expr;
18895 }
18896
18897 /* Given a value, round it up to the lowest multiple of `boundary'
18898 which is not less than the value itself. */
18899
18900 static inline HOST_WIDE_INT
18901 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18902 {
18903 return (((value + boundary - 1) / boundary) * boundary);
18904 }
18905
18906 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18907 pointer to the declared type for the relevant field variable, or return
18908 `integer_type_node' if the given node turns out to be an
18909 ERROR_MARK node. */
18910
18911 static inline tree
18912 field_type (const_tree decl)
18913 {
18914 tree type;
18915
18916 if (TREE_CODE (decl) == ERROR_MARK)
18917 return integer_type_node;
18918
18919 type = DECL_BIT_FIELD_TYPE (decl);
18920 if (type == NULL_TREE)
18921 type = TREE_TYPE (decl);
18922
18923 return type;
18924 }
18925
18926 /* Given a pointer to a tree node, return the alignment in bits for
18927 it, or else return BITS_PER_WORD if the node actually turns out to
18928 be an ERROR_MARK node. */
18929
18930 static inline unsigned
18931 simple_type_align_in_bits (const_tree type)
18932 {
18933 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18934 }
18935
18936 static inline unsigned
18937 simple_decl_align_in_bits (const_tree decl)
18938 {
18939 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18940 }
18941
18942 /* Return the result of rounding T up to ALIGN. */
18943
18944 static inline offset_int
18945 round_up_to_align (const offset_int &t, unsigned int align)
18946 {
18947 return wi::udiv_trunc (t + align - 1, align) * align;
18948 }
18949
18950 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18951 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18952 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18953 if we fail to return the size in one of these two forms. */
18954
18955 static dw_loc_descr_ref
18956 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18957 {
18958 tree tree_size;
18959 struct loc_descr_context ctx;
18960
18961 /* Return a constant integer if possible, as that form is preferred. */
18962 *cst_size = int_size_in_bytes (type);
18963 if (*cst_size != -1)
18964 return NULL;
18965
18966 ctx.context_type = const_cast<tree> (type);
18967 ctx.base_decl = NULL_TREE;
18968 ctx.dpi = NULL;
18969 ctx.placeholder_arg = false;
18970 ctx.placeholder_seen = false;
18971
18972 type = TYPE_MAIN_VARIANT (type);
18973 tree_size = TYPE_SIZE_UNIT (type);
18974 return ((tree_size != NULL_TREE)
18975 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18976 : NULL);
18977 }
18978
18979 /* Helper structure for RECORD_TYPE processing. */
18980 struct vlr_context
18981 {
18982 /* Root RECORD_TYPE. It is needed to generate data member location
18983 descriptions in variable-length records (VLR), but also to cope with
18984 variants, which are composed of nested structures multiplexed with
18985 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18986 function processing a FIELD_DECL, it is required to be non-null. */
18987 tree struct_type;
18988 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18989 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18990 this variant part as part of the root record (in storage units). For
18991 regular records, it must be NULL_TREE. */
18992 tree variant_part_offset;
18993 };
18994
18995 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18996 addressed byte of the "containing object" for the given FIELD_DECL. If
18997 possible, return a native constant through CST_OFFSET (in which case NULL is
18998 returned); otherwise return a DWARF expression that computes the offset.
18999
19000 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19001 that offset is, either because the argument turns out to be a pointer to an
19002 ERROR_MARK node, or because the offset expression is too complex for us.
19003
19004 CTX is required: see the comment for VLR_CONTEXT. */
19005
19006 static dw_loc_descr_ref
19007 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19008 HOST_WIDE_INT *cst_offset)
19009 {
19010 tree tree_result;
19011 dw_loc_list_ref loc_result;
19012
19013 *cst_offset = 0;
19014
19015 if (TREE_CODE (decl) == ERROR_MARK)
19016 return NULL;
19017 else
19018 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19019
19020 /* We cannot handle variable bit offsets at the moment, so abort if that is
19021 the case. */
19022 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19023 return NULL;
19024
19025 #ifdef PCC_BITFIELD_TYPE_MATTERS
19026 /* We used to handle only constant offsets in all cases.  Now, we properly
19027 handle dynamic byte offsets only when PCC bitfield type doesn't
19028 matter. */
19029 if (PCC_BITFIELD_TYPE_MATTERS
19030 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19031 {
19032 offset_int object_offset_in_bits;
19033 offset_int object_offset_in_bytes;
19034 offset_int bitpos_int;
19035 tree type;
19036 tree field_size_tree;
19037 offset_int deepest_bitpos;
19038 offset_int field_size_in_bits;
19039 unsigned int type_align_in_bits;
19040 unsigned int decl_align_in_bits;
19041 offset_int type_size_in_bits;
19042
19043 bitpos_int = wi::to_offset (bit_position (decl));
19044 type = field_type (decl);
19045 type_size_in_bits = offset_int_type_size_in_bits (type);
19046 type_align_in_bits = simple_type_align_in_bits (type);
19047
19048 field_size_tree = DECL_SIZE (decl);
19049
19050 /* The size could be unspecified if there was an error, or for
19051 a flexible array member. */
19052 if (!field_size_tree)
19053 field_size_tree = bitsize_zero_node;
19054
19055 /* If the size of the field is not constant, use the type size. */
19056 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19057 field_size_in_bits = wi::to_offset (field_size_tree);
19058 else
19059 field_size_in_bits = type_size_in_bits;
19060
19061 decl_align_in_bits = simple_decl_align_in_bits (decl);
19062
19063 /* The GCC front-end doesn't make any attempt to keep track of the
19064 starting bit offset (relative to the start of the containing
19065 structure type) of the hypothetical "containing object" for a
19066 bit-field. Thus, when computing the byte offset value for the
19067 start of the "containing object" of a bit-field, we must deduce
19068 this information on our own. This can be rather tricky to do in
19069 some cases. For example, handling the following structure type
19070 definition when compiling for an i386/i486 target (which only
19071 aligns long long's to 32-bit boundaries) can be very tricky:
19072
19073 struct S { int field1; long long field2:31; };
19074
19075 Fortunately, there is a simple rule-of-thumb which can be used
19076 in such cases. When compiling for an i386/i486, GCC will
19077 allocate 8 bytes for the structure shown above. It decides to
19078 do this based upon one simple rule for bit-field allocation.
19079 GCC allocates each "containing object" for each bit-field at
19080 the first (i.e. lowest addressed) legitimate alignment boundary
19081 (based upon the required minimum alignment for the declared
19082 type of the field) which it can possibly use, subject to the
19083 condition that there is still enough available space remaining
19084 in the containing object (when allocated at the selected point)
19085 to fully accommodate all of the bits of the bit-field itself.
19086
19087 This simple rule makes it obvious why GCC allocates 8 bytes for
19088 each object of the structure type shown above. When looking
19089 for a place to allocate the "containing object" for `field2',
19090 the compiler simply tries to allocate a 64-bit "containing
19091 object" at each successive 32-bit boundary (starting at zero)
19092 until it finds a place to allocate that 64-bit field such that
19093 at least 31 contiguous (and previously unallocated) bits remain
19094 within that selected 64-bit field.  (As it turns out, for the
19095 example above, the compiler finds it is OK to allocate the
19096 "containing object" 64-bit field at bit-offset zero within the
19097 structure type.)
19098
19099 Here we attempt to work backwards from the limited set of facts
19100 we're given, and we try to deduce from those facts, where GCC
19101 must have believed that the containing object started (within
19102 the structure type). The value we deduce is then used (by the
19103 callers of this routine) to generate DW_AT_location and
19104 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19105 the case of DW_AT_location, regular fields as well). */
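      /* Working through the struct S example above on such a target:
	 bitpos_int = 32, field_size_in_bits = 31, type_size_in_bits = 64 and
	 type_align_in_bits = 32, so deepest_bitpos = 63 and the tentative
	 object_offset_in_bits = 63 - 64 rounds up to the 32-bit alignment
	 boundary 0, i.e. the containing object is deduced to start at byte
	 offset 0, as described above.  */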
19106
19107 /* Figure out the bit-distance from the start of the structure to
19108 the "deepest" bit of the bit-field. */
19109 deepest_bitpos = bitpos_int + field_size_in_bits;
19110
19111 /* This is the tricky part. Use some fancy footwork to deduce
19112 where the lowest addressed bit of the containing object must
19113 be. */
19114 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19115
19116 /* Round up to type_align by default. This works best for
19117 bitfields. */
19118 object_offset_in_bits
19119 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19120
19121 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19122 {
19123 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19124
19125 /* Round up to decl_align instead. */
19126 object_offset_in_bits
19127 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19128 }
19129
19130 object_offset_in_bytes
19131 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19132 if (ctx->variant_part_offset == NULL_TREE)
19133 {
19134 *cst_offset = object_offset_in_bytes.to_shwi ();
19135 return NULL;
19136 }
19137 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19138 }
19139 else
19140 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19141 tree_result = byte_position (decl);
19142
19143 if (ctx->variant_part_offset != NULL_TREE)
19144 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19145 ctx->variant_part_offset, tree_result);
19146
19147 /* If the byte offset is a constant, it's simpler to handle a native
19148 constant rather than a DWARF expression. */
19149 if (TREE_CODE (tree_result) == INTEGER_CST)
19150 {
19151 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19152 return NULL;
19153 }
19154 struct loc_descr_context loc_ctx = {
19155 ctx->struct_type, /* context_type */
19156 NULL_TREE, /* base_decl */
19157 NULL, /* dpi */
19158 false, /* placeholder_arg */
19159 false /* placeholder_seen */
19160 };
19161 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19162
19163 /* We want a DWARF expression: abort if we only have a location list with
19164 multiple elements. */
19165 if (!loc_result || !single_element_loc_list_p (loc_result))
19166 return NULL;
19167 else
19168 return loc_result->expr;
19169 }
19170 \f
19171 /* The following routines define various Dwarf attributes and any data
19172 associated with them. */
19173
19174 /* Add a location description attribute value to a DIE.
19175
19176 This emits location attributes suitable for whole variables and
19177 whole parameters. Note that the location attributes for struct fields are
19178 generated by the routine `data_member_location_attribute' below. */
19179
19180 static inline void
19181 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19182 dw_loc_list_ref descr)
19183 {
19184 bool check_no_locviews = true;
19185 if (descr == 0)
19186 return;
19187 if (single_element_loc_list_p (descr))
19188 add_AT_loc (die, attr_kind, descr->expr);
19189 else
19190 {
19191 add_AT_loc_list (die, attr_kind, descr);
19192 gcc_assert (descr->ll_symbol);
19193 if (attr_kind == DW_AT_location && descr->vl_symbol
19194 && dwarf2out_locviews_in_attribute ())
19195 {
19196 add_AT_view_list (die, DW_AT_GNU_locviews);
19197 check_no_locviews = false;
19198 }
19199 }
19200
19201 if (check_no_locviews)
19202 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19203 }
19204
19205 /* Add DW_AT_accessibility attribute to DIE if needed. */
19206
19207 static void
19208 add_accessibility_attribute (dw_die_ref die, tree decl)
19209 {
19210 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19211 children, otherwise the default is DW_ACCESS_public. In DWARF2
19212 the default has always been DW_ACCESS_public. */
19213 if (TREE_PROTECTED (decl))
19214 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19215 else if (TREE_PRIVATE (decl))
19216 {
19217 if (dwarf_version == 2
19218 || die->die_parent == NULL
19219 || die->die_parent->die_tag != DW_TAG_class_type)
19220 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19221 }
19222 else if (dwarf_version > 2
19223 && die->die_parent
19224 && die->die_parent->die_tag == DW_TAG_class_type)
19225 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19226 }
19227
19228 /* Attach the specialized form of location attribute used for data members of
19229 struct and union types. In the special case of a FIELD_DECL node which
19230 represents a bit-field, the "offset" part of this special location
19231 descriptor must indicate the distance in bytes from the lowest-addressed
19232 byte of the containing struct or union type to the lowest-addressed byte of
19233 the "containing object" for the bit-field. (See the `field_byte_offset'
19234 function above).
19235
19236 For any given bit-field, the "containing object" is a hypothetical object
19237 (of some integral or enum type) within which the given bit-field lives. The
19238 type of this hypothetical "containing object" is always the same as the
19239 declared type of the individual bit-field itself (for GCC anyway... the
19240 DWARF spec doesn't actually mandate this). Note that it is the size (in
19241 bytes) of the hypothetical "containing object" which will be given in the
19242 DW_AT_byte_size attribute for this bit-field. (See the
19243 `byte_size_attribute' function below.) It is also used when calculating the
19244 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19245 function below.)
19246
19247 CTX is required: see the comment for VLR_CONTEXT. */
19248
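/* As a rough summary of the constant-offset handling below: with DWARF 5, a
   bit-field whose bit position fits in an unsigned HOST_WIDE_INT is described
   by a single DW_AT_data_bit_offset; otherwise, when dwarf_version > 2, the
   byte offset is emitted directly as a constant DW_AT_data_member_location;
   for DWARF 2 it is instead wrapped in a DW_OP_plus_uconst location
   expression.  */
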
19249 static void
19250 add_data_member_location_attribute (dw_die_ref die,
19251 tree decl,
19252 struct vlr_context *ctx)
19253 {
19254 HOST_WIDE_INT offset;
19255 dw_loc_descr_ref loc_descr = 0;
19256
19257 if (TREE_CODE (decl) == TREE_BINFO)
19258 {
19259 /* We're working on the TAG_inheritance for a base class. */
19260 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19261 {
19262 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19263 aren't at a fixed offset from all (sub)objects of the same
19264 type. We need to extract the appropriate offset from our
19265 vtable. The following dwarf expression means
19266
19267 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19268
19269 This is specific to the V3 ABI, of course. */
19270
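	  /* Concretely, the opcode sequence built below is roughly
	     DW_OP_dup; DW_OP_deref; <push -Offset>; DW_OP_minus;
	     DW_OP_deref; DW_OP_plus, where the constant is emitted by
	     int_loc_descriptor (e.g. as a DW_OP_lit* or DW_OP_constu).  */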
19271 dw_loc_descr_ref tmp;
19272
19273 /* Make a copy of the object address. */
19274 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19275 add_loc_descr (&loc_descr, tmp);
19276
19277 /* Extract the vtable address. */
19278 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19279 add_loc_descr (&loc_descr, tmp);
19280
19281 /* Calculate the address of the offset. */
19282 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19283 gcc_assert (offset < 0);
19284
19285 tmp = int_loc_descriptor (-offset);
19286 add_loc_descr (&loc_descr, tmp);
19287 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19288 add_loc_descr (&loc_descr, tmp);
19289
19290 /* Extract the offset. */
19291 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19292 add_loc_descr (&loc_descr, tmp);
19293
19294 /* Add it to the object address. */
19295 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19296 add_loc_descr (&loc_descr, tmp);
19297 }
19298 else
19299 offset = tree_to_shwi (BINFO_OFFSET (decl));
19300 }
19301 else
19302 {
19303 loc_descr = field_byte_offset (decl, ctx, &offset);
19304
19305 /* If loc_descr is available then we know the field offset is dynamic.
19306 However, GDB does not handle dynamic field offsets very well at the
19307 moment. */
19308 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19309 {
19310 loc_descr = NULL;
19311 offset = 0;
19312 }
19313
19314      /* Data member location evaluation starts with the base address on the
19315 stack. Compute the field offset and add it to this base address. */
19316 else if (loc_descr != NULL)
19317 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19318 }
19319
19320 if (! loc_descr)
19321 {
19322       /* While DW_AT_data_bit_offset has been added already in DWARF4,
19323	  consumer support arrived late (e.g. GDB only added it in November
19324	  2016).  For DWARF5 we need newer debug info consumers anyway.  We
19325	  might change this to dwarf_version >= 4 once most consumers have caught up.  */
19326 if (dwarf_version >= 5
19327 && TREE_CODE (decl) == FIELD_DECL
19328 && DECL_BIT_FIELD_TYPE (decl))
19329 {
19330 tree off = bit_position (decl);
19331 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19332 {
19333 remove_AT (die, DW_AT_byte_size);
19334 remove_AT (die, DW_AT_bit_offset);
19335 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19336 return;
19337 }
19338 }
19339 if (dwarf_version > 2)
19340 {
19341 /* Don't need to output a location expression, just the constant. */
19342 if (offset < 0)
19343 add_AT_int (die, DW_AT_data_member_location, offset);
19344 else
19345 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19346 return;
19347 }
19348 else
19349 {
19350 enum dwarf_location_atom op;
19351
19352 /* The DWARF2 standard says that we should assume that the structure
19353 address is already on the stack, so we can specify a structure
19354 field address by using DW_OP_plus_uconst. */
19355 op = DW_OP_plus_uconst;
19356 loc_descr = new_loc_descr (op, offset, 0);
19357 }
19358 }
19359
19360 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19361 }
19362
19363 /* Writes integer values to dw_vec_const array. */
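/* For example, insert_int (0x0102, 2, dest) stores dest[0] = 0x02 and
   dest[1] = 0x01, i.e. least significant byte first; extract_int below
   performs the inverse operation.  */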
19364
19365 static void
19366 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19367 {
19368 while (size != 0)
19369 {
19370 *dest++ = val & 0xff;
19371 val >>= 8;
19372 --size;
19373 }
19374 }
19375
19376 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19377
19378 static HOST_WIDE_INT
19379 extract_int (const unsigned char *src, unsigned int size)
19380 {
19381 HOST_WIDE_INT val = 0;
19382
19383 src += size;
19384 while (size != 0)
19385 {
19386 val <<= 8;
19387 val |= *--src & 0xff;
19388 --size;
19389 }
19390 return val;
19391 }
19392
19393 /* Writes wide_int values to dw_vec_const array. */
19394
19395 static void
19396 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19397 {
19398 int i;
19399
19400 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19401 {
19402 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19403 return;
19404 }
19405
19406 /* We'd have to extend this code to support odd sizes. */
19407 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19408
19409 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19410
19411 if (WORDS_BIG_ENDIAN)
19412 for (i = n - 1; i >= 0; i--)
19413 {
19414 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19415 dest += sizeof (HOST_WIDE_INT);
19416 }
19417 else
19418 for (i = 0; i < n; i++)
19419 {
19420 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19421 dest += sizeof (HOST_WIDE_INT);
19422 }
19423 }
19424
19425 /* Writes floating point values to dw_vec_const array. */
19426
19427 static void
19428 insert_float (const_rtx rtl, unsigned char *array)
19429 {
19430 long val[4];
19431 int i;
19432 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19433
19434 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19435
19436 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19437 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19438 {
19439 insert_int (val[i], 4, array);
19440 array += 4;
19441 }
19442 }
19443
19444 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19445 does not have a "location" either in memory or in a register. These
19446 things can arise in GNU C when a constant is passed as an actual parameter
19447 to an inlined function. They can also arise in C++ where declared
19448 constants do not necessarily get memory "homes". */
19449
19450 static bool
19451 add_const_value_attribute (dw_die_ref die, rtx rtl)
19452 {
19453 switch (GET_CODE (rtl))
19454 {
19455 case CONST_INT:
19456 {
19457 HOST_WIDE_INT val = INTVAL (rtl);
19458
19459 if (val < 0)
19460 add_AT_int (die, DW_AT_const_value, val);
19461 else
19462 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19463 }
19464 return true;
19465
19466 case CONST_WIDE_INT:
19467 {
19468 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19469 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19470 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19471 wide_int w = wi::zext (w1, prec);
19472 add_AT_wide (die, DW_AT_const_value, w);
19473 }
19474 return true;
19475
19476 case CONST_DOUBLE:
19477 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19478 floating-point constant. A CONST_DOUBLE is used whenever the
19479 constant requires more than one word in order to be adequately
19480 represented. */
19481 if (TARGET_SUPPORTS_WIDE_INT == 0
19482 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19483 add_AT_double (die, DW_AT_const_value,
19484 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19485 else
19486 {
19487 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19488 unsigned int length = GET_MODE_SIZE (mode);
19489 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19490
19491 insert_float (rtl, array);
19492 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19493 }
19494 return true;
19495
19496 case CONST_VECTOR:
19497 {
19498 unsigned int length;
19499 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19500 return false;
19501
19502 machine_mode mode = GET_MODE (rtl);
19503 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19504 unsigned char *array
19505 = ggc_vec_alloc<unsigned char> (length * elt_size);
19506 unsigned int i;
19507 unsigned char *p;
19508 machine_mode imode = GET_MODE_INNER (mode);
19509
19510 switch (GET_MODE_CLASS (mode))
19511 {
19512 case MODE_VECTOR_INT:
19513 for (i = 0, p = array; i < length; i++, p += elt_size)
19514 {
19515 rtx elt = CONST_VECTOR_ELT (rtl, i);
19516 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19517 }
19518 break;
19519
19520 case MODE_VECTOR_FLOAT:
19521 for (i = 0, p = array; i < length; i++, p += elt_size)
19522 {
19523 rtx elt = CONST_VECTOR_ELT (rtl, i);
19524 insert_float (elt, p);
19525 }
19526 break;
19527
19528 default:
19529 gcc_unreachable ();
19530 }
19531
19532 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19533 }
19534 return true;
19535
19536 case CONST_STRING:
19537 if (dwarf_version >= 4 || !dwarf_strict)
19538 {
19539 dw_loc_descr_ref loc_result;
19540 resolve_one_addr (&rtl);
19541 rtl_addr:
19542 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19543 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19544 add_AT_loc (die, DW_AT_location, loc_result);
19545 vec_safe_push (used_rtx_array, rtl);
19546 return true;
19547 }
19548 return false;
19549
19550 case CONST:
19551 if (CONSTANT_P (XEXP (rtl, 0)))
19552 return add_const_value_attribute (die, XEXP (rtl, 0));
19553 /* FALLTHROUGH */
19554 case SYMBOL_REF:
19555 if (!const_ok_for_output (rtl))
19556 return false;
19557 /* FALLTHROUGH */
19558 case LABEL_REF:
19559 if (dwarf_version >= 4 || !dwarf_strict)
19560 goto rtl_addr;
19561 return false;
19562
19563 case PLUS:
19564 /* In cases where an inlined instance of an inline function is passed
19565 the address of an `auto' variable (which is local to the caller) we
19566 can get a situation where the DECL_RTL of the artificial local
19567 variable (for the inlining) which acts as a stand-in for the
19568 corresponding formal parameter (of the inline function) will look
19569 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19570 exactly a compile-time constant expression, but it isn't the address
19571 of the (artificial) local variable either. Rather, it represents the
19572 *value* which the artificial local variable always has during its
19573 lifetime. We currently have no way to represent such quasi-constant
19574 values in Dwarf, so for now we just punt and generate nothing. */
19575 return false;
19576
19577 case HIGH:
19578 case CONST_FIXED:
19579 return false;
19580
19581 case MEM:
19582 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19583 && MEM_READONLY_P (rtl)
19584 && GET_MODE (rtl) == BLKmode)
19585 {
19586 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19587 return true;
19588 }
19589 return false;
19590
19591 default:
19592 /* No other kinds of rtx should be possible here. */
19593 gcc_unreachable ();
19594 }
19595 return false;
19596 }
19597
19598 /* Determine whether the evaluation of EXPR references any variables
19599 or functions which aren't otherwise used (and therefore may not be
19600 output). */
19601 static tree
19602 reference_to_unused (tree * tp, int * walk_subtrees,
19603 void * data ATTRIBUTE_UNUSED)
19604 {
19605 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19606 *walk_subtrees = 0;
19607
19608 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19609 && ! TREE_ASM_WRITTEN (*tp))
19610 return *tp;
19611 /* ??? The C++ FE emits debug information for using decls, so
19612 putting gcc_unreachable here falls over. See PR31899. For now
19613 be conservative. */
19614 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19615 return *tp;
19616 else if (VAR_P (*tp))
19617 {
19618 varpool_node *node = varpool_node::get (*tp);
19619 if (!node || !node->definition)
19620 return *tp;
19621 }
19622 else if (TREE_CODE (*tp) == FUNCTION_DECL
19623 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19624 {
19625 /* The call graph machinery must have finished analyzing,
19626 optimizing and gimplifying the CU by now.
19627 So if *TP has no call graph node associated
19628 to it, it means *TP will not be emitted. */
19629 if (!cgraph_node::get (*tp))
19630 return *tp;
19631 }
19632 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19633 return *tp;
19634
19635 return NULL_TREE;
19636 }
19637
19638 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19639 for use in a later add_const_value_attribute call. */
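/* For instance, for `static const char msg[] = "hi";' the initializer is a
   3-byte STRING_CST with no embedded zeros, so the code below wraps a
   CONST_STRING in a read-only BLKmode MEM, which add_const_value_attribute
   can later emit as a DW_AT_const_value string.  */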
19640
19641 static rtx
19642 rtl_for_decl_init (tree init, tree type)
19643 {
19644 rtx rtl = NULL_RTX;
19645
19646 STRIP_NOPS (init);
19647
19648 /* If a variable is initialized with a string constant without embedded
19649 zeros, build CONST_STRING. */
19650 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19651 {
19652 tree enttype = TREE_TYPE (type);
19653 tree domain = TYPE_DOMAIN (type);
19654 scalar_int_mode mode;
19655
19656 if (is_int_mode (TYPE_MODE (enttype), &mode)
19657 && GET_MODE_SIZE (mode) == 1
19658 && domain
19659 && TYPE_MAX_VALUE (domain)
19660 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19661 && integer_zerop (TYPE_MIN_VALUE (domain))
19662 && compare_tree_int (TYPE_MAX_VALUE (domain),
19663 TREE_STRING_LENGTH (init) - 1) == 0
19664 && ((size_t) TREE_STRING_LENGTH (init)
19665 == strlen (TREE_STRING_POINTER (init)) + 1))
19666 {
19667 rtl = gen_rtx_CONST_STRING (VOIDmode,
19668 ggc_strdup (TREE_STRING_POINTER (init)));
19669 rtl = gen_rtx_MEM (BLKmode, rtl);
19670 MEM_READONLY_P (rtl) = 1;
19671 }
19672 }
19673 /* Other aggregates, and complex values, could be represented using
19674 CONCAT: FIXME! */
19675 else if (AGGREGATE_TYPE_P (type)
19676 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19677 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19678 || TREE_CODE (type) == COMPLEX_TYPE)
19679 ;
19680 /* Vectors only work if their mode is supported by the target.
19681 FIXME: generic vectors ought to work too. */
19682 else if (TREE_CODE (type) == VECTOR_TYPE
19683 && !VECTOR_MODE_P (TYPE_MODE (type)))
19684 ;
19685 /* If the initializer is something that we know will expand into an
19686 immediate RTL constant, expand it now. We must be careful not to
19687 reference variables which won't be output. */
19688 else if (initializer_constant_valid_p (init, type)
19689 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19690 {
19691 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19692 possible. */
19693 if (TREE_CODE (type) == VECTOR_TYPE)
19694 switch (TREE_CODE (init))
19695 {
19696 case VECTOR_CST:
19697 break;
19698 case CONSTRUCTOR:
19699 if (TREE_CONSTANT (init))
19700 {
19701 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19702 bool constant_p = true;
19703 tree value;
19704 unsigned HOST_WIDE_INT ix;
19705
19706 /* Even when ctor is constant, it might contain non-*_CST
19707 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19708 belong into VECTOR_CST nodes. */
19709 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19710 if (!CONSTANT_CLASS_P (value))
19711 {
19712 constant_p = false;
19713 break;
19714 }
19715
19716 if (constant_p)
19717 {
19718 init = build_vector_from_ctor (type, elts);
19719 break;
19720 }
19721 }
19722 /* FALLTHRU */
19723
19724 default:
19725 return NULL;
19726 }
19727
19728 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19729
19730 /* If expand_expr returns a MEM, it wasn't immediate. */
19731 gcc_assert (!rtl || !MEM_P (rtl));
19732 }
19733
19734 return rtl;
19735 }
19736
19737 /* Generate RTL for the variable DECL to represent its location. */
19738
19739 static rtx
19740 rtl_for_decl_location (tree decl)
19741 {
19742 rtx rtl;
19743
19744 /* Here we have to decide where we are going to say the parameter "lives"
19745 (as far as the debugger is concerned). We only have a couple of
19746 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19747
19748 DECL_RTL normally indicates where the parameter lives during most of the
19749 activation of the function. If optimization is enabled however, this
19750 could be either NULL or else a pseudo-reg. Both of those cases indicate
19751 that the parameter doesn't really live anywhere (as far as the code
19752 generation parts of GCC are concerned) during most of the function's
19753 activation. That will happen (for example) if the parameter is never
19754 referenced within the function.
19755
19756 We could just generate a location descriptor here for all non-NULL
19757 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19758 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19759 where DECL_RTL is NULL or is a pseudo-reg.
19760
19761 Note however that we can only get away with using DECL_INCOMING_RTL as
19762 a backup substitute for DECL_RTL in certain limited cases. In cases
19763 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19764 we can be sure that the parameter was passed using the same type as it is
19765 declared to have within the function, and that its DECL_INCOMING_RTL
19766 points us to a place where a value of that type is passed.
19767
19768 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19769 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19770 because in these cases DECL_INCOMING_RTL points us to a value of some
19771 type which is *different* from the type of the parameter itself. Thus,
19772 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19773 such cases, the debugger would end up (for example) trying to fetch a
19774 `float' from a place which actually contains the first part of a
19775 `double'. That would lead to really incorrect and confusing
19776 output at debug-time.
19777
19778 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19779 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19780 are a couple of exceptions however. On little-endian machines we can
19781 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19782 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19783 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19784 when (on a little-endian machine) a non-prototyped function has a
19785 parameter declared to be of type `short' or `char'. In such cases,
19786 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19787 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19788 passed `int' value. If the debugger then uses that address to fetch
19789 a `short' or a `char' (on a little-endian machine) the result will be
19790 the correct data, so we allow for such exceptional cases below.
19791
19792 Note that our goal here is to describe the place where the given formal
19793 parameter lives during most of the function's activation (i.e. between the
19794 end of the prologue and the start of the epilogue). We'll do that as best
19795 as we can. Note however that if the given formal parameter is modified
19796 sometime during the execution of the function, then a stack backtrace (at
19797 debug-time) will show the function as having been called with the *new*
19798 value rather than the value which was originally passed in. This happens
19799 rarely enough that it is not a major problem, but it *is* a problem, and
19800 I'd like to fix it.
19801
19802 A future version of dwarf2out.c may generate two additional attributes for
19803 any given DW_TAG_formal_parameter DIE which will describe the "passed
19804 type" and the "passed location" for the given formal parameter in addition
19805 to the attributes we now generate to indicate the "declared type" and the
19806 "active location" for each parameter. This additional set of attributes
19807 could be used by debuggers for stack backtraces. Separately, note that
19808 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19809 This happens (for example) for inlined-instances of inline function formal
19810 parameters which are never referenced. This really shouldn't be
19811 happening. All PARM_DECL nodes should get valid non-NULL
19812 DECL_INCOMING_RTL values. FIXME. */
19813
19814 /* Use DECL_RTL as the "location" unless we find something better. */
19815 rtl = DECL_RTL_IF_SET (decl);
19816
19817 /* When generating abstract instances, ignore everything except
19818 constants, symbols living in memory, and symbols living in
19819 fixed registers. */
19820 if (! reload_completed)
19821 {
19822 if (rtl
19823 && (CONSTANT_P (rtl)
19824 || (MEM_P (rtl)
19825 && CONSTANT_P (XEXP (rtl, 0)))
19826 || (REG_P (rtl)
19827 && VAR_P (decl)
19828 && TREE_STATIC (decl))))
19829 {
19830 rtl = targetm.delegitimize_address (rtl);
19831 return rtl;
19832 }
19833 rtl = NULL_RTX;
19834 }
19835 else if (TREE_CODE (decl) == PARM_DECL)
19836 {
19837 if (rtl == NULL_RTX
19838 || is_pseudo_reg (rtl)
19839 || (MEM_P (rtl)
19840 && is_pseudo_reg (XEXP (rtl, 0))
19841 && DECL_INCOMING_RTL (decl)
19842 && MEM_P (DECL_INCOMING_RTL (decl))
19843 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19844 {
19845 tree declared_type = TREE_TYPE (decl);
19846 tree passed_type = DECL_ARG_TYPE (decl);
19847 machine_mode dmode = TYPE_MODE (declared_type);
19848 machine_mode pmode = TYPE_MODE (passed_type);
19849
19850 /* This decl represents a formal parameter which was optimized out.
19851 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19852 all cases where (rtl == NULL_RTX) just below. */
19853 if (dmode == pmode)
19854 rtl = DECL_INCOMING_RTL (decl);
19855 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19856 && SCALAR_INT_MODE_P (dmode)
19857 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19858 && DECL_INCOMING_RTL (decl))
19859 {
19860 rtx inc = DECL_INCOMING_RTL (decl);
19861 if (REG_P (inc))
19862 rtl = inc;
19863 else if (MEM_P (inc))
19864 {
19865 if (BYTES_BIG_ENDIAN)
19866 rtl = adjust_address_nv (inc, dmode,
19867 GET_MODE_SIZE (pmode)
19868 - GET_MODE_SIZE (dmode));
19869 else
19870 rtl = inc;
19871 }
19872 }
19873 }
19874
19875 /* If the parm was passed in registers, but lives on the stack, then
19876 make a big endian correction if the mode of the type of the
19877 parameter is not the same as the mode of the rtl. */
19878 /* ??? This is the same series of checks that are made in dbxout.c before
19879 we reach the big endian correction code there. It isn't clear if all
19880 of these checks are necessary here, but keeping them all is the safe
19881 thing to do. */
19882 else if (MEM_P (rtl)
19883 && XEXP (rtl, 0) != const0_rtx
19884 && ! CONSTANT_P (XEXP (rtl, 0))
19885 /* Not passed in memory. */
19886 && !MEM_P (DECL_INCOMING_RTL (decl))
19887 /* Not passed by invisible reference. */
19888 && (!REG_P (XEXP (rtl, 0))
19889 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19890 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19891 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19892 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19893 #endif
19894 )
19895 /* Big endian correction check. */
19896 && BYTES_BIG_ENDIAN
19897 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19898 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19899 UNITS_PER_WORD))
19900 {
19901 machine_mode addr_mode = get_address_mode (rtl);
19902 poly_int64 offset = (UNITS_PER_WORD
19903 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19904
19905 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19906 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19907 }
19908 }
19909 else if (VAR_P (decl)
19910 && rtl
19911 && MEM_P (rtl)
19912 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19913 {
19914 machine_mode addr_mode = get_address_mode (rtl);
19915 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19916 GET_MODE (rtl));
19917
19918 /* If a variable is declared "register" yet is smaller than
19919 a register, then if we store the variable to memory, it
19920 looks like we're storing a register-sized value, when in
19921 fact we are not. We need to adjust the offset of the
19922 storage location to reflect the actual value's bytes,
19923 else gdb will not be able to display it. */
19924 if (maybe_ne (offset, 0))
19925 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19926 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19927 }
19928
19929 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19930 and will have been substituted directly into all expressions that use it.
19931 C does not have such a concept, but C++ and other languages do. */
19932 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19933 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19934
19935 if (rtl)
19936 rtl = targetm.delegitimize_address (rtl);
19937
19938 /* If we don't look past the constant pool, we risk emitting a
19939 reference to a constant pool entry that isn't referenced from
19940 code, and thus is not emitted. */
19941 if (rtl)
19942 rtl = avoid_constant_pool_reference (rtl);
19943
19944 /* Try harder to get a rtl. If this symbol ends up not being emitted
19945 in the current CU, resolve_addr will remove the expression referencing
19946 it. */
19947 if (rtl == NULL_RTX
19948 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19949 && VAR_P (decl)
19950 && !DECL_EXTERNAL (decl)
19951 && TREE_STATIC (decl)
19952 && DECL_NAME (decl)
19953 && !DECL_HARD_REGISTER (decl)
19954 && DECL_MODE (decl) != VOIDmode)
19955 {
19956 rtl = make_decl_rtl_for_debug (decl);
19957 if (!MEM_P (rtl)
19958 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19959 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19960 rtl = NULL_RTX;
19961 }
19962
19963 return rtl;
19964 }
19965
19966 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19967 returned. If so, the decl for the COMMON block is returned, and the
19968 value is the offset into the common block for the symbol. */
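/* Roughly, for Fortran `COMMON /BLK/ X, Y' the value expression of Y is a
   COMPONENT_REF into the variable representing the whole common block, so we
   return that variable and set *VALUE to Y's byte offset within the block
   (e.g. 4 when X is a default 4-byte REAL).  */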
19969
19970 static tree
19971 fortran_common (tree decl, HOST_WIDE_INT *value)
19972 {
19973 tree val_expr, cvar;
19974 machine_mode mode;
19975 poly_int64 bitsize, bitpos;
19976 tree offset;
19977 HOST_WIDE_INT cbitpos;
19978 int unsignedp, reversep, volatilep = 0;
19979
19980 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19981 it does not have a value (the offset into the common area), or if it
19982 is thread local (as opposed to global) then it isn't common, and shouldn't
19983 be handled as such. */
19984 if (!VAR_P (decl)
19985 || !TREE_STATIC (decl)
19986 || !DECL_HAS_VALUE_EXPR_P (decl)
19987 || !is_fortran ())
19988 return NULL_TREE;
19989
19990 val_expr = DECL_VALUE_EXPR (decl);
19991 if (TREE_CODE (val_expr) != COMPONENT_REF)
19992 return NULL_TREE;
19993
19994 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19995 &unsignedp, &reversep, &volatilep);
19996
19997 if (cvar == NULL_TREE
19998 || !VAR_P (cvar)
19999 || DECL_ARTIFICIAL (cvar)
20000 || !TREE_PUBLIC (cvar)
20001 /* We don't expect to have to cope with variable offsets,
20002 since at present all static data must have a constant size. */
20003 || !bitpos.is_constant (&cbitpos))
20004 return NULL_TREE;
20005
20006 *value = 0;
20007 if (offset != NULL)
20008 {
20009 if (!tree_fits_shwi_p (offset))
20010 return NULL_TREE;
20011 *value = tree_to_shwi (offset);
20012 }
20013 if (cbitpos != 0)
20014 *value += cbitpos / BITS_PER_UNIT;
20015
20016 return cvar;
20017 }
20018
20019 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20020 data attribute for a variable or a parameter. We generate the
20021 DW_AT_const_value attribute only in those cases where the given variable
20022 or parameter does not have a true "location" either in memory or in a
20023 register. This can happen (for example) when a constant is passed as an
20024 actual argument in a call to an inline function. (It's possible that
20025 these things can crop up in other ways also.) Note that one type of
20026 constant value which can be passed into an inlined function is a constant
20027 pointer. This can happen for example if an actual argument in an inlined
20028 function call evaluates to a compile-time constant address.
20029
20030 CACHE_P is true if it is worth caching the location list for DECL,
20031 so that future calls can reuse it rather than regenerate it from scratch.
20032 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20033 since we will need to refer to them each time the function is inlined. */
20034
20035 static bool
20036 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20037 {
20038 rtx rtl;
20039 dw_loc_list_ref list;
20040 var_loc_list *loc_list;
20041 cached_dw_loc_list *cache;
20042
20043 if (early_dwarf)
20044 return false;
20045
20046 if (TREE_CODE (decl) == ERROR_MARK)
20047 return false;
20048
20049 if (get_AT (die, DW_AT_location)
20050 || get_AT (die, DW_AT_const_value))
20051 return true;
20052
20053 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20054 || TREE_CODE (decl) == RESULT_DECL);
20055
20056 /* Try to get some constant RTL for this decl, and use that as the value of
20057 the location. */
20058
20059 rtl = rtl_for_decl_location (decl);
20060 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20061 && add_const_value_attribute (die, rtl))
20062 return true;
20063
20064   /* See if we have a single-element location list that is equivalent to
20065      a constant value; in that case it is better to use add_const_value_attribute
20066      rather than expanding the constant value equivalent.  */
20067 loc_list = lookup_decl_loc (decl);
20068 if (loc_list
20069 && loc_list->first
20070 && loc_list->first->next == NULL
20071 && NOTE_P (loc_list->first->loc)
20072 && NOTE_VAR_LOCATION (loc_list->first->loc)
20073 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20074 {
20075 struct var_loc_node *node;
20076
20077 node = loc_list->first;
20078 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20079 if (GET_CODE (rtl) == EXPR_LIST)
20080 rtl = XEXP (rtl, 0);
20081 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20082 && add_const_value_attribute (die, rtl))
20083 return true;
20084 }
20085 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20086 list several times. See if we've already cached the contents. */
20087 list = NULL;
20088 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20089 cache_p = false;
20090 if (cache_p)
20091 {
20092 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20093 if (cache)
20094 list = cache->loc_list;
20095 }
20096 if (list == NULL)
20097 {
20098 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20099 NULL);
20100 /* It is usually worth caching this result if the decl is from
20101 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20102 if (cache_p && list && list->dw_loc_next)
20103 {
20104 cached_dw_loc_list **slot
20105 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20106 DECL_UID (decl),
20107 INSERT);
20108 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20109 cache->decl_id = DECL_UID (decl);
20110 cache->loc_list = list;
20111 *slot = cache;
20112 }
20113 }
20114 if (list)
20115 {
20116 add_AT_location_description (die, DW_AT_location, list);
20117 return true;
20118 }
20119 /* None of that worked, so it must not really have a location;
20120 try adding a constant value attribute from the DECL_INITIAL. */
20121 return tree_add_const_value_attribute_for_decl (die, decl);
20122 }
20123
20124 /* Helper function for tree_add_const_value_attribute. Natively encode
20125 initializer INIT into an array. Return true if successful. */
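/* For example, for `static const int a[3] = { 1, 2, 3 };' (assuming 4-byte
   ints) the CONSTRUCTOR case below zeroes a 12-byte array and then encodes
   each element at consecutive 4-byte positions in target byte order.  */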
20126
20127 static bool
20128 native_encode_initializer (tree init, unsigned char *array, int size)
20129 {
20130 tree type;
20131
20132 if (init == NULL_TREE)
20133 return false;
20134
20135 STRIP_NOPS (init);
20136 switch (TREE_CODE (init))
20137 {
20138 case STRING_CST:
20139 type = TREE_TYPE (init);
20140 if (TREE_CODE (type) == ARRAY_TYPE)
20141 {
20142 tree enttype = TREE_TYPE (type);
20143 scalar_int_mode mode;
20144
20145 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20146 || GET_MODE_SIZE (mode) != 1)
20147 return false;
20148 if (int_size_in_bytes (type) != size)
20149 return false;
20150 if (size > TREE_STRING_LENGTH (init))
20151 {
20152 memcpy (array, TREE_STRING_POINTER (init),
20153 TREE_STRING_LENGTH (init));
20154 memset (array + TREE_STRING_LENGTH (init),
20155 '\0', size - TREE_STRING_LENGTH (init));
20156 }
20157 else
20158 memcpy (array, TREE_STRING_POINTER (init), size);
20159 return true;
20160 }
20161 return false;
20162 case CONSTRUCTOR:
20163 type = TREE_TYPE (init);
20164 if (int_size_in_bytes (type) != size)
20165 return false;
20166 if (TREE_CODE (type) == ARRAY_TYPE)
20167 {
20168 HOST_WIDE_INT min_index;
20169 unsigned HOST_WIDE_INT cnt;
20170 int curpos = 0, fieldsize;
20171 constructor_elt *ce;
20172
20173 if (TYPE_DOMAIN (type) == NULL_TREE
20174 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20175 return false;
20176
20177 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20178 if (fieldsize <= 0)
20179 return false;
20180
20181 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20182 memset (array, '\0', size);
20183 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20184 {
20185 tree val = ce->value;
20186 tree index = ce->index;
20187 int pos = curpos;
20188 if (index && TREE_CODE (index) == RANGE_EXPR)
20189 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20190 * fieldsize;
20191 else if (index)
20192 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20193
20194 if (val)
20195 {
20196 STRIP_NOPS (val);
20197 if (!native_encode_initializer (val, array + pos, fieldsize))
20198 return false;
20199 }
20200 curpos = pos + fieldsize;
20201 if (index && TREE_CODE (index) == RANGE_EXPR)
20202 {
20203 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20204 - tree_to_shwi (TREE_OPERAND (index, 0));
20205 while (count-- > 0)
20206 {
20207 if (val)
20208 memcpy (array + curpos, array + pos, fieldsize);
20209 curpos += fieldsize;
20210 }
20211 }
20212 gcc_assert (curpos <= size);
20213 }
20214 return true;
20215 }
20216 else if (TREE_CODE (type) == RECORD_TYPE
20217 || TREE_CODE (type) == UNION_TYPE)
20218 {
20219 tree field = NULL_TREE;
20220 unsigned HOST_WIDE_INT cnt;
20221 constructor_elt *ce;
20222
20223 if (int_size_in_bytes (type) != size)
20224 return false;
20225
20226 if (TREE_CODE (type) == RECORD_TYPE)
20227 field = TYPE_FIELDS (type);
20228
20229 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20230 {
20231 tree val = ce->value;
20232 int pos, fieldsize;
20233
20234 if (ce->index != 0)
20235 field = ce->index;
20236
20237 if (val)
20238 STRIP_NOPS (val);
20239
20240 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20241 return false;
20242
20243 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20244 && TYPE_DOMAIN (TREE_TYPE (field))
20245 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20246 return false;
20247 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20248 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20249 return false;
20250 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20251 pos = int_byte_position (field);
20252 gcc_assert (pos + fieldsize <= size);
20253 if (val && fieldsize != 0
20254 && !native_encode_initializer (val, array + pos, fieldsize))
20255 return false;
20256 }
20257 return true;
20258 }
20259 return false;
20260 case VIEW_CONVERT_EXPR:
20261 case NON_LVALUE_EXPR:
20262 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20263 default:
20264 return native_encode_expr (init, array, size) == size;
20265 }
20266 }
20267
20268 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20269 attribute is the const value T. */
20270
20271 static bool
20272 tree_add_const_value_attribute (dw_die_ref die, tree t)
20273 {
20274 tree init;
20275 tree type = TREE_TYPE (t);
20276 rtx rtl;
20277
20278 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20279 return false;
20280
20281 init = t;
20282 gcc_assert (!DECL_P (init));
20283
20284 if (TREE_CODE (init) == INTEGER_CST)
20285 {
20286 if (tree_fits_uhwi_p (init))
20287 {
20288 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20289 return true;
20290 }
20291 if (tree_fits_shwi_p (init))
20292 {
20293 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20294 return true;
20295 }
20296 }
20297 if (! early_dwarf)
20298 {
20299 rtl = rtl_for_decl_init (init, type);
20300 if (rtl)
20301 return add_const_value_attribute (die, rtl);
20302 }
20303 /* If the host and target are sane, try harder. */
20304 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20305 && initializer_constant_valid_p (init, type))
20306 {
20307 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20308 if (size > 0 && (int) size == size)
20309 {
20310 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20311
20312 if (native_encode_initializer (init, array, size))
20313 {
20314 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20315 return true;
20316 }
20317 ggc_free (array);
20318 }
20319 }
20320 return false;
20321 }
20322
20323 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20324 attribute is the const value of T, where T is an integral constant
20325 variable with static storage duration
20326 (so it can't be a PARM_DECL or a RESULT_DECL). */
20327
20328 static bool
20329 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20330 {
20331
20332 if (!decl
20333 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20334 || (VAR_P (decl) && !TREE_STATIC (decl)))
20335 return false;
20336
20337 if (TREE_READONLY (decl)
20338 && ! TREE_THIS_VOLATILE (decl)
20339 && DECL_INITIAL (decl))
20340 /* OK */;
20341 else
20342 return false;
20343
20344 /* Don't add DW_AT_const_value if abstract origin already has one. */
20345 if (get_AT (var_die, DW_AT_const_value))
20346 return false;
20347
20348 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20349 }
20350
20351 /* Convert the CFI instructions for the current function into a
20352    location list.  This is used for DW_AT_frame_base when we are targeting
20353 a dwarf2 consumer that does not support the dwarf3
20354 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20355 expressions. */
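/* For a typical function whose prologue saves and then sets up the frame
   pointer, the resulting list usually has an initial range where the frame
   base is computed from the stack pointer and a later range where it is
   computed from the hard frame pointer (e.g. DW_OP_breg7-based followed by
   DW_OP_breg6-based entries on x86_64), with the range boundaries taken from
   the labels of the CFA-advancing CFIs.  */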
20356
20357 static dw_loc_list_ref
20358 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20359 {
20360 int ix;
20361 dw_fde_ref fde;
20362 dw_loc_list_ref list, *list_tail;
20363 dw_cfi_ref cfi;
20364 dw_cfa_location last_cfa, next_cfa;
20365 const char *start_label, *last_label, *section;
20366 dw_cfa_location remember;
20367
20368 fde = cfun->fde;
20369 gcc_assert (fde != NULL);
20370
20371 section = secname_for_decl (current_function_decl);
20372 list_tail = &list;
20373 list = NULL;
20374
20375 memset (&next_cfa, 0, sizeof (next_cfa));
20376 next_cfa.reg = INVALID_REGNUM;
20377 remember = next_cfa;
20378
20379 start_label = fde->dw_fde_begin;
20380
20381 /* ??? Bald assumption that the CIE opcode list does not contain
20382 advance opcodes. */
20383 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20384 lookup_cfa_1 (cfi, &next_cfa, &remember);
20385
20386 last_cfa = next_cfa;
20387 last_label = start_label;
20388
20389 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20390 {
20391 /* If the first partition contained no CFI adjustments, the
20392 CIE opcodes apply to the whole first partition. */
20393 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20394 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20395       list_tail = &(*list_tail)->dw_loc_next;
20396 start_label = last_label = fde->dw_fde_second_begin;
20397 }
20398
20399 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20400 {
20401 switch (cfi->dw_cfi_opc)
20402 {
20403 case DW_CFA_set_loc:
20404 case DW_CFA_advance_loc1:
20405 case DW_CFA_advance_loc2:
20406 case DW_CFA_advance_loc4:
20407 if (!cfa_equal_p (&last_cfa, &next_cfa))
20408 {
20409 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20410 start_label, 0, last_label, 0, section);
20411
20412 list_tail = &(*list_tail)->dw_loc_next;
20413 last_cfa = next_cfa;
20414 start_label = last_label;
20415 }
20416 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20417 break;
20418
20419 case DW_CFA_advance_loc:
20420 /* The encoding is complex enough that we should never emit this. */
20421 gcc_unreachable ();
20422
20423 default:
20424 lookup_cfa_1 (cfi, &next_cfa, &remember);
20425 break;
20426 }
20427 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20428 {
20429 if (!cfa_equal_p (&last_cfa, &next_cfa))
20430 {
20431 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20432 start_label, 0, last_label, 0, section);
20433
20434 list_tail = &(*list_tail)->dw_loc_next;
20435 last_cfa = next_cfa;
20436 start_label = last_label;
20437 }
20438 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20439 start_label, 0, fde->dw_fde_end, 0, section);
20440 list_tail = &(*list_tail)->dw_loc_next;
20441 start_label = last_label = fde->dw_fde_second_begin;
20442 }
20443 }
20444
20445 if (!cfa_equal_p (&last_cfa, &next_cfa))
20446 {
20447 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20448 start_label, 0, last_label, 0, section);
20449 list_tail = &(*list_tail)->dw_loc_next;
20450 start_label = last_label;
20451 }
20452
20453 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20454 start_label, 0,
20455 fde->dw_fde_second_begin
20456 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20457 section);
20458
20459 maybe_gen_llsym (list);
20460
20461 return list;
20462 }
20463
20464 /* Compute a displacement from the "steady-state frame pointer" to the
20465 frame base (often the same as the CFA), and store it in
20466 frame_pointer_fb_offset. OFFSET is added to the displacement
20467 before the latter is negated. */
20468
20469 static void
20470 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20471 {
20472 rtx reg, elim;
20473
20474 #ifdef FRAME_POINTER_CFA_OFFSET
20475 reg = frame_pointer_rtx;
20476 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20477 #else
20478 reg = arg_pointer_rtx;
20479 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20480 #endif
20481
20482 elim = (ira_use_lra_p
20483 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20484 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20485 elim = strip_offset_and_add (elim, &offset);
20486
20487 frame_pointer_fb_offset = -offset;
20488
20489 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20490      in which to eliminate.  This is because its stack pointer isn't
20491 directly accessible as a register within the ISA. To work around
20492 this, assume that while we cannot provide a proper value for
20493 frame_pointer_fb_offset, we won't need one either. */
20494 frame_pointer_fb_offset_valid
20495 = ((SUPPORTS_STACK_ALIGNMENT
20496 && (elim == hard_frame_pointer_rtx
20497 || elim == stack_pointer_rtx))
20498 || elim == (frame_pointer_needed
20499 ? hard_frame_pointer_rtx
20500 : stack_pointer_rtx));
20501 }
20502
20503 /* Generate a DW_AT_name attribute given some string value to be included as
20504 the value of the attribute. */
20505
20506 static void
20507 add_name_attribute (dw_die_ref die, const char *name_string)
20508 {
20509 if (name_string != NULL && *name_string != 0)
20510 {
20511 if (demangle_name_func)
20512 name_string = (*demangle_name_func) (name_string);
20513
20514 add_AT_string (die, DW_AT_name, name_string);
20515 }
20516 }
20517
20518 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20519 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20520 of TYPE accordingly.
20521
20522    ??? This is a temporary measure until we're able to generate
20523 regular DWARF for the complex Ada type system. */
20524
20525 static void
20526 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20527 dw_die_ref context_die)
20528 {
20529 tree dtype;
20530 dw_die_ref dtype_die;
20531
20532 if (!lang_hooks.types.descriptive_type)
20533 return;
20534
20535 dtype = lang_hooks.types.descriptive_type (type);
20536 if (!dtype)
20537 return;
20538
20539 dtype_die = lookup_type_die (dtype);
20540 if (!dtype_die)
20541 {
20542 gen_type_die (dtype, context_die);
20543 dtype_die = lookup_type_die (dtype);
20544 gcc_assert (dtype_die);
20545 }
20546
20547 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20548 }
20549
20550 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20551
20552 static const char *
20553 comp_dir_string (void)
20554 {
20555 const char *wd;
20556 char *wd1;
20557 static const char *cached_wd = NULL;
20558
20559 if (cached_wd != NULL)
20560 return cached_wd;
20561
20562 wd = get_src_pwd ();
20563 if (wd == NULL)
20564 return NULL;
20565
20566 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20567 {
20568 int wdlen;
20569
20570 wdlen = strlen (wd);
20571 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20572 strcpy (wd1, wd);
20573 wd1 [wdlen] = DIR_SEPARATOR;
20574 wd1 [wdlen + 1] = 0;
20575 wd = wd1;
20576 }
20577
20578 cached_wd = remap_debug_filename (wd);
20579 return cached_wd;
20580 }
20581
20582 /* Generate a DW_AT_comp_dir attribute for DIE. */
20583
20584 static void
20585 add_comp_dir_attribute (dw_die_ref die)
20586 {
20587 const char * wd = comp_dir_string ();
20588 if (wd != NULL)
20589 add_AT_string (die, DW_AT_comp_dir, wd);
20590 }
20591
20592 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20593 pointer computation, ...), output a representation for that bound according
20594 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20595 loc_list_from_tree for the meaning of CONTEXT. */
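/* For instance, a constant bound such as 10 can be emitted directly as an
   integer attribute (dw_scalar_form_constant); a bound that lives in a
   variable or parameter which already has a DIE with a location can be
   emitted as a DIE reference (dw_scalar_form_reference); and a computed
   bound can be emitted as a DWARF expression (dw_scalar_form_exprloc).  */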
20596
20597 static void
20598 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20599 int forms, struct loc_descr_context *context)
20600 {
20601 dw_die_ref context_die, decl_die = NULL;
20602 dw_loc_list_ref list;
20603 bool strip_conversions = true;
20604 bool placeholder_seen = false;
20605
20606 while (strip_conversions)
20607 switch (TREE_CODE (value))
20608 {
20609 case ERROR_MARK:
20610 case SAVE_EXPR:
20611 return;
20612
20613 CASE_CONVERT:
20614 case VIEW_CONVERT_EXPR:
20615 value = TREE_OPERAND (value, 0);
20616 break;
20617
20618 default:
20619 strip_conversions = false;
20620 break;
20621 }
20622
20623 /* If possible and permitted, output the attribute as a constant. */
20624 if ((forms & dw_scalar_form_constant) != 0
20625 && TREE_CODE (value) == INTEGER_CST)
20626 {
20627 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20628
20629 /* If HOST_WIDE_INT is big enough then represent the bound as
20630 a constant value. We need to choose a form based on
20631 whether the type is signed or unsigned. We cannot just
20632 call add_AT_unsigned if the value itself is positive
20633 (add_AT_unsigned might add the unsigned value encoded as
20634	 DW_FORM_data[1248]).  Some DWARF consumers will look up the
20635 bounds type and then sign extend any unsigned values found
20636 for signed types. This is needed only for
20637 DW_AT_{lower,upper}_bound, since for most other attributes,
20638 consumers will treat DW_FORM_data[1248] as unsigned values,
20639 regardless of the underlying type. */
20640 if (prec <= HOST_BITS_PER_WIDE_INT
20641 || tree_fits_uhwi_p (value))
20642 {
20643 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20644 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20645 else
20646 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20647 }
20648 else
20649 /* Otherwise represent the bound as an unsigned value with
20650 the precision of its type. The precision and signedness
20651 of the type will be necessary to re-interpret it
20652 unambiguously. */
20653 add_AT_wide (die, attr, wi::to_wide (value));
20654 return;
20655 }
20656
20657 /* Otherwise, if it's possible and permitted too, output a reference to
20658 another DIE. */
20659 if ((forms & dw_scalar_form_reference) != 0)
20660 {
20661 tree decl = NULL_TREE;
20662
20663 /* Some type attributes reference an outer type. For instance, the upper
20664 bound of an array may reference an embedding record (this happens in
20665 Ada). */
20666 if (TREE_CODE (value) == COMPONENT_REF
20667 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20668 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20669 decl = TREE_OPERAND (value, 1);
20670
20671 else if (VAR_P (value)
20672 || TREE_CODE (value) == PARM_DECL
20673 || TREE_CODE (value) == RESULT_DECL)
20674 decl = value;
20675
20676 if (decl != NULL_TREE)
20677 {
20678 decl_die = lookup_decl_die (decl);
20679
20680 /* ??? Can this happen, or should the variable have been bound
20681 first? Probably it can, since I imagine that we try to create
20682 the types of parameters in the order in which they exist in
20683 the list, and won't have created a forward reference to a
20684 later parameter. */
20685 if (decl_die != NULL)
20686 {
20687 if (get_AT (decl_die, DW_AT_location)
20688 || get_AT (decl_die, DW_AT_const_value))
20689 {
20690 add_AT_die_ref (die, attr, decl_die);
20691 return;
20692 }
20693 }
20694 }
20695 }
20696
20697 /* Last chance: try to create a stack operation procedure to evaluate the
20698 value. Do nothing if even that is not possible or permitted. */
20699 if ((forms & dw_scalar_form_exprloc) == 0)
20700 return;
20701
20702 list = loc_list_from_tree (value, 2, context);
20703 if (context && context->placeholder_arg)
20704 {
20705 placeholder_seen = context->placeholder_seen;
20706 context->placeholder_seen = false;
20707 }
20708 if (list == NULL || single_element_loc_list_p (list))
20709 {
20710       /* If this attribute is neither a reference nor a constant, it is
20711	  a DWARF expression rather than a location description.  For that
20712 loc_list_from_tree (value, 0, &context) is needed. */
20713 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20714 if (list2 && single_element_loc_list_p (list2))
20715 {
20716 if (placeholder_seen)
20717 {
20718 struct dwarf_procedure_info dpi;
20719 dpi.fndecl = NULL_TREE;
20720 dpi.args_count = 1;
20721 if (!resolve_args_picking (list2->expr, 1, &dpi))
20722 return;
20723 }
20724 add_AT_loc (die, attr, list2->expr);
20725 return;
20726 }
20727 }
20728
20729 /* If that failed to give a single element location list, fall back to
20730 outputting this as a reference... still if permitted. */
20731 if (list == NULL
20732 || (forms & dw_scalar_form_reference) == 0
20733 || placeholder_seen)
20734 return;
20735
20736 if (!decl_die)
20737 {
20738 if (current_function_decl == 0)
20739 context_die = comp_unit_die ();
20740 else
20741 context_die = lookup_decl_die (current_function_decl);
20742
20743 decl_die = new_die (DW_TAG_variable, context_die, value);
20744 add_AT_flag (decl_die, DW_AT_artificial, 1);
20745 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20746 context_die);
20747 }
20748
20749 add_AT_location_description (decl_die, DW_AT_location, list);
20750 add_AT_die_ref (die, attr, decl_die);
20751 }
20752
20753 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20754 default. */
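/* For example, C-family languages default to 0 and Fortran to 1; add_bound_info
   below uses this to omit DW_AT_lower_bound when an array's lower bound matches
   the language default.  */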
20755
20756 static int
20757 lower_bound_default (void)
20758 {
20759 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20760 {
20761 case DW_LANG_C:
20762 case DW_LANG_C89:
20763 case DW_LANG_C99:
20764 case DW_LANG_C11:
20765 case DW_LANG_C_plus_plus:
20766 case DW_LANG_C_plus_plus_11:
20767 case DW_LANG_C_plus_plus_14:
20768 case DW_LANG_ObjC:
20769 case DW_LANG_ObjC_plus_plus:
20770 return 0;
20771 case DW_LANG_Fortran77:
20772 case DW_LANG_Fortran90:
20773 case DW_LANG_Fortran95:
20774 case DW_LANG_Fortran03:
20775 case DW_LANG_Fortran08:
20776 return 1;
20777 case DW_LANG_UPC:
20778 case DW_LANG_D:
20779 case DW_LANG_Python:
20780 return dwarf_version >= 4 ? 0 : -1;
20781 case DW_LANG_Ada95:
20782 case DW_LANG_Ada83:
20783 case DW_LANG_Cobol74:
20784 case DW_LANG_Cobol85:
20785 case DW_LANG_Modula2:
20786 case DW_LANG_PLI:
20787 return dwarf_version >= 4 ? 1 : -1;
20788 default:
20789 return -1;
20790 }
20791 }
20792
20793 /* Given a tree node describing an array bound (either lower or upper) output
20794 a representation for that bound. */
20795
20796 static void
20797 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20798 tree bound, struct loc_descr_context *context)
20799 {
20800 int dflt;
20801
20802 while (1)
20803 switch (TREE_CODE (bound))
20804 {
20805 /* Strip all conversions. */
20806 CASE_CONVERT:
20807 case VIEW_CONVERT_EXPR:
20808 bound = TREE_OPERAND (bound, 0);
20809 break;
20810
20811 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20812 are even omitted when they are the default. */
20813 case INTEGER_CST:
20814 /* If the value for this bound is the default one, we can even omit the
20815 attribute. */
20816 if (bound_attr == DW_AT_lower_bound
20817 && tree_fits_shwi_p (bound)
20818 && (dflt = lower_bound_default ()) != -1
20819 && tree_to_shwi (bound) == dflt)
20820 return;
20821
20822 /* FALLTHRU */
20823
20824 default:
20825       /* Because of the complex interactions there can be with other GNAT
20826	 encodings, GDB isn't ready yet to handle a proper DWARF description
20827	 for self-referential subrange bounds: let GNAT encodings do the
20828 magic in such a case. */
20829 if (is_ada ()
20830 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20831 && contains_placeholder_p (bound))
20832 return;
20833
20834 add_scalar_info (subrange_die, bound_attr, bound,
20835 dw_scalar_form_constant
20836 | dw_scalar_form_exprloc
20837 | dw_scalar_form_reference,
20838 context);
20839 return;
20840 }
20841 }
20842
20843 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20844 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20845 Note that the block of subscript information for an array type also
20846 includes information about the element type of the given array type.
20847
20848 This function reuses previously set type and bound information if
20849 available. */
20850
20851 static void
20852 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20853 {
20854 unsigned dimension_number;
20855 tree lower, upper;
20856 dw_die_ref child = type_die->die_child;
20857
20858 for (dimension_number = 0;
20859 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20860 type = TREE_TYPE (type), dimension_number++)
20861 {
20862 tree domain = TYPE_DOMAIN (type);
20863
20864 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20865 break;
20866
20867 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20868 and (in GNU C only) variable bounds. Handle all three forms
20869 here. */
20870
20871 /* Find and reuse a previously generated DW_TAG_subrange_type if
20872 available.
20873
20874 For multi-dimensional arrays, as we iterate through the
20875 various dimensions in the enclosing for loop above, we also
20876 	 iterate through the DIE children and pick up each
20877 DW_TAG_subrange_type previously generated (if available).
20878 Each child DW_TAG_subrange_type DIE describes the range of
20879 the current dimension. At this point we should have as many
20880 DW_TAG_subrange_type's as we have dimensions in the
20881 array. */
20882 dw_die_ref subrange_die = NULL;
20883 if (child)
20884 while (1)
20885 {
20886 child = child->die_sib;
20887 if (child->die_tag == DW_TAG_subrange_type)
20888 subrange_die = child;
20889 if (child == type_die->die_child)
20890 {
20891 /* If we wrapped around, stop looking next time. */
20892 child = NULL;
20893 break;
20894 }
20895 if (child->die_tag == DW_TAG_subrange_type)
20896 break;
20897 }
20898 if (!subrange_die)
20899 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20900
20901 if (domain)
20902 {
20903 /* We have an array type with specified bounds. */
20904 lower = TYPE_MIN_VALUE (domain);
20905 upper = TYPE_MAX_VALUE (domain);
20906
20907 /* Define the index type. */
20908 if (TREE_TYPE (domain)
20909 && !get_AT (subrange_die, DW_AT_type))
20910 {
20911 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20912 TREE_TYPE field. We can't emit debug info for this
20913 because it is an unnamed integral type. */
20914 if (TREE_CODE (domain) == INTEGER_TYPE
20915 && TYPE_NAME (domain) == NULL_TREE
20916 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20917 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20918 ;
20919 else
20920 add_type_attribute (subrange_die, TREE_TYPE (domain),
20921 TYPE_UNQUALIFIED, false, type_die);
20922 }
20923
20924 /* ??? If upper is NULL, the array has unspecified length,
20925 but it does have a lower bound. This happens with Fortran
20926 dimension arr(N:*)
20927 Since the debugger is definitely going to need to know N
20928 to produce useful results, go ahead and output the lower
20929 bound solo, and hope the debugger can cope. */
20930
20931 if (!get_AT (subrange_die, DW_AT_lower_bound))
20932 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20933 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20934 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20935 }
20936
20937 /* Otherwise we have an array type with an unspecified length. The
20938 DWARF-2 spec does not say how to handle this; let's just leave out the
20939 bounds. */
20940 }
20941 }
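/* Minimal sketch of the intended output, assuming a C unit where the
   default lower bound of 0 is omitted: for

       int m[2][3];

   with COLLAPSE_P true, the single DW_TAG_array_type DIE ends up with
   two DW_TAG_subrange_type children whose DW_AT_upper_bound values are
   1 and 2, instead of a chain of nested one-dimensional array types.  */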
20942
20943 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20944
20945 static void
20946 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20947 {
20948 dw_die_ref decl_die;
20949 HOST_WIDE_INT size;
20950 dw_loc_descr_ref size_expr = NULL;
20951
20952 switch (TREE_CODE (tree_node))
20953 {
20954 case ERROR_MARK:
20955 size = 0;
20956 break;
20957 case ENUMERAL_TYPE:
20958 case RECORD_TYPE:
20959 case UNION_TYPE:
20960 case QUAL_UNION_TYPE:
20961 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20962 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20963 {
20964 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20965 return;
20966 }
20967 size_expr = type_byte_size (tree_node, &size);
20968 break;
20969 case FIELD_DECL:
20970 /* For a data member of a struct or union, the DW_AT_byte_size is
20971 generally given as the number of bytes normally allocated for an
20972 object of the *declared* type of the member itself. This is true
20973 even for bit-fields. */
20974 size = int_size_in_bytes (field_type (tree_node));
20975 break;
20976 default:
20977 gcc_unreachable ();
20978 }
20979
20980 /* Support for dynamically-sized objects was introduced by DWARFv3.
20981 At the moment, GDB does not handle variable byte sizes very well,
20982 though. */
20983 if ((dwarf_version >= 3 || !dwarf_strict)
20984 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20985 && size_expr != NULL)
20986 add_AT_loc (die, DW_AT_byte_size, size_expr);
20987
20988 /* Note that `size' might be -1 when we get to this point. If it is, that
20989 indicates that the byte size of the entity in question is variable and
20990 that we could not generate a DWARF expression that computes it. */
20991 if (size >= 0)
20992 add_AT_unsigned (die, DW_AT_byte_size, size);
20993 }
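/* Sketch of the two shapes this can produce: for a fixed-size type such
   as

       struct s { int i; char c; };

   DW_AT_byte_size is a plain unsigned constant, while for a type whose
   size is only known at run time the attribute may instead be a DWARF
   expression (or a reference to the artificial size variable), subject
   to the DWARF-version and GNAT-encoding checks above.  */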
20994
20995 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20996 alignment. */
20997
20998 static void
20999 add_alignment_attribute (dw_die_ref die, tree tree_node)
21000 {
21001 if (dwarf_version < 5 && dwarf_strict)
21002 return;
21003
21004 unsigned align;
21005
21006 if (DECL_P (tree_node))
21007 {
21008 if (!DECL_USER_ALIGN (tree_node))
21009 return;
21010
21011 align = DECL_ALIGN_UNIT (tree_node);
21012 }
21013 else if (TYPE_P (tree_node))
21014 {
21015 if (!TYPE_USER_ALIGN (tree_node))
21016 return;
21017
21018 align = TYPE_ALIGN_UNIT (tree_node);
21019 }
21020 else
21021 gcc_unreachable ();
21022
21023 add_AT_unsigned (die, DW_AT_alignment, align);
21024 }
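/* Only user-specified alignment is recorded here; for example,

       struct s { char c; } __attribute__ ((aligned (16)));

   would get DW_AT_alignment 16 (given DWARF 5 or non-strict DWARF),
   whereas a type carrying only its natural alignment gets no such
   attribute at all.  */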
21025
21026 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21027 which specifies the distance in bits from the highest order bit of the
21028 "containing object" for the bit-field to the highest order bit of the
21029 bit-field itself.
21030
21031 For any given bit-field, the "containing object" is a hypothetical object
21032 (of some integral or enum type) within which the given bit-field lives. The
21033 type of this hypothetical "containing object" is always the same as the
21034 declared type of the individual bit-field itself. The determination of the
21035 exact location of the "containing object" for a bit-field is rather
21036 complicated. It's handled by the `field_byte_offset' function (above).
21037
21038 CTX is required: see the comment for VLR_CONTEXT.
21039
21040 Note that it is the size (in bytes) of the hypothetical "containing object"
21041 which will be given in the DW_AT_byte_size attribute for this bit-field.
21042    (See `add_byte_size_attribute' above).  */
21043
21044 static inline void
21045 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21046 {
21047 HOST_WIDE_INT object_offset_in_bytes;
21048 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21049 HOST_WIDE_INT bitpos_int;
21050 HOST_WIDE_INT highest_order_object_bit_offset;
21051 HOST_WIDE_INT highest_order_field_bit_offset;
21052 HOST_WIDE_INT bit_offset;
21053
21054 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21055
21056 /* Must be a field and a bit field. */
21057 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21058
21059 /* We can't yet handle bit-fields whose offsets are variable, so if we
21060 encounter such things, just return without generating any attribute
21061 whatsoever. Likewise for variable or too large size. */
21062 if (! tree_fits_shwi_p (bit_position (decl))
21063 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21064 return;
21065
21066 bitpos_int = int_bit_position (decl);
21067
21068 /* Note that the bit offset is always the distance (in bits) from the
21069 highest-order bit of the "containing object" to the highest-order bit of
21070 the bit-field itself. Since the "high-order end" of any object or field
21071 is different on big-endian and little-endian machines, the computation
21072 below must take account of these differences. */
21073 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21074 highest_order_field_bit_offset = bitpos_int;
21075
21076 if (! BYTES_BIG_ENDIAN)
21077 {
21078 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21079 highest_order_object_bit_offset +=
21080 simple_type_size_in_bits (original_type);
21081 }
21082
21083 bit_offset
21084 = (! BYTES_BIG_ENDIAN
21085 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21086 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21087
21088 if (bit_offset < 0)
21089 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21090 else
21091 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21092 }
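/* Worked example, assuming a 32-bit int and both fields sharing a
   containing object at byte offset 0:

       struct s { int a : 3; int b : 5; };

   For B on a little-endian target, bitpos_int is 3 and DECL_SIZE is 5,
   so the field's highest-order bit offset becomes 3 + 5 = 8, the
   object's becomes 0 + 32 = 32, and DW_AT_bit_offset is 32 - 8 = 24.
   On a big-endian target the same field gets simply 3 - 0 = 3.  */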
21093
21094 /* For a FIELD_DECL node which represents a bit field, output an attribute
21095 which specifies the length in bits of the given field. */
21096
21097 static inline void
21098 add_bit_size_attribute (dw_die_ref die, tree decl)
21099 {
21100 /* Must be a field and a bit field. */
21101 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21102 && DECL_BIT_FIELD_TYPE (decl));
21103
21104 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21105 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21106 }
21107
21108 /* If the compiled language is ANSI C, then add a 'prototyped' attribute
21109    if argument types are given for the parameters of a function.  */
21110
21111 static inline void
21112 add_prototyped_attribute (dw_die_ref die, tree func_type)
21113 {
21114 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21115 {
21116 case DW_LANG_C:
21117 case DW_LANG_C89:
21118 case DW_LANG_C99:
21119 case DW_LANG_C11:
21120 case DW_LANG_ObjC:
21121 if (prototype_p (func_type))
21122 add_AT_flag (die, DW_AT_prototyped, 1);
21123 break;
21124 default:
21125 break;
21126 }
21127 }
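/* For instance, in C the declaration "int f (void)" is a prototype and
   receives DW_AT_prototyped, while the old-style "int f ()" does not;
   for languages outside the C/ObjC family no attribute is emitted.  */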
21128
21129 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21130 by looking in the type declaration, the object declaration equate table or
21131 the block mapping. */
21132
21133 static inline dw_die_ref
21134 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21135 {
21136 dw_die_ref origin_die = NULL;
21137
21138 if (DECL_P (origin))
21139 {
21140 dw_die_ref c;
21141 origin_die = lookup_decl_die (origin);
21142       /* "Unwrap" the decl's DIE which we put in the imported unit context.
21143 We are looking for the abstract copy here. */
21144 if (in_lto_p
21145 && origin_die
21146 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21147 /* ??? Identify this better. */
21148 && c->with_offset)
21149 origin_die = c;
21150 }
21151 else if (TYPE_P (origin))
21152 origin_die = lookup_type_die (origin);
21153 else if (TREE_CODE (origin) == BLOCK)
21154 origin_die = BLOCK_DIE (origin);
21155
21156 /* XXX: Functions that are never lowered don't always have correct block
21157      trees (in the case of Java they simply have no block tree; this also
21158      happens in some other languages).  For these functions, there is
21159      nothing we can really do to output correct debug info for inlined
21160      functions in all cases.  Rather than dying, we just produce deficient
21161      debug info now, in that we will have variables without a proper
21162      abstract origin.  In the future, when all functions are lowered, we
21163      should re-add a gcc_assert (origin_die) here.  */
21164
21165 if (origin_die)
21166 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21167 return origin_die;
21168 }
21169
21170 /* We do not currently support the pure_virtual attribute. */
21171
21172 static inline void
21173 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21174 {
21175 if (DECL_VINDEX (func_decl))
21176 {
21177 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21178
21179 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21180 add_AT_loc (die, DW_AT_vtable_elem_location,
21181 new_loc_descr (DW_OP_constu,
21182 tree_to_shwi (DECL_VINDEX (func_decl)),
21183 0));
21184
21185 /* GNU extension: Record what type this method came from originally. */
21186 if (debug_info_level > DINFO_LEVEL_TERSE
21187 && DECL_CONTEXT (func_decl))
21188 add_AT_die_ref (die, DW_AT_containing_type,
21189 lookup_type_die (DECL_CONTEXT (func_decl)));
21190 }
21191 }
21192 \f
21193 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21194    given decl.  This was a vendor extension until DWARF 4
21195    standardized it.  */
21196
21197 static void
21198 add_linkage_attr (dw_die_ref die, tree decl)
21199 {
21200 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21201
21202 /* Mimic what assemble_name_raw does with a leading '*'. */
21203 if (name[0] == '*')
21204 name = &name[1];
21205
21206 if (dwarf_version >= 4)
21207 add_AT_string (die, DW_AT_linkage_name, name);
21208 else
21209 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21210 }
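/* Example: for a C++ function "void ns::f (int)" the assembler name is
   the mangled "_ZN2ns1fEi"; it lands in DW_AT_linkage_name for DWARF 4
   and later, and in the vendor DW_AT_MIPS_linkage_name attribute for
   older DWARF versions.  */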
21211
21212 /* Add source coordinate attributes for the given decl. */
21213
21214 static void
21215 add_src_coords_attributes (dw_die_ref die, tree decl)
21216 {
21217 expanded_location s;
21218
21219 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21220 return;
21221 s = expand_location (DECL_SOURCE_LOCATION (decl));
21222 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21223 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21224 if (debug_column_info && s.column)
21225 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21226 }
21227
21228 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21229
21230 static void
21231 add_linkage_name_raw (dw_die_ref die, tree decl)
21232 {
21233 /* Defer until we have an assembler name set. */
21234 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21235 {
21236 limbo_die_node *asm_name;
21237
21238 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21239 asm_name->die = die;
21240 asm_name->created_for = decl;
21241 asm_name->next = deferred_asm_name;
21242 deferred_asm_name = asm_name;
21243 }
21244 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21245 add_linkage_attr (die, decl);
21246 }
21247
21248 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21249
21250 static void
21251 add_linkage_name (dw_die_ref die, tree decl)
21252 {
21253 if (debug_info_level > DINFO_LEVEL_NONE
21254 && VAR_OR_FUNCTION_DECL_P (decl)
21255 && TREE_PUBLIC (decl)
21256 && !(VAR_P (decl) && DECL_REGISTER (decl))
21257 && die->die_tag != DW_TAG_member)
21258 add_linkage_name_raw (die, decl);
21259 }
21260
21261 /* Add a DW_AT_name attribute and source coordinate attribute for the
21262 given decl, but only if it actually has a name. */
21263
21264 static void
21265 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21266 bool no_linkage_name)
21267 {
21268 tree decl_name;
21269
21270 decl_name = DECL_NAME (decl);
21271 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21272 {
21273 const char *name = dwarf2_name (decl, 0);
21274 if (name)
21275 add_name_attribute (die, name);
21276 if (! DECL_ARTIFICIAL (decl))
21277 add_src_coords_attributes (die, decl);
21278
21279 if (!no_linkage_name)
21280 add_linkage_name (die, decl);
21281 }
21282
21283 #ifdef VMS_DEBUGGING_INFO
21284 /* Get the function's name, as described by its RTL. This may be different
21285 from the DECL_NAME name used in the source file. */
21286 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21287 {
21288 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21289 XEXP (DECL_RTL (decl), 0), false);
21290 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21291 }
21292 #endif /* VMS_DEBUGGING_INFO */
21293 }
21294
21295 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21296
21297 static void
21298 add_discr_value (dw_die_ref die, dw_discr_value *value)
21299 {
21300 dw_attr_node attr;
21301
21302 attr.dw_attr = DW_AT_discr_value;
21303 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21304 attr.dw_attr_val.val_entry = NULL;
21305 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21306 if (value->pos)
21307 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21308 else
21309 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21310 add_dwarf_attr (die, &attr);
21311 }
21312
21313 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21314
21315 static void
21316 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21317 {
21318 dw_attr_node attr;
21319
21320 attr.dw_attr = DW_AT_discr_list;
21321 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21322 attr.dw_attr_val.val_entry = NULL;
21323 attr.dw_attr_val.v.val_discr_list = discr_list;
21324 add_dwarf_attr (die, &attr);
21325 }
21326
21327 static inline dw_discr_list_ref
21328 AT_discr_list (dw_attr_node *attr)
21329 {
21330 return attr->dw_attr_val.v.val_discr_list;
21331 }
21332
21333 #ifdef VMS_DEBUGGING_INFO
21334 /* Output the debug main pointer die for VMS */
21335
21336 void
21337 dwarf2out_vms_debug_main_pointer (void)
21338 {
21339 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21340 dw_die_ref die;
21341
21342 /* Allocate the VMS debug main subprogram die. */
21343 die = new_die_raw (DW_TAG_subprogram);
21344 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21345 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21346 current_function_funcdef_no);
21347 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21348
21349 /* Make it the first child of comp_unit_die (). */
21350 die->die_parent = comp_unit_die ();
21351 if (comp_unit_die ()->die_child)
21352 {
21353 die->die_sib = comp_unit_die ()->die_child->die_sib;
21354 comp_unit_die ()->die_child->die_sib = die;
21355 }
21356 else
21357 {
21358 die->die_sib = die;
21359 comp_unit_die ()->die_child = die;
21360 }
21361 }
21362 #endif /* VMS_DEBUGGING_INFO */
21363
21364 /* walk_tree helper function for uses_local_type, below. */
21365
21366 static tree
21367 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21368 {
21369 if (!TYPE_P (*tp))
21370 *walk_subtrees = 0;
21371 else
21372 {
21373 tree name = TYPE_NAME (*tp);
21374 if (name && DECL_P (name) && decl_function_context (name))
21375 return *tp;
21376 }
21377 return NULL_TREE;
21378 }
21379
21380 /* If TYPE involves a function-local type (including a local typedef to a
21381 non-local type), returns that type; otherwise returns NULL_TREE. */
21382
21383 static tree
21384 uses_local_type (tree type)
21385 {
21386 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21387 return used;
21388 }
21389
21390 /* Return the DIE for the scope that immediately contains this type.
21391 Non-named types that do not involve a function-local type get global
21392 scope. Named types nested in namespaces or other types get their
21393 containing scope. All other types (i.e. function-local named types) get
21394 the current active scope. */
21395
21396 static dw_die_ref
21397 scope_die_for (tree t, dw_die_ref context_die)
21398 {
21399 dw_die_ref scope_die = NULL;
21400 tree containing_scope;
21401
21402 /* Non-types always go in the current scope. */
21403 gcc_assert (TYPE_P (t));
21404
21405 /* Use the scope of the typedef, rather than the scope of the type
21406 it refers to. */
21407 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21408 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21409 else
21410 containing_scope = TYPE_CONTEXT (t);
21411
21412 /* Use the containing namespace if there is one. */
21413 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21414 {
21415 if (context_die == lookup_decl_die (containing_scope))
21416 /* OK */;
21417 else if (debug_info_level > DINFO_LEVEL_TERSE)
21418 context_die = get_context_die (containing_scope);
21419 else
21420 containing_scope = NULL_TREE;
21421 }
21422
21423 /* Ignore function type "scopes" from the C frontend. They mean that
21424 a tagged type is local to a parmlist of a function declarator, but
21425 that isn't useful to DWARF. */
21426 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21427 containing_scope = NULL_TREE;
21428
21429 if (SCOPE_FILE_SCOPE_P (containing_scope))
21430 {
21431 /* If T uses a local type keep it local as well, to avoid references
21432 to function-local DIEs from outside the function. */
21433 if (current_function_decl && uses_local_type (t))
21434 scope_die = context_die;
21435 else
21436 scope_die = comp_unit_die ();
21437 }
21438 else if (TYPE_P (containing_scope))
21439 {
21440 /* For types, we can just look up the appropriate DIE. */
21441 if (debug_info_level > DINFO_LEVEL_TERSE)
21442 scope_die = get_context_die (containing_scope);
21443 else
21444 {
21445 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21446 if (scope_die == NULL)
21447 scope_die = comp_unit_die ();
21448 }
21449 }
21450 else
21451 scope_die = context_die;
21452
21453 return scope_die;
21454 }
21455
21456 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21457
21458 static inline int
21459 local_scope_p (dw_die_ref context_die)
21460 {
21461 for (; context_die; context_die = context_die->die_parent)
21462 if (context_die->die_tag == DW_TAG_inlined_subroutine
21463 || context_die->die_tag == DW_TAG_subprogram)
21464 return 1;
21465
21466 return 0;
21467 }
21468
21469 /* Returns nonzero if CONTEXT_DIE is a class. */
21470
21471 static inline int
21472 class_scope_p (dw_die_ref context_die)
21473 {
21474 return (context_die
21475 && (context_die->die_tag == DW_TAG_structure_type
21476 || context_die->die_tag == DW_TAG_class_type
21477 || context_die->die_tag == DW_TAG_interface_type
21478 || context_die->die_tag == DW_TAG_union_type));
21479 }
21480
21481 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21482 whether or not to treat a DIE in this context as a declaration. */
21483
21484 static inline int
21485 class_or_namespace_scope_p (dw_die_ref context_die)
21486 {
21487 return (class_scope_p (context_die)
21488 || (context_die && context_die->die_tag == DW_TAG_namespace));
21489 }
21490
21491 /* Many forms of DIEs require a "type description" attribute. This
21492 routine locates the proper "type descriptor" die for the type given
21493 by 'type' plus any additional qualifiers given by 'cv_quals', and
21494 adds a DW_AT_type attribute below the given die. */
21495
21496 static void
21497 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21498 bool reverse, dw_die_ref context_die)
21499 {
21500 enum tree_code code = TREE_CODE (type);
21501 dw_die_ref type_die = NULL;
21502
21503 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21504 or fixed-point type, use the inner type. This is because we have no
21505 support for unnamed types in base_type_die. This can happen if this is
21506      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
21507 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21508 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21509 type = TREE_TYPE (type), code = TREE_CODE (type);
21510
21511 if (code == ERROR_MARK
21512 /* Handle a special case. For functions whose return type is void, we
21513 generate *no* type attribute. (Note that no object may have type
21514 `void', so this only applies to function return types). */
21515 || code == VOID_TYPE)
21516 return;
21517
21518 type_die = modified_type_die (type,
21519 cv_quals | TYPE_QUALS (type),
21520 reverse,
21521 context_die);
21522
21523 if (type_die != NULL)
21524 add_AT_die_ref (object_die, DW_AT_type, type_die);
21525 }
21526
21527 /* Given an object die, add the calling convention attribute for the
21528 function call type. */
21529 static void
21530 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21531 {
21532 enum dwarf_calling_convention value = DW_CC_normal;
21533
21534 value = ((enum dwarf_calling_convention)
21535 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21536
21537 if (is_fortran ()
21538 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21539 {
21540 /* DWARF 2 doesn't provide a way to identify a program's source-level
21541 entry point. DW_AT_calling_convention attributes are only meant
21542 to describe functions' calling conventions. However, lacking a
21543 better way to signal the Fortran main program, we used this for
21544 a long time, following existing custom. Now, DWARF 4 has
21545 DW_AT_main_subprogram, which we add below, but some tools still
21546 rely on the old way, which we thus keep. */
21547 value = DW_CC_program;
21548
21549 if (dwarf_version >= 4 || !dwarf_strict)
21550 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21551 }
21552
21553   /* Only add the attribute if the backend requests it, and the value
21554      is not DW_CC_normal.  */
21555 if (value && (value != DW_CC_normal))
21556 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21557 }
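/* For example, a Fortran main program (assembler name "MAIN__") is
   tagged with DW_CC_program and, when DWARF 4 or non-strict DWARF is in
   use, also with DW_AT_main_subprogram, while an ordinary function
   whose target convention is DW_CC_normal gets no
   DW_AT_calling_convention attribute at all.  */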
21558
21559 /* Given a tree pointer to a struct, class, union, or enum type node, return
21560 a pointer to the (string) tag name for the given type, or zero if the type
21561 was declared without a tag. */
21562
21563 static const char *
21564 type_tag (const_tree type)
21565 {
21566 const char *name = 0;
21567
21568 if (TYPE_NAME (type) != 0)
21569 {
21570 tree t = 0;
21571
21572 /* Find the IDENTIFIER_NODE for the type name. */
21573 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21574 && !TYPE_NAMELESS (type))
21575 t = TYPE_NAME (type);
21576
21577 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21578 a TYPE_DECL node, regardless of whether or not a `typedef' was
21579 involved. */
21580 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21581 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21582 {
21583 /* We want to be extra verbose. Don't call dwarf_name if
21584 DECL_NAME isn't set. The default hook for decl_printable_name
21585 doesn't like that, and in this context it's correct to return
21586 0, instead of "<anonymous>" or the like. */
21587 if (DECL_NAME (TYPE_NAME (type))
21588 && !DECL_NAMELESS (TYPE_NAME (type)))
21589 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21590 }
21591
21592 /* Now get the name as a string, or invent one. */
21593 if (!name && t != 0)
21594 name = IDENTIFIER_POINTER (t);
21595 }
21596
21597 return (name == 0 || *name == '\0') ? 0 : name;
21598 }
21599
21600 /* Return the type associated with a data member, making a special check
21601    for bit-field types.  */
21602
21603 static inline tree
21604 member_declared_type (const_tree member)
21605 {
21606 return (DECL_BIT_FIELD_TYPE (member)
21607 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21608 }
21609
21610 /* Get the decl's label, as described by its RTL. This may be different
21611 from the DECL_NAME name used in the source file. */
21612
21613 #if 0
21614 static const char *
21615 decl_start_label (tree decl)
21616 {
21617 rtx x;
21618 const char *fnname;
21619
21620 x = DECL_RTL (decl);
21621 gcc_assert (MEM_P (x));
21622
21623 x = XEXP (x, 0);
21624 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21625
21626 fnname = XSTR (x, 0);
21627 return fnname;
21628 }
21629 #endif
21630 \f
21631 /* For variable-length arrays that have been previously generated, but
21632 may be incomplete due to missing subscript info, fill the subscript
21633 info. Return TRUE if this is one of those cases. */
21634 static bool
21635 fill_variable_array_bounds (tree type)
21636 {
21637 if (TREE_ASM_WRITTEN (type)
21638 && TREE_CODE (type) == ARRAY_TYPE
21639 && variably_modified_type_p (type, NULL))
21640 {
21641 dw_die_ref array_die = lookup_type_die (type);
21642 if (!array_die)
21643 return false;
21644 add_subscript_info (array_die, type, !is_ada ());
21645 return true;
21646 }
21647 return false;
21648 }
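/* Typical case, as a sketch: for a C99 variable-length array such as

       void f (int n) { int a[n]; ... }

   the array type DIE may have been written early without usable bounds;
   when the type is encountered again, the missing subscript info is
   added to the existing DIE rather than generating a duplicate
   DW_TAG_array_type.  */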
21649
21650 /* These routines generate the internal representation of the DIE's for
21651 the compilation unit. Debugging information is collected by walking
21652 the declaration trees passed in from dwarf2out_decl(). */
21653
21654 static void
21655 gen_array_type_die (tree type, dw_die_ref context_die)
21656 {
21657 dw_die_ref array_die;
21658
21659   /* GNU compilers represent multidimensional array types as sequences of
21660      one-dimensional array types whose element types are themselves array
21661      types.  We sometimes squish that down to a single array_type DIE with
21662      multiple subscripts in the Dwarf debugging info.  The draft Dwarf
21663      specification says that we are allowed to do this kind of compression
21664      in C, because there is no difference between an array of arrays and a
21665      multidimensional array.  We don't do this for Ada, to remain as close
21666      as possible to the actual representation, which is especially important
21667      given the language's flexibility with respect to arrays of variable size.  */
21668
21669 bool collapse_nested_arrays = !is_ada ();
21670
21671 if (fill_variable_array_bounds (type))
21672 return;
21673
21674 dw_die_ref scope_die = scope_die_for (type, context_die);
21675 tree element_type;
21676
21677 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21678 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21679 if (TYPE_STRING_FLAG (type)
21680 && TREE_CODE (type) == ARRAY_TYPE
21681 && is_fortran ()
21682 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21683 {
21684 HOST_WIDE_INT size;
21685
21686 array_die = new_die (DW_TAG_string_type, scope_die, type);
21687 add_name_attribute (array_die, type_tag (type));
21688 equate_type_number_to_die (type, array_die);
21689 size = int_size_in_bytes (type);
21690 if (size >= 0)
21691 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21692 /* ??? We can't annotate types late, but for LTO we may not
21693 generate a location early either (gfortran.dg/save_6.f90). */
21694 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21695 && TYPE_DOMAIN (type) != NULL_TREE
21696 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21697 {
21698 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21699 tree rszdecl = szdecl;
21700
21701 size = int_size_in_bytes (TREE_TYPE (szdecl));
21702 if (!DECL_P (szdecl))
21703 {
21704 if (TREE_CODE (szdecl) == INDIRECT_REF
21705 && DECL_P (TREE_OPERAND (szdecl, 0)))
21706 {
21707 rszdecl = TREE_OPERAND (szdecl, 0);
21708 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21709 != DWARF2_ADDR_SIZE)
21710 size = 0;
21711 }
21712 else
21713 size = 0;
21714 }
21715 if (size > 0)
21716 {
21717 dw_loc_list_ref loc
21718 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21719 NULL);
21720 if (loc)
21721 {
21722 add_AT_location_description (array_die, DW_AT_string_length,
21723 loc);
21724 if (size != DWARF2_ADDR_SIZE)
21725 add_AT_unsigned (array_die, dwarf_version >= 5
21726 ? DW_AT_string_length_byte_size
21727 : DW_AT_byte_size, size);
21728 }
21729 }
21730 }
21731 return;
21732 }
21733
21734 array_die = new_die (DW_TAG_array_type, scope_die, type);
21735 add_name_attribute (array_die, type_tag (type));
21736 equate_type_number_to_die (type, array_die);
21737
21738 if (TREE_CODE (type) == VECTOR_TYPE)
21739 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21740
21741 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21742 if (is_fortran ()
21743 && TREE_CODE (type) == ARRAY_TYPE
21744 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21745 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21746 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21747
21748 #if 0
21749 /* We default the array ordering. Debuggers will probably do the right
21750 things even if DW_AT_ordering is not present. It's not even an issue
21751 until we start to get into multidimensional arrays anyway. If a debugger
21752 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21753 then we'll have to put the DW_AT_ordering attribute back in. (But if
21754 and when we find out that we need to put these in, we will only do so
21755      for multidimensional arrays.)  */
21756 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21757 #endif
21758
21759 if (TREE_CODE (type) == VECTOR_TYPE)
21760 {
21761 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21762 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21763 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21764 add_bound_info (subrange_die, DW_AT_upper_bound,
21765 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21766 }
21767 else
21768 add_subscript_info (array_die, type, collapse_nested_arrays);
21769
21770 /* Add representation of the type of the elements of this array type and
21771 emit the corresponding DIE if we haven't done it already. */
21772 element_type = TREE_TYPE (type);
21773 if (collapse_nested_arrays)
21774 while (TREE_CODE (element_type) == ARRAY_TYPE)
21775 {
21776 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21777 break;
21778 element_type = TREE_TYPE (element_type);
21779 }
21780
21781 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21782 TREE_CODE (type) == ARRAY_TYPE
21783 && TYPE_REVERSE_STORAGE_ORDER (type),
21784 context_die);
21785
21786 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21787 if (TYPE_ARTIFICIAL (type))
21788 add_AT_flag (array_die, DW_AT_artificial, 1);
21789
21790 if (get_AT (array_die, DW_AT_name))
21791 add_pubtype (type, array_die);
21792
21793 add_alignment_attribute (array_die, type);
21794 }
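/* Two further shapes handled above, roughly: a GNU vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   becomes a DW_TAG_array_type flagged with DW_AT_GNU_vector and a
   single subrange covering 0 .. 3, while a Fortran CHARACTER type of
   kind 1 is emitted as DW_TAG_string_type, with its length given either
   as a constant byte size or through DW_AT_string_length.  */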
21795
21796 /* This routine generates a DIE for an array with a hidden descriptor;
21797    the details are filled into *info by a langhook.  */
21798
21799 static void
21800 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21801 dw_die_ref context_die)
21802 {
21803 const dw_die_ref scope_die = scope_die_for (type, context_die);
21804 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21805 struct loc_descr_context context = { type, info->base_decl, NULL,
21806 false, false };
21807 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21808 int dim;
21809
21810 add_name_attribute (array_die, type_tag (type));
21811 equate_type_number_to_die (type, array_die);
21812
21813 if (info->ndimensions > 1)
21814 switch (info->ordering)
21815 {
21816 case array_descr_ordering_row_major:
21817 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21818 break;
21819 case array_descr_ordering_column_major:
21820 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21821 break;
21822 default:
21823 break;
21824 }
21825
21826 if (dwarf_version >= 3 || !dwarf_strict)
21827 {
21828 if (info->data_location)
21829 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21830 dw_scalar_form_exprloc, &context);
21831 if (info->associated)
21832 add_scalar_info (array_die, DW_AT_associated, info->associated,
21833 dw_scalar_form_constant
21834 | dw_scalar_form_exprloc
21835 | dw_scalar_form_reference, &context);
21836 if (info->allocated)
21837 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21838 dw_scalar_form_constant
21839 | dw_scalar_form_exprloc
21840 | dw_scalar_form_reference, &context);
21841 if (info->stride)
21842 {
21843 const enum dwarf_attribute attr
21844 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21845 const int forms
21846 = (info->stride_in_bits)
21847 ? dw_scalar_form_constant
21848 : (dw_scalar_form_constant
21849 | dw_scalar_form_exprloc
21850 | dw_scalar_form_reference);
21851
21852 add_scalar_info (array_die, attr, info->stride, forms, &context);
21853 }
21854 }
21855 if (dwarf_version >= 5)
21856 {
21857 if (info->rank)
21858 {
21859 add_scalar_info (array_die, DW_AT_rank, info->rank,
21860 dw_scalar_form_constant
21861 | dw_scalar_form_exprloc, &context);
21862 subrange_tag = DW_TAG_generic_subrange;
21863 context.placeholder_arg = true;
21864 }
21865 }
21866
21867 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21868
21869 for (dim = 0; dim < info->ndimensions; dim++)
21870 {
21871 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21872
21873 if (info->dimen[dim].bounds_type)
21874 add_type_attribute (subrange_die,
21875 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21876 false, context_die);
21877 if (info->dimen[dim].lower_bound)
21878 add_bound_info (subrange_die, DW_AT_lower_bound,
21879 info->dimen[dim].lower_bound, &context);
21880 if (info->dimen[dim].upper_bound)
21881 add_bound_info (subrange_die, DW_AT_upper_bound,
21882 info->dimen[dim].upper_bound, &context);
21883 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21884 add_scalar_info (subrange_die, DW_AT_byte_stride,
21885 info->dimen[dim].stride,
21886 dw_scalar_form_constant
21887 | dw_scalar_form_exprloc
21888 | dw_scalar_form_reference,
21889 &context);
21890 }
21891
21892 gen_type_die (info->element_type, context_die);
21893 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21894 TREE_CODE (type) == ARRAY_TYPE
21895 && TYPE_REVERSE_STORAGE_ORDER (type),
21896 context_die);
21897
21898 if (get_AT (array_die, DW_AT_name))
21899 add_pubtype (type, array_die);
21900
21901 add_alignment_attribute (array_die, type);
21902 }
21903
21904 #if 0
21905 static void
21906 gen_entry_point_die (tree decl, dw_die_ref context_die)
21907 {
21908 tree origin = decl_ultimate_origin (decl);
21909 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21910
21911 if (origin != NULL)
21912 add_abstract_origin_attribute (decl_die, origin);
21913 else
21914 {
21915 add_name_and_src_coords_attributes (decl_die, decl);
21916 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21917 TYPE_UNQUALIFIED, false, context_die);
21918 }
21919
21920 if (DECL_ABSTRACT_P (decl))
21921 equate_decl_number_to_die (decl, decl_die);
21922 else
21923 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21924 }
21925 #endif
21926
21927 /* Walk through the list of incomplete types again, trying once more to
21928 emit full debugging info for them. */
21929
21930 static void
21931 retry_incomplete_types (void)
21932 {
21933 set_early_dwarf s;
21934 int i;
21935
21936 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21937 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21938 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21939 vec_safe_truncate (incomplete_types, 0);
21940 }
21941
21942 /* Determine what tag to use for a record type. */
21943
21944 static enum dwarf_tag
21945 record_type_tag (tree type)
21946 {
21947 if (! lang_hooks.types.classify_record)
21948 return DW_TAG_structure_type;
21949
21950 switch (lang_hooks.types.classify_record (type))
21951 {
21952 case RECORD_IS_STRUCT:
21953 return DW_TAG_structure_type;
21954
21955 case RECORD_IS_CLASS:
21956 return DW_TAG_class_type;
21957
21958 case RECORD_IS_INTERFACE:
21959 if (dwarf_version >= 3 || !dwarf_strict)
21960 return DW_TAG_interface_type;
21961 return DW_TAG_structure_type;
21962
21963 default:
21964 gcc_unreachable ();
21965 }
21966 }
21967
21968 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21969 include all of the information about the enumeration values also. Each
21970 enumerated type name/value is listed as a child of the enumerated type
21971 DIE. */
21972
21973 static dw_die_ref
21974 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21975 {
21976 dw_die_ref type_die = lookup_type_die (type);
21977 dw_die_ref orig_type_die = type_die;
21978
21979 if (type_die == NULL)
21980 {
21981 type_die = new_die (DW_TAG_enumeration_type,
21982 scope_die_for (type, context_die), type);
21983 equate_type_number_to_die (type, type_die);
21984 add_name_attribute (type_die, type_tag (type));
21985 if ((dwarf_version >= 4 || !dwarf_strict)
21986 && ENUM_IS_SCOPED (type))
21987 add_AT_flag (type_die, DW_AT_enum_class, 1);
21988 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21989 add_AT_flag (type_die, DW_AT_declaration, 1);
21990 if (!dwarf_strict)
21991 add_AT_unsigned (type_die, DW_AT_encoding,
21992 TYPE_UNSIGNED (type)
21993 ? DW_ATE_unsigned
21994 : DW_ATE_signed);
21995 }
21996 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21997 return type_die;
21998 else
21999 remove_AT (type_die, DW_AT_declaration);
22000
22001 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22002 given enum type is incomplete, do not generate the DW_AT_byte_size
22003 attribute or the DW_AT_element_list attribute. */
22004 if (TYPE_SIZE (type))
22005 {
22006 tree link;
22007
22008 if (!ENUM_IS_OPAQUE (type))
22009 TREE_ASM_WRITTEN (type) = 1;
22010 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22011 add_byte_size_attribute (type_die, type);
22012 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22013 add_alignment_attribute (type_die, type);
22014 if ((dwarf_version >= 3 || !dwarf_strict)
22015 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22016 {
22017 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22018 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22019 context_die);
22020 }
22021 if (TYPE_STUB_DECL (type) != NULL_TREE)
22022 {
22023 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22024 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22025 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22026 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22027 }
22028
22029 /* If the first reference to this type was as the return type of an
22030 inline function, then it may not have a parent. Fix this now. */
22031 if (type_die->die_parent == NULL)
22032 add_child_die (scope_die_for (type, context_die), type_die);
22033
22034 for (link = TYPE_VALUES (type);
22035 link != NULL; link = TREE_CHAIN (link))
22036 {
22037 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22038 tree value = TREE_VALUE (link);
22039
22040 gcc_assert (!ENUM_IS_OPAQUE (type));
22041 add_name_attribute (enum_die,
22042 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22043
22044 if (TREE_CODE (value) == CONST_DECL)
22045 value = DECL_INITIAL (value);
22046
22047 if (simple_type_size_in_bits (TREE_TYPE (value))
22048 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22049 {
22050 /* For constant forms created by add_AT_unsigned DWARF
22051 consumers (GDB, elfutils, etc.) always zero extend
22052 the value. Only when the actual value is negative
22053 do we need to use add_AT_int to generate a constant
22054 form that can represent negative values. */
22055 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22056 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22057 add_AT_unsigned (enum_die, DW_AT_const_value,
22058 (unsigned HOST_WIDE_INT) val);
22059 else
22060 add_AT_int (enum_die, DW_AT_const_value, val);
22061 }
22062 else
22063 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22064 that here. TODO: This should be re-worked to use correct
22065 signed/unsigned double tags for all cases. */
22066 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22067 }
22068
22069 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22070 if (TYPE_ARTIFICIAL (type)
22071 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22072 add_AT_flag (type_die, DW_AT_artificial, 1);
22073 }
22074 else
22075 add_AT_flag (type_die, DW_AT_declaration, 1);
22076
22077 add_pubtype (type, type_die);
22078
22079 return type_die;
22080 }
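/* Constant-value sketch: for an enumeration like

       enum e { A = -1, B = 2 };

   the enumerator A is emitted with the signed form (add_AT_int), since
   consumers zero-extend unsigned constant forms, while B uses the
   unsigned form (add_AT_unsigned).  */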
22081
22082 /* Generate a DIE to represent either a real live formal parameter decl or to
22083 represent just the type of some formal parameter position in some function
22084 type.
22085
22086 Note that this routine is a bit unusual because its argument may be a
22087 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22088 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22089 node. If it's the former then this function is being called to output a
22090 DIE to represent a formal parameter object (or some inlining thereof). If
22091 it's the latter, then this function is only being called to output a
22092 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22093 argument type of some subprogram type.
22094 If EMIT_NAME_P is true, name and source coordinate attributes
22095 are emitted. */
22096
22097 static dw_die_ref
22098 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22099 dw_die_ref context_die)
22100 {
22101 tree node_or_origin = node ? node : origin;
22102 tree ultimate_origin;
22103 dw_die_ref parm_die = NULL;
22104
22105 if (DECL_P (node_or_origin))
22106 {
22107 parm_die = lookup_decl_die (node);
22108
22109 /* If the contexts differ, we may not be talking about the same
22110 thing.
22111 ??? When in LTO the DIE parent is the "abstract" copy and the
22112 context_die is the specification "copy". But this whole block
22113 	 should eventually no longer be needed.  */
22114 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22115 {
22116 if (!DECL_ABSTRACT_P (node))
22117 {
22118 /* This can happen when creating an inlined instance, in
22119 which case we need to create a new DIE that will get
22120 annotated with DW_AT_abstract_origin. */
22121 parm_die = NULL;
22122 }
22123 else
22124 gcc_unreachable ();
22125 }
22126
22127 if (parm_die && parm_die->die_parent == NULL)
22128 {
22129 /* Check that parm_die already has the right attributes that
22130 we would have added below. If any attributes are
22131 missing, fall through to add them. */
22132 if (! DECL_ABSTRACT_P (node_or_origin)
22133 && !get_AT (parm_die, DW_AT_location)
22134 && !get_AT (parm_die, DW_AT_const_value))
22135 /* We are missing location info, and are about to add it. */
22136 ;
22137 else
22138 {
22139 add_child_die (context_die, parm_die);
22140 return parm_die;
22141 }
22142 }
22143 }
22144
22145   /* If we have a previously generated DIE, use it, unless this is a
22146      concrete instance (origin != NULL), in which case we need a new
22147 DIE with a corresponding DW_AT_abstract_origin. */
22148 bool reusing_die;
22149 if (parm_die && origin == NULL)
22150 reusing_die = true;
22151 else
22152 {
22153 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22154 reusing_die = false;
22155 }
22156
22157 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22158 {
22159 case tcc_declaration:
22160 ultimate_origin = decl_ultimate_origin (node_or_origin);
22161 if (node || ultimate_origin)
22162 origin = ultimate_origin;
22163
22164 if (reusing_die)
22165 goto add_location;
22166
22167 if (origin != NULL)
22168 add_abstract_origin_attribute (parm_die, origin);
22169 else if (emit_name_p)
22170 add_name_and_src_coords_attributes (parm_die, node);
22171 if (origin == NULL
22172 || (! DECL_ABSTRACT_P (node_or_origin)
22173 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22174 decl_function_context
22175 (node_or_origin))))
22176 {
22177 tree type = TREE_TYPE (node_or_origin);
22178 if (decl_by_reference_p (node_or_origin))
22179 add_type_attribute (parm_die, TREE_TYPE (type),
22180 TYPE_UNQUALIFIED,
22181 false, context_die);
22182 else
22183 add_type_attribute (parm_die, type,
22184 decl_quals (node_or_origin),
22185 false, context_die);
22186 }
22187 if (origin == NULL && DECL_ARTIFICIAL (node))
22188 add_AT_flag (parm_die, DW_AT_artificial, 1);
22189 add_location:
22190 if (node && node != origin)
22191 equate_decl_number_to_die (node, parm_die);
22192 if (! DECL_ABSTRACT_P (node_or_origin))
22193 add_location_or_const_value_attribute (parm_die, node_or_origin,
22194 node == NULL);
22195
22196 break;
22197
22198 case tcc_type:
22199 /* We were called with some kind of a ..._TYPE node. */
22200 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22201 context_die);
22202 break;
22203
22204 default:
22205 gcc_unreachable ();
22206 }
22207
22208 return parm_die;
22209 }
22210
22211 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22212 children DW_TAG_formal_parameter DIEs representing the arguments of the
22213 parameter pack.
22214
22215 PARM_PACK must be a function parameter pack.
22216 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22217 must point to the subsequent arguments of the function PACK_ARG belongs to.
22218 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22219    If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22220 following the last one for which a DIE was generated. */
22221
22222 static dw_die_ref
22223 gen_formal_parameter_pack_die (tree parm_pack,
22224 tree pack_arg,
22225 dw_die_ref subr_die,
22226 tree *next_arg)
22227 {
22228 tree arg;
22229 dw_die_ref parm_pack_die;
22230
22231 gcc_assert (parm_pack
22232 && lang_hooks.function_parameter_pack_p (parm_pack)
22233 && subr_die);
22234
22235 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22236 add_src_coords_attributes (parm_pack_die, parm_pack);
22237
22238 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22239 {
22240 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22241 parm_pack))
22242 break;
22243 gen_formal_parameter_die (arg, NULL,
22244 false /* Don't emit name attribute. */,
22245 parm_pack_die);
22246 }
22247 if (next_arg)
22248 *next_arg = arg;
22249 return parm_pack_die;
22250 }
22251
22252 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22253 at the end of an (ANSI prototyped) formal parameters list. */
22254
22255 static void
22256 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22257 {
22258 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22259 }
22260
22261 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22262 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22263 parameters as specified in some function type specification (except for
22264 those which appear as part of a function *definition*). */
22265
22266 static void
22267 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22268 {
22269 tree link;
22270 tree formal_type = NULL;
22271 tree first_parm_type;
22272 tree arg;
22273
22274 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22275 {
22276 arg = DECL_ARGUMENTS (function_or_method_type);
22277 function_or_method_type = TREE_TYPE (function_or_method_type);
22278 }
22279 else
22280 arg = NULL_TREE;
22281
22282 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22283
22284 /* Make our first pass over the list of formal parameter types and output a
22285 DW_TAG_formal_parameter DIE for each one. */
22286 for (link = first_parm_type; link; )
22287 {
22288 dw_die_ref parm_die;
22289
22290 formal_type = TREE_VALUE (link);
22291 if (formal_type == void_type_node)
22292 break;
22293
22294 /* Output a (nameless) DIE to represent the formal parameter itself. */
22295 parm_die = gen_formal_parameter_die (formal_type, NULL,
22296 true /* Emit name attribute. */,
22297 context_die);
22298 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22299 && link == first_parm_type)
22300 {
22301 add_AT_flag (parm_die, DW_AT_artificial, 1);
22302 if (dwarf_version >= 3 || !dwarf_strict)
22303 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22304 }
22305 else if (arg && DECL_ARTIFICIAL (arg))
22306 add_AT_flag (parm_die, DW_AT_artificial, 1);
22307
22308 link = TREE_CHAIN (link);
22309 if (arg)
22310 arg = DECL_CHAIN (arg);
22311 }
22312
22313 /* If this function type has an ellipsis, add a
22314 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22315 if (formal_type != void_type_node)
22316 gen_unspecified_parameters_die (function_or_method_type, context_die);
22317
22318 /* Make our second (and final) pass over the list of formal parameter types
22319 and output DIEs to represent those types (as necessary). */
22320 for (link = TYPE_ARG_TYPES (function_or_method_type);
22321 link && TREE_VALUE (link);
22322 link = TREE_CHAIN (link))
22323 gen_type_die (TREE_VALUE (link), context_die);
22324 }
22325
22326 /* We want to generate the DIE for TYPE so that we can generate the
22327 die for MEMBER, which has been defined; we will need to refer back
22328 to the member declaration nested within TYPE. If we're trying to
22329 generate minimal debug info for TYPE, processing TYPE won't do the
22330 trick; we need to attach the member declaration by hand. */
22331
22332 static void
22333 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22334 {
22335 gen_type_die (type, context_die);
22336
22337 /* If we're trying to avoid duplicate debug info, we may not have
22338 emitted the member decl for this function. Emit it now. */
22339 if (TYPE_STUB_DECL (type)
22340 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22341 && ! lookup_decl_die (member))
22342 {
22343 dw_die_ref type_die;
22344 gcc_assert (!decl_ultimate_origin (member));
22345
22346 type_die = lookup_type_die_strip_naming_typedef (type);
22347 if (TREE_CODE (member) == FUNCTION_DECL)
22348 gen_subprogram_die (member, type_die);
22349 else if (TREE_CODE (member) == FIELD_DECL)
22350 {
22351 /* Ignore the nameless fields that are used to skip bits but handle
22352 C++ anonymous unions and structs. */
22353 if (DECL_NAME (member) != NULL_TREE
22354 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22355 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22356 {
22357 struct vlr_context vlr_ctx = {
22358 DECL_CONTEXT (member), /* struct_type */
22359 NULL_TREE /* variant_part_offset */
22360 };
22361 gen_type_die (member_declared_type (member), type_die);
22362 gen_field_die (member, &vlr_ctx, type_die);
22363 }
22364 }
22365 else
22366 gen_variable_die (member, NULL_TREE, type_die);
22367 }
22368 }
22369 \f
22370 /* Forward declare these functions, because they are mutually recursive
22371 with their set_block_* pairing functions. */
22372 static void set_decl_origin_self (tree);
22373
22374 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22375 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22376 that it points to the node itself, thus indicating that the node is its
22377 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22378 the given node is NULL, recursively descend the decl/block tree which
22379 it is the root of, and for each other ..._DECL or BLOCK node contained
22380 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22381 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22382 values to point to themselves. */
22383
22384 static void
22385 set_block_origin_self (tree stmt)
22386 {
22387 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22388 {
22389 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22390
22391 {
22392 tree local_decl;
22393
22394 for (local_decl = BLOCK_VARS (stmt);
22395 local_decl != NULL_TREE;
22396 local_decl = DECL_CHAIN (local_decl))
22397 /* Do not recurse on nested functions since the inlining status
22398 of parent and child can be different as per the DWARF spec. */
22399 if (TREE_CODE (local_decl) != FUNCTION_DECL
22400 && !DECL_EXTERNAL (local_decl))
22401 set_decl_origin_self (local_decl);
22402 }
22403
22404 {
22405 tree subblock;
22406
22407 for (subblock = BLOCK_SUBBLOCKS (stmt);
22408 subblock != NULL_TREE;
22409 subblock = BLOCK_CHAIN (subblock))
22410 set_block_origin_self (subblock); /* Recurse. */
22411 }
22412 }
22413 }
22414
22415 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22416 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22417    node so that it points to the node itself, thus indicating that the
22418 node represents its own (abstract) origin. Additionally, if the
22419 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22420    the decl/block tree of which the given node is the root, and for
22421 each other ..._DECL or BLOCK node contained therein whose
22422 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22423 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22424 point to themselves. */
22425
22426 static void
22427 set_decl_origin_self (tree decl)
22428 {
22429 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22430 {
22431 DECL_ABSTRACT_ORIGIN (decl) = decl;
22432 if (TREE_CODE (decl) == FUNCTION_DECL)
22433 {
22434 tree arg;
22435
22436 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22437 DECL_ABSTRACT_ORIGIN (arg) = arg;
22438 if (DECL_INITIAL (decl) != NULL_TREE
22439 && DECL_INITIAL (decl) != error_mark_node)
22440 set_block_origin_self (DECL_INITIAL (decl));
22441 }
22442 }
22443 }
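/* Illustration (not from the original sources): after calling
set_decl_origin_self on a FUNCTION_DECL such as

int f (int a) { int b = a; return b; }

DECL_ABSTRACT_ORIGIN (f) == f and DECL_ABSTRACT_ORIGIN (a) == a, and
every BLOCK reachable from DECL_INITIAL (f), as well as every local
such as `b' on the BLOCK_VARS lists, is likewise marked as its own
abstract origin (provided its origin was previously unset). */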
22444 \f
22445 /* Mark the early DIE for DECL as the abstract instance. */
22446
22447 static void
22448 dwarf2out_abstract_function (tree decl)
22449 {
22450 dw_die_ref old_die;
22451
22452 /* Make sure we have the actual abstract inline, not a clone. */
22453 decl = DECL_ORIGIN (decl);
22454
22455 if (DECL_IGNORED_P (decl))
22456 return;
22457
22458 old_die = lookup_decl_die (decl);
22459 /* With early debug we always have an old DIE unless we are in LTO
22460 and the user did not compile with debug info but only linked with it. */
22461 if (in_lto_p && ! old_die)
22462 return;
22463 gcc_assert (old_die != NULL);
22464 if (get_AT (old_die, DW_AT_inline)
22465 || get_AT (old_die, DW_AT_abstract_origin))
22466 /* We've already generated the abstract instance. */
22467 return;
22468
22469 /* Go ahead and put DW_AT_inline on the DIE. */
22470 if (DECL_DECLARED_INLINE_P (decl))
22471 {
22472 if (cgraph_function_possibly_inlined_p (decl))
22473 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22474 else
22475 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22476 }
22477 else
22478 {
22479 if (cgraph_function_possibly_inlined_p (decl))
22480 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22481 else
22482 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22483 }
22484
22485 if (DECL_DECLARED_INLINE_P (decl)
22486 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22487 add_AT_flag (old_die, DW_AT_artificial, 1);
22488
22489 set_decl_origin_self (decl);
22490 }
22491
22492 /* Helper function of premark_used_types() which gets called through
22493 htab_traverse.
22494
22495 Marks the DIE of the given TYPE as perennial, so it never gets
22496 marked as unused by prune_unused_types. */
22497
22498 bool
22499 premark_used_types_helper (tree const &type, void *)
22500 {
22501 dw_die_ref die;
22502
22503 die = lookup_type_die (type);
22504 if (die != NULL)
22505 die->die_perennial_p = 1;
22506 return true;
22507 }
22508
22509 /* Helper function of premark_types_used_by_global_vars which gets called
22510 through htab_traverse.
22511
22512 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22513 marked as unused by prune_unused_types. The DIE of the type is marked
22514 only if the global variable using the type will actually be emitted. */
22515
22516 int
22517 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22518 void *)
22519 {
22520 struct types_used_by_vars_entry *entry;
22521 dw_die_ref die;
22522
22523 entry = (struct types_used_by_vars_entry *) *slot;
22524 gcc_assert (entry->type != NULL
22525 && entry->var_decl != NULL);
22526 die = lookup_type_die (entry->type);
22527 if (die)
22528 {
22529 /* Ask the symbol table whether the global variable is really to be emitted.
22530 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22531 varpool_node *node = varpool_node::get (entry->var_decl);
22532 if (node && node->definition)
22533 {
22534 die->die_perennial_p = 1;
22535 /* Keep the parent DIEs as well. */
22536 while ((die = die->die_parent) && die->die_perennial_p == 0)
22537 die->die_perennial_p = 1;
22538 }
22539 }
22540 return 1;
22541 }
22542
22543 /* Mark all members of used_types_hash as perennial. */
22544
22545 static void
22546 premark_used_types (struct function *fun)
22547 {
22548 if (fun && fun->used_types_hash)
22549 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22550 }
22551
22552 /* Mark the DIEs of types recorded in types_used_by_vars_hash as perennial. */
22553
22554 static void
22555 premark_types_used_by_global_vars (void)
22556 {
22557 if (types_used_by_vars_hash)
22558 types_used_by_vars_hash
22559 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22560 }
22561
22562 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22563 for the CA_LOC call argument location node. */
22564
22565 static dw_die_ref
22566 gen_call_site_die (tree decl, dw_die_ref subr_die,
22567 struct call_arg_loc_node *ca_loc)
22568 {
22569 dw_die_ref stmt_die = NULL, die;
22570 tree block = ca_loc->block;
22571
22572 while (block
22573 && block != DECL_INITIAL (decl)
22574 && TREE_CODE (block) == BLOCK)
22575 {
22576 stmt_die = BLOCK_DIE (block);
22577 if (stmt_die)
22578 break;
22579 block = BLOCK_SUPERCONTEXT (block);
22580 }
22581 if (stmt_die == NULL)
22582 stmt_die = subr_die;
22583 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22584 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22585 if (ca_loc->tail_call_p)
22586 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22587 if (ca_loc->symbol_ref)
22588 {
22589 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22590 if (tdie)
22591 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22592 else
22593 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22594 false);
22595 }
22596 return die;
22597 }
22598
22599 /* Generate a DIE to represent a declared function (either file-scope or
22600 block-local). */
22601
22602 static void
22603 gen_subprogram_die (tree decl, dw_die_ref context_die)
22604 {
22605 tree origin = decl_ultimate_origin (decl);
22606 dw_die_ref subr_die;
22607 dw_die_ref old_die = lookup_decl_die (decl);
22608
22609 /* This function gets called multiple times for different stages of
22610 the debug process. For example, for func() in this code:
22611
22612 namespace S
22613 {
22614 void func() { ... }
22615 }
22616
22617 ...we get called 4 times. Twice in early debug and twice in
22618 late debug:
22619
22620 Early debug
22621 -----------
22622
22623 1. Once while generating func() within the namespace. This is
22624 the declaration. The declaration bit below is set, as the
22625 context is the namespace.
22626
22627 A new DIE will be generated with DW_AT_declaration set.
22628
22629 2. Once for func() itself. This is the specification. The
22630 declaration bit below is clear as the context is the CU.
22631
22632 We will use the cached DIE from (1) to create a new DIE with
22633 DW_AT_specification pointing to the declaration in (1).
22634
22635 Late debug via rest_of_handle_final()
22636 -------------------------------------
22637
22638 3. Once while generating func() within the namespace. This is also the
22639 declaration, as in (1), but this time we will early exit below
22640 as we have a cached DIE and a declaration needs no additional
22641 annotations (no locations), as the source declaration line
22642 info is enough.
22643
22644 4. Once for func() itself. As in (2), this is the specification,
22645 but this time we will re-use the cached DIE, and just annotate
22646 it with the location information that should now be available.
22647
22648 For something without namespaces, but with abstract instances, we
22649 are also called multiple times:
22650
22651 class Base
22652 {
22653 public:
22654 Base (); // constructor declaration (1)
22655 };
22656
22657 Base::Base () { } // constructor specification (2)
22658
22659 Early debug
22660 -----------
22661
22662 1. Once for the Base() constructor by virtue of it being a
22663 member of the Base class. This is done via
22664 rest_of_type_compilation.
22665
22666 This is a declaration, so a new DIE will be created with
22667 DW_AT_declaration.
22668
22669 2. Once for the Base() constructor definition, but this time
22670 while generating the abstract instance of the base
22671 constructor (__base_ctor) which is being generated via early
22672 debug of reachable functions.
22673
22674 Even though we have a cached version of the declaration (1),
22675 we will create a DW_AT_specification of the declaration DIE
22676 in (1).
22677
22678 3. Once for the __base_ctor itself, but this time, we generate
22679 a DW_AT_abstract_origin version of the DW_AT_specification in
22680 (2).
22681
22682 Late debug via rest_of_handle_final
22683 -----------------------------------
22684
22685 4. One final time for the __base_ctor (which will have a cached
22686 DIE with DW_AT_abstract_origin created in (3)). This time,
22687 we will just annotate the location information now
22688 available.
22689 */
22690 int declaration = (current_function_decl != decl
22691 || class_or_namespace_scope_p (context_die));
22692
22693 /* A declaration that has been previously dumped needs no
22694 additional information. */
22695 if (old_die && declaration)
22696 return;
22697
22698 /* Now that the C++ front end lazily declares artificial member fns, we
22699 might need to retrofit the declaration into its class. */
22700 if (!declaration && !origin && !old_die
22701 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22702 && !class_or_namespace_scope_p (context_die)
22703 && debug_info_level > DINFO_LEVEL_TERSE)
22704 old_die = force_decl_die (decl);
22705
22706 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22707 if (origin != NULL)
22708 {
22709 gcc_assert (!declaration || local_scope_p (context_die));
22710
22711 /* Fixup die_parent for the abstract instance of a nested
22712 inline function. */
22713 if (old_die && old_die->die_parent == NULL)
22714 add_child_die (context_die, old_die);
22715
22716 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22717 {
22718 /* If we have a DW_AT_abstract_origin we have a working
22719 cached version. */
22720 subr_die = old_die;
22721 }
22722 else
22723 {
22724 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22725 add_abstract_origin_attribute (subr_die, origin);
22726 /* This is where the actual code for a cloned function is.
22727 Let's emit linkage name attribute for it. This helps
22728 debuggers to e.g, set breakpoints into
22729 constructors/destructors when the user asks "break
22730 K::K". */
22731 add_linkage_name (subr_die, decl);
22732 }
22733 }
22734 /* A cached copy, possibly from early dwarf generation. Reuse as
22735 much as possible. */
22736 else if (old_die)
22737 {
22738 if (!get_AT_flag (old_die, DW_AT_declaration)
22739 /* We can have a normal definition following an inline one in the
22740 case of redefinition of GNU C extern inlines.
22741 It seems reasonable to use DW_AT_specification in this case. */
22742 && !get_AT (old_die, DW_AT_inline))
22743 {
22744 /* Detect and ignore this case, where we are trying to output
22745 something we have already output. */
22746 if (get_AT (old_die, DW_AT_low_pc)
22747 || get_AT (old_die, DW_AT_ranges))
22748 return;
22749
22750 /* If we have no location information, this must be a
22751 partially generated DIE from early dwarf generation.
22752 Fall through and generate it. */
22753 }
22754
22755 /* If the definition comes from the same place as the declaration,
22756 maybe use the old DIE. We always want the DIE for this function
22757 that has the *_pc attributes to be under comp_unit_die so the
22758 debugger can find it. We also need to do this for abstract
22759 instances of inlines, since the spec requires the out-of-line copy
22760 to have the same parent. For local class methods, this doesn't
22761 apply; we just use the old DIE. */
22762 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22763 struct dwarf_file_data * file_index = lookup_filename (s.file);
22764 if (((is_unit_die (old_die->die_parent)
22765 /* This condition fixes the inconsistency/ICE with the
22766 following Fortran test (or some derivative thereof) while
22767 building libgfortran:
22768
22769 module some_m
22770 contains
22771 logical function funky (FLAG)
22772 funky = .true.
22773 end function
22774 end module
22775 */
22776 || (old_die->die_parent
22777 && old_die->die_parent->die_tag == DW_TAG_module)
22778 || local_scope_p (old_die->die_parent)
22779 || context_die == NULL)
22780 && (DECL_ARTIFICIAL (decl)
22781 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22782 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22783 == (unsigned) s.line)
22784 && (!debug_column_info
22785 || s.column == 0
22786 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22787 == (unsigned) s.column)))))
22788 /* With LTO, if there's an abstract instance for
22789 the old DIE, this is a concrete instance and
22790 thus we reuse the DIE. */
22791 || get_AT (old_die, DW_AT_abstract_origin))
22792 {
22793 subr_die = old_die;
22794
22795 /* Clear out the declaration attribute, but leave the
22796 parameters so they can be augmented with location
22797 information later. If this was a declaration, however,
22798 wipe out the nameless parameters and recreate them
22799 further down. */
22800 if (remove_AT (subr_die, DW_AT_declaration))
22801 {
22803 remove_AT (subr_die, DW_AT_object_pointer);
22804 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22805 }
22806 }
22807 /* Make a specification pointing to the previously built
22808 declaration. */
22809 else
22810 {
22811 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22812 add_AT_specification (subr_die, old_die);
22813 add_pubname (decl, subr_die);
22814 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22815 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22816 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22817 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22818 if (debug_column_info
22819 && s.column
22820 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22821 != (unsigned) s.column))
22822 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22823
22824 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22825 emit the real type on the definition die. */
22826 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22827 {
22828 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22829 if (die == auto_die || die == decltype_auto_die)
22830 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22831 TYPE_UNQUALIFIED, false, context_die);
22832 }
22833
22834 /* When we process the method declaration, we haven't seen
22835 the out-of-class defaulted definition yet, so we have to
22836 recheck now. */
22837 if ((dwarf_version >= 5 || ! dwarf_strict)
22838 && !get_AT (subr_die, DW_AT_defaulted))
22839 {
22840 int defaulted
22841 = lang_hooks.decls.decl_dwarf_attribute (decl,
22842 DW_AT_defaulted);
22843 if (defaulted != -1)
22844 {
22845 /* Other values must have been handled before. */
22846 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22847 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22848 }
22849 }
22850 }
22851 }
22852 /* Create a fresh DIE for anything else. */
22853 else
22854 {
22855 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22856
22857 if (TREE_PUBLIC (decl))
22858 add_AT_flag (subr_die, DW_AT_external, 1);
22859
22860 add_name_and_src_coords_attributes (subr_die, decl);
22861 add_pubname (decl, subr_die);
22862 if (debug_info_level > DINFO_LEVEL_TERSE)
22863 {
22864 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22865 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22866 TYPE_UNQUALIFIED, false, context_die);
22867 }
22868
22869 add_pure_or_virtual_attribute (subr_die, decl);
22870 if (DECL_ARTIFICIAL (decl))
22871 add_AT_flag (subr_die, DW_AT_artificial, 1);
22872
22873 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22874 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22875
22876 add_alignment_attribute (subr_die, decl);
22877
22878 add_accessibility_attribute (subr_die, decl);
22879 }
22880
22881 /* Unless we have an existing non-declaration DIE, equate the decl
22882 to the new DIE. */
22883 if (!old_die || is_declaration_die (old_die))
22884 equate_decl_number_to_die (decl, subr_die);
22885
22886 if (declaration)
22887 {
22888 if (!old_die || !get_AT (old_die, DW_AT_inline))
22889 {
22890 add_AT_flag (subr_die, DW_AT_declaration, 1);
22891
22892 /* If this is an explicit function declaration then generate
22893 a DW_AT_explicit attribute. */
22894 if ((dwarf_version >= 3 || !dwarf_strict)
22895 && lang_hooks.decls.decl_dwarf_attribute (decl,
22896 DW_AT_explicit) == 1)
22897 add_AT_flag (subr_die, DW_AT_explicit, 1);
22898
22899 /* If this is a C++11 deleted special function member then generate
22900 a DW_AT_deleted attribute. */
22901 if ((dwarf_version >= 5 || !dwarf_strict)
22902 && lang_hooks.decls.decl_dwarf_attribute (decl,
22903 DW_AT_deleted) == 1)
22904 add_AT_flag (subr_die, DW_AT_deleted, 1);
22905
22906 /* If this is a C++11 defaulted special function member then
22907 generate a DW_AT_defaulted attribute. */
22908 if (dwarf_version >= 5 || !dwarf_strict)
22909 {
22910 int defaulted
22911 = lang_hooks.decls.decl_dwarf_attribute (decl,
22912 DW_AT_defaulted);
22913 if (defaulted != -1)
22914 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22915 }
22916
22917 /* If this is a C++11 non-static member function with & ref-qualifier
22918 then generate a DW_AT_reference attribute. */
22919 if ((dwarf_version >= 5 || !dwarf_strict)
22920 && lang_hooks.decls.decl_dwarf_attribute (decl,
22921 DW_AT_reference) == 1)
22922 add_AT_flag (subr_die, DW_AT_reference, 1);
22923
22924 /* If this is a C++11 non-static member function with &&
22925 ref-qualifier then generate a DW_AT_reference attribute. */
22926 if ((dwarf_version >= 5 || !dwarf_strict)
22927 && lang_hooks.decls.decl_dwarf_attribute (decl,
22928 DW_AT_rvalue_reference)
22929 == 1)
22930 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22931 }
22932 }
22933 /* For non-DECL_EXTERNAL decls, if range information is available, fill
22934 the DIE with it. */
22935 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22936 {
22937 HOST_WIDE_INT cfa_fb_offset;
22938
22939 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22940
22941 if (!crtl->has_bb_partition)
22942 {
22943 dw_fde_ref fde = fun->fde;
22944 if (fde->dw_fde_begin)
22945 {
22946 /* We have already generated the labels. */
22947 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22948 fde->dw_fde_end, false);
22949 }
22950 else
22951 {
22952 /* Create start/end labels and add the range. */
22953 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22954 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22955 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22956 current_function_funcdef_no);
22957 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22958 current_function_funcdef_no);
22959 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22960 false);
22961 }
22962
22963 #if VMS_DEBUGGING_INFO
22964 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22965 Section 2.3 Prologue and Epilogue Attributes:
22966 When a breakpoint is set on entry to a function, it is generally
22967 desirable for execution to be suspended, not on the very first
22968 instruction of the function, but rather at a point after the
22969 function's frame has been set up, after any language defined local
22970 declaration processing has been completed, and before execution of
22971 the first statement of the function begins. Debuggers generally
22972 cannot properly determine where this point is. Similarly for a
22973 breakpoint set on exit from a function. The prologue and epilogue
22974 attributes allow a compiler to communicate the location(s) to use. */
22975
22976 {
22977 if (fde->dw_fde_vms_end_prologue)
22978 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22979 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22980
22981 if (fde->dw_fde_vms_begin_epilogue)
22982 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22983 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22984 }
22985 #endif
22986
22987 }
22988 else
22989 {
22990 /* Generate pubnames entries for the split function code ranges. */
22991 dw_fde_ref fde = fun->fde;
22992
22993 if (fde->dw_fde_second_begin)
22994 {
22995 if (dwarf_version >= 3 || !dwarf_strict)
22996 {
22997 /* We should use ranges for non-contiguous code section
22998 addresses. Use the actual code range for the initial
22999 section, since the HOT/COLD labels might precede an
23000 alignment offset. */
23001 bool range_list_added = false;
23002 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23003 fde->dw_fde_end, &range_list_added,
23004 false);
23005 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23006 fde->dw_fde_second_end,
23007 &range_list_added, false);
23008 if (range_list_added)
23009 add_ranges (NULL);
23010 }
23011 else
23012 {
23013 /* There is no real support in DWARF2 for this, so we make
23014 a work-around. First, emit the pub name for the segment
23015 containing the function label. Then make and emit a
23016 simplified subprogram DIE for the second segment with the
23017 name prefixed by __second_sect_of_. We use the same
23018 linkage name for the second DIE so that gdb will find both
23019 sections when given "b foo". */
23020 const char *name = NULL;
23021 tree decl_name = DECL_NAME (decl);
23022 dw_die_ref seg_die;
23023
23024 /* Do the 'primary' section. */
23025 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23026 fde->dw_fde_end, false);
23027
23028 /* Build a minimal DIE for the secondary section. */
23029 seg_die = new_die (DW_TAG_subprogram,
23030 subr_die->die_parent, decl);
23031
23032 if (TREE_PUBLIC (decl))
23033 add_AT_flag (seg_die, DW_AT_external, 1);
23034
23035 if (decl_name != NULL
23036 && IDENTIFIER_POINTER (decl_name) != NULL)
23037 {
23038 name = dwarf2_name (decl, 1);
23039 if (! DECL_ARTIFICIAL (decl))
23040 add_src_coords_attributes (seg_die, decl);
23041
23042 add_linkage_name (seg_die, decl);
23043 }
23044 gcc_assert (name != NULL);
23045 add_pure_or_virtual_attribute (seg_die, decl);
23046 if (DECL_ARTIFICIAL (decl))
23047 add_AT_flag (seg_die, DW_AT_artificial, 1);
23048
23049 name = concat ("__second_sect_of_", name, NULL);
23050 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23051 fde->dw_fde_second_end, false);
23052 add_name_attribute (seg_die, name);
23053 if (want_pubnames ())
23054 add_pubname_string (name, seg_die);
23055 }
23056 }
23057 else
23058 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23059 false);
23060 }
23061
23062 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23063
23064 /* We define the "frame base" as the function's CFA. This is more
23065 convenient for several reasons: (1) It's stable across the prologue
23066 and epilogue, which makes it better than just a frame pointer,
23067 (2) With dwarf3, there exists a one-byte encoding that allows us
23068 to reference the .debug_frame data by proxy, but failing that,
23069 (3) We can at least reuse the code inspection and interpretation
23070 code that determines the CFA position at various points in the
23071 function. */
23072 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23073 {
23074 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23075 add_AT_loc (subr_die, DW_AT_frame_base, op);
23076 }
23077 else
23078 {
23079 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23080 if (list->dw_loc_next)
23081 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23082 else
23083 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23084 }
23085
23086 /* Compute a displacement from the "steady-state frame pointer" to
23087 the CFA. The former is what all stack slots and argument slots
23088 will reference in the rtl; the latter is what we've told the
23089 debugger about. We'll need to adjust all frame_base references
23090 by this displacement. */
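/* For concreteness (illustrative, not from the original sources): on
x86-64 with a frame pointer, the CFA is the value of %rsp just before
the call insn, so after the prologue "push %rbp; mov %rsp,%rbp" we have
%rbp == CFA - 16. RTL frame offsets are relative to the steady-state
frame pointer while the frame base advertised above is (derived from)
the CFA, so the displacement recorded here is that constant 16-byte
gap, and frame_base-relative locations emitted later are rebased by
it. */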
23091 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23092
23093 if (fun->static_chain_decl)
23094 {
23095 /* DWARF requires here a location expression that computes the
23096 address of the enclosing subprogram's frame base. The machinery
23097 in tree-nested.c is supposed to store this specific address in the
23098 last field of the FRAME record. */
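/* Illustrative example (hypothetical, not from the original sources),
using a GNU C nested function:

int outer (int x)
{
int inner (void) { return x; }
return inner ();
}

When emitting the DIE for `inner', fun->static_chain_decl is its
incoming chain pointer to `outer's FRAME struct created by
tree-nested.c; the expression built below dereferences it and reads
the last field of that FRAME record, which tree-nested.c fills with
the enclosing frame base, giving DW_AT_static_link. */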
23099 const tree frame_type
23100 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23101 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23102
23103 tree fb_expr
23104 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23105 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23106 fb_expr, fb_decl, NULL_TREE);
23107
23108 add_AT_location_description (subr_die, DW_AT_static_link,
23109 loc_list_from_tree (fb_expr, 0, NULL));
23110 }
23111
23112 resolve_variable_values ();
23113 }
23114
23115 /* Generate child DIEs for template parameters. */
23116 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23117 gen_generic_params_dies (decl);
23118
23119 /* Now output descriptions of the arguments for this function. This gets
23120 (unnecessarily?) complex because the DECL_ARGUMENTS list
23121 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23122 `...' at the end of the formal parameter list. In order to find out if
23123 there was a trailing ellipsis or not, we must instead look at the type
23124 associated with the FUNCTION_DECL. This will be a node of type
23125 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23126 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23127 an ellipsis at the end. */
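/* Illustrative sketch (not from the original sources) of the three
interesting shapes of parameter lists:

void f (int);       - prototyped: TYPE_ARG_TYPES ends with void_type_node
void g (int, ...);  - variadic: the chain has no void_type_node terminator
void h ();          - unprototyped (C): TYPE_ARG_TYPES is NULL_TREE

prototype_p and stdarg_p, used further below, key off exactly this
distinction when deciding whether to emit a DW_TAG_unspecified_parameters
DIE. */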
23128
23129 /* In the case where we are describing a mere function declaration, all we
23130 need to do here (and all we *can* do here) is to describe the *types* of
23131 its formal parameters. */
23132 if (debug_info_level <= DINFO_LEVEL_TERSE)
23133 ;
23134 else if (declaration)
23135 gen_formal_types_die (decl, subr_die);
23136 else
23137 {
23138 /* Generate DIEs to represent all known formal parameters. */
23139 tree parm = DECL_ARGUMENTS (decl);
23140 tree generic_decl = early_dwarf
23141 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23142 tree generic_decl_parm = generic_decl
23143 ? DECL_ARGUMENTS (generic_decl)
23144 : NULL;
23145
23146 /* Now we want to walk the list of parameters of the function and
23147 emit their relevant DIEs.
23148
23149 We consider the case of DECL being an instance of a generic function
23150 as well as it being a normal function.
23151
23152 If DECL is an instance of a generic function we walk the
23153 parameters of the generic function declaration _and_ the parameters of
23154 DECL itself. This is useful because we want to emit specific DIEs for
23155 function parameter packs and those are declared as part of the
23156 generic function declaration. In that particular case,
23157 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23158 That DIE has children DIEs representing the set of arguments
23159 of the pack. Note that the set of pack arguments can be empty.
23160 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23161 children DIEs.
23162
23163 Otherwise, we just consider the parameters of DECL. */
23164 while (generic_decl_parm || parm)
23165 {
23166 if (generic_decl_parm
23167 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23168 gen_formal_parameter_pack_die (generic_decl_parm,
23169 parm, subr_die,
23170 &parm);
23171 else if (parm)
23172 {
23173 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23174
23175 if (early_dwarf
23176 && parm == DECL_ARGUMENTS (decl)
23177 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23178 && parm_die
23179 && (dwarf_version >= 3 || !dwarf_strict))
23180 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23181
23182 parm = DECL_CHAIN (parm);
23183 }
23184 else if (parm)
23185 parm = DECL_CHAIN (parm);
23186
23187 if (generic_decl_parm)
23188 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23189 }
23190
23191 /* Decide whether we need an unspecified_parameters DIE at the end.
23192 There are two cases to do this for: 1) the ANSI C `...' declaration,
23193 which is detectable when the end of the argument list is not a
23194 void_type_node; 2) an unprototyped function declaration (not a
23195 definition), which just means that we have no info about the
23196 parameters at all. */
23197 if (early_dwarf)
23198 {
23199 if (prototype_p (TREE_TYPE (decl)))
23200 {
23201 /* This is the prototyped case: check for a trailing ellipsis. */
23202 if (stdarg_p (TREE_TYPE (decl)))
23203 gen_unspecified_parameters_die (decl, subr_die);
23204 }
23205 else if (DECL_INITIAL (decl) == NULL_TREE)
23206 gen_unspecified_parameters_die (decl, subr_die);
23207 }
23208 }
23209
23210 if (subr_die != old_die)
23211 /* Add the calling convention attribute if requested. */
23212 add_calling_convention_attribute (subr_die, decl);
23213
23214 /* Output Dwarf info for all of the stuff within the body of the function
23215 (if it has one - it may be just a declaration).
23216
23217 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23218 a function. This BLOCK actually represents the outermost binding contour
23219 for the function, i.e. the contour in which the function's formal
23220 parameters and labels get declared. Curiously, it appears that the front
23221 end doesn't actually put the PARM_DECL nodes for the current function onto
23222 the BLOCK_VARS list for this outer scope; they are instead strung off
23223 the DECL_ARGUMENTS list for the function.
23224
23225 The BLOCK_VARS list for the `outer_scope' does, however, provide us with
23226 a list of the LABEL_DECL nodes for the function, and we output DWARF info
23227 for those in decls_for_scope. Just within the `outer_scope' there will be
23228 a BLOCK node representing the function's outermost pair of curly braces,
23229 and any blocks used for the base and member initializers of a C++
23230 constructor function. */
23231 tree outer_scope = DECL_INITIAL (decl);
23232 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23233 {
23234 int call_site_note_count = 0;
23235 int tail_call_site_note_count = 0;
23236
23237 /* Emit a DW_TAG_variable DIE for a named return value. */
23238 if (DECL_NAME (DECL_RESULT (decl)))
23239 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23240
23241 /* The first time through decls_for_scope we will generate the
23242 DIEs for the locals. The second time, we fill in the
23243 location info. */
23244 decls_for_scope (outer_scope, subr_die);
23245
23246 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23247 {
23248 struct call_arg_loc_node *ca_loc;
23249 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23250 {
23251 dw_die_ref die = NULL;
23252 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23253 rtx arg, next_arg;
23254
23255 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23256 ? XEXP (ca_loc->call_arg_loc_note, 0)
23257 : NULL_RTX);
23258 arg; arg = next_arg)
23259 {
23260 dw_loc_descr_ref reg, val;
23261 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23262 dw_die_ref cdie, tdie = NULL;
23263
23264 next_arg = XEXP (arg, 1);
23265 if (REG_P (XEXP (XEXP (arg, 0), 0))
23266 && next_arg
23267 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23268 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23269 && REGNO (XEXP (XEXP (arg, 0), 0))
23270 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23271 next_arg = XEXP (next_arg, 1);
23272 if (mode == VOIDmode)
23273 {
23274 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23275 if (mode == VOIDmode)
23276 mode = GET_MODE (XEXP (arg, 0));
23277 }
23278 if (mode == VOIDmode || mode == BLKmode)
23279 continue;
23280 /* Get dynamic information about call target only if we
23281 have no static information: we cannot generate both
23282 DW_AT_call_origin and DW_AT_call_target
23283 attributes. */
23284 if (ca_loc->symbol_ref == NULL_RTX)
23285 {
23286 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23287 {
23288 tloc = XEXP (XEXP (arg, 0), 1);
23289 continue;
23290 }
23291 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23292 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23293 {
23294 tlocc = XEXP (XEXP (arg, 0), 1);
23295 continue;
23296 }
23297 }
23298 reg = NULL;
23299 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23300 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23301 VAR_INIT_STATUS_INITIALIZED);
23302 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23303 {
23304 rtx mem = XEXP (XEXP (arg, 0), 0);
23305 reg = mem_loc_descriptor (XEXP (mem, 0),
23306 get_address_mode (mem),
23307 GET_MODE (mem),
23308 VAR_INIT_STATUS_INITIALIZED);
23309 }
23310 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23311 == DEBUG_PARAMETER_REF)
23312 {
23313 tree tdecl
23314 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23315 tdie = lookup_decl_die (tdecl);
23316 if (tdie == NULL)
23317 continue;
23318 }
23319 else
23320 continue;
23321 if (reg == NULL
23322 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23323 != DEBUG_PARAMETER_REF)
23324 continue;
23325 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23326 VOIDmode,
23327 VAR_INIT_STATUS_INITIALIZED);
23328 if (val == NULL)
23329 continue;
23330 if (die == NULL)
23331 die = gen_call_site_die (decl, subr_die, ca_loc);
23332 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23333 NULL_TREE);
23334 if (reg != NULL)
23335 add_AT_loc (cdie, DW_AT_location, reg);
23336 else if (tdie != NULL)
23337 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23338 tdie);
23339 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23340 if (next_arg != XEXP (arg, 1))
23341 {
23342 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23343 if (mode == VOIDmode)
23344 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23345 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23346 0), 1),
23347 mode, VOIDmode,
23348 VAR_INIT_STATUS_INITIALIZED);
23349 if (val != NULL)
23350 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23351 val);
23352 }
23353 }
23354 if (die == NULL
23355 && (ca_loc->symbol_ref || tloc))
23356 die = gen_call_site_die (decl, subr_die, ca_loc);
23357 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23358 {
23359 dw_loc_descr_ref tval = NULL;
23360
23361 if (tloc != NULL_RTX)
23362 tval = mem_loc_descriptor (tloc,
23363 GET_MODE (tloc) == VOIDmode
23364 ? Pmode : GET_MODE (tloc),
23365 VOIDmode,
23366 VAR_INIT_STATUS_INITIALIZED);
23367 if (tval)
23368 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23369 else if (tlocc != NULL_RTX)
23370 {
23371 tval = mem_loc_descriptor (tlocc,
23372 GET_MODE (tlocc) == VOIDmode
23373 ? Pmode : GET_MODE (tlocc),
23374 VOIDmode,
23375 VAR_INIT_STATUS_INITIALIZED);
23376 if (tval)
23377 add_AT_loc (die,
23378 dwarf_AT (DW_AT_call_target_clobbered),
23379 tval);
23380 }
23381 }
23382 if (die != NULL)
23383 {
23384 call_site_note_count++;
23385 if (ca_loc->tail_call_p)
23386 tail_call_site_note_count++;
23387 }
23388 }
23389 }
23390 call_arg_locations = NULL;
23391 call_arg_loc_last = NULL;
23392 if (tail_call_site_count >= 0
23393 && tail_call_site_count == tail_call_site_note_count
23394 && (!dwarf_strict || dwarf_version >= 5))
23395 {
23396 if (call_site_count >= 0
23397 && call_site_count == call_site_note_count)
23398 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23399 else
23400 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23401 }
23402 call_site_count = -1;
23403 tail_call_site_count = -1;
23404 }
23405
23406 /* Mark used types after we have created DIEs for the function's scopes. */
23407 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23408 }
23409
23410 /* Returns a hash value for X (which really is a die_struct). */
23411
23412 hashval_t
23413 block_die_hasher::hash (die_struct *d)
23414 {
23415 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23416 }
23417
23418 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23419 as the decl_id and die_parent of die_struct Y. */
23420
23421 bool
23422 block_die_hasher::equal (die_struct *x, die_struct *y)
23423 {
23424 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23425 }
23426
23427 /* Hold information about markers for inlined entry points. */
23428 struct GTY ((for_user)) inline_entry_data
23429 {
23430 /* The block that's the inlined_function_outer_scope for an inlined
23431 function. */
23432 tree block;
23433
23434 /* The label at the inlined entry point. */
23435 const char *label_pfx;
23436 unsigned int label_num;
23437
23438 /* The view number to be used as the inlined entry point. */
23439 var_loc_view view;
23440 };
23441
23442 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23443 {
23444 typedef tree compare_type;
23445 static inline hashval_t hash (const inline_entry_data *);
23446 static inline bool equal (const inline_entry_data *, const_tree);
23447 };
23448
23449 /* Hash table routines for inline_entry_data. */
23450
23451 inline hashval_t
23452 inline_entry_data_hasher::hash (const inline_entry_data *data)
23453 {
23454 return htab_hash_pointer (data->block);
23455 }
23456
23457 inline bool
23458 inline_entry_data_hasher::equal (const inline_entry_data *data,
23459 const_tree block)
23460 {
23461 return data->block == block;
23462 }
23463
23464 /* Inlined entry points pending DIE creation in this compilation unit. */
23465
23466 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23467
23468
23469 /* Return TRUE if DECL, which may have been previously generated as
23470 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23471 true if decl (or its origin) is either an extern declaration or a
23472 class/namespace scoped declaration.
23473
23474 The declare_in_namespace support causes us to get two DIEs for one
23475 variable, both of which are declarations. We want to avoid
23476 considering one to be a specification, so we must test for
23477 DECLARATION and DW_AT_declaration. */
23478 static inline bool
23479 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23480 {
23481 return (old_die && TREE_STATIC (decl) && !declaration
23482 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23483 }
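/* Hypothetical example (not from the original sources): for

struct S { static int x; };  - in-class declaration DIE
int S::x = 1;                - namespace-scope definition

the definition's DIE gets a DW_AT_specification pointing back at the
in-class declaration DIE; decl_will_get_specification_p is what detects
this situation in gen_variable_die below. */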
23484
23485 /* Return true if DECL is a local static. */
23486
23487 static inline bool
23488 local_function_static (tree decl)
23489 {
23490 gcc_assert (VAR_P (decl));
23491 return TREE_STATIC (decl)
23492 && DECL_CONTEXT (decl)
23493 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23494 }
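/* For example (illustrative only), in

void f (void) { static int counter; }

`counter' is TREE_STATIC and its DECL_CONTEXT is the FUNCTION_DECL for
`f', so local_function_static returns true for it, whereas it returns
false for a namespace- or file-scope static. */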
23495
23496 /* Generate a DIE to represent a declared data object.
23497 Either DECL or ORIGIN must be non-null. */
23498
23499 static void
23500 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23501 {
23502 HOST_WIDE_INT off = 0;
23503 tree com_decl;
23504 tree decl_or_origin = decl ? decl : origin;
23505 tree ultimate_origin;
23506 dw_die_ref var_die;
23507 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23508 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23509 || class_or_namespace_scope_p (context_die));
23510 bool specialization_p = false;
23511 bool no_linkage_name = false;
23512
23513 /* While C++ inline static data members have definitions inside the
23514 class, force the first DIE to be a declaration, then let gen_member_die
23515 reparent it to the class context and call gen_variable_die again
23516 to create the outside-of-class DIE for the definition. */
23517 if (!declaration
23518 && old_die == NULL
23519 && decl
23520 && DECL_CONTEXT (decl)
23521 && TYPE_P (DECL_CONTEXT (decl))
23522 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23523 {
23524 declaration = true;
23525 if (dwarf_version < 5)
23526 no_linkage_name = true;
23527 }
23528
23529 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23530 if (decl || ultimate_origin)
23531 origin = ultimate_origin;
23532 com_decl = fortran_common (decl_or_origin, &off);
23533
23534 /* A symbol in a Fortran common block gets emitted as a child of the common
23535 block DIE, in the form of a data member. */
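/* Hypothetical Fortran example (not from the original sources):

INTEGER I, J
COMMON /BLK/ I, J

yields a DW_TAG_common_block DIE for BLK with DW_TAG_variable children
for I and J; fortran_common above returns the common block decl and the
byte offset OFF of the member within it, and that offset is folded into
each child's DW_AT_location below. */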
23536 if (com_decl)
23537 {
23538 dw_die_ref com_die;
23539 dw_loc_list_ref loc = NULL;
23540 die_node com_die_arg;
23541
23542 var_die = lookup_decl_die (decl_or_origin);
23543 if (var_die)
23544 {
23545 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23546 {
23547 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23548 if (loc)
23549 {
23550 if (off)
23551 {
23552 /* Optimize the common case. */
23553 if (single_element_loc_list_p (loc)
23554 && loc->expr->dw_loc_opc == DW_OP_addr
23555 && loc->expr->dw_loc_next == NULL
23556 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23557 == SYMBOL_REF)
23558 {
23559 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23560 loc->expr->dw_loc_oprnd1.v.val_addr
23561 = plus_constant (GET_MODE (x), x , off);
23562 }
23563 else
23564 loc_list_plus_const (loc, off);
23565 }
23566 add_AT_location_description (var_die, DW_AT_location, loc);
23567 remove_AT (var_die, DW_AT_declaration);
23568 }
23569 }
23570 return;
23571 }
23572
23573 if (common_block_die_table == NULL)
23574 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23575
23576 com_die_arg.decl_id = DECL_UID (com_decl);
23577 com_die_arg.die_parent = context_die;
23578 com_die = common_block_die_table->find (&com_die_arg);
23579 if (! early_dwarf)
23580 loc = loc_list_from_tree (com_decl, 2, NULL);
23581 if (com_die == NULL)
23582 {
23583 const char *cnam
23584 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23585 die_node **slot;
23586
23587 com_die = new_die (DW_TAG_common_block, context_die, decl);
23588 add_name_and_src_coords_attributes (com_die, com_decl);
23589 if (loc)
23590 {
23591 add_AT_location_description (com_die, DW_AT_location, loc);
23592 /* Avoid sharing the same loc descriptor between
23593 DW_TAG_common_block and DW_TAG_variable. */
23594 loc = loc_list_from_tree (com_decl, 2, NULL);
23595 }
23596 else if (DECL_EXTERNAL (decl_or_origin))
23597 add_AT_flag (com_die, DW_AT_declaration, 1);
23598 if (want_pubnames ())
23599 add_pubname_string (cnam, com_die); /* ??? needed? */
23600 com_die->decl_id = DECL_UID (com_decl);
23601 slot = common_block_die_table->find_slot (com_die, INSERT);
23602 *slot = com_die;
23603 }
23604 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23605 {
23606 add_AT_location_description (com_die, DW_AT_location, loc);
23607 loc = loc_list_from_tree (com_decl, 2, NULL);
23608 remove_AT (com_die, DW_AT_declaration);
23609 }
23610 var_die = new_die (DW_TAG_variable, com_die, decl);
23611 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23612 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23613 decl_quals (decl_or_origin), false,
23614 context_die);
23615 add_alignment_attribute (var_die, decl);
23616 add_AT_flag (var_die, DW_AT_external, 1);
23617 if (loc)
23618 {
23619 if (off)
23620 {
23621 /* Optimize the common case. */
23622 if (single_element_loc_list_p (loc)
23623 && loc->expr->dw_loc_opc == DW_OP_addr
23624 && loc->expr->dw_loc_next == NULL
23625 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23626 {
23627 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23628 loc->expr->dw_loc_oprnd1.v.val_addr
23629 = plus_constant (GET_MODE (x), x, off);
23630 }
23631 else
23632 loc_list_plus_const (loc, off);
23633 }
23634 add_AT_location_description (var_die, DW_AT_location, loc);
23635 }
23636 else if (DECL_EXTERNAL (decl_or_origin))
23637 add_AT_flag (var_die, DW_AT_declaration, 1);
23638 if (decl)
23639 equate_decl_number_to_die (decl, var_die);
23640 return;
23641 }
23642
23643 if (old_die)
23644 {
23645 if (declaration)
23646 {
23647 /* A declaration that has been previously dumped needs no
23648 further annotations, since it doesn't need location information
23649 on the second pass. */
23650 return;
23651 }
23652 else if (decl_will_get_specification_p (old_die, decl, declaration)
23653 && !get_AT (old_die, DW_AT_specification))
23654 {
23655 /* Fall-thru so we can make a new variable die along with a
23656 DW_AT_specification. */
23657 }
23658 else if (origin && old_die->die_parent != context_die)
23659 {
23660 /* If we will be creating an inlined instance, we need a
23661 new DIE that will get annotated with
23662 DW_AT_abstract_origin. */
23663 gcc_assert (!DECL_ABSTRACT_P (decl));
23664 }
23665 else
23666 {
23667 /* If a DIE was dumped early, it still needs location info.
23668 Skip to where we fill the location bits. */
23669 var_die = old_die;
23670
23671 /* ??? In LTRANS we cannot annotate early created variably
23672 modified type DIEs without copying them and adjusting all
23673 references to them, so we dump them again. Also add a
23674 reference to them, but beware of a -g0 compile and -g link,
23675 in which case the reference will already be present. */
23676 tree type = TREE_TYPE (decl_or_origin);
23677 if (in_lto_p
23678 && ! get_AT (var_die, DW_AT_type)
23679 && variably_modified_type_p
23680 (type, decl_function_context (decl_or_origin)))
23681 {
23682 if (decl_by_reference_p (decl_or_origin))
23683 add_type_attribute (var_die, TREE_TYPE (type),
23684 TYPE_UNQUALIFIED, false, context_die);
23685 else
23686 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23687 false, context_die);
23688 }
23689
23690 goto gen_variable_die_location;
23691 }
23692 }
23693
23694 /* For static data members, the declaration in the class is supposed
23695 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23696 also in DWARF2; the specification should still be DW_TAG_variable
23697 referencing the DW_TAG_member DIE. */
23698 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23699 var_die = new_die (DW_TAG_member, context_die, decl);
23700 else
23701 var_die = new_die (DW_TAG_variable, context_die, decl);
23702
23703 if (origin != NULL)
23704 add_abstract_origin_attribute (var_die, origin);
23705
23706 /* Loop unrolling can create multiple blocks that refer to the same
23707 static variable, so we must test for the DW_AT_declaration flag.
23708
23709 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23710 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23711 sharing them.
23712
23713 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23714 else if (decl_will_get_specification_p (old_die, decl, declaration))
23715 {
23716 /* This is a definition of a C++ class level static. */
23717 add_AT_specification (var_die, old_die);
23718 specialization_p = true;
23719 if (DECL_NAME (decl))
23720 {
23721 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23722 struct dwarf_file_data * file_index = lookup_filename (s.file);
23723
23724 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23725 add_AT_file (var_die, DW_AT_decl_file, file_index);
23726
23727 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23728 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23729
23730 if (debug_column_info
23731 && s.column
23732 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23733 != (unsigned) s.column))
23734 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23735
23736 if (old_die->die_tag == DW_TAG_member)
23737 add_linkage_name (var_die, decl);
23738 }
23739 }
23740 else
23741 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23742
23743 if ((origin == NULL && !specialization_p)
23744 || (origin != NULL
23745 && !DECL_ABSTRACT_P (decl_or_origin)
23746 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23747 decl_function_context
23748 (decl_or_origin))))
23749 {
23750 tree type = TREE_TYPE (decl_or_origin);
23751
23752 if (decl_by_reference_p (decl_or_origin))
23753 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23754 context_die);
23755 else
23756 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23757 context_die);
23758 }
23759
23760 if (origin == NULL && !specialization_p)
23761 {
23762 if (TREE_PUBLIC (decl))
23763 add_AT_flag (var_die, DW_AT_external, 1);
23764
23765 if (DECL_ARTIFICIAL (decl))
23766 add_AT_flag (var_die, DW_AT_artificial, 1);
23767
23768 add_alignment_attribute (var_die, decl);
23769
23770 add_accessibility_attribute (var_die, decl);
23771 }
23772
23773 if (declaration)
23774 add_AT_flag (var_die, DW_AT_declaration, 1);
23775
23776 if (decl && (DECL_ABSTRACT_P (decl)
23777 || !old_die || is_declaration_die (old_die)))
23778 equate_decl_number_to_die (decl, var_die);
23779
23780 gen_variable_die_location:
23781 if (! declaration
23782 && (! DECL_ABSTRACT_P (decl_or_origin)
23783 /* Local static vars are shared between all clones/inlines,
23784 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23785 already set. */
23786 || (VAR_P (decl_or_origin)
23787 && TREE_STATIC (decl_or_origin)
23788 && DECL_RTL_SET_P (decl_or_origin))))
23789 {
23790 if (early_dwarf)
23791 add_pubname (decl_or_origin, var_die);
23792 else
23793 add_location_or_const_value_attribute (var_die, decl_or_origin,
23794 decl == NULL);
23795 }
23796 else
23797 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23798
23799 if ((dwarf_version >= 4 || !dwarf_strict)
23800 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23801 DW_AT_const_expr) == 1
23802 && !get_AT (var_die, DW_AT_const_expr)
23803 && !specialization_p)
23804 add_AT_flag (var_die, DW_AT_const_expr, 1);
23805
23806 if (!dwarf_strict)
23807 {
23808 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23809 DW_AT_inline);
23810 if (inl != -1
23811 && !get_AT (var_die, DW_AT_inline)
23812 && !specialization_p)
23813 add_AT_unsigned (var_die, DW_AT_inline, inl);
23814 }
23815 }
23816
23817 /* Generate a DIE to represent a named constant. */
23818
23819 static void
23820 gen_const_die (tree decl, dw_die_ref context_die)
23821 {
23822 dw_die_ref const_die;
23823 tree type = TREE_TYPE (decl);
23824
23825 const_die = lookup_decl_die (decl);
23826 if (const_die)
23827 return;
23828
23829 const_die = new_die (DW_TAG_constant, context_die, decl);
23830 equate_decl_number_to_die (decl, const_die);
23831 add_name_and_src_coords_attributes (const_die, decl);
23832 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23833 if (TREE_PUBLIC (decl))
23834 add_AT_flag (const_die, DW_AT_external, 1);
23835 if (DECL_ARTIFICIAL (decl))
23836 add_AT_flag (const_die, DW_AT_artificial, 1);
23837 tree_add_const_value_attribute_for_decl (const_die, decl);
23838 }
23839
23840 /* Generate a DIE to represent a label identifier. */
23841
23842 static void
23843 gen_label_die (tree decl, dw_die_ref context_die)
23844 {
23845 tree origin = decl_ultimate_origin (decl);
23846 dw_die_ref lbl_die = lookup_decl_die (decl);
23847 rtx insn;
23848 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23849
23850 if (!lbl_die)
23851 {
23852 lbl_die = new_die (DW_TAG_label, context_die, decl);
23853 equate_decl_number_to_die (decl, lbl_die);
23854
23855 if (origin != NULL)
23856 add_abstract_origin_attribute (lbl_die, origin);
23857 else
23858 add_name_and_src_coords_attributes (lbl_die, decl);
23859 }
23860
23861 if (DECL_ABSTRACT_P (decl))
23862 equate_decl_number_to_die (decl, lbl_die);
23863 else if (! early_dwarf)
23864 {
23865 insn = DECL_RTL_IF_SET (decl);
23866
23867 /* Deleted labels are programmer-specified labels which have been
23868 eliminated by various optimizations. We still emit them
23869 here so that it is possible to put breakpoints on them. */
23870 if (insn
23871 && (LABEL_P (insn)
23872 || ((NOTE_P (insn)
23873 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23874 {
23875 /* When optimization is enabled (via -O) some parts of the compiler
23876 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23877 represent source-level labels which were explicitly declared by
23878 the user. This really shouldn't be happening though, so catch
23879 it if it ever does happen. */
23880 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23881
23882 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23883 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23884 }
23885 else if (insn
23886 && NOTE_P (insn)
23887 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23888 && CODE_LABEL_NUMBER (insn) != -1)
23889 {
23890 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23891 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23892 }
23893 }
23894 }
23895
23896 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23897 attributes to the DIE for a block STMT, to describe where the inlined
23898 function was called from. This is similar to add_src_coords_attributes. */
23899
23900 static inline void
23901 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23902 {
23903 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23904
23905 if (dwarf_version >= 3 || !dwarf_strict)
23906 {
23907 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23908 add_AT_unsigned (die, DW_AT_call_line, s.line);
23909 if (debug_column_info && s.column)
23910 add_AT_unsigned (die, DW_AT_call_column, s.column);
23911 }
23912 }
23913
23914
23915 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23916 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23917
23918 static inline void
23919 add_high_low_attributes (tree stmt, dw_die_ref die)
23920 {
23921 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23922
23923 if (inline_entry_data **iedp
23924 = !inline_entry_data_table ? NULL
23925 : inline_entry_data_table->find_slot_with_hash (stmt,
23926 htab_hash_pointer (stmt),
23927 NO_INSERT))
23928 {
23929 inline_entry_data *ied = *iedp;
23930 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23931 gcc_assert (debug_inline_points);
23932 gcc_assert (inlined_function_outer_scope_p (stmt));
23933
23934 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23935 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23936
23937 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23938 && !dwarf_strict)
23939 {
23940 if (!output_asm_line_debug_info ())
23941 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23942 else
23943 {
23944 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23945 /* FIXME: this will resolve to a small number. Could we
23946 possibly emit smaller data? Ideally we'd emit a
23947 uleb128, but that would make the size of DIEs
23948 impossible for the compiler to compute, since it's
23949 the assembler that computes the value of the view
23950 label in this case. Ideally, we'd have a single form
23951 encompassing both the address and the view, and
23952 indirecting them through a table might make things
23953 easier, but even that would be more wasteful,
23954 space-wise, than what we have now. */
23955 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23956 }
23957 }
23958
23959 inline_entry_data_table->clear_slot (iedp);
23960 }
23961
23962 if (BLOCK_FRAGMENT_CHAIN (stmt)
23963 && (dwarf_version >= 3 || !dwarf_strict))
23964 {
23965 tree chain, superblock = NULL_TREE;
23966 dw_die_ref pdie;
23967 dw_attr_node *attr = NULL;
23968
23969 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23970 {
23971 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23972 BLOCK_NUMBER (stmt));
23973 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23974 }
23975
23976 /* Optimize duplicate .debug_ranges lists or even tails of
23977 lists. If this BLOCK has the same ranges as its supercontext,
23978 look up the DW_AT_ranges attribute in the supercontext (and
23979 recursively so), verify that the ranges_table contains the
23980 right values and use it instead of adding a new .debug_ranges list. */
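/* Worked example (illustrative numbers only): if the supercontext's
range list starts at offset OFF and covers 4 fragments while this
BLOCK covers only the last 2 of them, the code below points this
BLOCK's DW_AT_ranges at OFF + 4 - 2 = OFF + 2 and reuses the tail of
the supercontext's list instead of emitting a fresh one (after the
checking asserts have validated the supercontext's entries). */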
23981 for (chain = stmt, pdie = die;
23982 BLOCK_SAME_RANGE (chain);
23983 chain = BLOCK_SUPERCONTEXT (chain))
23984 {
23985 dw_attr_node *new_attr;
23986
23987 pdie = pdie->die_parent;
23988 if (pdie == NULL)
23989 break;
23990 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23991 break;
23992 new_attr = get_AT (pdie, DW_AT_ranges);
23993 if (new_attr == NULL
23994 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23995 break;
23996 attr = new_attr;
23997 superblock = BLOCK_SUPERCONTEXT (chain);
23998 }
23999 if (attr != NULL
24000 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24001 == BLOCK_NUMBER (superblock))
24002 && BLOCK_FRAGMENT_CHAIN (superblock))
24003 {
24004 unsigned long off = attr->dw_attr_val.v.val_offset;
24005 unsigned long supercnt = 0, thiscnt = 0;
24006 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24007 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24008 {
24009 ++supercnt;
24010 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24011 == BLOCK_NUMBER (chain));
24012 }
24013 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24014 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24015 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24016 ++thiscnt;
24017 gcc_assert (supercnt >= thiscnt);
24018 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24019 false);
24020 note_rnglist_head (off + supercnt - thiscnt);
24021 return;
24022 }
24023
24024 unsigned int offset = add_ranges (stmt, true);
24025 add_AT_range_list (die, DW_AT_ranges, offset, false);
24026 note_rnglist_head (offset);
24027
24028 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24029 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24030 do
24031 {
24032 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24033 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24034 chain = BLOCK_FRAGMENT_CHAIN (chain);
24035 }
24036 while (chain);
24037 add_ranges (NULL);
24038 }
24039 else
24040 {
24041 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24042 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24043 BLOCK_NUMBER (stmt));
24044 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24045 BLOCK_NUMBER (stmt));
24046 add_AT_low_high_pc (die, label, label_high, false);
24047 }
24048 }
24049
24050 /* Generate a DIE for a lexical block. */
24051
24052 static void
24053 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24054 {
24055 dw_die_ref old_die = BLOCK_DIE (stmt);
24056 dw_die_ref stmt_die = NULL;
24057 if (!old_die)
24058 {
24059 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24060 BLOCK_DIE (stmt) = stmt_die;
24061 }
24062
24063 if (BLOCK_ABSTRACT (stmt))
24064 {
24065 if (old_die)
24066 {
24067 /* This must have been generated early and it won't even
24068 need location information since it's a DW_AT_inline
24069 function. */
24070 if (flag_checking)
24071 for (dw_die_ref c = context_die; c; c = c->die_parent)
24072 if (c->die_tag == DW_TAG_inlined_subroutine
24073 || c->die_tag == DW_TAG_subprogram)
24074 {
24075 gcc_assert (get_AT (c, DW_AT_inline));
24076 break;
24077 }
24078 return;
24079 }
24080 }
24081 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24082 {
24083 /* If this is an inlined instance, create a new lexical die for
24084 anything below to attach DW_AT_abstract_origin to. */
24085 if (old_die)
24086 {
24087 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24088 BLOCK_DIE (stmt) = stmt_die;
24089 old_die = NULL;
24090 }
24091
24092 tree origin = block_ultimate_origin (stmt);
24093 if (origin != NULL_TREE && origin != stmt)
24094 add_abstract_origin_attribute (stmt_die, origin);
24095 }
24096
24097 if (old_die)
24098 stmt_die = old_die;
24099
24100 /* A non-abstract block whose blocks have already been reordered
24101 should have the instruction range for this block. If so, set the
24102 high/low attributes. */
24103 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24104 {
24105 gcc_assert (stmt_die);
24106 add_high_low_attributes (stmt, stmt_die);
24107 }
24108
24109 decls_for_scope (stmt, stmt_die);
24110 }
24111
24112 /* Generate a DIE for an inlined subprogram. */
24113
24114 static void
24115 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24116 {
24117 tree decl;
24118
24119 /* The instance of the function that is effectively being inlined shall not
24120 be abstract. */
24121 gcc_assert (! BLOCK_ABSTRACT (stmt));
24122
24123 decl = block_ultimate_origin (stmt);
24124
24125 /* Make sure any inlined functions are known to be inlineable. */
24126 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24127 || cgraph_function_possibly_inlined_p (decl));
24128
24129 if (! BLOCK_ABSTRACT (stmt))
24130 {
24131 dw_die_ref subr_die
24132 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24133
24134 if (call_arg_locations || debug_inline_points)
24135 BLOCK_DIE (stmt) = subr_die;
24136 add_abstract_origin_attribute (subr_die, decl);
24137 if (TREE_ASM_WRITTEN (stmt))
24138 add_high_low_attributes (stmt, subr_die);
24139 add_call_src_coords_attributes (stmt, subr_die);
24140
24141 decls_for_scope (stmt, subr_die);
24142 }
24143 }
24144
24145 /* Generate a DIE for a field in a record or structure. CTX is required: see
24146 the comment for VLR_CONTEXT. */
24147
24148 static void
24149 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24150 {
24151 dw_die_ref decl_die;
24152
24153 if (TREE_TYPE (decl) == error_mark_node)
24154 return;
24155
24156 decl_die = new_die (DW_TAG_member, context_die, decl);
24157 add_name_and_src_coords_attributes (decl_die, decl);
24158 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24159 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24160 context_die);
24161
24162 if (DECL_BIT_FIELD_TYPE (decl))
24163 {
24164 add_byte_size_attribute (decl_die, decl);
24165 add_bit_size_attribute (decl_die, decl);
24166 add_bit_offset_attribute (decl_die, decl, ctx);
24167 }
24168
24169 add_alignment_attribute (decl_die, decl);
24170
24171 /* If we have a variant part offset, then we are supposed to process a member
24172 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24173 trees. */
24174 gcc_assert (ctx->variant_part_offset == NULL_TREE
24175 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24176 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24177 add_data_member_location_attribute (decl_die, decl, ctx);
24178
24179 if (DECL_ARTIFICIAL (decl))
24180 add_AT_flag (decl_die, DW_AT_artificial, 1);
24181
24182 add_accessibility_attribute (decl_die, decl);
24183
24184 /* Equate decl number to die, so that we can look up this decl later on. */
24185 equate_decl_number_to_die (decl, decl_die);
24186 }
24187
24188 /* Generate a DIE for a pointer to a member type. TYPE can be an
24189 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24190 pointer to member function. */
24191
24192 static void
24193 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24194 {
24195 if (lookup_type_die (type))
24196 return;
24197
24198 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24199 scope_die_for (type, context_die), type);
24200
24201 equate_type_number_to_die (type, ptr_die);
24202 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24203 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24204 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24205 context_die);
24206 add_alignment_attribute (ptr_die, type);
24207
24208 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24209 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24210 {
24211 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24212 add_AT_loc (ptr_die, DW_AT_use_location, op);
24213 }
24214 }
24215
24216 static char *producer_string;
24217
24218 /* Return a heap-allocated producer string, including the command-line
24219 options if -grecord-gcc-switches is in effect. */
24220
24221 static char *
24222 gen_producer_string (void)
24223 {
24224 size_t j;
24225 auto_vec<const char *> switches;
24226 const char *language_string = lang_hooks.name;
24227 char *producer, *tail;
24228 const char *p;
24229 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24230 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24231
24232 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24233 switch (save_decoded_options[j].opt_index)
24234 {
24235 case OPT_o:
24236 case OPT_d:
24237 case OPT_dumpbase:
24238 case OPT_dumpdir:
24239 case OPT_auxbase:
24240 case OPT_auxbase_strip:
24241 case OPT_quiet:
24242 case OPT_version:
24243 case OPT_v:
24244 case OPT_w:
24245 case OPT_L:
24246 case OPT_D:
24247 case OPT_I:
24248 case OPT_U:
24249 case OPT_SPECIAL_unknown:
24250 case OPT_SPECIAL_ignore:
24251 case OPT_SPECIAL_deprecated:
24252 case OPT_SPECIAL_program_name:
24253 case OPT_SPECIAL_input_file:
24254 case OPT_grecord_gcc_switches:
24255 case OPT__output_pch_:
24256 case OPT_fdiagnostics_show_location_:
24257 case OPT_fdiagnostics_show_option:
24258 case OPT_fdiagnostics_show_caret:
24259 case OPT_fdiagnostics_show_labels:
24260 case OPT_fdiagnostics_show_line_numbers:
24261 case OPT_fdiagnostics_color_:
24262 case OPT_fverbose_asm:
24263 case OPT____:
24264 case OPT__sysroot_:
24265 case OPT_nostdinc:
24266 case OPT_nostdinc__:
24267 case OPT_fpreprocessed:
24268 case OPT_fltrans_output_list_:
24269 case OPT_fresolution_:
24270 case OPT_fdebug_prefix_map_:
24271 case OPT_fmacro_prefix_map_:
24272 case OPT_ffile_prefix_map_:
24273 case OPT_fcompare_debug:
24274 case OPT_fchecking:
24275 case OPT_fchecking_:
24276 /* Ignore these. */
24277 continue;
24278 default:
24279 if (cl_options[save_decoded_options[j].opt_index].flags
24280 & CL_NO_DWARF_RECORD)
24281 continue;
24282 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24283 == '-');
24284 switch (save_decoded_options[j].canonical_option[0][1])
24285 {
24286 case 'M':
24287 case 'i':
24288 case 'W':
24289 continue;
24290 case 'f':
24291 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24292 "dump", 4) == 0)
24293 continue;
24294 break;
24295 default:
24296 break;
24297 }
24298 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24299 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24300 break;
24301 }
24302
24303 producer = XNEWVEC (char, plen + 1 + len + 1);
24304 tail = producer;
24305 sprintf (tail, "%s %s", language_string, version_string);
24306 tail += plen;
24307
24308 FOR_EACH_VEC_ELT (switches, j, p)
24309 {
24310 len = strlen (p);
24311 *tail = ' ';
24312 memcpy (tail + 1, p, len);
24313 tail += len + 1;
24314 }
24315
24316 *tail = '\0';
24317 return producer;
24318 }
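
/* For illustration only (the option names below are arbitrary examples,
   not a fixed set): with -grecord-gcc-switches the string built above has
   the shape

       "GNU C17 <version_string> -march=x86-64 -O2 -g"

   i.e. the language name and version string, followed by the recorded
   command-line options separated by single spaces.  */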
24319
24320 /* Given two C and/or C++ language/version strings, return the "highest"
24321 one. C++ is assumed to be "higher" than C in this case. Used for
24322 merging LTO translation unit languages. */
24323 static const char *
24324 highest_c_language (const char *lang1, const char *lang2)
24325 {
24326 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24327 return "GNU C++17";
24328 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24329 return "GNU C++14";
24330 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24331 return "GNU C++11";
24332 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24333 return "GNU C++98";
24334
24335 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24336 return "GNU C17";
24337 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24338 return "GNU C11";
24339 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24340 return "GNU C99";
24341 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24342 return "GNU C89";
24343
24344 gcc_unreachable ();
24345 }
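
/* For example (illustrative only), given the table above:

     highest_c_language ("GNU C11", "GNU C++14")  returns "GNU C++14"
     highest_c_language ("GNU C99", "GNU C89")    returns "GNU C99"

   If neither string matches an entry, control reaches gcc_unreachable.  */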
24346
24347
24348 /* Generate the DIE for the compilation unit. */
24349
24350 static dw_die_ref
24351 gen_compile_unit_die (const char *filename)
24352 {
24353 dw_die_ref die;
24354 const char *language_string = lang_hooks.name;
24355 int language;
24356
24357 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24358
24359 if (filename)
24360 {
24361 add_name_attribute (die, filename);
24362 /* Don't add cwd for <built-in>. */
24363 if (filename[0] != '<')
24364 add_comp_dir_attribute (die);
24365 }
24366
24367 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24368
24369 /* If our producer is LTO try to figure out a common language to use
24370 from the global list of translation units. */
24371 if (strcmp (language_string, "GNU GIMPLE") == 0)
24372 {
24373 unsigned i;
24374 tree t;
24375 const char *common_lang = NULL;
24376
24377 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24378 {
24379 if (!TRANSLATION_UNIT_LANGUAGE (t))
24380 continue;
24381 if (!common_lang)
24382 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24383 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24384 ;
24385 else if (strncmp (common_lang, "GNU C", 5) == 0
24386 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24387 /* Mixing C and C++ is ok, use C++ in that case. */
24388 common_lang = highest_c_language (common_lang,
24389 TRANSLATION_UNIT_LANGUAGE (t));
24390 else
24391 {
24392 /* Fall back to C. */
24393 common_lang = NULL;
24394 break;
24395 }
24396 }
24397
24398 if (common_lang)
24399 language_string = common_lang;
24400 }
24401
24402 language = DW_LANG_C;
24403 if (strncmp (language_string, "GNU C", 5) == 0
24404 && ISDIGIT (language_string[5]))
24405 {
24406 language = DW_LANG_C89;
24407 if (dwarf_version >= 3 || !dwarf_strict)
24408 {
24409 if (strcmp (language_string, "GNU C89") != 0)
24410 language = DW_LANG_C99;
24411
24412 if (dwarf_version >= 5 /* || !dwarf_strict */)
24413 if (strcmp (language_string, "GNU C11") == 0
24414 || strcmp (language_string, "GNU C17") == 0)
24415 language = DW_LANG_C11;
24416 }
24417 }
24418 else if (strncmp (language_string, "GNU C++", 7) == 0)
24419 {
24420 language = DW_LANG_C_plus_plus;
24421 if (dwarf_version >= 5 /* || !dwarf_strict */)
24422 {
24423 if (strcmp (language_string, "GNU C++11") == 0)
24424 language = DW_LANG_C_plus_plus_11;
24425 else if (strcmp (language_string, "GNU C++14") == 0)
24426 language = DW_LANG_C_plus_plus_14;
24427 else if (strcmp (language_string, "GNU C++17") == 0)
24428 /* For now. */
24429 language = DW_LANG_C_plus_plus_14;
24430 }
24431 }
24432 else if (strcmp (language_string, "GNU F77") == 0)
24433 language = DW_LANG_Fortran77;
24434 else if (dwarf_version >= 3 || !dwarf_strict)
24435 {
24436 if (strcmp (language_string, "GNU Ada") == 0)
24437 language = DW_LANG_Ada95;
24438 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24439 {
24440 language = DW_LANG_Fortran95;
24441 if (dwarf_version >= 5 /* || !dwarf_strict */)
24442 {
24443 if (strcmp (language_string, "GNU Fortran2003") == 0)
24444 language = DW_LANG_Fortran03;
24445 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24446 language = DW_LANG_Fortran08;
24447 }
24448 }
24449 else if (strcmp (language_string, "GNU Objective-C") == 0)
24450 language = DW_LANG_ObjC;
24451 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24452 language = DW_LANG_ObjC_plus_plus;
24453 else if (dwarf_version >= 5 || !dwarf_strict)
24454 {
24455 if (strcmp (language_string, "GNU Go") == 0)
24456 language = DW_LANG_Go;
24457 }
24458 }
24459 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24460 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24461 language = DW_LANG_Fortran90;
24462 /* Likewise for Ada. */
24463 else if (strcmp (language_string, "GNU Ada") == 0)
24464 language = DW_LANG_Ada83;
24465
24466 add_AT_unsigned (die, DW_AT_language, language);
24467
24468 switch (language)
24469 {
24470 case DW_LANG_Fortran77:
24471 case DW_LANG_Fortran90:
24472 case DW_LANG_Fortran95:
24473 case DW_LANG_Fortran03:
24474 case DW_LANG_Fortran08:
24475 /* Fortran has case insensitive identifiers and the front-end
24476 lowercases everything. */
24477 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24478 break;
24479 default:
24480 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24481 break;
24482 }
24483 return die;
24484 }
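
/* For instance (illustrative): "GNU C++14" is mapped to DW_LANG_C_plus_plus
   under -gdwarf-4 and to DW_LANG_C_plus_plus_14 once dwarf_version >= 5,
   while "GNU C17" degrades through DW_LANG_C89/DW_LANG_C99 to DW_LANG_C11
   depending on the DWARF version and -gstrict-dwarf.  */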
24485
24486 /* Generate the DIE for a base class. */
24487
24488 static void
24489 gen_inheritance_die (tree binfo, tree access, tree type,
24490 dw_die_ref context_die)
24491 {
24492 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24493 struct vlr_context ctx = { type, NULL };
24494
24495 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24496 context_die);
24497 add_data_member_location_attribute (die, binfo, &ctx);
24498
24499 if (BINFO_VIRTUAL_P (binfo))
24500 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24501
24502 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24503 children, otherwise the default is DW_ACCESS_public. In DWARF2
24504 the default has always been DW_ACCESS_private. */
24505 if (access == access_public_node)
24506 {
24507 if (dwarf_version == 2
24508 || context_die->die_tag == DW_TAG_class_type)
24509 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24510 }
24511 else if (access == access_protected_node)
24512 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24513 else if (dwarf_version > 2
24514 && context_die->die_tag != DW_TAG_class_type)
24515 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24516 }
24517
24518 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24519 structure. */
24520 static bool
24521 is_variant_part (tree decl)
24522 {
24523 return (TREE_CODE (decl) == FIELD_DECL
24524 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24525 }
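
/* As a purely illustrative example (Ada is where such trees come from),
   a discriminated record like

       type Rec (Kind : Natural) is record
          case Kind is
             when 0      => I : Integer;
             when others => F : Float;
          end case;
       end record;

   has its "case" part lowered by the front end to a FIELD_DECL whose type
   is a QUAL_UNION_TYPE, with one RECORD_TYPE member per alternative, each
   carrying a DECL_QUALIFIER predicate over Kind (see gen_variant_part
   below).  */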
24526
24527 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24528 return the FIELD_DECL. Return NULL_TREE otherwise. */
24529
24530 static tree
24531 analyze_discr_in_predicate (tree operand, tree struct_type)
24532 {
24533 bool continue_stripping = true;
24534 while (continue_stripping)
24535 switch (TREE_CODE (operand))
24536 {
24537 CASE_CONVERT:
24538 operand = TREE_OPERAND (operand, 0);
24539 break;
24540 default:
24541 continue_stripping = false;
24542 break;
24543 }
24544
24545 /* Match field access to members of struct_type only. */
24546 if (TREE_CODE (operand) == COMPONENT_REF
24547 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24548 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24549 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24550 return TREE_OPERAND (operand, 1);
24551 else
24552 return NULL_TREE;
24553 }
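
/* E.g. (schematic): for a DECL_QUALIFIER predicate whose operand is
   (some_cast) ((PLACEHOLDER_EXPR struct_type).kind), the conversions are
   stripped and the FIELD_DECL for "kind" is returned; any operand that is
   not a COMPONENT_REF of a PLACEHOLDER_EXPR of STRUCT_TYPE yields
   NULL_TREE.  "kind" is just an arbitrary field name for the example.  */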
24554
24555 /* Check that SRC is a constant integer that can be represented as a native
24556 integer constant (either signed or unsigned). If so, store it into DEST and
24557 return true. Return false otherwise. */
24558
24559 static bool
24560 get_discr_value (tree src, dw_discr_value *dest)
24561 {
24562 tree discr_type = TREE_TYPE (src);
24563
24564 if (lang_hooks.types.get_debug_type)
24565 {
24566 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24567 if (debug_type != NULL)
24568 discr_type = debug_type;
24569 }
24570
24571 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24572 return false;
24573
24574 /* Signedness can vary between the original type and the debug type. This
24575 can happen for character types in Ada for instance: the character type
24576 used for code generation can be signed, to be compatible with the C one,
24577 but from a debugger point of view, it must be unsigned. */
24578 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24579 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24580
24581 if (is_orig_unsigned != is_debug_unsigned)
24582 src = fold_convert (discr_type, src);
24583
24584 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24585 return false;
24586
24587 dest->pos = is_debug_unsigned;
24588 if (is_debug_unsigned)
24589 dest->v.uval = tree_to_uhwi (src);
24590 else
24591 dest->v.sval = tree_to_shwi (src);
24592
24593 return true;
24594 }
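
/* Illustrative example: for an Ada discriminant whose code-generation type
   is a signed 8-bit character but whose debug type is unsigned, a source
   constant of -1 is re-folded to 255 above, and *DEST records an unsigned
   value (dest->pos set, dest->v.uval == 255).  The concrete types are only
   an example; the point is that the debug type's signedness wins.  */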
24595
24596 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24597 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24598 store NULL_TREE in DISCR_DECL. Otherwise:
24599
24600 - store the discriminant field in STRUCT_TYPE that controls the variant
24601 part to *DISCR_DECL
24602
24603 - put in *DISCR_LISTS_P an array where for each variant, the item
24604 represents the corresponding matching list of discriminant values.
24605
24606 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24607 the above array.
24608
24609 Note that when the array is allocated (i.e. when the analysis is
24610 successful), it is up to the caller to free the array. */
24611
24612 static void
24613 analyze_variants_discr (tree variant_part_decl,
24614 tree struct_type,
24615 tree *discr_decl,
24616 dw_discr_list_ref **discr_lists_p,
24617 unsigned *discr_lists_length)
24618 {
24619 tree variant_part_type = TREE_TYPE (variant_part_decl);
24620 tree variant;
24621 dw_discr_list_ref *discr_lists;
24622 unsigned i;
24623
24624 /* Compute how many variants there are in this variant part. */
24625 *discr_lists_length = 0;
24626 for (variant = TYPE_FIELDS (variant_part_type);
24627 variant != NULL_TREE;
24628 variant = DECL_CHAIN (variant))
24629 ++*discr_lists_length;
24630
24631 *discr_decl = NULL_TREE;
24632 *discr_lists_p
24633 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24634 sizeof (**discr_lists_p));
24635 discr_lists = *discr_lists_p;
24636
24637 /* And then analyze all variants to extract discriminant information for all
24638 of them. This analysis is conservative: as soon as we detect something we
24639 do not support, abort everything and pretend we found nothing. */
24640 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24641 variant != NULL_TREE;
24642 variant = DECL_CHAIN (variant), ++i)
24643 {
24644 tree match_expr = DECL_QUALIFIER (variant);
24645
24646 /* Now, try to analyze the predicate and deduce a discriminant for
24647 it. */
24648 if (match_expr == boolean_true_node)
24649 /* Typically happens for the default variant: it matches all cases that
24650 previous variants rejected. Don't output any matching value for
24651 this one. */
24652 continue;
24653
24654 /* The following loop tries to iterate over each discriminant
24655 possibility: single values or ranges. */
24656 while (match_expr != NULL_TREE)
24657 {
24658 tree next_round_match_expr;
24659 tree candidate_discr = NULL_TREE;
24660 dw_discr_list_ref new_node = NULL;
24661
24662 /* Possibilities are matched one after the other by nested
24663 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24664 continue with the rest at next iteration. */
24665 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24666 {
24667 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24668 match_expr = TREE_OPERAND (match_expr, 1);
24669 }
24670 else
24671 next_round_match_expr = NULL_TREE;
24672
24673 if (match_expr == boolean_false_node)
24674 /* This sub-expression matches nothing: just wait for the next
24675 one. */
24676 ;
24677
24678 else if (TREE_CODE (match_expr) == EQ_EXPR)
24679 {
24680 /* We are matching: <discr_field> == <integer_cst>
24681 This sub-expression matches a single value. */
24682 tree integer_cst = TREE_OPERAND (match_expr, 1);
24683
24684 candidate_discr
24685 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24686 struct_type);
24687
24688 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24689 if (!get_discr_value (integer_cst,
24690 &new_node->dw_discr_lower_bound))
24691 goto abort;
24692 new_node->dw_discr_range = false;
24693 }
24694
24695 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24696 {
24697 /* We are matching:
24698 <discr_field> > <integer_cst>
24699 && <discr_field> < <integer_cst>.
24700 This sub-expression matches the range of values between the
24701 two matched integer constants. Note that comparisons can be
24702 inclusive or exclusive. */
24703 tree candidate_discr_1, candidate_discr_2;
24704 tree lower_cst, upper_cst;
24705 bool lower_cst_included, upper_cst_included;
24706 tree lower_op = TREE_OPERAND (match_expr, 0);
24707 tree upper_op = TREE_OPERAND (match_expr, 1);
24708
24709 /* When the comparison is exclusive, the integer constant is not
24710 the discriminant range bound we are looking for: we will have
24711 to increment or decrement it. */
24712 if (TREE_CODE (lower_op) == GE_EXPR)
24713 lower_cst_included = true;
24714 else if (TREE_CODE (lower_op) == GT_EXPR)
24715 lower_cst_included = false;
24716 else
24717 goto abort;
24718
24719 if (TREE_CODE (upper_op) == LE_EXPR)
24720 upper_cst_included = true;
24721 else if (TREE_CODE (upper_op) == LT_EXPR)
24722 upper_cst_included = false;
24723 else
24724 goto abort;
24725
24726 /* Extract the discriminant from the first operand and check it
24727 is consistent with the same analysis in the second
24728 operand. */
24729 candidate_discr_1
24730 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24731 struct_type);
24732 candidate_discr_2
24733 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24734 struct_type);
24735 if (candidate_discr_1 == candidate_discr_2)
24736 candidate_discr = candidate_discr_1;
24737 else
24738 goto abort;
24739
24740 /* Extract bounds from both. */
24741 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24742 lower_cst = TREE_OPERAND (lower_op, 1);
24743 upper_cst = TREE_OPERAND (upper_op, 1);
24744
24745 if (!lower_cst_included)
24746 lower_cst
24747 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24748 build_int_cst (TREE_TYPE (lower_cst), 1));
24749 if (!upper_cst_included)
24750 upper_cst
24751 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24752 build_int_cst (TREE_TYPE (upper_cst), 1));
24753
24754 if (!get_discr_value (lower_cst,
24755 &new_node->dw_discr_lower_bound)
24756 || !get_discr_value (upper_cst,
24757 &new_node->dw_discr_upper_bound))
24758 goto abort;
24759
24760 new_node->dw_discr_range = true;
24761 }
24762
24763 else
24764 /* Unsupported sub-expression: we cannot determine the set of
24765 matching discriminant values. Abort everything. */
24766 goto abort;
24767
24768 /* If the discriminant info is not consistent with what we saw so
24769 far, consider the analysis failed and abort everything. */
24770 if (candidate_discr == NULL_TREE
24771 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24772 goto abort;
24773 else
24774 *discr_decl = candidate_discr;
24775
24776 if (new_node != NULL)
24777 {
24778 new_node->dw_discr_next = discr_lists[i];
24779 discr_lists[i] = new_node;
24780 }
24781 match_expr = next_round_match_expr;
24782 }
24783 }
24784
24785 /* If we reach this point, we could match everything we were interested
24786 in. */
24787 return;
24788
24789 abort:
24790 /* Clean up all data structures and return no result. */
24791 free (*discr_lists_p);
24792 *discr_lists_p = NULL;
24793 *discr_decl = NULL_TREE;
24794 }
24795
24796 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24797 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24798 under CONTEXT_DIE.
24799
24800 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24801 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members of
24802 this type, which are record types, represent the available variants and each
24803 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24804 values are inferred from these attributes.
24805
24806 In trees, the offsets for the fields inside these sub-records are relative
24807 to the variant part itself, whereas the corresponding DIEs should have
24808 offset attributes that are relative to the embedding record base address.
24809 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24810 must be an expression that computes the offset of the variant part to
24811 describe in DWARF. */
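
   /* Schematically, the DIE structure produced below looks like (sketch
      only; attribute placement depends on the checks that follow):

        DW_TAG_variant_part
          DW_AT_discr              -> DIE of the discriminant FIELD_DECL
          DW_TAG_variant
            DW_AT_discr_value or DW_AT_discr_list  (omitted for the default)
            <DIEs for the members of that variant>
          DW_TAG_variant
            ...  */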
24812
24813 static void
24814 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24815 dw_die_ref context_die)
24816 {
24817 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24818 tree variant_part_offset = vlr_ctx->variant_part_offset;
24819 struct loc_descr_context ctx = {
24820 vlr_ctx->struct_type, /* context_type */
24821 NULL_TREE, /* base_decl */
24822 NULL, /* dpi */
24823 false, /* placeholder_arg */
24824 false /* placeholder_seen */
24825 };
24826
24827 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24828 NULL_TREE if there is no such field. */
24829 tree discr_decl = NULL_TREE;
24830 dw_discr_list_ref *discr_lists;
24831 unsigned discr_lists_length = 0;
24832 unsigned i;
24833
24834 dw_die_ref dwarf_proc_die = NULL;
24835 dw_die_ref variant_part_die
24836 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24837
24838 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24839
24840 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24841 &discr_decl, &discr_lists, &discr_lists_length);
24842
24843 if (discr_decl != NULL_TREE)
24844 {
24845 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24846
24847 if (discr_die)
24848 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24849 else
24850 /* We have no DIE for the discriminant, so just discard all
24851 discriminant information in the output. */
24852 discr_decl = NULL_TREE;
24853 }
24854
24855 /* If the offset for this variant part is more complex than a constant,
24856 create a DWARF procedure for it so that we will not have to generate DWARF
24857 expressions for it for each member. */
24858 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24859 && (dwarf_version >= 3 || !dwarf_strict))
24860 {
24861 const tree dwarf_proc_fndecl
24862 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24863 build_function_type (TREE_TYPE (variant_part_offset),
24864 NULL_TREE));
24865 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24866 const dw_loc_descr_ref dwarf_proc_body
24867 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24868
24869 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24870 dwarf_proc_fndecl, context_die);
24871 if (dwarf_proc_die != NULL)
24872 variant_part_offset = dwarf_proc_call;
24873 }
24874
24875 /* Output DIEs for all variants. */
24876 i = 0;
24877 for (tree variant = TYPE_FIELDS (variant_part_type);
24878 variant != NULL_TREE;
24879 variant = DECL_CHAIN (variant), ++i)
24880 {
24881 tree variant_type = TREE_TYPE (variant);
24882 dw_die_ref variant_die;
24883
24884 /* All variants (i.e. members of a variant part) are supposed to be
24885 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24886 under these records. */
24887 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24888
24889 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24890 equate_decl_number_to_die (variant, variant_die);
24891
24892 /* Output discriminant values this variant matches, if any. */
24893 if (discr_decl == NULL || discr_lists[i] == NULL)
24894 /* If we have no discriminant information at all, or none for this
24895 variant, it is probably the default variant: as the standard says,
24896 don't output any discriminant value/list attribute. */
24897 ;
24898 else if (discr_lists[i]->dw_discr_next == NULL
24899 && !discr_lists[i]->dw_discr_range)
24900 /* If there is only one accepted value, don't bother outputting a
24901 list. */
24902 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24903 else
24904 add_discr_list (variant_die, discr_lists[i]);
24905
24906 for (tree member = TYPE_FIELDS (variant_type);
24907 member != NULL_TREE;
24908 member = DECL_CHAIN (member))
24909 {
24910 struct vlr_context vlr_sub_ctx = {
24911 vlr_ctx->struct_type, /* struct_type */
24912 NULL /* variant_part_offset */
24913 };
24914 if (is_variant_part (member))
24915 {
24916 /* All offsets for fields inside variant parts are relative to
24917 the top-level embedding RECORD_TYPE's base address. On the
24918 other hand, offsets in GCC's types are relative to the
24919 nested-most variant part. So we have to sum offsets each time
24920 we recurse. */
24921
24922 vlr_sub_ctx.variant_part_offset
24923 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24924 variant_part_offset, byte_position (member));
24925 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24926 }
24927 else
24928 {
24929 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24930 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24931 }
24932 }
24933 }
24934
24935 free (discr_lists);
24936 }
24937
24938 /* Generate a DIE for a class member. */
24939
24940 static void
24941 gen_member_die (tree type, dw_die_ref context_die)
24942 {
24943 tree member;
24944 tree binfo = TYPE_BINFO (type);
24945
24946 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24947
24948 /* If this is not an incomplete type, output descriptions of each of its
24949 members. Note that as we output the DIEs necessary to represent the
24950 members of this record or union type, we will also be trying to output
24951 DIEs to represent the *types* of those members. However the `type'
24952 function (above) will specifically avoid generating type DIEs for member
24953 types *within* the list of member DIEs for this (containing) type except
24954 for those types (of members) which are explicitly marked as also being
24955 members of this (containing) type themselves. The g++ front end can
24956 force any given type to be treated as a member of some other (containing)
24957 type by setting the TYPE_CONTEXT of the given (member) type to point to
24958 the TREE node representing the appropriate (containing) type. */
24959
24960 /* First output info about the base classes. */
24961 if (binfo)
24962 {
24963 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24964 int i;
24965 tree base;
24966
24967 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24968 gen_inheritance_die (base,
24969 (accesses ? (*accesses)[i] : access_public_node),
24970 type,
24971 context_die);
24972 }
24973
24974 /* Now output info about the data members and type members. */
24975 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24976 {
24977 struct vlr_context vlr_ctx = { type, NULL_TREE };
24978 bool static_inline_p
24979 = (TREE_STATIC (member)
24980 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24981 != -1));
24982
24983 /* Ignore clones. */
24984 if (DECL_ABSTRACT_ORIGIN (member))
24985 continue;
24986
24987 /* If we thought we were generating minimal debug info for TYPE
24988 and then changed our minds, some of the member declarations
24989 may have already been defined. Don't define them again, but
24990 do put them in the right order. */
24991
24992 if (dw_die_ref child = lookup_decl_die (member))
24993 {
24994 /* Handle inline static data members, which only have in-class
24995 declarations. */
24996 dw_die_ref ref = NULL;
24997 if (child->die_tag == DW_TAG_variable
24998 && child->die_parent == comp_unit_die ())
24999 {
25000 ref = get_AT_ref (child, DW_AT_specification);
25001 /* For C++17 inline static data members followed by a redundant
25002 out-of-class redeclaration, we might get here with
25003 child being the DIE created for the out-of-class
25004 redeclaration and with its DW_AT_specification being
25005 the DIE created for the in-class definition. We want to
25006 reparent the latter, and don't want to create another
25007 DIE with DW_AT_specification in that case, because
25008 we already have one. */
25009 if (ref
25010 && static_inline_p
25011 && ref->die_tag == DW_TAG_variable
25012 && ref->die_parent == comp_unit_die ()
25013 && get_AT (ref, DW_AT_specification) == NULL)
25014 {
25015 child = ref;
25016 ref = NULL;
25017 static_inline_p = false;
25018 }
25019 }
25020
25021 if (child->die_tag == DW_TAG_variable
25022 && child->die_parent == comp_unit_die ()
25023 && ref == NULL)
25024 {
25025 reparent_child (child, context_die);
25026 if (dwarf_version < 5)
25027 child->die_tag = DW_TAG_member;
25028 }
25029 else
25030 splice_child_die (context_die, child);
25031 }
25032
25033 /* Do not generate standard DWARF for variant parts if we are generating
25034 the corresponding GNAT encodings: DIEs generated for both would
25035 conflict in our mappings. */
25036 else if (is_variant_part (member)
25037 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25038 {
25039 vlr_ctx.variant_part_offset = byte_position (member);
25040 gen_variant_part (member, &vlr_ctx, context_die);
25041 }
25042 else
25043 {
25044 vlr_ctx.variant_part_offset = NULL_TREE;
25045 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25046 }
25047
25048 /* For C++ inline static data members emit immediately a DW_TAG_variable
25049 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25050 DW_AT_specification. */
25051 if (static_inline_p)
25052 {
25053 int old_extern = DECL_EXTERNAL (member);
25054 DECL_EXTERNAL (member) = 0;
25055 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25056 DECL_EXTERNAL (member) = old_extern;
25057 }
25058 }
25059 }
25060
25061 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25062 is set, we pretend that the type was never defined, so we only get the
25063 member DIEs needed by later specification DIEs. */
25064
25065 static void
25066 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25067 enum debug_info_usage usage)
25068 {
25069 if (TREE_ASM_WRITTEN (type))
25070 {
25071 /* Fill in the bounds of variable-length fields in late dwarf if
25072 still incomplete. */
25073 if (!early_dwarf && variably_modified_type_p (type, NULL))
25074 for (tree member = TYPE_FIELDS (type);
25075 member;
25076 member = DECL_CHAIN (member))
25077 fill_variable_array_bounds (TREE_TYPE (member));
25078 return;
25079 }
25080
25081 dw_die_ref type_die = lookup_type_die (type);
25082 dw_die_ref scope_die = 0;
25083 int nested = 0;
25084 int complete = (TYPE_SIZE (type)
25085 && (! TYPE_STUB_DECL (type)
25086 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25087 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25088 complete = complete && should_emit_struct_debug (type, usage);
25089
25090 if (type_die && ! complete)
25091 return;
25092
25093 if (TYPE_CONTEXT (type) != NULL_TREE
25094 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25095 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25096 nested = 1;
25097
25098 scope_die = scope_die_for (type, context_die);
25099
25100 /* Generate child DIEs for template parameters. */
25101 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25102 schedule_generic_params_dies_gen (type);
25103
25104 if (! type_die || (nested && is_cu_die (scope_die)))
25105 /* First occurrence of type or toplevel definition of nested class. */
25106 {
25107 dw_die_ref old_die = type_die;
25108
25109 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25110 ? record_type_tag (type) : DW_TAG_union_type,
25111 scope_die, type);
25112 equate_type_number_to_die (type, type_die);
25113 if (old_die)
25114 add_AT_specification (type_die, old_die);
25115 else
25116 add_name_attribute (type_die, type_tag (type));
25117 }
25118 else
25119 remove_AT (type_die, DW_AT_declaration);
25120
25121 /* If this type has been completed, then give it a byte_size attribute and
25122 then give a list of members. */
25123 if (complete && !ns_decl)
25124 {
25125 /* Prevent infinite recursion in cases where the type of some member of
25126 this type is expressed in terms of this type itself. */
25127 TREE_ASM_WRITTEN (type) = 1;
25128 add_byte_size_attribute (type_die, type);
25129 add_alignment_attribute (type_die, type);
25130 if (TYPE_STUB_DECL (type) != NULL_TREE)
25131 {
25132 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25133 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25134 }
25135
25136 /* If the first reference to this type was as the return type of an
25137 inline function, then it may not have a parent. Fix this now. */
25138 if (type_die->die_parent == NULL)
25139 add_child_die (scope_die, type_die);
25140
25141 gen_member_die (type, type_die);
25142
25143 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25144 if (TYPE_ARTIFICIAL (type))
25145 add_AT_flag (type_die, DW_AT_artificial, 1);
25146
25147 /* GNU extension: Record what type our vtable lives in. */
25148 if (TYPE_VFIELD (type))
25149 {
25150 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25151
25152 gen_type_die (vtype, context_die);
25153 add_AT_die_ref (type_die, DW_AT_containing_type,
25154 lookup_type_die (vtype));
25155 }
25156 }
25157 else
25158 {
25159 add_AT_flag (type_die, DW_AT_declaration, 1);
25160
25161 /* We don't need to do this for function-local types. */
25162 if (TYPE_STUB_DECL (type)
25163 && ! decl_function_context (TYPE_STUB_DECL (type)))
25164 vec_safe_push (incomplete_types, type);
25165 }
25166
25167 if (get_AT (type_die, DW_AT_name))
25168 add_pubtype (type, type_die);
25169 }
25170
25171 /* Generate a DIE for a subroutine _type_. */
25172
25173 static void
25174 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25175 {
25176 tree return_type = TREE_TYPE (type);
25177 dw_die_ref subr_die
25178 = new_die (DW_TAG_subroutine_type,
25179 scope_die_for (type, context_die), type);
25180
25181 equate_type_number_to_die (type, subr_die);
25182 add_prototyped_attribute (subr_die, type);
25183 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25184 context_die);
25185 add_alignment_attribute (subr_die, type);
25186 gen_formal_types_die (type, subr_die);
25187
25188 if (get_AT (subr_die, DW_AT_name))
25189 add_pubtype (type, subr_die);
25190 if ((dwarf_version >= 5 || !dwarf_strict)
25191 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25192 add_AT_flag (subr_die, DW_AT_reference, 1);
25193 if ((dwarf_version >= 5 || !dwarf_strict)
25194 && lang_hooks.types.type_dwarf_attribute (type,
25195 DW_AT_rvalue_reference) != -1)
25196 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25197 }
25198
25199 /* Generate a DIE for a type definition. */
25200
25201 static void
25202 gen_typedef_die (tree decl, dw_die_ref context_die)
25203 {
25204 dw_die_ref type_die;
25205 tree type;
25206
25207 if (TREE_ASM_WRITTEN (decl))
25208 {
25209 if (DECL_ORIGINAL_TYPE (decl))
25210 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25211 return;
25212 }
25213
25214 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25215 checks in process_scope_var and modified_type_die), this should be called
25216 only for original types. */
25217 gcc_assert (decl_ultimate_origin (decl) == NULL
25218 || decl_ultimate_origin (decl) == decl);
25219
25220 TREE_ASM_WRITTEN (decl) = 1;
25221 type_die = new_die (DW_TAG_typedef, context_die, decl);
25222
25223 add_name_and_src_coords_attributes (type_die, decl);
25224 if (DECL_ORIGINAL_TYPE (decl))
25225 {
25226 type = DECL_ORIGINAL_TYPE (decl);
25227 if (type == error_mark_node)
25228 return;
25229
25230 gcc_assert (type != TREE_TYPE (decl));
25231 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25232 }
25233 else
25234 {
25235 type = TREE_TYPE (decl);
25236 if (type == error_mark_node)
25237 return;
25238
25239 if (is_naming_typedef_decl (TYPE_NAME (type)))
25240 {
25241 /* Here, we are in the case of decl being a typedef naming
25242 an anonymous type, e.g:
25243 typedef struct {...} foo;
25244 In that case TREE_TYPE (decl) is not a typedef variant
25245 type and TYPE_NAME of the anonymous type is set to the
25246 TYPE_DECL of the typedef. This construct is emitted by
25247 the C++ FE.
25248
25249 TYPE is the anonymous struct named by the typedef
25250 DECL. As we need the DW_AT_type attribute of the
25251 DW_TAG_typedef to point to the DIE of TYPE, let's
25252 generate that DIE right away. add_type_attribute
25253 called below will then pick (via lookup_type_die) that
25254 anonymous struct DIE. */
25255 if (!TREE_ASM_WRITTEN (type))
25256 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25257
25258 /* This is a GNU Extension. We are adding a
25259 DW_AT_linkage_name attribute to the DIE of the
25260 anonymous struct TYPE. The value of that attribute
25261 is the name of the typedef decl naming the anonymous
25262 struct. This greatly eases the work of consumers of
25263 this debug info. */
25264 add_linkage_name_raw (lookup_type_die (type), decl);
25265 }
25266 }
25267
25268 add_type_attribute (type_die, type, decl_quals (decl), false,
25269 context_die);
25270
25271 if (is_naming_typedef_decl (decl))
25272 /* We want that all subsequent calls to lookup_type_die with
25273 TYPE in argument yield the DW_TAG_typedef we have just
25274 created. */
25275 equate_type_number_to_die (type, type_die);
25276
25277 add_alignment_attribute (type_die, TREE_TYPE (decl));
25278
25279 add_accessibility_attribute (type_die, decl);
25280
25281 if (DECL_ABSTRACT_P (decl))
25282 equate_decl_number_to_die (decl, type_die);
25283
25284 if (get_AT (type_die, DW_AT_name))
25285 add_pubtype (decl, type_die);
25286 }
25287
25288 /* Generate a DIE for a struct, class, enum or union type. */
25289
25290 static void
25291 gen_tagged_type_die (tree type,
25292 dw_die_ref context_die,
25293 enum debug_info_usage usage)
25294 {
25295 if (type == NULL_TREE
25296 || !is_tagged_type (type))
25297 return;
25298
25299 if (TREE_ASM_WRITTEN (type))
25300 ;
25301 /* If this is a nested type whose containing class hasn't been written
25302 out yet, writing it out will cover this one, too. This does not apply
25303 to instantiations of member class templates; they need to be added to
25304 the containing class as they are generated. FIXME: This hurts the
25305 idea of combining type decls from multiple TUs, since we can't predict
25306 what set of template instantiations we'll get. */
25307 else if (TYPE_CONTEXT (type)
25308 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25309 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25310 {
25311 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25312
25313 if (TREE_ASM_WRITTEN (type))
25314 return;
25315
25316 /* If that failed, attach ourselves to the stub. */
25317 context_die = lookup_type_die (TYPE_CONTEXT (type));
25318 }
25319 else if (TYPE_CONTEXT (type) != NULL_TREE
25320 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25321 {
25322 /* If this type is local to a function that hasn't been written
25323 out yet, use a NULL context for now; it will be fixed up in
25324 decls_for_scope. */
25325 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25326 /* A declaration DIE doesn't count; nested types need to go in the
25327 specification. */
25328 if (context_die && is_declaration_die (context_die))
25329 context_die = NULL;
25330 }
25331 else
25332 context_die = declare_in_namespace (type, context_die);
25333
25334 if (TREE_CODE (type) == ENUMERAL_TYPE)
25335 {
25336 /* This might have been written out by the call to
25337 declare_in_namespace. */
25338 if (!TREE_ASM_WRITTEN (type))
25339 gen_enumeration_type_die (type, context_die);
25340 }
25341 else
25342 gen_struct_or_union_type_die (type, context_die, usage);
25343
25344 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25345 it up if it is ever completed. gen_*_type_die will set it for us
25346 when appropriate. */
25347 }
25348
25349 /* Generate a type description DIE. */
25350
25351 static void
25352 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25353 enum debug_info_usage usage)
25354 {
25355 struct array_descr_info info;
25356
25357 if (type == NULL_TREE || type == error_mark_node)
25358 return;
25359
25360 if (flag_checking && type)
25361 verify_type (type);
25362
25363 if (TYPE_NAME (type) != NULL_TREE
25364 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25365 && is_redundant_typedef (TYPE_NAME (type))
25366 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25367 /* The DECL of this type is a typedef we don't want to emit debug
25368 info for, but we want debug info for its underlying type.
25369 This can happen, e.g., for the injected-class-name of a C++
25370 type. */
25371 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25372
25373 /* If TYPE is a typedef type variant, let's generate debug info
25374 for the parent typedef of which TYPE is a variant. */
25375 if (typedef_variant_p (type))
25376 {
25377 if (TREE_ASM_WRITTEN (type))
25378 return;
25379
25380 tree name = TYPE_NAME (type);
25381 tree origin = decl_ultimate_origin (name);
25382 if (origin != NULL && origin != name)
25383 {
25384 gen_decl_die (origin, NULL, NULL, context_die);
25385 return;
25386 }
25387
25388 /* Prevent broken recursion; we can't hand off to the same type. */
25389 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25390
25391 /* Give typedefs the right scope. */
25392 context_die = scope_die_for (type, context_die);
25393
25394 TREE_ASM_WRITTEN (type) = 1;
25395
25396 gen_decl_die (name, NULL, NULL, context_die);
25397 return;
25398 }
25399
25400 /* If type is an anonymous tagged type named by a typedef, let's
25401 generate debug info for the typedef. */
25402 if (is_naming_typedef_decl (TYPE_NAME (type)))
25403 {
25404 /* Give typedefs the right scope. */
25405 context_die = scope_die_for (type, context_die);
25406
25407 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25408 return;
25409 }
25410
25411 if (lang_hooks.types.get_debug_type)
25412 {
25413 tree debug_type = lang_hooks.types.get_debug_type (type);
25414
25415 if (debug_type != NULL_TREE && debug_type != type)
25416 {
25417 gen_type_die_with_usage (debug_type, context_die, usage);
25418 return;
25419 }
25420 }
25421
25422 /* We are going to output a DIE to represent the unqualified version
25423 of this type (i.e. without any const or volatile qualifiers) so
25424 get the main variant (i.e. the unqualified version) of this type
25425 now. (Vectors and arrays are special because the debugging info is in the
25426 cloned type itself. Similarly function/method types can contain extra
25427 ref-qualification). */
25428 if (TREE_CODE (type) == FUNCTION_TYPE
25429 || TREE_CODE (type) == METHOD_TYPE)
25430 {
25431 /* For function/method types, can't use type_main_variant here,
25432 because that can have different ref-qualifiers for C++,
25433 but try to canonicalize. */
25434 tree main = TYPE_MAIN_VARIANT (type);
25435 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25436 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25437 && check_base_type (t, main)
25438 && check_lang_type (t, type))
25439 {
25440 type = t;
25441 break;
25442 }
25443 }
25444 else if (TREE_CODE (type) != VECTOR_TYPE
25445 && TREE_CODE (type) != ARRAY_TYPE)
25446 type = type_main_variant (type);
25447
25448 /* If this is an array type with hidden descriptor, handle it first. */
25449 if (!TREE_ASM_WRITTEN (type)
25450 && lang_hooks.types.get_array_descr_info)
25451 {
25452 memset (&info, 0, sizeof (info));
25453 if (lang_hooks.types.get_array_descr_info (type, &info))
25454 {
25455 /* Fortran sometimes emits array types with no dimension. */
25456 gcc_assert (info.ndimensions >= 0
25457 && (info.ndimensions
25458 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25459 gen_descr_array_type_die (type, &info, context_die);
25460 TREE_ASM_WRITTEN (type) = 1;
25461 return;
25462 }
25463 }
25464
25465 if (TREE_ASM_WRITTEN (type))
25466 {
25467 /* Variable-length types may be incomplete even if
25468 TREE_ASM_WRITTEN. For such types, fall through to
25469 gen_array_type_die() and possibly fill in
25470 DW_AT_{upper,lower}_bound attributes. */
25471 if ((TREE_CODE (type) != ARRAY_TYPE
25472 && TREE_CODE (type) != RECORD_TYPE
25473 && TREE_CODE (type) != UNION_TYPE
25474 && TREE_CODE (type) != QUAL_UNION_TYPE)
25475 || !variably_modified_type_p (type, NULL))
25476 return;
25477 }
25478
25479 switch (TREE_CODE (type))
25480 {
25481 case ERROR_MARK:
25482 break;
25483
25484 case POINTER_TYPE:
25485 case REFERENCE_TYPE:
25486 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25487 ensures that the gen_type_die recursion will terminate even if the
25488 type is recursive. Recursive types are possible in Ada. */
25489 /* ??? We could perhaps do this for all types before the switch
25490 statement. */
25491 TREE_ASM_WRITTEN (type) = 1;
25492
25493 /* For these types, all that is required is that we output a DIE (or a
25494 set of DIEs) to represent the "basis" type. */
25495 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25496 DINFO_USAGE_IND_USE);
25497 break;
25498
25499 case OFFSET_TYPE:
25500 /* This code is used for C++ pointer-to-data-member types.
25501 Output a description of the relevant class type. */
25502 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25503 DINFO_USAGE_IND_USE);
25504
25505 /* Output a description of the type of the object pointed to. */
25506 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25507 DINFO_USAGE_IND_USE);
25508
25509 /* Now output a DIE to represent this pointer-to-data-member type
25510 itself. */
25511 gen_ptr_to_mbr_type_die (type, context_die);
25512 break;
25513
25514 case FUNCTION_TYPE:
25515 /* Force out return type (in case it wasn't forced out already). */
25516 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25517 DINFO_USAGE_DIR_USE);
25518 gen_subroutine_type_die (type, context_die);
25519 break;
25520
25521 case METHOD_TYPE:
25522 /* Force out return type (in case it wasn't forced out already). */
25523 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25524 DINFO_USAGE_DIR_USE);
25525 gen_subroutine_type_die (type, context_die);
25526 break;
25527
25528 case ARRAY_TYPE:
25529 case VECTOR_TYPE:
25530 gen_array_type_die (type, context_die);
25531 break;
25532
25533 case ENUMERAL_TYPE:
25534 case RECORD_TYPE:
25535 case UNION_TYPE:
25536 case QUAL_UNION_TYPE:
25537 gen_tagged_type_die (type, context_die, usage);
25538 return;
25539
25540 case VOID_TYPE:
25541 case INTEGER_TYPE:
25542 case REAL_TYPE:
25543 case FIXED_POINT_TYPE:
25544 case COMPLEX_TYPE:
25545 case BOOLEAN_TYPE:
25546 /* No DIEs needed for fundamental types. */
25547 break;
25548
25549 case NULLPTR_TYPE:
25550 case LANG_TYPE:
25551 /* Just use DW_TAG_unspecified_type. */
25552 {
25553 dw_die_ref type_die = lookup_type_die (type);
25554 if (type_die == NULL)
25555 {
25556 tree name = TYPE_IDENTIFIER (type);
25557 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25558 type);
25559 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25560 equate_type_number_to_die (type, type_die);
25561 }
25562 }
25563 break;
25564
25565 default:
25566 if (is_cxx_auto (type))
25567 {
25568 tree name = TYPE_IDENTIFIER (type);
25569 dw_die_ref *die = (name == get_identifier ("auto")
25570 ? &auto_die : &decltype_auto_die);
25571 if (!*die)
25572 {
25573 *die = new_die (DW_TAG_unspecified_type,
25574 comp_unit_die (), NULL_TREE);
25575 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25576 }
25577 equate_type_number_to_die (type, *die);
25578 break;
25579 }
25580 gcc_unreachable ();
25581 }
25582
25583 TREE_ASM_WRITTEN (type) = 1;
25584 }
25585
25586 static void
25587 gen_type_die (tree type, dw_die_ref context_die)
25588 {
25589 if (type != error_mark_node)
25590 {
25591 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25592 if (flag_checking)
25593 {
25594 dw_die_ref die = lookup_type_die (type);
25595 if (die)
25596 check_die (die);
25597 }
25598 }
25599 }
25600
25601 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25602 things which are local to the given block. */
25603
25604 static void
25605 gen_block_die (tree stmt, dw_die_ref context_die)
25606 {
25607 int must_output_die = 0;
25608 bool inlined_func;
25609
25610 /* Ignore blocks that are NULL. */
25611 if (stmt == NULL_TREE)
25612 return;
25613
25614 inlined_func = inlined_function_outer_scope_p (stmt);
25615
25616 /* If the block is one fragment of a non-contiguous block, do not
25617 process the variables, since they will have been done by the
25618 origin block. Do process subblocks. */
25619 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25620 {
25621 tree sub;
25622
25623 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25624 gen_block_die (sub, context_die);
25625
25626 return;
25627 }
25628
25629 /* Determine if we need to output any Dwarf DIEs at all to represent this
25630 block. */
25631 if (inlined_func)
25632 /* The outer scopes for inlinings *must* always be represented. We
25633 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25634 must_output_die = 1;
25635 else if (BLOCK_DIE (stmt))
25636 /* If we already have a DIE then it was filled early. Meanwhile
25637 we might have pruned all BLOCK_VARS as optimized out, but we
25638 still want to generate high/low PC attributes, so output it. */
25639 must_output_die = 1;
25640 else if (TREE_USED (stmt)
25641 || TREE_ASM_WRITTEN (stmt)
25642 || BLOCK_ABSTRACT (stmt))
25643 {
25644 /* Determine if this block directly contains any "significant"
25645 local declarations which we will need to output DIEs for. */
25646 if (debug_info_level > DINFO_LEVEL_TERSE)
25647 {
25648 /* We are not in terse mode so any local declaration that
25649 is not ignored for debug purposes counts as being a
25650 "significant" one. */
25651 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25652 must_output_die = 1;
25653 else
25654 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25655 if (!DECL_IGNORED_P (var))
25656 {
25657 must_output_die = 1;
25658 break;
25659 }
25660 }
25661 else if (!dwarf2out_ignore_block (stmt))
25662 must_output_die = 1;
25663 }
25664
25665 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25666 DIE for any block which contains no significant local declarations at
25667 all. Rather, in such cases we just call `decls_for_scope' so that any
25668 needed Dwarf info for any sub-blocks will get properly generated. Note
25669 that in terse mode, our definition of what constitutes a "significant"
25670 local declaration gets restricted to include only inlined function
25671 instances and local (nested) function definitions. */
25672 if (must_output_die)
25673 {
25674 if (inlined_func)
25675 {
25676 /* If STMT block is abstract, that means we have been called
25677 indirectly from dwarf2out_abstract_function.
25678 	     That function rightfully marks the descendant blocks (of
25679 the abstract function it is dealing with) as being abstract,
25680 precisely to prevent us from emitting any
25681 	     DW_TAG_inlined_subroutine DIE as a descendant
25682 of an abstract function instance. So in that case, we should
25683 not call gen_inlined_subroutine_die.
25684
25685 Later though, when cgraph asks dwarf2out to emit info
25686 for the concrete instance of the function decl into which
25687 	     the concrete instance of STMT got inlined, the latter will lead
25688 to the generation of a DW_TAG_inlined_subroutine DIE. */
25689 if (! BLOCK_ABSTRACT (stmt))
25690 gen_inlined_subroutine_die (stmt, context_die);
25691 }
25692 else
25693 gen_lexical_block_die (stmt, context_die);
25694 }
25695 else
25696 decls_for_scope (stmt, context_die);
25697 }
25698
25699 /* Process variable DECL (or variable with origin ORIGIN) within
25700 block STMT and add it to CONTEXT_DIE. */
25701 static void
25702 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25703 {
25704 dw_die_ref die;
25705 tree decl_or_origin = decl ? decl : origin;
25706
25707 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25708 die = lookup_decl_die (decl_or_origin);
25709 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25710 {
25711 if (TYPE_DECL_IS_STUB (decl_or_origin))
25712 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25713 else
25714 die = lookup_decl_die (decl_or_origin);
25715 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25716 if (! die && ! early_dwarf)
25717 return;
25718 }
25719 else
25720 die = NULL;
25721
25722 /* Avoid creating DIEs for local typedefs and concrete static variables that
25723 will only be pruned later. */
25724 if ((origin || decl_ultimate_origin (decl))
25725 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25726 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25727 {
25728 origin = decl_ultimate_origin (decl_or_origin);
25729 if (decl && VAR_P (decl) && die != NULL)
25730 {
25731 die = lookup_decl_die (origin);
25732 if (die != NULL)
25733 equate_decl_number_to_die (decl, die);
25734 }
25735 return;
25736 }
25737
25738 if (die != NULL && die->die_parent == NULL)
25739 add_child_die (context_die, die);
25740 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25741 {
25742 if (early_dwarf)
25743 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25744 stmt, context_die);
25745 }
25746 else
25747 {
25748 if (decl && DECL_P (decl))
25749 {
25750 die = lookup_decl_die (decl);
25751
25752 /* Early created DIEs do not have a parent as the decls refer
25753 to the function as DECL_CONTEXT rather than the BLOCK. */
25754 if (die && die->die_parent == NULL)
25755 {
25756 gcc_assert (in_lto_p);
25757 add_child_die (context_die, die);
25758 }
25759 }
25760
25761 gen_decl_die (decl, origin, NULL, context_die);
25762 }
25763 }
25764
25765 /* Generate all of the decls declared within a given scope and (recursively)
25766 all of its sub-blocks. */
25767
25768 static void
25769 decls_for_scope (tree stmt, dw_die_ref context_die)
25770 {
25771 tree decl;
25772 unsigned int i;
25773 tree subblocks;
25774
25775 /* Ignore NULL blocks. */
25776 if (stmt == NULL_TREE)
25777 return;
25778
25779 /* Output the DIEs to represent all of the data objects and typedefs
25780 declared directly within this block but not within any nested
25781 sub-blocks. Also, nested function and tag DIEs have been
25782 generated with a parent of NULL; fix that up now. We don't
25783 have to do this if we're at -g1. */
25784 if (debug_info_level > DINFO_LEVEL_TERSE)
25785 {
25786 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25787 process_scope_var (stmt, decl, NULL_TREE, context_die);
25788 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25789 origin - avoid doing this twice as we have no good way to see
25790 if we've done it once already. */
25791 if (! early_dwarf)
25792 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25793 {
25794 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25795 if (decl == current_function_decl)
25796 		/* Ignore declarations of the current function: although they
25797 		   are declarations, gen_subprogram_die would treat them as
25798 		   definitions again because they are equal to
25799 		   current_function_decl and would endlessly recurse. */;
25800 else if (TREE_CODE (decl) == FUNCTION_DECL)
25801 process_scope_var (stmt, decl, NULL_TREE, context_die);
25802 else
25803 process_scope_var (stmt, NULL_TREE, decl, context_die);
25804 }
25805 }
25806
25807 /* Even if we're at -g1, we need to process the subblocks in order to get
25808 inlined call information. */
25809
25810 /* Output the DIEs to represent all sub-blocks (and the items declared
25811 therein) of this block. */
25812 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25813 subblocks != NULL;
25814 subblocks = BLOCK_CHAIN (subblocks))
25815 gen_block_die (subblocks, context_die);
25816 }
25817
25818 /* Is this a typedef we can avoid emitting? */
25819
25820 bool
25821 is_redundant_typedef (const_tree decl)
25822 {
25823 if (TYPE_DECL_IS_STUB (decl))
25824 return true;
25825
25826 if (DECL_ARTIFICIAL (decl)
25827 && DECL_CONTEXT (decl)
25828 && is_tagged_type (DECL_CONTEXT (decl))
25829 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25830 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25831 /* Also ignore the artificial member typedef for the class name. */
25832 return true;
25833
25834 return false;
25835 }
25836
25837 /* Return TRUE if TYPE is a typedef that names a type for linkage
25838 purposes. This kind of typedefs is produced by the C++ FE for
25839 constructs like:
25840
25841 typedef struct {...} foo;
25842
25843 In that case, there is no typedef variant type produced for foo.
25844 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25845 struct type. */
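
/* For contrast (an illustrative sketch, not part of the original
   comment): given

       typedef struct bar {...} bar_t;

   the tag "bar" already names the struct, so "bar_t" is an ordinary
   typedef (its DECL_ORIGINAL_TYPE is set) and the predicate below is
   expected to return false for it.  */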
25846
25847 static bool
25848 is_naming_typedef_decl (const_tree decl)
25849 {
25850 if (decl == NULL_TREE
25851 || TREE_CODE (decl) != TYPE_DECL
25852 || DECL_NAMELESS (decl)
25853 || !is_tagged_type (TREE_TYPE (decl))
25854 || DECL_IS_BUILTIN (decl)
25855 || is_redundant_typedef (decl)
25856 /* It looks like Ada produces TYPE_DECLs that are very similar
25857 to C++ naming typedefs but that have different
25858 	 semantics.  Let's be specific to C++ for now. */
25859 || !is_cxx (decl))
25860 return FALSE;
25861
25862 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25863 && TYPE_NAME (TREE_TYPE (decl)) == decl
25864 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25865 != TYPE_NAME (TREE_TYPE (decl))));
25866 }
25867
25868 /* Looks up the DIE for a context. */
25869
25870 static inline dw_die_ref
25871 lookup_context_die (tree context)
25872 {
25873 if (context)
25874 {
25875 /* Find die that represents this context. */
25876 if (TYPE_P (context))
25877 {
25878 context = TYPE_MAIN_VARIANT (context);
25879 dw_die_ref ctx = lookup_type_die (context);
25880 if (!ctx)
25881 return NULL;
25882 return strip_naming_typedef (context, ctx);
25883 }
25884 else
25885 return lookup_decl_die (context);
25886 }
25887 return comp_unit_die ();
25888 }
25889
25890 /* Returns the DIE for a context. */
25891
25892 static inline dw_die_ref
25893 get_context_die (tree context)
25894 {
25895 if (context)
25896 {
25897 /* Find die that represents this context. */
25898 if (TYPE_P (context))
25899 {
25900 context = TYPE_MAIN_VARIANT (context);
25901 return strip_naming_typedef (context, force_type_die (context));
25902 }
25903 else
25904 return force_decl_die (context);
25905 }
25906 return comp_unit_die ();
25907 }
25908
25909 /* Returns the DIE for decl. A DIE will always be returned. */
25910
25911 static dw_die_ref
25912 force_decl_die (tree decl)
25913 {
25914 dw_die_ref decl_die;
25915 unsigned saved_external_flag;
25916 tree save_fn = NULL_TREE;
25917 decl_die = lookup_decl_die (decl);
25918 if (!decl_die)
25919 {
25920 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25921
25922 decl_die = lookup_decl_die (decl);
25923 if (decl_die)
25924 return decl_die;
25925
25926 switch (TREE_CODE (decl))
25927 {
25928 case FUNCTION_DECL:
25929 /* Clear current_function_decl, so that gen_subprogram_die thinks
25930 that this is a declaration. At this point, we just want to force
25931 declaration die. */
25932 save_fn = current_function_decl;
25933 current_function_decl = NULL_TREE;
25934 gen_subprogram_die (decl, context_die);
25935 current_function_decl = save_fn;
25936 break;
25937
25938 case VAR_DECL:
25939 /* Set external flag to force declaration die. Restore it after
25940 gen_decl_die() call. */
25941 saved_external_flag = DECL_EXTERNAL (decl);
25942 DECL_EXTERNAL (decl) = 1;
25943 gen_decl_die (decl, NULL, NULL, context_die);
25944 DECL_EXTERNAL (decl) = saved_external_flag;
25945 break;
25946
25947 case NAMESPACE_DECL:
25948 if (dwarf_version >= 3 || !dwarf_strict)
25949 dwarf2out_decl (decl);
25950 else
25951 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25952 decl_die = comp_unit_die ();
25953 break;
25954
25955 case TRANSLATION_UNIT_DECL:
25956 decl_die = comp_unit_die ();
25957 break;
25958
25959 default:
25960 gcc_unreachable ();
25961 }
25962
25963 /* We should be able to find the DIE now. */
25964 if (!decl_die)
25965 decl_die = lookup_decl_die (decl);
25966 gcc_assert (decl_die);
25967 }
25968
25969 return decl_die;
25970 }
25971
25972 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25973 always returned. */
25974
25975 static dw_die_ref
25976 force_type_die (tree type)
25977 {
25978 dw_die_ref type_die;
25979
25980 type_die = lookup_type_die (type);
25981 if (!type_die)
25982 {
25983 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25984
25985 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25986 false, context_die);
25987 gcc_assert (type_die);
25988 }
25989 return type_die;
25990 }
25991
25992 /* Force out any required namespaces to be able to output DECL,
25993 and return the new context_die for it, if it's changed. */
25994
25995 static dw_die_ref
25996 setup_namespace_context (tree thing, dw_die_ref context_die)
25997 {
25998 tree context = (DECL_P (thing)
25999 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26000 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26001 /* Force out the namespace. */
26002 context_die = force_decl_die (context);
26003
26004 return context_die;
26005 }
26006
26007 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26008 type) within its namespace, if appropriate.
26009
26010 For compatibility with older debuggers, namespace DIEs only contain
26011 declarations; all definitions are emitted at CU scope, with
26012 DW_AT_specification pointing to the declaration (like with class
26013 members). */
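
/* A sketch of the DIE shape this aims for (illustrative only, based on
   the description above):

       DW_TAG_namespace "S"
         DW_TAG_variable "i"      <- declaration (DW_AT_declaration)
       DW_TAG_variable "i"        <- definition at CU scope,
                                     DW_AT_specification -> declaration  */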
26014
26015 static dw_die_ref
26016 declare_in_namespace (tree thing, dw_die_ref context_die)
26017 {
26018 dw_die_ref ns_context;
26019
26020 if (debug_info_level <= DINFO_LEVEL_TERSE)
26021 return context_die;
26022
26023 /* External declarations in the local scope only need to be emitted
26024 once, not once in the namespace and once in the scope.
26025
26026 This avoids declaring the `extern' below in the
26027 namespace DIE as well as in the innermost scope:
26028
26029 namespace S
26030 {
26031 int i=5;
26032 int foo()
26033 {
26034 int i=8;
26035 extern int i;
26036 return i;
26037 }
26038 }
26039 */
26040 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26041 return context_die;
26042
26043 /* If this decl is from an inlined function, then don't try to emit it in its
26044 namespace, as we will get confused. It would have already been emitted
26045 	 when the abstract instance of the inline function was emitted anyway. */
26046 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26047 return context_die;
26048
26049 ns_context = setup_namespace_context (thing, context_die);
26050
26051 if (ns_context != context_die)
26052 {
26053 if (is_fortran ())
26054 return ns_context;
26055 if (DECL_P (thing))
26056 gen_decl_die (thing, NULL, NULL, ns_context);
26057 else
26058 gen_type_die (thing, ns_context);
26059 }
26060 return context_die;
26061 }
26062
26063 /* Generate a DIE for a namespace or namespace alias. */
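
/* For example (a hedged sketch; the names are made up): for a C++ alias

       namespace A = B;

   the code below emits a DW_TAG_imported_declaration named "A" whose
   DW_AT_import refers to the DW_TAG_namespace DIE of B, while a plain
   "namespace B { ... }" gets a DW_TAG_namespace (or DW_TAG_module for
   Fortran) of its own.  */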
26064
26065 static void
26066 gen_namespace_die (tree decl, dw_die_ref context_die)
26067 {
26068 dw_die_ref namespace_die;
26069
26070 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26071 they are an alias of. */
26072 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26073 {
26074 /* Output a real namespace or module. */
26075 context_die = setup_namespace_context (decl, comp_unit_die ());
26076 namespace_die = new_die (is_fortran ()
26077 ? DW_TAG_module : DW_TAG_namespace,
26078 context_die, decl);
26079 /* For Fortran modules defined in different CU don't add src coords. */
26080 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26081 {
26082 const char *name = dwarf2_name (decl, 0);
26083 if (name)
26084 add_name_attribute (namespace_die, name);
26085 }
26086 else
26087 add_name_and_src_coords_attributes (namespace_die, decl);
26088 if (DECL_EXTERNAL (decl))
26089 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26090 equate_decl_number_to_die (decl, namespace_die);
26091 }
26092 else
26093 {
26094 /* Output a namespace alias. */
26095
26096 /* Force out the namespace we are an alias of, if necessary. */
26097 dw_die_ref origin_die
26098 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26099
26100 if (DECL_FILE_SCOPE_P (decl)
26101 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26102 context_die = setup_namespace_context (decl, comp_unit_die ());
26103 /* Now create the namespace alias DIE. */
26104 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26105 add_name_and_src_coords_attributes (namespace_die, decl);
26106 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26107 equate_decl_number_to_die (decl, namespace_die);
26108 }
26109 if ((dwarf_version >= 5 || !dwarf_strict)
26110 && lang_hooks.decls.decl_dwarf_attribute (decl,
26111 DW_AT_export_symbols) == 1)
26112 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26113
26114 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26115 if (want_pubnames ())
26116 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26117 }
26118
26119 /* Generate Dwarf debug information for a decl described by DECL.
26120 The return value is currently only meaningful for PARM_DECLs,
26121 for all other decls it returns NULL.
26122
26123 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26124 It can be NULL otherwise. */
26125
26126 static dw_die_ref
26127 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26128 dw_die_ref context_die)
26129 {
26130 tree decl_or_origin = decl ? decl : origin;
26131 tree class_origin = NULL, ultimate_origin;
26132
26133 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26134 return NULL;
26135
26136 switch (TREE_CODE (decl_or_origin))
26137 {
26138 case ERROR_MARK:
26139 break;
26140
26141 case CONST_DECL:
26142 if (!is_fortran () && !is_ada ())
26143 {
26144 /* The individual enumerators of an enum type get output when we output
26145 the Dwarf representation of the relevant enum type itself. */
26146 break;
26147 }
26148
26149 /* Emit its type. */
26150 gen_type_die (TREE_TYPE (decl), context_die);
26151
26152 /* And its containing namespace. */
26153 context_die = declare_in_namespace (decl, context_die);
26154
26155 gen_const_die (decl, context_die);
26156 break;
26157
26158 case FUNCTION_DECL:
26159 #if 0
26160 /* FIXME */
26161 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26162 on local redeclarations of global functions. That seems broken. */
26163 if (current_function_decl != decl)
26164 /* This is only a declaration. */;
26165 #endif
26166
26167 /* We should have abstract copies already and should not generate
26168 stray type DIEs in late LTO dumping. */
26169 if (! early_dwarf)
26170 ;
26171
26172 /* If we're emitting a clone, emit info for the abstract instance. */
26173 else if (origin || DECL_ORIGIN (decl) != decl)
26174 dwarf2out_abstract_function (origin
26175 ? DECL_ORIGIN (origin)
26176 : DECL_ABSTRACT_ORIGIN (decl));
26177
26178 /* If we're emitting a possibly inlined function emit it as
26179 abstract instance. */
26180 else if (cgraph_function_possibly_inlined_p (decl)
26181 && ! DECL_ABSTRACT_P (decl)
26182 && ! class_or_namespace_scope_p (context_die)
26183 /* dwarf2out_abstract_function won't emit a die if this is just
26184 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26185 that case, because that works only if we have a die. */
26186 && DECL_INITIAL (decl) != NULL_TREE)
26187 dwarf2out_abstract_function (decl);
26188
26189 /* Otherwise we're emitting the primary DIE for this decl. */
26190 else if (debug_info_level > DINFO_LEVEL_TERSE)
26191 {
26192 /* Before we describe the FUNCTION_DECL itself, make sure that we
26193 have its containing type. */
26194 if (!origin)
26195 origin = decl_class_context (decl);
26196 if (origin != NULL_TREE)
26197 gen_type_die (origin, context_die);
26198
26199 /* And its return type. */
26200 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26201
26202 /* And its virtual context. */
26203 if (DECL_VINDEX (decl) != NULL_TREE)
26204 gen_type_die (DECL_CONTEXT (decl), context_die);
26205
26206 /* Make sure we have a member DIE for decl. */
26207 if (origin != NULL_TREE)
26208 gen_type_die_for_member (origin, decl, context_die);
26209
26210 /* And its containing namespace. */
26211 context_die = declare_in_namespace (decl, context_die);
26212 }
26213
26214 /* Now output a DIE to represent the function itself. */
26215 if (decl)
26216 gen_subprogram_die (decl, context_die);
26217 break;
26218
26219 case TYPE_DECL:
26220 /* If we are in terse mode, don't generate any DIEs to represent any
26221 actual typedefs. */
26222 if (debug_info_level <= DINFO_LEVEL_TERSE)
26223 break;
26224
26225 /* In the special case of a TYPE_DECL node representing the declaration
26226 of some type tag, if the given TYPE_DECL is marked as having been
26227 instantiated from some other (original) TYPE_DECL node (e.g. one which
26228 was generated within the original definition of an inline function) we
26229 used to generate a special (abbreviated) DW_TAG_structure_type,
26230 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26231 should be actually referencing those DIEs, as variable DIEs with that
26232 type would be emitted already in the abstract origin, so it was always
26233 	 removed during unused type pruning. Don't add anything in this
26234 case. */
26235 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26236 break;
26237
26238 if (is_redundant_typedef (decl))
26239 gen_type_die (TREE_TYPE (decl), context_die);
26240 else
26241 /* Output a DIE to represent the typedef itself. */
26242 gen_typedef_die (decl, context_die);
26243 break;
26244
26245 case LABEL_DECL:
26246 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26247 gen_label_die (decl, context_die);
26248 break;
26249
26250 case VAR_DECL:
26251 case RESULT_DECL:
26252 /* If we are in terse mode, don't generate any DIEs to represent any
26253 variable declarations or definitions. */
26254 if (debug_info_level <= DINFO_LEVEL_TERSE)
26255 break;
26256
26257 /* Avoid generating stray type DIEs during late dwarf dumping.
26258 All types have been dumped early. */
26259 if (early_dwarf
26260 /* ??? But in LTRANS we cannot annotate early created variably
26261 modified type DIEs without copying them and adjusting all
26262 references to them. Dump them again as happens for inlining
26263 which copies both the decl and the types. */
26264 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26265 in VLA bound information for example. */
26266 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26267 current_function_decl)))
26268 {
26269 /* Output any DIEs that are needed to specify the type of this data
26270 object. */
26271 if (decl_by_reference_p (decl_or_origin))
26272 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26273 else
26274 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26275 }
26276
26277 if (early_dwarf)
26278 {
26279 /* And its containing type. */
26280 class_origin = decl_class_context (decl_or_origin);
26281 if (class_origin != NULL_TREE)
26282 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26283
26284 /* And its containing namespace. */
26285 context_die = declare_in_namespace (decl_or_origin, context_die);
26286 }
26287
26288 /* Now output the DIE to represent the data object itself. This gets
26289 complicated because of the possibility that the VAR_DECL really
26290 represents an inlined instance of a formal parameter for an inline
26291 function. */
26292 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26293 if (ultimate_origin != NULL_TREE
26294 && TREE_CODE (ultimate_origin) == PARM_DECL)
26295 gen_formal_parameter_die (decl, origin,
26296 true /* Emit name attribute. */,
26297 context_die);
26298 else
26299 gen_variable_die (decl, origin, context_die);
26300 break;
26301
26302 case FIELD_DECL:
26303 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26304 /* Ignore the nameless fields that are used to skip bits but handle C++
26305 anonymous unions and structs. */
26306 if (DECL_NAME (decl) != NULL_TREE
26307 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26308 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26309 {
26310 gen_type_die (member_declared_type (decl), context_die);
26311 gen_field_die (decl, ctx, context_die);
26312 }
26313 break;
26314
26315 case PARM_DECL:
26316 /* Avoid generating stray type DIEs during late dwarf dumping.
26317 All types have been dumped early. */
26318 if (early_dwarf
26319 /* ??? But in LTRANS we cannot annotate early created variably
26320 modified type DIEs without copying them and adjusting all
26321 references to them. Dump them again as happens for inlining
26322 which copies both the decl and the types. */
26323 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26324 in VLA bound information for example. */
26325 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26326 current_function_decl)))
26327 {
26328 if (DECL_BY_REFERENCE (decl_or_origin))
26329 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26330 else
26331 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26332 }
26333 return gen_formal_parameter_die (decl, origin,
26334 true /* Emit name attribute. */,
26335 context_die);
26336
26337 case NAMESPACE_DECL:
26338 if (dwarf_version >= 3 || !dwarf_strict)
26339 gen_namespace_die (decl, context_die);
26340 break;
26341
26342 case IMPORTED_DECL:
26343 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26344 DECL_CONTEXT (decl), context_die);
26345 break;
26346
26347 case NAMELIST_DECL:
26348 gen_namelist_decl (DECL_NAME (decl), context_die,
26349 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26350 break;
26351
26352 default:
26353 /* Probably some frontend-internal decl. Assume we don't care. */
26354 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26355 break;
26356 }
26357
26358 return NULL;
26359 }
26360 \f
26361 /* Output initial debug information for global DECL. Called at the
26362 end of the parsing process.
26363
26364 This is the initial debug generation process. As such, the DIEs
26365 generated may be incomplete. A later debug generation pass
26366 (dwarf2out_late_global_decl) will augment the information generated
26367 in this pass (e.g., with complete location info). */
26368
26369 static void
26370 dwarf2out_early_global_decl (tree decl)
26371 {
26372 set_early_dwarf s;
26373
26374 /* gen_decl_die() will set DECL_ABSTRACT because
26375 	 cgraph_function_possibly_inlined_p() returns true.  This in
26376 	 turn will cause DW_AT_inline attributes to be set.
26377
26378 This happens because at early dwarf generation, there is no
26379 cgraph information, causing cgraph_function_possibly_inlined_p()
26380 to return true. Trick cgraph_function_possibly_inlined_p()
26381 while we generate dwarf early. */
26382 bool save = symtab->global_info_ready;
26383 symtab->global_info_ready = true;
26384
26385 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26386 other DECLs and they can point to template types or other things
26387 that dwarf2out can't handle when done via dwarf2out_decl. */
26388 if (TREE_CODE (decl) != TYPE_DECL
26389 && TREE_CODE (decl) != PARM_DECL)
26390 {
26391 if (TREE_CODE (decl) == FUNCTION_DECL)
26392 {
26393 tree save_fndecl = current_function_decl;
26394
26395 /* For nested functions, make sure we have DIEs for the parents first
26396 so that all nested DIEs are generated at the proper scope in the
26397 first shot. */
26398 tree context = decl_function_context (decl);
26399 if (context != NULL)
26400 {
26401 dw_die_ref context_die = lookup_decl_die (context);
26402 current_function_decl = context;
26403
26404 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26405 enough so that it lands in its own context. This avoids type
26406 pruning issues later on. */
26407 if (context_die == NULL || is_declaration_die (context_die))
26408 dwarf2out_decl (context);
26409 }
26410
26411 /* Emit an abstract origin of a function first. This happens
26412 with C++ constructor clones for example and makes
26413 dwarf2out_abstract_function happy which requires the early
26414 DIE of the abstract instance to be present. */
26415 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26416 dw_die_ref origin_die;
26417 if (origin != NULL
26418 /* Do not emit the DIE multiple times but make sure to
26419 process it fully here in case we just saw a declaration. */
26420 && ((origin_die = lookup_decl_die (origin)) == NULL
26421 || is_declaration_die (origin_die)))
26422 {
26423 current_function_decl = origin;
26424 dwarf2out_decl (origin);
26425 }
26426
26427 /* Emit the DIE for decl but avoid doing that multiple times. */
26428 dw_die_ref old_die;
26429 if ((old_die = lookup_decl_die (decl)) == NULL
26430 || is_declaration_die (old_die))
26431 {
26432 current_function_decl = decl;
26433 dwarf2out_decl (decl);
26434 }
26435
26436 current_function_decl = save_fndecl;
26437 }
26438 else
26439 dwarf2out_decl (decl);
26440 }
26441 symtab->global_info_ready = save;
26442 }
26443
26444 /* Return whether EXPR is an expression with the following pattern:
26445 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26446
26447 static bool
26448 is_trivial_indirect_ref (tree expr)
26449 {
26450 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26451 return false;
26452
26453 tree nop = TREE_OPERAND (expr, 0);
26454 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26455 return false;
26456
26457 tree int_cst = TREE_OPERAND (nop, 0);
26458 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26459 }
26460
26461 /* Output debug information for global decl DECL. Called from
26462 toplev.c after compilation proper has finished. */
26463
26464 static void
26465 dwarf2out_late_global_decl (tree decl)
26466 {
26467 /* Fill-in any location information we were unable to determine
26468 on the first pass. */
26469 if (VAR_P (decl))
26470 {
26471 dw_die_ref die = lookup_decl_die (decl);
26472
26473 /* We may have to generate early debug late for LTO in case debug
26474 was not enabled at compile-time or the target doesn't support
26475 the LTO early debug scheme. */
26476 if (! die && in_lto_p)
26477 {
26478 dwarf2out_decl (decl);
26479 die = lookup_decl_die (decl);
26480 }
26481
26482 if (die)
26483 {
26484 /* We get called via the symtab code invoking late_global_decl
26485 for symbols that are optimized out.
26486
26487 Do not add locations for those, except if they have a
26488 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26489 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26490 INDIRECT_REF expression, as this could generate relocations to
26491 text symbols in LTO object files, which is invalid. */
26492 varpool_node *node = varpool_node::get (decl);
26493 if ((! node || ! node->definition)
26494 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26495 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26496 tree_add_const_value_attribute_for_decl (die, decl);
26497 else
26498 add_location_or_const_value_attribute (die, decl, false);
26499 }
26500 }
26501 }
26502
26503 /* Output debug information for type decl DECL. Called from toplev.c
26504 and from language front ends (to record built-in types). */
26505 static void
26506 dwarf2out_type_decl (tree decl, int local)
26507 {
26508 if (!local)
26509 {
26510 set_early_dwarf s;
26511 dwarf2out_decl (decl);
26512 }
26513 }
26514
26515 /* Output debug information for imported module or decl DECL.
26516 NAME is non-NULL name in the lexical block if the decl has been renamed.
26517 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26518 that DECL belongs to.
26519 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26520 static void
26521 dwarf2out_imported_module_or_decl_1 (tree decl,
26522 tree name,
26523 tree lexical_block,
26524 dw_die_ref lexical_block_die)
26525 {
26526 expanded_location xloc;
26527 dw_die_ref imported_die = NULL;
26528 dw_die_ref at_import_die;
26529
26530 if (TREE_CODE (decl) == IMPORTED_DECL)
26531 {
26532 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26533 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26534 gcc_assert (decl);
26535 }
26536 else
26537 xloc = expand_location (input_location);
26538
26539 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26540 {
26541 at_import_die = force_type_die (TREE_TYPE (decl));
26542 /* For namespace N { typedef void T; } using N::T; base_type_die
26543 returns NULL, but DW_TAG_imported_declaration requires
26544 	 a DW_AT_import attribute.  Force creation of DW_TAG_typedef. */
26545 if (!at_import_die)
26546 {
26547 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26548 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26549 at_import_die = lookup_type_die (TREE_TYPE (decl));
26550 gcc_assert (at_import_die);
26551 }
26552 }
26553 else
26554 {
26555 at_import_die = lookup_decl_die (decl);
26556 if (!at_import_die)
26557 {
26558 /* If we're trying to avoid duplicate debug info, we may not have
26559 emitted the member decl for this field. Emit it now. */
26560 if (TREE_CODE (decl) == FIELD_DECL)
26561 {
26562 tree type = DECL_CONTEXT (decl);
26563
26564 if (TYPE_CONTEXT (type)
26565 && TYPE_P (TYPE_CONTEXT (type))
26566 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26567 DINFO_USAGE_DIR_USE))
26568 return;
26569 gen_type_die_for_member (type, decl,
26570 get_context_die (TYPE_CONTEXT (type)));
26571 }
26572 if (TREE_CODE (decl) == NAMELIST_DECL)
26573 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26574 get_context_die (DECL_CONTEXT (decl)),
26575 NULL_TREE);
26576 else
26577 at_import_die = force_decl_die (decl);
26578 }
26579 }
26580
26581 if (TREE_CODE (decl) == NAMESPACE_DECL)
26582 {
26583 if (dwarf_version >= 3 || !dwarf_strict)
26584 imported_die = new_die (DW_TAG_imported_module,
26585 lexical_block_die,
26586 lexical_block);
26587 else
26588 return;
26589 }
26590 else
26591 imported_die = new_die (DW_TAG_imported_declaration,
26592 lexical_block_die,
26593 lexical_block);
26594
26595 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26596 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26597 if (debug_column_info && xloc.column)
26598 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26599 if (name)
26600 add_AT_string (imported_die, DW_AT_name,
26601 IDENTIFIER_POINTER (name));
26602 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26603 }
26604
26605 /* Output debug information for imported module or decl DECL.
26606 NAME is non-NULL name in context if the decl has been renamed.
26607 CHILD is true if decl is one of the renamed decls as part of
26608 importing whole module.
26609 IMPLICIT is set if this hook is called for an implicit import
26610 such as inline namespace. */
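
/* Illustrative examples (not from the original comment): a C++
   "using namespace N;" ends up as a DW_TAG_imported_module, while
   "using N::x;" ends up as a DW_TAG_imported_declaration; both carry a
   DW_AT_import reference to the DIE of the imported entity (see
   dwarf2out_imported_module_or_decl_1 above).  */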
26611
26612 static void
26613 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26614 bool child, bool implicit)
26615 {
26616 /* dw_die_ref at_import_die; */
26617 dw_die_ref scope_die;
26618
26619 if (debug_info_level <= DINFO_LEVEL_TERSE)
26620 return;
26621
26622 gcc_assert (decl);
26623
26624   /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26625      should be enough.  For DWARF4 and older, even if we do emit
26626      DW_AT_export_symbols as an extension, add the implicit
26627      DW_TAG_imported_module anyway for consumers unaware of it. */
26628 if (implicit
26629 && dwarf_version >= 5
26630 && lang_hooks.decls.decl_dwarf_attribute (decl,
26631 DW_AT_export_symbols) == 1)
26632 return;
26633
26634 set_early_dwarf s;
26635
26636   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26637      two DIEs: the decl DIE for the DW_AT_import reference and the scope DIE.
26638      First, get the DIE for the decl itself. */
26639
26640   /* Get the scope DIE for the decl context.  Use comp_unit_die for a global
26641      module or decl.  If no DIE is found for non-globals, force a new DIE. */
26642 if (context
26643 && TYPE_P (context)
26644 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26645 return;
26646
26647 scope_die = get_context_die (context);
26648
26649 if (child)
26650 {
26651 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26652 	 there is nothing we can do here. */
26653 if (dwarf_version < 3 && dwarf_strict)
26654 return;
26655
26656 gcc_assert (scope_die->die_child);
26657 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26658 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26659 scope_die = scope_die->die_child;
26660 }
26661
26662 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26663 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26664 }
26665
26666 /* Output debug information for namelists. */
26667
26668 static dw_die_ref
26669 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26670 {
26671 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26672 tree value;
26673 unsigned i;
26674
26675 if (debug_info_level <= DINFO_LEVEL_TERSE)
26676 return NULL;
26677
26678 gcc_assert (scope_die != NULL);
26679 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26680 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26681
26682 /* If there are no item_decls, we have a nondefining namelist, e.g.
26683 with USE association; hence, set DW_AT_declaration. */
26684 if (item_decls == NULL_TREE)
26685 {
26686 add_AT_flag (nml_die, DW_AT_declaration, 1);
26687 return nml_die;
26688 }
26689
26690 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26691 {
26692 nml_item_ref_die = lookup_decl_die (value);
26693 if (!nml_item_ref_die)
26694 nml_item_ref_die = force_decl_die (value);
26695
26696 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26697 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26698 }
26699 return nml_die;
26700 }
26701
26702
26703 /* Write the debugging output for DECL. */
26704
26705 static void
26706 dwarf2out_decl (tree decl)
26707 {
26708 dw_die_ref context_die = comp_unit_die ();
26709
26710 switch (TREE_CODE (decl))
26711 {
26712 case ERROR_MARK:
26713 return;
26714
26715 case FUNCTION_DECL:
26716 /* If we're a nested function, initially use a parent of NULL; if we're
26717 a plain function, this will be fixed up in decls_for_scope. If
26718 we're a method, it will be ignored, since we already have a DIE.
26719 Avoid doing this late though since clones of class methods may
26720 otherwise end up in limbo and create type DIEs late. */
26721 if (early_dwarf
26722 && decl_function_context (decl)
26723 /* But if we're in terse mode, we don't care about scope. */
26724 && debug_info_level > DINFO_LEVEL_TERSE)
26725 context_die = NULL;
26726 break;
26727
26728 case VAR_DECL:
26729 /* For local statics lookup proper context die. */
26730 if (local_function_static (decl))
26731 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26732
26733 /* If we are in terse mode, don't generate any DIEs to represent any
26734 variable declarations or definitions. */
26735 if (debug_info_level <= DINFO_LEVEL_TERSE)
26736 return;
26737 break;
26738
26739 case CONST_DECL:
26740 if (debug_info_level <= DINFO_LEVEL_TERSE)
26741 return;
26742 if (!is_fortran () && !is_ada ())
26743 return;
26744 if (TREE_STATIC (decl) && decl_function_context (decl))
26745 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26746 break;
26747
26748 case NAMESPACE_DECL:
26749 case IMPORTED_DECL:
26750 if (debug_info_level <= DINFO_LEVEL_TERSE)
26751 return;
26752 if (lookup_decl_die (decl) != NULL)
26753 return;
26754 break;
26755
26756 case TYPE_DECL:
26757 /* Don't emit stubs for types unless they are needed by other DIEs. */
26758 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26759 return;
26760
26761 /* Don't bother trying to generate any DIEs to represent any of the
26762 normal built-in types for the language we are compiling. */
26763 if (DECL_IS_BUILTIN (decl))
26764 return;
26765
26766 /* If we are in terse mode, don't generate any DIEs for types. */
26767 if (debug_info_level <= DINFO_LEVEL_TERSE)
26768 return;
26769
26770 /* If we're a function-scope tag, initially use a parent of NULL;
26771 this will be fixed up in decls_for_scope. */
26772 if (decl_function_context (decl))
26773 context_die = NULL;
26774
26775 break;
26776
26777 case NAMELIST_DECL:
26778 break;
26779
26780 default:
26781 return;
26782 }
26783
26784 gen_decl_die (decl, NULL, NULL, context_die);
26785
26786 if (flag_checking)
26787 {
26788 dw_die_ref die = lookup_decl_die (decl);
26789 if (die)
26790 check_die (die);
26791 }
26792 }
26793
26794 /* Write the debugging output for DECL. */
26795
26796 static void
26797 dwarf2out_function_decl (tree decl)
26798 {
26799 dwarf2out_decl (decl);
26800 call_arg_locations = NULL;
26801 call_arg_loc_last = NULL;
26802 call_site_count = -1;
26803 tail_call_site_count = -1;
26804 decl_loc_table->empty ();
26805 cached_dw_loc_list_table->empty ();
26806 }
26807
26808 /* Output a marker (i.e. a label) for the beginning of the generated code for
26809 a lexical block. */
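
/* A sketch of the effect (exact label spelling depends on the target;
   BLOCK_BEGIN_LABEL/BLOCK_END_LABEL are typically "LBB"/"LBE"):

       .LBB42:          <- emitted here for block number 42
         ... the block's insns ...
       .LBE42:          <- emitted by dwarf2out_end_block below

   The DW_TAG_lexical_block DIEs later use these labels for their code
   ranges.  */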
26810
26811 static void
26812 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26813 unsigned int blocknum)
26814 {
26815 switch_to_section (current_function_section ());
26816 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26817 }
26818
26819 /* Output a marker (i.e. a label) for the end of the generated code for a
26820 lexical block. */
26821
26822 static void
26823 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26824 {
26825 switch_to_section (current_function_section ());
26826 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26827 }
26828
26829 /* Returns nonzero if it is appropriate not to emit any debugging
26830 information for BLOCK, because it doesn't contain any instructions.
26831
26832 Don't allow this for blocks with nested functions or local classes
26833 as we would end up with orphans, and in the presence of scheduling
26834 we may end up calling them anyway. */
26835
26836 static bool
26837 dwarf2out_ignore_block (const_tree block)
26838 {
26839 tree decl;
26840 unsigned int i;
26841
26842 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26843 if (TREE_CODE (decl) == FUNCTION_DECL
26844 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26845 return 0;
26846 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26847 {
26848 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26849 if (TREE_CODE (decl) == FUNCTION_DECL
26850 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26851 return 0;
26852 }
26853
26854 return 1;
26855 }
26856
26857 /* Hash table routines for file_hash. */
26858
26859 bool
26860 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26861 {
26862 return filename_cmp (p1->filename, p2) == 0;
26863 }
26864
26865 hashval_t
26866 dwarf_file_hasher::hash (dwarf_file_data *p)
26867 {
26868 return htab_hash_string (p->filename);
26869 }
26870
26871 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26872 dwarf2out.c) and return its "index". The index of each (known) filename is
26873 just a unique number which is associated with only that one filename. We
26874 need such numbers for the sake of generating labels (in the .debug_sfnames
26875 section) and references to those files numbers (in the .debug_srcinfo
26876 and .debug_macinfo sections). If the filename given as an argument is not
26877 found in our current list, add it to the list and assign it the next
26878 available unique index number. */
26879
26880 static struct dwarf_file_data *
26881 lookup_filename (const char *file_name)
26882 {
26883 struct dwarf_file_data * created;
26884
26885 if (!file_name)
26886 return NULL;
26887
26888 dwarf_file_data **slot
26889 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26890 INSERT);
26891 if (*slot)
26892 return *slot;
26893
26894 created = ggc_alloc<dwarf_file_data> ();
26895 created->filename = file_name;
26896 created->emitted_number = 0;
26897 *slot = created;
26898 return created;
26899 }
26900
26901 /* If the assembler will construct the file table, then translate the compiler
26902 internal file table number into the assembler file table number, and emit
26903 a .file directive if we haven't already emitted one yet. The file table
26904 numbers are different because we prune debug info for unused variables and
26905 types, which may include filenames. */
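
/* For instance (an illustrative sketch): the first filename passed here
   would typically come out as

       .file 1 "foo.c"

   and subsequent .loc directives emitted for the assembler-built line
   table then refer to that number.  */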
26906
26907 static int
26908 maybe_emit_file (struct dwarf_file_data * fd)
26909 {
26910 if (! fd->emitted_number)
26911 {
26912 if (last_emitted_file)
26913 fd->emitted_number = last_emitted_file->emitted_number + 1;
26914 else
26915 fd->emitted_number = 1;
26916 last_emitted_file = fd;
26917
26918 if (output_asm_line_debug_info ())
26919 {
26920 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26921 output_quoted_string (asm_out_file,
26922 remap_debug_filename (fd->filename));
26923 fputc ('\n', asm_out_file);
26924 }
26925 }
26926
26927 return fd->emitted_number;
26928 }
26929
26930 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26931 That generation should happen after function debug info has been
26932 generated. The value of the attribute is the constant value of ARG. */
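
/* For illustration (hypothetical C++ source, not from this file): for

       template <int N> struct A {};
       A<3> a;

   the DIE of the template value parameter N of A<3> is scheduled here
   so that a DW_AT_const_value of 3 can be attached later.  */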
26933
26934 static void
26935 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26936 {
26937 die_arg_entry entry;
26938
26939 if (!die || !arg)
26940 return;
26941
26942 gcc_assert (early_dwarf);
26943
26944 if (!tmpl_value_parm_die_table)
26945 vec_alloc (tmpl_value_parm_die_table, 32);
26946
26947 entry.die = die;
26948 entry.arg = arg;
26949 vec_safe_push (tmpl_value_parm_die_table, entry);
26950 }
26951
26952 /* Return TRUE if T is an instance of a generic type, FALSE
26953 otherwise. */
26954
26955 static bool
26956 generic_type_p (tree t)
26957 {
26958 if (t == NULL_TREE || !TYPE_P (t))
26959 return false;
26960 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26961 }
26962
26963 /* Schedule the generation of the generic parameter dies for the
26964 instance of generic type T. The proper generation itself is later
26965 done by gen_scheduled_generic_parms_dies. */
26966
26967 static void
26968 schedule_generic_params_dies_gen (tree t)
26969 {
26970 if (!generic_type_p (t))
26971 return;
26972
26973 gcc_assert (early_dwarf);
26974
26975 if (!generic_type_instances)
26976 vec_alloc (generic_type_instances, 256);
26977
26978 vec_safe_push (generic_type_instances, t);
26979 }
26980
26981 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26982 by append_entry_to_tmpl_value_parm_die_table. This function must
26983 be called after function DIEs have been generated. */
26984
26985 static void
26986 gen_remaining_tmpl_value_param_die_attribute (void)
26987 {
26988 if (tmpl_value_parm_die_table)
26989 {
26990 unsigned i, j;
26991 die_arg_entry *e;
26992
26993 /* We do this in two phases - first get the cases we can
26994 handle during early-finish, preserving those we cannot
26995 (containing symbolic constants where we don't yet know
26996 whether we are going to output the referenced symbols).
26997 For those we try again at late-finish. */
26998 j = 0;
26999 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27000 {
27001 if (!e->die->removed
27002 && !tree_add_const_value_attribute (e->die, e->arg))
27003 {
27004 dw_loc_descr_ref loc = NULL;
27005 if (! early_dwarf
27006 && (dwarf_version >= 5 || !dwarf_strict))
27007 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27008 if (loc)
27009 add_AT_loc (e->die, DW_AT_location, loc);
27010 else
27011 (*tmpl_value_parm_die_table)[j++] = *e;
27012 }
27013 }
27014 tmpl_value_parm_die_table->truncate (j);
27015 }
27016 }
27017
27018 /* Generate generic parameters DIEs for instances of generic types
27019 that have been previously scheduled by
27020 schedule_generic_params_dies_gen. This function must be called
27021 after all the types of the CU have been laid out. */
27022
27023 static void
27024 gen_scheduled_generic_parms_dies (void)
27025 {
27026 unsigned i;
27027 tree t;
27028
27029 if (!generic_type_instances)
27030 return;
27031
27032 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27033 if (COMPLETE_TYPE_P (t))
27034 gen_generic_params_dies (t);
27035
27036 generic_type_instances = NULL;
27037 }
27038
27039
27040 /* Replace DW_AT_name for the decl with name. */
27041
27042 static void
27043 dwarf2out_set_name (tree decl, tree name)
27044 {
27045 dw_die_ref die;
27046 dw_attr_node *attr;
27047 const char *dname;
27048
27049 die = TYPE_SYMTAB_DIE (decl);
27050 if (!die)
27051 return;
27052
27053 dname = dwarf2_name (name, 0);
27054 if (!dname)
27055 return;
27056
27057 attr = get_AT (die, DW_AT_name);
27058 if (attr)
27059 {
27060 struct indirect_string_node *node;
27061
27062 node = find_AT_string (dname);
27063 /* replace the string. */
27064 attr->dw_attr_val.v.val_str = node;
27065 }
27066
27067 else
27068 add_name_attribute (die, dname);
27069 }
27070
27071 /* True if before or during processing of the first function being emitted. */
27072 static bool in_first_function_p = true;
27073 /* True if loc_note during dwarf2out_var_location call might still be
27074 before first real instruction at address equal to .Ltext0. */
27075 static bool maybe_at_text_label_p = true;
27076 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27077 static unsigned int first_loclabel_num_not_at_text_label;
27078
27079 /* Look ahead for a real insn, or for a begin stmt marker. */
27080
27081 static rtx_insn *
27082 dwarf2out_next_real_insn (rtx_insn *loc_note)
27083 {
27084 rtx_insn *next_real = NEXT_INSN (loc_note);
27085
27086 while (next_real)
27087 if (INSN_P (next_real))
27088 break;
27089 else
27090 next_real = NEXT_INSN (next_real);
27091
27092 return next_real;
27093 }
27094
27095 /* Called by the final INSN scan whenever we see a var location. We
27096 use it to drop labels in the right places, and throw the location in
27097 our lookup table. */
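
/* Roughly, as a sketch of the mechanism implemented below: for a
   NOTE_INSN_VAR_LOCATION describing decl D, a label such as ".LVL7" is
   emitted at the note's position and recorded together with the
   location via add_var_loc_to_decl; the ranges between successive such
   labels later become the entries of D's DWARF location list.  */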
27098
27099 static void
27100 dwarf2out_var_location (rtx_insn *loc_note)
27101 {
27102 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27103 struct var_loc_node *newloc;
27104 rtx_insn *next_real, *next_note;
27105 rtx_insn *call_insn = NULL;
27106 static const char *last_label;
27107 static const char *last_postcall_label;
27108 static bool last_in_cold_section_p;
27109 static rtx_insn *expected_next_loc_note;
27110 tree decl;
27111 bool var_loc_p;
27112 var_loc_view view = 0;
27113
27114 if (!NOTE_P (loc_note))
27115 {
27116 if (CALL_P (loc_note))
27117 {
27118 maybe_reset_location_view (loc_note, cur_line_info_table);
27119 call_site_count++;
27120 if (SIBLING_CALL_P (loc_note))
27121 tail_call_site_count++;
27122 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27123 {
27124 call_insn = loc_note;
27125 loc_note = NULL;
27126 var_loc_p = false;
27127
27128 next_real = dwarf2out_next_real_insn (call_insn);
27129 next_note = NULL;
27130 cached_next_real_insn = NULL;
27131 goto create_label;
27132 }
27133 if (optimize == 0 && !flag_var_tracking)
27134 {
27135 /* When the var-tracking pass is not running, there is no note
27136 for indirect calls whose target is compile-time known. In this
27137 case, process such calls specifically so that we generate call
27138 sites for them anyway. */
27139 rtx x = PATTERN (loc_note);
27140 if (GET_CODE (x) == PARALLEL)
27141 x = XVECEXP (x, 0, 0);
27142 if (GET_CODE (x) == SET)
27143 x = SET_SRC (x);
27144 if (GET_CODE (x) == CALL)
27145 x = XEXP (x, 0);
27146 if (!MEM_P (x)
27147 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27148 || !SYMBOL_REF_DECL (XEXP (x, 0))
27149 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27150 != FUNCTION_DECL))
27151 {
27152 call_insn = loc_note;
27153 loc_note = NULL;
27154 var_loc_p = false;
27155
27156 next_real = dwarf2out_next_real_insn (call_insn);
27157 next_note = NULL;
27158 cached_next_real_insn = NULL;
27159 goto create_label;
27160 }
27161 }
27162 }
27163 else if (!debug_variable_location_views)
27164 gcc_unreachable ();
27165 else
27166 maybe_reset_location_view (loc_note, cur_line_info_table);
27167
27168 return;
27169 }
27170
27171 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27172 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27173 return;
27174
27175 /* Optimize processing a large consecutive sequence of location
27176 notes so we don't spend too much time in next_real_insn. If the
27177 next insn is another location note, remember the next_real_insn
27178 calculation for next time. */
27179 next_real = cached_next_real_insn;
27180 if (next_real)
27181 {
27182 if (expected_next_loc_note != loc_note)
27183 next_real = NULL;
27184 }
27185
27186 next_note = NEXT_INSN (loc_note);
27187 if (! next_note
27188 || next_note->deleted ()
27189 || ! NOTE_P (next_note)
27190 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27191 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27192 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27193 next_note = NULL;
27194
27195 if (! next_real)
27196 next_real = dwarf2out_next_real_insn (loc_note);
27197
27198 if (next_note)
27199 {
27200 expected_next_loc_note = next_note;
27201 cached_next_real_insn = next_real;
27202 }
27203 else
27204 cached_next_real_insn = NULL;
27205
27206 /* If there are no instructions which would be affected by this note,
27207 don't do anything. */
27208 if (var_loc_p
27209 && next_real == NULL_RTX
27210 && !NOTE_DURING_CALL_P (loc_note))
27211 return;
27212
27213 create_label:
27214
27215 if (next_real == NULL_RTX)
27216 next_real = get_last_insn ();
27217
27218   /* If there were any real insns between the note we processed last
27219      time and this note (or if it is the first note), clear
27220 last_{,postcall_}label so that they are not reused this time. */
27221 if (last_var_location_insn == NULL_RTX
27222 || last_var_location_insn != next_real
27223 || last_in_cold_section_p != in_cold_section_p)
27224 {
27225 last_label = NULL;
27226 last_postcall_label = NULL;
27227 }
27228
27229 if (var_loc_p)
27230 {
27231 const char *label
27232 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27233 view = cur_line_info_table->view;
27234 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27235 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27236 if (newloc == NULL)
27237 return;
27238 }
27239 else
27240 {
27241 decl = NULL_TREE;
27242 newloc = NULL;
27243 }
27244
27245   /* If there were no real insns between the note we processed last
27246      time and this note, use the label we emitted last time.  Otherwise
27247 create a new label and emit it. */
27248 if (last_label == NULL)
27249 {
27250 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27251 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27252 loclabel_num++;
27253 last_label = ggc_strdup (loclabel);
27254 /* See if loclabel might be equal to .Ltext0. If yes,
27255 bump first_loclabel_num_not_at_text_label. */
27256 if (!have_multiple_function_sections
27257 && in_first_function_p
27258 && maybe_at_text_label_p)
27259 {
27260 static rtx_insn *last_start;
27261 rtx_insn *insn;
27262 for (insn = loc_note; insn; insn = previous_insn (insn))
27263 if (insn == last_start)
27264 break;
27265 else if (!NONDEBUG_INSN_P (insn))
27266 continue;
27267 else
27268 {
27269 rtx body = PATTERN (insn);
27270 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27271 continue;
27272 /* Inline asm could occupy zero bytes. */
27273 else if (GET_CODE (body) == ASM_INPUT
27274 || asm_noperands (body) >= 0)
27275 continue;
27276 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27277 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27278 continue;
27279 #endif
27280 else
27281 {
27282 /* Assume insn has non-zero length. */
27283 maybe_at_text_label_p = false;
27284 break;
27285 }
27286 }
27287 if (maybe_at_text_label_p)
27288 {
27289 last_start = loc_note;
27290 first_loclabel_num_not_at_text_label = loclabel_num;
27291 }
27292 }
27293 }
27294
27295 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27296 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27297
27298 if (!var_loc_p)
27299 {
27300 struct call_arg_loc_node *ca_loc
27301 = ggc_cleared_alloc<call_arg_loc_node> ();
27302 rtx_insn *prev = call_insn;
27303
27304 ca_loc->call_arg_loc_note
27305 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27306 ca_loc->next = NULL;
27307 ca_loc->label = last_label;
27308 gcc_assert (prev
27309 && (CALL_P (prev)
27310 || (NONJUMP_INSN_P (prev)
27311 && GET_CODE (PATTERN (prev)) == SEQUENCE
27312 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27313 if (!CALL_P (prev))
27314 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27315 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27316
27317 /* Look for a SYMBOL_REF in the "prev" instruction. */
27318 rtx x = get_call_rtx_from (PATTERN (prev));
27319 if (x)
27320 {
27321 /* Try to get the call symbol, if any. */
27322 if (MEM_P (XEXP (x, 0)))
27323 x = XEXP (x, 0);
27324 /* First, look for a memory access to a symbol_ref. */
27325 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27326 && SYMBOL_REF_DECL (XEXP (x, 0))
27327 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27328 ca_loc->symbol_ref = XEXP (x, 0);
27329 /* Otherwise, look at a compile-time known user-level function
27330 declaration. */
27331 else if (MEM_P (x)
27332 && MEM_EXPR (x)
27333 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27334 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27335 }
27336
27337 ca_loc->block = insn_scope (prev);
27338 if (call_arg_locations)
27339 call_arg_loc_last->next = ca_loc;
27340 else
27341 call_arg_locations = ca_loc;
27342 call_arg_loc_last = ca_loc;
27343 }
27344 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27345 {
27346 newloc->label = last_label;
27347 newloc->view = view;
27348 }
27349 else
27350 {
27351 if (!last_postcall_label)
27352 {
27353 sprintf (loclabel, "%s-1", last_label);
27354 last_postcall_label = ggc_strdup (loclabel);
27355 }
27356 newloc->label = last_postcall_label;
27357 /* ??? This view is at last_label, not last_label-1, but we
27358 could only assume view at last_label-1 is zero if we could
27359 assume calls always have length greater than one. This is
27360 probably true in general, though there might be a rare
27361 exception to this rule, e.g. if a call insn is optimized out
27362 by target magic. Then, even the -1 in the label will be
27363 wrong, which might invalidate the range. Anyway, using view,
27364 though technically possibly incorrect, will work as far as
27365 ranges go: since L-1 is in the middle of the call insn,
27366 (L-1).0 and (L-1).V shouldn't make any difference, and having
27367 the loclist entry refer to the .loc entry might be useful, so
27368 leave it like this. */
27369 newloc->view = view;
27370 }
27371
27372 if (var_loc_p && flag_debug_asm)
27373 {
27374 const char *name, *sep, *patstr;
27375 if (decl && DECL_NAME (decl))
27376 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27377 else
27378 name = "";
27379 if (NOTE_VAR_LOCATION_LOC (loc_note))
27380 {
27381 sep = " => ";
27382 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27383 }
27384 else
27385 {
27386 sep = " ";
27387 patstr = "RESET";
27388 }
27389 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27390 name, sep, patstr);
27391 }
27392
27393 last_var_location_insn = next_real;
27394 last_in_cold_section_p = in_cold_section_p;
27395 }
27396
27397 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27398 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27399 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27400 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27401 BLOCK_FRAGMENT_ORIGIN links. */
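/* An illustrative instance of the invariant checked here: if OUTER's
   BLOCK_SUBBLOCKS chain holds blocks A and B, and BLOCK sits on B's
   BLOCK_SUBBLOCKS chain, the upward walk reaches OUTER through B via
   BLOCK_SUPERCONTEXT, and with BOTHWAYS the downward walk must in turn
   find B under OUTER and BLOCK under B (going through
   BLOCK_FRAGMENT_ORIGIN for block fragments).  */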
27402 static bool
27403 block_within_block_p (tree block, tree outer, bool bothways)
27404 {
27405 if (block == outer)
27406 return true;
27407
27408 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27409 for (tree context = BLOCK_SUPERCONTEXT (block);
27410 context != outer;
27411 context = BLOCK_SUPERCONTEXT (context))
27412 if (!context || TREE_CODE (context) != BLOCK)
27413 return false;
27414
27415 if (!bothways)
27416 return true;
27417
27418 /* Now check that each block is actually referenced by its
27419 parent. */
27420 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27421 context = BLOCK_SUPERCONTEXT (context))
27422 {
27423 if (BLOCK_FRAGMENT_ORIGIN (context))
27424 {
27425 gcc_assert (!BLOCK_SUBBLOCKS (context));
27426 context = BLOCK_FRAGMENT_ORIGIN (context);
27427 }
27428 for (tree sub = BLOCK_SUBBLOCKS (context);
27429 sub != block;
27430 sub = BLOCK_CHAIN (sub))
27431 if (!sub)
27432 return false;
27433 if (context == outer)
27434 return true;
27435 else
27436 block = context;
27437 }
27438 }
27439
27440 /* Called during final while assembling the marker of the entry point
27441 for an inlined function. */
27442
27443 static void
27444 dwarf2out_inline_entry (tree block)
27445 {
27446 gcc_assert (debug_inline_points);
27447
27448 /* If we can't represent it, don't bother. */
27449 if (!(dwarf_version >= 3 || !dwarf_strict))
27450 return;
27451
27452 gcc_assert (DECL_P (block_ultimate_origin (block)));
27453
27454 /* Sanity check the block tree. This would catch a case in which
27455 BLOCK got removed from the tree reachable from the outermost
27456 lexical block, but got retained in markers. It would still link
27457 back to its parents, but some ancestor would be missing a link
27458 down the path to the sub BLOCK. If the block got removed, its
27459 BLOCK_NUMBER will not be a usable value. */
27460 if (flag_checking)
27461 gcc_assert (block_within_block_p (block,
27462 DECL_INITIAL (current_function_decl),
27463 true));
27464
27465 gcc_assert (inlined_function_outer_scope_p (block));
27466 gcc_assert (!BLOCK_DIE (block));
27467
27468 if (BLOCK_FRAGMENT_ORIGIN (block))
27469 block = BLOCK_FRAGMENT_ORIGIN (block);
27470 /* Can the entry point ever not be at the beginning of an
27471 unfragmented lexical block? */
27472 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27473 || (cur_line_info_table
27474 && !ZERO_VIEW_P (cur_line_info_table->view))))
27475 return;
27476
27477 if (!inline_entry_data_table)
27478 inline_entry_data_table
27479 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27480
27481
27482 inline_entry_data **iedp
27483 = inline_entry_data_table->find_slot_with_hash (block,
27484 htab_hash_pointer (block),
27485 INSERT);
27486 if (*iedp)
27487 /* ??? Ideally, we'd record all entry points for the same inlined
27488 function (some may have been duplicated by e.g. unrolling), but
27489 we have no way to represent that ATM. */
27490 return;
27491
27492 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27493 ied->block = block;
27494 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27495 ied->label_num = BLOCK_NUMBER (block);
27496 if (cur_line_info_table)
27497 ied->view = cur_line_info_table->view;
27498
27499 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27500
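  /* The data recorded in inline_entry_data_table above lets later DIE
     generation refer back to the label emitted below, which marks the
     inline entry address; e.g. it might look like ".LBI42", assuming
     BLOCK_INLINE_ENTRY_LABEL expands to "LBI" and BLOCK_NUMBER (block)
     is 42 (both the prefix and the number are illustrative here).  */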
27501 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27502 BLOCK_NUMBER (block));
27503 ASM_OUTPUT_LABEL (asm_out_file, label);
27504 }
27505
27506 /* Called from finalize_size_functions for size functions so that their body
27507 can be encoded in the debug info to describe the layout of variable-length
27508 structures. */
27509
27510 static void
27511 dwarf2out_size_function (tree decl)
27512 {
27513 function_to_dwarf_procedure (decl);
27514 }
27515
27516 /* Note in one location list that text section has changed. */
27517
27518 int
27519 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27520 {
27521 var_loc_list *list = *slot;
27522 if (list->first)
27523 list->last_before_switch
27524 = list->last->next ? list->last->next : list->last;
27525 return 1;
27526 }
27527
27528 /* Note in all location lists that text section has changed. */
27529
27530 static void
27531 var_location_switch_text_section (void)
27532 {
27533 if (decl_loc_table == NULL)
27534 return;
27535
27536 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27537 }
27538
27539 /* Create a new line number table. */
27540
27541 static dw_line_info_table *
27542 new_line_info_table (void)
27543 {
27544 dw_line_info_table *table;
27545
27546 table = ggc_cleared_alloc<dw_line_info_table> ();
27547 table->file_num = 1;
27548 table->line_num = 1;
27549 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27550 FORCE_RESET_NEXT_VIEW (table->view);
27551 table->symviews_since_reset = 0;
27552
27553 return table;
27554 }
27555
27556 /* Look up the "current" table into which we emit line info, so
27557    that we don't have to do it for every source line.  */
27558
27559 static void
27560 set_cur_line_info_table (section *sec)
27561 {
27562 dw_line_info_table *table;
27563
27564 if (sec == text_section)
27565 table = text_section_line_info;
27566 else if (sec == cold_text_section)
27567 {
27568 table = cold_text_section_line_info;
27569 if (!table)
27570 {
27571 cold_text_section_line_info = table = new_line_info_table ();
27572 table->end_label = cold_end_label;
27573 }
27574 }
27575 else
27576 {
27577 const char *end_label;
27578
27579 if (crtl->has_bb_partition)
27580 {
27581 if (in_cold_section_p)
27582 end_label = crtl->subsections.cold_section_end_label;
27583 else
27584 end_label = crtl->subsections.hot_section_end_label;
27585 }
27586 else
27587 {
27588 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27589 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27590 current_function_funcdef_no);
27591 end_label = ggc_strdup (label);
27592 }
27593
27594 table = new_line_info_table ();
27595 table->end_label = end_label;
27596
27597 vec_safe_push (separate_line_info, table);
27598 }
27599
27600 if (output_asm_line_debug_info ())
27601 table->is_stmt = (cur_line_info_table
27602 ? cur_line_info_table->is_stmt
27603 : DWARF_LINE_DEFAULT_IS_STMT_START);
27604 cur_line_info_table = table;
27605 }
27606
27607
27608 /* We need to reset the locations at the beginning of each
27609 function. We can't do this in the end_function hook, because the
27610 declarations that use the locations won't have been output when
27611 that hook is called. Also compute have_multiple_function_sections here. */
27612
27613 static void
27614 dwarf2out_begin_function (tree fun)
27615 {
27616 section *sec = function_section (fun);
27617
27618 if (sec != text_section)
27619 have_multiple_function_sections = true;
27620
27621 if (crtl->has_bb_partition && !cold_text_section)
27622 {
27623 gcc_assert (current_function_decl == fun);
27624 cold_text_section = unlikely_text_section ();
27625 switch_to_section (cold_text_section);
27626 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27627 switch_to_section (sec);
27628 }
27629
27630 dwarf2out_note_section_used ();
27631 call_site_count = 0;
27632 tail_call_site_count = 0;
27633
27634 set_cur_line_info_table (sec);
27635 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27636 }
27637
27638 /* Helper function of dwarf2out_end_function, called only after emitting
27639 the very first function into assembly. Check if some .debug_loc range
27640 might end with a .LVL* label that could be equal to .Ltext0.
27641 In that case we must force using absolute addresses in .debug_loc ranges,
27642 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27643 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27644 list terminator.
27645 Set have_multiple_function_sections to true in that case and
27646 terminate htab traversal. */
27647
27648 int
27649 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27650 {
27651 var_loc_list *entry = *slot;
27652 struct var_loc_node *node;
27653
27654 node = entry->first;
27655 if (node && node->next && node->next->label)
27656 {
27657 unsigned int i;
27658 const char *label = node->next->label;
27659 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27660
27661 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27662 {
27663 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27664 if (strcmp (label, loclabel) == 0)
27665 {
27666 have_multiple_function_sections = true;
27667 return 0;
27668 }
27669 }
27670 }
27671 return 1;
27672 }
27673
27674 /* Hook called after emitting a function into assembly.
27675    Only the very first function emitted requires any work here.  */
27676
27677 static void
27678 dwarf2out_end_function (unsigned int)
27679 {
27680 if (in_first_function_p
27681 && !have_multiple_function_sections
27682 && first_loclabel_num_not_at_text_label
27683 && decl_loc_table)
27684 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27685 in_first_function_p = false;
27686 maybe_at_text_label_p = false;
27687 }
27688
27689 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27690 front-ends register a translation unit even before dwarf2out_init is
27691 called. */
27692 static tree main_translation_unit = NULL_TREE;
27693
27694 /* Hook called by front-ends after they built their main translation unit.
27695 Associate comp_unit_die to UNIT. */
27696
27697 static void
27698 dwarf2out_register_main_translation_unit (tree unit)
27699 {
27700 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27701 && main_translation_unit == NULL_TREE);
27702 main_translation_unit = unit;
27703 /* If dwarf2out_init has not been called yet, it will perform the association
27704 itself looking at main_translation_unit. */
27705 if (decl_die_table != NULL)
27706 equate_decl_number_to_die (unit, comp_unit_die ());
27707 }
27708
27709 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27710
27711 static void
27712 push_dw_line_info_entry (dw_line_info_table *table,
27713 enum dw_line_info_opcode opcode, unsigned int val)
27714 {
27715 dw_line_info_entry e;
27716 e.opcode = opcode;
27717 e.val = val;
27718 vec_safe_push (table->entries, e);
27719 }
27720
27721 /* Output a label to mark the beginning of a source code line entry
27722 and record information relating to this source line, in
27723 'line_info_table' for later output of the .debug_line section. */
27724 /* ??? The discriminator parameter ought to be unsigned. */
27725
27726 static void
27727 dwarf2out_source_line (unsigned int line, unsigned int column,
27728 const char *filename,
27729 int discriminator, bool is_stmt)
27730 {
27731 unsigned int file_num;
27732 dw_line_info_table *table;
27733 static var_loc_view lvugid;
27734
27735 if (debug_info_level < DINFO_LEVEL_TERSE)
27736 return;
27737
27738 table = cur_line_info_table;
27739
27740 if (line == 0)
27741 {
27742 if (debug_variable_location_views
27743 && output_asm_line_debug_info ()
27744 && table && !RESETTING_VIEW_P (table->view))
27745 {
27746 /* If we're using the assembler to compute view numbers, we
27747 can't issue a .loc directive for line zero, so we can't
27748 get a view number at this point. We might attempt to
27749 compute it from the previous view, or equate it to a
27750 subsequent view (though it might not be there!), but
27751 since we're omitting the line number entry, we might as
27752 well omit the view number as well. That means pretending
27753 it's a view number zero, which might very well turn out
27754 to be correct. ??? Extend the assembler so that the
27755 compiler could emit e.g. ".locview .LVU#", to output a
27756 view without changing line number information. We'd then
27757 have to count it in symviews_since_reset; when it's omitted,
27758 it doesn't count. */
27759 if (!zero_view_p)
27760 zero_view_p = BITMAP_GGC_ALLOC ();
27761 bitmap_set_bit (zero_view_p, table->view);
27762 if (flag_debug_asm)
27763 {
27764 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27765 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27766 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27767 ASM_COMMENT_START);
27768 assemble_name (asm_out_file, label);
27769 putc ('\n', asm_out_file);
27770 }
27771 table->view = ++lvugid;
27772 }
27773 return;
27774 }
27775
27776 /* The discriminator column was added in dwarf4. Simplify the below
27777 by simply removing it if we're not supposed to output it. */
27778 if (dwarf_version < 4 && dwarf_strict)
27779 discriminator = 0;
27780
27781 if (!debug_column_info)
27782 column = 0;
27783
27784 file_num = maybe_emit_file (lookup_filename (filename));
27785
27786 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27787 the debugger has used the second (possibly duplicate) line number
27788 at the beginning of the function to mark the end of the prologue.
27789 We could eliminate any other duplicates within the function. For
27790 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27791 that second line number entry. */
27792 /* Recall that this end-of-prologue indication is *not* the same thing
27793 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27794 to which the hook corresponds, follows the last insn that was
27795 emitted by gen_prologue. What we need is to precede the first insn
27796 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27797 insn that corresponds to something the user wrote. These may be
27798 very different locations once scheduling is enabled. */
27799
27800 if (0 && file_num == table->file_num
27801 && line == table->line_num
27802 && column == table->column_num
27803 && discriminator == table->discrim_num
27804 && is_stmt == table->is_stmt)
27805 return;
27806
27807 switch_to_section (current_function_section ());
27808
27809 /* If requested, emit something human-readable. */
27810 if (flag_debug_asm)
27811 {
27812 if (debug_column_info)
27813 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27814 filename, line, column);
27815 else
27816 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27817 filename, line);
27818 }
27819
27820 if (output_asm_line_debug_info ())
27821 {
27822 /* Emit the .loc directive understood by GNU as. */
27823       /* "\t.loc %u %u %u [is_stmt %u] [discriminator %u] [view ...]",
27824 	 file_num, line, column, is_stmt, discriminator */
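      /* For instance (operand values purely illustrative), the complete
	 line written out below could be
	   .loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5
	 with the is_stmt, discriminator and view operands present only
	 when the corresponding conditions below hold.  */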
27825 fputs ("\t.loc ", asm_out_file);
27826 fprint_ul (asm_out_file, file_num);
27827 putc (' ', asm_out_file);
27828 fprint_ul (asm_out_file, line);
27829 putc (' ', asm_out_file);
27830 fprint_ul (asm_out_file, column);
27831
27832 if (is_stmt != table->is_stmt)
27833 {
27834 fputs (" is_stmt ", asm_out_file);
27835 putc (is_stmt ? '1' : '0', asm_out_file);
27836 }
27837 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27838 {
27839 gcc_assert (discriminator > 0);
27840 fputs (" discriminator ", asm_out_file);
27841 fprint_ul (asm_out_file, (unsigned long) discriminator);
27842 }
27843 if (debug_variable_location_views)
27844 {
27845 if (!RESETTING_VIEW_P (table->view))
27846 {
27847 table->symviews_since_reset++;
27848 if (table->symviews_since_reset > symview_upper_bound)
27849 symview_upper_bound = table->symviews_since_reset;
27850 /* When we're using the assembler to compute view
27851 numbers, we output symbolic labels after "view" in
27852 .loc directives, and the assembler will set them for
27853 us, so that we can refer to the view numbers in
27854 location lists. The only exceptions are when we know
27855 a view will be zero: "-0" is a forced reset, used
27856 e.g. in the beginning of functions, whereas "0" tells
27857 the assembler to check that there was a PC change
27858 since the previous view, in a way that implicitly
27859 resets the next view. */
27860 fputs (" view ", asm_out_file);
27861 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27862 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27863 assemble_name (asm_out_file, label);
27864 table->view = ++lvugid;
27865 }
27866 else
27867 {
27868 table->symviews_since_reset = 0;
27869 if (FORCE_RESETTING_VIEW_P (table->view))
27870 fputs (" view -0", asm_out_file);
27871 else
27872 fputs (" view 0", asm_out_file);
27873 /* Mark the present view as a zero view. Earlier debug
27874 binds may have already added its id to loclists to be
27875 emitted later, so we can't reuse the id for something
27876 else. However, it's good to know whether a view is
27877 known to be zero, because then we may be able to
27878 optimize out locviews that are all zeros, so take
27879 note of it in zero_view_p. */
27880 if (!zero_view_p)
27881 zero_view_p = BITMAP_GGC_ALLOC ();
27882 bitmap_set_bit (zero_view_p, lvugid);
27883 table->view = ++lvugid;
27884 }
27885 }
27886 putc ('\n', asm_out_file);
27887 }
27888 else
27889 {
27890 unsigned int label_num = ++line_info_label_num;
27891
27892 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27893
27894 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27895 push_dw_line_info_entry (table, LI_adv_address, label_num);
27896 else
27897 push_dw_line_info_entry (table, LI_set_address, label_num);
27898 if (debug_variable_location_views)
27899 {
27900 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27901 if (resetting)
27902 table->view = 0;
27903
27904 if (flag_debug_asm)
27905 fprintf (asm_out_file, "\t%s view %s%d\n",
27906 ASM_COMMENT_START,
27907 resetting ? "-" : "",
27908 table->view);
27909
27910 table->view++;
27911 }
27912 if (file_num != table->file_num)
27913 push_dw_line_info_entry (table, LI_set_file, file_num);
27914 if (discriminator != table->discrim_num)
27915 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27916 if (is_stmt != table->is_stmt)
27917 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27918 push_dw_line_info_entry (table, LI_set_line, line);
27919 if (debug_column_info)
27920 push_dw_line_info_entry (table, LI_set_column, column);
27921 }
27922
27923 table->file_num = file_num;
27924 table->line_num = line;
27925 table->column_num = column;
27926 table->discrim_num = discriminator;
27927 table->is_stmt = is_stmt;
27928 table->in_use = true;
27929 }
27930
27931 /* Record the beginning of a new source file. */
27932
27933 static void
27934 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27935 {
27936 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27937 {
27938 macinfo_entry e;
27939 e.code = DW_MACINFO_start_file;
27940 e.lineno = lineno;
27941 e.info = ggc_strdup (filename);
27942 vec_safe_push (macinfo_table, e);
27943 }
27944 }
27945
27946 /* Record the end of a source file. */
27947
27948 static void
27949 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27950 {
27951 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27952 {
27953 macinfo_entry e;
27954 e.code = DW_MACINFO_end_file;
27955 e.lineno = lineno;
27956 e.info = NULL;
27957 vec_safe_push (macinfo_table, e);
27958 }
27959 }
27960
27961 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27962 the tail part of the directive line, i.e. the part which is past the
27963 initial whitespace, #, whitespace, directive-name, whitespace part. */
27964
27965 static void
27966 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27967 const char *buffer ATTRIBUTE_UNUSED)
27968 {
27969 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27970 {
27971 macinfo_entry e;
27972 /* Insert a dummy first entry to be able to optimize the whole
27973 predefined macro block using DW_MACRO_import. */
27974 if (macinfo_table->is_empty () && lineno <= 1)
27975 {
27976 e.code = 0;
27977 e.lineno = 0;
27978 e.info = NULL;
27979 vec_safe_push (macinfo_table, e);
27980 }
27981 e.code = DW_MACINFO_define;
27982 e.lineno = lineno;
27983 e.info = ggc_strdup (buffer);
27984 vec_safe_push (macinfo_table, e);
27985 }
27986 }
27987
27988 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27989 the tail part of the directive line, i.e. the part which is past the
27990 initial whitespace, #, whitespace, directive-name, whitespace part. */
27991
27992 static void
27993 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27994 const char *buffer ATTRIBUTE_UNUSED)
27995 {
27996 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27997 {
27998 macinfo_entry e;
27999 /* Insert a dummy first entry to be able to optimize the whole
28000 predefined macro block using DW_MACRO_import. */
28001 if (macinfo_table->is_empty () && lineno <= 1)
28002 {
28003 e.code = 0;
28004 e.lineno = 0;
28005 e.info = NULL;
28006 vec_safe_push (macinfo_table, e);
28007 }
28008 e.code = DW_MACINFO_undef;
28009 e.lineno = lineno;
28010 e.info = ggc_strdup (buffer);
28011 vec_safe_push (macinfo_table, e);
28012 }
28013 }
28014
28015 /* Helpers to manipulate the hash table of macinfo entries.  */
28016
28017 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28018 {
28019 static inline hashval_t hash (const macinfo_entry *);
28020 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28021 };
28022
28023 inline hashval_t
28024 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28025 {
28026 return htab_hash_string (entry->info);
28027 }
28028
28029 inline bool
28030 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28031 const macinfo_entry *entry2)
28032 {
28033 return !strcmp (entry1->info, entry2->info);
28034 }
28035
28036 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28037
28038 /* Output a single .debug_macinfo entry. */
28039
28040 static void
28041 output_macinfo_op (macinfo_entry *ref)
28042 {
28043 int file_num;
28044 size_t len;
28045 struct indirect_string_node *node;
28046 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28047 struct dwarf_file_data *fd;
28048
28049 switch (ref->code)
28050 {
28051 case DW_MACINFO_start_file:
28052 fd = lookup_filename (ref->info);
28053 file_num = maybe_emit_file (fd);
28054 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28055 dw2_asm_output_data_uleb128 (ref->lineno,
28056 "Included from line number %lu",
28057 (unsigned long) ref->lineno);
28058 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28059 break;
28060 case DW_MACINFO_end_file:
28061 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28062 break;
28063 case DW_MACINFO_define:
28064 case DW_MACINFO_undef:
28065 len = strlen (ref->info) + 1;
28066 if (!dwarf_strict
28067 && len > DWARF_OFFSET_SIZE
28068 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28069 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28070 {
28071 ref->code = ref->code == DW_MACINFO_define
28072 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28073 output_macinfo_op (ref);
28074 return;
28075 }
28076 dw2_asm_output_data (1, ref->code,
28077 ref->code == DW_MACINFO_define
28078 ? "Define macro" : "Undefine macro");
28079 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28080 (unsigned long) ref->lineno);
28081 dw2_asm_output_nstring (ref->info, -1, "The macro");
28082 break;
28083 case DW_MACRO_define_strp:
28084 case DW_MACRO_undef_strp:
28085 node = find_AT_string (ref->info);
28086 gcc_assert (node
28087 && (node->form == DW_FORM_strp
28088 || node->form == dwarf_FORM (DW_FORM_strx)));
28089 dw2_asm_output_data (1, ref->code,
28090 ref->code == DW_MACRO_define_strp
28091 ? "Define macro strp"
28092 : "Undefine macro strp");
28093 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28094 (unsigned long) ref->lineno);
28095 if (node->form == DW_FORM_strp)
28096 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28097 debug_str_section, "The macro: \"%s\"",
28098 ref->info);
28099 else
28100 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28101 ref->info);
28102 break;
28103 case DW_MACRO_import:
28104 dw2_asm_output_data (1, ref->code, "Import");
28105 ASM_GENERATE_INTERNAL_LABEL (label,
28106 DEBUG_MACRO_SECTION_LABEL,
28107 ref->lineno + macinfo_label_base);
28108 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28109 break;
28110 default:
28111 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28112 ASM_COMMENT_START, (unsigned long) ref->code);
28113 break;
28114 }
28115 }
28116
28117 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28118    other compilation units' .debug_macinfo sections.  IDX is the index
28119    of the first define/undef; return the number of ops that should be
28120    emitted in a comdat .debug_macinfo section and emit
28121    a DW_MACRO_import entry referencing it.
28122    If the define/undef entry should be emitted normally, return 0.  */
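/* A sketch of the transformation (entry values hypothetical): given
   macinfo entries [..., <code 0 dummy>, define FOO, undef BAR,
   define BAZ, start_file ...] with IDX at "define FOO", the three
   consecutive define/undef ops are checksummed, the dummy slot at
   IDX - 1 becomes a DW_MACRO_import of a comdat group whose name
   encodes that checksum, and 3 is returned so the caller skips the
   run.  */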
28123
28124 static unsigned
28125 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28126 macinfo_hash_type **macinfo_htab)
28127 {
28128 macinfo_entry *first, *second, *cur, *inc;
28129 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28130 unsigned char checksum[16];
28131 struct md5_ctx ctx;
28132 char *grp_name, *tail;
28133 const char *base;
28134 unsigned int i, count, encoded_filename_len, linebuf_len;
28135 macinfo_entry **slot;
28136
28137 first = &(*macinfo_table)[idx];
28138 second = &(*macinfo_table)[idx + 1];
28139
28140   /* Optimize only if there are at least two consecutive define/undef ops,
28141      and either all of them are before the first DW_MACINFO_start_file
28142      with lineno {0,1} (i.e. predefined macro block), or all of them are
28143      in some included header file.  */
28144 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28145 return 0;
28146 if (vec_safe_is_empty (files))
28147 {
28148 if (first->lineno > 1 || second->lineno > 1)
28149 return 0;
28150 }
28151 else if (first->lineno == 0)
28152 return 0;
28153
28154 /* Find the last define/undef entry that can be grouped together
28155 with first and at the same time compute md5 checksum of their
28156 codes, linenumbers and strings. */
28157 md5_init_ctx (&ctx);
28158 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28159 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28160 break;
28161 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28162 break;
28163 else
28164 {
28165 unsigned char code = cur->code;
28166 md5_process_bytes (&code, 1, &ctx);
28167 checksum_uleb128 (cur->lineno, &ctx);
28168 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28169 }
28170 md5_finish_ctx (&ctx, checksum);
28171 count = i - idx;
28172
28173 /* From the containing include filename (if any) pick up just
28174 usable characters from its basename. */
28175 if (vec_safe_is_empty (files))
28176 base = "";
28177 else
28178 base = lbasename (files->last ().info);
28179 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28180 if (ISIDNUM (base[i]) || base[i] == '.')
28181 encoded_filename_len++;
28182 /* Count . at the end. */
28183 if (encoded_filename_len)
28184 encoded_filename_len++;
28185
28186 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28187 linebuf_len = strlen (linebuf);
28188
28189 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
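  /* E.g. (purely illustrative values) for an include named stdio.h,
     first->lineno 19 and 4-byte DWARF offsets, the result would look
     like "wm4.stdio.h.19." followed by 32 hex digits of the MD5 sum.  */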
28190 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28191 + 16 * 2 + 1);
28192 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28193 tail = grp_name + 4;
28194 if (encoded_filename_len)
28195 {
28196 for (i = 0; base[i]; i++)
28197 if (ISIDNUM (base[i]) || base[i] == '.')
28198 *tail++ = base[i];
28199 *tail++ = '.';
28200 }
28201 memcpy (tail, linebuf, linebuf_len);
28202 tail += linebuf_len;
28203 *tail++ = '.';
28204 for (i = 0; i < 16; i++)
28205 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28206
28207 /* Construct a macinfo_entry for DW_MACRO_import
28208 in the empty vector entry before the first define/undef. */
28209 inc = &(*macinfo_table)[idx - 1];
28210 inc->code = DW_MACRO_import;
28211 inc->lineno = 0;
28212 inc->info = ggc_strdup (grp_name);
28213 if (!*macinfo_htab)
28214 *macinfo_htab = new macinfo_hash_type (10);
28215 /* Avoid emitting duplicates. */
28216 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28217 if (*slot != NULL)
28218 {
28219 inc->code = 0;
28220 inc->info = NULL;
28221 /* If such an entry has been used before, just emit
28222 a DW_MACRO_import op. */
28223 inc = *slot;
28224 output_macinfo_op (inc);
28225 /* And clear all macinfo_entry in the range to avoid emitting them
28226 in the second pass. */
28227 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28228 {
28229 cur->code = 0;
28230 cur->info = NULL;
28231 }
28232 }
28233 else
28234 {
28235 *slot = inc;
28236 inc->lineno = (*macinfo_htab)->elements ();
28237 output_macinfo_op (inc);
28238 }
28239 return count;
28240 }
28241
28242 /* Save any strings needed by the macinfo table in the debug str
28243 table. All strings must be collected into the table by the time
28244 index_string is called. */
28245
28246 static void
28247 save_macinfo_strings (void)
28248 {
28249 unsigned len;
28250 unsigned i;
28251 macinfo_entry *ref;
28252
28253 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28254 {
28255 switch (ref->code)
28256 {
28257 /* Match the logic in output_macinfo_op to decide on
28258 indirect strings. */
28259 case DW_MACINFO_define:
28260 case DW_MACINFO_undef:
28261 len = strlen (ref->info) + 1;
28262 if (!dwarf_strict
28263 && len > DWARF_OFFSET_SIZE
28264 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28265 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28266 set_indirect_string (find_AT_string (ref->info));
28267 break;
28268 case DW_MACRO_define_strp:
28269 case DW_MACRO_undef_strp:
28270 set_indirect_string (find_AT_string (ref->info));
28271 break;
28272 default:
28273 break;
28274 }
28275 }
28276 }
28277
28278 /* Output macinfo section(s). */
28279
28280 static void
28281 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28282 {
28283 unsigned i;
28284 unsigned long length = vec_safe_length (macinfo_table);
28285 macinfo_entry *ref;
28286 vec<macinfo_entry, va_gc> *files = NULL;
28287 macinfo_hash_type *macinfo_htab = NULL;
28288 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28289
28290 if (! length)
28291 return;
28292
28293 /* output_macinfo* uses these interchangeably. */
28294 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28295 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28296 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28297 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28298
28299 /* AIX Assembler inserts the length, so adjust the reference to match the
28300 offset expected by debuggers. */
28301 strcpy (dl_section_ref, debug_line_label);
28302 if (XCOFF_DEBUGGING_INFO)
28303 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28304
28305 /* For .debug_macro emit the section header. */
28306 if (!dwarf_strict || dwarf_version >= 5)
28307 {
28308 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28309 "DWARF macro version number");
28310 if (DWARF_OFFSET_SIZE == 8)
28311 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28312 else
28313 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28314 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28315 debug_line_section, NULL);
28316 }
28317
28318   /* The first loop emits the primary .debug_macinfo section and
28319      clears each macinfo_entry right after its op has been emitted.
28320      If a longer range of define/undef ops can be optimized using
28321      DW_MACRO_import, a DW_MACRO_import op is emitted and kept in the
28322      vector entry just before the first define/undef of the range, and
28323      the define/undef ops themselves are kept but not emitted here.  */
28324 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28325 {
28326 switch (ref->code)
28327 {
28328 case DW_MACINFO_start_file:
28329 vec_safe_push (files, *ref);
28330 break;
28331 case DW_MACINFO_end_file:
28332 if (!vec_safe_is_empty (files))
28333 files->pop ();
28334 break;
28335 case DW_MACINFO_define:
28336 case DW_MACINFO_undef:
28337 if ((!dwarf_strict || dwarf_version >= 5)
28338 && HAVE_COMDAT_GROUP
28339 && vec_safe_length (files) != 1
28340 && i > 0
28341 && i + 1 < length
28342 && (*macinfo_table)[i - 1].code == 0)
28343 {
28344 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28345 if (count)
28346 {
28347 i += count - 1;
28348 continue;
28349 }
28350 }
28351 break;
28352 case 0:
28353 /* A dummy entry may be inserted at the beginning to be able
28354 to optimize the whole block of predefined macros. */
28355 if (i == 0)
28356 continue;
28357 default:
28358 break;
28359 }
28360 output_macinfo_op (ref);
28361 ref->info = NULL;
28362 ref->code = 0;
28363 }
28364
28365 if (!macinfo_htab)
28366 return;
28367
28368 /* Save the number of transparent includes so we can adjust the
28369 label number for the fat LTO object DWARF. */
28370 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28371
28372 delete macinfo_htab;
28373 macinfo_htab = NULL;
28374
28375 /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
28376 terminate the current chain and switch to a new comdat .debug_macinfo
28377 section and emit the define/undef entries within it. */
28378 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28379 switch (ref->code)
28380 {
28381 case 0:
28382 continue;
28383 case DW_MACRO_import:
28384 {
28385 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28386 tree comdat_key = get_identifier (ref->info);
28387 /* Terminate the previous .debug_macinfo section. */
28388 dw2_asm_output_data (1, 0, "End compilation unit");
28389 targetm.asm_out.named_section (debug_macinfo_section_name,
28390 SECTION_DEBUG
28391 | SECTION_LINKONCE
28392 | (early_lto_debug
28393 ? SECTION_EXCLUDE : 0),
28394 comdat_key);
28395 ASM_GENERATE_INTERNAL_LABEL (label,
28396 DEBUG_MACRO_SECTION_LABEL,
28397 ref->lineno + macinfo_label_base);
28398 ASM_OUTPUT_LABEL (asm_out_file, label);
28399 ref->code = 0;
28400 ref->info = NULL;
28401 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28402 "DWARF macro version number");
28403 if (DWARF_OFFSET_SIZE == 8)
28404 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28405 else
28406 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28407 }
28408 break;
28409 case DW_MACINFO_define:
28410 case DW_MACINFO_undef:
28411 output_macinfo_op (ref);
28412 ref->code = 0;
28413 ref->info = NULL;
28414 break;
28415 default:
28416 gcc_unreachable ();
28417 }
28418
28419 macinfo_label_base += macinfo_label_base_adj;
28420 }
28421
28422 /* Initialize the various sections and labels for dwarf output, for
28423    either early LTO debug or regular debug info per EARLY_LTO_DEBUG.
28424    Returns the generation (zero-based count of calls so far).  */
28425
28426 static unsigned
28427 init_sections_and_labels (bool early_lto_debug)
28428 {
28429 /* As we may get called multiple times have a generation count for
28430 labels. */
28431 static unsigned generation = 0;
28432
28433 if (early_lto_debug)
28434 {
28435 if (!dwarf_split_debug_info)
28436 {
28437 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28438 SECTION_DEBUG | SECTION_EXCLUDE,
28439 NULL);
28440 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28441 SECTION_DEBUG | SECTION_EXCLUDE,
28442 NULL);
28443 debug_macinfo_section_name
28444 = ((dwarf_strict && dwarf_version < 5)
28445 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28446 debug_macinfo_section = get_section (debug_macinfo_section_name,
28447 SECTION_DEBUG
28448 | SECTION_EXCLUDE, NULL);
28449 }
28450 else
28451 {
28452 /* ??? Which of the following do we need early? */
28453 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28454 SECTION_DEBUG | SECTION_EXCLUDE,
28455 NULL);
28456 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28457 SECTION_DEBUG | SECTION_EXCLUDE,
28458 NULL);
28459 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28460 SECTION_DEBUG
28461 | SECTION_EXCLUDE, NULL);
28462 debug_skeleton_abbrev_section
28463 = get_section (DEBUG_LTO_ABBREV_SECTION,
28464 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28465 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28466 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28467 generation);
28468
28469 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28470 stay in the main .o, but the skeleton_line goes into the split
28471 off dwo. */
28472 debug_skeleton_line_section
28473 = get_section (DEBUG_LTO_LINE_SECTION,
28474 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28475 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28476 DEBUG_SKELETON_LINE_SECTION_LABEL,
28477 generation);
28478 debug_str_offsets_section
28479 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28480 SECTION_DEBUG | SECTION_EXCLUDE,
28481 NULL);
28482 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28483 DEBUG_SKELETON_INFO_SECTION_LABEL,
28484 generation);
28485 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28486 DEBUG_STR_DWO_SECTION_FLAGS,
28487 NULL);
28488 debug_macinfo_section_name
28489 = ((dwarf_strict && dwarf_version < 5)
28490 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28491 debug_macinfo_section = get_section (debug_macinfo_section_name,
28492 SECTION_DEBUG | SECTION_EXCLUDE,
28493 NULL);
28494 }
28495 /* For macro info and the file table we have to refer to a
28496 debug_line section. */
28497 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28498 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28499 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28500 DEBUG_LINE_SECTION_LABEL, generation);
28501
28502 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28503 DEBUG_STR_SECTION_FLAGS
28504 | SECTION_EXCLUDE, NULL);
28505 if (!dwarf_split_debug_info)
28506 debug_line_str_section
28507 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28508 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28509 }
28510 else
28511 {
28512 if (!dwarf_split_debug_info)
28513 {
28514 debug_info_section = get_section (DEBUG_INFO_SECTION,
28515 SECTION_DEBUG, NULL);
28516 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28517 SECTION_DEBUG, NULL);
28518 debug_loc_section = get_section (dwarf_version >= 5
28519 ? DEBUG_LOCLISTS_SECTION
28520 : DEBUG_LOC_SECTION,
28521 SECTION_DEBUG, NULL);
28522 debug_macinfo_section_name
28523 = ((dwarf_strict && dwarf_version < 5)
28524 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28525 debug_macinfo_section = get_section (debug_macinfo_section_name,
28526 SECTION_DEBUG, NULL);
28527 }
28528 else
28529 {
28530 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28531 SECTION_DEBUG | SECTION_EXCLUDE,
28532 NULL);
28533 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28534 SECTION_DEBUG | SECTION_EXCLUDE,
28535 NULL);
28536 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28537 SECTION_DEBUG, NULL);
28538 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28539 SECTION_DEBUG, NULL);
28540 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28541 SECTION_DEBUG, NULL);
28542 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28543 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28544 generation);
28545
28546 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28547 stay in the main .o, but the skeleton_line goes into the
28548 split off dwo. */
28549 debug_skeleton_line_section
28550 = get_section (DEBUG_DWO_LINE_SECTION,
28551 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28552 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28553 DEBUG_SKELETON_LINE_SECTION_LABEL,
28554 generation);
28555 debug_str_offsets_section
28556 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28557 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28558 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28559 DEBUG_SKELETON_INFO_SECTION_LABEL,
28560 generation);
28561 debug_loc_section = get_section (dwarf_version >= 5
28562 ? DEBUG_DWO_LOCLISTS_SECTION
28563 : DEBUG_DWO_LOC_SECTION,
28564 SECTION_DEBUG | SECTION_EXCLUDE,
28565 NULL);
28566 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28567 DEBUG_STR_DWO_SECTION_FLAGS,
28568 NULL);
28569 debug_macinfo_section_name
28570 = ((dwarf_strict && dwarf_version < 5)
28571 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28572 debug_macinfo_section = get_section (debug_macinfo_section_name,
28573 SECTION_DEBUG | SECTION_EXCLUDE,
28574 NULL);
28575 }
28576 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28577 SECTION_DEBUG, NULL);
28578 debug_line_section = get_section (DEBUG_LINE_SECTION,
28579 SECTION_DEBUG, NULL);
28580 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28581 SECTION_DEBUG, NULL);
28582 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28583 SECTION_DEBUG, NULL);
28584 debug_str_section = get_section (DEBUG_STR_SECTION,
28585 DEBUG_STR_SECTION_FLAGS, NULL);
28586 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28587 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28588 DEBUG_STR_SECTION_FLAGS, NULL);
28589
28590 debug_ranges_section = get_section (dwarf_version >= 5
28591 ? DEBUG_RNGLISTS_SECTION
28592 : DEBUG_RANGES_SECTION,
28593 SECTION_DEBUG, NULL);
28594 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28595 SECTION_DEBUG, NULL);
28596 }
28597
28598 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28599 DEBUG_ABBREV_SECTION_LABEL, generation);
28600 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28601 DEBUG_INFO_SECTION_LABEL, generation);
28602 info_section_emitted = false;
28603 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28604 DEBUG_LINE_SECTION_LABEL, generation);
28605 /* There are up to 4 unique ranges labels per generation.
28606 See also output_rnglists. */
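  /* Of those, indices generation * 4 and 1 + generation * 4 are
     generated just below; the remaining two per generation are
     presumably emitted from output_rnglists.  */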
28607 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28608 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28609 if (dwarf_version >= 5 && dwarf_split_debug_info)
28610 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28611 DEBUG_RANGES_SECTION_LABEL,
28612 1 + generation * 4);
28613 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28614 DEBUG_ADDR_SECTION_LABEL, generation);
28615 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28616 (dwarf_strict && dwarf_version < 5)
28617 ? DEBUG_MACINFO_SECTION_LABEL
28618 : DEBUG_MACRO_SECTION_LABEL, generation);
28619 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28620 generation);
28621
28622 ++generation;
28623 return generation - 1;
28624 }
28625
28626 /* Set up for Dwarf output at the start of compilation. */
28627
28628 static void
28629 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28630 {
28631 /* Allocate the file_table. */
28632 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28633
28634 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28635 /* Allocate the decl_die_table. */
28636 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28637
28638 /* Allocate the decl_loc_table. */
28639 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28640
28641 /* Allocate the cached_dw_loc_list_table. */
28642 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28643
28644 /* Allocate the initial hunk of the abbrev_die_table. */
28645 vec_alloc (abbrev_die_table, 256);
28646 /* Zero-th entry is allocated, but unused. */
28647 abbrev_die_table->quick_push (NULL);
28648
28649 /* Allocate the dwarf_proc_stack_usage_map. */
28650 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28651
28652 /* Allocate the pubtypes and pubnames vectors. */
28653 vec_alloc (pubname_table, 32);
28654 vec_alloc (pubtype_table, 32);
28655
28656 vec_alloc (incomplete_types, 64);
28657
28658 vec_alloc (used_rtx_array, 32);
28659
28660 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28661 vec_alloc (macinfo_table, 64);
28662 #endif
28663
28664 /* If front-ends already registered a main translation unit but we were not
28665 ready to perform the association, do this now. */
28666 if (main_translation_unit != NULL_TREE)
28667 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28668 }
28669
28670 /* Called before compile () starts outputting functions, variables
28671    and toplevel asms into assembly.  */
28672
28673 static void
28674 dwarf2out_assembly_start (void)
28675 {
28676 if (text_section_line_info)
28677 return;
28678
28679 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28680 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28681 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28682 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28683 COLD_TEXT_SECTION_LABEL, 0);
28684 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28685
28686 switch_to_section (text_section);
28687 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28688 #endif
28689
28690 /* Make sure the line number table for .text always exists. */
28691 text_section_line_info = new_line_info_table ();
28692 text_section_line_info->end_label = text_end_label;
28693
28694 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28695 cur_line_info_table = text_section_line_info;
28696 #endif
28697
28698 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28699 && dwarf2out_do_cfi_asm ()
28700 && !dwarf2out_do_eh_frame ())
28701 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28702 }
28703
28704 /* A helper function for dwarf2out_finish called through
28705 htab_traverse. Assign a string its index. All strings must be
28706 collected into the table by the time index_string is called,
28707 because the indexing code relies on htab_traverse to traverse nodes
28708 in the same order for each run. */
28709
28710 int
28711 index_string (indirect_string_node **h, unsigned int *index)
28712 {
28713 indirect_string_node *node = *h;
28714
28715 find_string_form (node);
28716 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28717 {
28718 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28719 node->index = *index;
28720 *index += 1;
28721 }
28722 return 1;
28723 }
28724
28725 /* A helper function for output_indirect_strings called through
28726 htab_traverse. Output the offset to a string and update the
28727 current offset. */
28728
28729 int
28730 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28731 {
28732 indirect_string_node *node = *h;
28733
28734 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28735 {
28736 /* Assert that this node has been assigned an index. */
28737 gcc_assert (node->index != NO_INDEX_ASSIGNED
28738 && node->index != NOT_INDEXED);
28739 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28740 "indexed string 0x%x: %s", node->index, node->str);
28741 *offset += strlen (node->str) + 1;
28742 }
28743 return 1;
28744 }
28745
28746 /* A helper function for dwarf2out_finish called through
28747 htab_traverse. Output the indexed string. */
28748
28749 int
28750 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28751 {
28752 struct indirect_string_node *node = *h;
28753
28754 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28755 {
28756 /* Assert that the strings are output in the same order as their
28757 indexes were assigned. */
28758 gcc_assert (*cur_idx == node->index);
28759 assemble_string (node->str, strlen (node->str) + 1);
28760 *cur_idx += 1;
28761 }
28762 return 1;
28763 }
28764
28765 /* A helper function for output_indirect_strings.  Counts the number
28766    of indexed string offsets.  Must match the logic of the functions
28767    output_index_string[_offset] above.  */
28768 int
28769 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28770 {
28771 struct indirect_string_node *node = *h;
28772
28773 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28774 *last_idx += 1;
28775 return 1;
28776 }
28777
28778 /* A helper function for dwarf2out_finish called through
28779 htab_traverse. Emit one queued .debug_str string. */
28780
28781 int
28782 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28783 {
28784 struct indirect_string_node *node = *h;
28785
28786 node->form = find_string_form (node);
28787 if (node->form == form && node->refcount > 0)
28788 {
28789 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28790 assemble_string (node->str, strlen (node->str) + 1);
28791 }
28792
28793 return 1;
28794 }
28795
28796 /* Output the indexed string table. */
28797
28798 static void
28799 output_indirect_strings (void)
28800 {
28801 switch_to_section (debug_str_section);
28802 if (!dwarf_split_debug_info)
28803 debug_str_hash->traverse<enum dwarf_form,
28804 output_indirect_string> (DW_FORM_strp);
28805 else
28806 {
28807 unsigned int offset = 0;
28808 unsigned int cur_idx = 0;
28809
28810 if (skeleton_debug_str_hash)
28811 skeleton_debug_str_hash->traverse<enum dwarf_form,
28812 output_indirect_string> (DW_FORM_strp);
28813
28814 switch_to_section (debug_str_offsets_section);
28815       /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28816 	 header.  Note that we don't need to generate a label for the
28817 	 actual index table following the header here, because this is
28818 	 for the split dwarf case only.  In a .dwo file there is only
28819 	 one string offsets table (and one debug info section).  But
28820 	 if we were to start using string offset tables for the main (or
28821 	 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28822 	 attribute pointing to the actual index after the header.  Split
28823 	 dwarf units will never have a string offsets base attribute.  When
28824 	 a split unit is moved into a .dwp file the string offsets can be
28825 	 found through the .debug_cu_index section table.  */
28826 if (dwarf_version >= 5)
28827 {
28828 unsigned int last_idx = 0;
28829 unsigned long str_offsets_length;
28830
28831 debug_str_hash->traverse_noresize
28832 <unsigned int *, count_index_strings> (&last_idx);
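	  /* The unit length excludes the length field itself but covers
	     the 2-byte version, the 2-byte padding and one
	     DWARF_OFFSET_SIZE slot per indexed string; e.g. 3 strings
	     with 4-byte offsets give 3 * 4 + 4 = 16.  */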
28833 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28834 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28835 dw2_asm_output_data (4, 0xffffffff,
28836 "Escape value for 64-bit DWARF extension");
28837 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28838 "Length of string offsets unit");
28839 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28840 dw2_asm_output_data (2, 0, "Header zero padding");
28841 }
28842 debug_str_hash->traverse_noresize
28843 <unsigned int *, output_index_string_offset> (&offset);
28844 switch_to_section (debug_str_dwo_section);
28845 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28846 (&cur_idx);
28847 }
28848 }
28849
28850 /* Callback for htab_traverse to assign an index to an entry in the
28851 table, and to write that entry to the .debug_addr section. */
28852
28853 int
28854 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28855 {
28856 addr_table_entry *entry = *slot;
28857
28858 if (entry->refcount == 0)
28859 {
28860 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28861 || entry->index == NOT_INDEXED);
28862 return 1;
28863 }
28864
28865 gcc_assert (entry->index == *cur_index);
28866 (*cur_index)++;
28867
28868 switch (entry->kind)
28869 {
28870 case ate_kind_rtx:
28871 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28872 "0x%x", entry->index);
28873 break;
28874 case ate_kind_rtx_dtprel:
28875 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28876 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28877 DWARF2_ADDR_SIZE,
28878 entry->addr.rtl);
28879 fputc ('\n', asm_out_file);
28880 break;
28881 case ate_kind_label:
28882 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28883 "0x%x", entry->index);
28884 break;
28885 default:
28886 gcc_unreachable ();
28887 }
28888 return 1;
28889 }
28890
28891 /* A helper function for dwarf2out_finish.  Counts the number
28892    of indexed addresses.  Must match the logic of the function
28893    output_addr_table_entry above.  */
28894 int
28895 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28896 {
28897 addr_table_entry *entry = *slot;
28898
28899 if (entry->refcount > 0)
28900 *last_idx += 1;
28901 return 1;
28902 }
28903
28904 /* Produce the .debug_addr section. */
28905
28906 static void
28907 output_addr_table (void)
28908 {
28909 unsigned int index = 0;
28910 if (addr_index_table == NULL || addr_index_table->size () == 0)
28911 return;
28912
28913 switch_to_section (debug_addr_section);
28914 addr_index_table
28915 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28916 }
28917
28918 #if ENABLE_ASSERT_CHECKING
28919 /* Verify that all marks are clear. */
28920
28921 static void
28922 verify_marks_clear (dw_die_ref die)
28923 {
28924 dw_die_ref c;
28925
28926 gcc_assert (! die->die_mark);
28927 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28928 }
28929 #endif /* ENABLE_ASSERT_CHECKING */
28930
28931 /* Clear the marks for a die and its children.
28932 Be cool if the mark isn't set. */
28933
28934 static void
28935 prune_unmark_dies (dw_die_ref die)
28936 {
28937 dw_die_ref c;
28938
28939 if (die->die_mark)
28940 die->die_mark = 0;
28941 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28942 }
28943
28944 /* Given LOC that is referenced by a DIE we're marking as used, find all
28945 referenced DWARF procedures it references and mark them as used. */
28946
28947 static void
28948 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28949 {
28950 for (; loc != NULL; loc = loc->dw_loc_next)
28951 switch (loc->dw_loc_opc)
28952 {
28953 case DW_OP_implicit_pointer:
28954 case DW_OP_convert:
28955 case DW_OP_reinterpret:
28956 case DW_OP_GNU_implicit_pointer:
28957 case DW_OP_GNU_convert:
28958 case DW_OP_GNU_reinterpret:
28959 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28960 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28961 break;
28962 case DW_OP_GNU_variable_value:
28963 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28964 {
28965 dw_die_ref ref
28966 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28967 if (ref == NULL)
28968 break;
28969 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28970 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28971 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28972 }
28973 /* FALLTHRU */
28974 case DW_OP_call2:
28975 case DW_OP_call4:
28976 case DW_OP_call_ref:
28977 case DW_OP_const_type:
28978 case DW_OP_GNU_const_type:
28979 case DW_OP_GNU_parameter_ref:
28980 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28981 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28982 break;
28983 case DW_OP_regval_type:
28984 case DW_OP_deref_type:
28985 case DW_OP_GNU_regval_type:
28986 case DW_OP_GNU_deref_type:
28987 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28988 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28989 break;
28990 case DW_OP_entry_value:
28991 case DW_OP_GNU_entry_value:
28992 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28993 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28994 break;
28995 default:
28996 break;
28997 }
28998 }
28999
29000 /* Given DIE that we're marking as used, find any other dies
29001 it references as attributes and mark them as used. */
29002
29003 static void
29004 prune_unused_types_walk_attribs (dw_die_ref die)
29005 {
29006 dw_attr_node *a;
29007 unsigned ix;
29008
29009 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29010 {
29011 switch (AT_class (a))
29012 {
29013 /* Make sure DWARF procedures referenced by location descriptions will
29014 get emitted. */
29015 case dw_val_class_loc:
29016 prune_unused_types_walk_loc_descr (AT_loc (a));
29017 break;
29018 case dw_val_class_loc_list:
29019 for (dw_loc_list_ref list = AT_loc_list (a);
29020 list != NULL;
29021 list = list->dw_loc_next)
29022 prune_unused_types_walk_loc_descr (list->expr);
29023 break;
29024
29025 case dw_val_class_view_list:
29026 /* This points to a loc_list in another attribute, so it's
29027 already covered. */
29028 break;
29029
29030 case dw_val_class_die_ref:
29031 /* A reference to another DIE.
29032 Make sure that it will get emitted.
29033 If it was broken out into a comdat group, don't follow it. */
29034 if (! AT_ref (a)->comdat_type_p
29035 || a->dw_attr == DW_AT_specification)
29036 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29037 break;
29038
29039 case dw_val_class_str:
29040 /* Reset the string's refcount to 0 so that
29041 prune_unused_types_update_strings accounts properly for it. */
29042 a->dw_attr_val.v.val_str->refcount = 0;
29043 break;
29044
29045 default:
29046 break;
29047 }
29048 }
29049 }
29050
29051 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29052
29053 static void
29054 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29055 {
29056 dw_die_ref c;
29057
29058 if (die == NULL || die->die_child == NULL)
29059 return;
29060 c = die->die_child;
29061 do
29062 {
29063 if (is_template_parameter (c))
29064 prune_unused_types_mark (c, 1);
29065 c = c->die_sib;
29066 } while (c && c != die->die_child);
29067 }
29068
29069 /* Mark DIE as being used. If DOKIDS is true, then walk down
29070 to DIE's children. */
29071
29072 static void
29073 prune_unused_types_mark (dw_die_ref die, int dokids)
29074 {
29075 dw_die_ref c;
29076
29077 if (die->die_mark == 0)
29078 {
29079 /* We haven't done this node yet. Mark it as used. */
29080 die->die_mark = 1;
29081 /* If this is the DIE of a generic type instantiation,
29082 mark the children DIEs that describe its generic parms and
29083 args. */
29084 prune_unused_types_mark_generic_parms_dies (die);
29085
29086 /* We also have to mark its parents as used.
29087 (But we don't want to mark our parent's kids due to this,
29088 unless it is a class.) */
29089 if (die->die_parent)
29090 prune_unused_types_mark (die->die_parent,
29091 class_scope_p (die->die_parent));
29092
29093 /* Mark any referenced nodes. */
29094 prune_unused_types_walk_attribs (die);
29095
29096 /* If this node is a declaration that has a separate definition
29097 (i.e. is referenced by a DW_AT_specification), also mark that definition. */
29098 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29099 prune_unused_types_mark (die->die_definition, 1);
29100 }
29101
29102 if (dokids && die->die_mark != 2)
29103 {
29104 /* We need to walk the children, but haven't done so yet.
29105 Remember that we've walked the kids. */
29106 die->die_mark = 2;
29107
29108 /* If this is an array type, we need to make sure our
29109 kids get marked, even if they're types. If we're
29110 breaking out types into comdat sections, do this
29111 for all type definitions. */
29112 if (die->die_tag == DW_TAG_array_type
29113 || (use_debug_types
29114 && is_type_die (die) && ! is_declaration_die (die)))
29115 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29116 else
29117 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29118 }
29119 }
29120
29121 /* For local classes, check whether any static member functions were
29122 emitted and, if so, mark them. */
29123
29124 static void
29125 prune_unused_types_walk_local_classes (dw_die_ref die)
29126 {
29127 dw_die_ref c;
29128
29129 if (die->die_mark == 2)
29130 return;
29131
29132 switch (die->die_tag)
29133 {
29134 case DW_TAG_structure_type:
29135 case DW_TAG_union_type:
29136 case DW_TAG_class_type:
29137 break;
29138
29139 case DW_TAG_subprogram:
29140 if (!get_AT_flag (die, DW_AT_declaration)
29141 || die->die_definition != NULL)
29142 prune_unused_types_mark (die, 1);
29143 return;
29144
29145 default:
29146 return;
29147 }
29148
29149 /* Mark children. */
29150 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29151 }
29152
29153 /* Walk the tree DIE and mark types that we actually use. */
29154
29155 static void
29156 prune_unused_types_walk (dw_die_ref die)
29157 {
29158 dw_die_ref c;
29159
29160 /* Don't do anything if this node is already marked and
29161 children have been marked as well. */
29162 if (die->die_mark == 2)
29163 return;
29164
29165 switch (die->die_tag)
29166 {
29167 case DW_TAG_structure_type:
29168 case DW_TAG_union_type:
29169 case DW_TAG_class_type:
29170 if (die->die_perennial_p)
29171 break;
29172
29173 for (c = die->die_parent; c; c = c->die_parent)
29174 if (c->die_tag == DW_TAG_subprogram)
29175 break;
29176
29177 /* Finding used static member functions inside of classes
29178 is needed just for local classes, because for other classes
29179 static member function DIEs with DW_AT_specification
29180 are emitted outside of the DW_TAG_*_type. If we ever change
29181 it, we'd need to call this even for non-local classes. */
29182 if (c)
29183 prune_unused_types_walk_local_classes (die);
29184
29185 /* It's a type node --- don't mark it. */
29186 return;
29187
29188 case DW_TAG_const_type:
29189 case DW_TAG_packed_type:
29190 case DW_TAG_pointer_type:
29191 case DW_TAG_reference_type:
29192 case DW_TAG_rvalue_reference_type:
29193 case DW_TAG_volatile_type:
29194 case DW_TAG_typedef:
29195 case DW_TAG_array_type:
29196 case DW_TAG_interface_type:
29197 case DW_TAG_friend:
29198 case DW_TAG_enumeration_type:
29199 case DW_TAG_subroutine_type:
29200 case DW_TAG_string_type:
29201 case DW_TAG_set_type:
29202 case DW_TAG_subrange_type:
29203 case DW_TAG_ptr_to_member_type:
29204 case DW_TAG_file_type:
29205 /* Type nodes are useful only when other DIEs reference them --- don't
29206 mark them. */
29207 /* FALLTHROUGH */
29208
29209 case DW_TAG_dwarf_procedure:
29210 /* Likewise for DWARF procedures. */
29211
29212 if (die->die_perennial_p)
29213 break;
29214
29215 return;
29216
29217 default:
29218 /* Mark everything else. */
29219 break;
29220 }
29221
29222 if (die->die_mark == 0)
29223 {
29224 die->die_mark = 1;
29225
29226 /* Now, mark any dies referenced from here. */
29227 prune_unused_types_walk_attribs (die);
29228 }
29229
29230 die->die_mark = 2;
29231
29232 /* Mark children. */
29233 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29234 }
29235
29236 /* Increment the string counts on strings referred to from DIE's
29237 attributes. */
29238
29239 static void
29240 prune_unused_types_update_strings (dw_die_ref die)
29241 {
29242 dw_attr_node *a;
29243 unsigned ix;
29244
29245 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29246 if (AT_class (a) == dw_val_class_str)
29247 {
29248 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29249 s->refcount++;
29250 /* Avoid unnecessarily putting strings that are used fewer than
29251 twice into the hash table. */
29252 if (s->refcount
29253 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29254 {
29255 indirect_string_node **slot
29256 = debug_str_hash->find_slot_with_hash (s->str,
29257 htab_hash_string (s->str),
29258 INSERT);
29259 gcc_assert (*slot == NULL);
29260 *slot = s;
29261 }
29262 }
29263 }
29264
29265 /* Mark DIE and its children as removed. */
29266
29267 static void
29268 mark_removed (dw_die_ref die)
29269 {
29270 dw_die_ref c;
29271 die->removed = true;
29272 FOR_EACH_CHILD (die, c, mark_removed (c));
29273 }
29274
29275 /* Remove from the tree DIE any dies that aren't marked. */
29276
29277 static void
29278 prune_unused_types_prune (dw_die_ref die)
29279 {
29280 dw_die_ref c;
29281
29282 gcc_assert (die->die_mark);
29283 prune_unused_types_update_strings (die);
29284
29285 if (! die->die_child)
29286 return;
29287
29288 c = die->die_child;
29289 do {
29290 dw_die_ref prev = c, next;
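/* Skip over any unmarked siblings following PREV, unlinking them and
   marking them as removed; stop at the next marked child, or, if we
   reach DIE->die_child unmarked (no marked children remain), fix up
   the circular sibling list and return.  */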
29291 for (c = c->die_sib; ! c->die_mark; c = next)
29292 if (c == die->die_child)
29293 {
29294 /* No marked children between 'prev' and the end of the list. */
29295 if (prev == c)
29296 /* No marked children at all. */
29297 die->die_child = NULL;
29298 else
29299 {
29300 prev->die_sib = c->die_sib;
29301 die->die_child = prev;
29302 }
29303 c->die_sib = NULL;
29304 mark_removed (c);
29305 return;
29306 }
29307 else
29308 {
29309 next = c->die_sib;
29310 c->die_sib = NULL;
29311 mark_removed (c);
29312 }
29313
29314 if (c != prev->die_sib)
29315 prev->die_sib = c;
29316 prune_unused_types_prune (c);
29317 } while (c != die->die_child);
29318 }
29319
29320 /* Remove dies representing declarations that we never use. */
29321
29322 static void
29323 prune_unused_types (void)
29324 {
29325 unsigned int i;
29326 limbo_die_node *node;
29327 comdat_type_node *ctnode;
29328 pubname_entry *pub;
29329 dw_die_ref base_type;
29330
29331 #if ENABLE_ASSERT_CHECKING
29332 /* All the marks should already be clear. */
29333 verify_marks_clear (comp_unit_die ());
29334 for (node = limbo_die_list; node; node = node->next)
29335 verify_marks_clear (node->die);
29336 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29337 verify_marks_clear (ctnode->root_die);
29338 #endif /* ENABLE_ASSERT_CHECKING */
29339
29340 /* Mark types that are used in global variables. */
29341 premark_types_used_by_global_vars ();
29342
29343 /* Set the mark on nodes that are actually used. */
29344 prune_unused_types_walk (comp_unit_die ());
29345 for (node = limbo_die_list; node; node = node->next)
29346 prune_unused_types_walk (node->die);
29347 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29348 {
29349 prune_unused_types_walk (ctnode->root_die);
29350 prune_unused_types_mark (ctnode->type_die, 1);
29351 }
29352
29353 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29354 are unusual in that they are pubnames that are the children of pubtypes.
29355 They should only be marked via their parent DW_TAG_enumeration_type die,
29356 not as roots in themselves. */
29357 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29358 if (pub->die->die_tag != DW_TAG_enumerator)
29359 prune_unused_types_mark (pub->die, 1);
29360 for (i = 0; base_types.iterate (i, &base_type); i++)
29361 prune_unused_types_mark (base_type, 1);
29362
29363 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29364 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29365 callees). */
29366 cgraph_node *cnode;
29367 FOR_EACH_FUNCTION (cnode)
29368 if (cnode->referred_to_p (false))
29369 {
29370 dw_die_ref die = lookup_decl_die (cnode->decl);
29371 if (die == NULL || die->die_mark)
29372 continue;
29373 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29374 if (e->caller != cnode
29375 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29376 {
29377 prune_unused_types_mark (die, 1);
29378 break;
29379 }
29380 }
29381
29382 if (debug_str_hash)
29383 debug_str_hash->empty ();
29384 if (skeleton_debug_str_hash)
29385 skeleton_debug_str_hash->empty ();
29386 prune_unused_types_prune (comp_unit_die ());
29387 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29388 {
29389 node = *pnode;
29390 if (!node->die->die_mark)
29391 *pnode = node->next;
29392 else
29393 {
29394 prune_unused_types_prune (node->die);
29395 pnode = &node->next;
29396 }
29397 }
29398 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29399 prune_unused_types_prune (ctnode->root_die);
29400
29401 /* Leave the marks clear. */
29402 prune_unmark_dies (comp_unit_die ());
29403 for (node = limbo_die_list; node; node = node->next)
29404 prune_unmark_dies (node->die);
29405 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29406 prune_unmark_dies (ctnode->root_die);
29407 }
29408
29409 /* Helpers to manipulate hash table of comdat type units. */
29410
29411 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29412 {
29413 static inline hashval_t hash (const comdat_type_node *);
29414 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29415 };
29416
29417 inline hashval_t
29418 comdat_type_hasher::hash (const comdat_type_node *type_node)
29419 {
29420 hashval_t h;
29421 memcpy (&h, type_node->signature, sizeof (h));
29422 return h;
29423 }
29424
29425 inline bool
29426 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29427 const comdat_type_node *type_node_2)
29428 {
29429 return (! memcmp (type_node_1->signature, type_node_2->signature,
29430 DWARF_TYPE_SIGNATURE_SIZE));
29431 }
29432
29433 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29434 to the location where it would have been added had we known the
29435 DECL_ASSEMBLER_NAME when the other attributes were added. This will
29436 probably improve compactness of the debug info by removing equivalent
29437 abbrevs, and hide any differences caused by deferring the
29438 computation of the assembler name, triggered by e.g. PCH. */
29439
29440 static inline void
29441 move_linkage_attr (dw_die_ref die)
29442 {
29443 unsigned ix = vec_safe_length (die->die_attr);
29444 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29445
29446 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29447 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29448
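/* Scan backwards for the last DW_AT_decl_line, DW_AT_decl_column or
   DW_AT_name attribute; the linkage name is inserted right after it.  */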
29449 while (--ix > 0)
29450 {
29451 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29452
29453 if (prev->dw_attr == DW_AT_decl_line
29454 || prev->dw_attr == DW_AT_decl_column
29455 || prev->dw_attr == DW_AT_name)
29456 break;
29457 }
29458
29459 if (ix != vec_safe_length (die->die_attr) - 1)
29460 {
29461 die->die_attr->pop ();
29462 die->die_attr->quick_insert (ix, linkage);
29463 }
29464 }
29465
29466 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29467 referenced from typed stack ops and count how often they are used. */
29468
29469 static void
29470 mark_base_types (dw_loc_descr_ref loc)
29471 {
29472 dw_die_ref base_type = NULL;
29473
29474 for (; loc; loc = loc->dw_loc_next)
29475 {
29476 switch (loc->dw_loc_opc)
29477 {
29478 case DW_OP_regval_type:
29479 case DW_OP_deref_type:
29480 case DW_OP_GNU_regval_type:
29481 case DW_OP_GNU_deref_type:
29482 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29483 break;
29484 case DW_OP_convert:
29485 case DW_OP_reinterpret:
29486 case DW_OP_GNU_convert:
29487 case DW_OP_GNU_reinterpret:
29488 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29489 continue;
29490 /* FALLTHRU */
29491 case DW_OP_const_type:
29492 case DW_OP_GNU_const_type:
29493 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29494 break;
29495 case DW_OP_entry_value:
29496 case DW_OP_GNU_entry_value:
29497 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29498 continue;
29499 default:
29500 continue;
29501 }
29502 gcc_assert (base_type->die_parent == comp_unit_die ());
29503 if (base_type->die_mark)
29504 base_type->die_mark++;
29505 else
29506 {
29507 base_types.safe_push (base_type);
29508 base_type->die_mark = 1;
29509 }
29510 }
29511 }
29512
29513 /* Comparison function for sorting marked base types. */
29514
29515 static int
29516 base_type_cmp (const void *x, const void *y)
29517 {
29518 dw_die_ref dx = *(const dw_die_ref *) x;
29519 dw_die_ref dy = *(const dw_die_ref *) y;
29520 unsigned int byte_size1, byte_size2;
29521 unsigned int encoding1, encoding2;
29522 unsigned int align1, align2;
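/* Sort primarily by decreasing usage count (die_mark) so the most
   frequently referenced base types get the smallest DIE offsets;
   break ties on byte size, encoding and alignment to keep the order
   deterministic.  */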
29523 if (dx->die_mark > dy->die_mark)
29524 return -1;
29525 if (dx->die_mark < dy->die_mark)
29526 return 1;
29527 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29528 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29529 if (byte_size1 < byte_size2)
29530 return 1;
29531 if (byte_size1 > byte_size2)
29532 return -1;
29533 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29534 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29535 if (encoding1 < encoding2)
29536 return 1;
29537 if (encoding1 > encoding2)
29538 return -1;
29539 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29540 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29541 if (align1 < align2)
29542 return 1;
29543 if (align1 > align2)
29544 return -1;
29545 return 0;
29546 }
29547
29548 /* Move the base types marked by mark_base_types as early as possible
29549 in the CU, sorted by decreasing usage count, both to make the
29550 uleb128 references as small as possible and to make sure they
29551 will have die_offset already computed by calc_die_sizes when
29552 the sizes of typed stack loc ops are computed. */
29553
29554 static void
29555 move_marked_base_types (void)
29556 {
29557 unsigned int i;
29558 dw_die_ref base_type, die, c;
29559
29560 if (base_types.is_empty ())
29561 return;
29562
29563 /* Sort by decreasing usage count, they will be added again in that
29564 order later on. */
29565 base_types.qsort (base_type_cmp);
29566 die = comp_unit_die ();
29567 c = die->die_child;
29568 do
29569 {
29570 dw_die_ref prev = c;
29571 c = c->die_sib;
29572 while (c->die_mark)
29573 {
29574 remove_child_with_prev (c, prev);
29575 /* As base types got marked, there must be at least
29576 one node other than DW_TAG_base_type. */
29577 gcc_assert (die->die_child != NULL);
29578 c = prev->die_sib;
29579 }
29580 }
29581 while (c != die->die_child);
29582 gcc_assert (die->die_child);
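/* Splice the sorted base types in right after DIE->die_child (the last
   child on the circular sibling list), i.e. at the front of the child
   list, clearing the use-count marks as we go.  */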
29583 c = die->die_child;
29584 for (i = 0; base_types.iterate (i, &base_type); i++)
29585 {
29586 base_type->die_mark = 0;
29587 base_type->die_sib = c->die_sib;
29588 c->die_sib = base_type;
29589 c = base_type;
29590 }
29591 }
29592
29593 /* Helper function for resolve_addr: attempt to resolve
29594 one CONST_STRING and return true if successful. Similarly, verify that
29595 SYMBOL_REFs refer to variables emitted in the current CU. */
29596
29597 static bool
29598 resolve_one_addr (rtx *addr)
29599 {
29600 rtx rtl = *addr;
29601
29602 if (GET_CODE (rtl) == CONST_STRING)
29603 {
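/* Rebuild a STRING_CST matching this CONST_STRING and look up its
   constant pool entry; fail unless the pool entry is a MEM whose
   SYMBOL_REF (if it has a decl) has actually been written out.  */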
29604 size_t len = strlen (XSTR (rtl, 0)) + 1;
29605 tree t = build_string (len, XSTR (rtl, 0));
29606 tree tlen = size_int (len - 1);
29607 TREE_TYPE (t)
29608 = build_array_type (char_type_node, build_index_type (tlen));
29609 rtl = lookup_constant_def (t);
29610 if (!rtl || !MEM_P (rtl))
29611 return false;
29612 rtl = XEXP (rtl, 0);
29613 if (GET_CODE (rtl) == SYMBOL_REF
29614 && SYMBOL_REF_DECL (rtl)
29615 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29616 return false;
29617 vec_safe_push (used_rtx_array, rtl);
29618 *addr = rtl;
29619 return true;
29620 }
29621
29622 if (GET_CODE (rtl) == SYMBOL_REF
29623 && SYMBOL_REF_DECL (rtl))
29624 {
29625 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29626 {
29627 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29628 return false;
29629 }
29630 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29631 return false;
29632 }
29633
29634 if (GET_CODE (rtl) == CONST)
29635 {
29636 subrtx_ptr_iterator::array_type array;
29637 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29638 if (!resolve_one_addr (*iter))
29639 return false;
29640 }
29641
29642 return true;
29643 }
29644
29645 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29646 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29647 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29648
29649 static rtx
29650 string_cst_pool_decl (tree t)
29651 {
29652 rtx rtl = output_constant_def (t, 1);
29653 unsigned char *array;
29654 dw_loc_descr_ref l;
29655 tree decl;
29656 size_t len;
29657 dw_die_ref ref;
29658
29659 if (!rtl || !MEM_P (rtl))
29660 return NULL_RTX;
29661 rtl = XEXP (rtl, 0);
29662 if (GET_CODE (rtl) != SYMBOL_REF
29663 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29664 return NULL_RTX;
29665
29666 decl = SYMBOL_REF_DECL (rtl);
29667 if (!lookup_decl_die (decl))
29668 {
29669 len = TREE_STRING_LENGTH (t);
29670 vec_safe_push (used_rtx_array, rtl);
29671 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29672 array = ggc_vec_alloc<unsigned char> (len);
29673 memcpy (array, TREE_STRING_POINTER (t), len);
29674 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29675 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29676 l->dw_loc_oprnd2.v.val_vec.length = len;
29677 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29678 l->dw_loc_oprnd2.v.val_vec.array = array;
29679 add_AT_loc (ref, DW_AT_location, l);
29680 equate_decl_number_to_die (decl, ref);
29681 }
29682 return rtl;
29683 }
29684
29685 /* Helper function of resolve_addr_in_expr. LOC is
29686 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29687 of the exprloc or after DW_OP_{,bit_}piece, whose val_addr can't be
29688 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29689 with DW_OP_implicit_pointer if possible
29690 and return true; if unsuccessful, return false. */
29691
29692 static bool
29693 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29694 {
29695 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29696 HOST_WIDE_INT offset = 0;
29697 dw_die_ref ref = NULL;
29698 tree decl;
29699
29700 if (GET_CODE (rtl) == CONST
29701 && GET_CODE (XEXP (rtl, 0)) == PLUS
29702 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29703 {
29704 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29705 rtl = XEXP (XEXP (rtl, 0), 0);
29706 }
29707 if (GET_CODE (rtl) == CONST_STRING)
29708 {
29709 size_t len = strlen (XSTR (rtl, 0)) + 1;
29710 tree t = build_string (len, XSTR (rtl, 0));
29711 tree tlen = size_int (len - 1);
29712
29713 TREE_TYPE (t)
29714 = build_array_type (char_type_node, build_index_type (tlen));
29715 rtl = string_cst_pool_decl (t);
29716 if (!rtl)
29717 return false;
29718 }
29719 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29720 {
29721 decl = SYMBOL_REF_DECL (rtl);
29722 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29723 {
29724 ref = lookup_decl_die (decl);
29725 if (ref && (get_AT (ref, DW_AT_location)
29726 || get_AT (ref, DW_AT_const_value)))
29727 {
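/* Rewrite the DW_OP_addr into a DW_OP_implicit_pointer referencing
   REF with the accumulated OFFSET, and drop the following
   DW_OP_stack_value.  */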
29728 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29729 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29730 loc->dw_loc_oprnd1.val_entry = NULL;
29731 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29732 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29733 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29734 loc->dw_loc_oprnd2.v.val_int = offset;
29735 return true;
29736 }
29737 }
29738 }
29739 return false;
29740 }
29741
29742 /* Helper function for resolve_addr: handle one location
29743 expression and return false if at least one CONST_STRING or SYMBOL_REF
29744 in the location list couldn't be resolved. */
29745
29746 static bool
29747 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29748 {
29749 dw_loc_descr_ref keep = NULL;
29750 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29751 switch (loc->dw_loc_opc)
29752 {
29753 case DW_OP_addr:
29754 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29755 {
29756 if ((prev == NULL
29757 || prev->dw_loc_opc == DW_OP_piece
29758 || prev->dw_loc_opc == DW_OP_bit_piece)
29759 && loc->dw_loc_next
29760 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29761 && (!dwarf_strict || dwarf_version >= 5)
29762 && optimize_one_addr_into_implicit_ptr (loc))
29763 break;
29764 return false;
29765 }
29766 break;
29767 case DW_OP_GNU_addr_index:
29768 case DW_OP_addrx:
29769 case DW_OP_GNU_const_index:
29770 case DW_OP_constx:
29771 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29772 || loc->dw_loc_opc == DW_OP_addrx)
29773 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29774 || loc->dw_loc_opc == DW_OP_constx)
29775 && loc->dtprel))
29776 {
29777 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29778 if (!resolve_one_addr (&rtl))
29779 return false;
29780 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29781 loc->dw_loc_oprnd1.val_entry
29782 = add_addr_table_entry (rtl, ate_kind_rtx);
29783 }
29784 break;
29785 case DW_OP_const4u:
29786 case DW_OP_const8u:
29787 if (loc->dtprel
29788 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29789 return false;
29790 break;
29791 case DW_OP_plus_uconst:
29792 if (size_of_loc_descr (loc)
29793 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29794 + 1
29795 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29796 {
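/* A small integer constant op followed by DW_OP_plus encodes shorter
   here than DW_OP_plus_uconst with its uleb128 operand, so splice in
   the cheaper form.  */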
29797 dw_loc_descr_ref repl
29798 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29799 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29800 add_loc_descr (&repl, loc->dw_loc_next);
29801 *loc = *repl;
29802 }
29803 break;
29804 case DW_OP_implicit_value:
29805 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29806 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29807 return false;
29808 break;
29809 case DW_OP_implicit_pointer:
29810 case DW_OP_GNU_implicit_pointer:
29811 case DW_OP_GNU_parameter_ref:
29812 case DW_OP_GNU_variable_value:
29813 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29814 {
29815 dw_die_ref ref
29816 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29817 if (ref == NULL)
29818 return false;
29819 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29820 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29821 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29822 }
29823 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29824 {
29825 if (prev == NULL
29826 && loc->dw_loc_next == NULL
29827 && AT_class (a) == dw_val_class_loc)
29828 switch (a->dw_attr)
29829 {
29830 /* The following attributes allow both exprloc and reference,
29831 so if the whole expression is DW_OP_GNU_variable_value
29832 alone we can transform it into a reference. */
29833 case DW_AT_byte_size:
29834 case DW_AT_bit_size:
29835 case DW_AT_lower_bound:
29836 case DW_AT_upper_bound:
29837 case DW_AT_bit_stride:
29838 case DW_AT_count:
29839 case DW_AT_allocated:
29840 case DW_AT_associated:
29841 case DW_AT_byte_stride:
29842 a->dw_attr_val.val_class = dw_val_class_die_ref;
29843 a->dw_attr_val.val_entry = NULL;
29844 a->dw_attr_val.v.val_die_ref.die
29845 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29846 a->dw_attr_val.v.val_die_ref.external = 0;
29847 return true;
29848 default:
29849 break;
29850 }
29851 if (dwarf_strict)
29852 return false;
29853 }
29854 break;
29855 case DW_OP_const_type:
29856 case DW_OP_regval_type:
29857 case DW_OP_deref_type:
29858 case DW_OP_convert:
29859 case DW_OP_reinterpret:
29860 case DW_OP_GNU_const_type:
29861 case DW_OP_GNU_regval_type:
29862 case DW_OP_GNU_deref_type:
29863 case DW_OP_GNU_convert:
29864 case DW_OP_GNU_reinterpret:
29865 while (loc->dw_loc_next
29866 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29867 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29868 {
29869 dw_die_ref base1, base2;
29870 unsigned enc1, enc2, size1, size2;
29871 if (loc->dw_loc_opc == DW_OP_regval_type
29872 || loc->dw_loc_opc == DW_OP_deref_type
29873 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29874 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29875 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29876 else if (loc->dw_loc_oprnd1.val_class
29877 == dw_val_class_unsigned_const)
29878 break;
29879 else
29880 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29881 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29882 == dw_val_class_unsigned_const)
29883 break;
29884 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29885 gcc_assert (base1->die_tag == DW_TAG_base_type
29886 && base2->die_tag == DW_TAG_base_type);
29887 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29888 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29889 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29890 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
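/* The following DW_OP_convert is redundant when the byte sizes match
   and either the encodings are identical or both are plain signed or
   unsigned integers (unless this op was flagged via KEEP as following
   a non-integral typed entry).  */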
29891 if (size1 == size2
29892 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29893 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29894 && loc != keep)
29895 || enc1 == enc2))
29896 {
29897 /* Optimize away next DW_OP_convert after
29898 adjusting LOC's base type die reference. */
29899 if (loc->dw_loc_opc == DW_OP_regval_type
29900 || loc->dw_loc_opc == DW_OP_deref_type
29901 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29902 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29903 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29904 else
29905 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29906 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29907 continue;
29908 }
29909 /* Don't change integer DW_OP_convert after e.g. floating
29910 point typed stack entry. */
29911 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29912 keep = loc->dw_loc_next;
29913 break;
29914 }
29915 break;
29916 default:
29917 break;
29918 }
29919 return true;
29920 }
29921
29922 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting of
29923 a single DW_OP_addr, whose operand referred to DECL, and the DW_OP_addr
29924 couldn't be resolved. resolve_addr has already
29925 removed the DW_AT_location attribute. This function attempts to
29926 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29927 or a DW_AT_const_value attribute, if possible. */
29928
29929 static void
29930 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29931 {
29932 if (!VAR_P (decl)
29933 || lookup_decl_die (decl) != die
29934 || DECL_EXTERNAL (decl)
29935 || !TREE_STATIC (decl)
29936 || DECL_INITIAL (decl) == NULL_TREE
29937 || DECL_P (DECL_INITIAL (decl))
29938 || get_AT (die, DW_AT_const_value))
29939 return;
29940
29941 tree init = DECL_INITIAL (decl);
29942 HOST_WIDE_INT offset = 0;
29943 /* For variables that have been optimized away and thus
29944 don't have a memory location, see if we can emit
29945 DW_AT_const_value instead. */
29946 if (tree_add_const_value_attribute (die, init))
29947 return;
29948 if (dwarf_strict && dwarf_version < 5)
29949 return;
29950 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29951 and ADDR_EXPR refers to a decl that has DW_AT_location or
29952 DW_AT_const_value (but isn't addressable, otherwise
29953 resolving the original DW_OP_addr wouldn't fail), see if
29954 we can add DW_OP_implicit_pointer. */
29955 STRIP_NOPS (init);
29956 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29957 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29958 {
29959 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29960 init = TREE_OPERAND (init, 0);
29961 STRIP_NOPS (init);
29962 }
29963 if (TREE_CODE (init) != ADDR_EXPR)
29964 return;
29965 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29966 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29967 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29968 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29969 && TREE_OPERAND (init, 0) != decl))
29970 {
29971 dw_die_ref ref;
29972 dw_loc_descr_ref l;
29973
29974 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29975 {
29976 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29977 if (!rtl)
29978 return;
29979 decl = SYMBOL_REF_DECL (rtl);
29980 }
29981 else
29982 decl = TREE_OPERAND (init, 0);
29983 ref = lookup_decl_die (decl);
29984 if (ref == NULL
29985 || (!get_AT (ref, DW_AT_location)
29986 && !get_AT (ref, DW_AT_const_value)))
29987 return;
29988 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29989 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29990 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29991 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29992 add_AT_loc (die, DW_AT_location, l);
29993 }
29994 }
29995
29996 /* Return NULL if L is a pure DWARF expression, otherwise return the first
29997 op that is not valid in a DWARF expression. */
29998
29999 static dw_loc_descr_ref
30000 non_dwarf_expression (dw_loc_descr_ref l)
30001 {
30002 while (l)
30003 {
30004 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30005 return l;
30006 switch (l->dw_loc_opc)
30007 {
30008 case DW_OP_regx:
30009 case DW_OP_implicit_value:
30010 case DW_OP_stack_value:
30011 case DW_OP_implicit_pointer:
30012 case DW_OP_GNU_implicit_pointer:
30013 case DW_OP_GNU_parameter_ref:
30014 case DW_OP_piece:
30015 case DW_OP_bit_piece:
30016 return l;
30017 default:
30018 break;
30019 }
30020 l = l->dw_loc_next;
30021 }
30022 return NULL;
30023 }
30024
30025 /* Return an adjusted copy of EXPR:
30026 If it is an empty DWARF expression, return it.
30027 If it is a valid non-empty DWARF expression,
30028 return a copy of EXPR with DW_OP_deref appended to it.
30029 If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30030 a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30031 If it is a DWARF expression followed by DW_OP_stack_value, return
30032 a copy of the DWARF expression without anything appended.
30033 Otherwise, return NULL. */
30034
30035 static dw_loc_descr_ref
30036 copy_deref_exprloc (dw_loc_descr_ref expr)
30037 {
30038 dw_loc_descr_ref tail = NULL;
30039
30040 if (expr == NULL)
30041 return NULL;
30042
30043 dw_loc_descr_ref l = non_dwarf_expression (expr);
30044 if (l && l->dw_loc_next)
30045 return NULL;
30046
30047 if (l)
30048 {
30049 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30050 tail = new_loc_descr ((enum dwarf_location_atom)
30051 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30052 0, 0);
30053 else
30054 switch (l->dw_loc_opc)
30055 {
30056 case DW_OP_regx:
30057 tail = new_loc_descr (DW_OP_bregx,
30058 l->dw_loc_oprnd1.v.val_unsigned, 0);
30059 break;
30060 case DW_OP_stack_value:
30061 break;
30062 default:
30063 return NULL;
30064 }
30065 }
30066 else
30067 tail = new_loc_descr (DW_OP_deref, 0, 0);
30068
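/* Copy EXPR up to (but not including) the terminating location op L,
   then terminate the copy with TAIL: DW_OP_deref, DW_OP_breg{N,x} 0, or
   nothing at all in the DW_OP_stack_value case.  */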
30069 dw_loc_descr_ref ret = NULL, *p = &ret;
30070 while (expr != l)
30071 {
30072 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30073 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30074 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30075 p = &(*p)->dw_loc_next;
30076 expr = expr->dw_loc_next;
30077 }
30078 *p = tail;
30079 return ret;
30080 }
30081
30082 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30083 reference to a variable or argument, adjust it if needed and return:
30084 -1 if the DW_AT_string_length attribute (and any
30085 DW_AT_{string_length_,}byte_size attribute) should be removed;
30086 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan);
30087 1 if the attribute has been successfully adjusted. */
30088
30089 static int
30090 optimize_string_length (dw_attr_node *a)
30091 {
30092 dw_loc_descr_ref l = AT_loc (a), lv;
30093 dw_die_ref die;
30094 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30095 {
30096 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30097 die = lookup_decl_die (decl);
30098 if (die)
30099 {
30100 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30101 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30102 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30103 }
30104 else
30105 return -1;
30106 }
30107 else
30108 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30109
30110 /* DWARF5 allows reference class, so we can then reference the DIE.
30111 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30112 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30113 {
30114 a->dw_attr_val.val_class = dw_val_class_die_ref;
30115 a->dw_attr_val.val_entry = NULL;
30116 a->dw_attr_val.v.val_die_ref.die = die;
30117 a->dw_attr_val.v.val_die_ref.external = 0;
30118 return 0;
30119 }
30120
30121 dw_attr_node *av = get_AT (die, DW_AT_location);
30122 dw_loc_list_ref d;
30123 bool non_dwarf_expr = false;
30124
30125 if (av == NULL)
30126 return dwarf_strict ? -1 : 0;
30127 switch (AT_class (av))
30128 {
30129 case dw_val_class_loc_list:
30130 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30131 if (d->expr && non_dwarf_expression (d->expr))
30132 non_dwarf_expr = true;
30133 break;
30134 case dw_val_class_view_list:
30135 gcc_unreachable ();
30136 case dw_val_class_loc:
30137 lv = AT_loc (av);
30138 if (lv == NULL)
30139 return dwarf_strict ? -1 : 0;
30140 if (non_dwarf_expression (lv))
30141 non_dwarf_expr = true;
30142 break;
30143 default:
30144 return dwarf_strict ? -1 : 0;
30145 }
30146
30147 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30148 into DW_OP_call4 or DW_OP_GNU_variable_value into
30149 DW_OP_call4 DW_OP_deref, do so. */
30150 if (!non_dwarf_expr
30151 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30152 {
30153 l->dw_loc_opc = DW_OP_call4;
30154 if (l->dw_loc_next)
30155 l->dw_loc_next = NULL;
30156 else
30157 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30158 return 0;
30159 }
30160
30161 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30162 copy over the DW_AT_location attribute from die to a. */
30163 if (l->dw_loc_next != NULL)
30164 {
30165 a->dw_attr_val = av->dw_attr_val;
30166 return 1;
30167 }
30168
30169 dw_loc_list_ref list, *p;
30170 switch (AT_class (av))
30171 {
30172 case dw_val_class_loc_list:
30173 p = &list;
30174 list = NULL;
30175 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30176 {
30177 lv = copy_deref_exprloc (d->expr);
30178 if (lv)
30179 {
30180 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30181 p = &(*p)->dw_loc_next;
30182 }
30183 else if (!dwarf_strict && d->expr)
30184 return 0;
30185 }
30186 if (list == NULL)
30187 return dwarf_strict ? -1 : 0;
30188 a->dw_attr_val.val_class = dw_val_class_loc_list;
30189 gen_llsym (list);
30190 *AT_loc_list_ptr (a) = list;
30191 return 1;
30192 case dw_val_class_loc:
30193 lv = copy_deref_exprloc (AT_loc (av));
30194 if (lv == NULL)
30195 return dwarf_strict ? -1 : 0;
30196 a->dw_attr_val.v.val_loc = lv;
30197 return 1;
30198 default:
30199 gcc_unreachable ();
30200 }
30201 }
30202
30203 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30204 an address in the .rodata section if the string literal is emitted there,
30205 or, if it isn't found in .rodata, remove the containing location list or
30206 replace DW_AT_const_value with DW_AT_location and an empty location
30207 expression. Similarly for SYMBOL_REFs, keep only those that refer
30208 to something that has been emitted in the current CU. */
30209
30210 static void
30211 resolve_addr (dw_die_ref die)
30212 {
30213 dw_die_ref c;
30214 dw_attr_node *a;
30215 dw_loc_list_ref *curr, *start, loc;
30216 unsigned ix;
30217 bool remove_AT_byte_size = false;
30218
30219 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30220 switch (AT_class (a))
30221 {
30222 case dw_val_class_loc_list:
30223 start = curr = AT_loc_list_ptr (a);
30224 loc = *curr;
30225 gcc_assert (loc);
30226 /* The same list can be referenced more than once. See if we have
30227 already recorded the result from a previous pass. */
30228 if (loc->replaced)
30229 *curr = loc->dw_loc_next;
30230 else if (!loc->resolved_addr)
30231 {
30232 /* As things stand, we do not expect or allow one die to
30233 reference a suffix of another die's location list chain.
30234 References must be identical or completely separate.
30235 There is therefore no need to cache the result of this
30236 pass on any list other than the first; doing so
30237 would lead to unnecessary writes. */
30238 while (*curr)
30239 {
30240 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30241 if (!resolve_addr_in_expr (a, (*curr)->expr))
30242 {
30243 dw_loc_list_ref next = (*curr)->dw_loc_next;
30244 dw_loc_descr_ref l = (*curr)->expr;
30245
30246 if (next && (*curr)->ll_symbol)
30247 {
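/* The entry being dropped carries the list's label; move it (and the
   view label) to the next entry so references to this location list
   still resolve.  */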
30248 gcc_assert (!next->ll_symbol);
30249 next->ll_symbol = (*curr)->ll_symbol;
30250 next->vl_symbol = (*curr)->vl_symbol;
30251 }
30252 if (dwarf_split_debug_info)
30253 remove_loc_list_addr_table_entries (l);
30254 *curr = next;
30255 }
30256 else
30257 {
30258 mark_base_types ((*curr)->expr);
30259 curr = &(*curr)->dw_loc_next;
30260 }
30261 }
30262 if (loc == *start)
30263 loc->resolved_addr = 1;
30264 else
30265 {
30266 loc->replaced = 1;
30267 loc->dw_loc_next = *start;
30268 }
30269 }
30270 if (!*start)
30271 {
30272 remove_AT (die, a->dw_attr);
30273 ix--;
30274 }
30275 break;
30276 case dw_val_class_view_list:
30277 {
30278 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30279 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30280 dw_val_node *llnode
30281 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30282 /* If we no longer have a loclist, or it no longer needs
30283 views, drop this attribute. */
30284 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30285 {
30286 remove_AT (die, a->dw_attr);
30287 ix--;
30288 }
30289 break;
30290 }
30291 case dw_val_class_loc:
30292 {
30293 dw_loc_descr_ref l = AT_loc (a);
30294 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30295 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30296 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30297 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30298 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30299 with DW_FORM_ref referencing the same DIE as
30300 DW_OP_GNU_variable_value used to reference. */
30301 if (a->dw_attr == DW_AT_string_length
30302 && l
30303 && l->dw_loc_opc == DW_OP_GNU_variable_value
30304 && (l->dw_loc_next == NULL
30305 || (l->dw_loc_next->dw_loc_next == NULL
30306 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30307 {
30308 switch (optimize_string_length (a))
30309 {
30310 case -1:
30311 remove_AT (die, a->dw_attr);
30312 ix--;
30313 /* If we drop DW_AT_string_length, we need to drop also
30314 DW_AT_{string_length_,}byte_size. */
30315 remove_AT_byte_size = true;
30316 continue;
30317 default:
30318 break;
30319 case 1:
30320 /* Even if we keep the optimized DW_AT_string_length,
30321 it might have changed AT_class, so process it again. */
30322 ix--;
30323 continue;
30324 }
30325 }
30326 /* For -gdwarf-2 don't attempt to optimize
30327 DW_AT_data_member_location containing
30328 DW_OP_plus_uconst - older consumers might
30329 rely on it being that op instead of a more complex,
30330 but shorter, location description. */
30331 if ((dwarf_version > 2
30332 || a->dw_attr != DW_AT_data_member_location
30333 || l == NULL
30334 || l->dw_loc_opc != DW_OP_plus_uconst
30335 || l->dw_loc_next != NULL)
30336 && !resolve_addr_in_expr (a, l))
30337 {
30338 if (dwarf_split_debug_info)
30339 remove_loc_list_addr_table_entries (l);
30340 if (l != NULL
30341 && l->dw_loc_next == NULL
30342 && l->dw_loc_opc == DW_OP_addr
30343 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30344 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30345 && a->dw_attr == DW_AT_location)
30346 {
30347 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30348 remove_AT (die, a->dw_attr);
30349 ix--;
30350 optimize_location_into_implicit_ptr (die, decl);
30351 break;
30352 }
30353 if (a->dw_attr == DW_AT_string_length)
30354 /* If we drop DW_AT_string_length, we need to drop also
30355 DW_AT_{string_length_,}byte_size. */
30356 remove_AT_byte_size = true;
30357 remove_AT (die, a->dw_attr);
30358 ix--;
30359 }
30360 else
30361 mark_base_types (l);
30362 }
30363 break;
30364 case dw_val_class_addr:
30365 if (a->dw_attr == DW_AT_const_value
30366 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30367 {
30368 if (AT_index (a) != NOT_INDEXED)
30369 remove_addr_table_entry (a->dw_attr_val.val_entry);
30370 remove_AT (die, a->dw_attr);
30371 ix--;
30372 }
30373 if ((die->die_tag == DW_TAG_call_site
30374 && a->dw_attr == DW_AT_call_origin)
30375 || (die->die_tag == DW_TAG_GNU_call_site
30376 && a->dw_attr == DW_AT_abstract_origin))
30377 {
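/* Try to replace the callee's address with a reference to its DIE.
   If the callee has no DIE, synthesize a declaration-only subprogram
   DIE where that is safe; otherwise drop the attribute.  */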
30378 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30379 dw_die_ref tdie = lookup_decl_die (tdecl);
30380 dw_die_ref cdie;
30381 if (tdie == NULL
30382 && DECL_EXTERNAL (tdecl)
30383 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30384 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30385 {
30386 dw_die_ref pdie = cdie;
30387 /* Make sure we don't add these DIEs into type units.
30388 We could emit skeleton DIEs for context (namespaces,
30389 outer structs/classes) and a skeleton DIE for the
30390 innermost context with DW_AT_signature pointing to the
30391 type unit. See PR78835. */
30392 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30393 pdie = pdie->die_parent;
30394 if (pdie == NULL)
30395 {
30396 /* Creating a full DIE for tdecl is overly expensive and
30397 at this point even wrong when in the LTO phase,
30398 as it can end up generating new type DIEs we didn't
30399 output, and thus optimize_external_refs would crash. */
30400 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30401 add_AT_flag (tdie, DW_AT_external, 1);
30402 add_AT_flag (tdie, DW_AT_declaration, 1);
30403 add_linkage_attr (tdie, tdecl);
30404 add_name_and_src_coords_attributes (tdie, tdecl, true);
30405 equate_decl_number_to_die (tdecl, tdie);
30406 }
30407 }
30408 if (tdie)
30409 {
30410 a->dw_attr_val.val_class = dw_val_class_die_ref;
30411 a->dw_attr_val.v.val_die_ref.die = tdie;
30412 a->dw_attr_val.v.val_die_ref.external = 0;
30413 }
30414 else
30415 {
30416 if (AT_index (a) != NOT_INDEXED)
30417 remove_addr_table_entry (a->dw_attr_val.val_entry);
30418 remove_AT (die, a->dw_attr);
30419 ix--;
30420 }
30421 }
30422 break;
30423 default:
30424 break;
30425 }
30426
30427 if (remove_AT_byte_size)
30428 remove_AT (die, dwarf_version >= 5
30429 ? DW_AT_string_length_byte_size
30430 : DW_AT_byte_size);
30431
30432 FOR_EACH_CHILD (die, c, resolve_addr (c));
30433 }
30434 \f
30435 /* Helper routines for optimize_location_lists.
30436 This pass tries to share identical location lists in the .debug_loc
30437 section. */
30438
30439 /* Iteratively hash operands of LOC opcode into HSTATE. */
30440
30441 static void
30442 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30443 {
30444 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30445 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30446
30447 switch (loc->dw_loc_opc)
30448 {
30449 case DW_OP_const4u:
30450 case DW_OP_const8u:
30451 if (loc->dtprel)
30452 goto hash_addr;
30453 /* FALLTHRU */
30454 case DW_OP_const1u:
30455 case DW_OP_const1s:
30456 case DW_OP_const2u:
30457 case DW_OP_const2s:
30458 case DW_OP_const4s:
30459 case DW_OP_const8s:
30460 case DW_OP_constu:
30461 case DW_OP_consts:
30462 case DW_OP_pick:
30463 case DW_OP_plus_uconst:
30464 case DW_OP_breg0:
30465 case DW_OP_breg1:
30466 case DW_OP_breg2:
30467 case DW_OP_breg3:
30468 case DW_OP_breg4:
30469 case DW_OP_breg5:
30470 case DW_OP_breg6:
30471 case DW_OP_breg7:
30472 case DW_OP_breg8:
30473 case DW_OP_breg9:
30474 case DW_OP_breg10:
30475 case DW_OP_breg11:
30476 case DW_OP_breg12:
30477 case DW_OP_breg13:
30478 case DW_OP_breg14:
30479 case DW_OP_breg15:
30480 case DW_OP_breg16:
30481 case DW_OP_breg17:
30482 case DW_OP_breg18:
30483 case DW_OP_breg19:
30484 case DW_OP_breg20:
30485 case DW_OP_breg21:
30486 case DW_OP_breg22:
30487 case DW_OP_breg23:
30488 case DW_OP_breg24:
30489 case DW_OP_breg25:
30490 case DW_OP_breg26:
30491 case DW_OP_breg27:
30492 case DW_OP_breg28:
30493 case DW_OP_breg29:
30494 case DW_OP_breg30:
30495 case DW_OP_breg31:
30496 case DW_OP_regx:
30497 case DW_OP_fbreg:
30498 case DW_OP_piece:
30499 case DW_OP_deref_size:
30500 case DW_OP_xderef_size:
30501 hstate.add_object (val1->v.val_int);
30502 break;
30503 case DW_OP_skip:
30504 case DW_OP_bra:
30505 {
30506 int offset;
30507
30508 gcc_assert (val1->val_class == dw_val_class_loc);
30509 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30510 hstate.add_object (offset);
30511 }
30512 break;
30513 case DW_OP_implicit_value:
30514 hstate.add_object (val1->v.val_unsigned);
30515 switch (val2->val_class)
30516 {
30517 case dw_val_class_const:
30518 hstate.add_object (val2->v.val_int);
30519 break;
30520 case dw_val_class_vec:
30521 {
30522 unsigned int elt_size = val2->v.val_vec.elt_size;
30523 unsigned int len = val2->v.val_vec.length;
30524
30525 hstate.add_int (elt_size);
30526 hstate.add_int (len);
30527 hstate.add (val2->v.val_vec.array, len * elt_size);
30528 }
30529 break;
30530 case dw_val_class_const_double:
30531 hstate.add_object (val2->v.val_double.low);
30532 hstate.add_object (val2->v.val_double.high);
30533 break;
30534 case dw_val_class_wide_int:
30535 hstate.add (val2->v.val_wide->get_val (),
30536 get_full_len (*val2->v.val_wide)
30537 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30538 break;
30539 case dw_val_class_addr:
30540 inchash::add_rtx (val2->v.val_addr, hstate);
30541 break;
30542 default:
30543 gcc_unreachable ();
30544 }
30545 break;
30546 case DW_OP_bregx:
30547 case DW_OP_bit_piece:
30548 hstate.add_object (val1->v.val_int);
30549 hstate.add_object (val2->v.val_int);
30550 break;
30551 case DW_OP_addr:
30552 hash_addr:
30553 if (loc->dtprel)
30554 {
30555 unsigned char dtprel = 0xd1;
30556 hstate.add_object (dtprel);
30557 }
30558 inchash::add_rtx (val1->v.val_addr, hstate);
30559 break;
30560 case DW_OP_GNU_addr_index:
30561 case DW_OP_addrx:
30562 case DW_OP_GNU_const_index:
30563 case DW_OP_constx:
30564 {
30565 if (loc->dtprel)
30566 {
30567 unsigned char dtprel = 0xd1;
30568 hstate.add_object (dtprel);
30569 }
30570 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30571 }
30572 break;
30573 case DW_OP_implicit_pointer:
30574 case DW_OP_GNU_implicit_pointer:
30575 hstate.add_int (val2->v.val_int);
30576 break;
30577 case DW_OP_entry_value:
30578 case DW_OP_GNU_entry_value:
30579 hstate.add_object (val1->v.val_loc);
30580 break;
30581 case DW_OP_regval_type:
30582 case DW_OP_deref_type:
30583 case DW_OP_GNU_regval_type:
30584 case DW_OP_GNU_deref_type:
30585 {
30586 unsigned int byte_size
30587 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30588 unsigned int encoding
30589 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30590 hstate.add_object (val1->v.val_int);
30591 hstate.add_object (byte_size);
30592 hstate.add_object (encoding);
30593 }
30594 break;
30595 case DW_OP_convert:
30596 case DW_OP_reinterpret:
30597 case DW_OP_GNU_convert:
30598 case DW_OP_GNU_reinterpret:
30599 if (val1->val_class == dw_val_class_unsigned_const)
30600 {
30601 hstate.add_object (val1->v.val_unsigned);
30602 break;
30603 }
30604 /* FALLTHRU */
30605 case DW_OP_const_type:
30606 case DW_OP_GNU_const_type:
30607 {
30608 unsigned int byte_size
30609 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30610 unsigned int encoding
30611 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30612 hstate.add_object (byte_size);
30613 hstate.add_object (encoding);
30614 if (loc->dw_loc_opc != DW_OP_const_type
30615 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30616 break;
30617 hstate.add_object (val2->val_class);
30618 switch (val2->val_class)
30619 {
30620 case dw_val_class_const:
30621 hstate.add_object (val2->v.val_int);
30622 break;
30623 case dw_val_class_vec:
30624 {
30625 unsigned int elt_size = val2->v.val_vec.elt_size;
30626 unsigned int len = val2->v.val_vec.length;
30627
30628 hstate.add_object (elt_size);
30629 hstate.add_object (len);
30630 hstate.add (val2->v.val_vec.array, len * elt_size);
30631 }
30632 break;
30633 case dw_val_class_const_double:
30634 hstate.add_object (val2->v.val_double.low);
30635 hstate.add_object (val2->v.val_double.high);
30636 break;
30637 case dw_val_class_wide_int:
30638 hstate.add (val2->v.val_wide->get_val (),
30639 get_full_len (*val2->v.val_wide)
30640 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30641 break;
30642 default:
30643 gcc_unreachable ();
30644 }
30645 }
30646 break;
30647
30648 default:
30649 /* Other codes have no operands. */
30650 break;
30651 }
30652 }
30653
30654 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30655
30656 static inline void
30657 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30658 {
30659 dw_loc_descr_ref l;
30660 bool sizes_computed = false;
30661 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30662 size_of_locs (loc);
30663
30664 for (l = loc; l != NULL; l = l->dw_loc_next)
30665 {
30666 enum dwarf_location_atom opc = l->dw_loc_opc;
30667 hstate.add_object (opc);
30668 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30669 {
30670 size_of_locs (loc);
30671 sizes_computed = true;
30672 }
30673 hash_loc_operands (l, hstate);
30674 }
30675 }
30676
30677 /* Compute hash of the whole location list LIST_HEAD. */
30678
30679 static inline void
30680 hash_loc_list (dw_loc_list_ref list_head)
30681 {
30682 dw_loc_list_ref curr = list_head;
30683 inchash::hash hstate;
30684
30685 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30686 {
30687 hstate.add (curr->begin, strlen (curr->begin) + 1);
30688 hstate.add (curr->end, strlen (curr->end) + 1);
30689 hstate.add_object (curr->vbegin);
30690 hstate.add_object (curr->vend);
30691 if (curr->section)
30692 hstate.add (curr->section, strlen (curr->section) + 1);
30693 hash_locs (curr->expr, hstate);
30694 }
30695 list_head->hash = hstate.end ();
30696 }
30697
30698 /* Return true if X and Y opcodes have the same operands. */
30699
30700 static inline bool
30701 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30702 {
30703 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30704 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30705 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30706 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30707
30708 switch (x->dw_loc_opc)
30709 {
30710 case DW_OP_const4u:
30711 case DW_OP_const8u:
30712 if (x->dtprel)
30713 goto hash_addr;
30714 /* FALLTHRU */
30715 case DW_OP_const1u:
30716 case DW_OP_const1s:
30717 case DW_OP_const2u:
30718 case DW_OP_const2s:
30719 case DW_OP_const4s:
30720 case DW_OP_const8s:
30721 case DW_OP_constu:
30722 case DW_OP_consts:
30723 case DW_OP_pick:
30724 case DW_OP_plus_uconst:
30725 case DW_OP_breg0:
30726 case DW_OP_breg1:
30727 case DW_OP_breg2:
30728 case DW_OP_breg3:
30729 case DW_OP_breg4:
30730 case DW_OP_breg5:
30731 case DW_OP_breg6:
30732 case DW_OP_breg7:
30733 case DW_OP_breg8:
30734 case DW_OP_breg9:
30735 case DW_OP_breg10:
30736 case DW_OP_breg11:
30737 case DW_OP_breg12:
30738 case DW_OP_breg13:
30739 case DW_OP_breg14:
30740 case DW_OP_breg15:
30741 case DW_OP_breg16:
30742 case DW_OP_breg17:
30743 case DW_OP_breg18:
30744 case DW_OP_breg19:
30745 case DW_OP_breg20:
30746 case DW_OP_breg21:
30747 case DW_OP_breg22:
30748 case DW_OP_breg23:
30749 case DW_OP_breg24:
30750 case DW_OP_breg25:
30751 case DW_OP_breg26:
30752 case DW_OP_breg27:
30753 case DW_OP_breg28:
30754 case DW_OP_breg29:
30755 case DW_OP_breg30:
30756 case DW_OP_breg31:
30757 case DW_OP_regx:
30758 case DW_OP_fbreg:
30759 case DW_OP_piece:
30760 case DW_OP_deref_size:
30761 case DW_OP_xderef_size:
30762 return valx1->v.val_int == valy1->v.val_int;
30763 case DW_OP_skip:
30764 case DW_OP_bra:
30765 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30766 can cause irrelevant differences in dw_loc_addr. */
30767 gcc_assert (valx1->val_class == dw_val_class_loc
30768 && valy1->val_class == dw_val_class_loc
30769 && (dwarf_split_debug_info
30770 || x->dw_loc_addr == y->dw_loc_addr));
30771 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30772 case DW_OP_implicit_value:
30773 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30774 || valx2->val_class != valy2->val_class)
30775 return false;
30776 switch (valx2->val_class)
30777 {
30778 case dw_val_class_const:
30779 return valx2->v.val_int == valy2->v.val_int;
30780 case dw_val_class_vec:
30781 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30782 && valx2->v.val_vec.length == valy2->v.val_vec.length
30783 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30784 valx2->v.val_vec.elt_size
30785 * valx2->v.val_vec.length) == 0;
30786 case dw_val_class_const_double:
30787 return valx2->v.val_double.low == valy2->v.val_double.low
30788 && valx2->v.val_double.high == valy2->v.val_double.high;
30789 case dw_val_class_wide_int:
30790 return *valx2->v.val_wide == *valy2->v.val_wide;
30791 case dw_val_class_addr:
30792 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30793 default:
30794 gcc_unreachable ();
30795 }
30796 case DW_OP_bregx:
30797 case DW_OP_bit_piece:
30798 return valx1->v.val_int == valy1->v.val_int
30799 && valx2->v.val_int == valy2->v.val_int;
30800 case DW_OP_addr:
30801 hash_addr:
30802 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30803 case DW_OP_GNU_addr_index:
30804 case DW_OP_addrx:
30805 case DW_OP_GNU_const_index:
30806 case DW_OP_constx:
30807 {
30808 rtx ax1 = valx1->val_entry->addr.rtl;
30809 rtx ay1 = valy1->val_entry->addr.rtl;
30810 return rtx_equal_p (ax1, ay1);
30811 }
30812 case DW_OP_implicit_pointer:
30813 case DW_OP_GNU_implicit_pointer:
30814 return valx1->val_class == dw_val_class_die_ref
30815 && valx1->val_class == valy1->val_class
30816 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30817 && valx2->v.val_int == valy2->v.val_int;
30818 case DW_OP_entry_value:
30819 case DW_OP_GNU_entry_value:
30820 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30821 case DW_OP_const_type:
30822 case DW_OP_GNU_const_type:
30823 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30824 || valx2->val_class != valy2->val_class)
30825 return false;
30826 switch (valx2->val_class)
30827 {
30828 case dw_val_class_const:
30829 return valx2->v.val_int == valy2->v.val_int;
30830 case dw_val_class_vec:
30831 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30832 && valx2->v.val_vec.length == valy2->v.val_vec.length
30833 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30834 valx2->v.val_vec.elt_size
30835 * valx2->v.val_vec.length) == 0;
30836 case dw_val_class_const_double:
30837 return valx2->v.val_double.low == valy2->v.val_double.low
30838 && valx2->v.val_double.high == valy2->v.val_double.high;
30839 case dw_val_class_wide_int:
30840 return *valx2->v.val_wide == *valy2->v.val_wide;
30841 default:
30842 gcc_unreachable ();
30843 }
30844 case DW_OP_regval_type:
30845 case DW_OP_deref_type:
30846 case DW_OP_GNU_regval_type:
30847 case DW_OP_GNU_deref_type:
30848 return valx1->v.val_int == valy1->v.val_int
30849 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30850 case DW_OP_convert:
30851 case DW_OP_reinterpret:
30852 case DW_OP_GNU_convert:
30853 case DW_OP_GNU_reinterpret:
30854 if (valx1->val_class != valy1->val_class)
30855 return false;
30856 if (valx1->val_class == dw_val_class_unsigned_const)
30857 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30858 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30859 case DW_OP_GNU_parameter_ref:
30860 return valx1->val_class == dw_val_class_die_ref
30861 && valx1->val_class == valy1->val_class
30862 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30863 default:
30864 /* Other codes have no operands. */
30865 return true;
30866 }
30867 }
30868
30869 /* Return true if DWARF location expressions X and Y are the same. */
30870
30871 static inline bool
30872 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30873 {
30874 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30875 if (x->dw_loc_opc != y->dw_loc_opc
30876 || x->dtprel != y->dtprel
30877 || !compare_loc_operands (x, y))
30878 break;
30879 return x == NULL && y == NULL;
30880 }
30881
30882 /* Hashtable helpers. */
30883
30884 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30885 {
30886 static inline hashval_t hash (const dw_loc_list_struct *);
30887 static inline bool equal (const dw_loc_list_struct *,
30888 const dw_loc_list_struct *);
30889 };
30890
30891 /* Return precomputed hash of location list X. */
30892
30893 inline hashval_t
30894 loc_list_hasher::hash (const dw_loc_list_struct *x)
30895 {
30896 return x->hash;
30897 }
30898
30899 /* Return true if location lists A and B are the same. */
30900
30901 inline bool
30902 loc_list_hasher::equal (const dw_loc_list_struct *a,
30903 const dw_loc_list_struct *b)
30904 {
30905 if (a == b)
30906 return true;
30907 if (a->hash != b->hash)
30908 return false;
30909 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30910 if (strcmp (a->begin, b->begin) != 0
30911 || strcmp (a->end, b->end) != 0
30912 || (a->section == NULL) != (b->section == NULL)
30913 || (a->section && strcmp (a->section, b->section) != 0)
30914 || a->vbegin != b->vbegin || a->vend != b->vend
30915 || !compare_locs (a->expr, b->expr))
30916 break;
30917 return a == NULL && b == NULL;
30918 }
30919
30920 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30921
30922
30923 /* Recursively optimize location lists referenced from DIE
30924 children and share them whenever possible. */
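/* In other words, the sharing is a hash-cons scheme: hash_loc_list
   computes a hash over a list's entries, and the first list stored for
   a given hash/contents (as checked by loc_list_hasher::equal) becomes
   the canonical copy kept in HTAB; any later attribute whose list
   compares equal is simply redirected to that canonical copy, so
   several DIEs can share one dw_loc_list_struct.  */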
30925
30926 static void
30927 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30928 {
30929 dw_die_ref c;
30930 dw_attr_node *a;
30931 unsigned ix;
30932 dw_loc_list_struct **slot;
30933 bool drop_locviews = false;
30934 bool has_locviews = false;
30935
30936 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30937 if (AT_class (a) == dw_val_class_loc_list)
30938 {
30939 dw_loc_list_ref list = AT_loc_list (a);
30940 /* TODO: perform some optimizations here, before hashing
30941 it and storing into the hash table. */
30942 hash_loc_list (list);
30943 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30944 if (*slot == NULL)
30945 {
30946 *slot = list;
30947 if (loc_list_has_views (list))
30948 gcc_assert (list->vl_symbol);
30949 else if (list->vl_symbol)
30950 {
30951 drop_locviews = true;
30952 list->vl_symbol = NULL;
30953 }
30954 }
30955 else
30956 {
30957 if (list->vl_symbol && !(*slot)->vl_symbol)
30958 drop_locviews = true;
30959 a->dw_attr_val.v.val_loc_list = *slot;
30960 }
30961 }
30962 else if (AT_class (a) == dw_val_class_view_list)
30963 {
30964 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30965 has_locviews = true;
30966 }
30967
30968
30969 if (drop_locviews && has_locviews)
30970 remove_AT (die, DW_AT_GNU_locviews);
30971
30972 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30973 }
30974
30975
30976 /* Recursively assign each location list a unique index into the debug_addr
30977 section. */
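/* Concretely, each output-worthy list entry gets a begin_entry created
   with add_addr_table_entry for its begin label; the numeric index is
   assigned later, when the address table is traversed with
   index_addr_table_entry (see the dwarf_split_debug_info handling in
   dwarf2out_finish below).  */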
30978
30979 static void
30980 index_location_lists (dw_die_ref die)
30981 {
30982 dw_die_ref c;
30983 dw_attr_node *a;
30984 unsigned ix;
30985
30986 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30987 if (AT_class (a) == dw_val_class_loc_list)
30988 {
30989 dw_loc_list_ref list = AT_loc_list (a);
30990 dw_loc_list_ref curr;
30991 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30992 {
30993 /* Don't index an entry that has already been indexed
30994 or won't be output. Make sure skip_loc_list_entry doesn't
30995 call size_of_locs, because that might cause a circular dependency:
30996 index_location_lists requires address table indexes to already be
30997 computed, while adding new indexes through add_addr_table_entry
30998 requires that no new additions be made to the hash table during
30999 the index computation. In the rare case of a DWARF[234] location
31000 expression >= 64KB, we'll just waste an unused address table
31001 entry for it. */
31002 if (curr->begin_entry != NULL
31003 || skip_loc_list_entry (curr))
31004 continue;
31005
31006 curr->begin_entry
31007 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31008 }
31009 }
31010
31011 FOR_EACH_CHILD (die, c, index_location_lists (c));
31012 }
31013
31014 /* Optimize location lists referenced from DIE
31015 children and share them whenever possible. */
31016
31017 static void
31018 optimize_location_lists (dw_die_ref die)
31019 {
31020 loc_list_hash_type htab (500);
31021 optimize_location_lists_1 (die, &htab);
31022 }
31023 \f
31024 /* Traverse the limbo die list, and add parent/child links. The only
31025 dies without parents that should be here are concrete instances of
31026 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31027 For concrete instances, we can get the parent die from the abstract
31028 instance. */
31029
31030 static void
31031 flush_limbo_die_list (void)
31032 {
31033 limbo_die_node *node;
31034
31035 /* get_context_die calls force_decl_die, which can put new DIEs on the
31036 limbo list in LTO mode when nested functions are put in a different
31037 partition than that of their parent function. */
31038 while ((node = limbo_die_list))
31039 {
31040 dw_die_ref die = node->die;
31041 limbo_die_list = node->next;
31042
31043 if (die->die_parent == NULL)
31044 {
31045 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31046
31047 if (origin && origin->die_parent)
31048 add_child_die (origin->die_parent, die);
31049 else if (is_cu_die (die))
31050 ;
31051 else if (seen_error ())
31052 /* It's OK to be confused by errors in the input. */
31053 add_child_die (comp_unit_die (), die);
31054 else
31055 {
31056 /* In certain situations, the lexical block containing a
31057 nested function can be optimized away, which results
31058 in the nested function die being orphaned. Likewise
31059 with the return type of that nested function. Force
31060 this to be a child of the containing function.
31061
31062 It may happen that even the containing function got fully
31063 inlined and optimized out. In that case we are lost and
31064 make the DIE a child of the compile unit. This should not be
31065 a big issue as the function is likely unreachable too. */
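/* A purely illustrative GNU C example of the first situation:

     int f (void)
     {
       {
         int g (void) { return 0; }  // nested function in a lexical block
         return g ();
       }
     }

   If the block's DIE is optimized away, g's DIE can end up on the limbo
   list and is reparented to f's DIE here.  */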
31066 gcc_assert (node->created_for);
31067
31068 if (DECL_P (node->created_for))
31069 origin = get_context_die (DECL_CONTEXT (node->created_for));
31070 else if (TYPE_P (node->created_for))
31071 origin = scope_die_for (node->created_for, comp_unit_die ());
31072 else
31073 origin = comp_unit_die ();
31074
31075 add_child_die (origin, die);
31076 }
31077 }
31078 }
31079 }
31080
31081 /* Reset DIEs so we can output them again. */
31082
31083 static void
31084 reset_dies (dw_die_ref die)
31085 {
31086 dw_die_ref c;
31087
31088 /* Remove stuff we re-generate. */
31089 die->die_mark = 0;
31090 die->die_offset = 0;
31091 die->die_abbrev = 0;
31092 remove_AT (die, DW_AT_sibling);
31093
31094 FOR_EACH_CHILD (die, c, reset_dies (c));
31095 }
31096
31097 /* Output stuff that dwarf requires at the end of every file,
31098 and generate the DWARF-2 debugging info. */
31099
31100 static void
31101 dwarf2out_finish (const char *)
31102 {
31103 comdat_type_node *ctnode;
31104 dw_die_ref main_comp_unit_die;
31105 unsigned char checksum[16];
31106 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31107
31108 /* Flush out any latecomers to the limbo party. */
31109 flush_limbo_die_list ();
31110
31111 if (inline_entry_data_table)
31112 gcc_assert (inline_entry_data_table->elements () == 0);
31113
31114 if (flag_checking)
31115 {
31116 verify_die (comp_unit_die ());
31117 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31118 verify_die (node->die);
31119 }
31120
31121 /* We shouldn't have any symbols with delayed asm names for
31122 DIEs generated after early finish. */
31123 gcc_assert (deferred_asm_name == NULL);
31124
31125 gen_remaining_tmpl_value_param_die_attribute ();
31126
31127 if (flag_generate_lto || flag_generate_offload)
31128 {
31129 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31130
31131 /* Prune stuff so that dwarf2out_finish runs successfully
31132 for the fat part of the object. */
31133 reset_dies (comp_unit_die ());
31134 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31135 reset_dies (node->die);
31136
31137 hash_table<comdat_type_hasher> comdat_type_table (100);
31138 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31139 {
31140 comdat_type_node **slot
31141 = comdat_type_table.find_slot (ctnode, INSERT);
31142
31143 /* Don't reset types twice. */
31144 if (*slot != HTAB_EMPTY_ENTRY)
31145 continue;
31146
31147 /* Remove the pointer to the line table. */
31148 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31149
31150 if (debug_info_level >= DINFO_LEVEL_TERSE)
31151 reset_dies (ctnode->root_die);
31152
31153 *slot = ctnode;
31154 }
31155
31156 /* Reset the CU die symbol so we don't output it twice. */
31157 comp_unit_die ()->die_id.die_symbol = NULL;
31158
31159 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31160 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31161 if (have_macinfo)
31162 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31163
31164 /* Remove indirect string decisions. */
31165 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31166 if (debug_line_str_hash)
31167 {
31168 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31169 debug_line_str_hash = NULL;
31170 }
31171 }
31172
31173 #if ENABLE_ASSERT_CHECKING
31174 {
31175 dw_die_ref die = comp_unit_die (), c;
31176 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31177 }
31178 #endif
31179 resolve_addr (comp_unit_die ());
31180 move_marked_base_types ();
31181
31182 /* Initialize sections and labels used for actual assembler output. */
31183 unsigned generation = init_sections_and_labels (false);
31184
31185 /* Traverse the DIE's and add sibling attributes to those DIE's that
31186 have children. */
31187 add_sibling_attributes (comp_unit_die ());
31188 limbo_die_node *node;
31189 for (node = cu_die_list; node; node = node->next)
31190 add_sibling_attributes (node->die);
31191 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31192 add_sibling_attributes (ctnode->root_die);
31193
31194 /* When splitting DWARF info, we put some attributes in the
31195 skeleton compile_unit DIE that remains in the .o, while
31196 most attributes go in the DWO compile_unit_die. */
31197 if (dwarf_split_debug_info)
31198 {
31199 limbo_die_node *cu;
31200 main_comp_unit_die = gen_compile_unit_die (NULL);
31201 if (dwarf_version >= 5)
31202 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31203 cu = limbo_die_list;
31204 gcc_assert (cu->die == main_comp_unit_die);
31205 limbo_die_list = limbo_die_list->next;
31206 cu->next = cu_die_list;
31207 cu_die_list = cu;
31208 }
31209 else
31210 main_comp_unit_die = comp_unit_die ();
31211
31212 /* Output a terminator label for the .text section. */
31213 switch_to_section (text_section);
31214 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31215 if (cold_text_section)
31216 {
31217 switch_to_section (cold_text_section);
31218 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31219 }
31220
31221 /* We can only use the low/high_pc attributes if all of the code was
31222 in .text. */
31223 if (!have_multiple_function_sections
31224 || (dwarf_version < 3 && dwarf_strict))
31225 {
31226 /* Don't add if the CU has no associated code. */
31227 if (text_section_used)
31228 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31229 text_end_label, true);
31230 }
31231 else
31232 {
31233 unsigned fde_idx;
31234 dw_fde_ref fde;
31235 bool range_list_added = false;
31236
31237 if (text_section_used)
31238 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31239 text_end_label, &range_list_added, true);
31240 if (cold_text_section_used)
31241 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31242 cold_end_label, &range_list_added, true);
31243
31244 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31245 {
31246 if (DECL_IGNORED_P (fde->decl))
31247 continue;
31248 if (!fde->in_std_section)
31249 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31250 fde->dw_fde_end, &range_list_added,
31251 true);
31252 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31253 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31254 fde->dw_fde_second_end, &range_list_added,
31255 true);
31256 }
31257
31258 if (range_list_added)
31259 {
31260 /* We need to give .debug_loc and .debug_ranges an appropriate
31261 "base address". Use zero so that these addresses become
31262 absolute. Historically, we've emitted the unexpected
31263 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31264 Emit both to give time for other tools to adapt. */
31265 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31266 if (! dwarf_strict && dwarf_version < 4)
31267 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31268
31269 add_ranges (NULL);
31270 }
31271 }
31272
31273 /* AIX Assembler inserts the length, so adjust the reference to match the
31274 offset expected by debuggers. */
31275 strcpy (dl_section_ref, debug_line_section_label);
31276 if (XCOFF_DEBUGGING_INFO)
31277 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31278
31279 if (debug_info_level >= DINFO_LEVEL_TERSE)
31280 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31281 dl_section_ref);
31282
31283 if (have_macinfo)
31284 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31285 macinfo_section_label);
31286
31287 if (dwarf_split_debug_info)
31288 {
31289 if (have_location_lists)
31290 {
31291 /* Since we generate the loclists in the split DWARF .dwo
31292 file itself, we don't need to generate a loclists_base
31293 attribute for the split compile unit DIE. That attribute
31294 (and using relocatable sec_offset FORMs) isn't allowed
31295 for a split compile unit. Only if the .debug_loclists
31296 section was in the main file, would we need to generate a
31297 loclists_base attribute here (for the full or skeleton
31298 unit DIE). */
31299
31300 /* optimize_location_lists calculates the size of the lists,
31301 so index them first, and assign indices to the entries.
31302 Although optimize_location_lists will remove entries from
31303 the table, it only does so for duplicates, and therefore
31304 only reduces ref_counts to 1. */
31305 index_location_lists (comp_unit_die ());
31306 }
31307
31308 if (addr_index_table != NULL)
31309 {
31310 unsigned int index = 0;
31311 addr_index_table
31312 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31313 (&index);
31314 }
31315 }
31316
31317 loc_list_idx = 0;
31318 if (have_location_lists)
31319 {
31320 optimize_location_lists (comp_unit_die ());
31321 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31322 if (dwarf_version >= 5 && dwarf_split_debug_info)
31323 assign_location_list_indexes (comp_unit_die ());
31324 }
31325
31326 save_macinfo_strings ();
31327
31328 if (dwarf_split_debug_info)
31329 {
31330 unsigned int index = 0;
31331
31332 /* Add attributes common to skeleton compile_units and
31333 type_units. Because these attributes include strings, it
31334 must be done before freezing the string table. Top-level
31335 skeleton die attrs are added when the skeleton type unit is
31336 created, so ensure it is created by this point. */
31337 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31338 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31339 }
31340
31341 /* Output all of the compilation units. We put the main one last so that
31342 the offsets are available to output_pubnames. */
31343 for (node = cu_die_list; node; node = node->next)
31344 output_comp_unit (node->die, 0, NULL);
31345
31346 hash_table<comdat_type_hasher> comdat_type_table (100);
31347 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31348 {
31349 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31350
31351 /* Don't output duplicate types. */
31352 if (*slot != HTAB_EMPTY_ENTRY)
31353 continue;
31354
31355 /* Add a pointer to the line table for the main compilation unit
31356 so that the debugger can make sense of DW_AT_decl_file
31357 attributes. */
31358 if (debug_info_level >= DINFO_LEVEL_TERSE)
31359 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31360 (!dwarf_split_debug_info
31361 ? dl_section_ref
31362 : debug_skeleton_line_section_label));
31363
31364 output_comdat_type_unit (ctnode);
31365 *slot = ctnode;
31366 }
31367
31368 if (dwarf_split_debug_info)
31369 {
31370 int mark;
31371 struct md5_ctx ctx;
31372
31373 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31374 index_rnglists ();
31375
31376 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31377 md5_init_ctx (&ctx);
31378 mark = 0;
31379 die_checksum (comp_unit_die (), &ctx, &mark);
31380 unmark_all_dies (comp_unit_die ());
31381 md5_finish_ctx (&ctx, checksum);
31382
31383 if (dwarf_version < 5)
31384 {
31385 /* Use the first 8 bytes of the checksum as the dwo_id,
31386 and add it to both comp-unit DIEs. */
31387 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31388 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31389 }
31390
31391 /* Add the base offset of the ranges table to the skeleton
31392 comp-unit DIE. */
31393 if (!vec_safe_is_empty (ranges_table))
31394 {
31395 if (dwarf_version >= 5)
31396 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31397 ranges_base_label);
31398 else
31399 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31400 ranges_section_label);
31401 }
31402
31403 switch_to_section (debug_addr_section);
31404 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31405 which GCC uses to implement -gsplit-dwarf as a GNU extension
31406 before DWARF5, didn't have a header for .debug_addr units.
31407 DWARF5 specifies a small header when address tables are used. */
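/* The DWARF5 header emitted below is the unit length (preceded by the
   0xffffffff escape for 64-bit DWARF), a 2-byte version, a 1-byte
   address size and a 1-byte segment selector size; the "+ 4" in
   addrs_length accounts for those last three fields, which are counted
   in the unit length but are not address table entries.  */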
31408 if (dwarf_version >= 5)
31409 {
31410 unsigned int last_idx = 0;
31411 unsigned long addrs_length;
31412
31413 addr_index_table->traverse_noresize
31414 <unsigned int *, count_index_addrs> (&last_idx);
31415 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31416
31417 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31418 dw2_asm_output_data (4, 0xffffffff,
31419 "Escape value for 64-bit DWARF extension");
31420 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31421 "Length of Address Unit");
31422 dw2_asm_output_data (2, 5, "DWARF addr version");
31423 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31424 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31425 }
31426 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31427 output_addr_table ();
31428 }
31429
31430 /* Output the main compilation unit if non-empty or if .debug_macinfo
31431 or .debug_macro will be emitted. */
31432 output_comp_unit (comp_unit_die (), have_macinfo,
31433 dwarf_split_debug_info ? checksum : NULL);
31434
31435 if (dwarf_split_debug_info && info_section_emitted)
31436 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31437
31438 /* Output the abbreviation table. */
31439 if (vec_safe_length (abbrev_die_table) != 1)
31440 {
31441 switch_to_section (debug_abbrev_section);
31442 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31443 output_abbrev_section ();
31444 }
31445
31446 /* Output location list section if necessary. */
31447 if (have_location_lists)
31448 {
31449 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31450 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31451 /* Output the location lists info. */
31452 switch_to_section (debug_loc_section);
31453 if (dwarf_version >= 5)
31454 {
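/* For DWARF5 this emits the .debug_loclists header: the unit length
   (measured by the l1/l2 labels, with the optional 64-bit DWARF
   escape), the version, a 1-byte address size, a 1-byte segment
   selector size, and the offset entry count, which is non-zero only
   for -gsplit-dwarf where a table of offsets follows the header.  */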
31455 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31456 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31457 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31458 dw2_asm_output_data (4, 0xffffffff,
31459 "Initial length escape value indicating "
31460 "64-bit DWARF extension");
31461 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31462 "Length of Location Lists");
31463 ASM_OUTPUT_LABEL (asm_out_file, l1);
31464 output_dwarf_version ();
31465 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31466 dw2_asm_output_data (1, 0, "Segment Size");
31467 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31468 "Offset Entry Count");
31469 }
31470 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31471 if (dwarf_version >= 5 && dwarf_split_debug_info)
31472 {
31473 unsigned int save_loc_list_idx = loc_list_idx;
31474 loc_list_idx = 0;
31475 output_loclists_offsets (comp_unit_die ());
31476 gcc_assert (save_loc_list_idx == loc_list_idx);
31477 }
31478 output_location_lists (comp_unit_die ());
31479 if (dwarf_version >= 5)
31480 ASM_OUTPUT_LABEL (asm_out_file, l2);
31481 }
31482
31483 output_pubtables ();
31484
31485 /* Output the address range information if a CU (.debug_info section)
31486 was emitted. We output an empty table even if we had no functions
31487 to put in it. This is because the consumer has no way to tell the
31488 difference between an empty table that we omitted and failure to
31489 generate a table that would have contained data. */
31490 if (info_section_emitted)
31491 {
31492 switch_to_section (debug_aranges_section);
31493 output_aranges ();
31494 }
31495
31496 /* Output ranges section if necessary. */
31497 if (!vec_safe_is_empty (ranges_table))
31498 {
31499 if (dwarf_version >= 5)
31500 output_rnglists (generation);
31501 else
31502 output_ranges ();
31503 }
31504
31505 /* Have to end the macro section. */
31506 if (have_macinfo)
31507 {
31508 switch_to_section (debug_macinfo_section);
31509 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31510 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31511 : debug_skeleton_line_section_label, false);
31512 dw2_asm_output_data (1, 0, "End compilation unit");
31513 }
31514
31515 /* Output the source line correspondence table. We must do this
31516 even if there is no line information. Otherwise, on an empty
31517 translation unit, we will generate a present, but empty,
31518 .debug_info section. IRIX 6.5 `nm' will then complain when
31519 examining the file. This is done late so that any filenames
31520 used by the debug_info section are marked as 'used'. */
31521 switch_to_section (debug_line_section);
31522 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31523 if (! output_asm_line_debug_info ())
31524 output_line_info (false);
31525
31526 if (dwarf_split_debug_info && info_section_emitted)
31527 {
31528 switch_to_section (debug_skeleton_line_section);
31529 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31530 output_line_info (true);
31531 }
31532
31533 /* If we emitted any indirect strings, output the string table too. */
31534 if (debug_str_hash || skeleton_debug_str_hash)
31535 output_indirect_strings ();
31536 if (debug_line_str_hash)
31537 {
31538 switch_to_section (debug_line_str_section);
31539 const enum dwarf_form form = DW_FORM_line_strp;
31540 debug_line_str_hash->traverse<enum dwarf_form,
31541 output_indirect_string> (form);
31542 }
31543
31544 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31545 symview_upper_bound = 0;
31546 if (zero_view_p)
31547 bitmap_clear (zero_view_p);
31548 }
31549
31550 /* Returns a hash value for X (which really is a variable_value_struct). */
31551
31552 inline hashval_t
31553 variable_value_hasher::hash (variable_value_struct *x)
31554 {
31555 return (hashval_t) x->decl_id;
31556 }
31557
31558 /* Return true if the decl_id of variable_value_struct X is the same as
31559 the UID of decl Y. */
31560
31561 inline bool
31562 variable_value_hasher::equal (variable_value_struct *x, tree y)
31563 {
31564 return x->decl_id == DECL_UID (y);
31565 }
31566
31567 /* Helper function for resolve_variable_value, handle
31568 DW_OP_GNU_variable_value in one location expression.
31569 Return true if exprloc has been changed into loclist. */
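/* In outline, each DW_OP_GNU_variable_value whose operand is a decl of
   the current function is handled one of three ways: if the decl
   already has a DIE, the operand becomes a die_ref; if its location
   requires a multi-entry location list, the whole attribute is turned
   into a loclist (only for attributes that allow that class; otherwise,
   where permitted, a DW_TAG_variable DIE is created and referenced);
   and if the location is a single expression, that expression is
   spliced in place of the DW_OP_GNU_variable_value operation.  */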
31570
31571 static bool
31572 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31573 {
31574 dw_loc_descr_ref next;
31575 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31576 {
31577 next = loc->dw_loc_next;
31578 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31579 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31580 continue;
31581
31582 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31583 if (DECL_CONTEXT (decl) != current_function_decl)
31584 continue;
31585
31586 dw_die_ref ref = lookup_decl_die (decl);
31587 if (ref)
31588 {
31589 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31590 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31591 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31592 continue;
31593 }
31594 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31595 if (l == NULL)
31596 continue;
31597 if (l->dw_loc_next)
31598 {
31599 if (AT_class (a) != dw_val_class_loc)
31600 continue;
31601 switch (a->dw_attr)
31602 {
31603 /* The following attributes allow both exprloc and loclist
31604 classes, so we can change them into a loclist. */
31605 case DW_AT_location:
31606 case DW_AT_string_length:
31607 case DW_AT_return_addr:
31608 case DW_AT_data_member_location:
31609 case DW_AT_frame_base:
31610 case DW_AT_segment:
31611 case DW_AT_static_link:
31612 case DW_AT_use_location:
31613 case DW_AT_vtable_elem_location:
31614 if (prev)
31615 {
31616 prev->dw_loc_next = NULL;
31617 prepend_loc_descr_to_each (l, AT_loc (a));
31618 }
31619 if (next)
31620 add_loc_descr_to_each (l, next);
31621 a->dw_attr_val.val_class = dw_val_class_loc_list;
31622 a->dw_attr_val.val_entry = NULL;
31623 a->dw_attr_val.v.val_loc_list = l;
31624 have_location_lists = true;
31625 return true;
31626 /* The following attributes allow both exprloc and reference
31627 classes, so if the whole expression is a single
31628 DW_OP_GNU_variable_value we can turn it into a reference. */
31629 case DW_AT_byte_size:
31630 case DW_AT_bit_size:
31631 case DW_AT_lower_bound:
31632 case DW_AT_upper_bound:
31633 case DW_AT_bit_stride:
31634 case DW_AT_count:
31635 case DW_AT_allocated:
31636 case DW_AT_associated:
31637 case DW_AT_byte_stride:
31638 if (prev == NULL && next == NULL)
31639 break;
31640 /* FALLTHRU */
31641 default:
31642 if (dwarf_strict)
31643 continue;
31644 break;
31645 }
31646 /* Create DW_TAG_variable that we can refer to. */
31647 gen_decl_die (decl, NULL_TREE, NULL,
31648 lookup_decl_die (current_function_decl));
31649 ref = lookup_decl_die (decl);
31650 if (ref)
31651 {
31652 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31653 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31654 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31655 }
31656 continue;
31657 }
31658 if (prev)
31659 {
31660 prev->dw_loc_next = l->expr;
31661 add_loc_descr (&prev->dw_loc_next, next);
31662 free_loc_descr (loc, NULL);
31663 next = prev->dw_loc_next;
31664 }
31665 else
31666 {
31667 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31668 add_loc_descr (&loc, next);
31669 next = loc;
31670 }
31671 loc = prev;
31672 }
31673 return false;
31674 }
31675
31676 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31677
31678 static void
31679 resolve_variable_value (dw_die_ref die)
31680 {
31681 dw_attr_node *a;
31682 dw_loc_list_ref loc;
31683 unsigned ix;
31684
31685 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31686 switch (AT_class (a))
31687 {
31688 case dw_val_class_loc:
31689 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31690 break;
31691 /* FALLTHRU */
31692 case dw_val_class_loc_list:
31693 loc = AT_loc_list (a);
31694 gcc_assert (loc);
31695 for (; loc; loc = loc->dw_loc_next)
31696 resolve_variable_value_in_expr (a, loc->expr);
31697 break;
31698 default:
31699 break;
31700 }
31701 }
31702
31703 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31704 temporaries in the current function. */
31705
31706 static void
31707 resolve_variable_values (void)
31708 {
31709 if (!variable_value_hash || !current_function_decl)
31710 return;
31711
31712 struct variable_value_struct *node
31713 = variable_value_hash->find_with_hash (current_function_decl,
31714 DECL_UID (current_function_decl));
31715
31716 if (node == NULL)
31717 return;
31718
31719 unsigned int i;
31720 dw_die_ref die;
31721 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31722 resolve_variable_value (die);
31723 }
31724
31725 /* Helper function for note_variable_value, handle one location
31726 expression. */
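/* When a DW_OP_GNU_variable_value's decl has no DIE yet (and one is not
   forced early for LTO), this records the DIE using the value in
   variable_value_hash, keyed by the DECL_UID of the decl's containing
   function, so that resolve_variable_values can retry the resolution
   once that function has been compiled.  */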
31727
31728 static void
31729 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31730 {
31731 for (; loc; loc = loc->dw_loc_next)
31732 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31733 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31734 {
31735 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31736 dw_die_ref ref = lookup_decl_die (decl);
31737 if (! ref && (flag_generate_lto || flag_generate_offload))
31738 {
31739 /* ??? This is somewhat of a hack, because we do not create DIEs
31740 for variables not in BLOCK trees early; but when generating
31741 early LTO output we need the dw_val_class_decl_ref to be
31742 fully resolved. For fat LTO objects we'd also like to
31743 undo this after LTO dwarf output. */
31744 gcc_assert (DECL_CONTEXT (decl));
31745 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31746 gcc_assert (ctx != NULL);
31747 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31748 ref = lookup_decl_die (decl);
31749 gcc_assert (ref != NULL);
31750 }
31751 if (ref)
31752 {
31753 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31754 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31755 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31756 continue;
31757 }
31758 if (VAR_P (decl)
31759 && DECL_CONTEXT (decl)
31760 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31761 && lookup_decl_die (DECL_CONTEXT (decl)))
31762 {
31763 if (!variable_value_hash)
31764 variable_value_hash
31765 = hash_table<variable_value_hasher>::create_ggc (10);
31766
31767 tree fndecl = DECL_CONTEXT (decl);
31768 struct variable_value_struct *node;
31769 struct variable_value_struct **slot
31770 = variable_value_hash->find_slot_with_hash (fndecl,
31771 DECL_UID (fndecl),
31772 INSERT);
31773 if (*slot == NULL)
31774 {
31775 node = ggc_cleared_alloc<variable_value_struct> ();
31776 node->decl_id = DECL_UID (fndecl);
31777 *slot = node;
31778 }
31779 else
31780 node = *slot;
31781
31782 vec_safe_push (node->dies, die);
31783 }
31784 }
31785 }
31786
31787 /* Walk the tree DIE and note DIEs that still have a DW_OP_GNU_variable_value
31788 with a dw_val_class_decl_ref operand. */
31789
31790 static void
31791 note_variable_value (dw_die_ref die)
31792 {
31793 dw_die_ref c;
31794 dw_attr_node *a;
31795 dw_loc_list_ref loc;
31796 unsigned ix;
31797
31798 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31799 switch (AT_class (a))
31800 {
31801 case dw_val_class_loc_list:
31802 loc = AT_loc_list (a);
31803 gcc_assert (loc);
31804 if (!loc->noted_variable_value)
31805 {
31806 loc->noted_variable_value = 1;
31807 for (; loc; loc = loc->dw_loc_next)
31808 note_variable_value_in_expr (die, loc->expr);
31809 }
31810 break;
31811 case dw_val_class_loc:
31812 note_variable_value_in_expr (die, AT_loc (a));
31813 break;
31814 default:
31815 break;
31816 }
31817
31818 /* Mark children. */
31819 FOR_EACH_CHILD (die, c, note_variable_value (c));
31820 }
31821
31822 /* Perform any cleanups needed after the early debug generation pass
31823 has run. */
31824
31825 static void
31826 dwarf2out_early_finish (const char *filename)
31827 {
31828 set_early_dwarf s;
31829 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31830
31831 /* PCH might result in the DW_AT_producer string being restored from
31832 the header compilation, so always fill it with an empty string
31833 initially and overwrite it only here. */
31834 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31835 producer_string = gen_producer_string ();
31836 producer->dw_attr_val.v.val_str->refcount--;
31837 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31838
31839 /* Add the name for the main input file now. We delayed this from
31840 dwarf2out_init to avoid complications with PCH. */
31841 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31842 add_comp_dir_attribute (comp_unit_die ());
31843
31844 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31845 DW_AT_comp_dir into the .debug_line_str section. */
31846 if (!output_asm_line_debug_info ()
31847 && dwarf_version >= 5
31848 && DWARF5_USE_DEBUG_LINE_STR)
31849 {
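/* Handle DW_AT_name (i == 0) and DW_AT_comp_dir (i == 1) the same way.
   Strings whose NUL-terminated length fits in DWARF_OFFSET_SIZE bytes
   are skipped below: an inline copy of such a string is no larger than
   a .debug_line_str offset, so redirecting it would presumably save no
   space.  */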
31850 for (int i = 0; i < 2; i++)
31851 {
31852 dw_attr_node *a = get_AT (comp_unit_die (),
31853 i ? DW_AT_comp_dir : DW_AT_name);
31854 if (a == NULL
31855 || AT_class (a) != dw_val_class_str
31856 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31857 continue;
31858
31859 if (! debug_line_str_hash)
31860 debug_line_str_hash
31861 = hash_table<indirect_string_hasher>::create_ggc (10);
31862
31863 struct indirect_string_node *node
31864 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31865 set_indirect_string (node);
31866 node->form = DW_FORM_line_strp;
31867 a->dw_attr_val.v.val_str->refcount--;
31868 a->dw_attr_val.v.val_str = node;
31869 }
31870 }
31871
31872 /* With LTO, early dwarf was really finished at compile time, so make
31873 sure to adjust the phase after annotating the LTRANS CU DIE. */
31874 if (in_lto_p)
31875 {
31876 early_dwarf_finished = true;
31877 return;
31878 }
31879
31880 /* Walk through the list of incomplete types again, trying once more to
31881 emit full debugging info for them. */
31882 retry_incomplete_types ();
31883
31884 /* The point here is to flush out the limbo list so that it is empty
31885 and we don't need to stream it for LTO. */
31886 flush_limbo_die_list ();
31887
31888 gen_scheduled_generic_parms_dies ();
31889 gen_remaining_tmpl_value_param_die_attribute ();
31890
31891 /* Add DW_AT_linkage_name for all deferred DIEs. */
31892 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31893 {
31894 tree decl = node->created_for;
31895 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31896 /* A missing DECL_ASSEMBLER_NAME can indicate a constant DIE that
31897 ended up in deferred_asm_name before we knew it was
31898 constant and so was never written to disk. */
31899 && DECL_ASSEMBLER_NAME (decl))
31900 {
31901 add_linkage_attr (node->die, decl);
31902 move_linkage_attr (node->die);
31903 }
31904 }
31905 deferred_asm_name = NULL;
31906
31907 if (flag_eliminate_unused_debug_types)
31908 prune_unused_types ();
31909
31910 /* Generate separate COMDAT sections for type DIEs. */
31911 if (use_debug_types)
31912 {
31913 break_out_comdat_types (comp_unit_die ());
31914
31915 /* Each new type_unit DIE was added to the limbo die list when created.
31916 Since these have all been added to comdat_type_list, clear the
31917 limbo die list. */
31918 limbo_die_list = NULL;
31919
31920 /* For each new comdat type unit, copy declarations for incomplete
31921 types to make the new unit self-contained (i.e., no direct
31922 references to the main compile unit). */
31923 for (comdat_type_node *ctnode = comdat_type_list;
31924 ctnode != NULL; ctnode = ctnode->next)
31925 copy_decls_for_unworthy_types (ctnode->root_die);
31926 copy_decls_for_unworthy_types (comp_unit_die ());
31927
31928 /* In the process of copying declarations from one unit to another,
31929 we may have left some declarations behind that are no longer
31930 referenced. Prune them. */
31931 prune_unused_types ();
31932 }
31933
31934 /* Traverse the DIE's and note DIEs that still have a DW_OP_GNU_variable_value
31935 with a dw_val_class_decl_ref operand. */
31936 note_variable_value (comp_unit_die ());
31937 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31938 note_variable_value (node->die);
31939 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31940 ctnode = ctnode->next)
31941 note_variable_value (ctnode->root_die);
31942 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31943 note_variable_value (node->die);
31944
31945 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31946 both the main_cu and all skeleton TUs. Making this call unconditional
31947 would end up either adding a second copy of the AT_pubnames attribute, or
31948 requiring a special case in add_top_level_skeleton_die_attrs. */
31949 if (!dwarf_split_debug_info)
31950 add_AT_pubnames (comp_unit_die ());
31951
31952 /* The early debug phase is now finished. */
31953 early_dwarf_finished = true;
31954
31955 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31956 if ((!flag_generate_lto && !flag_generate_offload)
31957 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31958 copy_lto_debug_sections operation of the simple object support in
31959 libiberty is not implemented for them yet. */
31960 || TARGET_PECOFF || TARGET_COFF)
31961 return;
31962
31963 /* Now that we are going to output for LTO, initialize sections and
31964 labels to the LTO variants. We don't need a random-seed postfix as
31965 other LTO sections do, since linking the LTO debug sections into
31966 one in a partial link is fine. */
31967 init_sections_and_labels (true);
31968
31969 /* The output below is modeled after dwarf2out_finish with all
31970 location related output removed and some LTO specific changes.
31971 Some refactoring might make both smaller and easier to match up. */
31972
31973 /* Traverse the DIE's and add sibling attributes to those DIE's
31974 that have children. */
31975 add_sibling_attributes (comp_unit_die ());
31976 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31977 add_sibling_attributes (node->die);
31978 for (comdat_type_node *ctnode = comdat_type_list;
31979 ctnode != NULL; ctnode = ctnode->next)
31980 add_sibling_attributes (ctnode->root_die);
31981
31982 /* AIX Assembler inserts the length, so adjust the reference to match the
31983 offset expected by debuggers. */
31984 strcpy (dl_section_ref, debug_line_section_label);
31985 if (XCOFF_DEBUGGING_INFO)
31986 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31987
31988 if (debug_info_level >= DINFO_LEVEL_TERSE)
31989 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31990
31991 if (have_macinfo)
31992 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31993 macinfo_section_label);
31994
31995 save_macinfo_strings ();
31996
31997 if (dwarf_split_debug_info)
31998 {
31999 unsigned int index = 0;
32000 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32001 }
32002
32003 /* Output all of the compilation units. We put the main one last so that
32004 the offsets are available to output_pubnames. */
32005 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32006 output_comp_unit (node->die, 0, NULL);
32007
32008 hash_table<comdat_type_hasher> comdat_type_table (100);
32009 for (comdat_type_node *ctnode = comdat_type_list;
32010 ctnode != NULL; ctnode = ctnode->next)
32011 {
32012 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32013
32014 /* Don't output duplicate types. */
32015 if (*slot != HTAB_EMPTY_ENTRY)
32016 continue;
32017
32018 /* Add a pointer to the line table for the main compilation unit
32019 so that the debugger can make sense of DW_AT_decl_file
32020 attributes. */
32021 if (debug_info_level >= DINFO_LEVEL_TERSE)
32022 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32023 (!dwarf_split_debug_info
32024 ? debug_line_section_label
32025 : debug_skeleton_line_section_label));
32026
32027 output_comdat_type_unit (ctnode);
32028 *slot = ctnode;
32029 }
32030
32031 /* Stick a unique symbol to the main debuginfo section. */
32032 compute_comp_unit_symbol (comp_unit_die ());
32033
32034 /* Output the main compilation unit. We always need it if only for
32035 the CU symbol. */
32036 output_comp_unit (comp_unit_die (), true, NULL);
32037
32038 /* Output the abbreviation table. */
32039 if (vec_safe_length (abbrev_die_table) != 1)
32040 {
32041 switch_to_section (debug_abbrev_section);
32042 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32043 output_abbrev_section ();
32044 }
32045
32046 /* Have to end the macro section. */
32047 if (have_macinfo)
32048 {
32049 /* We have to save macinfo state if we need to output it again
32050 for the FAT part of the object. */
32051 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32052 if (flag_fat_lto_objects)
32053 macinfo_table = macinfo_table->copy ();
32054
32055 switch_to_section (debug_macinfo_section);
32056 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32057 output_macinfo (debug_line_section_label, true);
32058 dw2_asm_output_data (1, 0, "End compilation unit");
32059
32060 if (flag_fat_lto_objects)
32061 {
32062 vec_free (macinfo_table);
32063 macinfo_table = saved_macinfo_table;
32064 }
32065 }
32066
32067 /* Emit a skeleton debug_line section. */
32068 switch_to_section (debug_line_section);
32069 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32070 output_line_info (true);
32071
32072 /* If we emitted any indirect strings, output the string table too. */
32073 if (debug_str_hash || skeleton_debug_str_hash)
32074 output_indirect_strings ();
32075 if (debug_line_str_hash)
32076 {
32077 switch_to_section (debug_line_str_section);
32078 const enum dwarf_form form = DW_FORM_line_strp;
32079 debug_line_str_hash->traverse<enum dwarf_form,
32080 output_indirect_string> (form);
32081 }
32082
32083 /* Switch back to the text section. */
32084 switch_to_section (text_section);
32085 }
32086
32087 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32088 within the same process. For use by toplev::finalize. */
32089
32090 void
32091 dwarf2out_c_finalize (void)
32092 {
32093 last_var_location_insn = NULL;
32094 cached_next_real_insn = NULL;
32095 used_rtx_array = NULL;
32096 incomplete_types = NULL;
32097 debug_info_section = NULL;
32098 debug_skeleton_info_section = NULL;
32099 debug_abbrev_section = NULL;
32100 debug_skeleton_abbrev_section = NULL;
32101 debug_aranges_section = NULL;
32102 debug_addr_section = NULL;
32103 debug_macinfo_section = NULL;
32104 debug_line_section = NULL;
32105 debug_skeleton_line_section = NULL;
32106 debug_loc_section = NULL;
32107 debug_pubnames_section = NULL;
32108 debug_pubtypes_section = NULL;
32109 debug_str_section = NULL;
32110 debug_line_str_section = NULL;
32111 debug_str_dwo_section = NULL;
32112 debug_str_offsets_section = NULL;
32113 debug_ranges_section = NULL;
32114 debug_frame_section = NULL;
32115 fde_vec = NULL;
32116 debug_str_hash = NULL;
32117 debug_line_str_hash = NULL;
32118 skeleton_debug_str_hash = NULL;
32119 dw2_string_counter = 0;
32120 have_multiple_function_sections = false;
32121 text_section_used = false;
32122 cold_text_section_used = false;
32123 cold_text_section = NULL;
32124 current_unit_personality = NULL;
32125
32126 early_dwarf = false;
32127 early_dwarf_finished = false;
32128
32129 next_die_offset = 0;
32130 single_comp_unit_die = NULL;
32131 comdat_type_list = NULL;
32132 limbo_die_list = NULL;
32133 file_table = NULL;
32134 decl_die_table = NULL;
32135 common_block_die_table = NULL;
32136 decl_loc_table = NULL;
32137 call_arg_locations = NULL;
32138 call_arg_loc_last = NULL;
32139 call_site_count = -1;
32140 tail_call_site_count = -1;
32141 cached_dw_loc_list_table = NULL;
32142 abbrev_die_table = NULL;
32143 delete dwarf_proc_stack_usage_map;
32144 dwarf_proc_stack_usage_map = NULL;
32145 line_info_label_num = 0;
32146 cur_line_info_table = NULL;
32147 text_section_line_info = NULL;
32148 cold_text_section_line_info = NULL;
32149 separate_line_info = NULL;
32150 info_section_emitted = false;
32151 pubname_table = NULL;
32152 pubtype_table = NULL;
32153 macinfo_table = NULL;
32154 ranges_table = NULL;
32155 ranges_by_label = NULL;
32156 rnglist_idx = 0;
32157 have_location_lists = false;
32158 loclabel_num = 0;
32159 poc_label_num = 0;
32160 last_emitted_file = NULL;
32161 label_num = 0;
32162 tmpl_value_parm_die_table = NULL;
32163 generic_type_instances = NULL;
32164 frame_pointer_fb_offset = 0;
32165 frame_pointer_fb_offset_valid = false;
32166 base_types.release ();
32167 XDELETEVEC (producer_string);
32168 producer_string = NULL;
32169 }
32170
32171 #include "gt-dwarf2out.h"