1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
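/* As a concrete, purely illustrative example (assuming the usual x86-64
   conventions), a prologue such as

       push %rbp		; after the call insn the CFA is rsp+8,
				; after this push it is rsp+16
       mov  %rsp, %rbp		; the CFA can now be described as rbp+16

   would typically be described by CFIs along the lines of

       DW_CFA_def_cfa: rsp +8		(in the CIE, state at entry)
       DW_CFA_def_cfa_offset: 16	(after the push)
       DW_CFA_offset: rbp at cfa-16	(caller's rbp save slot)
       DW_CFA_def_cfa_register: rbp	(after the mov)

   i.e. the CFA rule (a register plus an offset) evolves as the prologue
   executes, and that evolution is what an FDE's CFI program records.  */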
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98
99 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
100 int, bool);
101 static rtx_insn *last_var_location_insn;
102 static rtx_insn *cached_next_real_insn;
103 static void dwarf2out_decl (tree);
104
105 #ifndef XCOFF_DEBUGGING_INFO
106 #define XCOFF_DEBUGGING_INFO 0
107 #endif
108
109 #ifndef HAVE_XCOFF_DWARF_EXTRAS
110 #define HAVE_XCOFF_DWARF_EXTRAS 0
111 #endif
112
113 #ifdef VMS_DEBUGGING_INFO
114 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
115
116 /* Define this macro to be a nonzero value if the directory specifications
117 which are output in the debug info should end with a separator. */
118 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
119 /* Define this macro to evaluate to a nonzero value if GCC should refrain
120 from generating indirect strings in DWARF2 debug information, for instance
121 if your target is stuck with an old version of GDB that is unable to
122 process them properly or uses VMS Debug. */
123 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
124 #else
125 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
126 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
127 #endif
128
129 /* ??? Poison these here until it can be done generically. They've been
130 totally replaced in this file; make sure it stays that way. */
131 #undef DWARF2_UNWIND_INFO
132 #undef DWARF2_FRAME_INFO
133 #if (GCC_VERSION >= 3000)
134 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
135 #endif
136
137 /* The size of the target's pointer type. */
138 #ifndef PTR_SIZE
139 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
140 #endif
141
142 /* Array of RTXes referenced by the debugging information, which therefore
143 must be kept around forever. */
144 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
145
146 /* A pointer to the base of a list of incomplete types which might be
147 completed at some later time. incomplete_types needs to be a
148 vec<tree, va_gc> * because we want to tell the garbage collector about
149 it. */
150 static GTY(()) vec<tree, va_gc> *incomplete_types;
151
152 /* A pointer to the base of a table of references to declaration
153 scopes. This table is a display which tracks the nesting
154 of declaration scopes at the current scope and containing
155 scopes. This table is used to find the proper place to
156 define type declaration DIEs. */
157 static GTY(()) vec<tree, va_gc> *decl_scope_table;
158
159 /* Pointers to various DWARF2 sections. */
160 static GTY(()) section *debug_info_section;
161 static GTY(()) section *debug_skeleton_info_section;
162 static GTY(()) section *debug_abbrev_section;
163 static GTY(()) section *debug_skeleton_abbrev_section;
164 static GTY(()) section *debug_aranges_section;
165 static GTY(()) section *debug_addr_section;
166 static GTY(()) section *debug_macinfo_section;
167 static const char *debug_macinfo_section_name;
168 static unsigned macinfo_label_base = 1;
169 static GTY(()) section *debug_line_section;
170 static GTY(()) section *debug_skeleton_line_section;
171 static GTY(()) section *debug_loc_section;
172 static GTY(()) section *debug_pubnames_section;
173 static GTY(()) section *debug_pubtypes_section;
174 static GTY(()) section *debug_str_section;
175 static GTY(()) section *debug_line_str_section;
176 static GTY(()) section *debug_str_dwo_section;
177 static GTY(()) section *debug_str_offsets_section;
178 static GTY(()) section *debug_ranges_section;
179 static GTY(()) section *debug_frame_section;
180
181 /* Maximum size (in bytes) of an artificially generated label. */
182 #define MAX_ARTIFICIAL_LABEL_BYTES 40
183
184 /* According to the (draft) DWARF 3 specification, the initial length
185 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
186 bytes are 0xffffffff, followed by the length stored in the next 8
187 bytes.
188
189 However, the SGI/MIPS ABI uses an initial length which is equal to
190 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
191
192 #ifndef DWARF_INITIAL_LENGTH_SIZE
193 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
194 #endif
195
196 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
197 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
198 #endif
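/* Purely for illustration, the two encodings described above lay out as
   follows (L = the unit length field):

     32-bit DWARF:   4 bytes:  [ L L L L ]
     64-bit DWARF:  12 bytes:  [ ff ff ff ff  L L L L  L L L L ]

   which is why DWARF_INITIAL_LENGTH_SIZE is 4 when DWARF_OFFSET_SIZE is 4
   and 12 when it is 8.  */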
199
200 /* Round SIZE up to the nearest BOUNDARY. */
201 #define DWARF_ROUND(SIZE,BOUNDARY) \
202 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
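/* For example, DWARF_ROUND (10, 4) == 12, DWARF_ROUND (8, 4) == 8 and
   DWARF_ROUND (0, 8) == 0.  */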
203
204 /* CIE identifier. */
205 #if HOST_BITS_PER_WIDE_INT >= 64
206 #define DWARF_CIE_ID \
207 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
208 #else
209 #define DWARF_CIE_ID DW_CIE_ID
210 #endif
211
212
213 /* A vector for a table that contains frame description
214 information for each routine. */
215 #define NOT_INDEXED (-1U)
216 #define NO_INDEX_ASSIGNED (-2U)
217
218 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
219
220 struct GTY((for_user)) indirect_string_node {
221 const char *str;
222 unsigned int refcount;
223 enum dwarf_form form;
224 char *label;
225 unsigned int index;
226 };
227
228 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
229 {
230 typedef const char *compare_type;
231
232 static hashval_t hash (indirect_string_node *);
233 static bool equal (indirect_string_node *, const char *);
234 };
235
236 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
237
238 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
239
240 /* With split_debug_info, both the comp_dir and dwo_name go in the
241 main object file, rather than the dwo, similar to the force_direct
242 parameter elsewhere but with additional complications:
243
244 1) The string is needed in both the main object file and the dwo.
245 That is, the comp_dir and dwo_name will appear in both places.
246
247 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
248 DW_FORM_line_strp or DW_FORM_GNU_str_index.
249
250 3) GCC chooses the form to use late, depending on the size and
251 reference count.
252
253 Rather than forcing all the debug string handling functions and
254 callers to deal with these complications, simply use a separate,
255 special-cased string table for any attribute that should go in the
256 main object file. This limits the complexity to just the places
257 that need it. */
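/* For instance, the comp_dir and dwo_name strings mentioned above go
   through this skeleton table, so the copy needed in the main object file
   is emitted there (e.g. as DW_FORM_strp or DW_FORM_line_strp), while
   ordinary attribute strings keep going through debug_str_hash.  */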
258
259 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
260
261 static GTY(()) int dw2_string_counter;
262
263 /* True if the compilation unit places functions in more than one section. */
264 static GTY(()) bool have_multiple_function_sections = false;
265
266 /* Whether the default text and cold text sections have been used at all. */
267 static GTY(()) bool text_section_used = false;
268 static GTY(()) bool cold_text_section_used = false;
269
270 /* The default cold text section. */
271 static GTY(()) section *cold_text_section;
272
273 /* The DIE for C++14 'auto' in a function return type. */
274 static GTY(()) dw_die_ref auto_die;
275
276 /* The DIE for C++14 'decltype(auto)' in a function return type. */
277 static GTY(()) dw_die_ref decltype_auto_die;
278
279 /* Forward declarations for functions defined in this file. */
280
281 static void output_call_frame_info (int);
282 static void dwarf2out_note_section_used (void);
283
284 /* Personality decl of current unit. Used only when assembler does not support
285 personality CFI. */
286 static GTY(()) rtx current_unit_personality;
287
288 /* Whether an eh_frame section is required. */
289 static GTY(()) bool do_eh_frame = false;
290
291 /* .debug_rnglists next index. */
292 static unsigned int rnglist_idx;
293
294 /* Data and reference forms for relocatable data. */
295 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
296 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
297
298 #ifndef DEBUG_FRAME_SECTION
299 #define DEBUG_FRAME_SECTION ".debug_frame"
300 #endif
301
302 #ifndef FUNC_BEGIN_LABEL
303 #define FUNC_BEGIN_LABEL "LFB"
304 #endif
305
306 #ifndef FUNC_END_LABEL
307 #define FUNC_END_LABEL "LFE"
308 #endif
309
310 #ifndef PROLOGUE_END_LABEL
311 #define PROLOGUE_END_LABEL "LPE"
312 #endif
313
314 #ifndef EPILOGUE_BEGIN_LABEL
315 #define EPILOGUE_BEGIN_LABEL "LEB"
316 #endif
317
318 #ifndef FRAME_BEGIN_LABEL
319 #define FRAME_BEGIN_LABEL "Lframe"
320 #endif
321 #define CIE_AFTER_SIZE_LABEL "LSCIE"
322 #define CIE_END_LABEL "LECIE"
323 #define FDE_LABEL "LSFDE"
324 #define FDE_AFTER_SIZE_LABEL "LASFDE"
325 #define FDE_END_LABEL "LEFDE"
326 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
327 #define LINE_NUMBER_END_LABEL "LELT"
328 #define LN_PROLOG_AS_LABEL "LASLTP"
329 #define LN_PROLOG_END_LABEL "LELTP"
330 #define DIE_LABEL_PREFIX "DW"
331 \f
332 /* Match the base name of a file to the base name of a compilation unit. */
333
334 static int
335 matches_main_base (const char *path)
336 {
337 /* Cache the last query. */
338 static const char *last_path = NULL;
339 static int last_match = 0;
340 if (path != last_path)
341 {
342 const char *base;
343 int length = base_of_path (path, &base);
344 last_path = path;
345 last_match = (length == main_input_baselength
346 && memcmp (base, main_input_basename, length) == 0);
347 }
348 return last_match;
349 }
350
351 #ifdef DEBUG_DEBUG_STRUCT
352
353 static int
354 dump_struct_debug (tree type, enum debug_info_usage usage,
355 enum debug_struct_file criterion, int generic,
356 int matches, int result)
357 {
358 /* Find the type name. */
359 tree type_decl = TYPE_STUB_DECL (type);
360 tree t = type_decl;
361 const char *name = 0;
362 if (TREE_CODE (t) == TYPE_DECL)
363 t = DECL_NAME (t);
364 if (t)
365 name = IDENTIFIER_POINTER (t);
366
367 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
368 criterion,
369 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
370 matches ? "bas" : "hdr",
371 generic ? "gen" : "ord",
372 usage == DINFO_USAGE_DFN ? ";" :
373 usage == DINFO_USAGE_DIR_USE ? "." : "*",
374 result,
375 (void*) type_decl, name);
376 return result;
377 }
378 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
379 dump_struct_debug (type, usage, criterion, generic, matches, result)
380
381 #else
382
383 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
384 (result)
385
386 #endif
387
388 /* Get the number of HOST_WIDE_INTs needed to represent the precision
389 of the number. Some constants have a large uniform precision, so
390 we get the precision needed for the actual value of the number. */
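/* For example, with a 64-bit HOST_WIDE_INT, the value 5 has a minimum
   unsigned precision of 3 bits and so needs a single HOST_WIDE_INT, while
   a value needing 65 bits of precision needs two.  */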
391
392 static unsigned int
393 get_full_len (const wide_int &op)
394 {
395 int prec = wi::min_precision (op, UNSIGNED);
396 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
397 / HOST_BITS_PER_WIDE_INT);
398 }
399
400 static bool
401 should_emit_struct_debug (tree type, enum debug_info_usage usage)
402 {
403 enum debug_struct_file criterion;
404 tree type_decl;
405 bool generic = lang_hooks.types.generic_p (type);
406
407 if (generic)
408 criterion = debug_struct_generic[usage];
409 else
410 criterion = debug_struct_ordinary[usage];
411
412 if (criterion == DINFO_STRUCT_FILE_NONE)
413 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
414 if (criterion == DINFO_STRUCT_FILE_ANY)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
416
417 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
418
419 if (type_decl != NULL)
420 {
421 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
423
424 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
425 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
426 }
427
428 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
429 }
430 \f
431 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
432 switch to the data section instead, and write out a synthetic start label
433 for collect2 the first time around. */
434
435 static void
436 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
437 {
438 if (eh_frame_section == 0)
439 {
440 int flags;
441
442 if (EH_TABLES_CAN_BE_READ_ONLY)
443 {
444 int fde_encoding;
445 int per_encoding;
446 int lsda_encoding;
447
448 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
449 /*global=*/0);
450 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
451 /*global=*/1);
452 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
453 /*global=*/0);
454 flags = ((! flag_pic
455 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
456 && (fde_encoding & 0x70) != DW_EH_PE_aligned
457 && (per_encoding & 0x70) != DW_EH_PE_absptr
458 && (per_encoding & 0x70) != DW_EH_PE_aligned
459 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
460 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
461 ? 0 : SECTION_WRITE);
462 }
463 else
464 flags = SECTION_WRITE;
465
466 #ifdef EH_FRAME_SECTION_NAME
467 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
468 #else
469 eh_frame_section = ((flags == SECTION_WRITE)
470 ? data_section : readonly_data_section);
471 #endif /* EH_FRAME_SECTION_NAME */
472 }
473
474 switch_to_section (eh_frame_section);
475
476 #ifdef EH_FRAME_THROUGH_COLLECT2
477 /* We have no special eh_frame section. Emit special labels to guide
478 collect2. */
479 if (!back)
480 {
481 tree label = get_file_function_name ("F");
482 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
483 targetm.asm_out.globalize_label (asm_out_file,
484 IDENTIFIER_POINTER (label));
485 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
486 }
487 #endif
488 }
489
490 /* Switch [BACK] to the eh or debug frame table section, depending on
491 FOR_EH. */
492
493 static void
494 switch_to_frame_table_section (int for_eh, bool back)
495 {
496 if (for_eh)
497 switch_to_eh_frame_section (back);
498 else
499 {
500 if (!debug_frame_section)
501 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
502 SECTION_DEBUG, NULL);
503 switch_to_section (debug_frame_section);
504 }
505 }
506
507 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
508
509 enum dw_cfi_oprnd_type
510 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
511 {
512 switch (cfi)
513 {
514 case DW_CFA_nop:
515 case DW_CFA_GNU_window_save:
516 case DW_CFA_remember_state:
517 case DW_CFA_restore_state:
518 return dw_cfi_oprnd_unused;
519
520 case DW_CFA_set_loc:
521 case DW_CFA_advance_loc1:
522 case DW_CFA_advance_loc2:
523 case DW_CFA_advance_loc4:
524 case DW_CFA_MIPS_advance_loc8:
525 return dw_cfi_oprnd_addr;
526
527 case DW_CFA_offset:
528 case DW_CFA_offset_extended:
529 case DW_CFA_def_cfa:
530 case DW_CFA_offset_extended_sf:
531 case DW_CFA_def_cfa_sf:
532 case DW_CFA_restore:
533 case DW_CFA_restore_extended:
534 case DW_CFA_undefined:
535 case DW_CFA_same_value:
536 case DW_CFA_def_cfa_register:
537 case DW_CFA_register:
538 case DW_CFA_expression:
539 case DW_CFA_val_expression:
540 return dw_cfi_oprnd_reg_num;
541
542 case DW_CFA_def_cfa_offset:
543 case DW_CFA_GNU_args_size:
544 case DW_CFA_def_cfa_offset_sf:
545 return dw_cfi_oprnd_offset;
546
547 case DW_CFA_def_cfa_expression:
548 return dw_cfi_oprnd_loc;
549
550 default:
551 gcc_unreachable ();
552 }
553 }
554
555 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
556
557 enum dw_cfi_oprnd_type
558 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
559 {
560 switch (cfi)
561 {
562 case DW_CFA_def_cfa:
563 case DW_CFA_def_cfa_sf:
564 case DW_CFA_offset:
565 case DW_CFA_offset_extended_sf:
566 case DW_CFA_offset_extended:
567 return dw_cfi_oprnd_offset;
568
569 case DW_CFA_register:
570 return dw_cfi_oprnd_reg_num;
571
572 case DW_CFA_expression:
573 case DW_CFA_val_expression:
574 return dw_cfi_oprnd_loc;
575
576 case DW_CFA_def_cfa_expression:
577 return dw_cfi_oprnd_cfa_loc;
578
579 default:
580 return dw_cfi_oprnd_unused;
581 }
582 }
583
584 /* Output one FDE. */
585
586 static void
587 output_fde (dw_fde_ref fde, bool for_eh, bool second,
588 char *section_start_label, int fde_encoding, char *augmentation,
589 bool any_lsda_needed, int lsda_encoding)
590 {
591 const char *begin, *end;
592 static unsigned int j;
593 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
594
595 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
596 /* empty */ 0);
597 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
598 for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
601 if (!XCOFF_DEBUGGING_INFO || for_eh)
602 {
603 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
604 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
605 " indicating 64-bit DWARF extension");
606 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
607 "FDE Length");
608 }
609 ASM_OUTPUT_LABEL (asm_out_file, l1);
610
611 if (for_eh)
612 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
613 else
614 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
615 debug_frame_section, "FDE CIE offset");
616
617 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
618 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
619
620 if (for_eh)
621 {
622 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
623 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
624 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
625 "FDE initial location");
626 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
627 end, begin, "FDE address range");
628 }
629 else
630 {
631 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
632 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
633 }
634
635 if (augmentation[0])
636 {
637 if (any_lsda_needed)
638 {
639 int size = size_of_encoded_value (lsda_encoding);
640
641 if (lsda_encoding == DW_EH_PE_aligned)
642 {
643 int offset = ( 4 /* Length */
644 + 4 /* CIE offset */
645 + 2 * size_of_encoded_value (fde_encoding)
646 + 1 /* Augmentation size */ );
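	      /* -offset & (PTR_SIZE - 1) is the amount needed to round
		 OFFSET up to a multiple of PTR_SIZE; e.g. with PTR_SIZE == 4
		 and offset == 11 the pad below is 1, making 11 + 1 == 12.  */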
647 int pad = -offset & (PTR_SIZE - 1);
648
649 size += pad;
650 gcc_assert (size_of_uleb128 (size) == 1);
651 }
652
653 dw2_asm_output_data_uleb128 (size, "Augmentation size");
654
655 if (fde->uses_eh_lsda)
656 {
657 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
658 fde->funcdef_number);
659 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
660 gen_rtx_SYMBOL_REF (Pmode, l1),
661 false,
662 "Language Specific Data Area");
663 }
664 else
665 {
666 if (lsda_encoding == DW_EH_PE_aligned)
667 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
668 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
669 "Language Specific Data Area (none)");
670 }
671 }
672 else
673 dw2_asm_output_data_uleb128 (0, "Augmentation size");
674 }
675
676 /* Loop through the Call Frame Instructions associated with this FDE. */
677 fde->dw_fde_current_label = begin;
678 {
679 size_t from, until, i;
680
681 from = 0;
682 until = vec_safe_length (fde->dw_fde_cfi);
683
684 if (fde->dw_fde_second_begin == NULL)
685 ;
686 else if (!second)
687 until = fde->dw_fde_switch_cfi_index;
688 else
689 from = fde->dw_fde_switch_cfi_index;
690
691 for (i = from; i < until; i++)
692 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
693 }
694
695 /* If we are to emit a ref/link from function bodies to their frame tables,
696 do it now. This is typically performed to make sure that tables
697 associated with functions are dragged with them and not discarded in
698 garbage-collecting links. We need to do this on a per-function basis to
699 cope with -ffunction-sections. */
700
701 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
702 /* Switch to the function section, emit the ref to the tables, and
703 switch *back* into the table section. */
704 switch_to_section (function_section (fde->decl));
705 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
706 switch_to_frame_table_section (for_eh, true);
707 #endif
708
709 /* Pad the FDE out to an address-sized boundary. */
710 ASM_OUTPUT_ALIGN (asm_out_file,
711 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
712 ASM_OUTPUT_LABEL (asm_out_file, l2);
713
714 j += 2;
715 }
716
717 /* Return true if frame description entry FDE is needed for EH. */
718
719 static bool
720 fde_needed_for_eh_p (dw_fde_ref fde)
721 {
722 if (flag_asynchronous_unwind_tables)
723 return true;
724
725 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
726 return true;
727
728 if (fde->uses_eh_lsda)
729 return true;
730
731 /* If exceptions are enabled, we have collected nothrow info. */
732 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
733 return false;
734
735 return true;
736 }
737
738 /* Output the call frame information used to record information
739 that relates to calculating the frame pointer and the locations
740 of saved registers. */
741
742 static void
743 output_call_frame_info (int for_eh)
744 {
745 unsigned int i;
746 dw_fde_ref fde;
747 dw_cfi_ref cfi;
748 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
749 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
750 bool any_lsda_needed = false;
751 char augmentation[6];
752 int augmentation_size;
753 int fde_encoding = DW_EH_PE_absptr;
754 int per_encoding = DW_EH_PE_absptr;
755 int lsda_encoding = DW_EH_PE_absptr;
756 int return_reg;
757 rtx personality = NULL;
758 int dw_cie_version;
759
760 /* Don't emit a CIE if there won't be any FDEs. */
761 if (!fde_vec)
762 return;
763
764 /* Nothing to do if the assembler's doing it all. */
765 if (dwarf2out_do_cfi_asm ())
766 return;
767
768 /* If we don't have any functions we'll want to unwind out of, don't emit
769 any EH unwind information. If we make FDEs linkonce, we may have to
770 emit an empty label for an FDE that wouldn't otherwise be emitted. We
771 want to avoid having an FDE kept around when the function it refers to
772 is discarded. Example where this matters: a primary function template
773 in C++ requires EH information, an explicit specialization doesn't. */
774 if (for_eh)
775 {
776 bool any_eh_needed = false;
777
778 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
779 {
780 if (fde->uses_eh_lsda)
781 any_eh_needed = any_lsda_needed = true;
782 else if (fde_needed_for_eh_p (fde))
783 any_eh_needed = true;
784 else if (TARGET_USES_WEAK_UNWIND_INFO)
785 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
786 }
787
788 if (!any_eh_needed)
789 return;
790 }
791
792 /* We're going to be generating comments, so turn on app. */
793 if (flag_debug_asm)
794 app_enable ();
795
796 /* Switch to the proper frame section, first time. */
797 switch_to_frame_table_section (for_eh, false);
798
799 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
800 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
801
802 /* Output the CIE. */
803 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
804 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
805 if (!XCOFF_DEBUGGING_INFO || for_eh)
806 {
807 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
808 dw2_asm_output_data (4, 0xffffffff,
809 "Initial length escape value indicating 64-bit DWARF extension");
810 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
811 "Length of Common Information Entry");
812 }
813 ASM_OUTPUT_LABEL (asm_out_file, l1);
814
815 /* Now that the CIE pointer is PC-relative for EH,
816 use 0 to identify the CIE. */
817 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
818 (for_eh ? 0 : DWARF_CIE_ID),
819 "CIE Identifier Tag");
820
821 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
822 use CIE version 1, unless that would produce incorrect results
823 due to overflowing the return register column. */
824 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
825 dw_cie_version = 1;
826 if (return_reg >= 256 || dwarf_version > 2)
827 dw_cie_version = 3;
828 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
829
830 augmentation[0] = 0;
831 augmentation_size = 0;
832
833 personality = current_unit_personality;
834 if (for_eh)
835 {
836 char *p;
837
838 /* Augmentation:
839 z Indicates that a uleb128 is present to size the
840 augmentation section.
841 L Indicates the encoding (and thus presence) of
842 an LSDA pointer in the FDE augmentation.
843 R Indicates a non-default pointer encoding for
844 FDE code pointers.
845 P Indicates the presence of an encoding + language
846 personality routine in the CIE augmentation. */
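      /* For example, a function with a personality routine, an LSDA and a
	 non-absptr FDE encoding ends up with the augmentation string "zPLR";
	 the CIE augmentation data then carries the personality encoding and
	 pointer, the LSDA encoding and the FDE pointer encoding, in the
	 order the letters appear.  */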
847
848 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
849 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
850 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
851
852 p = augmentation + 1;
853 if (personality)
854 {
855 *p++ = 'P';
856 augmentation_size += 1 + size_of_encoded_value (per_encoding);
857 assemble_external_libcall (personality);
858 }
859 if (any_lsda_needed)
860 {
861 *p++ = 'L';
862 augmentation_size += 1;
863 }
864 if (fde_encoding != DW_EH_PE_absptr)
865 {
866 *p++ = 'R';
867 augmentation_size += 1;
868 }
869 if (p > augmentation + 1)
870 {
871 augmentation[0] = 'z';
872 *p = '\0';
873 }
874
875 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
876 if (personality && per_encoding == DW_EH_PE_aligned)
877 {
878 int offset = ( 4 /* Length */
879 + 4 /* CIE Id */
880 + 1 /* CIE version */
881 + strlen (augmentation) + 1 /* Augmentation */
882 + size_of_uleb128 (1) /* Code alignment */
883 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
884 + 1 /* RA column */
885 + 1 /* Augmentation size */
886 + 1 /* Personality encoding */ );
887 int pad = -offset & (PTR_SIZE - 1);
888
889 augmentation_size += pad;
890
891 /* Augmentations should be small, so there's scarce need to
892 iterate for a solution. Die if we exceed one uleb128 byte. */
893 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
894 }
895 }
896
897 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
898 if (dw_cie_version >= 4)
899 {
900 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
901 dw2_asm_output_data (1, 0, "CIE Segment Size");
902 }
903 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
904 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
905 "CIE Data Alignment Factor");
906
907 if (dw_cie_version == 1)
908 dw2_asm_output_data (1, return_reg, "CIE RA Column");
909 else
910 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
911
912 if (augmentation[0])
913 {
914 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
915 if (personality)
916 {
917 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
918 eh_data_format_name (per_encoding));
919 dw2_asm_output_encoded_addr_rtx (per_encoding,
920 personality,
921 true, NULL);
922 }
923
924 if (any_lsda_needed)
925 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
926 eh_data_format_name (lsda_encoding));
927
928 if (fde_encoding != DW_EH_PE_absptr)
929 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
930 eh_data_format_name (fde_encoding));
931 }
932
933 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
934 output_cfi (cfi, NULL, for_eh);
935
936 /* Pad the CIE out to an address-sized boundary. */
937 ASM_OUTPUT_ALIGN (asm_out_file,
938 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
939 ASM_OUTPUT_LABEL (asm_out_file, l2);
940
941 /* Loop through all of the FDEs. */
942 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
943 {
944 unsigned int k;
945
946 /* Don't emit EH unwind info for leaf functions that don't need it. */
947 if (for_eh && !fde_needed_for_eh_p (fde))
948 continue;
949
950 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
951 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
952 augmentation, any_lsda_needed, lsda_encoding);
953 }
954
955 if (for_eh && targetm.terminate_dw2_eh_frame_info)
956 dw2_asm_output_data (4, 0, "End of Table");
957
958 /* Turn off app to make assembly quicker. */
959 if (flag_debug_asm)
960 app_disable ();
961 }
962
963 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
964
965 static void
966 dwarf2out_do_cfi_startproc (bool second)
967 {
968 int enc;
969 rtx ref;
970
971 fprintf (asm_out_file, "\t.cfi_startproc\n");
972
973 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
974 eh unwinders. */
975 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
976 return;
977
978 rtx personality = get_personality_function (current_function_decl);
979
980 if (personality)
981 {
982 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
983 ref = personality;
984
985 /* ??? The GAS support isn't entirely consistent. We have to
986 handle indirect support ourselves, but PC-relative is done
987 in the assembler. Further, the assembler can't handle any
988 of the weirder relocation types. */
989 if (enc & DW_EH_PE_indirect)
990 ref = dw2_force_const_mem (ref, true);
991
992 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
993 output_addr_const (asm_out_file, ref);
994 fputc ('\n', asm_out_file);
995 }
996
997 if (crtl->uses_eh_lsda)
998 {
999 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1000
1001 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1002 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1003 current_function_funcdef_no);
1004 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1005 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1006
1007 if (enc & DW_EH_PE_indirect)
1008 ref = dw2_force_const_mem (ref, true);
1009
1010 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1011 output_addr_const (asm_out_file, ref);
1012 fputc ('\n', asm_out_file);
1013 }
1014 }
1015
1016 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1017 this allocation may be done before pass_final. */
1018
1019 dw_fde_ref
1020 dwarf2out_alloc_current_fde (void)
1021 {
1022 dw_fde_ref fde;
1023
1024 fde = ggc_cleared_alloc<dw_fde_node> ();
1025 fde->decl = current_function_decl;
1026 fde->funcdef_number = current_function_funcdef_no;
1027 fde->fde_index = vec_safe_length (fde_vec);
1028 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1029 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1030 fde->nothrow = crtl->nothrow;
1031 fde->drap_reg = INVALID_REGNUM;
1032 fde->vdrap_reg = INVALID_REGNUM;
1033
1034 /* Record the FDE associated with this function. */
1035 cfun->fde = fde;
1036 vec_safe_push (fde_vec, fde);
1037
1038 return fde;
1039 }
1040
1041 /* Output a marker (i.e. a label) for the beginning of a function, before
1042 the prologue. */
1043
1044 void
1045 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1046 unsigned int column ATTRIBUTE_UNUSED,
1047 const char *file ATTRIBUTE_UNUSED)
1048 {
1049 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1050 char * dup_label;
1051 dw_fde_ref fde;
1052 section *fnsec;
1053 bool do_frame;
1054
1055 current_function_func_begin_label = NULL;
1056
1057 do_frame = dwarf2out_do_frame ();
1058
1059 /* ??? current_function_func_begin_label is also used by except.c for
1060 call-site information. We must emit this label if it might be used. */
1061 if (!do_frame
1062 && (!flag_exceptions
1063 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1064 return;
1065
1066 fnsec = function_section (current_function_decl);
1067 switch_to_section (fnsec);
1068 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1069 current_function_funcdef_no);
1070 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1071 current_function_funcdef_no);
1072 dup_label = xstrdup (label);
1073 current_function_func_begin_label = dup_label;
1074
1075 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1076 if (!do_frame)
1077 return;
1078
1079 /* Unlike the debug version, the EH version of frame unwind info is a per-
1080 function setting so we need to record whether we need it for the unit. */
1081 do_eh_frame |= dwarf2out_do_eh_frame ();
1082
1083 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1084 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1085 would include pass_dwarf2_frame. If we've not created the FDE yet,
1086 do so now. */
1087 fde = cfun->fde;
1088 if (fde == NULL)
1089 fde = dwarf2out_alloc_current_fde ();
1090
1091 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1092 fde->dw_fde_begin = dup_label;
1093 fde->dw_fde_current_label = dup_label;
1094 fde->in_std_section = (fnsec == text_section
1095 || (cold_text_section && fnsec == cold_text_section));
1096
1097 /* We only want to output line number information for the genuine dwarf2
1098 prologue case, not the eh frame case. */
1099 #ifdef DWARF2_DEBUGGING_INFO
1100 if (file)
1101 dwarf2out_source_line (line, column, file, 0, true);
1102 #endif
1103
1104 if (dwarf2out_do_cfi_asm ())
1105 dwarf2out_do_cfi_startproc (false);
1106 else
1107 {
1108 rtx personality = get_personality_function (current_function_decl);
1109 if (!current_unit_personality)
1110 current_unit_personality = personality;
1111
1112 /* We cannot keep a current personality per function because, without
1113 CFI asm, there is no current function anymore at the point where
1114 we emit the CFI data. */
1115 if (personality && current_unit_personality != personality)
1116 sorry ("multiple EH personalities are supported only with assemblers "
1117 "supporting .cfi_personality directive");
1118 }
1119 }
1120
1121 /* Output a marker (i.e. a label) for the end of the generated code
1122 for a function prologue. This gets called *after* the prologue code has
1123 been generated. */
1124
1125 void
1126 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1127 const char *file ATTRIBUTE_UNUSED)
1128 {
1129 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1130
1131 /* Output a label to mark the end of the prologue code generated for
1132 this function. */
1133 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1134 current_function_funcdef_no);
1135 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1136 current_function_funcdef_no);
1137 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1138 }
1139
1140 /* Output a marker (i.e. a label) for the beginning of the generated code
1141 for a function epilogue. This gets called *before* the epilogue code has
1142 been generated. */
1143
1144 void
1145 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1146 const char *file ATTRIBUTE_UNUSED)
1147 {
1148 dw_fde_ref fde = cfun->fde;
1149 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1150
1151 if (fde->dw_fde_vms_begin_epilogue)
1152 return;
1153
1154 /* Output a label to mark the beginning of the epilogue code generated
1155 for this function. */
1156 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1157 current_function_funcdef_no);
1158 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1159 current_function_funcdef_no);
1160 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1161 }
1162
1163 /* Output a marker (i.e. a label) for the absolute end of the generated code
1164 for a function definition. This gets called *after* the epilogue code has
1165 been generated. */
1166
1167 void
1168 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1169 const char *file ATTRIBUTE_UNUSED)
1170 {
1171 dw_fde_ref fde;
1172 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1173
1174 last_var_location_insn = NULL;
1175 cached_next_real_insn = NULL;
1176
1177 if (dwarf2out_do_cfi_asm ())
1178 fprintf (asm_out_file, "\t.cfi_endproc\n");
1179
1180 /* Output a label to mark the endpoint of the code generated for this
1181 function. */
1182 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1183 current_function_funcdef_no);
1184 ASM_OUTPUT_LABEL (asm_out_file, label);
1185 fde = cfun->fde;
1186 gcc_assert (fde != NULL);
1187 if (fde->dw_fde_second_begin == NULL)
1188 fde->dw_fde_end = xstrdup (label);
1189 }
1190
1191 void
1192 dwarf2out_frame_finish (void)
1193 {
1194 /* Output call frame information. */
1195 if (targetm.debug_unwind_info () == UI_DWARF2)
1196 output_call_frame_info (0);
1197
1198 /* Output another copy for the unwinder. */
1199 if (do_eh_frame)
1200 output_call_frame_info (1);
1201 }
1202
1203 /* Note that the current function section is being used for code. */
1204
1205 static void
1206 dwarf2out_note_section_used (void)
1207 {
1208 section *sec = current_function_section ();
1209 if (sec == text_section)
1210 text_section_used = true;
1211 else if (sec == cold_text_section)
1212 cold_text_section_used = true;
1213 }
1214
1215 static void var_location_switch_text_section (void);
1216 static void set_cur_line_info_table (section *);
1217
1218 void
1219 dwarf2out_switch_text_section (void)
1220 {
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 if (!in_cold_section_p)
1227 {
1228 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1231 }
1232 else
1233 {
1234 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1235 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1236 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1237 }
1238 have_multiple_function_sections = true;
1239
1240 /* There is no need to mark used sections when not debugging. */
1241 if (cold_text_section != NULL)
1242 dwarf2out_note_section_used ();
1243
1244 if (dwarf2out_do_cfi_asm ())
1245 fprintf (asm_out_file, "\t.cfi_endproc\n");
1246
1247 /* Now do the real section switch. */
1248 sect = current_function_section ();
1249 switch_to_section (sect);
1250
1251 fde->second_in_std_section
1252 = (sect == text_section
1253 || (cold_text_section && sect == cold_text_section));
1254
1255 if (dwarf2out_do_cfi_asm ())
1256 dwarf2out_do_cfi_startproc (true);
1257
1258 var_location_switch_text_section ();
1259
1260 if (cold_text_section != NULL)
1261 set_cur_line_info_table (sect);
1262 }
1263 \f
1264 /* And now, the subset of the debugging information support code necessary
1265 for emitting location expressions. */
1266
1267 /* Data about a single source file. */
1268 struct GTY((for_user)) dwarf_file_data {
1269 const char * filename;
1270 int emitted_number;
1271 };
1272
1273 /* Describe an entry into the .debug_addr section. */
1274
1275 enum ate_kind {
1276 ate_kind_rtx,
1277 ate_kind_rtx_dtprel,
1278 ate_kind_label
1279 };
1280
1281 struct GTY((for_user)) addr_table_entry {
1282 enum ate_kind kind;
1283 unsigned int refcount;
1284 unsigned int index;
1285 union addr_table_entry_struct_union
1286 {
1287 rtx GTY ((tag ("0"))) rtl;
1288 char * GTY ((tag ("1"))) label;
1289 }
1290 GTY ((desc ("%1.kind"))) addr;
1291 };
1292
1293 /* Location lists are ranges + location descriptions for that range,
1294 so you can track variables that are in different places over
1295 their entire life. */
1296 typedef struct GTY(()) dw_loc_list_struct {
1297 dw_loc_list_ref dw_loc_next;
1298 const char *begin; /* Label and addr_entry for start of range */
1299 addr_table_entry *begin_entry;
1300 const char *end; /* Label for end of range */
1301 char *ll_symbol; /* Label for beginning of location list.
1302 Only on head of list */
1303 const char *section; /* Section this loclist is relative to */
1304 dw_loc_descr_ref expr;
1305 hashval_t hash;
1306 /* True if all addresses in this and subsequent lists are known to be
1307 resolved. */
1308 bool resolved_addr;
1309 /* True if this list has been replaced by dw_loc_next. */
1310 bool replaced;
1311 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1312 section. */
1313 unsigned char emitted : 1;
1314 /* True if hash field is index rather than hash value. */
1315 unsigned char num_assigned : 1;
1316 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1317 unsigned char offset_emitted : 1;
1318 /* True if note_variable_value_in_expr has been called on it. */
1319 unsigned char noted_variable_value : 1;
1320 /* True if the range should be emitted even if begin and end
1321 are the same. */
1322 bool force;
1323 } dw_loc_list_node;
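/* As a rough illustration, a variable that starts out in a register and is
   later spilled to the stack might be described by a two-node list along
   the lines of

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL2)  DW_OP_fbreg -16

   where begin/end bound each range and expr holds the location expression
   that is valid inside it.  */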
1324
1325 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1326 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1327
1328 /* Convert a DWARF stack opcode into its string name. */
1329
1330 static const char *
1331 dwarf_stack_op_name (unsigned int op)
1332 {
1333 const char *name = get_DW_OP_name (op);
1334
1335 if (name != NULL)
1336 return name;
1337
1338 return "OP_<unknown>";
1339 }
1340
1341 /* Return a pointer to a newly allocated location description. Location
1342 descriptions are simple expression terms that can be strung
1343 together to form more complicated location (address) descriptions. */
1344
1345 static inline dw_loc_descr_ref
1346 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1347 unsigned HOST_WIDE_INT oprnd2)
1348 {
1349 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1350
1351 descr->dw_loc_opc = op;
1352 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1353 descr->dw_loc_oprnd1.val_entry = NULL;
1354 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1355 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1356 descr->dw_loc_oprnd2.val_entry = NULL;
1357 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1358
1359 return descr;
1360 }
1361
1362 /* Add a location description term to a location description expression. */
1363
1364 static inline void
1365 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1366 {
1367 dw_loc_descr_ref *d;
1368
1369 /* Find the end of the chain. */
1370 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1371 ;
1372
1373 *d = descr;
1374 }
1375
1376 /* Compare two location operands for exact equality. */
1377
1378 static bool
1379 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1380 {
1381 if (a->val_class != b->val_class)
1382 return false;
1383 switch (a->val_class)
1384 {
1385 case dw_val_class_none:
1386 return true;
1387 case dw_val_class_addr:
1388 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1389
1390 case dw_val_class_offset:
1391 case dw_val_class_unsigned_const:
1392 case dw_val_class_const:
1393 case dw_val_class_unsigned_const_implicit:
1394 case dw_val_class_const_implicit:
1395 case dw_val_class_range_list:
1396 /* These are all HOST_WIDE_INT, signed or unsigned. */
1397 return a->v.val_unsigned == b->v.val_unsigned;
1398
1399 case dw_val_class_loc:
1400 return a->v.val_loc == b->v.val_loc;
1401 case dw_val_class_loc_list:
1402 return a->v.val_loc_list == b->v.val_loc_list;
1403 case dw_val_class_die_ref:
1404 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1405 case dw_val_class_fde_ref:
1406 return a->v.val_fde_index == b->v.val_fde_index;
1407 case dw_val_class_lbl_id:
1408 case dw_val_class_lineptr:
1409 case dw_val_class_macptr:
1410 case dw_val_class_loclistsptr:
1411 case dw_val_class_high_pc:
1412 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1413 case dw_val_class_str:
1414 return a->v.val_str == b->v.val_str;
1415 case dw_val_class_flag:
1416 return a->v.val_flag == b->v.val_flag;
1417 case dw_val_class_file:
1418 case dw_val_class_file_implicit:
1419 return a->v.val_file == b->v.val_file;
1420 case dw_val_class_decl_ref:
1421 return a->v.val_decl_ref == b->v.val_decl_ref;
1422
1423 case dw_val_class_const_double:
1424 return (a->v.val_double.high == b->v.val_double.high
1425 && a->v.val_double.low == b->v.val_double.low);
1426
1427 case dw_val_class_wide_int:
1428 return *a->v.val_wide == *b->v.val_wide;
1429
1430 case dw_val_class_vec:
1431 {
1432 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1433 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1434
1435 return (a_len == b_len
1436 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1437 }
1438
1439 case dw_val_class_data8:
1440 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1441
1442 case dw_val_class_vms_delta:
1443 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1444 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1445
1446 case dw_val_class_discr_value:
1447 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1448 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1449 case dw_val_class_discr_list:
1450 /* It makes no sense to compare two discriminant value lists. */
1451 return false;
1452 }
1453 gcc_unreachable ();
1454 }
1455
1456 /* Compare two location atoms for exact equality. */
1457
1458 static bool
1459 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1460 {
1461 if (a->dw_loc_opc != b->dw_loc_opc)
1462 return false;
1463
1464 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1465 address size, but since we always allocate cleared storage it
1466 should be zero for other types of locations. */
1467 if (a->dtprel != b->dtprel)
1468 return false;
1469
1470 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1471 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1472 }
1473
1474 /* Compare two complete location expressions for exact equality. */
1475
1476 bool
1477 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1478 {
1479 while (1)
1480 {
1481 if (a == b)
1482 return true;
1483 if (a == NULL || b == NULL)
1484 return false;
1485 if (!loc_descr_equal_p_1 (a, b))
1486 return false;
1487
1488 a = a->dw_loc_next;
1489 b = b->dw_loc_next;
1490 }
1491 }
1492
1493
1494 /* Add a constant POLY_OFFSET to a location expression. */
1495
1496 static void
1497 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1498 {
1499 dw_loc_descr_ref loc;
1500 HOST_WIDE_INT *p;
1501
1502 gcc_assert (*list_head != NULL);
1503
1504 if (known_eq (poly_offset, 0))
1505 return;
1506
1507 /* Find the end of the chain. */
1508 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1509 ;
1510
1511 HOST_WIDE_INT offset;
1512 if (!poly_offset.is_constant (&offset))
1513 {
1514 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1515 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1516 return;
1517 }
1518
1519 p = NULL;
1520 if (loc->dw_loc_opc == DW_OP_fbreg
1521 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1522 p = &loc->dw_loc_oprnd1.v.val_int;
1523 else if (loc->dw_loc_opc == DW_OP_bregx)
1524 p = &loc->dw_loc_oprnd2.v.val_int;
1525
1526 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1527 offset. Don't optimize if a signed integer overflow would happen. */
1528 if (p != NULL
1529 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1530 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1531 *p += offset;
1532
1533 else if (offset > 0)
1534 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1535
1536 else
1537 {
1538 loc->dw_loc_next
1539 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1541 }
1542 }
1543
1544 /* Return a pointer to a newly allocated location description for
1545 REG and OFFSET. */
1546
1547 static inline dw_loc_descr_ref
1548 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1549 {
1550 HOST_WIDE_INT const_offset;
1551 if (offset.is_constant (&const_offset))
1552 {
1553 if (reg <= 31)
1554 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1555 const_offset, 0);
1556 else
1557 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1558 }
1559 else
1560 {
1561 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1562 loc_descr_plus_const (&ret, offset);
1563 return ret;
1564 }
1565 }
1566
1567 /* Add a constant OFFSET to a location list. */
1568
1569 static void
1570 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1571 {
1572 dw_loc_list_ref d;
1573 for (d = list_head; d != NULL; d = d->dw_loc_next)
1574 loc_descr_plus_const (&d->expr, offset);
1575 }
1576
1577 #define DWARF_REF_SIZE \
1578 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1579
1580 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1581 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1582 DW_FORM_data16 with 128 bits. */
1583 #define DWARF_LARGEST_DATA_FORM_BITS \
1584 (dwarf_version >= 5 ? 128 : 64)
1585
1586 /* Utility inline function for construction of ops that were GNU extension
1587 before DWARF 5. */
1588 static inline enum dwarf_location_atom
1589 dwarf_OP (enum dwarf_location_atom op)
1590 {
1591 switch (op)
1592 {
1593 case DW_OP_implicit_pointer:
1594 if (dwarf_version < 5)
1595 return DW_OP_GNU_implicit_pointer;
1596 break;
1597
1598 case DW_OP_entry_value:
1599 if (dwarf_version < 5)
1600 return DW_OP_GNU_entry_value;
1601 break;
1602
1603 case DW_OP_const_type:
1604 if (dwarf_version < 5)
1605 return DW_OP_GNU_const_type;
1606 break;
1607
1608 case DW_OP_regval_type:
1609 if (dwarf_version < 5)
1610 return DW_OP_GNU_regval_type;
1611 break;
1612
1613 case DW_OP_deref_type:
1614 if (dwarf_version < 5)
1615 return DW_OP_GNU_deref_type;
1616 break;
1617
1618 case DW_OP_convert:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_convert;
1621 break;
1622
1623 case DW_OP_reinterpret:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_reinterpret;
1626 break;
1627
1628 default:
1629 break;
1630 }
1631 return op;
1632 }
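/* So, for instance, dwarf_OP (DW_OP_entry_value) yields
   DW_OP_GNU_entry_value when targeting DWARF 2-4 and DW_OP_entry_value
   itself for DWARF 5 or later.  */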
1633
1634 /* Similarly for attributes. */
1635 static inline enum dwarf_attribute
1636 dwarf_AT (enum dwarf_attribute at)
1637 {
1638 switch (at)
1639 {
1640 case DW_AT_call_return_pc:
1641 if (dwarf_version < 5)
1642 return DW_AT_low_pc;
1643 break;
1644
1645 case DW_AT_call_tail_call:
1646 if (dwarf_version < 5)
1647 return DW_AT_GNU_tail_call;
1648 break;
1649
1650 case DW_AT_call_origin:
1651 if (dwarf_version < 5)
1652 return DW_AT_abstract_origin;
1653 break;
1654
1655 case DW_AT_call_target:
1656 if (dwarf_version < 5)
1657 return DW_AT_GNU_call_site_target;
1658 break;
1659
1660 case DW_AT_call_target_clobbered:
1661 if (dwarf_version < 5)
1662 return DW_AT_GNU_call_site_target_clobbered;
1663 break;
1664
1665 case DW_AT_call_parameter:
1666 if (dwarf_version < 5)
1667 return DW_AT_abstract_origin;
1668 break;
1669
1670 case DW_AT_call_value:
1671 if (dwarf_version < 5)
1672 return DW_AT_GNU_call_site_value;
1673 break;
1674
1675 case DW_AT_call_data_value:
1676 if (dwarf_version < 5)
1677 return DW_AT_GNU_call_site_data_value;
1678 break;
1679
1680 case DW_AT_call_all_calls:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_all_call_sites;
1683 break;
1684
1685 case DW_AT_call_all_tail_calls:
1686 if (dwarf_version < 5)
1687 return DW_AT_GNU_all_tail_call_sites;
1688 break;
1689
1690 case DW_AT_dwo_name:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_dwo_name;
1693 break;
1694
1695 default:
1696 break;
1697 }
1698 return at;
1699 }
1700
1701 /* And similarly for tags. */
1702 static inline enum dwarf_tag
1703 dwarf_TAG (enum dwarf_tag tag)
1704 {
1705 switch (tag)
1706 {
1707 case DW_TAG_call_site:
1708 if (dwarf_version < 5)
1709 return DW_TAG_GNU_call_site;
1710 break;
1711
1712 case DW_TAG_call_site_parameter:
1713 if (dwarf_version < 5)
1714 return DW_TAG_GNU_call_site_parameter;
1715 break;
1716
1717 default:
1718 break;
1719 }
1720 return tag;
1721 }
1722
1723 static unsigned long int get_base_type_offset (dw_die_ref);
1724
1725 /* Return the size of a location descriptor. */
1726
1727 static unsigned long
1728 size_of_loc_descr (dw_loc_descr_ref loc)
1729 {
1730 unsigned long size = 1;
1731
1732 switch (loc->dw_loc_opc)
1733 {
1734 case DW_OP_addr:
1735 size += DWARF2_ADDR_SIZE;
1736 break;
1737 case DW_OP_GNU_addr_index:
1738 case DW_OP_GNU_const_index:
1739 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1740 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1741 break;
1742 case DW_OP_const1u:
1743 case DW_OP_const1s:
1744 size += 1;
1745 break;
1746 case DW_OP_const2u:
1747 case DW_OP_const2s:
1748 size += 2;
1749 break;
1750 case DW_OP_const4u:
1751 case DW_OP_const4s:
1752 size += 4;
1753 break;
1754 case DW_OP_const8u:
1755 case DW_OP_const8s:
1756 size += 8;
1757 break;
1758 case DW_OP_constu:
1759 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1760 break;
1761 case DW_OP_consts:
1762 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1763 break;
1764 case DW_OP_pick:
1765 size += 1;
1766 break;
1767 case DW_OP_plus_uconst:
1768 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1769 break;
1770 case DW_OP_skip:
1771 case DW_OP_bra:
1772 size += 2;
1773 break;
1774 case DW_OP_breg0:
1775 case DW_OP_breg1:
1776 case DW_OP_breg2:
1777 case DW_OP_breg3:
1778 case DW_OP_breg4:
1779 case DW_OP_breg5:
1780 case DW_OP_breg6:
1781 case DW_OP_breg7:
1782 case DW_OP_breg8:
1783 case DW_OP_breg9:
1784 case DW_OP_breg10:
1785 case DW_OP_breg11:
1786 case DW_OP_breg12:
1787 case DW_OP_breg13:
1788 case DW_OP_breg14:
1789 case DW_OP_breg15:
1790 case DW_OP_breg16:
1791 case DW_OP_breg17:
1792 case DW_OP_breg18:
1793 case DW_OP_breg19:
1794 case DW_OP_breg20:
1795 case DW_OP_breg21:
1796 case DW_OP_breg22:
1797 case DW_OP_breg23:
1798 case DW_OP_breg24:
1799 case DW_OP_breg25:
1800 case DW_OP_breg26:
1801 case DW_OP_breg27:
1802 case DW_OP_breg28:
1803 case DW_OP_breg29:
1804 case DW_OP_breg30:
1805 case DW_OP_breg31:
1806 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1807 break;
1808 case DW_OP_regx:
1809 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1810 break;
1811 case DW_OP_fbreg:
1812 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1813 break;
1814 case DW_OP_bregx:
1815 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1816 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1817 break;
1818 case DW_OP_piece:
1819 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1820 break;
1821 case DW_OP_bit_piece:
1822 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1823 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1824 break;
1825 case DW_OP_deref_size:
1826 case DW_OP_xderef_size:
1827 size += 1;
1828 break;
1829 case DW_OP_call2:
1830 size += 2;
1831 break;
1832 case DW_OP_call4:
1833 size += 4;
1834 break;
1835 case DW_OP_call_ref:
1836 case DW_OP_GNU_variable_value:
1837 size += DWARF_REF_SIZE;
1838 break;
1839 case DW_OP_implicit_value:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1841 + loc->dw_loc_oprnd1.v.val_unsigned;
1842 break;
1843 case DW_OP_implicit_pointer:
1844 case DW_OP_GNU_implicit_pointer:
1845 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1846 break;
1847 case DW_OP_entry_value:
1848 case DW_OP_GNU_entry_value:
1849 {
1850 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1851 size += size_of_uleb128 (op_size) + op_size;
1852 break;
1853 }
1854 case DW_OP_const_type:
1855 case DW_OP_GNU_const_type:
1856 {
1857 unsigned long o
1858 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1859 size += size_of_uleb128 (o) + 1;
1860 switch (loc->dw_loc_oprnd2.val_class)
1861 {
1862 case dw_val_class_vec:
1863 size += loc->dw_loc_oprnd2.v.val_vec.length
1864 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1865 break;
1866 case dw_val_class_const:
1867 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1868 break;
1869 case dw_val_class_const_double:
1870 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1871 break;
1872 case dw_val_class_wide_int:
1873 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1874 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1875 break;
1876 default:
1877 gcc_unreachable ();
1878 }
1879 break;
1880 }
1881 case DW_OP_regval_type:
1882 case DW_OP_GNU_regval_type:
1883 {
1884 unsigned long o
1885 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1887 + size_of_uleb128 (o);
1888 }
1889 break;
1890 case DW_OP_deref_type:
1891 case DW_OP_GNU_deref_type:
1892 {
1893 unsigned long o
1894 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1895 size += 1 + size_of_uleb128 (o);
1896 }
1897 break;
1898 case DW_OP_convert:
1899 case DW_OP_reinterpret:
1900 case DW_OP_GNU_convert:
1901 case DW_OP_GNU_reinterpret:
1902 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1903 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1904 else
1905 {
1906 unsigned long o
1907 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1908 size += size_of_uleb128 (o);
1909 }
1910 break;
1911 case DW_OP_GNU_parameter_ref:
1912 size += 4;
1913 break;
1914 default:
1915 break;
1916 }
1917
1918 return size;
1919 }
1920
1921 /* Return the size of a series of location descriptors. */
1922
1923 unsigned long
1924 size_of_locs (dw_loc_descr_ref loc)
1925 {
1926 dw_loc_descr_ref l;
1927 unsigned long size;
1928
1929 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1930 field, to avoid writing to a PCH file. */
1931 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1932 {
1933 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1934 break;
1935 size += size_of_loc_descr (l);
1936 }
1937 if (! l)
1938 return size;
1939
1940 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1941 {
1942 l->dw_loc_addr = size;
1943 size += size_of_loc_descr (l);
1944 }
1945
1946 return size;
1947 }
1948
1949 /* Return the size of the value in a DW_AT_discr_value attribute. */
1950
1951 static int
1952 size_of_discr_value (dw_discr_value *discr_value)
1953 {
1954 if (discr_value->pos)
1955 return size_of_uleb128 (discr_value->v.uval);
1956 else
1957 return size_of_sleb128 (discr_value->v.sval);
1958 }
1959
1960 /* Return the size of the value in a DW_AT_discr_list attribute. */
1961
1962 static int
1963 size_of_discr_list (dw_discr_list_ref discr_list)
1964 {
1965 int size = 0;
1966
1967 for (dw_discr_list_ref list = discr_list;
1968 list != NULL;
1969 list = list->dw_discr_next)
1970 {
1971 /* One byte for the discriminant value descriptor, and then one or two
1972 LEB128 numbers, depending on whether it's a single case label or a
1973 range label. */
1974 size += 1;
1975 size += size_of_discr_value (&list->dw_discr_lower_bound);
1976 if (list->dw_discr_range != 0)
1977 size += size_of_discr_value (&list->dw_discr_upper_bound);
1978 }
1979 return size;
1980 }
1981
1982 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1983 static void get_ref_die_offset_label (char *, dw_die_ref);
1984 static unsigned long int get_ref_die_offset (dw_die_ref);
1985
1986 /* Output location description stack opcode's operands (if any).
1987 The for_eh_or_skip parameter controls whether register numbers are
1988 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1989 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1990 info). This should be suppressed for the cases that have not been converted
1991 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1992
1993 static void
1994 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1995 {
1996 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1997 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1998
1999 switch (loc->dw_loc_opc)
2000 {
2001 #ifdef DWARF2_DEBUGGING_INFO
2002 case DW_OP_const2u:
2003 case DW_OP_const2s:
2004 dw2_asm_output_data (2, val1->v.val_int, NULL);
2005 break;
2006 case DW_OP_const4u:
2007 if (loc->dtprel)
2008 {
2009 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2010 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2011 val1->v.val_addr);
2012 fputc ('\n', asm_out_file);
2013 break;
2014 }
2015 /* FALLTHRU */
2016 case DW_OP_const4s:
2017 dw2_asm_output_data (4, val1->v.val_int, NULL);
2018 break;
2019 case DW_OP_const8u:
2020 if (loc->dtprel)
2021 {
2022 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2023 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2024 val1->v.val_addr);
2025 fputc ('\n', asm_out_file);
2026 break;
2027 }
2028 /* FALLTHRU */
2029 case DW_OP_const8s:
2030 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2031 dw2_asm_output_data (8, val1->v.val_int, NULL);
2032 break;
2033 case DW_OP_skip:
2034 case DW_OP_bra:
2035 {
2036 int offset;
2037
2038 gcc_assert (val1->val_class == dw_val_class_loc);
2039 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2040
2041 dw2_asm_output_data (2, offset, NULL);
2042 }
2043 break;
2044 case DW_OP_implicit_value:
2045 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2046 switch (val2->val_class)
2047 {
2048 case dw_val_class_const:
2049 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2050 break;
2051 case dw_val_class_vec:
2052 {
2053 unsigned int elt_size = val2->v.val_vec.elt_size;
2054 unsigned int len = val2->v.val_vec.length;
2055 unsigned int i;
2056 unsigned char *p;
2057
2058 if (elt_size > sizeof (HOST_WIDE_INT))
2059 {
2060 elt_size /= 2;
2061 len *= 2;
2062 }
2063 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2064 i < len;
2065 i++, p += elt_size)
2066 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2067 "fp or vector constant word %u", i);
2068 }
2069 break;
2070 case dw_val_class_const_double:
2071 {
2072 unsigned HOST_WIDE_INT first, second;
2073
2074 if (WORDS_BIG_ENDIAN)
2075 {
2076 first = val2->v.val_double.high;
2077 second = val2->v.val_double.low;
2078 }
2079 else
2080 {
2081 first = val2->v.val_double.low;
2082 second = val2->v.val_double.high;
2083 }
2084 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2085 first, NULL);
2086 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2087 second, NULL);
2088 }
2089 break;
2090 case dw_val_class_wide_int:
2091 {
2092 int i;
2093 int len = get_full_len (*val2->v.val_wide);
2094 if (WORDS_BIG_ENDIAN)
2095 for (i = len - 1; i >= 0; --i)
2096 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2097 val2->v.val_wide->elt (i), NULL);
2098 else
2099 for (i = 0; i < len; ++i)
2100 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2101 val2->v.val_wide->elt (i), NULL);
2102 }
2103 break;
2104 case dw_val_class_addr:
2105 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2106 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2107 break;
2108 default:
2109 gcc_unreachable ();
2110 }
2111 break;
2112 #else
2113 case DW_OP_const2u:
2114 case DW_OP_const2s:
2115 case DW_OP_const4u:
2116 case DW_OP_const4s:
2117 case DW_OP_const8u:
2118 case DW_OP_const8s:
2119 case DW_OP_skip:
2120 case DW_OP_bra:
2121 case DW_OP_implicit_value:
2122 	  /* We currently don't make any attempt to make sure these are
2123 	     aligned properly like we do for the main unwind info, so
2124 	     we don't support emitting anything larger than a byte if we're
2125 	     only doing unwinding.  */
2126 gcc_unreachable ();
2127 #endif
2128 case DW_OP_const1u:
2129 case DW_OP_const1s:
2130 dw2_asm_output_data (1, val1->v.val_int, NULL);
2131 break;
2132 case DW_OP_constu:
2133 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2134 break;
2135 case DW_OP_consts:
2136 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2137 break;
2138 case DW_OP_pick:
2139 dw2_asm_output_data (1, val1->v.val_int, NULL);
2140 break;
2141 case DW_OP_plus_uconst:
2142 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2143 break;
2144 case DW_OP_breg0:
2145 case DW_OP_breg1:
2146 case DW_OP_breg2:
2147 case DW_OP_breg3:
2148 case DW_OP_breg4:
2149 case DW_OP_breg5:
2150 case DW_OP_breg6:
2151 case DW_OP_breg7:
2152 case DW_OP_breg8:
2153 case DW_OP_breg9:
2154 case DW_OP_breg10:
2155 case DW_OP_breg11:
2156 case DW_OP_breg12:
2157 case DW_OP_breg13:
2158 case DW_OP_breg14:
2159 case DW_OP_breg15:
2160 case DW_OP_breg16:
2161 case DW_OP_breg17:
2162 case DW_OP_breg18:
2163 case DW_OP_breg19:
2164 case DW_OP_breg20:
2165 case DW_OP_breg21:
2166 case DW_OP_breg22:
2167 case DW_OP_breg23:
2168 case DW_OP_breg24:
2169 case DW_OP_breg25:
2170 case DW_OP_breg26:
2171 case DW_OP_breg27:
2172 case DW_OP_breg28:
2173 case DW_OP_breg29:
2174 case DW_OP_breg30:
2175 case DW_OP_breg31:
2176 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2177 break;
2178 case DW_OP_regx:
2179 {
2180 unsigned r = val1->v.val_unsigned;
2181 if (for_eh_or_skip >= 0)
2182 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2183 gcc_assert (size_of_uleb128 (r)
2184 == size_of_uleb128 (val1->v.val_unsigned));
2185 dw2_asm_output_data_uleb128 (r, NULL);
2186 }
2187 break;
2188 case DW_OP_fbreg:
2189 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2190 break;
2191 case DW_OP_bregx:
2192 {
2193 unsigned r = val1->v.val_unsigned;
2194 if (for_eh_or_skip >= 0)
2195 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2196 gcc_assert (size_of_uleb128 (r)
2197 == size_of_uleb128 (val1->v.val_unsigned));
2198 dw2_asm_output_data_uleb128 (r, NULL);
2199 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2200 }
2201 break;
2202 case DW_OP_piece:
2203 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2204 break;
2205 case DW_OP_bit_piece:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_deref_size:
2210 case DW_OP_xderef_size:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213
2214 case DW_OP_addr:
2215 if (loc->dtprel)
2216 {
2217 if (targetm.asm_out.output_dwarf_dtprel)
2218 {
2219 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2220 DWARF2_ADDR_SIZE,
2221 val1->v.val_addr);
2222 fputc ('\n', asm_out_file);
2223 }
2224 else
2225 gcc_unreachable ();
2226 }
2227 else
2228 {
2229 #ifdef DWARF2_DEBUGGING_INFO
2230 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2231 #else
2232 gcc_unreachable ();
2233 #endif
2234 }
2235 break;
2236
2237 case DW_OP_GNU_addr_index:
2238 case DW_OP_GNU_const_index:
2239 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2240 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2241 "(index into .debug_addr)");
2242 break;
2243
2244 case DW_OP_call2:
2245 case DW_OP_call4:
2246 {
2247 unsigned long die_offset
2248 = get_ref_die_offset (val1->v.val_die_ref.die);
2249 /* Make sure the offset has been computed and that we can encode it as
2250 an operand. */
2251 gcc_assert (die_offset > 0
2252 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2253 ? 0xffff
2254 : 0xffffffff));
2255 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2256 die_offset, NULL);
2257 }
2258 break;
2259
2260 case DW_OP_call_ref:
2261 case DW_OP_GNU_variable_value:
2262 {
2263 char label[MAX_ARTIFICIAL_LABEL_BYTES
2264 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2265 gcc_assert (val1->val_class == dw_val_class_die_ref);
2266 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2267 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2268 }
2269 break;
2270
2271 case DW_OP_implicit_pointer:
2272 case DW_OP_GNU_implicit_pointer:
2273 {
2274 char label[MAX_ARTIFICIAL_LABEL_BYTES
2275 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2276 gcc_assert (val1->val_class == dw_val_class_die_ref);
2277 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2278 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2279 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2280 }
2281 break;
2282
2283 case DW_OP_entry_value:
2284 case DW_OP_GNU_entry_value:
2285 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2286 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2287 break;
2288
2289 case DW_OP_const_type:
2290 case DW_OP_GNU_const_type:
2291 {
2292 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2293 gcc_assert (o);
2294 dw2_asm_output_data_uleb128 (o, NULL);
2295 switch (val2->val_class)
2296 {
2297 case dw_val_class_const:
2298 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2299 dw2_asm_output_data (1, l, NULL);
2300 dw2_asm_output_data (l, val2->v.val_int, NULL);
2301 break;
2302 case dw_val_class_vec:
2303 {
2304 unsigned int elt_size = val2->v.val_vec.elt_size;
2305 unsigned int len = val2->v.val_vec.length;
2306 unsigned int i;
2307 unsigned char *p;
2308
2309 l = len * elt_size;
2310 dw2_asm_output_data (1, l, NULL);
2311 if (elt_size > sizeof (HOST_WIDE_INT))
2312 {
2313 elt_size /= 2;
2314 len *= 2;
2315 }
2316 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2317 i < len;
2318 i++, p += elt_size)
2319 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2320 "fp or vector constant word %u", i);
2321 }
2322 break;
2323 case dw_val_class_const_double:
2324 {
2325 unsigned HOST_WIDE_INT first, second;
2326 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2327
2328 dw2_asm_output_data (1, 2 * l, NULL);
2329 if (WORDS_BIG_ENDIAN)
2330 {
2331 first = val2->v.val_double.high;
2332 second = val2->v.val_double.low;
2333 }
2334 else
2335 {
2336 first = val2->v.val_double.low;
2337 second = val2->v.val_double.high;
2338 }
2339 dw2_asm_output_data (l, first, NULL);
2340 dw2_asm_output_data (l, second, NULL);
2341 }
2342 break;
2343 case dw_val_class_wide_int:
2344 {
2345 int i;
2346 int len = get_full_len (*val2->v.val_wide);
2347 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2348
2349 dw2_asm_output_data (1, len * l, NULL);
2350 if (WORDS_BIG_ENDIAN)
2351 for (i = len - 1; i >= 0; --i)
2352 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2353 else
2354 for (i = 0; i < len; ++i)
2355 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2356 }
2357 break;
2358 default:
2359 gcc_unreachable ();
2360 }
2361 }
2362 break;
2363 case DW_OP_regval_type:
2364 case DW_OP_GNU_regval_type:
2365 {
2366 unsigned r = val1->v.val_unsigned;
2367 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2368 gcc_assert (o);
2369 if (for_eh_or_skip >= 0)
2370 {
2371 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2372 gcc_assert (size_of_uleb128 (r)
2373 == size_of_uleb128 (val1->v.val_unsigned));
2374 }
2375 dw2_asm_output_data_uleb128 (r, NULL);
2376 dw2_asm_output_data_uleb128 (o, NULL);
2377 }
2378 break;
2379 case DW_OP_deref_type:
2380 case DW_OP_GNU_deref_type:
2381 {
2382 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2383 gcc_assert (o);
2384 dw2_asm_output_data (1, val1->v.val_int, NULL);
2385 dw2_asm_output_data_uleb128 (o, NULL);
2386 }
2387 break;
2388 case DW_OP_convert:
2389 case DW_OP_reinterpret:
2390 case DW_OP_GNU_convert:
2391 case DW_OP_GNU_reinterpret:
2392 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2393 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2394 else
2395 {
2396 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2397 gcc_assert (o);
2398 dw2_asm_output_data_uleb128 (o, NULL);
2399 }
2400 break;
2401
2402 case DW_OP_GNU_parameter_ref:
2403 {
2404 unsigned long o;
2405 gcc_assert (val1->val_class == dw_val_class_die_ref);
2406 o = get_ref_die_offset (val1->v.val_die_ref.die);
2407 dw2_asm_output_data (4, o, NULL);
2408 }
2409 break;
2410
2411 default:
2412 /* Other codes have no operands. */
2413 break;
2414 }
2415 }
2416
2417 /* Output a sequence of location operations.
2418 The for_eh_or_skip parameter controls whether register numbers are
2419 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2420 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2421 info). This should be suppressed for the cases that have not been converted
2422 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2423
2424 void
2425 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2426 {
2427 for (; loc != NULL; loc = loc->dw_loc_next)
2428 {
2429 enum dwarf_location_atom opc = loc->dw_loc_opc;
2430 /* Output the opcode. */
2431 if (for_eh_or_skip >= 0
2432 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2433 {
2434 unsigned r = (opc - DW_OP_breg0);
2435 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2436 gcc_assert (r <= 31);
2437 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2438 }
2439 else if (for_eh_or_skip >= 0
2440 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2441 {
2442 unsigned r = (opc - DW_OP_reg0);
2443 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2444 gcc_assert (r <= 31);
2445 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2446 }
2447
2448 dw2_asm_output_data (1, opc,
2449 "%s", dwarf_stack_op_name (opc));
2450
2451 /* Output the operand(s) (if any). */
2452 output_loc_operands (loc, for_eh_or_skip);
2453 }
2454 }
2455
2456 /* Output location description stack opcode's operands (if any).
2457 The output is single bytes on a line, suitable for .cfi_escape. */
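/* For example (a rough sketch, not an exact transcript of the output):
   a DW_OP_breg5 -8 location, printed via output_loc_sequence_raw below,
   comes out as the byte list "0x75,0x78" -- 0x75 for DW_OP_breg5 and
   0x78 for the sleb128 encoding of -8 -- ready to be appended to a
   .cfi_escape directive.  */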
2458
2459 static void
2460 output_loc_operands_raw (dw_loc_descr_ref loc)
2461 {
2462 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2463 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2464
2465 switch (loc->dw_loc_opc)
2466 {
2467 case DW_OP_addr:
2468 case DW_OP_GNU_addr_index:
2469 case DW_OP_GNU_const_index:
2470 case DW_OP_implicit_value:
2471 /* We cannot output addresses in .cfi_escape, only bytes. */
2472 gcc_unreachable ();
2473
2474 case DW_OP_const1u:
2475 case DW_OP_const1s:
2476 case DW_OP_pick:
2477 case DW_OP_deref_size:
2478 case DW_OP_xderef_size:
2479 fputc (',', asm_out_file);
2480 dw2_asm_output_data_raw (1, val1->v.val_int);
2481 break;
2482
2483 case DW_OP_const2u:
2484 case DW_OP_const2s:
2485 fputc (',', asm_out_file);
2486 dw2_asm_output_data_raw (2, val1->v.val_int);
2487 break;
2488
2489 case DW_OP_const4u:
2490 case DW_OP_const4s:
2491 fputc (',', asm_out_file);
2492 dw2_asm_output_data_raw (4, val1->v.val_int);
2493 break;
2494
2495 case DW_OP_const8u:
2496 case DW_OP_const8s:
2497 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2498 fputc (',', asm_out_file);
2499 dw2_asm_output_data_raw (8, val1->v.val_int);
2500 break;
2501
2502 case DW_OP_skip:
2503 case DW_OP_bra:
2504 {
2505 int offset;
2506
2507 gcc_assert (val1->val_class == dw_val_class_loc);
2508 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2509
2510 fputc (',', asm_out_file);
2511 dw2_asm_output_data_raw (2, offset);
2512 }
2513 break;
2514
2515 case DW_OP_regx:
2516 {
2517 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2518 gcc_assert (size_of_uleb128 (r)
2519 == size_of_uleb128 (val1->v.val_unsigned));
2520 fputc (',', asm_out_file);
2521 dw2_asm_output_data_uleb128_raw (r);
2522 }
2523 break;
2524
2525 case DW_OP_constu:
2526 case DW_OP_plus_uconst:
2527 case DW_OP_piece:
2528 fputc (',', asm_out_file);
2529 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2530 break;
2531
2532 case DW_OP_bit_piece:
2533 fputc (',', asm_out_file);
2534 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2535 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2536 break;
2537
2538 case DW_OP_consts:
2539 case DW_OP_breg0:
2540 case DW_OP_breg1:
2541 case DW_OP_breg2:
2542 case DW_OP_breg3:
2543 case DW_OP_breg4:
2544 case DW_OP_breg5:
2545 case DW_OP_breg6:
2546 case DW_OP_breg7:
2547 case DW_OP_breg8:
2548 case DW_OP_breg9:
2549 case DW_OP_breg10:
2550 case DW_OP_breg11:
2551 case DW_OP_breg12:
2552 case DW_OP_breg13:
2553 case DW_OP_breg14:
2554 case DW_OP_breg15:
2555 case DW_OP_breg16:
2556 case DW_OP_breg17:
2557 case DW_OP_breg18:
2558 case DW_OP_breg19:
2559 case DW_OP_breg20:
2560 case DW_OP_breg21:
2561 case DW_OP_breg22:
2562 case DW_OP_breg23:
2563 case DW_OP_breg24:
2564 case DW_OP_breg25:
2565 case DW_OP_breg26:
2566 case DW_OP_breg27:
2567 case DW_OP_breg28:
2568 case DW_OP_breg29:
2569 case DW_OP_breg30:
2570 case DW_OP_breg31:
2571 case DW_OP_fbreg:
2572 fputc (',', asm_out_file);
2573 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2574 break;
2575
2576 case DW_OP_bregx:
2577 {
2578 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2579 gcc_assert (size_of_uleb128 (r)
2580 == size_of_uleb128 (val1->v.val_unsigned));
2581 fputc (',', asm_out_file);
2582 dw2_asm_output_data_uleb128_raw (r);
2583 fputc (',', asm_out_file);
2584 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2585 }
2586 break;
2587
2588 case DW_OP_implicit_pointer:
2589 case DW_OP_entry_value:
2590 case DW_OP_const_type:
2591 case DW_OP_regval_type:
2592 case DW_OP_deref_type:
2593 case DW_OP_convert:
2594 case DW_OP_reinterpret:
2595 case DW_OP_GNU_implicit_pointer:
2596 case DW_OP_GNU_entry_value:
2597 case DW_OP_GNU_const_type:
2598 case DW_OP_GNU_regval_type:
2599 case DW_OP_GNU_deref_type:
2600 case DW_OP_GNU_convert:
2601 case DW_OP_GNU_reinterpret:
2602 case DW_OP_GNU_parameter_ref:
2603 gcc_unreachable ();
2604 break;
2605
2606 default:
2607 /* Other codes have no operands. */
2608 break;
2609 }
2610 }
2611
2612 void
2613 output_loc_sequence_raw (dw_loc_descr_ref loc)
2614 {
2615 while (1)
2616 {
2617 enum dwarf_location_atom opc = loc->dw_loc_opc;
2618 /* Output the opcode. */
2619 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2620 {
2621 unsigned r = (opc - DW_OP_breg0);
2622 r = DWARF2_FRAME_REG_OUT (r, 1);
2623 gcc_assert (r <= 31);
2624 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2625 }
2626 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2627 {
2628 unsigned r = (opc - DW_OP_reg0);
2629 r = DWARF2_FRAME_REG_OUT (r, 1);
2630 gcc_assert (r <= 31);
2631 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2632 }
2633 /* Output the opcode. */
2634 fprintf (asm_out_file, "%#x", opc);
2635 output_loc_operands_raw (loc);
2636
2637 if (!loc->dw_loc_next)
2638 break;
2639 loc = loc->dw_loc_next;
2640
2641 fputc (',', asm_out_file);
2642 }
2643 }
2644
2645 /* This function builds a dwarf location descriptor sequence from a
2646 dw_cfa_location, adding the given OFFSET to the result of the
2647 expression. */
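/* For example (a sketch; the exact opcodes depend on the register number
   and on the sign of the final offset): for an indirect CFA the result is
   roughly
       DW_OP_breg<R> <base_offset>; DW_OP_deref; DW_OP_plus_uconst <off>
   and for a direct CFA simply
       DW_OP_breg<R> <off>
   where <off> is OFFSET plus the CFA's own offset.  */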
2648
2649 struct dw_loc_descr_node *
2650 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2651 {
2652 struct dw_loc_descr_node *head, *tmp;
2653
2654 offset += cfa->offset;
2655
2656 if (cfa->indirect)
2657 {
2658 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2659 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2660 head->dw_loc_oprnd1.val_entry = NULL;
2661 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2662 add_loc_descr (&head, tmp);
2663 loc_descr_plus_const (&head, offset);
2664 }
2665 else
2666 head = new_reg_loc_descr (cfa->reg, offset);
2667
2668 return head;
2669 }
2670
2671 /* This function builds a dwarf location descriptor sequence for
2672    the address at OFFSET from the CFA when the stack is aligned to
2673    an ALIGNMENT-byte boundary.  */
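/* For example (a sketch): when the CFA is HARD_FRAME_POINTER_REGNUM plus an
   offset, the emulated-alignment case below produces roughly
       DW_OP_breg<FP> 0; DW_OP_lit/const <ALIGNMENT>; DW_OP_and;
       DW_OP_plus_uconst <OFFSET>
   i.e. the frame pointer value is masked with ALIGNMENT before OFFSET is
   added.  */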
2674
2675 struct dw_loc_descr_node *
2676 build_cfa_aligned_loc (dw_cfa_location *cfa,
2677 poly_int64 offset, HOST_WIDE_INT alignment)
2678 {
2679 struct dw_loc_descr_node *head;
2680 unsigned int dwarf_fp
2681 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2682
2683 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2684 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2685 {
2686 head = new_reg_loc_descr (dwarf_fp, 0);
2687 add_loc_descr (&head, int_loc_descriptor (alignment));
2688 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2689 loc_descr_plus_const (&head, offset);
2690 }
2691 else
2692 head = new_reg_loc_descr (dwarf_fp, offset);
2693 return head;
2694 }
2695 \f
2696 /* And now, the support for symbolic debugging information. */
2697
2698 /* .debug_str support. */
2699
2700 static void dwarf2out_init (const char *);
2701 static void dwarf2out_finish (const char *);
2702 static void dwarf2out_early_finish (const char *);
2703 static void dwarf2out_assembly_start (void);
2704 static void dwarf2out_define (unsigned int, const char *);
2705 static void dwarf2out_undef (unsigned int, const char *);
2706 static void dwarf2out_start_source_file (unsigned, const char *);
2707 static void dwarf2out_end_source_file (unsigned);
2708 static void dwarf2out_function_decl (tree);
2709 static void dwarf2out_begin_block (unsigned, unsigned);
2710 static void dwarf2out_end_block (unsigned, unsigned);
2711 static bool dwarf2out_ignore_block (const_tree);
2712 static void dwarf2out_early_global_decl (tree);
2713 static void dwarf2out_late_global_decl (tree);
2714 static void dwarf2out_type_decl (tree, int);
2715 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2716 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2717 dw_die_ref);
2718 static void dwarf2out_abstract_function (tree);
2719 static void dwarf2out_var_location (rtx_insn *);
2720 static void dwarf2out_size_function (tree);
2721 static void dwarf2out_begin_function (tree);
2722 static void dwarf2out_end_function (unsigned int);
2723 static void dwarf2out_register_main_translation_unit (tree unit);
2724 static void dwarf2out_set_name (tree, tree);
2725 static void dwarf2out_register_external_die (tree decl, const char *sym,
2726 unsigned HOST_WIDE_INT off);
2727 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2728 unsigned HOST_WIDE_INT *off);
2729
2730 /* The debug hooks structure. */
2731
2732 const struct gcc_debug_hooks dwarf2_debug_hooks =
2733 {
2734 dwarf2out_init,
2735 dwarf2out_finish,
2736 dwarf2out_early_finish,
2737 dwarf2out_assembly_start,
2738 dwarf2out_define,
2739 dwarf2out_undef,
2740 dwarf2out_start_source_file,
2741 dwarf2out_end_source_file,
2742 dwarf2out_begin_block,
2743 dwarf2out_end_block,
2744 dwarf2out_ignore_block,
2745 dwarf2out_source_line,
2746 dwarf2out_begin_prologue,
2747 #if VMS_DEBUGGING_INFO
2748 dwarf2out_vms_end_prologue,
2749 dwarf2out_vms_begin_epilogue,
2750 #else
2751 debug_nothing_int_charstar,
2752 debug_nothing_int_charstar,
2753 #endif
2754 dwarf2out_end_epilogue,
2755 dwarf2out_begin_function,
2756 dwarf2out_end_function, /* end_function */
2757 dwarf2out_register_main_translation_unit,
2758 dwarf2out_function_decl, /* function_decl */
2759 dwarf2out_early_global_decl,
2760 dwarf2out_late_global_decl,
2761 dwarf2out_type_decl, /* type_decl */
2762 dwarf2out_imported_module_or_decl,
2763 dwarf2out_die_ref_for_decl,
2764 dwarf2out_register_external_die,
2765 debug_nothing_tree, /* deferred_inline_function */
2766 /* The DWARF 2 backend tries to reduce debugging bloat by not
2767 emitting the abstract description of inline functions until
2768 something tries to reference them. */
2769 dwarf2out_abstract_function, /* outlining_inline_function */
2770 debug_nothing_rtx_code_label, /* label */
2771 debug_nothing_int, /* handle_pch */
2772 dwarf2out_var_location,
2773 debug_nothing_tree, /* inline_entry */
2774 dwarf2out_size_function, /* size_function */
2775 dwarf2out_switch_text_section,
2776 dwarf2out_set_name,
2777 1, /* start_end_main_source_file */
2778 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2779 };
2780
2781 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2782 {
2783 dwarf2out_init,
2784 debug_nothing_charstar,
2785 debug_nothing_charstar,
2786 dwarf2out_assembly_start,
2787 debug_nothing_int_charstar,
2788 debug_nothing_int_charstar,
2789 debug_nothing_int_charstar,
2790 debug_nothing_int,
2791 debug_nothing_int_int, /* begin_block */
2792 debug_nothing_int_int, /* end_block */
2793 debug_true_const_tree, /* ignore_block */
2794 dwarf2out_source_line, /* source_line */
2795 debug_nothing_int_int_charstar, /* begin_prologue */
2796 debug_nothing_int_charstar, /* end_prologue */
2797 debug_nothing_int_charstar, /* begin_epilogue */
2798 debug_nothing_int_charstar, /* end_epilogue */
2799 debug_nothing_tree, /* begin_function */
2800 debug_nothing_int, /* end_function */
2801 debug_nothing_tree, /* register_main_translation_unit */
2802 debug_nothing_tree, /* function_decl */
2803 debug_nothing_tree, /* early_global_decl */
2804 debug_nothing_tree, /* late_global_decl */
2805 debug_nothing_tree_int, /* type_decl */
2806 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2807 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2808 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2809 debug_nothing_tree, /* deferred_inline_function */
2810 debug_nothing_tree, /* outlining_inline_function */
2811 debug_nothing_rtx_code_label, /* label */
2812 debug_nothing_int, /* handle_pch */
2813 debug_nothing_rtx_insn, /* var_location */
2814 debug_nothing_tree, /* inline_entry */
2815 debug_nothing_tree, /* size_function */
2816 debug_nothing_void, /* switch_text_section */
2817 debug_nothing_tree_tree, /* set_name */
2818 0, /* start_end_main_source_file */
2819 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2820 };
2821 \f
2822 /* NOTE: In the comments in this file, many references are made to
2823 "Debugging Information Entries". This term is abbreviated as `DIE'
2824 throughout the remainder of this file. */
2825
2826 /* An internal representation of the DWARF output is built, and then
2827 walked to generate the DWARF debugging info. The walk of the internal
2828 representation is done after the entire program has been compiled.
2829 The types below are used to describe the internal representation. */
2830
2831 /* Whether to put type DIEs into their own section .debug_types instead
2832    of making them part of the .debug_info section.  Only supported for
2833    Dwarf V4 or higher, and only if the user didn't disable them through
2834    -fno-debug-types-section.  It is more efficient to put them in
2835    separate comdat sections since the linker will then be able to
2836    remove duplicates.  But not all tools support .debug_types sections
2837    yet.  For Dwarf V5 or higher .debug_types doesn't exist any more;
2838    the DW_UT_type unit type in .debug_info is used instead.  */
2839
2840 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2841
2842 /* Various DIEs use offsets relative to the beginning of the
2843    .debug_info section to refer to each other.  */
2844
2845 typedef long int dw_offset;
2846
2847 struct comdat_type_node;
2848
2849 /* The entries in the line_info table more-or-less mirror the opcodes
2850 that are used in the real dwarf line table. Arrays of these entries
2851 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2852 supported. */
2853
2854 enum dw_line_info_opcode {
2855 /* Emit DW_LNE_set_address; the operand is the label index. */
2856 LI_set_address,
2857
2858 /* Emit a row to the matrix with the given line. This may be done
2859 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2860 special opcodes. */
2861 LI_set_line,
2862
2863 /* Emit a DW_LNS_set_file. */
2864 LI_set_file,
2865
2866 /* Emit a DW_LNS_set_column. */
2867 LI_set_column,
2868
2869 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2870 LI_negate_stmt,
2871
2872 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2873 LI_set_prologue_end,
2874 LI_set_epilogue_begin,
2875
2876 /* Emit a DW_LNE_set_discriminator. */
2877 LI_set_discriminator
2878 };
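/* A minimal sketch of how these are typically used: each row of the line
   matrix is recorded as an optional LI_set_address (only when the label
   changes), followed by any needed LI_set_file / LI_set_column /
   LI_negate_stmt / LI_set_discriminator entries, and finally an LI_set_line
   entry, which is what actually emits the row.  */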
2879
2880 typedef struct GTY(()) dw_line_info_struct {
2881 enum dw_line_info_opcode opcode;
2882 unsigned int val;
2883 } dw_line_info_entry;
2884
2885
2886 struct GTY(()) dw_line_info_table {
2887 /* The label that marks the end of this section. */
2888 const char *end_label;
2889
2890 /* The values for the last row of the matrix, as collected in the table.
2891 These are used to minimize the changes to the next row. */
2892 unsigned int file_num;
2893 unsigned int line_num;
2894 unsigned int column_num;
2895 int discrim_num;
2896 bool is_stmt;
2897 bool in_use;
2898
2899 vec<dw_line_info_entry, va_gc> *entries;
2900 };
2901
2902
2903 /* Each DIE attribute has a field specifying the attribute kind
2904    and an attribute value.  Attributes are collected in a vector
2905    on the DIE they modify.  */
2906
2907 typedef struct GTY(()) dw_attr_struct {
2908 enum dwarf_attribute dw_attr;
2909 dw_val_node dw_attr_val;
2910 }
2911 dw_attr_node;
2912
2913
2914 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2915 The children of each node form a circular list linked by
2916 die_sib. die_child points to the node *before* the "first" child node. */
2917
2918 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2919 union die_symbol_or_type_node
2920 {
2921 const char * GTY ((tag ("0"))) die_symbol;
2922 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2923 }
2924 GTY ((desc ("%0.comdat_type_p"))) die_id;
2925 vec<dw_attr_node, va_gc> *die_attr;
2926 dw_die_ref die_parent;
2927 dw_die_ref die_child;
2928 dw_die_ref die_sib;
2929 dw_die_ref die_definition; /* ref from a specification to its definition */
2930 dw_offset die_offset;
2931 unsigned long die_abbrev;
2932 int die_mark;
2933 unsigned int decl_id;
2934 enum dwarf_tag die_tag;
2935 /* Die is used and must not be pruned as unused. */
2936 BOOL_BITFIELD die_perennial_p : 1;
2937 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2938   /* For an external reference to die_symbol, set if die_offset contains
2939      an extra offset relative to that symbol.  */
2940 BOOL_BITFIELD with_offset : 1;
2941   /* Whether this DIE was removed from the DIE tree, for example via
2942      prune_unused_types.  The DIE lookup routines do not consider
2943      removed DIEs to be present.  */
2944 BOOL_BITFIELD removed : 1;
2945 /* Lots of spare bits. */
2946 }
2947 die_node;
2948
2949 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2950 static bool early_dwarf;
2951 static bool early_dwarf_finished;
2952 struct set_early_dwarf {
2953 bool saved;
2954 set_early_dwarf () : saved(early_dwarf)
2955 {
2956 gcc_assert (! early_dwarf_finished);
2957 early_dwarf = true;
2958 }
2959 ~set_early_dwarf () { early_dwarf = saved; }
2960 };
2961
2962 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2963 #define FOR_EACH_CHILD(die, c, expr) do { \
2964 c = die->die_child; \
2965 if (c) do { \
2966 c = c->die_sib; \
2967 expr; \
2968 } while (c != die->die_child); \
2969 } while (0)
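/* For example (a minimal sketch), counting the immediate children of DIE:

       dw_die_ref c;
       unsigned count = 0;
       FOR_EACH_CHILD (die, c, count++);

   Because die_child points at the last child and each child's die_sib links
   to the next one (wrapping around to the first), the loop visits every
   child exactly once, in order.  */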
2970
2971 /* The pubname structure */
2972
2973 typedef struct GTY(()) pubname_struct {
2974 dw_die_ref die;
2975 const char *name;
2976 }
2977 pubname_entry;
2978
2979
2980 struct GTY(()) dw_ranges {
2981 const char *label;
2982 /* If this is positive, it's a block number, otherwise it's a
2983 bitwise-negated index into dw_ranges_by_label. */
2984 int num;
2985 /* Index for the range list for DW_FORM_rnglistx. */
2986 unsigned int idx : 31;
2987   /* True if this range might possibly be in a different section
2988      from the previous entry.  */
2989 unsigned int maybe_new_sec : 1;
2990 };
2991
2992 /* A structure to hold a macinfo entry. */
2993
2994 typedef struct GTY(()) macinfo_struct {
2995 unsigned char code;
2996 unsigned HOST_WIDE_INT lineno;
2997 const char *info;
2998 }
2999 macinfo_entry;
3000
3001
3002 struct GTY(()) dw_ranges_by_label {
3003 const char *begin;
3004 const char *end;
3005 };
3006
3007 /* The comdat type node structure. */
3008 struct GTY(()) comdat_type_node
3009 {
3010 dw_die_ref root_die;
3011 dw_die_ref type_die;
3012 dw_die_ref skeleton_die;
3013 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3014 comdat_type_node *next;
3015 };
3016
3017 /* A list of DIEs for which we can't determine ancestry (the die_parent
3018    field) just yet.  Later in dwarf2out_finish we will fill in the
3019    missing bits.  */
3020 typedef struct GTY(()) limbo_die_struct {
3021 dw_die_ref die;
3022 /* The tree for which this DIE was created. We use this to
3023 determine ancestry later. */
3024 tree created_for;
3025 struct limbo_die_struct *next;
3026 }
3027 limbo_die_node;
3028
3029 typedef struct skeleton_chain_struct
3030 {
3031 dw_die_ref old_die;
3032 dw_die_ref new_die;
3033 struct skeleton_chain_struct *parent;
3034 }
3035 skeleton_chain_node;
3036
3037 /* Define a macro which returns nonzero for a TYPE_DECL which was
3038 implicitly generated for a type.
3039
3040 Note that, unlike the C front-end (which generates a NULL named
3041 TYPE_DECL node for each complete tagged type, each array type,
3042 and each function type node created) the C++ front-end generates
3043 a _named_ TYPE_DECL node for each tagged type node created.
3044 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3045 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3046 front-end, but for each type, tagged or not. */
3047
3048 #define TYPE_DECL_IS_STUB(decl) \
3049 (DECL_NAME (decl) == NULL_TREE \
3050 || (DECL_ARTIFICIAL (decl) \
3051 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3052 /* This is necessary for stub decls that \
3053 appear in nested inline functions. */ \
3054 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3055 && (decl_ultimate_origin (decl) \
3056 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3057
3058 /* Information concerning the compilation unit's programming
3059 language, and compiler version. */
3060
3061 /* Fixed size portion of the DWARF compilation unit header. */
3062 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3063 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3064 + (dwarf_version >= 5 ? 4 : 3))
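/* For instance (a worked example, assuming 32-bit DWARF): with a 4-byte
   initial length and a 4-byte .debug_abbrev offset, the pre-DWARF-5 header
   is 4 + 4 + 3 = 11 bytes (version, abbrev offset, address size), while the
   DWARF 5 header is 4 + 4 + 4 = 12 bytes because of the extra unit-type
   byte.  */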
3065
3066 /* Fixed size portion of the DWARF comdat type unit header. */
3067 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3068 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3069 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3070
3071 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3072 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3073 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3074
3075 /* Fixed size portion of public names info. */
3076 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3077
3078 /* Fixed size portion of the address range info. */
3079 #define DWARF_ARANGES_HEADER_SIZE \
3080 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3081 DWARF2_ADDR_SIZE * 2) \
3082 - DWARF_INITIAL_LENGTH_SIZE)
3083
3084 /* Size of padding portion in the address range info. It must be
3085 aligned to twice the pointer size. */
3086 #define DWARF_ARANGES_PAD_SIZE \
3087 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3088 DWARF2_ADDR_SIZE * 2) \
3089 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
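/* For instance (a worked example, assuming 32-bit DWARF and 8-byte
   addresses): 4 + 4 + 4 = 12 bytes of header fields are rounded up to the
   16-byte (2 * address size) boundary, giving a header size of 16 - 4 = 12
   and 16 - 12 = 4 bytes of padding before the address/length pairs.  */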
3090
3091 /* Use assembler line directives if available. */
3092 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3093 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3094 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3095 #else
3096 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3097 #endif
3098 #endif
3099
3100 /* Minimum line offset in a special line info. opcode.
3101 This value was chosen to give a reasonable range of values. */
3102 #define DWARF_LINE_BASE -10
3103
3104 /* First special line opcode - leave room for the standard opcodes. */
3105 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3106
3107 /* Range of line offsets in a special line info. opcode. */
3108 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
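/* A special opcode (see the DWARF standard) encodes a combined line and
   address advance as
     opcode = (line_delta - DWARF_LINE_BASE)
              + (DWARF_LINE_RANGE * operation_advance)
              + DWARF_LINE_OPCODE_BASE
   provided the result fits in one byte.  With DW_LNS_set_isa == 12, the
   values above work out to an opcode base of 13 and a line range of 242.  */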
3109
3110 /* Flag that indicates the initial value of the is_stmt_start flag.
3111 In the present implementation, we do not mark any lines as
3112 the beginning of a source statement, because that information
3113 is not made available by the GCC front-end. */
3114 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3115
3116 /* Maximum number of operations per instruction bundle. */
3117 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3118 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3119 #endif
3120
3121 /* This location is used by calc_die_sizes() to keep track of
3122    the offset of each DIE within the .debug_info section.  */
3123 static unsigned long next_die_offset;
3124
3125 /* Record the root of the DIEs built for the current compilation unit.  */
3126 static GTY(()) dw_die_ref single_comp_unit_die;
3127
3128 /* A list of type DIEs that have been separated into comdat sections. */
3129 static GTY(()) comdat_type_node *comdat_type_list;
3130
3131 /* A list of CU DIEs that have been separated. */
3132 static GTY(()) limbo_die_node *cu_die_list;
3133
3134 /* A list of DIEs with a NULL parent waiting to be relocated. */
3135 static GTY(()) limbo_die_node *limbo_die_list;
3136
3137 /* A list of DIEs for which we may have to generate
3138 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3139 static GTY(()) limbo_die_node *deferred_asm_name;
3140
3141 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3142 {
3143 typedef const char *compare_type;
3144
3145 static hashval_t hash (dwarf_file_data *);
3146 static bool equal (dwarf_file_data *, const char *);
3147 };
3148
3149 /* Filenames referenced by this compilation unit. */
3150 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3151
3152 struct decl_die_hasher : ggc_ptr_hash<die_node>
3153 {
3154 typedef tree compare_type;
3155
3156 static hashval_t hash (die_node *);
3157 static bool equal (die_node *, tree);
3158 };
3159 /* A hash table of references to DIEs that describe declarations.
3160    The key is a DECL_UID(), which is a unique number identifying each decl.  */
3161 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3162
3163 struct GTY ((for_user)) variable_value_struct {
3164 unsigned int decl_id;
3165 vec<dw_die_ref, va_gc> *dies;
3166 };
3167
3168 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3169 {
3170 typedef tree compare_type;
3171
3172 static hashval_t hash (variable_value_struct *);
3173 static bool equal (variable_value_struct *, tree);
3174 };
3175 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3176    dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3177    the DECL_CONTEXT of the referenced VAR_DECLs.  */
3178 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3179
3180 struct block_die_hasher : ggc_ptr_hash<die_struct>
3181 {
3182 static hashval_t hash (die_struct *);
3183 static bool equal (die_struct *, die_struct *);
3184 };
3185
3186 /* A hash table of references to DIEs that describe COMMON blocks.
3187    The key is DECL_UID() ^ die_parent.  */
3188 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3189
3190 typedef struct GTY(()) die_arg_entry_struct {
3191 dw_die_ref die;
3192 tree arg;
3193 } die_arg_entry;
3194
3195
3196 /* Node of the variable location list. */
3197 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3198 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3199 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3200 in mode of the EXPR_LIST node and first EXPR_LIST operand
3201 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3202 location or NULL for padding. For larger bitsizes,
3203 mode is 0 and first operand is a CONCAT with bitsize
3204 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3205 NULL as second operand. */
3206 rtx GTY (()) loc;
3207 const char * GTY (()) label;
3208 struct var_loc_node * GTY (()) next;
3209 };
3210
3211 /* Variable location list. */
3212 struct GTY ((for_user)) var_loc_list_def {
3213 struct var_loc_node * GTY (()) first;
3214
3215   /* Pointer to the last or last-but-one element of the chained
3216      list.  If the list is empty, both first and last are NULL.
3217      If the list contains just one node, or the last node is
3218      certainly not redundant, this points to the last node;
3219      otherwise it points to the last but one.  Do not mark it
3220      for GC because it is marked through the chain.  */
3221 struct var_loc_node * GTY ((skip ("%h"))) last;
3222
3223   /* Pointer to the last element before a section switch;
3224      if NULL, either sections weren't switched or first
3225      is after the section switch.  */
3226 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3227
3228 /* DECL_UID of the variable decl. */
3229 unsigned int decl_id;
3230 };
3231 typedef struct var_loc_list_def var_loc_list;
3232
3233 /* Call argument location list. */
3234 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3235 rtx GTY (()) call_arg_loc_note;
3236 const char * GTY (()) label;
3237 tree GTY (()) block;
3238 bool tail_call_p;
3239 rtx GTY (()) symbol_ref;
3240 struct call_arg_loc_node * GTY (()) next;
3241 };
3242
3243
3244 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3245 {
3246 typedef const_tree compare_type;
3247
3248 static hashval_t hash (var_loc_list *);
3249 static bool equal (var_loc_list *, const_tree);
3250 };
3251
3252 /* Table of decl location linked lists. */
3253 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3254
3255 /* Head and tail of call_arg_loc chain. */
3256 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3257 static struct call_arg_loc_node *call_arg_loc_last;
3258
3259 /* Number of call sites in the current function. */
3260 static int call_site_count = -1;
3261 /* Number of tail call sites in the current function. */
3262 static int tail_call_site_count = -1;
3263
3264 /* A cached location list. */
3265 struct GTY ((for_user)) cached_dw_loc_list_def {
3266 /* The DECL_UID of the decl that this entry describes. */
3267 unsigned int decl_id;
3268
3269 /* The cached location list. */
3270 dw_loc_list_ref loc_list;
3271 };
3272 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3273
3274 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3275 {
3276
3277 typedef const_tree compare_type;
3278
3279 static hashval_t hash (cached_dw_loc_list *);
3280 static bool equal (cached_dw_loc_list *, const_tree);
3281 };
3282
3283 /* Table of cached location lists. */
3284 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3285
3286 /* A vector of references to DIEs that are uniquely identified by their tag,
3287    presence/absence of child DIEs, and list of attribute/value pairs.  */
3288 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3289
3290 /* A hash map to remember the stack usage for DWARF procedures.  The value
3291    stored is the stack size difference between before the DWARF procedure
3292    invocation and after it returned.  In other words, for a DWARF procedure
3293    that consumes N stack slots and pushes M, this stores M - N.  */
3294 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3295
3296 /* A global counter for generating labels for line number data. */
3297 static unsigned int line_info_label_num;
3298
3299 /* The current table to which we should emit line number information
3300 for the current function. This will be set up at the beginning of
3301 assembly for the function. */
3302 static GTY(()) dw_line_info_table *cur_line_info_table;
3303
3304 /* The two default tables of line number info. */
3305 static GTY(()) dw_line_info_table *text_section_line_info;
3306 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3307
3308 /* The set of all non-default tables of line number info. */
3309 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3310
3311 /* A flag to tell pubnames/types export if there is an info section to
3312 refer to. */
3313 static bool info_section_emitted;
3314
3315 /* A pointer to the base of a table that contains a list of publicly
3316 accessible names. */
3317 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3318
3319 /* A pointer to the base of a table that contains a list of publicly
3320 accessible types. */
3321 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3322
3323 /* A pointer to the base of a table that contains a list of macro
3324 defines/undefines (and file start/end markers). */
3325 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3326
3327 /* True if .debug_macinfo or .debug_macros section is going to be
3328 emitted. */
3329 #define have_macinfo \
3330 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3331 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3332 && !macinfo_table->is_empty ())
3333
3334 /* Vector of DIEs for which we should generate .debug_ranges info.  */
3335 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3336
3337 /* Vector of pairs of labels referenced in ranges_table. */
3338 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3339
3340 /* Whether we have location lists that need outputting */
3341 static GTY(()) bool have_location_lists;
3342
3343 /* Unique label counter. */
3344 static GTY(()) unsigned int loclabel_num;
3345
3346 /* Unique label counter for point-of-call tables. */
3347 static GTY(()) unsigned int poc_label_num;
3348
3349 /* The last file entry emitted by maybe_emit_file(). */
3350 static GTY(()) struct dwarf_file_data * last_emitted_file;
3351
3352 /* Number of internal labels generated by gen_internal_sym(). */
3353 static GTY(()) int label_num;
3354
3355 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3356
3357 /* Instances of generic types for which we need to generate debug
3358    info that describes their generic parameters and arguments.  That
3359    generation needs to happen once all types are properly laid out, so
3360    we do it at the end of compilation.  */
3361 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3362
3363 /* Offset from the "steady-state frame pointer" to the frame base,
3364 within the current function. */
3365 static poly_int64 frame_pointer_fb_offset;
3366 static bool frame_pointer_fb_offset_valid;
3367
3368 static vec<dw_die_ref> base_types;
3369
3370 /* Flags to represent a set of attribute classes for attributes that represent
3371 a scalar value (bounds, pointers, ...). */
3372 enum dw_scalar_form
3373 {
3374 dw_scalar_form_constant = 0x01,
3375 dw_scalar_form_exprloc = 0x02,
3376 dw_scalar_form_reference = 0x04
3377 };
3378
3379 /* Forward declarations for functions defined in this file. */
3380
3381 static int is_pseudo_reg (const_rtx);
3382 static tree type_main_variant (tree);
3383 static int is_tagged_type (const_tree);
3384 static const char *dwarf_tag_name (unsigned);
3385 static const char *dwarf_attr_name (unsigned);
3386 static const char *dwarf_form_name (unsigned);
3387 static tree decl_ultimate_origin (const_tree);
3388 static tree decl_class_context (tree);
3389 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3390 static inline enum dw_val_class AT_class (dw_attr_node *);
3391 static inline unsigned int AT_index (dw_attr_node *);
3392 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3393 static inline unsigned AT_flag (dw_attr_node *);
3394 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3395 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3396 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3397 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3398 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3399 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3400 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3401 unsigned int, unsigned char *);
3402 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3403 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3404 static inline const char *AT_string (dw_attr_node *);
3405 static enum dwarf_form AT_string_form (dw_attr_node *);
3406 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3407 static void add_AT_specification (dw_die_ref, dw_die_ref);
3408 static inline dw_die_ref AT_ref (dw_attr_node *);
3409 static inline int AT_ref_external (dw_attr_node *);
3410 static inline void set_AT_ref_external (dw_attr_node *, int);
3411 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3412 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3413 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3414 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3415 dw_loc_list_ref);
3416 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3417 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3418 static void remove_addr_table_entry (addr_table_entry *);
3419 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3420 static inline rtx AT_addr (dw_attr_node *);
3421 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3422 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3423 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3424 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3425 const char *);
3426 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3427 unsigned HOST_WIDE_INT);
3428 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3429 unsigned long, bool);
3430 static inline const char *AT_lbl (dw_attr_node *);
3431 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3432 static const char *get_AT_low_pc (dw_die_ref);
3433 static const char *get_AT_hi_pc (dw_die_ref);
3434 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3435 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3436 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3437 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3438 static bool is_cxx (void);
3439 static bool is_cxx (const_tree);
3440 static bool is_fortran (void);
3441 static bool is_ada (void);
3442 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3443 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3444 static void add_child_die (dw_die_ref, dw_die_ref);
3445 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3446 static dw_die_ref lookup_type_die (tree);
3447 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3448 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3449 static void equate_type_number_to_die (tree, dw_die_ref);
3450 static dw_die_ref lookup_decl_die (tree);
3451 static var_loc_list *lookup_decl_loc (const_tree);
3452 static void equate_decl_number_to_die (tree, dw_die_ref);
3453 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3454 static void print_spaces (FILE *);
3455 static void print_die (dw_die_ref, FILE *);
3456 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3457 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3458 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3459 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3460 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3461 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3462 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3463 struct md5_ctx *, int *);
3464 struct checksum_attributes;
3465 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3466 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3467 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3468 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3469 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3470 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3471 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3472 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3473 static int is_type_die (dw_die_ref);
3474 static int is_comdat_die (dw_die_ref);
3475 static inline bool is_template_instantiation (dw_die_ref);
3476 static int is_declaration_die (dw_die_ref);
3477 static int should_move_die_to_comdat (dw_die_ref);
3478 static dw_die_ref clone_as_declaration (dw_die_ref);
3479 static dw_die_ref clone_die (dw_die_ref);
3480 static dw_die_ref clone_tree (dw_die_ref);
3481 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3482 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3483 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3484 static dw_die_ref generate_skeleton (dw_die_ref);
3485 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3486 dw_die_ref,
3487 dw_die_ref);
3488 static void break_out_comdat_types (dw_die_ref);
3489 static void copy_decls_for_unworthy_types (dw_die_ref);
3490
3491 static void add_sibling_attributes (dw_die_ref);
3492 static void output_location_lists (dw_die_ref);
3493 static int constant_size (unsigned HOST_WIDE_INT);
3494 static unsigned long size_of_die (dw_die_ref);
3495 static void calc_die_sizes (dw_die_ref);
3496 static void calc_base_type_die_sizes (void);
3497 static void mark_dies (dw_die_ref);
3498 static void unmark_dies (dw_die_ref);
3499 static void unmark_all_dies (dw_die_ref);
3500 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3501 static unsigned long size_of_aranges (void);
3502 static enum dwarf_form value_format (dw_attr_node *);
3503 static void output_value_format (dw_attr_node *);
3504 static void output_abbrev_section (void);
3505 static void output_die_abbrevs (unsigned long, dw_die_ref);
3506 static void output_die (dw_die_ref);
3507 static void output_compilation_unit_header (enum dwarf_unit_type);
3508 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3509 static void output_comdat_type_unit (comdat_type_node *);
3510 static const char *dwarf2_name (tree, int);
3511 static void add_pubname (tree, dw_die_ref);
3512 static void add_enumerator_pubname (const char *, dw_die_ref);
3513 static void add_pubname_string (const char *, dw_die_ref);
3514 static void add_pubtype (tree, dw_die_ref);
3515 static void output_pubnames (vec<pubname_entry, va_gc> *);
3516 static void output_aranges (void);
3517 static unsigned int add_ranges (const_tree, bool = false);
3518 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3519 bool *, bool);
3520 static void output_ranges (void);
3521 static dw_line_info_table *new_line_info_table (void);
3522 static void output_line_info (bool);
3523 static void output_file_names (void);
3524 static dw_die_ref base_type_die (tree, bool);
3525 static int is_base_type (tree);
3526 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3527 static int decl_quals (const_tree);
3528 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3529 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3530 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3531 static int type_is_enum (const_tree);
3532 static unsigned int dbx_reg_number (const_rtx);
3533 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3534 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3535 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3536 enum var_init_status);
3537 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3538 enum var_init_status);
3539 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3540 enum var_init_status);
3541 static int is_based_loc (const_rtx);
3542 static bool resolve_one_addr (rtx *);
3543 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3544 enum var_init_status);
3545 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3546 enum var_init_status);
3547 struct loc_descr_context;
3548 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3549 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3550 static dw_loc_list_ref loc_list_from_tree (tree, int,
3551 struct loc_descr_context *);
3552 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3553 struct loc_descr_context *);
3554 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3555 static tree field_type (const_tree);
3556 static unsigned int simple_type_align_in_bits (const_tree);
3557 static unsigned int simple_decl_align_in_bits (const_tree);
3558 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3559 struct vlr_context;
3560 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3561 HOST_WIDE_INT *);
3562 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3563 dw_loc_list_ref);
3564 static void add_data_member_location_attribute (dw_die_ref, tree,
3565 struct vlr_context *);
3566 static bool add_const_value_attribute (dw_die_ref, rtx);
3567 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3568 static void insert_wide_int (const wide_int &, unsigned char *, int);
3569 static void insert_float (const_rtx, unsigned char *);
3570 static rtx rtl_for_decl_location (tree);
3571 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3572 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3573 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3574 static void add_name_attribute (dw_die_ref, const char *);
3575 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3576 static void add_comp_dir_attribute (dw_die_ref);
3577 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3578 struct loc_descr_context *);
3579 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3580 struct loc_descr_context *);
3581 static void add_subscript_info (dw_die_ref, tree, bool);
3582 static void add_byte_size_attribute (dw_die_ref, tree);
3583 static void add_alignment_attribute (dw_die_ref, tree);
3584 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3585 struct vlr_context *);
3586 static void add_bit_size_attribute (dw_die_ref, tree);
3587 static void add_prototyped_attribute (dw_die_ref, tree);
3588 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3589 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3590 static void add_src_coords_attributes (dw_die_ref, tree);
3591 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3592 static void add_discr_value (dw_die_ref, dw_discr_value *);
3593 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3594 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3595 static void push_decl_scope (tree);
3596 static void pop_decl_scope (void);
3597 static dw_die_ref scope_die_for (tree, dw_die_ref);
3598 static inline int local_scope_p (dw_die_ref);
3599 static inline int class_scope_p (dw_die_ref);
3600 static inline int class_or_namespace_scope_p (dw_die_ref);
3601 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3602 static void add_calling_convention_attribute (dw_die_ref, tree);
3603 static const char *type_tag (const_tree);
3604 static tree member_declared_type (const_tree);
3605 #if 0
3606 static const char *decl_start_label (tree);
3607 #endif
3608 static void gen_array_type_die (tree, dw_die_ref);
3609 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3610 #if 0
3611 static void gen_entry_point_die (tree, dw_die_ref);
3612 #endif
3613 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3614 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3615 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3616 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3617 static void gen_formal_types_die (tree, dw_die_ref);
3618 static void gen_subprogram_die (tree, dw_die_ref);
3619 static void gen_variable_die (tree, tree, dw_die_ref);
3620 static void gen_const_die (tree, dw_die_ref);
3621 static void gen_label_die (tree, dw_die_ref);
3622 static void gen_lexical_block_die (tree, dw_die_ref);
3623 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3624 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3625 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3626 static dw_die_ref gen_compile_unit_die (const char *);
3627 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3628 static void gen_member_die (tree, dw_die_ref);
3629 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3630 enum debug_info_usage);
3631 static void gen_subroutine_type_die (tree, dw_die_ref);
3632 static void gen_typedef_die (tree, dw_die_ref);
3633 static void gen_type_die (tree, dw_die_ref);
3634 static void gen_block_die (tree, dw_die_ref);
3635 static void decls_for_scope (tree, dw_die_ref);
3636 static bool is_naming_typedef_decl (const_tree);
3637 static inline dw_die_ref get_context_die (tree);
3638 static void gen_namespace_die (tree, dw_die_ref);
3639 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3640 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3641 static dw_die_ref force_decl_die (tree);
3642 static dw_die_ref force_type_die (tree);
3643 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3644 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3645 static struct dwarf_file_data * lookup_filename (const char *);
3646 static void retry_incomplete_types (void);
3647 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3648 static void gen_generic_params_dies (tree);
3649 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3650 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3651 static void splice_child_die (dw_die_ref, dw_die_ref);
3652 static int file_info_cmp (const void *, const void *);
3653 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3654 const char *, const char *);
3655 static void output_loc_list (dw_loc_list_ref);
3656 static char *gen_internal_sym (const char *);
3657 static bool want_pubnames (void);
3658
3659 static void prune_unmark_dies (dw_die_ref);
3660 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3661 static void prune_unused_types_mark (dw_die_ref, int);
3662 static void prune_unused_types_walk (dw_die_ref);
3663 static void prune_unused_types_walk_attribs (dw_die_ref);
3664 static void prune_unused_types_prune (dw_die_ref);
3665 static void prune_unused_types (void);
3666 static int maybe_emit_file (struct dwarf_file_data *fd);
3667 static inline const char *AT_vms_delta1 (dw_attr_node *);
3668 static inline const char *AT_vms_delta2 (dw_attr_node *);
3669 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3670 const char *, const char *);
3671 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3672 static void gen_remaining_tmpl_value_param_die_attribute (void);
3673 static bool generic_type_p (tree);
3674 static void schedule_generic_params_dies_gen (tree t);
3675 static void gen_scheduled_generic_parms_dies (void);
3676 static void resolve_variable_values (void);
3677
3678 static const char *comp_dir_string (void);
3679
3680 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3681
3682 /* enum for tracking thread-local variables whose address is really an offset
3683 relative to the TLS pointer, which will need link-time relocation, but will
3684 not need relocation by the DWARF consumer. */
3685
3686 enum dtprel_bool
3687 {
3688 dtprel_false = 0,
3689 dtprel_true = 1
3690 };
3691
3692 /* Return the operator to use for an address of a variable. For dtprel_true, we
3693 use DW_OP_const*. For regular variables, which need both link-time
3694 relocation and consumer-level relocation (e.g., to account for shared objects
3695 loaded at a random address), we use DW_OP_addr*. */
3696
3697 static inline enum dwarf_location_atom
3698 dw_addr_op (enum dtprel_bool dtprel)
3699 {
3700 if (dtprel == dtprel_true)
3701 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3702 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3703 else
3704 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3705 }
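/* For example, on a 64-bit target without -gsplit-dwarf a regular
   variable's address is emitted with DW_OP_addr, while a dtprel value
   (an offset from the TLS pointer) uses DW_OP_const8u; with
   -gsplit-dwarf the indexed forms DW_OP_GNU_addr_index and
   DW_OP_GNU_const_index are used instead.  */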
3706
3707 /* Return a pointer to a newly allocated address location description. If
3708 dwarf_split_debug_info is true, then record the address with the appropriate
3709 relocation. */
3710 static inline dw_loc_descr_ref
3711 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3712 {
3713 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3714
3715 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3716 ref->dw_loc_oprnd1.v.val_addr = addr;
3717 ref->dtprel = dtprel;
3718 if (dwarf_split_debug_info)
3719 ref->dw_loc_oprnd1.val_entry
3720 = add_addr_table_entry (addr,
3721 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3722 else
3723 ref->dw_loc_oprnd1.val_entry = NULL;
3724
3725 return ref;
3726 }
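/* A minimal usage sketch (the decl name is hypothetical): for a variable
   whose DECL_RTL is a MEM at a link-time-constant address,

     dw_loc_descr_ref loc
       = new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false);

   yields a one-operation location expression, DW_OP_addr <symbol>, or its
   indexed split-debug-info equivalent.  */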
3727
3728 /* Section names used to hold DWARF debugging information. */
3729
3730 #ifndef DEBUG_INFO_SECTION
3731 #define DEBUG_INFO_SECTION ".debug_info"
3732 #endif
3733 #ifndef DEBUG_DWO_INFO_SECTION
3734 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3735 #endif
3736 #ifndef DEBUG_LTO_INFO_SECTION
3737 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3738 #endif
3739 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3740 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3741 #endif
3742 #ifndef DEBUG_ABBREV_SECTION
3743 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3744 #endif
3745 #ifndef DEBUG_LTO_ABBREV_SECTION
3746 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3747 #endif
3748 #ifndef DEBUG_DWO_ABBREV_SECTION
3749 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3750 #endif
3751 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3752 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3753 #endif
3754 #ifndef DEBUG_ARANGES_SECTION
3755 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3756 #endif
3757 #ifndef DEBUG_ADDR_SECTION
3758 #define DEBUG_ADDR_SECTION ".debug_addr"
3759 #endif
3760 #ifndef DEBUG_MACINFO_SECTION
3761 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3762 #endif
3763 #ifndef DEBUG_LTO_MACINFO_SECTION
3764 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3765 #endif
3766 #ifndef DEBUG_DWO_MACINFO_SECTION
3767 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3768 #endif
3769 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3770 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3771 #endif
3772 #ifndef DEBUG_MACRO_SECTION
3773 #define DEBUG_MACRO_SECTION ".debug_macro"
3774 #endif
3775 #ifndef DEBUG_LTO_MACRO_SECTION
3776 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3777 #endif
3778 #ifndef DEBUG_DWO_MACRO_SECTION
3779 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3780 #endif
3781 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3782 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3783 #endif
3784 #ifndef DEBUG_LINE_SECTION
3785 #define DEBUG_LINE_SECTION ".debug_line"
3786 #endif
3787 #ifndef DEBUG_LTO_LINE_SECTION
3788 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3789 #endif
3790 #ifndef DEBUG_DWO_LINE_SECTION
3791 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3792 #endif
3793 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3794 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3795 #endif
3796 #ifndef DEBUG_LOC_SECTION
3797 #define DEBUG_LOC_SECTION ".debug_loc"
3798 #endif
3799 #ifndef DEBUG_DWO_LOC_SECTION
3800 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3801 #endif
3802 #ifndef DEBUG_LOCLISTS_SECTION
3803 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3804 #endif
3805 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3806 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3807 #endif
3808 #ifndef DEBUG_PUBNAMES_SECTION
3809 #define DEBUG_PUBNAMES_SECTION \
3810 ((debug_generate_pub_sections == 2) \
3811 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3812 #endif
3813 #ifndef DEBUG_PUBTYPES_SECTION
3814 #define DEBUG_PUBTYPES_SECTION \
3815 ((debug_generate_pub_sections == 2) \
3816 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3817 #endif
3818 #ifndef DEBUG_STR_OFFSETS_SECTION
3819 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3820 #endif
3821 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3822 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3823 #endif
3824 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
3825 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
3826 #endif
3827 #ifndef DEBUG_STR_SECTION
3828 #define DEBUG_STR_SECTION ".debug_str"
3829 #endif
3830 #ifndef DEBUG_LTO_STR_SECTION
3831 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
3832 #endif
3833 #ifndef DEBUG_STR_DWO_SECTION
3834 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3835 #endif
3836 #ifndef DEBUG_LTO_STR_DWO_SECTION
3837 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
3838 #endif
3839 #ifndef DEBUG_RANGES_SECTION
3840 #define DEBUG_RANGES_SECTION ".debug_ranges"
3841 #endif
3842 #ifndef DEBUG_RNGLISTS_SECTION
3843 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3844 #endif
3845 #ifndef DEBUG_LINE_STR_SECTION
3846 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3847 #endif
3848 #ifndef DEBUG_LTO_LINE_STR_SECTION
3849 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
3850 #endif
3851
3852 /* Standard ELF section names for compiled code and data. */
3853 #ifndef TEXT_SECTION_NAME
3854 #define TEXT_SECTION_NAME ".text"
3855 #endif
3856
3857 /* Section flags for .debug_str section. */
3858 #define DEBUG_STR_SECTION_FLAGS \
3859 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3860 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3861 : SECTION_DEBUG)
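/* The SECTION_MERGE | SECTION_STRINGS | 1 combination marks the section as
   containing mergeable NUL-terminated strings with an entity size of one
   byte, so the linker can deduplicate identical debug strings across object
   files when the assembler supports SHF_MERGE.  */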
3862
3863 /* Section flags for .debug_str.dwo section. */
3864 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3865
3866 /* Attribute used to refer to the macro section. */
3867 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
3868 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
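/* DWARF 5 standardizes DW_AT_macros; for earlier DWARF versions under
   -gstrict-dwarf only DW_AT_macro_info is available, and otherwise the GNU
   extension DW_AT_GNU_macros is used.  */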
3869
3870 /* Labels we insert at the beginning of sections so we can reference
3871 them instead of the section names themselves. */

3872
3873 #ifndef TEXT_SECTION_LABEL
3874 #define TEXT_SECTION_LABEL "Ltext"
3875 #endif
3876 #ifndef COLD_TEXT_SECTION_LABEL
3877 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3878 #endif
3879 #ifndef DEBUG_LINE_SECTION_LABEL
3880 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3881 #endif
3882 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3883 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3884 #endif
3885 #ifndef DEBUG_INFO_SECTION_LABEL
3886 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3887 #endif
3888 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3889 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3890 #endif
3891 #ifndef DEBUG_ABBREV_SECTION_LABEL
3892 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3893 #endif
3894 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3895 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3896 #endif
3897 #ifndef DEBUG_ADDR_SECTION_LABEL
3898 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3899 #endif
3900 #ifndef DEBUG_LOC_SECTION_LABEL
3901 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3902 #endif
3903 #ifndef DEBUG_RANGES_SECTION_LABEL
3904 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3905 #endif
3906 #ifndef DEBUG_MACINFO_SECTION_LABEL
3907 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3908 #endif
3909 #ifndef DEBUG_MACRO_SECTION_LABEL
3910 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3911 #endif
3912 #define SKELETON_COMP_DIE_ABBREV 1
3913 #define SKELETON_TYPE_DIE_ABBREV 2
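/* These are the fixed abbreviation codes used for the skeleton
   compilation-unit and type-unit DIEs that stay in the main object file
   when -gsplit-dwarf moves the bulk of the debug info into .dwo
   sections.  */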
3914
3915 /* Definitions of defaults for formats and names of various special
3916 (artificial) labels which may be generated within this file (when the -g
3917 option is used and DWARF2_DEBUGGING_INFO is in effect).
3918 If necessary, these may be overridden from within the tm.h file, but
3919 typically, overriding these defaults is unnecessary. */
3920
3921 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3922 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3923 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3924 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3925 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3926 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3927 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3928 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3929 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3930 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3931 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3932 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3933 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3934 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3935 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3936
3937 #ifndef TEXT_END_LABEL
3938 #define TEXT_END_LABEL "Letext"
3939 #endif
3940 #ifndef COLD_END_LABEL
3941 #define COLD_END_LABEL "Letext_cold"
3942 #endif
3943 #ifndef BLOCK_BEGIN_LABEL
3944 #define BLOCK_BEGIN_LABEL "LBB"
3945 #endif
3946 #ifndef BLOCK_END_LABEL
3947 #define BLOCK_END_LABEL "LBE"
3948 #endif
3949 #ifndef LINE_CODE_LABEL
3950 #define LINE_CODE_LABEL "LM"
3951 #endif
3952
3953 \f
3954 /* Return the root of the DIEs built for the current compilation unit. */
3955 static dw_die_ref
3956 comp_unit_die (void)
3957 {
3958 if (!single_comp_unit_die)
3959 single_comp_unit_die = gen_compile_unit_die (NULL);
3960 return single_comp_unit_die;
3961 }
3962
3963 /* We allow a language front-end to designate a function that is to be
3964 called to "demangle" any name before it is put into a DIE. */
3965
3966 static const char *(*demangle_name_func) (const char *);
3967
3968 void
3969 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3970 {
3971 demangle_name_func = func;
3972 }
3973
3974 /* Test if an rtl node (a REG, or a SUBREG of a REG) refers to a pseudo register. */
3975
3976 static inline int
3977 is_pseudo_reg (const_rtx rtl)
3978 {
3979 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3980 || (GET_CODE (rtl) == SUBREG
3981 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3982 }
3983
3984 /* Return a reference to a type, with its const and volatile qualifiers
3985 removed. */
3986
3987 static inline tree
3988 type_main_variant (tree type)
3989 {
3990 type = TYPE_MAIN_VARIANT (type);
3991
3992 /* ??? There really should be only one main variant among any group of
3993 variants of a given type (and all of the MAIN_VARIANT values for all
3994 members of the group should point to that one type) but sometimes the C
3995 front-end messes this up for array types, so we work around that bug
3996 here. */
3997 if (TREE_CODE (type) == ARRAY_TYPE)
3998 while (type != TYPE_MAIN_VARIANT (type))
3999 type = TYPE_MAIN_VARIANT (type);
4000
4001 return type;
4002 }
4003
4004 /* Return nonzero if the given type node represents a tagged type. */
4005
4006 static inline int
4007 is_tagged_type (const_tree type)
4008 {
4009 enum tree_code code = TREE_CODE (type);
4010
4011 return (code == RECORD_TYPE || code == UNION_TYPE
4012 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4013 }
4014
4015 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4016
4017 static void
4018 get_ref_die_offset_label (char *label, dw_die_ref ref)
4019 {
4020 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4021 }
4022
4023 /* Return die_offset of a DIE reference to a base type. */
4024
4025 static unsigned long int
4026 get_base_type_offset (dw_die_ref ref)
4027 {
4028 if (ref->die_offset)
4029 return ref->die_offset;
4030 if (comp_unit_die ()->die_abbrev)
4031 {
4032 calc_base_type_die_sizes ();
4033 gcc_assert (ref->die_offset);
4034 }
4035 return ref->die_offset;
4036 }
4037
4038 /* Return die_offset of a DIE reference other than a base type. */
4039
4040 static unsigned long int
4041 get_ref_die_offset (dw_die_ref ref)
4042 {
4043 gcc_assert (ref->die_offset);
4044 return ref->die_offset;
4045 }
4046
4047 /* Convert a DIE tag into its string name. */
4048
4049 static const char *
4050 dwarf_tag_name (unsigned int tag)
4051 {
4052 const char *name = get_DW_TAG_name (tag);
4053
4054 if (name != NULL)
4055 return name;
4056
4057 return "DW_TAG_<unknown>";
4058 }
4059
4060 /* Convert a DWARF attribute code into its string name. */
4061
4062 static const char *
4063 dwarf_attr_name (unsigned int attr)
4064 {
4065 const char *name;
4066
4067 switch (attr)
4068 {
4069 #if VMS_DEBUGGING_INFO
4070 case DW_AT_HP_prologue:
4071 return "DW_AT_HP_prologue";
4072 #else
4073 case DW_AT_MIPS_loop_unroll_factor:
4074 return "DW_AT_MIPS_loop_unroll_factor";
4075 #endif
4076
4077 #if VMS_DEBUGGING_INFO
4078 case DW_AT_HP_epilogue:
4079 return "DW_AT_HP_epilogue";
4080 #else
4081 case DW_AT_MIPS_stride:
4082 return "DW_AT_MIPS_stride";
4083 #endif
4084 }
4085
4086 name = get_DW_AT_name (attr);
4087
4088 if (name != NULL)
4089 return name;
4090
4091 return "DW_AT_<unknown>";
4092 }
4093
4094 /* Convert a DWARF value form code into its string name. */
4095
4096 static const char *
4097 dwarf_form_name (unsigned int form)
4098 {
4099 const char *name = get_DW_FORM_name (form);
4100
4101 if (name != NULL)
4102 return name;
4103
4104 return "DW_FORM_<unknown>";
4105 }
4106 \f
4107 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4108 instance of an inlined instance of a decl which is local to an inline
4109 function, so we have to trace all the way back through the origin chain
4110 to find out what sort of node actually served as the original seed for
4111 the given decl. */
4112
4113 static tree
4114 decl_ultimate_origin (const_tree decl)
4115 {
4116 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4117 return NULL_TREE;
4118
4119 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4120 we're trying to output the abstract instance of this function. */
4121 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4122 return NULL_TREE;
4123
4124 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4125 most distant ancestor, this should never happen. */
4126 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4127
4128 return DECL_ABSTRACT_ORIGIN (decl);
4129 }
4130
4131 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4132 of a virtual function may refer to a base class, so we check the 'this'
4133 parameter. */
4134
4135 static tree
4136 decl_class_context (tree decl)
4137 {
4138 tree context = NULL_TREE;
4139
4140 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4141 context = DECL_CONTEXT (decl);
4142 else
4143 context = TYPE_MAIN_VARIANT
4144 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4145
4146 if (context && !TYPE_P (context))
4147 context = NULL_TREE;
4148
4149 return context;
4150 }
4151 \f
4152 /* Add an attribute/value pair to a DIE. */
4153
4154 static inline void
4155 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4156 {
4157 /* Maybe this should be an assert? */
4158 if (die == NULL)
4159 return;
4160
4161 if (flag_checking)
4162 {
4163 /* Check we do not add duplicate attrs. Can't use get_AT here
4164 because that recurses to the specification/abstract origin DIE. */
4165 dw_attr_node *a;
4166 unsigned ix;
4167 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4168 gcc_assert (a->dw_attr != attr->dw_attr);
4169 }
4170
4171 vec_safe_reserve (die->die_attr, 1);
4172 vec_safe_push (die->die_attr, *attr);
4173 }
4174
4175 static inline enum dw_val_class
4176 AT_class (dw_attr_node *a)
4177 {
4178 return a->dw_attr_val.val_class;
4179 }
4180
4181 /* Return the index for any attribute that will be referenced with a
4182 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4183 are stored in dw_attr_val.v.val_str for reference counting
4184 pruning. */
4185
4186 static inline unsigned int
4187 AT_index (dw_attr_node *a)
4188 {
4189 if (AT_class (a) == dw_val_class_str)
4190 return a->dw_attr_val.v.val_str->index;
4191 else if (a->dw_attr_val.val_entry != NULL)
4192 return a->dw_attr_val.val_entry->index;
4193 return NOT_INDEXED;
4194 }
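/* Note that addr_table_entry indices are only assigned at output time (see
   index_addr_table_entry below); until then they hold NO_INDEX_ASSIGNED.
   NOT_INDEXED here means the attribute will not use an indexed form at
   all.  */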
4195
4196 /* Add a flag value attribute to a DIE. */
4197
4198 static inline void
4199 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4200 {
4201 dw_attr_node attr;
4202
4203 attr.dw_attr = attr_kind;
4204 attr.dw_attr_val.val_class = dw_val_class_flag;
4205 attr.dw_attr_val.val_entry = NULL;
4206 attr.dw_attr_val.v.val_flag = flag;
4207 add_dwarf_attr (die, &attr);
4208 }
4209
4210 static inline unsigned
4211 AT_flag (dw_attr_node *a)
4212 {
4213 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4214 return a->dw_attr_val.v.val_flag;
4215 }
4216
4217 /* Add a signed integer attribute value to a DIE. */
4218
4219 static inline void
4220 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4221 {
4222 dw_attr_node attr;
4223
4224 attr.dw_attr = attr_kind;
4225 attr.dw_attr_val.val_class = dw_val_class_const;
4226 attr.dw_attr_val.val_entry = NULL;
4227 attr.dw_attr_val.v.val_int = int_val;
4228 add_dwarf_attr (die, &attr);
4229 }
4230
4231 static inline HOST_WIDE_INT
4232 AT_int (dw_attr_node *a)
4233 {
4234 gcc_assert (a && (AT_class (a) == dw_val_class_const
4235 || AT_class (a) == dw_val_class_const_implicit));
4236 return a->dw_attr_val.v.val_int;
4237 }
4238
4239 /* Add an unsigned integer attribute value to a DIE. */
4240
4241 static inline void
4242 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4243 unsigned HOST_WIDE_INT unsigned_val)
4244 {
4245 dw_attr_node attr;
4246
4247 attr.dw_attr = attr_kind;
4248 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4249 attr.dw_attr_val.val_entry = NULL;
4250 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4251 add_dwarf_attr (die, &attr);
4252 }
4253
4254 static inline unsigned HOST_WIDE_INT
4255 AT_unsigned (dw_attr_node *a)
4256 {
4257 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4258 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4259 return a->dw_attr_val.v.val_unsigned;
4260 }
4261
4262 /* Add an unsigned wide integer attribute value to a DIE. */
4263
4264 static inline void
4265 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4266 const wide_int& w)
4267 {
4268 dw_attr_node attr;
4269
4270 attr.dw_attr = attr_kind;
4271 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4272 attr.dw_attr_val.val_entry = NULL;
4273 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4274 *attr.dw_attr_val.v.val_wide = w;
4275 add_dwarf_attr (die, &attr);
4276 }
4277
4278 /* Add an unsigned double integer attribute value to a DIE. */
4279
4280 static inline void
4281 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4282 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4283 {
4284 dw_attr_node attr;
4285
4286 attr.dw_attr = attr_kind;
4287 attr.dw_attr_val.val_class = dw_val_class_const_double;
4288 attr.dw_attr_val.val_entry = NULL;
4289 attr.dw_attr_val.v.val_double.high = high;
4290 attr.dw_attr_val.v.val_double.low = low;
4291 add_dwarf_attr (die, &attr);
4292 }
4293
4294 /* Add a vector-of-bytes attribute value (e.g. a floating-point constant) to a DIE. */
4295
4296 static inline void
4297 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4298 unsigned int length, unsigned int elt_size, unsigned char *array)
4299 {
4300 dw_attr_node attr;
4301
4302 attr.dw_attr = attr_kind;
4303 attr.dw_attr_val.val_class = dw_val_class_vec;
4304 attr.dw_attr_val.val_entry = NULL;
4305 attr.dw_attr_val.v.val_vec.length = length;
4306 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4307 attr.dw_attr_val.v.val_vec.array = array;
4308 add_dwarf_attr (die, &attr);
4309 }
4310
4311 /* Add an 8-byte data attribute value to a DIE. */
4312
4313 static inline void
4314 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4315 unsigned char data8[8])
4316 {
4317 dw_attr_node attr;
4318
4319 attr.dw_attr = attr_kind;
4320 attr.dw_attr_val.val_class = dw_val_class_data8;
4321 attr.dw_attr_val.val_entry = NULL;
4322 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4323 add_dwarf_attr (die, &attr);
4324 }
4325
4326 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4327 dwarf_split_debug_info, address attributes in dies destined for the
4328 final executable have force_direct set to avoid using indexed
4329 references. */
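/* Note: for DWARF 4 and later, DW_AT_high_pc is emitted as a constant
   offset from DW_AT_low_pc rather than as an address, which is why the
   value class switches to dw_val_class_high_pc below.  */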
4330
4331 static inline void
4332 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4333 bool force_direct)
4334 {
4335 dw_attr_node attr;
4336 char * lbl_id;
4337
4338 lbl_id = xstrdup (lbl_low);
4339 attr.dw_attr = DW_AT_low_pc;
4340 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4341 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4342 if (dwarf_split_debug_info && !force_direct)
4343 attr.dw_attr_val.val_entry
4344 = add_addr_table_entry (lbl_id, ate_kind_label);
4345 else
4346 attr.dw_attr_val.val_entry = NULL;
4347 add_dwarf_attr (die, &attr);
4348
4349 attr.dw_attr = DW_AT_high_pc;
4350 if (dwarf_version < 4)
4351 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4352 else
4353 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4354 lbl_id = xstrdup (lbl_high);
4355 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4356 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4357 && dwarf_split_debug_info && !force_direct)
4358 attr.dw_attr_val.val_entry
4359 = add_addr_table_entry (lbl_id, ate_kind_label);
4360 else
4361 attr.dw_attr_val.val_entry = NULL;
4362 add_dwarf_attr (die, &attr);
4363 }
4364
4365 /* Hash and equality functions for debug_str_hash. */
4366
4367 hashval_t
4368 indirect_string_hasher::hash (indirect_string_node *x)
4369 {
4370 return htab_hash_string (x->str);
4371 }
4372
4373 bool
4374 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4375 {
4376 return strcmp (x1->str, x2) == 0;
4377 }
4378
4379 /* Add STR to the given string hash table. */
4380
4381 static struct indirect_string_node *
4382 find_AT_string_in_table (const char *str,
4383 hash_table<indirect_string_hasher> *table)
4384 {
4385 struct indirect_string_node *node;
4386
4387 indirect_string_node **slot
4388 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4389 if (*slot == NULL)
4390 {
4391 node = ggc_cleared_alloc<indirect_string_node> ();
4392 node->str = ggc_strdup (str);
4393 *slot = node;
4394 }
4395 else
4396 node = *slot;
4397
4398 node->refcount++;
4399 return node;
4400 }
4401
4402 /* Add STR to the indirect string hash table. */
4403
4404 static struct indirect_string_node *
4405 find_AT_string (const char *str)
4406 {
4407 if (! debug_str_hash)
4408 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4409
4410 return find_AT_string_in_table (str, debug_str_hash);
4411 }
4412
4413 /* Add a string attribute value to a DIE. */
4414
4415 static inline void
4416 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4417 {
4418 dw_attr_node attr;
4419 struct indirect_string_node *node;
4420
4421 node = find_AT_string (str);
4422
4423 attr.dw_attr = attr_kind;
4424 attr.dw_attr_val.val_class = dw_val_class_str;
4425 attr.dw_attr_val.val_entry = NULL;
4426 attr.dw_attr_val.v.val_str = node;
4427 add_dwarf_attr (die, &attr);
4428 }
4429
4430 static inline const char *
4431 AT_string (dw_attr_node *a)
4432 {
4433 gcc_assert (a && AT_class (a) == dw_val_class_str);
4434 return a->dw_attr_val.v.val_str->str;
4435 }
4436
4437 /* Call this function directly to bypass AT_string_form's logic to put
4438 the string inline in the die. */
4439
4440 static void
4441 set_indirect_string (struct indirect_string_node *node)
4442 {
4443 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4444 /* Already indirect is a no op. */
4445 if (node->form == DW_FORM_strp
4446 || node->form == DW_FORM_line_strp
4447 || node->form == DW_FORM_GNU_str_index)
4448 {
4449 gcc_assert (node->label);
4450 return;
4451 }
4452 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4453 ++dw2_string_counter;
4454 node->label = xstrdup (label);
4455
4456 if (!dwarf_split_debug_info)
4457 {
4458 node->form = DW_FORM_strp;
4459 node->index = NOT_INDEXED;
4460 }
4461 else
4462 {
4463 node->form = DW_FORM_GNU_str_index;
4464 node->index = NO_INDEX_ASSIGNED;
4465 }
4466 }
4467
4468 /* A helper function for dwarf2out_finish, called to reset indirect
4469 string decisions done for early LTO dwarf output before fat object
4470 dwarf output. */
4471
4472 int
4473 reset_indirect_string (indirect_string_node **h, void *)
4474 {
4475 struct indirect_string_node *node = *h;
4476 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4477 {
4478 free (node->label);
4479 node->label = NULL;
4480 node->form = (dwarf_form) 0;
4481 node->index = 0;
4482 }
4483 return 1;
4484 }
4485
4486 /* Find out whether a string should be output inline in DIE
4487 or out-of-line in .debug_str section. */
4488
4489 static enum dwarf_form
4490 find_string_form (struct indirect_string_node *node)
4491 {
4492 unsigned int len;
4493
4494 if (node->form)
4495 return node->form;
4496
4497 len = strlen (node->str) + 1;
4498
4499 /* If the string is shorter or equal to the size of the reference, it is
4500 always better to put it inline. */
4501 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4502 return node->form = DW_FORM_string;
4503
4504 /* If we cannot expect the linker to merge strings in the .debug_str
4505 section, only put the string into .debug_str if doing so pays off even
4506 within this single module. */
4507 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4508 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4509 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4510 return node->form = DW_FORM_string;
4511
4512 set_indirect_string (node);
4513
4514 return node->form;
4515 }
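/* For example, with 4-byte section offsets (DWARF_OFFSET_SIZE == 4) a
   three-character string plus its terminating NUL occupies exactly 4 bytes,
   so it is always emitted inline as DW_FORM_string; longer, frequently
   referenced strings go to .debug_str instead.  */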
4516
4517 /* Find out whether the string referenced from the attribute should be
4518 output inline in DIE or out-of-line in .debug_str section. */
4519
4520 static enum dwarf_form
4521 AT_string_form (dw_attr_node *a)
4522 {
4523 gcc_assert (a && AT_class (a) == dw_val_class_str);
4524 return find_string_form (a->dw_attr_val.v.val_str);
4525 }
4526
4527 /* Add a DIE reference attribute value to a DIE. */
4528
4529 static inline void
4530 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4531 {
4532 dw_attr_node attr;
4533 gcc_checking_assert (targ_die != NULL);
4534
4535 /* With LTO we can end up trying to reference something we didn't create
4536 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4537 if (targ_die == NULL)
4538 return;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4544 attr.dw_attr_val.v.val_die_ref.external = 0;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Change DIE reference REF to point to NEW_DIE instead. */
4549
4550 static inline void
4551 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4552 {
4553 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4554 ref->dw_attr_val.v.val_die_ref.die = new_die;
4555 ref->dw_attr_val.v.val_die_ref.external = 0;
4556 }
4557
4558 /* Add an AT_specification attribute to a DIE, and also make the back
4559 pointer from the specification to the definition. */
4560
4561 static inline void
4562 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4563 {
4564 add_AT_die_ref (die, DW_AT_specification, targ_die);
4565 gcc_assert (!targ_die->die_definition);
4566 targ_die->die_definition = die;
4567 }
4568
4569 static inline dw_die_ref
4570 AT_ref (dw_attr_node *a)
4571 {
4572 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4573 return a->dw_attr_val.v.val_die_ref.die;
4574 }
4575
4576 static inline int
4577 AT_ref_external (dw_attr_node *a)
4578 {
4579 if (a && AT_class (a) == dw_val_class_die_ref)
4580 return a->dw_attr_val.v.val_die_ref.external;
4581
4582 return 0;
4583 }
4584
4585 static inline void
4586 set_AT_ref_external (dw_attr_node *a, int i)
4587 {
4588 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4589 a->dw_attr_val.v.val_die_ref.external = i;
4590 }
4591
4592 /* Add an FDE reference attribute value to a DIE. */
4593
4594 static inline void
4595 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4596 {
4597 dw_attr_node attr;
4598
4599 attr.dw_attr = attr_kind;
4600 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4601 attr.dw_attr_val.val_entry = NULL;
4602 attr.dw_attr_val.v.val_fde_index = targ_fde;
4603 add_dwarf_attr (die, &attr);
4604 }
4605
4606 /* Add a location description attribute value to a DIE. */
4607
4608 static inline void
4609 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4610 {
4611 dw_attr_node attr;
4612
4613 attr.dw_attr = attr_kind;
4614 attr.dw_attr_val.val_class = dw_val_class_loc;
4615 attr.dw_attr_val.val_entry = NULL;
4616 attr.dw_attr_val.v.val_loc = loc;
4617 add_dwarf_attr (die, &attr);
4618 }
4619
4620 static inline dw_loc_descr_ref
4621 AT_loc (dw_attr_node *a)
4622 {
4623 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4624 return a->dw_attr_val.v.val_loc;
4625 }
4626
4627 static inline void
4628 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4629 {
4630 dw_attr_node attr;
4631
4632 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4633 return;
4634
4635 attr.dw_attr = attr_kind;
4636 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4637 attr.dw_attr_val.val_entry = NULL;
4638 attr.dw_attr_val.v.val_loc_list = loc_list;
4639 add_dwarf_attr (die, &attr);
4640 have_location_lists = true;
4641 }
4642
4643 static inline dw_loc_list_ref
4644 AT_loc_list (dw_attr_node *a)
4645 {
4646 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4647 return a->dw_attr_val.v.val_loc_list;
4648 }
4649
4650 static inline dw_loc_list_ref *
4651 AT_loc_list_ptr (dw_attr_node *a)
4652 {
4653 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4654 return &a->dw_attr_val.v.val_loc_list;
4655 }
4656
4657 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4658 {
4659 static hashval_t hash (addr_table_entry *);
4660 static bool equal (addr_table_entry *, addr_table_entry *);
4661 };
4662
4663 /* Table of entries into the .debug_addr section. */
4664
4665 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4666
4667 /* Hash an address_table_entry. */
4668
4669 hashval_t
4670 addr_hasher::hash (addr_table_entry *a)
4671 {
4672 inchash::hash hstate;
4673 switch (a->kind)
4674 {
4675 case ate_kind_rtx:
4676 hstate.add_int (0);
4677 break;
4678 case ate_kind_rtx_dtprel:
4679 hstate.add_int (1);
4680 break;
4681 case ate_kind_label:
4682 return htab_hash_string (a->addr.label);
4683 default:
4684 gcc_unreachable ();
4685 }
4686 inchash::add_rtx (a->addr.rtl, hstate);
4687 return hstate.end ();
4688 }
4689
4690 /* Determine equality for two address_table_entries. */
4691
4692 bool
4693 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4694 {
4695 if (a1->kind != a2->kind)
4696 return 0;
4697 switch (a1->kind)
4698 {
4699 case ate_kind_rtx:
4700 case ate_kind_rtx_dtprel:
4701 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4702 case ate_kind_label:
4703 return strcmp (a1->addr.label, a2->addr.label) == 0;
4704 default:
4705 gcc_unreachable ();
4706 }
4707 }
4708
4709 /* Initialize an addr_table_entry. */
4710
4711 void
4712 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4713 {
4714 e->kind = kind;
4715 switch (kind)
4716 {
4717 case ate_kind_rtx:
4718 case ate_kind_rtx_dtprel:
4719 e->addr.rtl = (rtx) addr;
4720 break;
4721 case ate_kind_label:
4722 e->addr.label = (char *) addr;
4723 break;
4724 }
4725 e->refcount = 0;
4726 e->index = NO_INDEX_ASSIGNED;
4727 }
4728
4729 /* Add an address table entry of kind KIND for ADDR to the table,
4730 creating it if necessary. Defer setting an index until output time. */
4731
4732 static addr_table_entry *
4733 add_addr_table_entry (void *addr, enum ate_kind kind)
4734 {
4735 addr_table_entry *node;
4736 addr_table_entry finder;
4737
4738 gcc_assert (dwarf_split_debug_info);
4739 if (! addr_index_table)
4740 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4741 init_addr_table_entry (&finder, kind, addr);
4742 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4743
4744 if (*slot == HTAB_EMPTY_ENTRY)
4745 {
4746 node = ggc_cleared_alloc<addr_table_entry> ();
4747 init_addr_table_entry (node, kind, addr);
4748 *slot = node;
4749 }
4750 else
4751 node = *slot;
4752
4753 node->refcount++;
4754 return node;
4755 }
4756
4757 /* Remove an entry from the addr table by decrementing its refcount.
4758 Strictly, decrementing the refcount would be enough, but the
4759 assertion that the entry is actually in the table has found
4760 bugs. */
4761
4762 static void
4763 remove_addr_table_entry (addr_table_entry *entry)
4764 {
4765 gcc_assert (dwarf_split_debug_info && addr_index_table);
4766 /* After an index is assigned, the table is frozen. */
4767 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4768 entry->refcount--;
4769 }
4770
4771 /* Given a location list, remove all addresses it refers to from the
4772 address_table. */
4773
4774 static void
4775 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4776 {
4777 for (; descr; descr = descr->dw_loc_next)
4778 if (descr->dw_loc_oprnd1.val_entry != NULL)
4779 {
4780 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4781 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4782 }
4783 }
4784
4785 /* A helper function for dwarf2out_finish called through
4786 htab_traverse. Assign an addr_table_entry its index. All entries
4787 must be collected into the table when this function is called,
4788 because the indexing code relies on htab_traverse to traverse nodes
4789 in the same order for each run. */
4790
4791 int
4792 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4793 {
4794 addr_table_entry *node = *h;
4795
4796 /* Don't index unreferenced nodes. */
4797 if (node->refcount == 0)
4798 return 1;
4799
4800 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4801 node->index = *index;
4802 *index += 1;
4803
4804 return 1;
4805 }
4806
4807 /* Add an address constant attribute value to a DIE. When using
4808 dwarf_split_debug_info, address attributes in dies destined for the
4809 final executable should be direct references--setting the parameter
4810 force_direct ensures this behavior. */
4811
4812 static inline void
4813 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4814 bool force_direct)
4815 {
4816 dw_attr_node attr;
4817
4818 attr.dw_attr = attr_kind;
4819 attr.dw_attr_val.val_class = dw_val_class_addr;
4820 attr.dw_attr_val.v.val_addr = addr;
4821 if (dwarf_split_debug_info && !force_direct)
4822 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4823 else
4824 attr.dw_attr_val.val_entry = NULL;
4825 add_dwarf_attr (die, &attr);
4826 }
4827
4828 /* Get the RTX from an address DIE attribute. */
4829
4830 static inline rtx
4831 AT_addr (dw_attr_node *a)
4832 {
4833 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4834 return a->dw_attr_val.v.val_addr;
4835 }
4836
4837 /* Add a file attribute value to a DIE. */
4838
4839 static inline void
4840 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4841 struct dwarf_file_data *fd)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_file;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_file = fd;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Get the dwarf_file_data from a file DIE attribute. */
4853
4854 static inline struct dwarf_file_data *
4855 AT_file (dw_attr_node *a)
4856 {
4857 gcc_assert (a && (AT_class (a) == dw_val_class_file
4858 || AT_class (a) == dw_val_class_file_implicit));
4859 return a->dw_attr_val.v.val_file;
4860 }
4861
4862 /* Add a vms delta attribute value to a DIE. */
4863
4864 static inline void
4865 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4866 const char *lbl1, const char *lbl2)
4867 {
4868 dw_attr_node attr;
4869
4870 attr.dw_attr = attr_kind;
4871 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4872 attr.dw_attr_val.val_entry = NULL;
4873 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4874 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4875 add_dwarf_attr (die, &attr);
4876 }
4877
4878 /* Add a label identifier attribute value to a DIE. */
4879
4880 static inline void
4881 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4882 const char *lbl_id)
4883 {
4884 dw_attr_node attr;
4885
4886 attr.dw_attr = attr_kind;
4887 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4888 attr.dw_attr_val.val_entry = NULL;
4889 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4890 if (dwarf_split_debug_info)
4891 attr.dw_attr_val.val_entry
4892 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4893 ate_kind_label);
4894 add_dwarf_attr (die, &attr);
4895 }
4896
4897 /* Add a section offset attribute value to a DIE, an offset into the
4898 debug_line section. */
4899
4900 static inline void
4901 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4902 const char *label)
4903 {
4904 dw_attr_node attr;
4905
4906 attr.dw_attr = attr_kind;
4907 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4908 attr.dw_attr_val.val_entry = NULL;
4909 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4910 add_dwarf_attr (die, &attr);
4911 }
4912
4913 /* Add a section offset attribute value to a DIE, an offset into the
4914 debug_loclists section. */
4915
4916 static inline void
4917 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4918 const char *label)
4919 {
4920 dw_attr_node attr;
4921
4922 attr.dw_attr = attr_kind;
4923 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4924 attr.dw_attr_val.val_entry = NULL;
4925 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4926 add_dwarf_attr (die, &attr);
4927 }
4928
4929 /* Add a section offset attribute value to a DIE, an offset into the
4930 debug_macinfo section. */
4931
4932 static inline void
4933 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4934 const char *label)
4935 {
4936 dw_attr_node attr;
4937
4938 attr.dw_attr = attr_kind;
4939 attr.dw_attr_val.val_class = dw_val_class_macptr;
4940 attr.dw_attr_val.val_entry = NULL;
4941 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4942 add_dwarf_attr (die, &attr);
4943 }
4944
4945 /* Add an offset attribute value to a DIE. */
4946
4947 static inline void
4948 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4949 unsigned HOST_WIDE_INT offset)
4950 {
4951 dw_attr_node attr;
4952
4953 attr.dw_attr = attr_kind;
4954 attr.dw_attr_val.val_class = dw_val_class_offset;
4955 attr.dw_attr_val.val_entry = NULL;
4956 attr.dw_attr_val.v.val_offset = offset;
4957 add_dwarf_attr (die, &attr);
4958 }
4959
4960 /* Add a range_list attribute value to a DIE. When using
4961 dwarf_split_debug_info, address attributes in dies destined for the
4962 final executable should be direct references--setting the parameter
4963 force_direct ensures this behavior. */
4964
4965 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4966 #define RELOCATED_OFFSET (NULL)
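/* These are sentinel values for dw_attr_val.val_entry, not real
   addr_table_entry pointers; they are only compared against, never
   dereferenced, when output_range_list_offset decides how to emit the
   range list offset.  */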
4967
4968 static void
4969 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4970 long unsigned int offset, bool force_direct)
4971 {
4972 dw_attr_node attr;
4973
4974 attr.dw_attr = attr_kind;
4975 attr.dw_attr_val.val_class = dw_val_class_range_list;
4976 /* For the range_list attribute, use val_entry to store whether the
4977 offset should follow split-debug-info or normal semantics. This
4978 value is read in output_range_list_offset. */
4979 if (dwarf_split_debug_info && !force_direct)
4980 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4981 else
4982 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4983 attr.dw_attr_val.v.val_offset = offset;
4984 add_dwarf_attr (die, &attr);
4985 }
4986
4987 /* Return the start label of a delta attribute. */
4988
4989 static inline const char *
4990 AT_vms_delta1 (dw_attr_node *a)
4991 {
4992 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4993 return a->dw_attr_val.v.val_vms_delta.lbl1;
4994 }
4995
4996 /* Return the end label of a delta attribute. */
4997
4998 static inline const char *
4999 AT_vms_delta2 (dw_attr_node *a)
5000 {
5001 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5002 return a->dw_attr_val.v.val_vms_delta.lbl2;
5003 }
5004
5005 static inline const char *
5006 AT_lbl (dw_attr_node *a)
5007 {
5008 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5009 || AT_class (a) == dw_val_class_lineptr
5010 || AT_class (a) == dw_val_class_macptr
5011 || AT_class (a) == dw_val_class_loclistsptr
5012 || AT_class (a) == dw_val_class_high_pc));
5013 return a->dw_attr_val.v.val_lbl_id;
5014 }
5015
5016 /* Get the attribute of type attr_kind. */
5017
5018 static dw_attr_node *
5019 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5020 {
5021 dw_attr_node *a;
5022 unsigned ix;
5023 dw_die_ref spec = NULL;
5024
5025 if (! die)
5026 return NULL;
5027
5028 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5029 if (a->dw_attr == attr_kind)
5030 return a;
5031 else if (a->dw_attr == DW_AT_specification
5032 || a->dw_attr == DW_AT_abstract_origin)
5033 spec = AT_ref (a);
5034
5035 if (spec)
5036 return get_AT (spec, attr_kind);
5037
5038 return NULL;
5039 }
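/* For example, asking for DW_AT_name on a DIE that carries only a
   DW_AT_specification reference returns the name attribute found on the
   referenced specification DIE, thanks to the recursion above.  */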
5040
5041 /* Returns the parent of the declaration of DIE. */
5042
5043 static dw_die_ref
5044 get_die_parent (dw_die_ref die)
5045 {
5046 dw_die_ref t;
5047
5048 if (!die)
5049 return NULL;
5050
5051 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5052 || (t = get_AT_ref (die, DW_AT_specification)))
5053 die = t;
5054
5055 return die->die_parent;
5056 }
5057
5058 /* Return the "low pc" attribute value, typically associated with a subprogram
5059 DIE. Return null if the "low pc" attribute is either not present, or if it
5060 cannot be represented as an assembler label identifier. */
5061
5062 static inline const char *
5063 get_AT_low_pc (dw_die_ref die)
5064 {
5065 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5066
5067 return a ? AT_lbl (a) : NULL;
5068 }
5069
5070 /* Return the "high pc" attribute value, typically associated with a subprogram
5071 DIE. Return null if the "high pc" attribute is either not present, or if it
5072 cannot be represented as an assembler label identifier. */
5073
5074 static inline const char *
5075 get_AT_hi_pc (dw_die_ref die)
5076 {
5077 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5078
5079 return a ? AT_lbl (a) : NULL;
5080 }
5081
5082 /* Return the value of the string attribute designated by ATTR_KIND, or
5083 NULL if it is not present. */
5084
5085 static inline const char *
5086 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5087 {
5088 dw_attr_node *a = get_AT (die, attr_kind);
5089
5090 return a ? AT_string (a) : NULL;
5091 }
5092
5093 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5094 if it is not present. */
5095
5096 static inline int
5097 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5098 {
5099 dw_attr_node *a = get_AT (die, attr_kind);
5100
5101 return a ? AT_flag (a) : 0;
5102 }
5103
5104 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5105 if it is not present. */
5106
5107 static inline unsigned
5108 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5109 {
5110 dw_attr_node *a = get_AT (die, attr_kind);
5111
5112 return a ? AT_unsigned (a) : 0;
5113 }
5114
5115 static inline dw_die_ref
5116 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5117 {
5118 dw_attr_node *a = get_AT (die, attr_kind);
5119
5120 return a ? AT_ref (a) : NULL;
5121 }
5122
5123 static inline struct dwarf_file_data *
5124 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5125 {
5126 dw_attr_node *a = get_AT (die, attr_kind);
5127
5128 return a ? AT_file (a) : NULL;
5129 }
5130
5131 /* Return TRUE if the language is C++. */
5132
5133 static inline bool
5134 is_cxx (void)
5135 {
5136 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5137
5138 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5139 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5140 }
5141
5142 /* Return TRUE if DECL was created by the C++ frontend. */
5143
5144 static bool
5145 is_cxx (const_tree decl)
5146 {
5147 if (in_lto_p)
5148 {
5149 const_tree context = get_ultimate_context (decl);
5150 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5151 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5152 }
5153 return is_cxx ();
5154 }
5155
5156 /* Return TRUE if the language is Fortran. */
5157
5158 static inline bool
5159 is_fortran (void)
5160 {
5161 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5162
5163 return (lang == DW_LANG_Fortran77
5164 || lang == DW_LANG_Fortran90
5165 || lang == DW_LANG_Fortran95
5166 || lang == DW_LANG_Fortran03
5167 || lang == DW_LANG_Fortran08);
5168 }
5169
5170 static inline bool
5171 is_fortran (const_tree decl)
5172 {
5173 if (in_lto_p)
5174 {
5175 const_tree context = get_ultimate_context (decl);
5176 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5177 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5178 "GNU Fortran", 11) == 0
5179 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5180 "GNU F77") == 0);
5181 }
5182 return is_fortran ();
5183 }
5184
5185 /* Return TRUE if the language is Ada. */
5186
5187 static inline bool
5188 is_ada (void)
5189 {
5190 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5191
5192 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5193 }
5194
5195 /* Remove the specified attribute if present. Return TRUE if removal
5196 was successful. */
5197
5198 static bool
5199 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5200 {
5201 dw_attr_node *a;
5202 unsigned ix;
5203
5204 if (! die)
5205 return false;
5206
5207 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5208 if (a->dw_attr == attr_kind)
5209 {
5210 if (AT_class (a) == dw_val_class_str)
5211 if (a->dw_attr_val.v.val_str->refcount)
5212 a->dw_attr_val.v.val_str->refcount--;
5213
5214 /* vec::ordered_remove should help reduce the number of abbrevs
5215 that are needed. */
5216 die->die_attr->ordered_remove (ix);
5217 return true;
5218 }
5219 return false;
5220 }
5221
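/* A DIE's children are kept on a singly linked ring threaded through the
   die_sib fields: die_child points at the LAST child, each child's die_sib
   points at the next child, and the last child's die_sib points back at
   the first child.  Roughly:

       parent->die_child ------------------------------+
                                                        v
         first --die_sib--> second --die_sib--> ... --> last
           ^                                              |
           +------------------- die_sib ------------------+

   The child-manipulation helpers below all rely on this invariant.  */
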
5222 /* Remove CHILD from its parent. PREV must have the property that
5223 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib link but does not otherwise alter CHILD. */
5224
5225 static void
5226 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5227 {
5228 gcc_assert (child->die_parent == prev->die_parent);
5229 gcc_assert (prev->die_sib == child);
5230 if (prev == child)
5231 {
5232 gcc_assert (child->die_parent->die_child == child);
5233 prev = NULL;
5234 }
5235 else
5236 prev->die_sib = child->die_sib;
5237 if (child->die_parent->die_child == child)
5238 child->die_parent->die_child = prev;
5239 child->die_sib = NULL;
5240 }
5241
5242 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5243 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib link but does not otherwise alter OLD_CHILD. */
5244
5245 static void
5246 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5247 {
5248 dw_die_ref parent = old_child->die_parent;
5249
5250 gcc_assert (parent == prev->die_parent);
5251 gcc_assert (prev->die_sib == old_child);
5252
5253 new_child->die_parent = parent;
5254 if (prev == old_child)
5255 {
5256 gcc_assert (parent->die_child == old_child);
5257 new_child->die_sib = new_child;
5258 }
5259 else
5260 {
5261 prev->die_sib = new_child;
5262 new_child->die_sib = old_child->die_sib;
5263 }
5264 if (old_child->die_parent->die_child == old_child)
5265 old_child->die_parent->die_child = new_child;
5266 old_child->die_sib = NULL;
5267 }
5268
5269 /* Move all children from OLD_PARENT to NEW_PARENT. */
5270
5271 static void
5272 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5273 {
5274 dw_die_ref c;
5275 new_parent->die_child = old_parent->die_child;
5276 old_parent->die_child = NULL;
5277 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5278 }
5279
5280 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no
5281 child matches TAG. */
5282
5283 static void
5284 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5285 {
5286 dw_die_ref c;
5287
5288 c = die->die_child;
5289 if (c) do {
5290 dw_die_ref prev = c;
5291 c = c->die_sib;
5292 while (c->die_tag == tag)
5293 {
5294 remove_child_with_prev (c, prev);
5295 c->die_parent = NULL;
5296 /* Might have removed every child. */
5297 if (die->die_child == NULL)
5298 return;
5299 c = prev->die_sib;
5300 }
5301 } while (c != die->die_child);
5302 }
5303
5304 /* Add a CHILD_DIE as the last child of DIE. */
5305
5306 static void
5307 add_child_die (dw_die_ref die, dw_die_ref child_die)
5308 {
5309 /* FIXME this should probably be an assert. */
5310 if (! die || ! child_die)
5311 return;
5312 gcc_assert (die != child_die);
5313
5314 child_die->die_parent = die;
5315 if (die->die_child)
5316 {
5317 child_die->die_sib = die->die_child->die_sib;
5318 die->die_child->die_sib = child_die;
5319 }
5320 else
5321 child_die->die_sib = child_die;
5322 die->die_child = child_die;
5323 }
5324
5325 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5326
5327 static void
5328 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5329 dw_die_ref after_die)
5330 {
5331 gcc_assert (die
5332 && child_die
5333 && after_die
5334 && die->die_child
5335 && die != child_die);
5336
5337 child_die->die_parent = die;
5338 child_die->die_sib = after_die->die_sib;
5339 after_die->die_sib = child_die;
5340 if (die->die_child == after_die)
5341 die->die_child = child_die;
5342 }
5343
5344 /* Unassociate CHILD from its parent, and make its parent be
5345 NEW_PARENT. */
5346
5347 static void
5348 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5349 {
5350 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5351 if (p->die_sib == child)
5352 {
5353 remove_child_with_prev (child, p);
5354 break;
5355 }
5356 add_child_die (new_parent, child);
5357 }
5358
5359 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5360 is the specification, to the end of PARENT's list of children.
5361 This is done by removing and re-adding it. */
5362
5363 static void
5364 splice_child_die (dw_die_ref parent, dw_die_ref child)
5365 {
5366 /* We want the declaration DIE from inside the class, not the
5367 specification DIE at toplevel. */
5368 if (child->die_parent != parent)
5369 {
5370 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5371
5372 if (tmp)
5373 child = tmp;
5374 }
5375
5376 gcc_assert (child->die_parent == parent
5377 || (child->die_parent
5378 == get_AT_ref (parent, DW_AT_specification)));
5379
5380 reparent_child (child, parent);
5381 }
5382
5383 /* Create and return a new die with TAG_VALUE as tag. */
5384
5385 static inline dw_die_ref
5386 new_die_raw (enum dwarf_tag tag_value)
5387 {
5388 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5389 die->die_tag = tag_value;
5390 return die;
5391 }
5392
5393 /* Create and return a new die with a parent of PARENT_DIE. If
5394 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5395 associated tree T must be supplied to determine parenthood
5396 later. */
5397
5398 static inline dw_die_ref
5399 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5400 {
5401 dw_die_ref die = new_die_raw (tag_value);
5402
5403 if (parent_die != NULL)
5404 add_child_die (parent_die, die);
5405 else
5406 {
5407 limbo_die_node *limbo_node;
5408
5409 /* No DIEs created after early dwarf should end up in limbo,
5410 because the limbo list should not persist past LTO
5411 streaming. */
5412 if (tag_value != DW_TAG_compile_unit
5413 /* These are allowed because they're generated while
5414 breaking out COMDAT units late. */
5415 && tag_value != DW_TAG_type_unit
5416 && tag_value != DW_TAG_skeleton_unit
5417 && !early_dwarf
5418 /* Allow nested functions to live in limbo because they will
5419 only temporarily live there, as decls_for_scope will fix
5420 them up. */
5421 && (TREE_CODE (t) != FUNCTION_DECL
5422 || !decl_function_context (t))
5423 /* Same as nested functions above but for types. Types that
5424 are local to a function will be fixed in
5425 decls_for_scope. */
5426 && (!RECORD_OR_UNION_TYPE_P (t)
5427 || !TYPE_CONTEXT (t)
5428 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5429 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5430 especially in the ltrans stage, but once we implement LTO
5431 dwarf streaming, we should remove this exception. */
5432 && !in_lto_p)
5433 {
5434 fprintf (stderr, "symbol ended up in limbo too late:");
5435 debug_generic_stmt (t);
5436 gcc_unreachable ();
5437 }
5438
5439 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5440 limbo_node->die = die;
5441 limbo_node->created_for = t;
5442 limbo_node->next = limbo_die_list;
5443 limbo_die_list = limbo_node;
5444 }
5445
5446 return die;
5447 }
5448
5449 /* Return the DIE associated with the given type specifier. */
5450
5451 static inline dw_die_ref
5452 lookup_type_die (tree type)
5453 {
5454 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5455 if (die && die->removed)
5456 {
5457 TYPE_SYMTAB_DIE (type) = NULL;
5458 return NULL;
5459 }
5460 return die;
5461 }
5462
5463 /* Given TYPE_DIE representing the type TYPE, if TYPE is an
5464 anonymous type named by a typedef (whose DIE is TYPE_DIE), return the
5465 DIE of the anonymous type instead of the one of the naming typedef. */
5466
5467 static inline dw_die_ref
5468 strip_naming_typedef (tree type, dw_die_ref type_die)
5469 {
5470 if (type
5471 && TREE_CODE (type) == RECORD_TYPE
5472 && type_die
5473 && type_die->die_tag == DW_TAG_typedef
5474 && is_naming_typedef_decl (TYPE_NAME (type)))
5475 type_die = get_AT_ref (type_die, DW_AT_type);
5476 return type_die;
5477 }
5478
5479 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5480 typedef[1], return the DIE of the anonymous type instead of the one
5481 of the naming typedef. This is because gen_typedef_die equates the
5482 anonymous struct named by the typedef with the DIE of the naming
5483 typedef. So by default, lookup_type_die on an anonymous struct
5484 yields the DIE of the naming typedef.
5485 
5486 [1]: See the comment of is_naming_typedef_decl to learn what a
5487 naming typedef is. */
5488
5489 static inline dw_die_ref
5490 lookup_type_die_strip_naming_typedef (tree type)
5491 {
5492 dw_die_ref die = lookup_type_die (type);
5493 return strip_naming_typedef (type, die);
5494 }
5495
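/* For example, given

     typedef struct { int i; } S;

   the struct type is anonymous and S is its naming typedef: gen_typedef_die
   equates the struct with the DW_TAG_typedef DIE for S, so lookup_type_die
   on the struct returns that typedef DIE, and the helpers above follow its
   DW_AT_type to get back to the DIE of the anonymous structure.  */
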
5496 /* Equate a DIE to a given type specifier. */
5497
5498 static inline void
5499 equate_type_number_to_die (tree type, dw_die_ref type_die)
5500 {
5501 TYPE_SYMTAB_DIE (type) = type_die;
5502 }
5503
5504 /* Returns a hash value for X (which really is a die_struct). */
5505
5506 inline hashval_t
5507 decl_die_hasher::hash (die_node *x)
5508 {
5509 return (hashval_t) x->decl_id;
5510 }
5511
5512 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5513
5514 inline bool
5515 decl_die_hasher::equal (die_node *x, tree y)
5516 {
5517 return (x->decl_id == DECL_UID (y));
5518 }
5519
5520 /* Return the DIE associated with a given declaration. */
5521
5522 static inline dw_die_ref
5523 lookup_decl_die (tree decl)
5524 {
5525 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5526 NO_INSERT);
5527 if (!die)
5528 return NULL;
5529 if ((*die)->removed)
5530 {
5531 decl_die_table->clear_slot (die);
5532 return NULL;
5533 }
5534 return *die;
5535 }
5536
5537
5538 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5539 style reference. Return true if we found one referring to a DIE for
5540 DECL, otherwise return false. */
5541
5542 static bool
5543 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5544 unsigned HOST_WIDE_INT *off)
5545 {
5546 dw_die_ref die;
5547
5548 if (flag_wpa && !decl_die_table)
5549 return false;
5550
5551 if (TREE_CODE (decl) == BLOCK)
5552 die = BLOCK_DIE (decl);
5553 else
5554 die = lookup_decl_die (decl);
5555 if (!die)
5556 return false;
5557
5558 /* During WPA stage we currently use DIEs to store the
5559 decl <-> label + offset map. That's quite inefficient but it
5560 works for now. */
5561 if (flag_wpa)
5562 {
5563 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5564 if (!ref)
5565 {
5566 gcc_assert (die == comp_unit_die ());
5567 return false;
5568 }
5569 *off = ref->die_offset;
5570 *sym = ref->die_id.die_symbol;
5571 return true;
5572 }
5573
5574 /* Similar to get_ref_die_offset_label, but using the "correct"
5575 label. */
5576 *off = die->die_offset;
5577 while (die->die_parent)
5578 die = die->die_parent;
5579 /* For the containing CU DIE we compute a die_symbol in
5580 compute_comp_unit_symbol. */
5581 gcc_assert (die->die_tag == DW_TAG_compile_unit
5582 && die->die_id.die_symbol != NULL);
5583 *sym = die->die_id.die_symbol;
5584 return true;
5585 }
5586
5587 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5588
5589 static void
5590 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5591 const char *symbol, HOST_WIDE_INT offset)
5592 {
5593 /* Create a fake DIE that contains the reference. Don't use
5594 new_die because we don't want to end up in the limbo list. */
5595 dw_die_ref ref = new_die_raw (die->die_tag);
5596 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5597 ref->die_offset = offset;
5598 ref->with_offset = 1;
5599 add_AT_die_ref (die, attr_kind, ref);
5600 }
5601
5602 /* Create a DIE for DECL if required and add a reference to a DIE
5603 at SYMBOL + OFFSET which contains attributes dumped early. */
5604
5605 static void
5606 dwarf2out_register_external_die (tree decl, const char *sym,
5607 unsigned HOST_WIDE_INT off)
5608 {
5609 if (debug_info_level == DINFO_LEVEL_NONE)
5610 return;
5611
5612 if (flag_wpa && !decl_die_table)
5613 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5614
5615 dw_die_ref die
5616 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5617 gcc_assert (!die);
5618
5619 tree ctx;
5620 dw_die_ref parent = NULL;
5621 /* Need to look up a DIE for the decl's context - the containing
5622 function or translation unit. */
5623 if (TREE_CODE (decl) == BLOCK)
5624 {
5625 ctx = BLOCK_SUPERCONTEXT (decl);
5626 /* ??? We do not output DIEs for all scopes, so skip as
5627 many enclosing BLOCKs as needed. */
5628 while (TREE_CODE (ctx) == BLOCK
5629 && !BLOCK_DIE (ctx))
5630 ctx = BLOCK_SUPERCONTEXT (ctx);
5631 }
5632 else
5633 ctx = DECL_CONTEXT (decl);
5634 while (ctx && TYPE_P (ctx))
5635 ctx = TYPE_CONTEXT (ctx);
5636 if (ctx)
5637 {
5638 if (TREE_CODE (ctx) == BLOCK)
5639 parent = BLOCK_DIE (ctx);
5640 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5641 /* Keep the 1:1 association during WPA. */
5642 && !flag_wpa)
5643 /* Otherwise all late annotations go to the main CU which
5644 imports the original CUs. */
5645 parent = comp_unit_die ();
5646 else if (TREE_CODE (ctx) == FUNCTION_DECL
5647 && TREE_CODE (decl) != PARM_DECL
5648 && TREE_CODE (decl) != BLOCK)
5649 /* Leave parent determination for function-local entities to when
5650 we process scope variables. */
5651 ;
5652 else
5653 parent = lookup_decl_die (ctx);
5654 }
5655 else
5656 /* In some cases the frontends fail to set DECL_CONTEXT properly.
5657 Handle this case gracefully by parenting the entity in the compile unit DIE. */
5658 parent = comp_unit_die ();
5659 /* Create a DIE "stub". */
5660 switch (TREE_CODE (decl))
5661 {
5662 case TRANSLATION_UNIT_DECL:
5663 if (! flag_wpa)
5664 {
5665 die = comp_unit_die ();
5666 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5667 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5668 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5669 to create a DIE for the original CUs. */
5670 return;
5671 }
5672 /* Keep the 1:1 association during WPA. */
5673 die = new_die (DW_TAG_compile_unit, NULL, decl);
5674 break;
5675 case NAMESPACE_DECL:
5676 if (is_fortran (decl))
5677 die = new_die (DW_TAG_module, parent, decl);
5678 else
5679 die = new_die (DW_TAG_namespace, parent, decl);
5680 break;
5681 case FUNCTION_DECL:
5682 die = new_die (DW_TAG_subprogram, parent, decl);
5683 break;
5684 case VAR_DECL:
5685 die = new_die (DW_TAG_variable, parent, decl);
5686 break;
5687 case RESULT_DECL:
5688 die = new_die (DW_TAG_variable, parent, decl);
5689 break;
5690 case PARM_DECL:
5691 die = new_die (DW_TAG_formal_parameter, parent, decl);
5692 break;
5693 case CONST_DECL:
5694 die = new_die (DW_TAG_constant, parent, decl);
5695 break;
5696 case LABEL_DECL:
5697 die = new_die (DW_TAG_label, parent, decl);
5698 break;
5699 case BLOCK:
5700 die = new_die (DW_TAG_lexical_block, parent, decl);
5701 break;
5702 default:
5703 gcc_unreachable ();
5704 }
5705 if (TREE_CODE (decl) == BLOCK)
5706 BLOCK_DIE (decl) = die;
5707 else
5708 equate_decl_number_to_die (decl, die);
5709
5710 /* Add a reference to the DIE providing early debug at SYM + OFF. */
5711 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5712 }
5713
5714 /* Returns a hash value for X (which really is a var_loc_list). */
5715
5716 inline hashval_t
5717 decl_loc_hasher::hash (var_loc_list *x)
5718 {
5719 return (hashval_t) x->decl_id;
5720 }
5721
5722 /* Return nonzero if decl_id of var_loc_list X is the same as
5723 UID of decl *Y. */
5724
5725 inline bool
5726 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5727 {
5728 return (x->decl_id == DECL_UID (y));
5729 }
5730
5731 /* Return the var_loc list associated with a given declaration. */
5732
5733 static inline var_loc_list *
5734 lookup_decl_loc (const_tree decl)
5735 {
5736 if (!decl_loc_table)
5737 return NULL;
5738 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5739 }
5740
5741 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5742
5743 inline hashval_t
5744 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5745 {
5746 return (hashval_t) x->decl_id;
5747 }
5748
5749 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5750 UID of decl *Y. */
5751
5752 inline bool
5753 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5754 {
5755 return (x->decl_id == DECL_UID (y));
5756 }
5757
5758 /* Equate a DIE to a particular declaration. */
5759
5760 static void
5761 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5762 {
5763 unsigned int decl_id = DECL_UID (decl);
5764
5765 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5766 decl_die->decl_id = decl_id;
5767 }
5768
5769 /* Return how many bits the PIECE EXPR_LIST covers. */
5770
5771 static HOST_WIDE_INT
5772 decl_piece_bitsize (rtx piece)
5773 {
5774 int ret = (int) GET_MODE (piece);
5775 if (ret)
5776 return ret;
5777 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5778 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5779 return INTVAL (XEXP (XEXP (piece, 0), 0));
5780 }
5781
5782 /* Return a pointer to the slot holding the location note in the PIECE EXPR_LIST. */
5783
5784 static rtx *
5785 decl_piece_varloc_ptr (rtx piece)
5786 {
5787 if ((int) GET_MODE (piece))
5788 return &XEXP (piece, 0);
5789 else
5790 return &XEXP (XEXP (piece, 0), 1);
5791 }
5792
5793 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5794 NEXT is the chain of following piece nodes. */
5795
5796 static rtx_expr_list *
5797 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5798 {
5799 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5800 return alloc_EXPR_LIST (bitsize, loc_note, next);
5801 else
5802 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5803 GEN_INT (bitsize),
5804 loc_note), next);
5805 }
5806
5807 /* Return rtx that should be stored into loc field for
5808 LOC_NOTE and BITPOS/BITSIZE. */
5809
5810 static rtx
5811 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5812 HOST_WIDE_INT bitsize)
5813 {
5814 if (bitsize != -1)
5815 {
5816 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5817 if (bitpos != 0)
5818 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5819 }
5820 return loc_note;
5821 }
5822
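/* As an illustration, for a note describing only bits [32, 64) of a decl
   (BITPOS 32, BITSIZE 32), construct_piece_list produces a two-node chain:
   a 32-bit padding piece with no location followed by a 32-bit piece
   holding LOC_NOTE.  decl_piece_node hides the bit count in the EXPR_LIST's
   machine-mode field when it fits, and otherwise wraps it in a CONCAT of
   (CONST_INT bitsize, location), which is what decl_piece_bitsize and
   decl_piece_varloc_ptr above decode.  */
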
5823 /* This function either modifies location piece list *DEST in
5824 place (if SRC and INNER are NULL), or copies location piece list
5825 *SRC to *DEST while modifying it. The piece at bit position BITPOS
5826 is changed to contain LOC_NOTE; any pieces overlapping it are
5827 removed (or, when copying, not copied), and some padding is added
5828 around it if needed. When modifying in place, DEST should point to
5829 the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
5830 copying, SRC points to the start of the whole list and INNER points
5831 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
5832
5833 static void
5834 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5835 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5836 HOST_WIDE_INT bitsize, rtx loc_note)
5837 {
5838 HOST_WIDE_INT diff;
5839 bool copy = inner != NULL;
5840
5841 if (copy)
5842 {
5843 /* First copy all nodes preceding the current bitpos. */
5844 while (src != inner)
5845 {
5846 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5847 decl_piece_bitsize (*src), NULL_RTX);
5848 dest = &XEXP (*dest, 1);
5849 src = &XEXP (*src, 1);
5850 }
5851 }
5852 /* Add padding if needed. */
5853 if (bitpos != piece_bitpos)
5854 {
5855 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5856 copy ? NULL_RTX : *dest);
5857 dest = &XEXP (*dest, 1);
5858 }
5859 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5860 {
5861 gcc_assert (!copy);
5862 /* A piece with the correct bitpos and bitsize already exists;
5863 just update its location and return. */
5864 *decl_piece_varloc_ptr (*dest) = loc_note;
5865 return;
5866 }
5867 /* Add the piece that changed. */
5868 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5869 dest = &XEXP (*dest, 1);
5870 /* Skip over pieces that overlap it. */
5871 diff = bitpos - piece_bitpos + bitsize;
5872 if (!copy)
5873 src = dest;
5874 while (diff > 0 && *src)
5875 {
5876 rtx piece = *src;
5877 diff -= decl_piece_bitsize (piece);
5878 if (copy)
5879 src = &XEXP (piece, 1);
5880 else
5881 {
5882 *src = XEXP (piece, 1);
5883 free_EXPR_LIST_node (piece);
5884 }
5885 }
5886 /* Add padding if needed. */
5887 if (diff < 0 && *src)
5888 {
5889 if (!copy)
5890 dest = src;
5891 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5892 dest = &XEXP (*dest, 1);
5893 }
5894 if (!copy)
5895 return;
5896 /* Finally copy all nodes following it. */
5897 while (*src)
5898 {
5899 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5900 decl_piece_bitsize (*src), NULL_RTX);
5901 dest = &XEXP (*dest, 1);
5902 src = &XEXP (*src, 1);
5903 }
5904 }
5905
5906 /* Add a variable location node to the linked list for DECL. */
5907
5908 static struct var_loc_node *
5909 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5910 {
5911 unsigned int decl_id;
5912 var_loc_list *temp;
5913 struct var_loc_node *loc = NULL;
5914 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5915
5916 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5917 {
5918 tree realdecl = DECL_DEBUG_EXPR (decl);
5919 if (handled_component_p (realdecl)
5920 || (TREE_CODE (realdecl) == MEM_REF
5921 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5922 {
5923 bool reverse;
5924 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5925 &bitsize, &reverse);
5926 if (!innerdecl
5927 || !DECL_P (innerdecl)
5928 || DECL_IGNORED_P (innerdecl)
5929 || TREE_STATIC (innerdecl)
5930 || bitsize == 0
5931 || bitpos + bitsize > 256)
5932 return NULL;
5933 decl = innerdecl;
5934 }
5935 }
5936
5937 decl_id = DECL_UID (decl);
5938 var_loc_list **slot
5939 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5940 if (*slot == NULL)
5941 {
5942 temp = ggc_cleared_alloc<var_loc_list> ();
5943 temp->decl_id = decl_id;
5944 *slot = temp;
5945 }
5946 else
5947 temp = *slot;
5948
5949 /* For PARM_DECLs try to keep around the original incoming value,
5950 even if that means we'll emit a zero-range .debug_loc entry. */
5951 if (temp->last
5952 && temp->first == temp->last
5953 && TREE_CODE (decl) == PARM_DECL
5954 && NOTE_P (temp->first->loc)
5955 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5956 && DECL_INCOMING_RTL (decl)
5957 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5958 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5959 == GET_CODE (DECL_INCOMING_RTL (decl))
5960 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5961 && (bitsize != -1
5962 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5963 NOTE_VAR_LOCATION_LOC (loc_note))
5964 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5965 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5966 {
5967 loc = ggc_cleared_alloc<var_loc_node> ();
5968 temp->first->next = loc;
5969 temp->last = loc;
5970 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5971 }
5972 else if (temp->last)
5973 {
5974 struct var_loc_node *last = temp->last, *unused = NULL;
5975 rtx *piece_loc = NULL, last_loc_note;
5976 HOST_WIDE_INT piece_bitpos = 0;
5977 if (last->next)
5978 {
5979 last = last->next;
5980 gcc_assert (last->next == NULL);
5981 }
5982 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5983 {
5984 piece_loc = &last->loc;
5985 do
5986 {
5987 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5988 if (piece_bitpos + cur_bitsize > bitpos)
5989 break;
5990 piece_bitpos += cur_bitsize;
5991 piece_loc = &XEXP (*piece_loc, 1);
5992 }
5993 while (*piece_loc);
5994 }
5995 /* TEMP->LAST here is a pointer to either the last-but-one or the
5996 last element in the chained list; LAST is a pointer to the
5997 last element. */
5998 if (label && strcmp (last->label, label) == 0)
5999 {
6000 /* For SRA-optimized variables, if there haven't been any real
6001 insns since the last note, just modify the last node. */
6002 if (piece_loc != NULL)
6003 {
6004 adjust_piece_list (piece_loc, NULL, NULL,
6005 bitpos, piece_bitpos, bitsize, loc_note);
6006 return NULL;
6007 }
6008 /* If the last note doesn't cover any instructions, remove it. */
6009 if (temp->last != last)
6010 {
6011 temp->last->next = NULL;
6012 unused = last;
6013 last = temp->last;
6014 gcc_assert (strcmp (last->label, label) != 0);
6015 }
6016 else
6017 {
6018 gcc_assert (temp->first == temp->last
6019 || (temp->first->next == temp->last
6020 && TREE_CODE (decl) == PARM_DECL));
6021 memset (temp->last, '\0', sizeof (*temp->last));
6022 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6023 return temp->last;
6024 }
6025 }
6026 if (bitsize == -1 && NOTE_P (last->loc))
6027 last_loc_note = last->loc;
6028 else if (piece_loc != NULL
6029 && *piece_loc != NULL_RTX
6030 && piece_bitpos == bitpos
6031 && decl_piece_bitsize (*piece_loc) == bitsize)
6032 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6033 else
6034 last_loc_note = NULL_RTX;
6035 /* If the current location is the same as the end of the list,
6036 and either both or neither of the locations is uninitialized,
6037 we have nothing to do. */
6038 if (last_loc_note == NULL_RTX
6039 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6040 NOTE_VAR_LOCATION_LOC (loc_note)))
6041 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6042 != NOTE_VAR_LOCATION_STATUS (loc_note))
6043 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6044 == VAR_INIT_STATUS_UNINITIALIZED)
6045 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6046 == VAR_INIT_STATUS_UNINITIALIZED))))
6047 {
6048 /* Add LOC to the end of list and update LAST. If the last
6049 element of the list has been removed above, reuse its
6050 memory for the new node, otherwise allocate a new one. */
6051 if (unused)
6052 {
6053 loc = unused;
6054 memset (loc, '\0', sizeof (*loc));
6055 }
6056 else
6057 loc = ggc_cleared_alloc<var_loc_node> ();
6058 if (bitsize == -1 || piece_loc == NULL)
6059 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6060 else
6061 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6062 bitpos, piece_bitpos, bitsize, loc_note);
6063 last->next = loc;
6064 /* Ensure TEMP->LAST will point either to the new last but one
6065 element of the chain, or to the last element in it. */
6066 if (last != temp->last)
6067 temp->last = last;
6068 }
6069 else if (unused)
6070 ggc_free (unused);
6071 }
6072 else
6073 {
6074 loc = ggc_cleared_alloc<var_loc_node> ();
6075 temp->first = loc;
6076 temp->last = loc;
6077 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6078 }
6079 return loc;
6080 }
6081 \f
6082 /* Keep track of the number of spaces used to indent the
6083 output of the debugging routines that print the structure of
6084 the DIE internal representation. */
6085 static int print_indent;
6086
6087 /* Indent the line the number of spaces given by print_indent. */
6088
6089 static inline void
6090 print_spaces (FILE *outfile)
6091 {
6092 fprintf (outfile, "%*s", print_indent, "");
6093 }
6094
6095 /* Print a type signature in hex. */
6096
6097 static inline void
6098 print_signature (FILE *outfile, char *sig)
6099 {
6100 int i;
6101
6102 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6103 fprintf (outfile, "%02x", sig[i] & 0xff);
6104 }
6105
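/* Print the discriminant value DISCR_VALUE to OUTFILE, as unsigned or
   signed depending on its POS field.  */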
6106 static inline void
6107 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6108 {
6109 if (discr_value->pos)
6110 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6111 else
6112 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6113 }
6114
6115 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6116
6117 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6118 RECURSE, also output the location descriptor operations. */
6119
6120 static void
6121 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6122 {
6123 switch (val->val_class)
6124 {
6125 case dw_val_class_addr:
6126 fprintf (outfile, "address");
6127 break;
6128 case dw_val_class_offset:
6129 fprintf (outfile, "offset");
6130 break;
6131 case dw_val_class_loc:
6132 fprintf (outfile, "location descriptor");
6133 if (val->v.val_loc == NULL)
6134 fprintf (outfile, " -> <null>\n");
6135 else if (recurse)
6136 {
6137 fprintf (outfile, ":\n");
6138 print_indent += 4;
6139 print_loc_descr (val->v.val_loc, outfile);
6140 print_indent -= 4;
6141 }
6142 else
6143 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6144 break;
6145 case dw_val_class_loc_list:
6146 fprintf (outfile, "location list -> label:%s",
6147 val->v.val_loc_list->ll_symbol);
6148 break;
6149 case dw_val_class_range_list:
6150 fprintf (outfile, "range list");
6151 break;
6152 case dw_val_class_const:
6153 case dw_val_class_const_implicit:
6154 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6155 break;
6156 case dw_val_class_unsigned_const:
6157 case dw_val_class_unsigned_const_implicit:
6158 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6159 break;
6160 case dw_val_class_const_double:
6161 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6162 HOST_WIDE_INT_PRINT_UNSIGNED")",
6163 val->v.val_double.high,
6164 val->v.val_double.low);
6165 break;
6166 case dw_val_class_wide_int:
6167 {
6168 int i = val->v.val_wide->get_len ();
6169 fprintf (outfile, "constant (");
6170 gcc_assert (i > 0);
6171 if (val->v.val_wide->elt (i - 1) == 0)
6172 fprintf (outfile, "0x");
6173 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6174 val->v.val_wide->elt (--i));
6175 while (--i >= 0)
6176 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6177 val->v.val_wide->elt (i));
6178 fprintf (outfile, ")");
6179 break;
6180 }
6181 case dw_val_class_vec:
6182 fprintf (outfile, "floating-point or vector constant");
6183 break;
6184 case dw_val_class_flag:
6185 fprintf (outfile, "%u", val->v.val_flag);
6186 break;
6187 case dw_val_class_die_ref:
6188 if (val->v.val_die_ref.die != NULL)
6189 {
6190 dw_die_ref die = val->v.val_die_ref.die;
6191
6192 if (die->comdat_type_p)
6193 {
6194 fprintf (outfile, "die -> signature: ");
6195 print_signature (outfile,
6196 die->die_id.die_type_node->signature);
6197 }
6198 else if (die->die_id.die_symbol)
6199 {
6200 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6201 if (die->with_offset)
6202 fprintf (outfile, " + %ld", die->die_offset);
6203 }
6204 else
6205 fprintf (outfile, "die -> %ld", die->die_offset);
6206 fprintf (outfile, " (%p)", (void *) die);
6207 }
6208 else
6209 fprintf (outfile, "die -> <null>");
6210 break;
6211 case dw_val_class_vms_delta:
6212 fprintf (outfile, "delta: @slotcount(%s-%s)",
6213 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6214 break;
6215 case dw_val_class_lbl_id:
6216 case dw_val_class_lineptr:
6217 case dw_val_class_macptr:
6218 case dw_val_class_loclistsptr:
6219 case dw_val_class_high_pc:
6220 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6221 break;
6222 case dw_val_class_str:
6223 if (val->v.val_str->str != NULL)
6224 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6225 else
6226 fprintf (outfile, "<null>");
6227 break;
6228 case dw_val_class_file:
6229 case dw_val_class_file_implicit:
6230 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6231 val->v.val_file->emitted_number);
6232 break;
6233 case dw_val_class_data8:
6234 {
6235 int i;
6236
6237 for (i = 0; i < 8; i++)
6238 fprintf (outfile, "%02x", val->v.val_data8[i]);
6239 break;
6240 }
6241 case dw_val_class_discr_value:
6242 print_discr_value (outfile, &val->v.val_discr_value);
6243 break;
6244 case dw_val_class_discr_list:
6245 for (dw_discr_list_ref node = val->v.val_discr_list;
6246 node != NULL;
6247 node = node->dw_discr_next)
6248 {
6249 if (node->dw_discr_range)
6250 {
6251 fprintf (outfile, " .. ");
6252 print_discr_value (outfile, &node->dw_discr_lower_bound);
6253 print_discr_value (outfile, &node->dw_discr_upper_bound);
6254 }
6255 else
6256 print_discr_value (outfile, &node->dw_discr_lower_bound);
6257
6258 if (node->dw_discr_next != NULL)
6259 fprintf (outfile, " | ");
6260 }
6261 default:
6262 break;
6263 }
6264 }
6265
6266 /* Likewise, for a DIE attribute. */
6267
6268 static void
6269 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6270 {
6271 print_dw_val (&a->dw_attr_val, recurse, outfile);
6272 }
6273
6274
6275 /* Print the list of operands in the LOC location description to OUTFILE. This
6276 routine is a debugging aid only. */
6277
6278 static void
6279 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6280 {
6281 dw_loc_descr_ref l = loc;
6282
6283 if (loc == NULL)
6284 {
6285 print_spaces (outfile);
6286 fprintf (outfile, "<null>\n");
6287 return;
6288 }
6289
6290 for (l = loc; l != NULL; l = l->dw_loc_next)
6291 {
6292 print_spaces (outfile);
6293 fprintf (outfile, "(%p) %s",
6294 (void *) l,
6295 dwarf_stack_op_name (l->dw_loc_opc));
6296 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6297 {
6298 fprintf (outfile, " ");
6299 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6300 }
6301 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6302 {
6303 fprintf (outfile, ", ");
6304 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6305 }
6306 fprintf (outfile, "\n");
6307 }
6308 }
6309
6310 /* Print the information associated with a given DIE, and its children.
6311 This routine is a debugging aid only. */
6312
6313 static void
6314 print_die (dw_die_ref die, FILE *outfile)
6315 {
6316 dw_attr_node *a;
6317 dw_die_ref c;
6318 unsigned ix;
6319
6320 print_spaces (outfile);
6321 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6322 die->die_offset, dwarf_tag_name (die->die_tag),
6323 (void*) die);
6324 print_spaces (outfile);
6325 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6326 fprintf (outfile, " offset: %ld", die->die_offset);
6327 fprintf (outfile, " mark: %d\n", die->die_mark);
6328
6329 if (die->comdat_type_p)
6330 {
6331 print_spaces (outfile);
6332 fprintf (outfile, " signature: ");
6333 print_signature (outfile, die->die_id.die_type_node->signature);
6334 fprintf (outfile, "\n");
6335 }
6336
6337 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6338 {
6339 print_spaces (outfile);
6340 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6341
6342 print_attribute (a, true, outfile);
6343 fprintf (outfile, "\n");
6344 }
6345
6346 if (die->die_child != NULL)
6347 {
6348 print_indent += 4;
6349 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6350 print_indent -= 4;
6351 }
6352 if (print_indent == 0)
6353 fprintf (outfile, "\n");
6354 }
6355
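/* When invoked from a debugger via debug_dwarf_die or debug_dwarf below,
   the output of print_die is roughly of this shape (the pointer, offsets
   and values will of course differ):

     DIE    0: DW_TAG_compile_unit (0x2a61c80)
      abbrev id: 0 offset: 0 mark: 0
       DW_AT_producer: "GNU C17 ..."
       DW_AT_name: "t.c"

   with each level of children indented four further columns.  */
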
6356 /* Print the list of operations in the LOC location description. */
6357
6358 DEBUG_FUNCTION void
6359 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6360 {
6361 print_loc_descr (loc, stderr);
6362 }
6363
6364 /* Print the information collected for a given DIE. */
6365
6366 DEBUG_FUNCTION void
6367 debug_dwarf_die (dw_die_ref die)
6368 {
6369 print_die (die, stderr);
6370 }
6371
6372 DEBUG_FUNCTION void
6373 debug (die_struct &ref)
6374 {
6375 print_die (&ref, stderr);
6376 }
6377
6378 DEBUG_FUNCTION void
6379 debug (die_struct *ptr)
6380 {
6381 if (ptr)
6382 debug (*ptr);
6383 else
6384 fprintf (stderr, "<nil>\n");
6385 }
6386
6387
6388 /* Print all DWARF information collected for the compilation unit.
6389 This routine is a debugging aid only. */
6390
6391 DEBUG_FUNCTION void
6392 debug_dwarf (void)
6393 {
6394 print_indent = 0;
6395 print_die (comp_unit_die (), stderr);
6396 }
6397
6398 /* Verify the DIE tree structure. */
6399
6400 DEBUG_FUNCTION void
6401 verify_die (dw_die_ref die)
6402 {
6403 gcc_assert (!die->die_mark);
6404 if (die->die_parent == NULL
6405 && die->die_sib == NULL)
6406 return;
6407 /* Verify the die_sib list is cyclic. */
6408 dw_die_ref x = die;
6409 do
6410 {
6411 x->die_mark = 1;
6412 x = x->die_sib;
6413 }
6414 while (x && !x->die_mark);
6415 gcc_assert (x == die);
6416 x = die;
6417 do
6418 {
6419 /* Verify all dies have the same parent. */
6420 gcc_assert (x->die_parent == die->die_parent);
6421 if (x->die_child)
6422 {
6423 /* Verify the child has the proper parent and recurse. */
6424 gcc_assert (x->die_child->die_parent == x);
6425 verify_die (x->die_child);
6426 }
6427 x->die_mark = 0;
6428 x = x->die_sib;
6429 }
6430 while (x && x->die_mark);
6431 }
6432
6433 /* Sanity checks on DIEs. */
6434
6435 static void
6436 check_die (dw_die_ref die)
6437 {
6438 unsigned ix;
6439 dw_attr_node *a;
6440 bool inline_found = false;
6441 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6442 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6443 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6444 {
6445 switch (a->dw_attr)
6446 {
6447 case DW_AT_inline:
6448 if (a->dw_attr_val.v.val_unsigned)
6449 inline_found = true;
6450 break;
6451 case DW_AT_location:
6452 ++n_location;
6453 break;
6454 case DW_AT_low_pc:
6455 ++n_low_pc;
6456 break;
6457 case DW_AT_high_pc:
6458 ++n_high_pc;
6459 break;
6460 case DW_AT_artificial:
6461 ++n_artificial;
6462 break;
6463 case DW_AT_decl_column:
6464 ++n_decl_column;
6465 break;
6466 case DW_AT_decl_line:
6467 ++n_decl_line;
6468 break;
6469 case DW_AT_decl_file:
6470 ++n_decl_file;
6471 break;
6472 default:
6473 break;
6474 }
6475 }
6476 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6477 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6478 {
6479 fprintf (stderr, "Duplicate attributes in DIE:\n");
6480 debug_dwarf_die (die);
6481 gcc_unreachable ();
6482 }
6483 if (inline_found)
6484 {
6485 /* A debugging information entry that is a member of an abstract
6486 instance tree [that has DW_AT_inline] should not contain any
6487 attributes which describe aspects of the subroutine which vary
6488 between distinct inlined expansions or distinct out-of-line
6489 expansions. */
6490 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6491 gcc_assert (a->dw_attr != DW_AT_low_pc
6492 && a->dw_attr != DW_AT_high_pc
6493 && a->dw_attr != DW_AT_location
6494 && a->dw_attr != DW_AT_frame_base
6495 && a->dw_attr != DW_AT_call_all_calls
6496 && a->dw_attr != DW_AT_GNU_all_call_sites);
6497 }
6498 }
6499 \f
6500 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6501 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6502 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6503
6504 /* Calculate the checksum of a location expression. */
6505
6506 static inline void
6507 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6508 {
6509 int tem;
6510 inchash::hash hstate;
6511 hashval_t hash;
6512
6513 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6514 CHECKSUM (tem);
6515 hash_loc_operands (loc, hstate);
6516 hash = hstate.end();
6517 CHECKSUM (hash);
6518 }
6519
6520 /* Calculate the checksum of an attribute. */
6521
6522 static void
6523 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6524 {
6525 dw_loc_descr_ref loc;
6526 rtx r;
6527
6528 CHECKSUM (at->dw_attr);
6529
6530 /* We don't care that this was compiled with a different compiler
6531 snapshot; if the output is the same, that's what matters. */
6532 if (at->dw_attr == DW_AT_producer)
6533 return;
6534
6535 switch (AT_class (at))
6536 {
6537 case dw_val_class_const:
6538 case dw_val_class_const_implicit:
6539 CHECKSUM (at->dw_attr_val.v.val_int);
6540 break;
6541 case dw_val_class_unsigned_const:
6542 case dw_val_class_unsigned_const_implicit:
6543 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6544 break;
6545 case dw_val_class_const_double:
6546 CHECKSUM (at->dw_attr_val.v.val_double);
6547 break;
6548 case dw_val_class_wide_int:
6549 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6550 get_full_len (*at->dw_attr_val.v.val_wide)
6551 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6552 break;
6553 case dw_val_class_vec:
6554 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6555 (at->dw_attr_val.v.val_vec.length
6556 * at->dw_attr_val.v.val_vec.elt_size));
6557 break;
6558 case dw_val_class_flag:
6559 CHECKSUM (at->dw_attr_val.v.val_flag);
6560 break;
6561 case dw_val_class_str:
6562 CHECKSUM_STRING (AT_string (at));
6563 break;
6564
6565 case dw_val_class_addr:
6566 r = AT_addr (at);
6567 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6568 CHECKSUM_STRING (XSTR (r, 0));
6569 break;
6570
6571 case dw_val_class_offset:
6572 CHECKSUM (at->dw_attr_val.v.val_offset);
6573 break;
6574
6575 case dw_val_class_loc:
6576 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6577 loc_checksum (loc, ctx);
6578 break;
6579
6580 case dw_val_class_die_ref:
6581 die_checksum (AT_ref (at), ctx, mark);
6582 break;
6583
6584 case dw_val_class_fde_ref:
6585 case dw_val_class_vms_delta:
6586 case dw_val_class_lbl_id:
6587 case dw_val_class_lineptr:
6588 case dw_val_class_macptr:
6589 case dw_val_class_loclistsptr:
6590 case dw_val_class_high_pc:
6591 break;
6592
6593 case dw_val_class_file:
6594 case dw_val_class_file_implicit:
6595 CHECKSUM_STRING (AT_file (at)->filename);
6596 break;
6597
6598 case dw_val_class_data8:
6599 CHECKSUM (at->dw_attr_val.v.val_data8);
6600 break;
6601
6602 default:
6603 break;
6604 }
6605 }
6606
6607 /* Calculate the checksum of a DIE. */
6608
6609 static void
6610 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6611 {
6612 dw_die_ref c;
6613 dw_attr_node *a;
6614 unsigned ix;
6615
6616 /* To avoid infinite recursion. */
6617 if (die->die_mark)
6618 {
6619 CHECKSUM (die->die_mark);
6620 return;
6621 }
6622 die->die_mark = ++(*mark);
6623
6624 CHECKSUM (die->die_tag);
6625
6626 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6627 attr_checksum (a, ctx, mark);
6628
6629 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6630 }
6631
6632 #undef CHECKSUM
6633 #undef CHECKSUM_BLOCK
6634 #undef CHECKSUM_STRING
6635
6636 /* For DWARF-4 types, include the trailing NUL byte when checksumming strings. */
6637 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6638 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6639 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6640 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6641 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6642 #define CHECKSUM_ATTR(FOO) \
6643 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6644
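/* The helpers below implement the DWARF 4 type-signature computation
   (DWARF 4, Section 7.27): each DIE, attribute and context entry fed to
   MD5 is prefixed with a one-letter code ('D', 'A', 'C', 'N', 'R', 'T',
   ...), so that structurally equivalent type descriptions hash to the
   same signature across translation units.  */
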
6645 /* Calculate the checksum of a number in signed LEB128 format. */
6646
6647 static void
6648 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6649 {
6650 unsigned char byte;
6651 bool more;
6652
6653 while (1)
6654 {
6655 byte = (value & 0x7f);
6656 value >>= 7;
6657 more = !((value == 0 && (byte & 0x40) == 0)
6658 || (value == -1 && (byte & 0x40) != 0));
6659 if (more)
6660 byte |= 0x80;
6661 CHECKSUM (byte);
6662 if (!more)
6663 break;
6664 }
6665 }
6666
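/* For instance, -2 checksums as the single byte 0x7e, while -128 takes two
   bytes, 0x80 0x7f: the low seven bits go out first, and emission stops
   once the remaining bits and the sign bit (0x40) of the last byte agree
   with the sign of the value.  */
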
6667 /* Calculate the checksum of a number in unsigned LEB128 format. */
6668
6669 static void
6670 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6671 {
6672 while (1)
6673 {
6674 unsigned char byte = (value & 0x7f);
6675 value >>= 7;
6676 if (value != 0)
6677 /* More bytes to follow. */
6678 byte |= 0x80;
6679 CHECKSUM (byte);
6680 if (value == 0)
6681 break;
6682 }
6683 }
6684
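/* For instance, values below 128 checksum as a single byte holding the
   value itself, 128 becomes 0x80 0x01, and 624485 becomes 0xe5 0x8e 0x26;
   the high bit of each byte marks whether another byte follows.  */
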
6685 /* Checksum the context of the DIE. This adds the names of any
6686 surrounding namespaces or structures to the checksum. */
6687
6688 static void
6689 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6690 {
6691 const char *name;
6692 dw_die_ref spec;
6693 int tag = die->die_tag;
6694
6695 if (tag != DW_TAG_namespace
6696 && tag != DW_TAG_structure_type
6697 && tag != DW_TAG_class_type)
6698 return;
6699
6700 name = get_AT_string (die, DW_AT_name);
6701
6702 spec = get_AT_ref (die, DW_AT_specification);
6703 if (spec != NULL)
6704 die = spec;
6705
6706 if (die->die_parent != NULL)
6707 checksum_die_context (die->die_parent, ctx);
6708
6709 CHECKSUM_ULEB128 ('C');
6710 CHECKSUM_ULEB128 (tag);
6711 if (name != NULL)
6712 CHECKSUM_STRING (name);
6713 }
6714
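/* For a member of `struct S' nested in `namespace N', the context
   contributes roughly the byte sequence

     'C' uleb(DW_TAG_namespace)      "N" '\0'
     'C' uleb(DW_TAG_structure_type) "S" '\0'

   to the hash (the trailing '\0' comes from the DWARF-4 CHECKSUM_STRING
   above), so the resulting signature reflects the fully qualified
   context of the type.  */
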
6715 /* Calculate the checksum of a location expression. */
6716
6717 static inline void
6718 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6719 {
6720 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6721 were emitted as a DW_FORM_sdata instead of a location expression. */
6722 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6723 {
6724 CHECKSUM_ULEB128 (DW_FORM_sdata);
6725 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6726 return;
6727 }
6728
6729 /* Otherwise, just checksum the raw location expression. */
6730 while (loc != NULL)
6731 {
6732 inchash::hash hstate;
6733 hashval_t hash;
6734
6735 CHECKSUM_ULEB128 (loc->dtprel);
6736 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6737 hash_loc_operands (loc, hstate);
6738 hash = hstate.end ();
6739 CHECKSUM (hash);
6740 loc = loc->dw_loc_next;
6741 }
6742 }
6743
6744 /* Calculate the checksum of an attribute. */
6745
6746 static void
6747 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6748 struct md5_ctx *ctx, int *mark)
6749 {
6750 dw_loc_descr_ref loc;
6751 rtx r;
6752
6753 if (AT_class (at) == dw_val_class_die_ref)
6754 {
6755 dw_die_ref target_die = AT_ref (at);
6756
6757 /* For pointer and reference types, we checksum only the (qualified)
6758 name of the target type (if there is a name). For friend entries,
6759 we checksum only the (qualified) name of the target type or function.
6760 This allows the checksum to remain the same whether the target type
6761 is complete or not. */
6762 if ((at->dw_attr == DW_AT_type
6763 && (tag == DW_TAG_pointer_type
6764 || tag == DW_TAG_reference_type
6765 || tag == DW_TAG_rvalue_reference_type
6766 || tag == DW_TAG_ptr_to_member_type))
6767 || (at->dw_attr == DW_AT_friend
6768 && tag == DW_TAG_friend))
6769 {
6770 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6771
6772 if (name_attr != NULL)
6773 {
6774 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6775
6776 if (decl == NULL)
6777 decl = target_die;
6778 CHECKSUM_ULEB128 ('N');
6779 CHECKSUM_ULEB128 (at->dw_attr);
6780 if (decl->die_parent != NULL)
6781 checksum_die_context (decl->die_parent, ctx);
6782 CHECKSUM_ULEB128 ('E');
6783 CHECKSUM_STRING (AT_string (name_attr));
6784 return;
6785 }
6786 }
6787
6788 /* For all other references to another DIE, we check to see if the
6789 target DIE has already been visited. If it has, we emit a
6790 backward reference; if not, we descend recursively. */
6791 if (target_die->die_mark > 0)
6792 {
6793 CHECKSUM_ULEB128 ('R');
6794 CHECKSUM_ULEB128 (at->dw_attr);
6795 CHECKSUM_ULEB128 (target_die->die_mark);
6796 }
6797 else
6798 {
6799 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6800
6801 if (decl == NULL)
6802 decl = target_die;
6803 target_die->die_mark = ++(*mark);
6804 CHECKSUM_ULEB128 ('T');
6805 CHECKSUM_ULEB128 (at->dw_attr);
6806 if (decl->die_parent != NULL)
6807 checksum_die_context (decl->die_parent, ctx);
6808 die_checksum_ordered (target_die, ctx, mark);
6809 }
6810 return;
6811 }
6812
6813 CHECKSUM_ULEB128 ('A');
6814 CHECKSUM_ULEB128 (at->dw_attr);
6815
6816 switch (AT_class (at))
6817 {
6818 case dw_val_class_const:
6819 case dw_val_class_const_implicit:
6820 CHECKSUM_ULEB128 (DW_FORM_sdata);
6821 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6822 break;
6823
6824 case dw_val_class_unsigned_const:
6825 case dw_val_class_unsigned_const_implicit:
6826 CHECKSUM_ULEB128 (DW_FORM_sdata);
6827 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6828 break;
6829
6830 case dw_val_class_const_double:
6831 CHECKSUM_ULEB128 (DW_FORM_block);
6832 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6833 CHECKSUM (at->dw_attr_val.v.val_double);
6834 break;
6835
6836 case dw_val_class_wide_int:
6837 CHECKSUM_ULEB128 (DW_FORM_block);
6838 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6839 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6840 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6841 get_full_len (*at->dw_attr_val.v.val_wide)
6842 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6843 break;
6844
6845 case dw_val_class_vec:
6846 CHECKSUM_ULEB128 (DW_FORM_block);
6847 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6848 * at->dw_attr_val.v.val_vec.elt_size);
6849 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6850 (at->dw_attr_val.v.val_vec.length
6851 * at->dw_attr_val.v.val_vec.elt_size));
6852 break;
6853
6854 case dw_val_class_flag:
6855 CHECKSUM_ULEB128 (DW_FORM_flag);
6856 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6857 break;
6858
6859 case dw_val_class_str:
6860 CHECKSUM_ULEB128 (DW_FORM_string);
6861 CHECKSUM_STRING (AT_string (at));
6862 break;
6863
6864 case dw_val_class_addr:
6865 r = AT_addr (at);
6866 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6867 CHECKSUM_ULEB128 (DW_FORM_string);
6868 CHECKSUM_STRING (XSTR (r, 0));
6869 break;
6870
6871 case dw_val_class_offset:
6872 CHECKSUM_ULEB128 (DW_FORM_sdata);
6873 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6874 break;
6875
6876 case dw_val_class_loc:
6877 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6878 loc_checksum_ordered (loc, ctx);
6879 break;
6880
6881 case dw_val_class_fde_ref:
6882 case dw_val_class_lbl_id:
6883 case dw_val_class_lineptr:
6884 case dw_val_class_macptr:
6885 case dw_val_class_loclistsptr:
6886 case dw_val_class_high_pc:
6887 break;
6888
6889 case dw_val_class_file:
6890 case dw_val_class_file_implicit:
6891 CHECKSUM_ULEB128 (DW_FORM_string);
6892 CHECKSUM_STRING (AT_file (at)->filename);
6893 break;
6894
6895 case dw_val_class_data8:
6896 CHECKSUM (at->dw_attr_val.v.val_data8);
6897 break;
6898
6899 default:
6900 break;
6901 }
6902 }
6903
6904 struct checksum_attributes
6905 {
6906 dw_attr_node *at_name;
6907 dw_attr_node *at_type;
6908 dw_attr_node *at_friend;
6909 dw_attr_node *at_accessibility;
6910 dw_attr_node *at_address_class;
6911 dw_attr_node *at_alignment;
6912 dw_attr_node *at_allocated;
6913 dw_attr_node *at_artificial;
6914 dw_attr_node *at_associated;
6915 dw_attr_node *at_binary_scale;
6916 dw_attr_node *at_bit_offset;
6917 dw_attr_node *at_bit_size;
6918 dw_attr_node *at_bit_stride;
6919 dw_attr_node *at_byte_size;
6920 dw_attr_node *at_byte_stride;
6921 dw_attr_node *at_const_value;
6922 dw_attr_node *at_containing_type;
6923 dw_attr_node *at_count;
6924 dw_attr_node *at_data_location;
6925 dw_attr_node *at_data_member_location;
6926 dw_attr_node *at_decimal_scale;
6927 dw_attr_node *at_decimal_sign;
6928 dw_attr_node *at_default_value;
6929 dw_attr_node *at_digit_count;
6930 dw_attr_node *at_discr;
6931 dw_attr_node *at_discr_list;
6932 dw_attr_node *at_discr_value;
6933 dw_attr_node *at_encoding;
6934 dw_attr_node *at_endianity;
6935 dw_attr_node *at_explicit;
6936 dw_attr_node *at_is_optional;
6937 dw_attr_node *at_location;
6938 dw_attr_node *at_lower_bound;
6939 dw_attr_node *at_mutable;
6940 dw_attr_node *at_ordering;
6941 dw_attr_node *at_picture_string;
6942 dw_attr_node *at_prototyped;
6943 dw_attr_node *at_small;
6944 dw_attr_node *at_segment;
6945 dw_attr_node *at_string_length;
6946 dw_attr_node *at_string_length_bit_size;
6947 dw_attr_node *at_string_length_byte_size;
6948 dw_attr_node *at_threads_scaled;
6949 dw_attr_node *at_upper_bound;
6950 dw_attr_node *at_use_location;
6951 dw_attr_node *at_use_UTF8;
6952 dw_attr_node *at_variable_parameter;
6953 dw_attr_node *at_virtuality;
6954 dw_attr_node *at_visibility;
6955 dw_attr_node *at_vtable_elem_location;
6956 };
6957
6958 /* Collect the attributes that we will want to use for the checksum. */
6959
6960 static void
6961 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6962 {
6963 dw_attr_node *a;
6964 unsigned ix;
6965
6966 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6967 {
6968 switch (a->dw_attr)
6969 {
6970 case DW_AT_name:
6971 attrs->at_name = a;
6972 break;
6973 case DW_AT_type:
6974 attrs->at_type = a;
6975 break;
6976 case DW_AT_friend:
6977 attrs->at_friend = a;
6978 break;
6979 case DW_AT_accessibility:
6980 attrs->at_accessibility = a;
6981 break;
6982 case DW_AT_address_class:
6983 attrs->at_address_class = a;
6984 break;
6985 case DW_AT_alignment:
6986 attrs->at_alignment = a;
6987 break;
6988 case DW_AT_allocated:
6989 attrs->at_allocated = a;
6990 break;
6991 case DW_AT_artificial:
6992 attrs->at_artificial = a;
6993 break;
6994 case DW_AT_associated:
6995 attrs->at_associated = a;
6996 break;
6997 case DW_AT_binary_scale:
6998 attrs->at_binary_scale = a;
6999 break;
7000 case DW_AT_bit_offset:
7001 attrs->at_bit_offset = a;
7002 break;
7003 case DW_AT_bit_size:
7004 attrs->at_bit_size = a;
7005 break;
7006 case DW_AT_bit_stride:
7007 attrs->at_bit_stride = a;
7008 break;
7009 case DW_AT_byte_size:
7010 attrs->at_byte_size = a;
7011 break;
7012 case DW_AT_byte_stride:
7013 attrs->at_byte_stride = a;
7014 break;
7015 case DW_AT_const_value:
7016 attrs->at_const_value = a;
7017 break;
7018 case DW_AT_containing_type:
7019 attrs->at_containing_type = a;
7020 break;
7021 case DW_AT_count:
7022 attrs->at_count = a;
7023 break;
7024 case DW_AT_data_location:
7025 attrs->at_data_location = a;
7026 break;
7027 case DW_AT_data_member_location:
7028 attrs->at_data_member_location = a;
7029 break;
7030 case DW_AT_decimal_scale:
7031 attrs->at_decimal_scale = a;
7032 break;
7033 case DW_AT_decimal_sign:
7034 attrs->at_decimal_sign = a;
7035 break;
7036 case DW_AT_default_value:
7037 attrs->at_default_value = a;
7038 break;
7039 case DW_AT_digit_count:
7040 attrs->at_digit_count = a;
7041 break;
7042 case DW_AT_discr:
7043 attrs->at_discr = a;
7044 break;
7045 case DW_AT_discr_list:
7046 attrs->at_discr_list = a;
7047 break;
7048 case DW_AT_discr_value:
7049 attrs->at_discr_value = a;
7050 break;
7051 case DW_AT_encoding:
7052 attrs->at_encoding = a;
7053 break;
7054 case DW_AT_endianity:
7055 attrs->at_endianity = a;
7056 break;
7057 case DW_AT_explicit:
7058 attrs->at_explicit = a;
7059 break;
7060 case DW_AT_is_optional:
7061 attrs->at_is_optional = a;
7062 break;
7063 case DW_AT_location:
7064 attrs->at_location = a;
7065 break;
7066 case DW_AT_lower_bound:
7067 attrs->at_lower_bound = a;
7068 break;
7069 case DW_AT_mutable:
7070 attrs->at_mutable = a;
7071 break;
7072 case DW_AT_ordering:
7073 attrs->at_ordering = a;
7074 break;
7075 case DW_AT_picture_string:
7076 attrs->at_picture_string = a;
7077 break;
7078 case DW_AT_prototyped:
7079 attrs->at_prototyped = a;
7080 break;
7081 case DW_AT_small:
7082 attrs->at_small = a;
7083 break;
7084 case DW_AT_segment:
7085 attrs->at_segment = a;
7086 break;
7087 case DW_AT_string_length:
7088 attrs->at_string_length = a;
7089 break;
7090 case DW_AT_string_length_bit_size:
7091 attrs->at_string_length_bit_size = a;
7092 break;
7093 case DW_AT_string_length_byte_size:
7094 attrs->at_string_length_byte_size = a;
7095 break;
7096 case DW_AT_threads_scaled:
7097 attrs->at_threads_scaled = a;
7098 break;
7099 case DW_AT_upper_bound:
7100 attrs->at_upper_bound = a;
7101 break;
7102 case DW_AT_use_location:
7103 attrs->at_use_location = a;
7104 break;
7105 case DW_AT_use_UTF8:
7106 attrs->at_use_UTF8 = a;
7107 break;
7108 case DW_AT_variable_parameter:
7109 attrs->at_variable_parameter = a;
7110 break;
7111 case DW_AT_virtuality:
7112 attrs->at_virtuality = a;
7113 break;
7114 case DW_AT_visibility:
7115 attrs->at_visibility = a;
7116 break;
7117 case DW_AT_vtable_elem_location:
7118 attrs->at_vtable_elem_location = a;
7119 break;
7120 default:
7121 break;
7122 }
7123 }
7124 }
7125
7126 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7127
7128 static void
7129 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7130 {
7131 dw_die_ref c;
7132 dw_die_ref decl;
7133 struct checksum_attributes attrs;
7134
7135 CHECKSUM_ULEB128 ('D');
7136 CHECKSUM_ULEB128 (die->die_tag);
7137
7138 memset (&attrs, 0, sizeof (attrs));
7139
7140 decl = get_AT_ref (die, DW_AT_specification);
7141 if (decl != NULL)
7142 collect_checksum_attributes (&attrs, decl);
7143 collect_checksum_attributes (&attrs, die);
7144
7145 CHECKSUM_ATTR (attrs.at_name);
7146 CHECKSUM_ATTR (attrs.at_accessibility);
7147 CHECKSUM_ATTR (attrs.at_address_class);
7148 CHECKSUM_ATTR (attrs.at_allocated);
7149 CHECKSUM_ATTR (attrs.at_artificial);
7150 CHECKSUM_ATTR (attrs.at_associated);
7151 CHECKSUM_ATTR (attrs.at_binary_scale);
7152 CHECKSUM_ATTR (attrs.at_bit_offset);
7153 CHECKSUM_ATTR (attrs.at_bit_size);
7154 CHECKSUM_ATTR (attrs.at_bit_stride);
7155 CHECKSUM_ATTR (attrs.at_byte_size);
7156 CHECKSUM_ATTR (attrs.at_byte_stride);
7157 CHECKSUM_ATTR (attrs.at_const_value);
7158 CHECKSUM_ATTR (attrs.at_containing_type);
7159 CHECKSUM_ATTR (attrs.at_count);
7160 CHECKSUM_ATTR (attrs.at_data_location);
7161 CHECKSUM_ATTR (attrs.at_data_member_location);
7162 CHECKSUM_ATTR (attrs.at_decimal_scale);
7163 CHECKSUM_ATTR (attrs.at_decimal_sign);
7164 CHECKSUM_ATTR (attrs.at_default_value);
7165 CHECKSUM_ATTR (attrs.at_digit_count);
7166 CHECKSUM_ATTR (attrs.at_discr);
7167 CHECKSUM_ATTR (attrs.at_discr_list);
7168 CHECKSUM_ATTR (attrs.at_discr_value);
7169 CHECKSUM_ATTR (attrs.at_encoding);
7170 CHECKSUM_ATTR (attrs.at_endianity);
7171 CHECKSUM_ATTR (attrs.at_explicit);
7172 CHECKSUM_ATTR (attrs.at_is_optional);
7173 CHECKSUM_ATTR (attrs.at_location);
7174 CHECKSUM_ATTR (attrs.at_lower_bound);
7175 CHECKSUM_ATTR (attrs.at_mutable);
7176 CHECKSUM_ATTR (attrs.at_ordering);
7177 CHECKSUM_ATTR (attrs.at_picture_string);
7178 CHECKSUM_ATTR (attrs.at_prototyped);
7179 CHECKSUM_ATTR (attrs.at_small);
7180 CHECKSUM_ATTR (attrs.at_segment);
7181 CHECKSUM_ATTR (attrs.at_string_length);
7182 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7183 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7184 CHECKSUM_ATTR (attrs.at_threads_scaled);
7185 CHECKSUM_ATTR (attrs.at_upper_bound);
7186 CHECKSUM_ATTR (attrs.at_use_location);
7187 CHECKSUM_ATTR (attrs.at_use_UTF8);
7188 CHECKSUM_ATTR (attrs.at_variable_parameter);
7189 CHECKSUM_ATTR (attrs.at_virtuality);
7190 CHECKSUM_ATTR (attrs.at_visibility);
7191 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7192 CHECKSUM_ATTR (attrs.at_type);
7193 CHECKSUM_ATTR (attrs.at_friend);
7194 CHECKSUM_ATTR (attrs.at_alignment);
7195
7196 /* Checksum the child DIEs. */
7197 c = die->die_child;
7198 if (c) do {
7199 dw_attr_node *name_attr;
7200
7201 c = c->die_sib;
7202 name_attr = get_AT (c, DW_AT_name);
7203 if (is_template_instantiation (c))
7204 {
7205 /* Ignore instantiations of member type and function templates. */
7206 }
7207 else if (name_attr != NULL
7208 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7209 {
7210 /* Use a shallow checksum for named nested types and member
7211 functions. */
7212 CHECKSUM_ULEB128 ('S');
7213 CHECKSUM_ULEB128 (c->die_tag);
7214 CHECKSUM_STRING (AT_string (name_attr));
7215 }
7216 else
7217 {
7218 /* Use a deep checksum for other children. */
7219 /* Mark this DIE so it gets processed when unmarking. */
7220 if (c->die_mark == 0)
7221 c->die_mark = -1;
7222 die_checksum_ordered (c, ctx, mark);
7223 }
7224 } while (c != die->die_child);
7225
7226 CHECKSUM_ULEB128 (0);
7227 }
7228
7229 /* Add a type name and tag to a hash. */
7230 static void
7231 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7232 {
7233 CHECKSUM_ULEB128 (tag);
7234 CHECKSUM_STRING (name);
7235 }
7236
7237 #undef CHECKSUM
7238 #undef CHECKSUM_STRING
7239 #undef CHECKSUM_ATTR
7240 #undef CHECKSUM_LEB128
7241 #undef CHECKSUM_ULEB128
7242
7243 /* Generate the type signature for DIE. This is computed by generating an
7244 MD5 checksum over the DIE's tag, its relevant attributes, and its
7245 children. Attributes that are references to other DIEs are processed
7246 by recursion, using the MARK field to prevent infinite recursion.
7247 If the DIE is nested inside a namespace or another type, we also
7248 need to include that context in the signature. The lower 64 bits
7249 of the resulting MD5 checksum comprise the signature. */
7250
7251 static void
7252 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7253 {
7254 int mark;
7255 const char *name;
7256 unsigned char checksum[16];
7257 struct md5_ctx ctx;
7258 dw_die_ref decl;
7259 dw_die_ref parent;
7260
7261 name = get_AT_string (die, DW_AT_name);
7262 decl = get_AT_ref (die, DW_AT_specification);
7263 parent = get_die_parent (die);
7264
7265 /* First, compute a signature for just the type name (and its surrounding
7266 context, if any). This is stored in the type unit DIE for link-time
7267 ODR (one-definition rule) checking. */
7268
7269 if (is_cxx () && name != NULL)
7270 {
7271 md5_init_ctx (&ctx);
7272
7273 /* Checksum the names of surrounding namespaces and structures. */
7274 if (parent != NULL)
7275 checksum_die_context (parent, &ctx);
7276
7277 /* Checksum the current DIE. */
7278 die_odr_checksum (die->die_tag, name, &ctx);
7279 md5_finish_ctx (&ctx, checksum);
7280
7281 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7282 }
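/* Note that this hash covers only the names and tags of the type and its
enclosing scopes, so two conflicting definitions of the same name yield
the same ODR signature even though their full type signatures (computed
below) differ; presumably that is what lets a link-time ODR checker pair
the definitions up and flag the mismatch.  */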
7283
7284 /* Next, compute the complete type signature. */
7285
7286 md5_init_ctx (&ctx);
7287 mark = 1;
7288 die->die_mark = mark;
7289
7290 /* Checksum the names of surrounding namespaces and structures. */
7291 if (parent != NULL)
7292 checksum_die_context (parent, &ctx);
7293
7294 /* Checksum the DIE and its children. */
7295 die_checksum_ordered (die, &ctx, &mark);
7296 unmark_all_dies (die);
7297 md5_finish_ctx (&ctx, checksum);
7298
7299 /* Store the signature in the type node and link the type DIE and the
7300 type node together. */
7301 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7302 DWARF_TYPE_SIGNATURE_SIZE);
7303 die->comdat_type_p = true;
7304 die->die_id.die_type_node = type_node;
7305 type_node->type_die = die;
7306
7307 /* If the DIE is a specification, link its declaration to the type node
7308 as well. */
7309 if (decl != NULL)
7310 {
7311 decl->comdat_type_p = true;
7312 decl->die_id.die_type_node = type_node;
7313 }
7314 }
7315
7316 /* Do the location expressions look the same? */
7317 static inline int
7318 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7319 {
7320 return loc1->dw_loc_opc == loc2->dw_loc_opc
7321 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7322 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7323 }
7324
7325 /* Do the values look the same? */
7326 static int
7327 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7328 {
7329 dw_loc_descr_ref loc1, loc2;
7330 rtx r1, r2;
7331
7332 if (v1->val_class != v2->val_class)
7333 return 0;
7334
7335 switch (v1->val_class)
7336 {
7337 case dw_val_class_const:
7338 case dw_val_class_const_implicit:
7339 return v1->v.val_int == v2->v.val_int;
7340 case dw_val_class_unsigned_const:
7341 case dw_val_class_unsigned_const_implicit:
7342 return v1->v.val_unsigned == v2->v.val_unsigned;
7343 case dw_val_class_const_double:
7344 return v1->v.val_double.high == v2->v.val_double.high
7345 && v1->v.val_double.low == v2->v.val_double.low;
7346 case dw_val_class_wide_int:
7347 return *v1->v.val_wide == *v2->v.val_wide;
7348 case dw_val_class_vec:
7349 if (v1->v.val_vec.length != v2->v.val_vec.length
7350 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7351 return 0;
7352 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7353 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7354 return 0;
7355 return 1;
7356 case dw_val_class_flag:
7357 return v1->v.val_flag == v2->v.val_flag;
7358 case dw_val_class_str:
7359 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7360
7361 case dw_val_class_addr:
7362 r1 = v1->v.val_addr;
7363 r2 = v2->v.val_addr;
7364 if (GET_CODE (r1) != GET_CODE (r2))
7365 return 0;
7366 return rtx_equal_p (r1, r2);
7367
7368 case dw_val_class_offset:
7369 return v1->v.val_offset == v2->v.val_offset;
7370
7371 case dw_val_class_loc:
7372 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7373 loc1 && loc2;
7374 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7375 if (!same_loc_p (loc1, loc2, mark))
7376 return 0;
7377 return !loc1 && !loc2;
7378
7379 case dw_val_class_die_ref:
7380 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7381
7382 case dw_val_class_fde_ref:
7383 case dw_val_class_vms_delta:
7384 case dw_val_class_lbl_id:
7385 case dw_val_class_lineptr:
7386 case dw_val_class_macptr:
7387 case dw_val_class_loclistsptr:
7388 case dw_val_class_high_pc:
7389 return 1;
7390
7391 case dw_val_class_file:
7392 case dw_val_class_file_implicit:
7393 return v1->v.val_file == v2->v.val_file;
7394
7395 case dw_val_class_data8:
7396 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7397
7398 default:
7399 return 1;
7400 }
7401 }
7402
7403 /* Do the attributes look the same? */
7404
7405 static int
7406 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7407 {
7408 if (at1->dw_attr != at2->dw_attr)
7409 return 0;
7410
7411 /* We don't care that this was compiled with a different compiler
7412 snapshot; if the output is the same, that's what matters. */
7413 if (at1->dw_attr == DW_AT_producer)
7414 return 1;
7415
7416 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7417 }
7418
7419 /* Do the dies look the same? */
7420
7421 static int
7422 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7423 {
7424 dw_die_ref c1, c2;
7425 dw_attr_node *a1;
7426 unsigned ix;
7427
7428 /* To avoid infinite recursion. */
7429 if (die1->die_mark)
7430 return die1->die_mark == die2->die_mark;
7431 die1->die_mark = die2->die_mark = ++(*mark);
7432
7433 if (die1->die_tag != die2->die_tag)
7434 return 0;
7435
7436 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7437 return 0;
7438
7439 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7440 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7441 return 0;
7442
7443 c1 = die1->die_child;
7444 c2 = die2->die_child;
7445 if (! c1)
7446 {
7447 if (c2)
7448 return 0;
7449 }
7450 else
7451 for (;;)
7452 {
7453 if (!same_die_p (c1, c2, mark))
7454 return 0;
7455 c1 = c1->die_sib;
7456 c2 = c2->die_sib;
7457 if (c1 == die1->die_child)
7458 {
7459 if (c2 == die2->die_child)
7460 break;
7461 else
7462 return 0;
7463 }
7464 }
7465
7466 return 1;
7467 }
7468
7469 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7470 children, and set die_symbol. */
7471
7472 static void
7473 compute_comp_unit_symbol (dw_die_ref unit_die)
7474 {
7475 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7476 const char *base = die_name ? lbasename (die_name) : "anonymous";
7477 char *name = XALLOCAVEC (char, strlen (base) + 64);
7478 char *p;
7479 int i, mark;
7480 unsigned char checksum[16];
7481 struct md5_ctx ctx;
7482
7483 /* Compute the checksum of the DIE, then append part of it as hex digits to
7484 the base filename of the unit. */
7485
7486 md5_init_ctx (&ctx);
7487 mark = 0;
7488 die_checksum (unit_die, &ctx, &mark);
7489 unmark_all_dies (unit_die);
7490 md5_finish_ctx (&ctx, checksum);
7491
7492 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7493 not start with a letter but with anything valid for filenames and
7494 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7495 character is not a letter. */
7496 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7497 clean_symbol_name (name);
7498
7499 p = name + strlen (name);
7500 for (i = 0; i < 4; i++)
7501 {
7502 sprintf (p, "%.2x", checksum[i]);
7503 p += 2;
7504 }
7505
7506 unit_die->die_id.die_symbol = xstrdup (name);
7507 }
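/* For illustration: a unit whose DW_AT_name ends in "foo.c" gets a
die_symbol built from "foo.c", cleaned up so it is usable as an assembler
symbol, followed by eight hex digits taken from the first four bytes of
the checksum; a leading 'g' is prepended only when the basename does not
start with a letter.  */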
7508
7509 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7510
7511 static int
7512 is_type_die (dw_die_ref die)
7513 {
7514 switch (die->die_tag)
7515 {
7516 case DW_TAG_array_type:
7517 case DW_TAG_class_type:
7518 case DW_TAG_interface_type:
7519 case DW_TAG_enumeration_type:
7520 case DW_TAG_pointer_type:
7521 case DW_TAG_reference_type:
7522 case DW_TAG_rvalue_reference_type:
7523 case DW_TAG_string_type:
7524 case DW_TAG_structure_type:
7525 case DW_TAG_subroutine_type:
7526 case DW_TAG_union_type:
7527 case DW_TAG_ptr_to_member_type:
7528 case DW_TAG_set_type:
7529 case DW_TAG_subrange_type:
7530 case DW_TAG_base_type:
7531 case DW_TAG_const_type:
7532 case DW_TAG_file_type:
7533 case DW_TAG_packed_type:
7534 case DW_TAG_volatile_type:
7535 case DW_TAG_typedef:
7536 return 1;
7537 default:
7538 return 0;
7539 }
7540 }
7541
7542 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7543 Basically, we want to choose the bits that are likely to be shared between
7544 compilations (types) and leave out the bits that are specific to individual
7545 compilations (functions). */
7546
7547 static int
7548 is_comdat_die (dw_die_ref c)
7549 {
7550 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7551 we do for stabs. The advantage is a greater likelihood of sharing between
7552 objects that don't include headers in the same order (and therefore would
7553 put the base types in a different comdat). jason 8/28/00 */
7554
7555 if (c->die_tag == DW_TAG_base_type)
7556 return 0;
7557
7558 if (c->die_tag == DW_TAG_pointer_type
7559 || c->die_tag == DW_TAG_reference_type
7560 || c->die_tag == DW_TAG_rvalue_reference_type
7561 || c->die_tag == DW_TAG_const_type
7562 || c->die_tag == DW_TAG_volatile_type)
7563 {
7564 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7565
7566 return t ? is_comdat_die (t) : 0;
7567 }
7568
7569 return is_type_die (c);
7570 }
7571
7572 /* Returns true iff C is a compile-unit DIE. */
7573
7574 static inline bool
7575 is_cu_die (dw_die_ref c)
7576 {
7577 return c && (c->die_tag == DW_TAG_compile_unit
7578 || c->die_tag == DW_TAG_skeleton_unit);
7579 }
7580
7581 /* Returns true iff C is a unit DIE of some sort. */
7582
7583 static inline bool
7584 is_unit_die (dw_die_ref c)
7585 {
7586 return c && (c->die_tag == DW_TAG_compile_unit
7587 || c->die_tag == DW_TAG_partial_unit
7588 || c->die_tag == DW_TAG_type_unit
7589 || c->die_tag == DW_TAG_skeleton_unit);
7590 }
7591
7592 /* Returns true iff C is a namespace DIE. */
7593
7594 static inline bool
7595 is_namespace_die (dw_die_ref c)
7596 {
7597 return c && c->die_tag == DW_TAG_namespace;
7598 }
7599
7600 /* Returns true iff C is a class or structure DIE. */
7601
7602 static inline bool
7603 is_class_die (dw_die_ref c)
7604 {
7605 return c && (c->die_tag == DW_TAG_class_type
7606 || c->die_tag == DW_TAG_structure_type);
7607 }
7608
7609 /* Return non-zero if this DIE is a template parameter. */
7610
7611 static inline bool
7612 is_template_parameter (dw_die_ref die)
7613 {
7614 switch (die->die_tag)
7615 {
7616 case DW_TAG_template_type_param:
7617 case DW_TAG_template_value_param:
7618 case DW_TAG_GNU_template_template_param:
7619 case DW_TAG_GNU_template_parameter_pack:
7620 return true;
7621 default:
7622 return false;
7623 }
7624 }
7625
7626 /* Return non-zero if this DIE represents a template instantiation. */
7627
7628 static inline bool
7629 is_template_instantiation (dw_die_ref die)
7630 {
7631 dw_die_ref c;
7632
7633 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7634 return false;
7635 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7636 return false;
7637 }
7638
7639 static char *
7640 gen_internal_sym (const char *prefix)
7641 {
7642 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7643
7644 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7645 return xstrdup (buf);
7646 }
7647
7648 /* Return non-zero if this DIE is a declaration. */
7649
7650 static int
7651 is_declaration_die (dw_die_ref die)
7652 {
7653 dw_attr_node *a;
7654 unsigned ix;
7655
7656 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7657 if (a->dw_attr == DW_AT_declaration)
7658 return 1;
7659
7660 return 0;
7661 }
7662
7663 /* Return non-zero if this DIE is nested inside a subprogram. */
7664
7665 static int
7666 is_nested_in_subprogram (dw_die_ref die)
7667 {
7668 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7669
7670 if (decl == NULL)
7671 decl = die;
7672 return local_scope_p (decl);
7673 }
7674
7675 /* Return non-zero if this DIE contains a defining declaration of a
7676 subprogram. */
7677
7678 static int
7679 contains_subprogram_definition (dw_die_ref die)
7680 {
7681 dw_die_ref c;
7682
7683 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7684 return 1;
7685 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7686 return 0;
7687 }
7688
7689 /* Return non-zero if this is a type DIE that should be moved to a
7690 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7691 unit type. */
7692
7693 static int
7694 should_move_die_to_comdat (dw_die_ref die)
7695 {
7696 switch (die->die_tag)
7697 {
7698 case DW_TAG_class_type:
7699 case DW_TAG_structure_type:
7700 case DW_TAG_enumeration_type:
7701 case DW_TAG_union_type:
7702 /* Don't move declarations, inlined instances, types nested in a
7703 subprogram, or types that contain subprogram definitions. */
7704 if (is_declaration_die (die)
7705 || get_AT (die, DW_AT_abstract_origin)
7706 || is_nested_in_subprogram (die)
7707 || contains_subprogram_definition (die))
7708 return 0;
7709 return 1;
7710 case DW_TAG_array_type:
7711 case DW_TAG_interface_type:
7712 case DW_TAG_pointer_type:
7713 case DW_TAG_reference_type:
7714 case DW_TAG_rvalue_reference_type:
7715 case DW_TAG_string_type:
7716 case DW_TAG_subroutine_type:
7717 case DW_TAG_ptr_to_member_type:
7718 case DW_TAG_set_type:
7719 case DW_TAG_subrange_type:
7720 case DW_TAG_base_type:
7721 case DW_TAG_const_type:
7722 case DW_TAG_file_type:
7723 case DW_TAG_packed_type:
7724 case DW_TAG_volatile_type:
7725 case DW_TAG_typedef:
7726 default:
7727 return 0;
7728 }
7729 }
7730
7731 /* Make a clone of DIE. */
7732
7733 static dw_die_ref
7734 clone_die (dw_die_ref die)
7735 {
7736 dw_die_ref clone = new_die_raw (die->die_tag);
7737 dw_attr_node *a;
7738 unsigned ix;
7739
7740 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7741 add_dwarf_attr (clone, a);
7742
7743 return clone;
7744 }
7745
7746 /* Make a clone of the tree rooted at DIE. */
7747
7748 static dw_die_ref
7749 clone_tree (dw_die_ref die)
7750 {
7751 dw_die_ref c;
7752 dw_die_ref clone = clone_die (die);
7753
7754 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7755
7756 return clone;
7757 }
7758
7759 /* Make a clone of DIE as a declaration. */
7760
7761 static dw_die_ref
7762 clone_as_declaration (dw_die_ref die)
7763 {
7764 dw_die_ref clone;
7765 dw_die_ref decl;
7766 dw_attr_node *a;
7767 unsigned ix;
7768
7769 /* If the DIE is already a declaration, just clone it. */
7770 if (is_declaration_die (die))
7771 return clone_die (die);
7772
7773 /* If the DIE is a specification, just clone its declaration DIE. */
7774 decl = get_AT_ref (die, DW_AT_specification);
7775 if (decl != NULL)
7776 {
7777 clone = clone_die (decl);
7778 if (die->comdat_type_p)
7779 add_AT_die_ref (clone, DW_AT_signature, die);
7780 return clone;
7781 }
7782
7783 clone = new_die_raw (die->die_tag);
7784
7785 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7786 {
7787 /* We don't want to copy over all attributes.
7788 For example, we don't want DW_AT_byte_size, because otherwise we will no
7789 longer have a declaration and GDB will treat it as a definition. */
7790
7791 switch (a->dw_attr)
7792 {
7793 case DW_AT_abstract_origin:
7794 case DW_AT_artificial:
7795 case DW_AT_containing_type:
7796 case DW_AT_external:
7797 case DW_AT_name:
7798 case DW_AT_type:
7799 case DW_AT_virtuality:
7800 case DW_AT_linkage_name:
7801 case DW_AT_MIPS_linkage_name:
7802 add_dwarf_attr (clone, a);
7803 break;
7804 case DW_AT_byte_size:
7805 case DW_AT_alignment:
7806 default:
7807 break;
7808 }
7809 }
7810
7811 if (die->comdat_type_p)
7812 add_AT_die_ref (clone, DW_AT_signature, die);
7813
7814 add_AT_flag (clone, DW_AT_declaration, 1);
7815 return clone;
7816 }
7817
7818
7819 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7820
7821 struct decl_table_entry
7822 {
7823 dw_die_ref orig;
7824 dw_die_ref copy;
7825 };
7826
7827 /* Helpers to manipulate hash table of copied declarations. */
7828
7829 /* Hashtable helpers. */
7830
7831 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7832 {
7833 typedef die_struct *compare_type;
7834 static inline hashval_t hash (const decl_table_entry *);
7835 static inline bool equal (const decl_table_entry *, const die_struct *);
7836 };
7837
7838 inline hashval_t
7839 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7840 {
7841 return htab_hash_pointer (entry->orig);
7842 }
7843
7844 inline bool
7845 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7846 const die_struct *entry2)
7847 {
7848 return entry1->orig == entry2;
7849 }
7850
7851 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7852
7853 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7854 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7855 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7856 to check if the ancestor has already been copied into UNIT. */
7857
7858 static dw_die_ref
7859 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7860 decl_hash_type *decl_table)
7861 {
7862 dw_die_ref parent = die->die_parent;
7863 dw_die_ref new_parent = unit;
7864 dw_die_ref copy;
7865 decl_table_entry **slot = NULL;
7866 struct decl_table_entry *entry = NULL;
7867
7868 if (decl_table)
7869 {
7870 /* Check if the entry has already been copied to UNIT. */
7871 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7872 INSERT);
7873 if (*slot != HTAB_EMPTY_ENTRY)
7874 {
7875 entry = *slot;
7876 return entry->copy;
7877 }
7878
7879 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7880 entry = XCNEW (struct decl_table_entry);
7881 entry->orig = die;
7882 entry->copy = NULL;
7883 *slot = entry;
7884 }
7885
7886 if (parent != NULL)
7887 {
7888 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7889 if (spec != NULL)
7890 parent = spec;
7891 if (!is_unit_die (parent))
7892 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7893 }
7894
7895 copy = clone_as_declaration (die);
7896 add_child_die (new_parent, copy);
7897
7898 if (decl_table)
7899 {
7900 /* Record the pointer to the copy. */
7901 entry->copy = copy;
7902 }
7903
7904 return copy;
7905 }
7906 /* Copy the declaration context to the new type unit DIE. This includes
7907 any surrounding namespace or type declarations. If the DIE has an
7908 AT_specification attribute, it also includes attributes and children
7909 attached to the specification, and returns a pointer to the original
7910 parent of the declaration DIE. Returns NULL otherwise. */
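/* For example, when a member function definition has a DW_AT_specification
that points at a declaration nested in "namespace N { struct S ... }",
copies of N, S and the declaration are created in the type unit (via
copy_ancestor_tree), the definition DIE is rewired to the copied
declaration, and the original parent of the declaration (the S DIE in the
main CU) is returned so the caller can put the skeleton there.  */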
7911
7912 static dw_die_ref
7913 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7914 {
7915 dw_die_ref decl;
7916 dw_die_ref new_decl;
7917 dw_die_ref orig_parent = NULL;
7918
7919 decl = get_AT_ref (die, DW_AT_specification);
7920 if (decl == NULL)
7921 decl = die;
7922 else
7923 {
7924 unsigned ix;
7925 dw_die_ref c;
7926 dw_attr_node *a;
7927
7928 /* The original DIE will be changed to a declaration, and must
7929 be moved to be a child of the original declaration DIE. */
7930 orig_parent = decl->die_parent;
7931
7932 /* Copy the type node pointer from the new DIE to the original
7933 declaration DIE so we can forward references later. */
7934 decl->comdat_type_p = true;
7935 decl->die_id.die_type_node = die->die_id.die_type_node;
7936
7937 remove_AT (die, DW_AT_specification);
7938
7939 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7940 {
7941 if (a->dw_attr != DW_AT_name
7942 && a->dw_attr != DW_AT_declaration
7943 && a->dw_attr != DW_AT_external)
7944 add_dwarf_attr (die, a);
7945 }
7946
7947 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7948 }
7949
7950 if (decl->die_parent != NULL
7951 && !is_unit_die (decl->die_parent))
7952 {
7953 new_decl = copy_ancestor_tree (unit, decl, NULL);
7954 if (new_decl != NULL)
7955 {
7956 remove_AT (new_decl, DW_AT_signature);
7957 add_AT_specification (die, new_decl);
7958 }
7959 }
7960
7961 return orig_parent;
7962 }
7963
7964 /* Generate the skeleton ancestor tree for the given NODE, then clone
7965 the DIE and add the clone into the tree. */
7966
7967 static void
7968 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7969 {
7970 if (node->new_die != NULL)
7971 return;
7972
7973 node->new_die = clone_as_declaration (node->old_die);
7974
7975 if (node->parent != NULL)
7976 {
7977 generate_skeleton_ancestor_tree (node->parent);
7978 add_child_die (node->parent->new_die, node->new_die);
7979 }
7980 }
7981
7982 /* Generate a skeleton tree of DIEs containing any declarations that are
7983 found in the original tree. We traverse the tree looking for declaration
7984 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7985
7986 static void
7987 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7988 {
7989 skeleton_chain_node node;
7990 dw_die_ref c;
7991 dw_die_ref first;
7992 dw_die_ref prev = NULL;
7993 dw_die_ref next = NULL;
7994
7995 node.parent = parent;
7996
7997 first = c = parent->old_die->die_child;
7998 if (c)
7999 next = c->die_sib;
8000 if (c) do {
8001 if (prev == NULL || prev->die_sib == c)
8002 prev = c;
8003 c = next;
8004 next = (c == first ? NULL : c->die_sib);
8005 node.old_die = c;
8006 node.new_die = NULL;
8007 if (is_declaration_die (c))
8008 {
8009 if (is_template_instantiation (c))
8010 {
8011 /* Instantiated templates do not need to be cloned into the
8012 type unit. Just move the DIE and its children back to
8013 the skeleton tree (in the main CU). */
8014 remove_child_with_prev (c, prev);
8015 add_child_die (parent->new_die, c);
8016 c = prev;
8017 }
8018 else if (c->comdat_type_p)
8019 {
8020 /* This is the skeleton of a type broken out by an earlier
8021 break_out_comdat_types call. Clone the existing DIE, but keep the
8022 children under the original (which is in the main CU). */
8023 dw_die_ref clone = clone_die (c);
8024
8025 replace_child (c, clone, prev);
8026 generate_skeleton_ancestor_tree (parent);
8027 add_child_die (parent->new_die, c);
8028 c = clone;
8029 continue;
8030 }
8031 else
8032 {
8033 /* Clone the existing DIE, move the original to the skeleton
8034 tree (which is in the main CU), and put the clone, with
8035 all the original's children, where the original came from
8036 (which is about to be moved to the type unit). */
8037 dw_die_ref clone = clone_die (c);
8038 move_all_children (c, clone);
8039
8040 /* If the original has a DW_AT_object_pointer attribute,
8041 it would now point to a child DIE just moved to the
8042 cloned tree, so we need to remove that attribute from
8043 the original. */
8044 remove_AT (c, DW_AT_object_pointer);
8045
8046 replace_child (c, clone, prev);
8047 generate_skeleton_ancestor_tree (parent);
8048 add_child_die (parent->new_die, c);
8049 node.old_die = clone;
8050 node.new_die = c;
8051 c = clone;
8052 }
8053 }
8054 generate_skeleton_bottom_up (&node);
8055 } while (next != NULL);
8056 }
8057
8058 /* Wrapper function for generate_skeleton_bottom_up. */
8059
8060 static dw_die_ref
8061 generate_skeleton (dw_die_ref die)
8062 {
8063 skeleton_chain_node node;
8064
8065 node.old_die = die;
8066 node.new_die = NULL;
8067 node.parent = NULL;
8068
8069 /* If this type definition is nested inside another type,
8070 and is not an instantiation of a template, always leave
8071 at least a declaration in its place. */
8072 if (die->die_parent != NULL
8073 && is_type_die (die->die_parent)
8074 && !is_template_instantiation (die))
8075 node.new_die = clone_as_declaration (die);
8076
8077 generate_skeleton_bottom_up (&node);
8078 return node.new_die;
8079 }
8080
8081 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8082 declaration. The original DIE is moved to a new compile unit so that
8083 existing references to it follow it to the new location. If any of the
8084 original DIE's descendants is a declaration, we need to replace the
8085 original DIE with a skeleton tree and move the declarations back into the
8086 skeleton tree. */
8087
8088 static dw_die_ref
8089 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8090 dw_die_ref prev)
8091 {
8092 dw_die_ref skeleton, orig_parent;
8093
8094 /* Copy the declaration context to the type unit DIE. If the returned
8095 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8096 that DIE. */
8097 orig_parent = copy_declaration_context (unit, child);
8098
8099 skeleton = generate_skeleton (child);
8100 if (skeleton == NULL)
8101 remove_child_with_prev (child, prev);
8102 else
8103 {
8104 skeleton->comdat_type_p = true;
8105 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8106
8107 /* If the original DIE was a specification, we need to put
8108 the skeleton under the parent DIE of the declaration.
8109 This leaves the original declaration in the tree, but
8110 it will be pruned later since there are no longer any
8111 references to it. */
8112 if (orig_parent != NULL)
8113 {
8114 remove_child_with_prev (child, prev);
8115 add_child_die (orig_parent, skeleton);
8116 }
8117 else
8118 replace_child (child, skeleton, prev);
8119 }
8120
8121 return skeleton;
8122 }
8123
8124 static void
8125 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8126 comdat_type_node *type_node,
8127 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8128
8129 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DWARF
8130 procedure DIE, put it under TYPE_NODE and return the copy. Continue looking for
8131 DWARF procedure references in the DW_AT_location attribute. */
8132
8133 static dw_die_ref
8134 copy_dwarf_procedure (dw_die_ref die,
8135 comdat_type_node *type_node,
8136 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8137 {
8138 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8139
8140 /* DWARF procedures are not supposed to have children... */
8141 gcc_assert (die->die_child == NULL);
8142
8143 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8144 gcc_assert (vec_safe_length (die->die_attr) == 1
8145 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8146
8147 /* Do not copy DWARF procedures more than once. */
8148 bool existed;
8149 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8150 if (existed)
8151 return die_copy;
8152
8153 die_copy = clone_die (die);
8154 add_child_die (type_node->root_die, die_copy);
8155 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8156 return die_copy;
8157 }
8158
8159 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8160 procedures in DIE's attributes. */
8161
8162 static void
8163 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8164 comdat_type_node *type_node,
8165 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8166 {
8167 dw_attr_node *a;
8168 unsigned i;
8169
8170 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8171 {
8172 dw_loc_descr_ref loc;
8173
8174 if (a->dw_attr_val.val_class != dw_val_class_loc)
8175 continue;
8176
8177 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8178 {
8179 switch (loc->dw_loc_opc)
8180 {
8181 case DW_OP_call2:
8182 case DW_OP_call4:
8183 case DW_OP_call_ref:
8184 gcc_assert (loc->dw_loc_oprnd1.val_class
8185 == dw_val_class_die_ref);
8186 loc->dw_loc_oprnd1.v.val_die_ref.die
8187 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8188 type_node,
8189 copied_dwarf_procs);
8190
8191 default:
8192 break;
8193 }
8194 }
8195 }
8196 }
8197
8198 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8199 rewrite references to point to the copies.
8200
8201 References are looked for in DIE's attributes and recursively in the
8202 attributes of all its children that are location descriptions.
8203 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8204 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
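/* A typical case (an illustration, not tied to one front end): a type whose
DW_AT_byte_size is a location expression that uses DW_OP_call4 to invoke a
DW_TAG_dwarf_procedure.  When such a type moves to a type unit, the
procedure must be copied along with it, since the call cannot cross
sections.  */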
8205
8206 static void
8207 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8208 comdat_type_node *type_node,
8209 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8210 {
8211 dw_die_ref c;
8212
8213 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8214 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8215 type_node,
8216 copied_dwarf_procs));
8217 }
8218
8219 /* Traverse the DIE and set up additional .debug_types or .debug_info
8220 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8221 section. */
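/* For example, a complete "struct S" defined in a header typically ends up
as the child of a new DW_TAG_type_unit with its own 8-byte signature,
while the main CU keeps at most a skeleton declaration that refers back to
the type unit through DW_AT_signature.  */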
8222
8223 static void
8224 break_out_comdat_types (dw_die_ref die)
8225 {
8226 dw_die_ref c;
8227 dw_die_ref first;
8228 dw_die_ref prev = NULL;
8229 dw_die_ref next = NULL;
8230 dw_die_ref unit = NULL;
8231
8232 first = c = die->die_child;
8233 if (c)
8234 next = c->die_sib;
8235 if (c) do {
8236 if (prev == NULL || prev->die_sib == c)
8237 prev = c;
8238 c = next;
8239 next = (c == first ? NULL : c->die_sib);
8240 if (should_move_die_to_comdat (c))
8241 {
8242 dw_die_ref replacement;
8243 comdat_type_node *type_node;
8244
8245 /* Break out nested types into their own type units. */
8246 break_out_comdat_types (c);
8247
8248 /* Create a new type unit DIE as the root for the new tree, and
8249 add it to the list of comdat types. */
8250 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8251 add_AT_unsigned (unit, DW_AT_language,
8252 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8253 type_node = ggc_cleared_alloc<comdat_type_node> ();
8254 type_node->root_die = unit;
8255 type_node->next = comdat_type_list;
8256 comdat_type_list = type_node;
8257
8258 /* Generate the type signature. */
8259 generate_type_signature (c, type_node);
8260
8261 /* Copy the declaration context, attributes, and children of the
8262 declaration into the new type unit DIE, then remove this DIE
8263 from the main CU (or replace it with a skeleton if necessary). */
8264 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8265 type_node->skeleton_die = replacement;
8266
8267 /* Add the DIE to the new compunit. */
8268 add_child_die (unit, c);
8269
8270 /* Types can reference DWARF procedures for type size or data location
8271 expressions. Calls in DWARF expressions cannot target procedures
8272 that are not in the same section. So we must copy DWARF procedures
8273 along with this type and then rewrite references to them. */
8274 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8275 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8276
8277 if (replacement != NULL)
8278 c = replacement;
8279 }
8280 else if (c->die_tag == DW_TAG_namespace
8281 || c->die_tag == DW_TAG_class_type
8282 || c->die_tag == DW_TAG_structure_type
8283 || c->die_tag == DW_TAG_union_type)
8284 {
8285 /* Look for nested types that can be broken out. */
8286 break_out_comdat_types (c);
8287 }
8288 } while (next != NULL);
8289 }
8290
8291 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8292 Enter all the cloned children into the hash table decl_table. */
8293
8294 static dw_die_ref
8295 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8296 {
8297 dw_die_ref c;
8298 dw_die_ref clone;
8299 struct decl_table_entry *entry;
8300 decl_table_entry **slot;
8301
8302 if (die->die_tag == DW_TAG_subprogram)
8303 clone = clone_as_declaration (die);
8304 else
8305 clone = clone_die (die);
8306
8307 slot = decl_table->find_slot_with_hash (die,
8308 htab_hash_pointer (die), INSERT);
8309
8310 /* Assert that DIE isn't in the hash table yet. If it were already
8311 there, its ancestors would necessarily be there as well, and therefore
8312 clone_tree_partial wouldn't have been called. */
8313 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8314
8315 entry = XCNEW (struct decl_table_entry);
8316 entry->orig = die;
8317 entry->copy = clone;
8318 *slot = entry;
8319
8320 if (die->die_tag != DW_TAG_subprogram)
8321 FOR_EACH_CHILD (die, c,
8322 add_child_die (clone, clone_tree_partial (c, decl_table)));
8323
8324 return clone;
8325 }
8326
8327 /* Walk the DIE and its children, looking for references to incomplete
8328 or trivial types that are unmarked (i.e., that are not in the current
8329 type_unit). */
8330
8331 static void
8332 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8333 {
8334 dw_die_ref c;
8335 dw_attr_node *a;
8336 unsigned ix;
8337
8338 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8339 {
8340 if (AT_class (a) == dw_val_class_die_ref)
8341 {
8342 dw_die_ref targ = AT_ref (a);
8343 decl_table_entry **slot;
8344 struct decl_table_entry *entry;
8345
8346 if (targ->die_mark != 0 || targ->comdat_type_p)
8347 continue;
8348
8349 slot = decl_table->find_slot_with_hash (targ,
8350 htab_hash_pointer (targ),
8351 INSERT);
8352
8353 if (*slot != HTAB_EMPTY_ENTRY)
8354 {
8355 /* TARG has already been copied, so we just need to
8356 modify the reference to point to the copy. */
8357 entry = *slot;
8358 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8359 }
8360 else
8361 {
8362 dw_die_ref parent = unit;
8363 dw_die_ref copy = clone_die (targ);
8364
8365 /* Record in DECL_TABLE that TARG has been copied.
8366 Need to do this now, before the recursive call,
8367 because DECL_TABLE may be expanded and SLOT
8368 would no longer be a valid pointer. */
8369 entry = XCNEW (struct decl_table_entry);
8370 entry->orig = targ;
8371 entry->copy = copy;
8372 *slot = entry;
8373
8374 /* If TARG is not a declaration DIE, we need to copy its
8375 children. */
8376 if (!is_declaration_die (targ))
8377 {
8378 FOR_EACH_CHILD (
8379 targ, c,
8380 add_child_die (copy,
8381 clone_tree_partial (c, decl_table)));
8382 }
8383
8384 /* Make sure the cloned tree is marked as part of the
8385 type unit. */
8386 mark_dies (copy);
8387
8388 /* If TARG has surrounding context, copy its ancestor tree
8389 into the new type unit. */
8390 if (targ->die_parent != NULL
8391 && !is_unit_die (targ->die_parent))
8392 parent = copy_ancestor_tree (unit, targ->die_parent,
8393 decl_table);
8394
8395 add_child_die (parent, copy);
8396 a->dw_attr_val.v.val_die_ref.die = copy;
8397
8398 /* Make sure the newly-copied DIE is walked. If it was
8399 installed in a previously-added context, it won't
8400 get visited otherwise. */
8401 if (parent != unit)
8402 {
8403 /* Find the highest point of the newly-added tree,
8404 mark each node along the way, and walk from there. */
8405 parent->die_mark = 1;
8406 while (parent->die_parent
8407 && parent->die_parent->die_mark == 0)
8408 {
8409 parent = parent->die_parent;
8410 parent->die_mark = 1;
8411 }
8412 copy_decls_walk (unit, parent, decl_table);
8413 }
8414 }
8415 }
8416 }
8417
8418 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8419 }
8420
8421 /* Copy declarations for "unworthy" types into the new comdat section.
8422 Incomplete types, modified types, and certain other types aren't broken
8423 out into comdat sections of their own, so they don't have a signature,
8424 and we need to copy the declaration into the same section so that we
8425 don't have an external reference. */
8426
8427 static void
8428 copy_decls_for_unworthy_types (dw_die_ref unit)
8429 {
8430 mark_dies (unit);
8431 decl_hash_type decl_table (10);
8432 copy_decls_walk (unit, unit, &decl_table);
8433 unmark_dies (unit);
8434 }
8435
8436 /* Traverse the DIE and add a sibling attribute if it may have the
8437 effect of speeding up access to siblings. To save some space,
8438 avoid generating sibling attributes for DIEs without children. */
8439
8440 static void
8441 add_sibling_attributes (dw_die_ref die)
8442 {
8443 dw_die_ref c;
8444
8445 if (! die->die_child)
8446 return;
8447
8448 if (die->die_parent && die != die->die_parent->die_child)
8449 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8450
8451 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8452 }
8453
8454 /* Output all location lists for the DIE and its children. */
8455
8456 static void
8457 output_location_lists (dw_die_ref die)
8458 {
8459 dw_die_ref c;
8460 dw_attr_node *a;
8461 unsigned ix;
8462
8463 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8464 if (AT_class (a) == dw_val_class_loc_list)
8465 output_loc_list (AT_loc_list (a));
8466
8467 FOR_EACH_CHILD (die, c, output_location_lists (c));
8468 }
8469
8470 /* During assign_location_list_indexes and output_loclists_offsets this is
8471 the current index; afterwards it holds the number of assigned indexes
8472 (i.e. how large the .debug_loclists* offset table should be). */
8473 static unsigned int loc_list_idx;
8474
8475 /* Output all location list offsets for the DIE and its children. */
8476
8477 static void
8478 output_loclists_offsets (dw_die_ref die)
8479 {
8480 dw_die_ref c;
8481 dw_attr_node *a;
8482 unsigned ix;
8483
8484 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8485 if (AT_class (a) == dw_val_class_loc_list)
8486 {
8487 dw_loc_list_ref l = AT_loc_list (a);
8488 if (l->offset_emitted)
8489 continue;
8490 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8491 loc_section_label, NULL);
8492 gcc_assert (l->hash == loc_list_idx);
8493 loc_list_idx++;
8494 l->offset_emitted = true;
8495 }
8496
8497 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8498 }
8499
8500 /* Recursively set indexes of location lists. */
8501
8502 static void
8503 assign_location_list_indexes (dw_die_ref die)
8504 {
8505 dw_die_ref c;
8506 dw_attr_node *a;
8507 unsigned ix;
8508
8509 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8510 if (AT_class (a) == dw_val_class_loc_list)
8511 {
8512 dw_loc_list_ref list = AT_loc_list (a);
8513 if (!list->num_assigned)
8514 {
8515 list->num_assigned = true;
8516 list->hash = loc_list_idx++;
8517 }
8518 }
8519
8520 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8521 }
8522
8523 /* We want to limit the number of external references, because they are
8524 larger than local references: a relocation takes multiple words, and
8525 even a sig8 reference is always eight bytes, whereas a local reference
8526 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8527 So if we encounter multiple external references to the same type DIE, we
8528 make a local typedef stub for it and redirect all references there.
8529
8530 This is the element of the hash table for keeping track of these
8531 references. */
8532
8533 struct external_ref
8534 {
8535 dw_die_ref type;
8536 dw_die_ref stub;
8537 unsigned n_refs;
8538 };
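/* As an example of the payoff: if a CU refers many times to a type that
lives in a comdat type unit, dwarf2_build_local_stub below creates one
local stub DIE carrying DW_AT_signature, and build_abbrev_table then
redirects every such reference to that stub, so each reference becomes a
small CU-local offset instead of an eight-byte signature.  */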
8539
8540 /* Hashtable helpers. */
8541
8542 struct external_ref_hasher : free_ptr_hash <external_ref>
8543 {
8544 static inline hashval_t hash (const external_ref *);
8545 static inline bool equal (const external_ref *, const external_ref *);
8546 };
8547
8548 inline hashval_t
8549 external_ref_hasher::hash (const external_ref *r)
8550 {
8551 dw_die_ref die = r->type;
8552 hashval_t h = 0;
8553
8554 /* We can't use the address of the DIE for hashing, because
8555 that will make the order of the stub DIEs non-deterministic. */
8556 if (! die->comdat_type_p)
8557 /* We have a symbol; use it to compute a hash. */
8558 h = htab_hash_string (die->die_id.die_symbol);
8559 else
8560 {
8561 /* We have a type signature; use a subset of the bits as the hash.
8562 The 8-byte signature is at least as large as hashval_t. */
8563 comdat_type_node *type_node = die->die_id.die_type_node;
8564 memcpy (&h, type_node->signature, sizeof (h));
8565 }
8566 return h;
8567 }
8568
8569 inline bool
8570 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8571 {
8572 return r1->type == r2->type;
8573 }
8574
8575 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8576
8577 /* Return a pointer to the external_ref for references to DIE. */
8578
8579 static struct external_ref *
8580 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8581 {
8582 struct external_ref ref, *ref_p;
8583 external_ref **slot;
8584
8585 ref.type = die;
8586 slot = map->find_slot (&ref, INSERT);
8587 if (*slot != HTAB_EMPTY_ENTRY)
8588 return *slot;
8589
8590 ref_p = XCNEW (struct external_ref);
8591 ref_p->type = die;
8592 *slot = ref_p;
8593 return ref_p;
8594 }
8595
8596 /* Subroutine of optimize_external_refs, below.
8597
8598 If we see a type skeleton, record it as our stub. If we see external
8599 references, remember how many we've seen. */
8600
8601 static void
8602 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8603 {
8604 dw_die_ref c;
8605 dw_attr_node *a;
8606 unsigned ix;
8607 struct external_ref *ref_p;
8608
8609 if (is_type_die (die)
8610 && (c = get_AT_ref (die, DW_AT_signature)))
8611 {
8612 /* This is a local skeleton; use it for local references. */
8613 ref_p = lookup_external_ref (map, c);
8614 ref_p->stub = die;
8615 }
8616
8617 /* Scan the DIE references, and remember any that refer to DIEs from
8618 other CUs (i.e. those which are not marked). */
8619 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8620 if (AT_class (a) == dw_val_class_die_ref
8621 && (c = AT_ref (a))->die_mark == 0
8622 && is_type_die (c))
8623 {
8624 ref_p = lookup_external_ref (map, c);
8625 ref_p->n_refs++;
8626 }
8627
8628 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8629 }
8630
8631 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8632 points to an external_ref, DATA is the CU we're processing. If we don't
8633 already have a local stub, and we have multiple refs, build a stub. */
8634
8635 int
8636 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8637 {
8638 struct external_ref *ref_p = *slot;
8639
8640 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8641 {
8642 /* We have multiple references to this type, so build a small stub.
8643 Both of these forms are a bit dodgy from the perspective of the
8644 DWARF standard, since technically they should have names. */
8645 dw_die_ref cu = data;
8646 dw_die_ref type = ref_p->type;
8647 dw_die_ref stub = NULL;
8648
8649 if (type->comdat_type_p)
8650 {
8651 /* If we refer to this type via sig8, use AT_signature. */
8652 stub = new_die (type->die_tag, cu, NULL_TREE);
8653 add_AT_die_ref (stub, DW_AT_signature, type);
8654 }
8655 else
8656 {
8657 /* Otherwise, use a typedef with no name. */
8658 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8659 add_AT_die_ref (stub, DW_AT_type, type);
8660 }
8661
8662 stub->die_mark++;
8663 ref_p->stub = stub;
8664 }
8665 return 1;
8666 }
8667
8668 /* DIE is a unit; look through all the DIE references to see if there are
8669 any external references to types, and if so, create local stubs for
8670 them which will be applied in build_abbrev_table. This is useful because
8671 references to local DIEs are smaller. */
8672
8673 static external_ref_hash_type *
8674 optimize_external_refs (dw_die_ref die)
8675 {
8676 external_ref_hash_type *map = new external_ref_hash_type (10);
8677 optimize_external_refs_1 (die, map);
8678 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8679 return map;
8680 }
8681
8682 /* The following 4 variables are temporaries that are computed only during the
8683 build_abbrev_table call and used and released during the following
8684 optimize_abbrev_table call. */
8685
8686 /* First abbrev_id that can be optimized based on usage. */
8687 static unsigned int abbrev_opt_start;
8688
8689 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8690 abbrev_id smaller than this, because they must be already sized
8691 during build_abbrev_table). */
8692 static unsigned int abbrev_opt_base_type_end;
8693
8694 /* Vector of usage counts during build_abbrev_table. Indexed by
8695 abbrev_id - abbrev_opt_start. */
8696 static vec<unsigned int> abbrev_usage_count;
8697
8698 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8699 static vec<dw_die_ref> sorted_abbrev_dies;
8700
8701 /* The format of each DIE (and its attribute value pairs) is encoded in an
8702 abbreviation table. This routine builds the abbreviation table and assigns
8703 a unique abbreviation id for each abbreviation entry. The children of each
8704 die are visited recursively. */
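/* For instance, two DW_TAG_variable DIEs that carry DW_AT_name and
DW_AT_type with the same forms, in the same order, share a single
abbreviation entry; a third one that also has DW_AT_external gets a new
entry of its own.  */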
8705
8706 static void
8707 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8708 {
8709 unsigned int abbrev_id = 0;
8710 dw_die_ref c;
8711 dw_attr_node *a;
8712 unsigned ix;
8713 dw_die_ref abbrev;
8714
8715 /* Scan the DIE references, and replace any that refer to
8716 DIEs from other CUs (i.e. those which are not marked) with
8717 the local stubs we built in optimize_external_refs. */
8718 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8719 if (AT_class (a) == dw_val_class_die_ref
8720 && (c = AT_ref (a))->die_mark == 0)
8721 {
8722 struct external_ref *ref_p;
8723 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8724
8725 ref_p = lookup_external_ref (extern_map, c);
8726 if (ref_p->stub && ref_p->stub != die)
8727 change_AT_die_ref (a, ref_p->stub);
8728 else
8729 /* We aren't changing this reference, so mark it external. */
8730 set_AT_ref_external (a, 1);
8731 }
8732
8733 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8734 {
8735 dw_attr_node *die_a, *abbrev_a;
8736 unsigned ix;
8737 bool ok = true;
8738
8739 if (abbrev_id == 0)
8740 continue;
8741 if (abbrev->die_tag != die->die_tag)
8742 continue;
8743 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8744 continue;
8745
8746 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8747 continue;
8748
8749 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8750 {
8751 abbrev_a = &(*abbrev->die_attr)[ix];
8752 if ((abbrev_a->dw_attr != die_a->dw_attr)
8753 || (value_format (abbrev_a) != value_format (die_a)))
8754 {
8755 ok = false;
8756 break;
8757 }
8758 }
8759 if (ok)
8760 break;
8761 }
8762
8763 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8764 {
8765 vec_safe_push (abbrev_die_table, die);
8766 if (abbrev_opt_start)
8767 abbrev_usage_count.safe_push (0);
8768 }
8769 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8770 {
8771 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8772 sorted_abbrev_dies.safe_push (die);
8773 }
8774
8775 die->die_abbrev = abbrev_id;
8776 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8777 }
8778
8779 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8780 by die_abbrev's usage count, from the most commonly used
8781 abbreviation to the least. */
8782
8783 static int
8784 die_abbrev_cmp (const void *p1, const void *p2)
8785 {
8786 dw_die_ref die1 = *(const dw_die_ref *) p1;
8787 dw_die_ref die2 = *(const dw_die_ref *) p2;
8788
8789 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8790 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8791
8792 if (die1->die_abbrev >= abbrev_opt_base_type_end
8793 && die2->die_abbrev >= abbrev_opt_base_type_end)
8794 {
8795 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8796 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8797 return -1;
8798 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8799 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8800 return 1;
8801 }
8802
8803 /* Stabilize the sort. */
8804 if (die1->die_abbrev < die2->die_abbrev)
8805 return -1;
8806 if (die1->die_abbrev > die2->die_abbrev)
8807 return 1;
8808
8809 return 0;
8810 }
8811
8812 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
8813 of the DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
8814 into dw_val_class_const_implicit or
8815 dw_val_class_unsigned_const_implicit. */
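/* As a rough example of the trade-off: if dozens of DIEs share one
abbreviation and every one of them has DW_AT_byte_size 4, the value can be
moved into the abbreviation as DW_FORM_implicit_const, growing
.debug_abbrev by a single sleb128 while each DIE drops its one-byte
constant.  */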
8816
8817 static void
8818 optimize_implicit_const (unsigned int first_id, unsigned int end,
8819 vec<bool> &implicit_consts)
8820 {
8821 /* It never makes sense if there is just one DIE using the abbreviation. */
8822 if (end < first_id + 2)
8823 return;
8824
8825 dw_attr_node *a;
8826 unsigned ix, i;
8827 dw_die_ref die = sorted_abbrev_dies[first_id];
8828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8829 if (implicit_consts[ix])
8830 {
8831 enum dw_val_class new_class = dw_val_class_none;
8832 switch (AT_class (a))
8833 {
8834 case dw_val_class_unsigned_const:
8835 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8836 continue;
8837
8838 /* The .debug_abbrev section will grow by
8839 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
8840 in all the DIEs using that abbreviation. */
8841 if (constant_size (AT_unsigned (a)) * (end - first_id)
8842 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8843 continue;
8844
8845 new_class = dw_val_class_unsigned_const_implicit;
8846 break;
8847
8848 case dw_val_class_const:
8849 new_class = dw_val_class_const_implicit;
8850 break;
8851
8852 case dw_val_class_file:
8853 new_class = dw_val_class_file_implicit;
8854 break;
8855
8856 default:
8857 continue;
8858 }
8859 for (i = first_id; i < end; i++)
8860 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8861 = new_class;
8862 }
8863 }
8864
8865 /* Attempt to optimize the abbreviation table from the abbrev_opt_start
8866 abbreviation onwards. */
8867
8868 static void
8869 optimize_abbrev_table (void)
8870 {
8871 if (abbrev_opt_start
8872 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8873 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8874 {
8875 auto_vec<bool, 32> implicit_consts;
8876 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8877
8878 unsigned int abbrev_id = abbrev_opt_start - 1;
8879 unsigned int first_id = ~0U;
8880 unsigned int last_abbrev_id = 0;
8881 unsigned int i;
8882 dw_die_ref die;
8883 if (abbrev_opt_base_type_end > abbrev_opt_start)
8884 abbrev_id = abbrev_opt_base_type_end - 1;
8885 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8886 most commonly used abbreviations come first. */
8887 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8888 {
8889 dw_attr_node *a;
8890 unsigned ix;
8891
8892 /* If calc_base_type_die_sizes has been called, the CU and
8893 base types after it can't be optimized, because we've already
8894 calculated their DIE offsets. We've sorted them first. */
8895 if (die->die_abbrev < abbrev_opt_base_type_end)
8896 continue;
8897 if (die->die_abbrev != last_abbrev_id)
8898 {
8899 last_abbrev_id = die->die_abbrev;
8900 if (dwarf_version >= 5 && first_id != ~0U)
8901 optimize_implicit_const (first_id, i, implicit_consts);
8902 abbrev_id++;
8903 (*abbrev_die_table)[abbrev_id] = die;
8904 if (dwarf_version >= 5)
8905 {
8906 first_id = i;
8907 implicit_consts.truncate (0);
8908
8909 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8910 switch (AT_class (a))
8911 {
8912 case dw_val_class_const:
8913 case dw_val_class_unsigned_const:
8914 case dw_val_class_file:
8915 implicit_consts.safe_push (true);
8916 break;
8917 default:
8918 implicit_consts.safe_push (false);
8919 break;
8920 }
8921 }
8922 }
8923 else if (dwarf_version >= 5)
8924 {
8925 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8926 if (!implicit_consts[ix])
8927 continue;
8928 else
8929 {
8930 dw_attr_node *other_a
8931 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8932 if (!dw_val_equal_p (&a->dw_attr_val,
8933 &other_a->dw_attr_val))
8934 implicit_consts[ix] = false;
8935 }
8936 }
8937 die->die_abbrev = abbrev_id;
8938 }
8939 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8940 if (dwarf_version >= 5 && first_id != ~0U)
8941 optimize_implicit_const (first_id, i, implicit_consts);
8942 }
8943
8944 abbrev_opt_start = 0;
8945 abbrev_opt_base_type_end = 0;
8946 abbrev_usage_count.release ();
8947 sorted_abbrev_dies.release ();
8948 }
8949 \f
8950 /* Return the power-of-two number of bytes necessary to represent VALUE. */
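/* For example, values up to 0xff need 1 byte, values up to 0xffff need 2,
values up to 0xffffffff need 4, and anything larger needs 8, matching the
fixed-size DW_FORM_data forms. */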
8951
8952 static int
8953 constant_size (unsigned HOST_WIDE_INT value)
8954 {
8955 int log;
8956
8957 if (value == 0)
8958 log = 0;
8959 else
8960 log = floor_log2 (value);
8961
8962 log = log / 8;
8963 log = 1 << (floor_log2 (log) + 1);
8964
8965 return log;
8966 }
8967
8968 /* Return the size of a DIE as it is represented in the
8969 .debug_info section. */
8970
8971 static unsigned long
8972 size_of_die (dw_die_ref die)
8973 {
8974 unsigned long size = 0;
8975 dw_attr_node *a;
8976 unsigned ix;
8977 enum dwarf_form form;
8978
8979 size += size_of_uleb128 (die->die_abbrev);
8980 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8981 {
8982 switch (AT_class (a))
8983 {
8984 case dw_val_class_addr:
8985 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8986 {
8987 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8988 size += size_of_uleb128 (AT_index (a));
8989 }
8990 else
8991 size += DWARF2_ADDR_SIZE;
8992 break;
8993 case dw_val_class_offset:
8994 size += DWARF_OFFSET_SIZE;
8995 break;
8996 case dw_val_class_loc:
8997 {
8998 unsigned long lsize = size_of_locs (AT_loc (a));
8999
9000 /* Block length. */
9001 if (dwarf_version >= 4)
9002 size += size_of_uleb128 (lsize);
9003 else
9004 size += constant_size (lsize);
9005 size += lsize;
9006 }
9007 break;
9008 case dw_val_class_loc_list:
9009 if (dwarf_split_debug_info && dwarf_version >= 5)
9010 {
9011 gcc_assert (AT_loc_list (a)->num_assigned);
9012 size += size_of_uleb128 (AT_loc_list (a)->hash);
9013 }
9014 else
9015 size += DWARF_OFFSET_SIZE;
9016 break;
9017 case dw_val_class_range_list:
9018 if (value_format (a) == DW_FORM_rnglistx)
9019 {
9020 gcc_assert (rnglist_idx);
9021 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9022 size += size_of_uleb128 (r->idx);
9023 }
9024 else
9025 size += DWARF_OFFSET_SIZE;
9026 break;
9027 case dw_val_class_const:
9028 size += size_of_sleb128 (AT_int (a));
9029 break;
9030 case dw_val_class_unsigned_const:
9031 {
9032 int csize = constant_size (AT_unsigned (a));
9033 if (dwarf_version == 3
9034 && a->dw_attr == DW_AT_data_member_location
9035 && csize >= 4)
9036 size += size_of_uleb128 (AT_unsigned (a));
9037 else
9038 size += csize;
9039 }
9040 break;
9041 case dw_val_class_const_implicit:
9042 case dw_val_class_unsigned_const_implicit:
9043 case dw_val_class_file_implicit:
9044 /* These occupy no size in the DIE, just an extra sleb128 in
9045 .debug_abbrev. */
9046 break;
9047 case dw_val_class_const_double:
9048 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9049 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9050 size++; /* block */
9051 break;
9052 case dw_val_class_wide_int:
9053 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9054 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9055 if (get_full_len (*a->dw_attr_val.v.val_wide)
9056 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9057 size++; /* block */
9058 break;
9059 case dw_val_class_vec:
9060 size += constant_size (a->dw_attr_val.v.val_vec.length
9061 * a->dw_attr_val.v.val_vec.elt_size)
9062 + a->dw_attr_val.v.val_vec.length
9063 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9064 break;
9065 case dw_val_class_flag:
9066 if (dwarf_version >= 4)
9067 /* Currently all add_AT_flag calls pass in 1 as last argument,
9068 so DW_FORM_flag_present can be used. If that ever changes,
9069 we'll need to use DW_FORM_flag and have some optimization
9070 in build_abbrev_table that will change those to
9071 DW_FORM_flag_present if it is set to 1 in all DIEs using
9072 the same abbrev entry. */
9073 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9074 else
9075 size += 1;
9076 break;
9077 case dw_val_class_die_ref:
9078 if (AT_ref_external (a))
9079 {
9080 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9081 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9082 is sized by target address length, whereas in DWARF3
9083 it's always sized as an offset. */
9084 if (use_debug_types)
9085 size += DWARF_TYPE_SIGNATURE_SIZE;
9086 else if (dwarf_version == 2)
9087 size += DWARF2_ADDR_SIZE;
9088 else
9089 size += DWARF_OFFSET_SIZE;
9090 }
9091 else
9092 size += DWARF_OFFSET_SIZE;
9093 break;
9094 case dw_val_class_fde_ref:
9095 size += DWARF_OFFSET_SIZE;
9096 break;
9097 case dw_val_class_lbl_id:
9098 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9099 {
9100 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9101 size += size_of_uleb128 (AT_index (a));
9102 }
9103 else
9104 size += DWARF2_ADDR_SIZE;
9105 break;
9106 case dw_val_class_lineptr:
9107 case dw_val_class_macptr:
9108 case dw_val_class_loclistsptr:
9109 size += DWARF_OFFSET_SIZE;
9110 break;
9111 case dw_val_class_str:
9112 form = AT_string_form (a);
9113 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9114 size += DWARF_OFFSET_SIZE;
9115 else if (form == DW_FORM_GNU_str_index)
9116 size += size_of_uleb128 (AT_index (a));
9117 else
9118 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9119 break;
9120 case dw_val_class_file:
9121 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9122 break;
9123 case dw_val_class_data8:
9124 size += 8;
9125 break;
9126 case dw_val_class_vms_delta:
9127 size += DWARF_OFFSET_SIZE;
9128 break;
9129 case dw_val_class_high_pc:
9130 size += DWARF2_ADDR_SIZE;
9131 break;
9132 case dw_val_class_discr_value:
9133 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9134 break;
9135 case dw_val_class_discr_list:
9136 {
9137 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9138
9139 /* This is a block, so we have the block length and then its
9140 data. */
9141 size += constant_size (block_size) + block_size;
9142 }
9143 break;
9144 default:
9145 gcc_unreachable ();
9146 }
9147 }
9148
9149 return size;
9150 }
9151
9152 /* Size the debugging information associated with a given DIE. Visits the
9153 DIE's children recursively. Updates the global variable next_die_offset
9154 each time through. Uses the current value of next_die_offset to update the
9155 die_offset field in each DIE. */
9156
9157 static void
9158 calc_die_sizes (dw_die_ref die)
9159 {
9160 dw_die_ref c;
9161
9162 gcc_assert (die->die_offset == 0
9163 || (unsigned long int) die->die_offset == next_die_offset);
9164 die->die_offset = next_die_offset;
9165 next_die_offset += size_of_die (die);
9166
9167 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9168
9169 if (die->die_child != NULL)
9170 /* Count the null byte used to terminate sibling lists. */
9171 next_die_offset += 1;
9172 }
9173
9174 /* Size just the base type children at the start of the CU.
9175 This is needed because build_abbrev_table needs to size locs,
9176 and sizing of type-based stack ops needs to know die_offset
9177 values for the base types. */
9178
9179 static void
9180 calc_base_type_die_sizes (void)
9181 {
9182 unsigned long die_offset = (dwarf_split_debug_info
9183 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9184 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9185 unsigned int i;
9186 dw_die_ref base_type;
9187 #if ENABLE_ASSERT_CHECKING
9188 dw_die_ref prev = comp_unit_die ()->die_child;
9189 #endif
9190
9191 die_offset += size_of_die (comp_unit_die ());
9192 for (i = 0; base_types.iterate (i, &base_type); i++)
9193 {
9194 #if ENABLE_ASSERT_CHECKING
9195 gcc_assert (base_type->die_offset == 0
9196 && prev->die_sib == base_type
9197 && base_type->die_child == NULL
9198 && base_type->die_abbrev);
9199 prev = base_type;
9200 #endif
9201 if (abbrev_opt_start
9202 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9203 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9204 base_type->die_offset = die_offset;
9205 die_offset += size_of_die (base_type);
9206 }
9207 }
9208
9209 /* Set the marks for a die and its children. We do this so
9210 that we know whether or not a reference needs to use FORM_ref_addr; only
9211 DIEs in the same CU will be marked. We used to clear out the offset
9212 and use that as the flag, but ran into ordering problems. */
9213
9214 static void
9215 mark_dies (dw_die_ref die)
9216 {
9217 dw_die_ref c;
9218
9219 gcc_assert (!die->die_mark);
9220
9221 die->die_mark = 1;
9222 FOR_EACH_CHILD (die, c, mark_dies (c));
9223 }
9224
9225 /* Clear the marks for a die and its children. */
9226
9227 static void
9228 unmark_dies (dw_die_ref die)
9229 {
9230 dw_die_ref c;
9231
9232 if (! use_debug_types)
9233 gcc_assert (die->die_mark);
9234
9235 die->die_mark = 0;
9236 FOR_EACH_CHILD (die, c, unmark_dies (c));
9237 }
9238
9239 /* Clear the marks for a die, its children and referred dies. */
9240
9241 static void
9242 unmark_all_dies (dw_die_ref die)
9243 {
9244 dw_die_ref c;
9245 dw_attr_node *a;
9246 unsigned ix;
9247
9248 if (!die->die_mark)
9249 return;
9250 die->die_mark = 0;
9251
9252 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9253
9254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9255 if (AT_class (a) == dw_val_class_die_ref)
9256 unmark_all_dies (AT_ref (a));
9257 }
9258
9259 /* Calculate whether the entry should appear in the final output file. It may
9260 be from a pruned type. */
9261
9262 static bool
9263 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9264 {
9265 /* By limiting gnu pubnames to definitions only, gold can generate a
9266 gdb index without entries for declarations, which don't include
9267 enough information to be useful. */
9268 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9269 return false;
9270
9271 if (table == pubname_table)
9272 {
9273 /* Enumerator names are part of the pubname table, but the
9274 parent DW_TAG_enumeration_type die may have been pruned.
9275 Don't output them if that is the case. */
9276 if (p->die->die_tag == DW_TAG_enumerator &&
9277 (p->die->die_parent == NULL
9278 || !p->die->die_parent->die_perennial_p))
9279 return false;
9280
9281 /* Everything else in the pubname table is included. */
9282 return true;
9283 }
9284
9285 /* The pubtypes table shouldn't include types that have been
9286 pruned. */
9287 return (p->die->die_offset != 0
9288 || !flag_eliminate_unused_debug_types);
9289 }
9290
9291 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9292 generated for the compilation unit. */
9293
9294 static unsigned long
9295 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9296 {
9297 unsigned long size;
9298 unsigned i;
9299 pubname_entry *p;
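/* When debug_generate_pub_sections == 2 (GNU-style pubnames), each entry
carries one extra byte of GDB index flags; see output_pubname. */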
9300 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9301
9302 size = DWARF_PUBNAMES_HEADER_SIZE;
9303 FOR_EACH_VEC_ELT (*names, i, p)
9304 if (include_pubname_in_output (names, p))
9305 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9306
9307 size += DWARF_OFFSET_SIZE;
9308 return size;
9309 }
9310
9311 /* Return the size of the information in the .debug_aranges section. */
9312
9313 static unsigned long
9314 size_of_aranges (void)
9315 {
9316 unsigned long size;
9317
9318 size = DWARF_ARANGES_HEADER_SIZE;
9319
9320 /* Count the address/length pair for this compilation unit. */
9321 if (text_section_used)
9322 size += 2 * DWARF2_ADDR_SIZE;
9323 if (cold_text_section_used)
9324 size += 2 * DWARF2_ADDR_SIZE;
9325 if (have_multiple_function_sections)
9326 {
9327 unsigned fde_idx;
9328 dw_fde_ref fde;
9329
9330 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9331 {
9332 if (DECL_IGNORED_P (fde->decl))
9333 continue;
9334 if (!fde->in_std_section)
9335 size += 2 * DWARF2_ADDR_SIZE;
9336 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9337 size += 2 * DWARF2_ADDR_SIZE;
9338 }
9339 }
9340
9341 /* Count the two zero words used to terminate the address range table. */
9342 size += 2 * DWARF2_ADDR_SIZE;
9343 return size;
9344 }
9345 \f
9346 /* Select the encoding of an attribute value. */
9347
9348 static enum dwarf_form
9349 value_format (dw_attr_node *a)
9350 {
9351 switch (AT_class (a))
9352 {
9353 case dw_val_class_addr:
9354 /* Only very few attributes allow DW_FORM_addr. */
9355 switch (a->dw_attr)
9356 {
9357 case DW_AT_low_pc:
9358 case DW_AT_high_pc:
9359 case DW_AT_entry_pc:
9360 case DW_AT_trampoline:
9361 return (AT_index (a) == NOT_INDEXED
9362 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9363 default:
9364 break;
9365 }
9366 switch (DWARF2_ADDR_SIZE)
9367 {
9368 case 1:
9369 return DW_FORM_data1;
9370 case 2:
9371 return DW_FORM_data2;
9372 case 4:
9373 return DW_FORM_data4;
9374 case 8:
9375 return DW_FORM_data8;
9376 default:
9377 gcc_unreachable ();
9378 }
9379 case dw_val_class_loc_list:
9380 if (dwarf_split_debug_info
9381 && dwarf_version >= 5
9382 && AT_loc_list (a)->num_assigned)
9383 return DW_FORM_loclistx;
9384 /* FALLTHRU */
9385 case dw_val_class_range_list:
9386 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9387 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9388 care about the sizes of .debug* sections in shared libraries and
9389 executables and don't take into account relocations that affect just
9390 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9391 table in the .debug_rnglists section. */
9392 if (dwarf_split_debug_info
9393 && dwarf_version >= 5
9394 && AT_class (a) == dw_val_class_range_list
9395 && rnglist_idx
9396 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9397 return DW_FORM_rnglistx;
9398 if (dwarf_version >= 4)
9399 return DW_FORM_sec_offset;
9400 /* FALLTHRU */
9401 case dw_val_class_vms_delta:
9402 case dw_val_class_offset:
9403 switch (DWARF_OFFSET_SIZE)
9404 {
9405 case 4:
9406 return DW_FORM_data4;
9407 case 8:
9408 return DW_FORM_data8;
9409 default:
9410 gcc_unreachable ();
9411 }
9412 case dw_val_class_loc:
9413 if (dwarf_version >= 4)
9414 return DW_FORM_exprloc;
9415 switch (constant_size (size_of_locs (AT_loc (a))))
9416 {
9417 case 1:
9418 return DW_FORM_block1;
9419 case 2:
9420 return DW_FORM_block2;
9421 case 4:
9422 return DW_FORM_block4;
9423 default:
9424 gcc_unreachable ();
9425 }
9426 case dw_val_class_const:
9427 return DW_FORM_sdata;
9428 case dw_val_class_unsigned_const:
9429 switch (constant_size (AT_unsigned (a)))
9430 {
9431 case 1:
9432 return DW_FORM_data1;
9433 case 2:
9434 return DW_FORM_data2;
9435 case 4:
9436 /* In DWARF3 DW_AT_data_member_location with
9437 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9438 constant, so we need to use DW_FORM_udata if we need
9439 a large constant. */
9440 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9441 return DW_FORM_udata;
9442 return DW_FORM_data4;
9443 case 8:
9444 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9445 return DW_FORM_udata;
9446 return DW_FORM_data8;
9447 default:
9448 gcc_unreachable ();
9449 }
9450 case dw_val_class_const_implicit:
9451 case dw_val_class_unsigned_const_implicit:
9452 case dw_val_class_file_implicit:
9453 return DW_FORM_implicit_const;
9454 case dw_val_class_const_double:
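/* A const_double holds two host wide ints, so the chosen data form must
cover 2 * HOST_BITS_PER_WIDE_INT bits. */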
9455 switch (HOST_BITS_PER_WIDE_INT)
9456 {
9457 case 8:
9458 return DW_FORM_data2;
9459 case 16:
9460 return DW_FORM_data4;
9461 case 32:
9462 return DW_FORM_data8;
9463 case 64:
9464 if (dwarf_version >= 5)
9465 return DW_FORM_data16;
9466 /* FALLTHRU */
9467 default:
9468 return DW_FORM_block1;
9469 }
9470 case dw_val_class_wide_int:
9471 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9472 {
9473 case 8:
9474 return DW_FORM_data1;
9475 case 16:
9476 return DW_FORM_data2;
9477 case 32:
9478 return DW_FORM_data4;
9479 case 64:
9480 return DW_FORM_data8;
9481 case 128:
9482 if (dwarf_version >= 5)
9483 return DW_FORM_data16;
9484 /* FALLTHRU */
9485 default:
9486 return DW_FORM_block1;
9487 }
9488 case dw_val_class_vec:
9489 switch (constant_size (a->dw_attr_val.v.val_vec.length
9490 * a->dw_attr_val.v.val_vec.elt_size))
9491 {
9492 case 1:
9493 return DW_FORM_block1;
9494 case 2:
9495 return DW_FORM_block2;
9496 case 4:
9497 return DW_FORM_block4;
9498 default:
9499 gcc_unreachable ();
9500 }
9501 case dw_val_class_flag:
9502 if (dwarf_version >= 4)
9503 {
9504 /* Currently all add_AT_flag calls pass in 1 as last argument,
9505 so DW_FORM_flag_present can be used. If that ever changes,
9506 we'll need to use DW_FORM_flag and have some optimization
9507 in build_abbrev_table that will change those to
9508 DW_FORM_flag_present if it is set to 1 in all DIEs using
9509 the same abbrev entry. */
9510 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9511 return DW_FORM_flag_present;
9512 }
9513 return DW_FORM_flag;
9514 case dw_val_class_die_ref:
9515 if (AT_ref_external (a))
9516 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9517 else
9518 return DW_FORM_ref;
9519 case dw_val_class_fde_ref:
9520 return DW_FORM_data;
9521 case dw_val_class_lbl_id:
9522 return (AT_index (a) == NOT_INDEXED
9523 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9524 case dw_val_class_lineptr:
9525 case dw_val_class_macptr:
9526 case dw_val_class_loclistsptr:
9527 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9528 case dw_val_class_str:
9529 return AT_string_form (a);
9530 case dw_val_class_file:
9531 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9532 {
9533 case 1:
9534 return DW_FORM_data1;
9535 case 2:
9536 return DW_FORM_data2;
9537 case 4:
9538 return DW_FORM_data4;
9539 default:
9540 gcc_unreachable ();
9541 }
9542
9543 case dw_val_class_data8:
9544 return DW_FORM_data8;
9545
9546 case dw_val_class_high_pc:
9547 switch (DWARF2_ADDR_SIZE)
9548 {
9549 case 1:
9550 return DW_FORM_data1;
9551 case 2:
9552 return DW_FORM_data2;
9553 case 4:
9554 return DW_FORM_data4;
9555 case 8:
9556 return DW_FORM_data8;
9557 default:
9558 gcc_unreachable ();
9559 }
9560
9561 case dw_val_class_discr_value:
9562 return (a->dw_attr_val.v.val_discr_value.pos
9563 ? DW_FORM_udata
9564 : DW_FORM_sdata);
9565 case dw_val_class_discr_list:
9566 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9567 {
9568 case 1:
9569 return DW_FORM_block1;
9570 case 2:
9571 return DW_FORM_block2;
9572 case 4:
9573 return DW_FORM_block4;
9574 default:
9575 gcc_unreachable ();
9576 }
9577
9578 default:
9579 gcc_unreachable ();
9580 }
9581 }
9582
9583 /* Output the encoding of an attribute value. */
9584
9585 static void
9586 output_value_format (dw_attr_node *a)
9587 {
9588 enum dwarf_form form = value_format (a);
9589
9590 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9591 }
9592
9593 /* Given a die and id, produce the appropriate abbreviations. */
9594
9595 static void
9596 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9597 {
9598 unsigned ix;
9599 dw_attr_node *a_attr;
9600
9601 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9602 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9603 dwarf_tag_name (abbrev->die_tag));
9604
9605 if (abbrev->die_child != NULL)
9606 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9607 else
9608 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9609
9610 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9611 {
9612 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9613 dwarf_attr_name (a_attr->dw_attr));
9614 output_value_format (a_attr);
9615 if (value_format (a_attr) == DW_FORM_implicit_const)
9616 {
9617 if (AT_class (a_attr) == dw_val_class_file_implicit)
9618 {
9619 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9620 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9621 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9622 }
9623 else
9624 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9625 }
9626 }
9627
9628 dw2_asm_output_data (1, 0, NULL);
9629 dw2_asm_output_data (1, 0, NULL);
9630 }
9631
9632
9633 /* Output the .debug_abbrev section which defines the DIE abbreviation
9634 table. */
9635
9636 static void
9637 output_abbrev_section (void)
9638 {
9639 unsigned int abbrev_id;
9640 dw_die_ref abbrev;
9641
9642 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9643 if (abbrev_id != 0)
9644 output_die_abbrevs (abbrev_id, abbrev);
9645
9646 /* Terminate the table. */
9647 dw2_asm_output_data (1, 0, NULL);
9648 }
9649
9650 /* Return a new location list, given the begin and end range, and the
9651 expression. */
9652
9653 static inline dw_loc_list_ref
9654 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9655 const char *section)
9656 {
9657 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9658
9659 retlist->begin = begin;
9660 retlist->begin_entry = NULL;
9661 retlist->end = end;
9662 retlist->expr = expr;
9663 retlist->section = section;
9664
9665 return retlist;
9666 }
9667
9668 /* Generate a new internal symbol for this location list node, if it
9669 hasn't got one yet. */
9670
9671 static inline void
9672 gen_llsym (dw_loc_list_ref list)
9673 {
9674 gcc_assert (!list->ll_symbol);
9675 list->ll_symbol = gen_internal_sym ("LLST");
9676 }
9677
9678 /* Output the location list given to us. */
9679
9680 static void
9681 output_loc_list (dw_loc_list_ref list_head)
9682 {
9683 if (list_head->emitted)
9684 return;
9685 list_head->emitted = true;
9686
9687 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9688
9689 dw_loc_list_ref curr = list_head;
9690 const char *last_section = NULL;
9691 const char *base_label = NULL;
9692
9693 /* Walk the location list, and output each range + expression. */
9694 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9695 {
9696 unsigned long size;
9697 /* Don't output an entry that starts and ends at the same address. */
9698 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9699 continue;
9700 size = size_of_locs (curr->expr);
9701 /* If the expression is too large, drop it on the floor. We could
9702 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9703 in the expression, but >= 64KB expressions for a single value
9704 in a single range are unlikely to be very useful. */
9705 if (dwarf_version < 5 && size > 0xffff)
9706 continue;
9707 if (dwarf_version >= 5)
9708 {
9709 if (dwarf_split_debug_info)
9710 {
9711 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
9712 uleb128 index into .debug_addr and uleb128 length. */
9713 dw2_asm_output_data (1, DW_LLE_startx_length,
9714 "DW_LLE_startx_length (%s)",
9715 list_head->ll_symbol);
9716 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9717 "Location list range start index "
9718 "(%s)", curr->begin);
9719 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
9720 For that case we probably need to emit DW_LLE_startx_endx,
9721 but we'd need 2 .debug_addr entries rather than just one. */
9722 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9723 "Location list length (%s)",
9724 list_head->ll_symbol);
9725 }
9726 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9727 {
9728 /* If all code is in the .text section, the base address is
9729 already provided by the CU attributes. Use
9730 DW_LLE_offset_pair where both addresses are uleb128 encoded
9731 offsets against that base. */
9732 dw2_asm_output_data (1, DW_LLE_offset_pair,
9733 "DW_LLE_offset_pair (%s)",
9734 list_head->ll_symbol);
9735 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9736 "Location list begin address (%s)",
9737 list_head->ll_symbol);
9738 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9739 "Location list end address (%s)",
9740 list_head->ll_symbol);
9741 }
9742 else if (HAVE_AS_LEB128)
9743 {
9744 /* Otherwise, find out how many consecutive entries could share
9745 the same base entry. If just one, emit DW_LLE_start_length,
9746 otherwise emit DW_LLE_base_address for the base address
9747 followed by a series of DW_LLE_offset_pair. */
9748 if (last_section == NULL || curr->section != last_section)
9749 {
9750 dw_loc_list_ref curr2;
9751 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9752 curr2 = curr2->dw_loc_next)
9753 {
9754 if (strcmp (curr2->begin, curr2->end) == 0
9755 && !curr2->force)
9756 continue;
9757 break;
9758 }
9759 if (curr2 == NULL || curr->section != curr2->section)
9760 last_section = NULL;
9761 else
9762 {
9763 last_section = curr->section;
9764 base_label = curr->begin;
9765 dw2_asm_output_data (1, DW_LLE_base_address,
9766 "DW_LLE_base_address (%s)",
9767 list_head->ll_symbol);
9768 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9769 "Base address (%s)",
9770 list_head->ll_symbol);
9771 }
9772 }
9773 /* Only one entry with the same base address. Use
9774 DW_LLE_start_length with absolute address and uleb128
9775 length. */
9776 if (last_section == NULL)
9777 {
9778 dw2_asm_output_data (1, DW_LLE_start_length,
9779 "DW_LLE_start_length (%s)",
9780 list_head->ll_symbol);
9781 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9782 "Location list begin address (%s)",
9783 list_head->ll_symbol);
9784 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9785 "Location list length "
9786 "(%s)", list_head->ll_symbol);
9787 }
9788 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9789 DW_LLE_base_address. */
9790 else
9791 {
9792 dw2_asm_output_data (1, DW_LLE_offset_pair,
9793 "DW_LLE_offset_pair (%s)",
9794 list_head->ll_symbol);
9795 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9796 "Location list begin address "
9797 "(%s)", list_head->ll_symbol);
9798 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9799 "Location list end address "
9800 "(%s)", list_head->ll_symbol);
9801 }
9802 }
9803 /* The assembler does not support the .uleb128 directive. Emit
9804 DW_LLE_start_end with a pair of absolute addresses. */
9805 else
9806 {
9807 dw2_asm_output_data (1, DW_LLE_start_end,
9808 "DW_LLE_start_end (%s)",
9809 list_head->ll_symbol);
9810 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9811 "Location list begin address (%s)",
9812 list_head->ll_symbol);
9813 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9814 "Location list end address (%s)",
9815 list_head->ll_symbol);
9816 }
9817 }
9818 else if (dwarf_split_debug_info)
9819 {
9820 /* For -gsplit-dwarf with -gdwarf-{2,3,4}, emit an index into .debug_addr
9821 and a 4-byte length. */
9822 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9823 "Location list start/length entry (%s)",
9824 list_head->ll_symbol);
9825 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9826 "Location list range start index (%s)",
9827 curr->begin);
9828 /* The length field is 4 bytes. If we ever need to support
9829 an 8-byte length, we can add a new DW_LLE code or fall back
9830 to DW_LLE_GNU_start_end_entry. */
9831 dw2_asm_output_delta (4, curr->end, curr->begin,
9832 "Location list range length (%s)",
9833 list_head->ll_symbol);
9834 }
9835 else if (!have_multiple_function_sections)
9836 {
9837 /* Pair of relative addresses against start of text section. */
9838 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9839 "Location list begin address (%s)",
9840 list_head->ll_symbol);
9841 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9842 "Location list end address (%s)",
9843 list_head->ll_symbol);
9844 }
9845 else
9846 {
9847 /* Pair of absolute addresses. */
9848 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9849 "Location list begin address (%s)",
9850 list_head->ll_symbol);
9851 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9852 "Location list end address (%s)",
9853 list_head->ll_symbol);
9854 }
9855
9856 /* Output the block length for this list of location operations. */
9857 if (dwarf_version >= 5)
9858 dw2_asm_output_data_uleb128 (size, "Location expression size");
9859 else
9860 {
9861 gcc_assert (size <= 0xffff);
9862 dw2_asm_output_data (2, size, "Location expression size");
9863 }
9864
9865 output_loc_sequence (curr->expr, -1);
9866 }
9867
9868 /* And finally list termination. */
9869 if (dwarf_version >= 5)
9870 dw2_asm_output_data (1, DW_LLE_end_of_list,
9871 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9872 else if (dwarf_split_debug_info)
9873 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9874 "Location list terminator (%s)",
9875 list_head->ll_symbol);
9876 else
9877 {
9878 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9879 "Location list terminator begin (%s)",
9880 list_head->ll_symbol);
9881 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9882 "Location list terminator end (%s)",
9883 list_head->ll_symbol);
9884 }
9885 }
9886
9887 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9888 section. Emit a relocated reference if val_entry is NULL, otherwise,
9889 emit an indirect reference. */
9890
9891 static void
9892 output_range_list_offset (dw_attr_node *a)
9893 {
9894 const char *name = dwarf_attr_name (a->dw_attr);
9895
9896 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9897 {
9898 if (dwarf_version >= 5)
9899 {
9900 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9901 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9902 debug_ranges_section, "%s", name);
9903 }
9904 else
9905 {
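/* Temporarily append "+<offset>" to the ranges section label so the
reference is emitted as label plus constant offset, then restore the
label by putting the terminating NUL back. */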
9906 char *p = strchr (ranges_section_label, '\0');
9907 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9908 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9909 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9910 debug_ranges_section, "%s", name);
9911 *p = '\0';
9912 }
9913 }
9914 else if (dwarf_version >= 5)
9915 {
9916 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9917 gcc_assert (rnglist_idx);
9918 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9919 }
9920 else
9921 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9922 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9923 "%s (offset from %s)", name, ranges_section_label);
9924 }
9925
9926 /* Output the offset into the debug_loc section. */
9927
9928 static void
9929 output_loc_list_offset (dw_attr_node *a)
9930 {
9931 char *sym = AT_loc_list (a)->ll_symbol;
9932
9933 gcc_assert (sym);
9934 if (!dwarf_split_debug_info)
9935 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9936 "%s", dwarf_attr_name (a->dw_attr));
9937 else if (dwarf_version >= 5)
9938 {
9939 gcc_assert (AT_loc_list (a)->num_assigned);
9940 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9941 dwarf_attr_name (a->dw_attr),
9942 sym);
9943 }
9944 else
9945 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9946 "%s", dwarf_attr_name (a->dw_attr));
9947 }
9948
9949 /* Output an attribute's index or value appropriately. */
9950
9951 static void
9952 output_attr_index_or_value (dw_attr_node *a)
9953 {
9954 const char *name = dwarf_attr_name (a->dw_attr);
9955
9956 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9957 {
9958 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9959 return;
9960 }
9961 switch (AT_class (a))
9962 {
9963 case dw_val_class_addr:
9964 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9965 break;
9966 case dw_val_class_high_pc:
9967 case dw_val_class_lbl_id:
9968 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9969 break;
9970 default:
9971 gcc_unreachable ();
9972 }
9973 }
9974
9975 /* Output a type signature. */
9976
9977 static inline void
9978 output_signature (const char *sig, const char *name)
9979 {
9980 int i;
9981
9982 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9983 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9984 }
9985
9986 /* Output a discriminant value. */
9987
9988 static inline void
9989 output_discr_value (dw_discr_value *discr_value, const char *name)
9990 {
9991 if (discr_value->pos)
9992 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9993 else
9994 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9995 }
9996
9997 /* Output the DIE and its attributes. Called recursively to generate
9998 the definitions of each child DIE. */
9999
10000 static void
10001 output_die (dw_die_ref die)
10002 {
10003 dw_attr_node *a;
10004 dw_die_ref c;
10005 unsigned long size;
10006 unsigned ix;
10007
10008 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10009 (unsigned long)die->die_offset,
10010 dwarf_tag_name (die->die_tag));
10011
10012 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10013 {
10014 const char *name = dwarf_attr_name (a->dw_attr);
10015
10016 switch (AT_class (a))
10017 {
10018 case dw_val_class_addr:
10019 output_attr_index_or_value (a);
10020 break;
10021
10022 case dw_val_class_offset:
10023 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10024 "%s", name);
10025 break;
10026
10027 case dw_val_class_range_list:
10028 output_range_list_offset (a);
10029 break;
10030
10031 case dw_val_class_loc:
10032 size = size_of_locs (AT_loc (a));
10033
10034 /* Output the block length for this list of location operations. */
10035 if (dwarf_version >= 4)
10036 dw2_asm_output_data_uleb128 (size, "%s", name);
10037 else
10038 dw2_asm_output_data (constant_size (size), size, "%s", name);
10039
10040 output_loc_sequence (AT_loc (a), -1);
10041 break;
10042
10043 case dw_val_class_const:
10044 /* ??? It would be slightly more efficient to use a scheme like the one
10045 used for unsigned constants below, but gdb 4.x does not sign
10046 extend. Gdb 5.x does sign extend. */
10047 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10048 break;
10049
10050 case dw_val_class_unsigned_const:
10051 {
10052 int csize = constant_size (AT_unsigned (a));
10053 if (dwarf_version == 3
10054 && a->dw_attr == DW_AT_data_member_location
10055 && csize >= 4)
10056 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10057 else
10058 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10059 }
10060 break;
10061
10062 case dw_val_class_const_implicit:
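/* The value was already emitted as a DW_FORM_implicit_const operand in
.debug_abbrev, so only an assembly comment is produced here. */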
10063 if (flag_debug_asm)
10064 fprintf (asm_out_file, "\t\t\t%s %s ("
10065 HOST_WIDE_INT_PRINT_DEC ")\n",
10066 ASM_COMMENT_START, name, AT_int (a));
10067 break;
10068
10069 case dw_val_class_unsigned_const_implicit:
10070 if (flag_debug_asm)
10071 fprintf (asm_out_file, "\t\t\t%s %s ("
10072 HOST_WIDE_INT_PRINT_HEX ")\n",
10073 ASM_COMMENT_START, name, AT_unsigned (a));
10074 break;
10075
10076 case dw_val_class_const_double:
10077 {
10078 unsigned HOST_WIDE_INT first, second;
10079
10080 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10081 dw2_asm_output_data (1,
10082 HOST_BITS_PER_DOUBLE_INT
10083 / HOST_BITS_PER_CHAR,
10084 NULL);
10085
10086 if (WORDS_BIG_ENDIAN)
10087 {
10088 first = a->dw_attr_val.v.val_double.high;
10089 second = a->dw_attr_val.v.val_double.low;
10090 }
10091 else
10092 {
10093 first = a->dw_attr_val.v.val_double.low;
10094 second = a->dw_attr_val.v.val_double.high;
10095 }
10096
10097 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10098 first, "%s", name);
10099 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10100 second, NULL);
10101 }
10102 break;
10103
10104 case dw_val_class_wide_int:
10105 {
10106 int i;
10107 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10108 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10109 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10110 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10111 * l, NULL);
10112
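/* Emit the wide-int elements most significant first on word-big-endian
hosts, least significant first otherwise. */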
10113 if (WORDS_BIG_ENDIAN)
10114 for (i = len - 1; i >= 0; --i)
10115 {
10116 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10117 "%s", name);
10118 name = "";
10119 }
10120 else
10121 for (i = 0; i < len; ++i)
10122 {
10123 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10124 "%s", name);
10125 name = "";
10126 }
10127 }
10128 break;
10129
10130 case dw_val_class_vec:
10131 {
10132 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10133 unsigned int len = a->dw_attr_val.v.val_vec.length;
10134 unsigned int i;
10135 unsigned char *p;
10136
10137 dw2_asm_output_data (constant_size (len * elt_size),
10138 len * elt_size, "%s", name);
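/* Split elements wider than a host wide int in half so that each piece
fits in a single dw2_asm_output_data call. */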
10139 if (elt_size > sizeof (HOST_WIDE_INT))
10140 {
10141 elt_size /= 2;
10142 len *= 2;
10143 }
10144 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10145 i < len;
10146 i++, p += elt_size)
10147 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10148 "fp or vector constant word %u", i);
10149 break;
10150 }
10151
10152 case dw_val_class_flag:
10153 if (dwarf_version >= 4)
10154 {
10155 /* Currently all add_AT_flag calls pass in 1 as last argument,
10156 so DW_FORM_flag_present can be used. If that ever changes,
10157 we'll need to use DW_FORM_flag and have some optimization
10158 in build_abbrev_table that will change those to
10159 DW_FORM_flag_present if it is set to 1 in all DIEs using
10160 the same abbrev entry. */
10161 gcc_assert (AT_flag (a) == 1);
10162 if (flag_debug_asm)
10163 fprintf (asm_out_file, "\t\t\t%s %s\n",
10164 ASM_COMMENT_START, name);
10165 break;
10166 }
10167 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10168 break;
10169
10170 case dw_val_class_loc_list:
10171 output_loc_list_offset (a);
10172 break;
10173
10174 case dw_val_class_die_ref:
10175 if (AT_ref_external (a))
10176 {
10177 if (AT_ref (a)->comdat_type_p)
10178 {
10179 comdat_type_node *type_node
10180 = AT_ref (a)->die_id.die_type_node;
10181
10182 gcc_assert (type_node);
10183 output_signature (type_node->signature, name);
10184 }
10185 else
10186 {
10187 const char *sym = AT_ref (a)->die_id.die_symbol;
10188 int size;
10189
10190 gcc_assert (sym);
10191 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10192 length, whereas in DWARF3 it's always sized as an
10193 offset. */
10194 if (dwarf_version == 2)
10195 size = DWARF2_ADDR_SIZE;
10196 else
10197 size = DWARF_OFFSET_SIZE;
10198 /* ??? We cannot unconditionally output die_offset if
10199 non-zero - others might create references to those
10200 DIEs via symbols.
10201 And we do not clear its DIE offset after outputting it
10202 (and the label refers to the actual DIEs, not the DWARF CU
10203 unit header, which is the case in which using label + offset
10204 would be the correct thing to do).
10205 ??? This is the reason for the with_offset flag. */
10206 if (AT_ref (a)->with_offset)
10207 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10208 debug_info_section, "%s", name);
10209 else
10210 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10211 name);
10212 }
10213 }
10214 else
10215 {
10216 gcc_assert (AT_ref (a)->die_offset);
10217 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10218 "%s", name);
10219 }
10220 break;
10221
10222 case dw_val_class_fde_ref:
10223 {
10224 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10225
10226 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10227 a->dw_attr_val.v.val_fde_index * 2);
10228 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10229 "%s", name);
10230 }
10231 break;
10232
10233 case dw_val_class_vms_delta:
10234 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10235 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10236 AT_vms_delta2 (a), AT_vms_delta1 (a),
10237 "%s", name);
10238 #else
10239 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10240 AT_vms_delta2 (a), AT_vms_delta1 (a),
10241 "%s", name);
10242 #endif
10243 break;
10244
10245 case dw_val_class_lbl_id:
10246 output_attr_index_or_value (a);
10247 break;
10248
10249 case dw_val_class_lineptr:
10250 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10251 debug_line_section, "%s", name);
10252 break;
10253
10254 case dw_val_class_macptr:
10255 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10256 debug_macinfo_section, "%s", name);
10257 break;
10258
10259 case dw_val_class_loclistsptr:
10260 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10261 debug_loc_section, "%s", name);
10262 break;
10263
10264 case dw_val_class_str:
10265 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10266 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10267 a->dw_attr_val.v.val_str->label,
10268 debug_str_section,
10269 "%s: \"%s\"", name, AT_string (a));
10270 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10271 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10272 a->dw_attr_val.v.val_str->label,
10273 debug_line_str_section,
10274 "%s: \"%s\"", name, AT_string (a));
10275 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10276 dw2_asm_output_data_uleb128 (AT_index (a),
10277 "%s: \"%s\"", name, AT_string (a));
10278 else
10279 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10280 break;
10281
10282 case dw_val_class_file:
10283 {
10284 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10285
10286 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10287 a->dw_attr_val.v.val_file->filename);
10288 break;
10289 }
10290
10291 case dw_val_class_file_implicit:
10292 if (flag_debug_asm)
10293 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10294 ASM_COMMENT_START, name,
10295 maybe_emit_file (a->dw_attr_val.v.val_file),
10296 a->dw_attr_val.v.val_file->filename);
10297 break;
10298
10299 case dw_val_class_data8:
10300 {
10301 int i;
10302
10303 for (i = 0; i < 8; i++)
10304 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10305 i == 0 ? "%s" : NULL, name);
10306 break;
10307 }
10308
10309 case dw_val_class_high_pc:
10310 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10311 get_AT_low_pc (die), "DW_AT_high_pc");
10312 break;
10313
10314 case dw_val_class_discr_value:
10315 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10316 break;
10317
10318 case dw_val_class_discr_list:
10319 {
10320 dw_discr_list_ref list = AT_discr_list (a);
10321 const int size = size_of_discr_list (list);
10322
10323 /* This is a block, so output its length first. */
10324 dw2_asm_output_data (constant_size (size), size,
10325 "%s: block size", name);
10326
10327 for (; list != NULL; list = list->dw_discr_next)
10328 {
10329 /* One byte for the discriminant value descriptor, and then as
10330 many LEB128 numbers as required. */
10331 if (list->dw_discr_range)
10332 dw2_asm_output_data (1, DW_DSC_range,
10333 "%s: DW_DSC_range", name);
10334 else
10335 dw2_asm_output_data (1, DW_DSC_label,
10336 "%s: DW_DSC_label", name);
10337
10338 output_discr_value (&list->dw_discr_lower_bound, name);
10339 if (list->dw_discr_range)
10340 output_discr_value (&list->dw_discr_upper_bound, name);
10341 }
10342 break;
10343 }
10344
10345 default:
10346 gcc_unreachable ();
10347 }
10348 }
10349
10350 FOR_EACH_CHILD (die, c, output_die (c));
10351
10352 /* Add null byte to terminate sibling list. */
10353 if (die->die_child != NULL)
10354 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10355 (unsigned long) die->die_offset);
10356 }
10357
10358 /* Output the compilation unit that appears at the beginning of the
10359 .debug_info section, and precedes the DIE descriptions. */
10360
10361 static void
10362 output_compilation_unit_header (enum dwarf_unit_type ut)
10363 {
10364 if (!XCOFF_DEBUGGING_INFO)
10365 {
10366 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10367 dw2_asm_output_data (4, 0xffffffff,
10368 "Initial length escape value indicating 64-bit DWARF extension");
10369 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10370 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10371 "Length of Compilation Unit Info");
10372 }
10373
10374 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10375 if (dwarf_version >= 5)
10376 {
10377 const char *name;
10378 switch (ut)
10379 {
10380 case DW_UT_compile: name = "DW_UT_compile"; break;
10381 case DW_UT_type: name = "DW_UT_type"; break;
10382 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10383 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10384 default: gcc_unreachable ();
10385 }
10386 dw2_asm_output_data (1, ut, "%s", name);
10387 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10388 }
10389 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10390 debug_abbrev_section,
10391 "Offset Into Abbrev. Section");
10392 if (dwarf_version < 5)
10393 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10394 }
10395
10396 /* Output the compilation unit DIE and its children. */
10397
10398 static void
10399 output_comp_unit (dw_die_ref die, int output_if_empty,
10400 const unsigned char *dwo_id)
10401 {
10402 const char *secname, *oldsym;
10403 char *tmp;
10404
10405 /* Unless we are outputting the main CU, we may throw away empty ones. */
10406 if (!output_if_empty && die->die_child == NULL)
10407 return;
10408
10409 /* Even if there are no children of this DIE, we must output the information
10410 about the compilation unit. Otherwise, on an empty translation unit, we
10411 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10412 will then complain when examining the file. First mark all the DIEs in
10413 this CU so we know which get local refs. */
10414 mark_dies (die);
10415
10416 external_ref_hash_type *extern_map = optimize_external_refs (die);
10417
10418 /* For now, optimize only the main CU; to optimize the rest
10419 we'd need to see all of them earlier. Leave the rest for post-linking
10420 tools like DWZ. */
10421 if (die == comp_unit_die ())
10422 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10423
10424 build_abbrev_table (die, extern_map);
10425
10426 optimize_abbrev_table ();
10427
10428 delete extern_map;
10429
10430 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10431 next_die_offset = (dwo_id
10432 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10433 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10434 calc_die_sizes (die);
10435
10436 oldsym = die->die_id.die_symbol;
10437 if (oldsym && die->comdat_type_p)
10438 {
10439 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10440
10441 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10442 secname = tmp;
10443 die->die_id.die_symbol = NULL;
10444 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10445 }
10446 else
10447 {
10448 switch_to_section (debug_info_section);
10449 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10450 info_section_emitted = true;
10451 }
10452
10453 /* For LTO cross unit DIE refs we want a symbol on the start of the
10454 debuginfo section, not on the CU DIE. */
10455 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10456 {
10457 /* ??? No way to get visibility assembled without a decl. */
10458 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10459 get_identifier (oldsym), char_type_node);
10460 TREE_PUBLIC (decl) = true;
10461 TREE_STATIC (decl) = true;
10462 DECL_ARTIFICIAL (decl) = true;
10463 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10464 DECL_VISIBILITY_SPECIFIED (decl) = true;
10465 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10466 #ifdef ASM_WEAKEN_LABEL
10467 /* We prefer a .weak because that handles duplicates from duplicate
10468 archive members in a graceful way. */
10469 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10470 #else
10471 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10472 #endif
10473 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10474 }
10475
10476 /* Output debugging information. */
10477 output_compilation_unit_header (dwo_id
10478 ? DW_UT_split_compile : DW_UT_compile);
10479 if (dwarf_version >= 5)
10480 {
10481 if (dwo_id != NULL)
10482 for (int i = 0; i < 8; i++)
10483 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10484 }
10485 output_die (die);
10486
10487 /* Leave the marks on the main CU, so we can check them in
10488 output_pubnames. */
10489 if (oldsym)
10490 {
10491 unmark_dies (die);
10492 die->die_id.die_symbol = oldsym;
10493 }
10494 }
10495
10496 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10497 and .debug_pubtypes. This is configured per-target, but can be
10498 overridden by the -gpubnames or -gno-pubnames options. */
10499
10500 static inline bool
10501 want_pubnames (void)
10502 {
10503 if (debug_info_level <= DINFO_LEVEL_TERSE)
10504 return false;
10505 if (debug_generate_pub_sections != -1)
10506 return debug_generate_pub_sections;
10507 return targetm.want_debug_pub_sections;
10508 }
10509
10510 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10511
10512 static void
10513 add_AT_pubnames (dw_die_ref die)
10514 {
10515 if (want_pubnames ())
10516 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10517 }
10518
10519 /* Add a string attribute value to a skeleton DIE. */
10520
10521 static inline void
10522 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10523 const char *str)
10524 {
10525 dw_attr_node attr;
10526 struct indirect_string_node *node;
10527
10528 if (! skeleton_debug_str_hash)
10529 skeleton_debug_str_hash
10530 = hash_table<indirect_string_hasher>::create_ggc (10);
10531
10532 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10533 find_string_form (node);
10534 if (node->form == DW_FORM_GNU_str_index)
10535 node->form = DW_FORM_strp;
10536
10537 attr.dw_attr = attr_kind;
10538 attr.dw_attr_val.val_class = dw_val_class_str;
10539 attr.dw_attr_val.val_entry = NULL;
10540 attr.dw_attr_val.v.val_str = node;
10541 add_dwarf_attr (die, &attr);
10542 }
10543
10544 /* Helper function to generate top-level dies for skeleton debug_info and
10545 debug_types. */
10546
10547 static void
10548 add_top_level_skeleton_die_attrs (dw_die_ref die)
10549 {
10550 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10551 const char *comp_dir = comp_dir_string ();
10552
10553 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10554 if (comp_dir != NULL)
10555 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10556 add_AT_pubnames (die);
10557 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10558 }
10559
10560 /* Output skeleton debug sections that point to the dwo file. */
10561
10562 static void
10563 output_skeleton_debug_sections (dw_die_ref comp_unit,
10564 const unsigned char *dwo_id)
10565 {
10566 /* These attributes will be found in the full debug_info section. */
10567 remove_AT (comp_unit, DW_AT_producer);
10568 remove_AT (comp_unit, DW_AT_language);
10569
10570 switch_to_section (debug_skeleton_info_section);
10571 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10572
10573 /* Produce the skeleton compilation-unit header. This one differs enough from
10574 a normal CU header that it's better not to call
10575 output_compilation_unit_header. */
10576 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10577 dw2_asm_output_data (4, 0xffffffff,
10578 "Initial length escape value indicating 64-bit "
10579 "DWARF extension");
10580
10581 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10582 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10583 - DWARF_INITIAL_LENGTH_SIZE
10584 + size_of_die (comp_unit),
10585 "Length of Compilation Unit Info");
10586 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10587 if (dwarf_version >= 5)
10588 {
10589 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10590 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10591 }
10592 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10593 debug_skeleton_abbrev_section,
10594 "Offset Into Abbrev. Section");
10595 if (dwarf_version < 5)
10596 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10597 else
10598 for (int i = 0; i < 8; i++)
10599 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10600
10601 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10602 output_die (comp_unit);
10603
10604 /* Build the skeleton debug_abbrev section. */
10605 switch_to_section (debug_skeleton_abbrev_section);
10606 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10607
10608 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10609
10610 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10611 }
10612
10613 /* Output a comdat type unit DIE and its children. */
10614
10615 static void
10616 output_comdat_type_unit (comdat_type_node *node)
10617 {
10618 const char *secname;
10619 char *tmp;
10620 int i;
10621 #if defined (OBJECT_FORMAT_ELF)
10622 tree comdat_key;
10623 #endif
10624
10625 /* First mark all the DIEs in this CU so we know which get local refs. */
10626 mark_dies (node->root_die);
10627
10628 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10629
10630 build_abbrev_table (node->root_die, extern_map);
10631
10632 delete extern_map;
10633 extern_map = NULL;
10634
10635 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10636 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10637 calc_die_sizes (node->root_die);
10638
10639 #if defined (OBJECT_FORMAT_ELF)
10640 if (dwarf_version >= 5)
10641 {
10642 if (!dwarf_split_debug_info)
10643 secname = ".debug_info";
10644 else
10645 secname = ".debug_info.dwo";
10646 }
10647 else if (!dwarf_split_debug_info)
10648 secname = ".debug_types";
10649 else
10650 secname = ".debug_types.dwo";
10651
10652 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10653 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10654 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10655 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10656 comdat_key = get_identifier (tmp);
10657 targetm.asm_out.named_section (secname,
10658 SECTION_DEBUG | SECTION_LINKONCE,
10659 comdat_key);
10660 #else
10661 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10662 sprintf (tmp, (dwarf_version >= 5
10663 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10664 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10665 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10666 secname = tmp;
10667 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10668 #endif
10669
10670 /* Output debugging information. */
10671 output_compilation_unit_header (dwarf_split_debug_info
10672 ? DW_UT_split_type : DW_UT_type);
10673 output_signature (node->signature, "Type Signature");
10674 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10675 "Offset to Type DIE");
10676 output_die (node->root_die);
10677
10678 unmark_dies (node->root_die);
10679 }
10680
10681 /* Return the DWARF2/3 pubname associated with a decl. */
10682
10683 static const char *
10684 dwarf2_name (tree decl, int scope)
10685 {
10686 if (DECL_NAMELESS (decl))
10687 return NULL;
10688 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10689 }
10690
10691 /* Add a new entry to .debug_pubnames if appropriate. */
10692
10693 static void
10694 add_pubname_string (const char *str, dw_die_ref die)
10695 {
10696 pubname_entry e;
10697
10698 e.die = die;
10699 e.name = xstrdup (str);
10700 vec_safe_push (pubname_table, e);
10701 }
10702
10703 static void
10704 add_pubname (tree decl, dw_die_ref die)
10705 {
10706 if (!want_pubnames ())
10707 return;
10708
10709 /* Don't add items to the table when we expect that the consumer will have
10710 just read the enclosing die. For example, if the consumer is looking at a
10711 class_member, it will either be inside the class already, or will have just
10712 looked up the class to find the member. Either way, searching the class is
10713 faster than searching the index. */
10714 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10715 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10716 {
10717 const char *name = dwarf2_name (decl, 1);
10718
10719 if (name)
10720 add_pubname_string (name, die);
10721 }
10722 }
10723
10724 /* Add an enumerator to the pubnames section. */
10725
10726 static void
10727 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10728 {
10729 pubname_entry e;
10730
10731 gcc_assert (scope_name);
10732 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10733 e.die = die;
10734 vec_safe_push (pubname_table, e);
10735 }
10736
10737 /* Add a new entry to .debug_pubtypes if appropriate. */
10738
10739 static void
10740 add_pubtype (tree decl, dw_die_ref die)
10741 {
10742 pubname_entry e;
10743
10744 if (!want_pubnames ())
10745 return;
10746
10747 if ((TREE_PUBLIC (decl)
10748 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10749 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10750 {
10751 tree scope = NULL;
10752 const char *scope_name = "";
10753 const char *sep = is_cxx () ? "::" : ".";
10754 const char *name;
10755
10756 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10757 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10758 {
10759 scope_name = lang_hooks.dwarf_name (scope, 1);
10760 if (scope_name != NULL && scope_name[0] != '\0')
10761 scope_name = concat (scope_name, sep, NULL);
10762 else
10763 scope_name = "";
10764 }
10765
10766 if (TYPE_P (decl))
10767 name = type_tag (decl);
10768 else
10769 name = lang_hooks.dwarf_name (decl, 1);
10770
10771 /* If we don't have a name for the type, there's no point in adding
10772 it to the table. */
10773 if (name != NULL && name[0] != '\0')
10774 {
10775 e.die = die;
10776 e.name = concat (scope_name, name, NULL);
10777 vec_safe_push (pubtype_table, e);
10778 }
10779
10780 /* Although it might be more consistent to add the pubinfo for the
10781 enumerators as their dies are created, they should only be added if the
10782 enum type meets the criteria above. So rather than re-check the parent
10783 enum type whenever an enumerator die is created, just output them all
10784 here. This isn't protected by the name conditional because anonymous
10785 enums don't have names. */
10786 if (die->die_tag == DW_TAG_enumeration_type)
10787 {
10788 dw_die_ref c;
10789
10790 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10791 }
10792 }
10793 }
10794
10795 /* Output a single entry in the pubnames table. */
10796
10797 static void
10798 output_pubname (dw_offset die_offset, pubname_entry *entry)
10799 {
10800 dw_die_ref die = entry->die;
10801 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10802
10803 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10804
10805 if (debug_generate_pub_sections == 2)
10806 {
10807 /* This logic follows gdb's method for determining the value of the flag
10808 byte. */
10809 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10810 switch (die->die_tag)
10811 {
10812 case DW_TAG_typedef:
10813 case DW_TAG_base_type:
10814 case DW_TAG_subrange_type:
10815 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10816 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10817 break;
10818 case DW_TAG_enumerator:
10819 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10820 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10821 if (!is_cxx ())
10822 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10823 break;
10824 case DW_TAG_subprogram:
10825 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10826 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10827 if (!is_ada ())
10828 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10829 break;
10830 case DW_TAG_constant:
10831 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10832 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10833 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10834 break;
10835 case DW_TAG_variable:
10836 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10837 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10838 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10839 break;
10840 case DW_TAG_namespace:
10841 case DW_TAG_imported_declaration:
10842 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10843 break;
10844 case DW_TAG_class_type:
10845 case DW_TAG_interface_type:
10846 case DW_TAG_structure_type:
10847 case DW_TAG_union_type:
10848 case DW_TAG_enumeration_type:
10849 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10850 if (!is_cxx ())
10851 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10852 break;
10853 default:
10854 /* An unusual tag. Leave the flag-byte empty. */
10855 break;
10856 }
10857 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10858 "GDB-index flags");
10859 }
10860
10861 dw2_asm_output_nstring (entry->name, -1, "external name");
10862 }
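
/* An illustrative sketch of one entry as emitted above; the offset,
   flag and name are invented and assume 32-bit DWARF:

	.long	0x152			# DIE offset
	.byte	<GDB-index flags>	# only with -ggnu-pubnames
	.string	"foo"			# external name

   With plain -gpubnames (debug_generate_pub_sections != 2) the flags
   byte is omitted.  */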
10863
10864
10865 /* Output the public names table used to speed up access to externally
10866 visible names; or the public types table used to find type definitions. */
10867
10868 static void
10869 output_pubnames (vec<pubname_entry, va_gc> *names)
10870 {
10871 unsigned i;
10872 unsigned long pubnames_length = size_of_pubnames (names);
10873 pubname_entry *pub;
10874
10875 if (!XCOFF_DEBUGGING_INFO)
10876 {
10877 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10878 dw2_asm_output_data (4, 0xffffffff,
10879 "Initial length escape value indicating 64-bit DWARF extension");
10880 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10881 "Pub Info Length");
10882 }
10883
10884 /* Version number for pubnames/pubtypes is independent of dwarf version. */
10885 dw2_asm_output_data (2, 2, "DWARF Version");
10886
10887 if (dwarf_split_debug_info)
10888 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10889 debug_skeleton_info_section,
10890 "Offset of Compilation Unit Info");
10891 else
10892 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10893 debug_info_section,
10894 "Offset of Compilation Unit Info");
10895 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10896 "Compilation Unit Length");
10897
10898 FOR_EACH_VEC_ELT (*names, i, pub)
10899 {
10900 if (include_pubname_in_output (names, pub))
10901 {
10902 dw_offset die_offset = pub->die->die_offset;
10903
10904 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10905 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10906 gcc_assert (pub->die->die_mark);
10907
10908 /* If we're putting types in their own .debug_types sections,
10909 the .debug_pubtypes table will still point to the compile
10910 unit (not the type unit), so we want to use the offset of
10911 the skeleton DIE (if there is one). */
10912 if (pub->die->comdat_type_p && names == pubtype_table)
10913 {
10914 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10915
10916 if (type_node != NULL)
10917 die_offset = (type_node->skeleton_die != NULL
10918 ? type_node->skeleton_die->die_offset
10919 : comp_unit_die ()->die_offset);
10920 }
10921
10922 output_pubname (die_offset, pub);
10923 }
10924 }
10925
10926 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10927 }
10928
10929 /* Output public names and types tables if necessary. */
10930
10931 static void
10932 output_pubtables (void)
10933 {
10934 if (!want_pubnames () || !info_section_emitted)
10935 return;
10936
10937 switch_to_section (debug_pubnames_section);
10938 output_pubnames (pubname_table);
10939 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10940 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10941 simply won't look for the section. */
10942 switch_to_section (debug_pubtypes_section);
10943 output_pubnames (pubtype_table);
10944 }
10945
10946
10947 /* Output the information that goes into the .debug_aranges table.
10948 Namely, define the beginning and ending address range of the
10949 text section generated for this compilation unit. */
10950
10951 static void
10952 output_aranges (void)
10953 {
10954 unsigned i;
10955 unsigned long aranges_length = size_of_aranges ();
10956
10957 if (!XCOFF_DEBUGGING_INFO)
10958 {
10959 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10960 dw2_asm_output_data (4, 0xffffffff,
10961 "Initial length escape value indicating 64-bit DWARF extension");
10962 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10963 "Length of Address Ranges Info");
10964 }
10965
10966 /* Version number for aranges is still 2, even up to DWARF5. */
10967 dw2_asm_output_data (2, 2, "DWARF Version");
10968 if (dwarf_split_debug_info)
10969 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10970 debug_skeleton_info_section,
10971 "Offset of Compilation Unit Info");
10972 else
10973 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10974 debug_info_section,
10975 "Offset of Compilation Unit Info");
10976 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10977 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10978
10979 /* We need to align to twice the pointer size here. */
10980 if (DWARF_ARANGES_PAD_SIZE)
10981 {
10982 /* Pad using 2-byte words so that the padding is correct for any
10983 pointer size.  */
10984 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10985 2 * DWARF2_ADDR_SIZE);
10986 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10987 dw2_asm_output_data (2, 0, NULL);
10988 }
10989
10990 /* Do not output these entries if the sections were not used; in
10991 that case the length will be 0 and the address may end up as 0
10992 if the section is discarded by ld --gc-sections, leaving an
10993 invalid (0, 0) entry that can be confused with the
10994 terminator.  */
10995 if (text_section_used)
10996 {
10997 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10998 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10999 text_section_label, "Length");
11000 }
11001 if (cold_text_section_used)
11002 {
11003 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11004 "Address");
11005 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11006 cold_text_section_label, "Length");
11007 }
11008
11009 if (have_multiple_function_sections)
11010 {
11011 unsigned fde_idx;
11012 dw_fde_ref fde;
11013
11014 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11015 {
11016 if (DECL_IGNORED_P (fde->decl))
11017 continue;
11018 if (!fde->in_std_section)
11019 {
11020 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11021 "Address");
11022 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11023 fde->dw_fde_begin, "Length");
11024 }
11025 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11026 {
11027 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11028 "Address");
11029 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11030 fde->dw_fde_second_begin, "Length");
11031 }
11032 }
11033 }
11034
11035 /* Output the terminator words. */
11036 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11037 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11038 }
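
/* An illustrative sketch of the .debug_aranges contents produced above
   for a CU whose code is all in .text; labels are invented and the
   numbers assume 32-bit DWARF with 8-byte addresses:

	.long	0x2c			# Length of Address Ranges Info
	.value	0x2			# DWARF Version
	.long	.Ldebug_info0		# Offset of Compilation Unit Info
	.byte	0x8			# Size of Address
	.byte	0			# Size of Segment Descriptor
	.value	0,0			# Pad to 16 byte boundary
	.quad	.Ltext0			# Address
	.quad	.Letext0-.Ltext0	# Length
	.quad	0,0			# terminator  */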
11039
11040 /* Add a new entry to .debug_ranges. Return its index into
11041 ranges_table vector. */
11042
11043 static unsigned int
11044 add_ranges_num (int num, bool maybe_new_sec)
11045 {
11046 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11047 vec_safe_push (ranges_table, r);
11048 return vec_safe_length (ranges_table) - 1;
11049 }
11050
11051 /* Add a new entry to .debug_ranges corresponding to a block, or a
11052 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11053 this entry might be in a different section from the previous range.  */
11054
11055 static unsigned int
11056 add_ranges (const_tree block, bool maybe_new_sec)
11057 {
11058 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11059 }
11060
11061 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11062 chain, or a middle entry of a chain that will be directly referred to.  */
11063
11064 static void
11065 note_rnglist_head (unsigned int offset)
11066 {
11067 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11068 return;
11069 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11070 }
11071
11072 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11073 When using dwarf_split_debug_info, address attributes in dies destined
11074 for the final executable should be direct references--setting the
11075 parameter force_direct ensures this behavior. */
11076
11077 static void
11078 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11079 bool *added, bool force_direct)
11080 {
11081 unsigned int in_use = vec_safe_length (ranges_by_label);
11082 unsigned int offset;
11083 dw_ranges_by_label rbl = { begin, end };
11084 vec_safe_push (ranges_by_label, rbl);
11085 offset = add_ranges_num (-(int)in_use - 1, true);
11086 if (!*added)
11087 {
11088 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11089 *added = true;
11090 note_rnglist_head (offset);
11091 }
11092 }
11093
11094 /* Emit .debug_ranges section. */
11095
11096 static void
11097 output_ranges (void)
11098 {
11099 unsigned i;
11100 static const char *const start_fmt = "Offset %#x";
11101 const char *fmt = start_fmt;
11102 dw_ranges *r;
11103
11104 switch_to_section (debug_ranges_section);
11105 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11106 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11107 {
11108 int block_num = r->num;
11109
11110 if (block_num > 0)
11111 {
11112 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11113 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11114
11115 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11116 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11117
11118 /* If all code is in the text section, then the compilation
11119 unit base address defaults to DW_AT_low_pc, which is the
11120 base of the text section. */
11121 if (!have_multiple_function_sections)
11122 {
11123 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11124 text_section_label,
11125 fmt, i * 2 * DWARF2_ADDR_SIZE);
11126 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11127 text_section_label, NULL);
11128 }
11129
11130 /* Otherwise, the compilation unit base address is zero,
11131 which allows us to use absolute addresses, and not worry
11132 about whether the target supports cross-section
11133 arithmetic. */
11134 else
11135 {
11136 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11137 fmt, i * 2 * DWARF2_ADDR_SIZE);
11138 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11139 }
11140
11141 fmt = NULL;
11142 }
11143
11144 /* Negative block_num stands for an index into ranges_by_label. */
11145 else if (block_num < 0)
11146 {
11147 int lab_idx = - block_num - 1;
11148
11149 if (!have_multiple_function_sections)
11150 {
11151 gcc_unreachable ();
11152 #if 0
11153 /* If we ever use add_ranges_by_labels () for a single
11154 function section, all we have to do is to take out
11155 the #if 0 above. */
11156 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11157 (*ranges_by_label)[lab_idx].begin,
11158 text_section_label,
11159 fmt, i * 2 * DWARF2_ADDR_SIZE);
11160 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11161 (*ranges_by_label)[lab_idx].end,
11162 text_section_label, NULL);
11163 #endif
11164 }
11165 else
11166 {
11167 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11168 (*ranges_by_label)[lab_idx].begin,
11169 fmt, i * 2 * DWARF2_ADDR_SIZE);
11170 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11171 (*ranges_by_label)[lab_idx].end,
11172 NULL);
11173 }
11174 }
11175 else
11176 {
11177 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11178 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11179 fmt = start_fmt;
11180 }
11181 }
11182 }
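
/* An illustrative sketch of the .debug_ranges entries emitted above for
   one block when all code is in the text section; labels are invented
   and 8-byte addresses are assumed:

	.quad	.LBB2-.Ltext0		# Offset 0
	.quad	.LBE2-.Ltext0
	.quad	0			# range terminator
	.quad	0

   With multiple function sections the pairs are emitted as absolute
   addresses instead (.quad .LBB2 / .quad .LBE2), relying on a zero CU
   base address.  */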
11183
11184 /* Non-zero if .debug_line_str should be used for .debug_line section
11185 strings or strings that are likely shareable with those. */
11186 #define DWARF5_USE_DEBUG_LINE_STR \
11187 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11188 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11189 /* FIXME: there is no .debug_line_str.dwo section, \
11190 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11191 && !dwarf_split_debug_info)
11192
11193 /* Assign .debug_rnglists indexes. */
11194
11195 static void
11196 index_rnglists (void)
11197 {
11198 unsigned i;
11199 dw_ranges *r;
11200
11201 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11202 if (r->label)
11203 r->idx = rnglist_idx++;
11204 }
11205
11206 /* Emit .debug_rnglists section. */
11207
11208 static void
11209 output_rnglists (unsigned generation)
11210 {
11211 unsigned i;
11212 dw_ranges *r;
11213 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11214 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11215 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11216
11217 switch_to_section (debug_ranges_section);
11218 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11219 /* There are up to 4 unique ranges labels per generation.
11220 See also init_sections_and_labels. */
11221 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11222 2 + generation * 4);
11223 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11224 3 + generation * 4);
11225 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11226 dw2_asm_output_data (4, 0xffffffff,
11227 "Initial length escape value indicating "
11228 "64-bit DWARF extension");
11229 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11230 "Length of Range Lists");
11231 ASM_OUTPUT_LABEL (asm_out_file, l1);
11232 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11233 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11234 dw2_asm_output_data (1, 0, "Segment Size");
11235 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11236 about relocation sizes and primarily care about the size of .debug*
11237 sections in linked shared libraries and executables, then
11238 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11239 into it are usually larger than just DW_FORM_sec_offset offsets
11240 into the .debug_rnglists section. */
11241 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11242 "Offset Entry Count");
11243 if (dwarf_split_debug_info)
11244 {
11245 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11246 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11247 if (r->label)
11248 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11249 ranges_base_label, NULL);
11250 }
11251
11252 const char *lab = "";
11253 unsigned int len = vec_safe_length (ranges_table);
11254 const char *base = NULL;
11255 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11256 {
11257 int block_num = r->num;
11258
11259 if (r->label)
11260 {
11261 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11262 lab = r->label;
11263 }
11264 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11265 base = NULL;
11266 if (block_num > 0)
11267 {
11268 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11269 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11270
11271 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11272 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11273
11274 if (HAVE_AS_LEB128)
11275 {
11276 /* If all code is in the text section, then the compilation
11277 unit base address defaults to DW_AT_low_pc, which is the
11278 base of the text section. */
11279 if (!have_multiple_function_sections)
11280 {
11281 dw2_asm_output_data (1, DW_RLE_offset_pair,
11282 "DW_RLE_offset_pair (%s)", lab);
11283 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11284 "Range begin address (%s)", lab);
11285 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11286 "Range end address (%s)", lab);
11287 continue;
11288 }
11289 if (base == NULL)
11290 {
11291 dw_ranges *r2 = NULL;
11292 if (i < len - 1)
11293 r2 = &(*ranges_table)[i + 1];
11294 if (r2
11295 && r2->num != 0
11296 && r2->label == NULL
11297 && !r2->maybe_new_sec)
11298 {
11299 dw2_asm_output_data (1, DW_RLE_base_address,
11300 "DW_RLE_base_address (%s)", lab);
11301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11302 "Base address (%s)", lab);
11303 strcpy (basebuf, blabel);
11304 base = basebuf;
11305 }
11306 }
11307 if (base)
11308 {
11309 dw2_asm_output_data (1, DW_RLE_offset_pair,
11310 "DW_RLE_offset_pair (%s)", lab);
11311 dw2_asm_output_delta_uleb128 (blabel, base,
11312 "Range begin address (%s)", lab);
11313 dw2_asm_output_delta_uleb128 (elabel, base,
11314 "Range end address (%s)", lab);
11315 continue;
11316 }
11317 dw2_asm_output_data (1, DW_RLE_start_length,
11318 "DW_RLE_start_length (%s)", lab);
11319 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11320 "Range begin address (%s)", lab);
11321 dw2_asm_output_delta_uleb128 (elabel, blabel,
11322 "Range length (%s)", lab);
11323 }
11324 else
11325 {
11326 dw2_asm_output_data (1, DW_RLE_start_end,
11327 "DW_RLE_start_end (%s)", lab);
11328 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11329 "Range begin address (%s)", lab);
11330 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11331 "Range end address (%s)", lab);
11332 }
11333 }
11334
11335 /* Negative block_num stands for an index into ranges_by_label. */
11336 else if (block_num < 0)
11337 {
11338 int lab_idx = - block_num - 1;
11339 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11340 const char *elabel = (*ranges_by_label)[lab_idx].end;
11341
11342 if (!have_multiple_function_sections)
11343 gcc_unreachable ();
11344 if (HAVE_AS_LEB128)
11345 {
11346 dw2_asm_output_data (1, DW_RLE_start_length,
11347 "DW_RLE_start_length (%s)", lab);
11348 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11349 "Range begin address (%s)", lab);
11350 dw2_asm_output_delta_uleb128 (elabel, blabel,
11351 "Range length (%s)", lab);
11352 }
11353 else
11354 {
11355 dw2_asm_output_data (1, DW_RLE_start_end,
11356 "DW_RLE_start_end (%s)", lab);
11357 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11358 "Range begin address (%s)", lab);
11359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11360 "Range end address (%s)", lab);
11361 }
11362 }
11363 else
11364 dw2_asm_output_data (1, DW_RLE_end_of_list,
11365 "DW_RLE_end_of_list (%s)", lab);
11366 }
11367 ASM_OUTPUT_LABEL (asm_out_file, l2);
11368 }
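
/* An illustrative sketch of the DW_RLE_* encodings chosen above for a
   block range; labels are invented, and 8-byte addresses plus
   HAVE_AS_LEB128 are assumed:

	.byte	0x4			# DW_RLE_offset_pair
	.uleb128 .LBB2-.Ltext0		# Range begin address
	.uleb128 .LBE2-.Ltext0		# Range end address

   or, when no usable base address is available,

	.byte	0x7			# DW_RLE_start_length
	.quad	.LBB2			# Range begin address
	.uleb128 .LBE2-.LBB2		# Range length

   and each list is closed by

	.byte	0			# DW_RLE_end_of_list  */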
11369
11370 /* Data structure containing information about input files. */
11371 struct file_info
11372 {
11373 const char *path; /* Complete file name. */
11374 const char *fname; /* File name part. */
11375 int length; /* Length of entire string. */
11376 struct dwarf_file_data * file_idx; /* Index in input file table. */
11377 int dir_idx; /* Index in directory table. */
11378 };
11379
11380 /* Data structure containing information about directories with source
11381 files. */
11382 struct dir_info
11383 {
11384 const char *path; /* Path including directory name. */
11385 int length; /* Path length. */
11386 int prefix; /* Index of directory entry which is a prefix. */
11387 int count; /* Number of files in this directory. */
11388 int dir_idx; /* Index of directory used as base. */
11389 };
11390
11391 /* Callback function for file_info comparison. We sort by looking at
11392 the directories in the path. */
11393
11394 static int
11395 file_info_cmp (const void *p1, const void *p2)
11396 {
11397 const struct file_info *const s1 = (const struct file_info *) p1;
11398 const struct file_info *const s2 = (const struct file_info *) p2;
11399 const unsigned char *cp1;
11400 const unsigned char *cp2;
11401
11402 /* Take care of file names without directories.  We need to return
11403 consistent values to qsort, since some implementations get confused if
11404 we return the same nonzero value when the same operands are passed in
11405 opposite orders.  So if neither has a directory, return 0, and otherwise
11406 return 1 or -1 depending on which one has the directory.  */
11407 if ((s1->path == s1->fname || s2->path == s2->fname))
11408 return (s2->path == s2->fname) - (s1->path == s1->fname);
11409
11410 cp1 = (const unsigned char *) s1->path;
11411 cp2 = (const unsigned char *) s2->path;
11412
11413 while (1)
11414 {
11415 ++cp1;
11416 ++cp2;
11417 /* Reached the end of either directory prefix?  If so, handle as above.  */
11418 if ((cp1 == (const unsigned char *) s1->fname)
11419 || (cp2 == (const unsigned char *) s2->fname))
11420 return ((cp2 == (const unsigned char *) s2->fname)
11421 - (cp1 == (const unsigned char *) s1->fname));
11422
11423 /* Does the character at this position differ?  If so, sort on it.  */
11424 else if (*cp1 != *cp2)
11425 return *cp1 - *cp2;
11426 }
11427 }
11428
11429 struct file_name_acquire_data
11430 {
11431 struct file_info *files;
11432 int used_files;
11433 int max_files;
11434 };
11435
11436 /* Traversal function for the hash table. */
11437
11438 int
11439 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11440 {
11441 struct dwarf_file_data *d = *slot;
11442 struct file_info *fi;
11443 const char *f;
11444
11445 gcc_assert (fnad->max_files >= d->emitted_number);
11446
11447 if (! d->emitted_number)
11448 return 1;
11449
11450 gcc_assert (fnad->max_files != fnad->used_files);
11451
11452 fi = fnad->files + fnad->used_files++;
11453
11454 /* Skip all leading "./". */
11455 f = d->filename;
11456 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11457 f += 2;
11458
11459 /* Create a new array entry. */
11460 fi->path = f;
11461 fi->length = strlen (f);
11462 fi->file_idx = d;
11463
11464 /* Search for the file name part. */
11465 f = strrchr (f, DIR_SEPARATOR);
11466 #if defined (DIR_SEPARATOR_2)
11467 {
11468 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11469
11470 if (g != NULL)
11471 {
11472 if (f == NULL || f < g)
11473 f = g;
11474 }
11475 }
11476 #endif
11477
11478 fi->fname = f == NULL ? fi->path : f + 1;
11479 return 1;
11480 }
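
/* Purely for illustration: a table entry recorded for "./src/foo.c"
   ends up with fi->path == "src/foo.c", fi->length == 9 and fi->fname
   pointing at "foo.c"; a bare "bar.c" gets fi->fname == fi->path.  */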
11481
11482 /* Helper function for output_file_names. Emit a FORM encoded
11483 string STR, with assembly comment start ENTRY_KIND and
11484 index IDX.  */
11485
11486 static void
11487 output_line_string (enum dwarf_form form, const char *str,
11488 const char *entry_kind, unsigned int idx)
11489 {
11490 switch (form)
11491 {
11492 case DW_FORM_string:
11493 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11494 break;
11495 case DW_FORM_line_strp:
11496 if (!debug_line_str_hash)
11497 debug_line_str_hash
11498 = hash_table<indirect_string_hasher>::create_ggc (10);
11499
11500 struct indirect_string_node *node;
11501 node = find_AT_string_in_table (str, debug_line_str_hash);
11502 set_indirect_string (node);
11503 node->form = form;
11504 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11505 debug_line_str_section, "%s: %#x: \"%s\"",
11506 entry_kind, 0, node->str);
11507 break;
11508 default:
11509 gcc_unreachable ();
11510 }
11511 }
11512
11513 /* Output the directory table and the file name table. We try to minimize
11514 the total amount of memory needed. A heuristic is used to avoid large
11515 slowdowns with many input files. */
11516
11517 static void
11518 output_file_names (void)
11519 {
11520 struct file_name_acquire_data fnad;
11521 int numfiles;
11522 struct file_info *files;
11523 struct dir_info *dirs;
11524 int *saved;
11525 int *savehere;
11526 int *backmap;
11527 int ndirs;
11528 int idx_offset;
11529 int i;
11530
11531 if (!last_emitted_file)
11532 {
11533 if (dwarf_version >= 5)
11534 {
11535 dw2_asm_output_data (1, 0, "Directory entry format count");
11536 dw2_asm_output_data_uleb128 (0, "Directories count");
11537 dw2_asm_output_data (1, 0, "File name entry format count");
11538 dw2_asm_output_data_uleb128 (0, "File names count");
11539 }
11540 else
11541 {
11542 dw2_asm_output_data (1, 0, "End directory table");
11543 dw2_asm_output_data (1, 0, "End file name table");
11544 }
11545 return;
11546 }
11547
11548 numfiles = last_emitted_file->emitted_number;
11549
11550 /* Allocate the various arrays we need. */
11551 files = XALLOCAVEC (struct file_info, numfiles);
11552 dirs = XALLOCAVEC (struct dir_info, numfiles);
11553
11554 fnad.files = files;
11555 fnad.used_files = 0;
11556 fnad.max_files = numfiles;
11557 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11558 gcc_assert (fnad.used_files == fnad.max_files);
11559
11560 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11561
11562 /* Find all the different directories used. */
11563 dirs[0].path = files[0].path;
11564 dirs[0].length = files[0].fname - files[0].path;
11565 dirs[0].prefix = -1;
11566 dirs[0].count = 1;
11567 dirs[0].dir_idx = 0;
11568 files[0].dir_idx = 0;
11569 ndirs = 1;
11570
11571 for (i = 1; i < numfiles; i++)
11572 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11573 && memcmp (dirs[ndirs - 1].path, files[i].path,
11574 dirs[ndirs - 1].length) == 0)
11575 {
11576 /* Same directory as last entry. */
11577 files[i].dir_idx = ndirs - 1;
11578 ++dirs[ndirs - 1].count;
11579 }
11580 else
11581 {
11582 int j;
11583
11584 /* This is a new directory. */
11585 dirs[ndirs].path = files[i].path;
11586 dirs[ndirs].length = files[i].fname - files[i].path;
11587 dirs[ndirs].count = 1;
11588 dirs[ndirs].dir_idx = ndirs;
11589 files[i].dir_idx = ndirs;
11590
11591 /* Search for a prefix. */
11592 dirs[ndirs].prefix = -1;
11593 for (j = 0; j < ndirs; j++)
11594 if (dirs[j].length < dirs[ndirs].length
11595 && dirs[j].length > 1
11596 && (dirs[ndirs].prefix == -1
11597 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11598 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11599 dirs[ndirs].prefix = j;
11600
11601 ++ndirs;
11602 }
11603
11604 /* Now to the actual work.  We have to find a subset of the directories which
11605 allows expressing each file name, using references to the directory table,
11606 with the fewest characters.  We do not do an exhaustive search where we
11607 would have to check every combination of every single possible prefix.
11608 Instead we use a heuristic which provides nearly optimal results in most
11609 cases and is never far off.  */
11610 saved = XALLOCAVEC (int, ndirs);
11611 savehere = XALLOCAVEC (int, ndirs);
11612
11613 memset (saved, '\0', ndirs * sizeof (saved[0]));
11614 for (i = 0; i < ndirs; i++)
11615 {
11616 int j;
11617 int total;
11618
11619 /* We can always save some space for the current directory. But this
11620 does not mean it will be enough to justify adding the directory. */
11621 savehere[i] = dirs[i].length;
11622 total = (savehere[i] - saved[i]) * dirs[i].count;
11623
11624 for (j = i + 1; j < ndirs; j++)
11625 {
11626 savehere[j] = 0;
11627 if (saved[j] < dirs[i].length)
11628 {
11629 /* Determine whether the dirs[i] path is a prefix of the
11630 dirs[j] path. */
11631 int k;
11632
11633 k = dirs[j].prefix;
11634 while (k != -1 && k != (int) i)
11635 k = dirs[k].prefix;
11636
11637 if (k == (int) i)
11638 {
11639 /* Yes it is. We can possibly save some memory by
11640 writing the filenames in dirs[j] relative to
11641 dirs[i]. */
11642 savehere[j] = dirs[i].length;
11643 total += (savehere[j] - saved[j]) * dirs[j].count;
11644 }
11645 }
11646 }
11647
11648 /* Check whether we can save enough to justify adding the dirs[i]
11649 directory. */
11650 if (total > dirs[i].length + 1)
11651 {
11652 /* It's worthwhile adding. */
11653 for (j = i; j < ndirs; j++)
11654 if (savehere[j] > 0)
11655 {
11656 /* Remember how much we saved for this directory so far. */
11657 saved[j] = savehere[j];
11658
11659 /* Remember the prefix directory. */
11660 dirs[j].dir_idx = i;
11661 }
11662 }
11663 }
11664
11665 /* Emit the directory name table. */
11666 idx_offset = dirs[0].length > 0 ? 1 : 0;
11667 enum dwarf_form str_form = DW_FORM_string;
11668 enum dwarf_form idx_form = DW_FORM_udata;
11669 if (dwarf_version >= 5)
11670 {
11671 const char *comp_dir = comp_dir_string ();
11672 if (comp_dir == NULL)
11673 comp_dir = "";
11674 dw2_asm_output_data (1, 1, "Directory entry format count");
11675 if (DWARF5_USE_DEBUG_LINE_STR)
11676 str_form = DW_FORM_line_strp;
11677 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11678 dw2_asm_output_data_uleb128 (str_form, "%s",
11679 get_DW_FORM_name (str_form));
11680 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11681 if (str_form == DW_FORM_string)
11682 {
11683 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11684 for (i = 1 - idx_offset; i < ndirs; i++)
11685 dw2_asm_output_nstring (dirs[i].path,
11686 dirs[i].length
11687 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11688 "Directory Entry: %#x", i + idx_offset);
11689 }
11690 else
11691 {
11692 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11693 for (i = 1 - idx_offset; i < ndirs; i++)
11694 {
11695 const char *str
11696 = ggc_alloc_string (dirs[i].path,
11697 dirs[i].length
11698 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11699 output_line_string (str_form, str, "Directory Entry",
11700 (unsigned) i + idx_offset);
11701 }
11702 }
11703 }
11704 else
11705 {
11706 for (i = 1 - idx_offset; i < ndirs; i++)
11707 dw2_asm_output_nstring (dirs[i].path,
11708 dirs[i].length
11709 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11710 "Directory Entry: %#x", i + idx_offset);
11711
11712 dw2_asm_output_data (1, 0, "End directory table");
11713 }
11714
11715 /* We have to emit them in the order of emitted_number since that's
11716 used in the debug info generation. To do this efficiently we
11717 generate a back-mapping of the indices first. */
11718 backmap = XALLOCAVEC (int, numfiles);
11719 for (i = 0; i < numfiles; i++)
11720 backmap[files[i].file_idx->emitted_number - 1] = i;
11721
11722 if (dwarf_version >= 5)
11723 {
11724 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11725 if (filename0 == NULL)
11726 filename0 = "";
11727 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11728 DW_FORM_data2.  Choose one based on the number of directories
11729 and how much space they would occupy in each encoding.
11730 If we have at most 256 directories, all indexes fit into
11731 a single byte, so DW_FORM_data1 is the most compact (with at
11732 most 128 directories DW_FORM_udata would be just as compact,
11733 but no shorter, and slower to decode).  */
11734 if (ndirs + idx_offset <= 256)
11735 idx_form = DW_FORM_data1;
11736 /* If there are more than 65536 directories, we have to use
11737 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11738 Otherwise, compute how much space the indexes would occupy if
11739 they all used DW_FORM_udata (sum), compare that to the size of
11740 the DW_FORM_data2 encoding, and pick the more efficient one.  */
11741 else if (ndirs + idx_offset <= 65536)
11742 {
11743 unsigned HOST_WIDE_INT sum = 1;
11744 for (i = 0; i < numfiles; i++)
11745 {
11746 int file_idx = backmap[i];
11747 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11748 sum += size_of_uleb128 (dir_idx);
11749 }
11750 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11751 idx_form = DW_FORM_data2;
11752 }
11753 #ifdef VMS_DEBUGGING_INFO
11754 dw2_asm_output_data (1, 4, "File name entry format count");
11755 #else
11756 dw2_asm_output_data (1, 2, "File name entry format count");
11757 #endif
11758 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11759 dw2_asm_output_data_uleb128 (str_form, "%s",
11760 get_DW_FORM_name (str_form));
11761 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11762 "DW_LNCT_directory_index");
11763 dw2_asm_output_data_uleb128 (idx_form, "%s",
11764 get_DW_FORM_name (idx_form));
11765 #ifdef VMS_DEBUGGING_INFO
11766 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11767 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11768 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11769 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11770 #endif
11771 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11772
11773 output_line_string (str_form, filename0, "File Entry", 0);
11774
11775 /* Include directory index. */
11776 if (idx_form != DW_FORM_udata)
11777 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11778 0, NULL);
11779 else
11780 dw2_asm_output_data_uleb128 (0, NULL);
11781
11782 #ifdef VMS_DEBUGGING_INFO
11783 dw2_asm_output_data_uleb128 (0, NULL);
11784 dw2_asm_output_data_uleb128 (0, NULL);
11785 #endif
11786 }
11787
11788 /* Now write all the file names. */
11789 for (i = 0; i < numfiles; i++)
11790 {
11791 int file_idx = backmap[i];
11792 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11793
11794 #ifdef VMS_DEBUGGING_INFO
11795 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11796
11797 /* Setting these fields can lead to debugger miscomparisons,
11798 but VMS Debug requires them to be set correctly. */
11799
11800 int ver;
11801 long long cdt;
11802 long siz;
11803 int maxfilelen = (strlen (files[file_idx].path)
11804 + dirs[dir_idx].length
11805 + MAX_VMS_VERSION_LEN + 1);
11806 char *filebuf = XALLOCAVEC (char, maxfilelen);
11807
11808 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11809 snprintf (filebuf, maxfilelen, "%s;%d",
11810 files[file_idx].path + dirs[dir_idx].length, ver);
11811
11812 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11813
11814 /* Include directory index. */
11815 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11816 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11817 dir_idx + idx_offset, NULL);
11818 else
11819 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11820
11821 /* Modification time. */
11822 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11823 &cdt, 0, 0, 0) == 0)
11824 ? cdt : 0, NULL);
11825
11826 /* File length in bytes. */
11827 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11828 0, &siz, 0, 0) == 0)
11829 ? siz : 0, NULL);
11830 #else
11831 output_line_string (str_form,
11832 files[file_idx].path + dirs[dir_idx].length,
11833 "File Entry", (unsigned) i + 1);
11834
11835 /* Include directory index. */
11836 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11837 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11838 dir_idx + idx_offset, NULL);
11839 else
11840 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11841
11842 if (dwarf_version >= 5)
11843 continue;
11844
11845 /* Modification time. */
11846 dw2_asm_output_data_uleb128 (0, NULL);
11847
11848 /* File length in bytes. */
11849 dw2_asm_output_data_uleb128 (0, NULL);
11850 #endif /* VMS_DEBUGGING_INFO */
11851 }
11852
11853 if (dwarf_version < 5)
11854 dw2_asm_output_data (1, 0, "End file name table");
11855 }
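
/* An illustrative sketch of the DWARF 5 shape of these tables for a
   two-file compilation; every string and index below is invented, and
   DW_FORM_string / DW_FORM_data1 are assumed for brevity:

	.byte	0x1			# Directory entry format count
	.uleb128 0x1			# DW_LNCT_path
	.uleb128 0x8			# DW_FORM_string
	.uleb128 0x2			# Directories count
	.string	"/home/user/proj"	# Directory Entry: 0 (comp_dir)
	.string	"src/"			# Directory Entry: 0x1
	.byte	0x2			# File name entry format count
	.uleb128 0x1			# DW_LNCT_path
	.uleb128 0x8			# DW_FORM_string
	.uleb128 0x2			# DW_LNCT_directory_index
	.uleb128 0xb			# DW_FORM_data1
	.uleb128 0x3			# File names count
	.string	"src/foo.c"		# File Entry: 0 (CU DW_AT_name)
	.byte	0			# directory index
	.string	"foo.c"			# File Entry: 0x1
	.byte	0x1			# directory index
	.string	"bar.c"			# File Entry: 0x2
	.byte	0x1			# directory index  */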
11856
11857
11858 /* Output one line number table into the .debug_line section. */
11859
11860 static void
11861 output_one_line_info_table (dw_line_info_table *table)
11862 {
11863 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11864 unsigned int current_line = 1;
11865 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11866 dw_line_info_entry *ent;
11867 size_t i;
11868
11869 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11870 {
11871 switch (ent->opcode)
11872 {
11873 case LI_set_address:
11874 /* ??? Unfortunately, we have little choice here currently, and
11875 must always use the most general form. GCC does not know the
11876 address delta itself, so we can't use DW_LNS_advance_pc. Many
11877 ports do have length attributes which will give an upper bound
11878 on the address range. We could perhaps use length attributes
11879 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11880 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11881
11882 /* This can handle any delta. This takes
11883 4+DWARF2_ADDR_SIZE bytes. */
11884 dw2_asm_output_data (1, 0, "set address %s", line_label);
11885 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11886 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11887 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11888 break;
11889
11890 case LI_set_line:
11891 if (ent->val == current_line)
11892 {
11893 /* We still need to start a new row, so output a copy insn. */
11894 dw2_asm_output_data (1, DW_LNS_copy,
11895 "copy line %u", current_line);
11896 }
11897 else
11898 {
11899 int line_offset = ent->val - current_line;
11900 int line_delta = line_offset - DWARF_LINE_BASE;
11901
11902 current_line = ent->val;
11903 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11904 {
11905 /* This can handle deltas from -10 to 234, using the current
11906 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11907 This takes 1 byte. */
11908 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11909 "line %u", current_line);
11910 }
11911 else
11912 {
11913 /* This can handle any delta. This takes at least 4 bytes,
11914 depending on the value being encoded. */
11915 dw2_asm_output_data (1, DW_LNS_advance_line,
11916 "advance to line %u", current_line);
11917 dw2_asm_output_data_sleb128 (line_offset, NULL);
11918 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11919 }
11920 }
11921 break;
11922
11923 case LI_set_file:
11924 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11925 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11926 break;
11927
11928 case LI_set_column:
11929 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11930 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11931 break;
11932
11933 case LI_negate_stmt:
11934 current_is_stmt = !current_is_stmt;
11935 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11936 "is_stmt %d", current_is_stmt);
11937 break;
11938
11939 case LI_set_prologue_end:
11940 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11941 "set prologue end");
11942 break;
11943
11944 case LI_set_epilogue_begin:
11945 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11946 "set epilogue begin");
11947 break;
11948
11949 case LI_set_discriminator:
11950 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11951 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11952 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11953 dw2_asm_output_data_uleb128 (ent->val, NULL);
11954 break;
11955 }
11956 }
11957
11958 /* Emit debug info for the address of the end of the table. */
11959 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11960 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11961 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11962 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11963
11964 dw2_asm_output_data (1, 0, "end sequence");
11965 dw2_asm_output_data_uleb128 (1, NULL);
11966 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
11967 }
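
/* An illustrative sketch of the byte sequences the cases above emit;
   the label and numbers are invented and 8-byte addresses assumed:

	.byte	0			# set address .LM42 (extended opcode)
	.uleb128 0x9
	.byte	0x2			# DW_LNE_set_address
	.quad	.LM42

	.byte	DWARF_LINE_OPCODE_BASE+line_delta   # one-byte special opcode
						    # for a small line advance

	.byte	0x3			# DW_LNS_advance_line, any other delta
	.sleb128 line_offset
	.byte	0x1			# DW_LNS_copy  */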
11968
11969 /* Output the source line number correspondence information. This
11970 information goes into the .debug_line section. */
11971
11972 static void
11973 output_line_info (bool prologue_only)
11974 {
11975 static unsigned int generation;
11976 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11977 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11978 bool saw_one = false;
11979 int opc;
11980
11981 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11982 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11983 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11984 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11985
11986 if (!XCOFF_DEBUGGING_INFO)
11987 {
11988 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11989 dw2_asm_output_data (4, 0xffffffff,
11990 "Initial length escape value indicating 64-bit DWARF extension");
11991 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11992 "Length of Source Line Info");
11993 }
11994
11995 ASM_OUTPUT_LABEL (asm_out_file, l1);
11996
11997 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11998 if (dwarf_version >= 5)
11999 {
12000 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12001 dw2_asm_output_data (1, 0, "Segment Size");
12002 }
12003 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12004 ASM_OUTPUT_LABEL (asm_out_file, p1);
12005
12006 /* Define the architecture-dependent minimum instruction length (in bytes).
12007 In this implementation of DWARF, this field is used for information
12008 purposes only. Since GCC generates assembly language, we have no
12009 a priori knowledge of how many instruction bytes are generated for each
12010 source line, and therefore can use only the DW_LNE_set_address and
12011 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12012 this as '1', which is "correct enough" for all architectures,
12013 and don't let the target override. */
12014 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12015
12016 if (dwarf_version >= 4)
12017 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12018 "Maximum Operations Per Instruction");
12019 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12020 "Default is_stmt_start flag");
12021 dw2_asm_output_data (1, DWARF_LINE_BASE,
12022 "Line Base Value (Special Opcodes)");
12023 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12024 "Line Range Value (Special Opcodes)");
12025 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12026 "Special Opcode Base");
12027
12028 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12029 {
12030 int n_op_args;
12031 switch (opc)
12032 {
12033 case DW_LNS_advance_pc:
12034 case DW_LNS_advance_line:
12035 case DW_LNS_set_file:
12036 case DW_LNS_set_column:
12037 case DW_LNS_fixed_advance_pc:
12038 case DW_LNS_set_isa:
12039 n_op_args = 1;
12040 break;
12041 default:
12042 n_op_args = 0;
12043 break;
12044 }
12045
12046 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12047 opc, n_op_args);
12048 }
12049
12050 /* Write out the information about the files we use. */
12051 output_file_names ();
12052 ASM_OUTPUT_LABEL (asm_out_file, p2);
12053 if (prologue_only)
12054 {
12055 /* Output the marker for the end of the line number info. */
12056 ASM_OUTPUT_LABEL (asm_out_file, l2);
12057 return;
12058 }
12059
12060 if (separate_line_info)
12061 {
12062 dw_line_info_table *table;
12063 size_t i;
12064
12065 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12066 if (table->in_use)
12067 {
12068 output_one_line_info_table (table);
12069 saw_one = true;
12070 }
12071 }
12072 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12073 {
12074 output_one_line_info_table (cold_text_section_line_info);
12075 saw_one = true;
12076 }
12077
12078 /* ??? Some Darwin linkers crash on a .debug_line section with no
12079 sequences. Further, merely a DW_LNE_end_sequence entry is not
12080 sufficient -- the address column must also be initialized.
12081 Make sure to output at least one set_address/end_sequence pair,
12082 choosing .text since that section is always present. */
12083 if (text_section_line_info->in_use || !saw_one)
12084 output_one_line_info_table (text_section_line_info);
12085
12086 /* Output the marker for the end of the line number info. */
12087 ASM_OUTPUT_LABEL (asm_out_file, l2);
12088 }
12089 \f
12090 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12091
12092 static inline bool
12093 need_endianity_attribute_p (bool reverse)
12094 {
12095 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12096 }
12097
12098 /* Given a pointer to a tree node for some base type, return a pointer to
12099 a DIE that describes the given type. REVERSE is true if the type is
12100 to be interpreted in the reverse storage order wrt the target order.
12101
12102 This routine must only be called for GCC type nodes that correspond to
12103 Dwarf base (fundamental) types. */
12104
12105 static dw_die_ref
12106 base_type_die (tree type, bool reverse)
12107 {
12108 dw_die_ref base_type_result;
12109 enum dwarf_type encoding;
12110 bool fpt_used = false;
12111 struct fixed_point_type_info fpt_info;
12112 tree type_bias = NULL_TREE;
12113
12114 /* If this is a subtype that should not be emitted as a subrange type,
12115 use the base type. See subrange_type_for_debug_p. */
12116 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12117 type = TREE_TYPE (type);
12118
12119 switch (TREE_CODE (type))
12120 {
12121 case INTEGER_TYPE:
12122 if ((dwarf_version >= 4 || !dwarf_strict)
12123 && TYPE_NAME (type)
12124 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12125 && DECL_IS_BUILTIN (TYPE_NAME (type))
12126 && DECL_NAME (TYPE_NAME (type)))
12127 {
12128 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12129 if (strcmp (name, "char16_t") == 0
12130 || strcmp (name, "char32_t") == 0)
12131 {
12132 encoding = DW_ATE_UTF;
12133 break;
12134 }
12135 }
12136 if ((dwarf_version >= 3 || !dwarf_strict)
12137 && lang_hooks.types.get_fixed_point_type_info)
12138 {
12139 memset (&fpt_info, 0, sizeof (fpt_info));
12140 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12141 {
12142 fpt_used = true;
12143 encoding = ((TYPE_UNSIGNED (type))
12144 ? DW_ATE_unsigned_fixed
12145 : DW_ATE_signed_fixed);
12146 break;
12147 }
12148 }
12149 if (TYPE_STRING_FLAG (type))
12150 {
12151 if (TYPE_UNSIGNED (type))
12152 encoding = DW_ATE_unsigned_char;
12153 else
12154 encoding = DW_ATE_signed_char;
12155 }
12156 else if (TYPE_UNSIGNED (type))
12157 encoding = DW_ATE_unsigned;
12158 else
12159 encoding = DW_ATE_signed;
12160
12161 if (!dwarf_strict
12162 && lang_hooks.types.get_type_bias)
12163 type_bias = lang_hooks.types.get_type_bias (type);
12164 break;
12165
12166 case REAL_TYPE:
12167 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12168 {
12169 if (dwarf_version >= 3 || !dwarf_strict)
12170 encoding = DW_ATE_decimal_float;
12171 else
12172 encoding = DW_ATE_lo_user;
12173 }
12174 else
12175 encoding = DW_ATE_float;
12176 break;
12177
12178 case FIXED_POINT_TYPE:
12179 if (!(dwarf_version >= 3 || !dwarf_strict))
12180 encoding = DW_ATE_lo_user;
12181 else if (TYPE_UNSIGNED (type))
12182 encoding = DW_ATE_unsigned_fixed;
12183 else
12184 encoding = DW_ATE_signed_fixed;
12185 break;
12186
12187 /* Dwarf2 doesn't know anything about complex ints, so use
12188 a user defined type for them.  */
12189 case COMPLEX_TYPE:
12190 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12191 encoding = DW_ATE_complex_float;
12192 else
12193 encoding = DW_ATE_lo_user;
12194 break;
12195
12196 case BOOLEAN_TYPE:
12197 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12198 encoding = DW_ATE_boolean;
12199 break;
12200
12201 default:
12202 /* No other TREE_CODEs are Dwarf fundamental types. */
12203 gcc_unreachable ();
12204 }
12205
12206 base_type_result = new_die_raw (DW_TAG_base_type);
12207
12208 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12209 int_size_in_bytes (type));
12210 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12211
12212 if (need_endianity_attribute_p (reverse))
12213 add_AT_unsigned (base_type_result, DW_AT_endianity,
12214 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12215
12216 add_alignment_attribute (base_type_result, type);
12217
12218 if (fpt_used)
12219 {
12220 switch (fpt_info.scale_factor_kind)
12221 {
12222 case fixed_point_scale_factor_binary:
12223 add_AT_int (base_type_result, DW_AT_binary_scale,
12224 fpt_info.scale_factor.binary);
12225 break;
12226
12227 case fixed_point_scale_factor_decimal:
12228 add_AT_int (base_type_result, DW_AT_decimal_scale,
12229 fpt_info.scale_factor.decimal);
12230 break;
12231
12232 case fixed_point_scale_factor_arbitrary:
12233 /* Arbitrary scale factors cannot be described in standard DWARF,
12234 yet. */
12235 if (!dwarf_strict)
12236 {
12237 /* Describe the scale factor as a rational constant. */
12238 const dw_die_ref scale_factor
12239 = new_die (DW_TAG_constant, comp_unit_die (), type);
12240
12241 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12242 fpt_info.scale_factor.arbitrary.numerator);
12243 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12244 fpt_info.scale_factor.arbitrary.denominator);
12245
12246 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12247 }
12248 break;
12249
12250 default:
12251 gcc_unreachable ();
12252 }
12253 }
12254
12255 if (type_bias)
12256 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12257 dw_scalar_form_constant
12258 | dw_scalar_form_exprloc
12259 | dw_scalar_form_reference,
12260 NULL);
12261
12262 return base_type_result;
12263 }
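
/* For example (purely illustrative), a plain C "int" on a target with
   32-bit int would get roughly:

	DW_TAG_base_type
	  DW_AT_byte_size: 4
	  DW_AT_encoding:  DW_ATE_signed

   The DIE is created unparented (new_die_raw); the name and parent are
   typically supplied by the callers, and the endianity, fixed-point
   and bias attributes only appear for the less common cases handled
   above.  */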
12264
12265 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12266 named 'auto' in its type: return true for it, false otherwise. */
12267
12268 static inline bool
12269 is_cxx_auto (tree type)
12270 {
12271 if (is_cxx ())
12272 {
12273 tree name = TYPE_IDENTIFIER (type);
12274 if (name == get_identifier ("auto")
12275 || name == get_identifier ("decltype(auto)"))
12276 return true;
12277 }
12278 return false;
12279 }
12280
12281 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12282 given input type is a Dwarf "fundamental" type.  Otherwise return zero.  */
12283
12284 static inline int
12285 is_base_type (tree type)
12286 {
12287 switch (TREE_CODE (type))
12288 {
12289 case INTEGER_TYPE:
12290 case REAL_TYPE:
12291 case FIXED_POINT_TYPE:
12292 case COMPLEX_TYPE:
12293 case BOOLEAN_TYPE:
12294 case POINTER_BOUNDS_TYPE:
12295 return 1;
12296
12297 case VOID_TYPE:
12298 case ARRAY_TYPE:
12299 case RECORD_TYPE:
12300 case UNION_TYPE:
12301 case QUAL_UNION_TYPE:
12302 case ENUMERAL_TYPE:
12303 case FUNCTION_TYPE:
12304 case METHOD_TYPE:
12305 case POINTER_TYPE:
12306 case REFERENCE_TYPE:
12307 case NULLPTR_TYPE:
12308 case OFFSET_TYPE:
12309 case LANG_TYPE:
12310 case VECTOR_TYPE:
12311 return 0;
12312
12313 default:
12314 if (is_cxx_auto (type))
12315 return 0;
12316 gcc_unreachable ();
12317 }
12318
12319 return 0;
12320 }
12321
12322 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12323 node, return the size in bits for the type if it is a constant, or else
12324 return the alignment for the type if the type's size is not constant, or
12325 else return BITS_PER_WORD if the type actually turns out to be an
12326 ERROR_MARK node. */
12327
12328 static inline unsigned HOST_WIDE_INT
12329 simple_type_size_in_bits (const_tree type)
12330 {
12331 if (TREE_CODE (type) == ERROR_MARK)
12332 return BITS_PER_WORD;
12333 else if (TYPE_SIZE (type) == NULL_TREE)
12334 return 0;
12335 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12336 return tree_to_uhwi (TYPE_SIZE (type));
12337 else
12338 return TYPE_ALIGN (type);
12339 }
12340
12341 /* Similarly, but return an offset_int instead of UHWI. */
12342
12343 static inline offset_int
12344 offset_int_type_size_in_bits (const_tree type)
12345 {
12346 if (TREE_CODE (type) == ERROR_MARK)
12347 return BITS_PER_WORD;
12348 else if (TYPE_SIZE (type) == NULL_TREE)
12349 return 0;
12350 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12351 return wi::to_offset (TYPE_SIZE (type));
12352 else
12353 return TYPE_ALIGN (type);
12354 }
12355
12356 /* Given a pointer to a tree node for a subrange type, return a pointer
12357 to a DIE that describes the given type. */
12358
12359 static dw_die_ref
12360 subrange_type_die (tree type, tree low, tree high, tree bias,
12361 dw_die_ref context_die)
12362 {
12363 dw_die_ref subrange_die;
12364 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12365
12366 if (context_die == NULL)
12367 context_die = comp_unit_die ();
12368
12369 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12370
12371 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12372 {
12373 /* The sizes of the subrange type and its base type do not match,
12374 so we need to generate a size attribute for the subrange type.  */
12375 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12376 }
12377
12378 add_alignment_attribute (subrange_die, type);
12379
12380 if (low)
12381 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12382 if (high)
12383 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12384 if (bias && !dwarf_strict)
12385 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12386 dw_scalar_form_constant
12387 | dw_scalar_form_exprloc
12388 | dw_scalar_form_reference,
12389 NULL);
12390
12391 return subrange_die;
12392 }
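
/* For instance (illustrative), an Ada subtype covering 1 .. 10 of a
   4-byte integer type would come out roughly as:

	DW_TAG_subrange_type
	  DW_AT_lower_bound: 1
	  DW_AT_upper_bound: 10

   with DW_AT_byte_size added only if the subtype's size differs from
   that of its base type, and DW_AT_GNU_bias only for biased types when
   DWARF extensions are allowed (!dwarf_strict).  */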
12393
12394 /* Returns the (const and/or volatile) cv_qualifiers associated with
12395 the decl node. This will normally be augmented with the
12396 cv_qualifiers of the underlying type in add_type_attribute. */
12397
12398 static int
12399 decl_quals (const_tree decl)
12400 {
12401 return ((TREE_READONLY (decl)
12402 /* The C++ front-end correctly marks reference-typed
12403 variables as readonly, but from a language (and debug
12404 info) standpoint they are not const-qualified. */
12405 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12406 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12407 | (TREE_THIS_VOLATILE (decl)
12408 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12409 }
12410
12411 /* Determine the TYPE whose qualifiers match the largest strict subset
12412 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12413 qualifiers outside QUAL_MASK. */
12414
12415 static int
12416 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12417 {
12418 tree t;
12419 int best_rank = 0, best_qual = 0, max_rank;
12420
12421 type_quals &= qual_mask;
12422 max_rank = popcount_hwi (type_quals) - 1;
12423
12424 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12425 t = TYPE_NEXT_VARIANT (t))
12426 {
12427 int q = TYPE_QUALS (t) & qual_mask;
12428
12429 if ((q & type_quals) == q && q != type_quals
12430 && check_base_type (t, type))
12431 {
12432 int rank = popcount_hwi (q);
12433
12434 if (rank > best_rank)
12435 {
12436 best_rank = rank;
12437 best_qual = q;
12438 }
12439 }
12440 }
12441
12442 return best_qual;
12443 }
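
/* For example (illustrative), when asked about a "const volatile int"
   with QUAL_MASK covering const/volatile/restrict/atomic, and with only
   a "const int" variant present in the variant list, this returns
   TYPE_QUAL_CONST: the qualifiers of the largest strict subset of the
   requested ones for which a variant actually exists.  */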
12444
12445 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12446 static const dwarf_qual_info_t dwarf_qual_info[] =
12447 {
12448 { TYPE_QUAL_CONST, DW_TAG_const_type },
12449 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12450 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12451 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12452 };
12453 static const unsigned int dwarf_qual_info_size
12454 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12455
12456 /* If DIE is a qualified DIE of some base DIE with the same parent,
12457 return the base DIE, otherwise return NULL. Set MASK to the
12458 qualifiers added compared to the returned DIE. */
12459
12460 static dw_die_ref
12461 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12462 {
12463 unsigned int i;
12464 for (i = 0; i < dwarf_qual_info_size; i++)
12465 if (die->die_tag == dwarf_qual_info[i].t)
12466 break;
12467 if (i == dwarf_qual_info_size)
12468 return NULL;
12469 if (vec_safe_length (die->die_attr) != 1)
12470 return NULL;
12471 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12472 if (type == NULL || type->die_parent != die->die_parent)
12473 return NULL;
12474 *mask |= dwarf_qual_info[i].q;
12475 if (depth)
12476 {
12477 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12478 if (ret)
12479 return ret;
12480 }
12481 return type;
12482 }
12483
12484 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12485 entry that chains the modifiers specified by CV_QUALS in front of the
12486 given type. REVERSE is true if the type is to be interpreted in the
12487 reverse storage order wrt the target order. */
12488
12489 static dw_die_ref
12490 modified_type_die (tree type, int cv_quals, bool reverse,
12491 dw_die_ref context_die)
12492 {
12493 enum tree_code code = TREE_CODE (type);
12494 dw_die_ref mod_type_die;
12495 dw_die_ref sub_die = NULL;
12496 tree item_type = NULL;
12497 tree qualified_type;
12498 tree name, low, high;
12499 dw_die_ref mod_scope;
12500 /* Only these cv-qualifiers are currently handled. */
12501 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12502 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC
12503 | ENCODE_QUAL_ADDR_SPACE (~0U));
12504 const bool reverse_base_type
12505 = need_endianity_attribute_p (reverse) && is_base_type (type);
12506
12507 if (code == ERROR_MARK)
12508 return NULL;
12509
12510 if (lang_hooks.types.get_debug_type)
12511 {
12512 tree debug_type = lang_hooks.types.get_debug_type (type);
12513
12514 if (debug_type != NULL_TREE && debug_type != type)
12515 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12516 }
12517
12518 cv_quals &= cv_qual_mask;
12519
12520 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
12521 tag modifier (and not an attribute) that old consumers won't be able
12522 to handle. */
12523 if (dwarf_version < 3)
12524 cv_quals &= ~TYPE_QUAL_RESTRICT;
12525
12526 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
12527 if (dwarf_version < 5)
12528 cv_quals &= ~TYPE_QUAL_ATOMIC;
12529
12530 /* See if we already have the appropriately qualified variant of
12531 this type. */
12532 qualified_type = get_qualified_type (type, cv_quals);
12533
12534 if (qualified_type == sizetype)
12535 {
12536 /* Try not to expose the internal sizetype type's name. */
12537 if (TYPE_NAME (qualified_type)
12538 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12539 {
12540 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12541
12542 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12543 && (TYPE_PRECISION (t)
12544 == TYPE_PRECISION (qualified_type))
12545 && (TYPE_UNSIGNED (t)
12546 == TYPE_UNSIGNED (qualified_type)));
12547 qualified_type = t;
12548 }
12549 else if (qualified_type == sizetype
12550 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
12551 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
12552 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
12553 qualified_type = size_type_node;
12554 }
12555
12556 /* If we do, then we can just use its DIE, if it exists. */
12557 if (qualified_type)
12558 {
12559 mod_type_die = lookup_type_die (qualified_type);
12560
12561 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
12562 dealt with specially: the DIE with the attribute, if it exists, is
12563 placed immediately after the regular DIE for the same base type. */
12564 if (mod_type_die
12565 && (!reverse_base_type
12566 || ((mod_type_die = mod_type_die->die_sib) != NULL
12567 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
12568 return mod_type_die;
12569 }
12570
12571 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12572
12573 /* Handle C typedef types. */
12574 if (name
12575 && TREE_CODE (name) == TYPE_DECL
12576 && DECL_ORIGINAL_TYPE (name)
12577 && !DECL_ARTIFICIAL (name))
12578 {
12579 tree dtype = TREE_TYPE (name);
12580
12581 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
12582 if (qualified_type == dtype && !reverse_base_type)
12583 {
12584 tree origin = decl_ultimate_origin (name);
12585
12586 /* Typedef variants that have an abstract origin don't get their own
12587 type DIE (see gen_typedef_die), so fall back on the ultimate
12588 abstract origin instead. */
12589 if (origin != NULL && origin != name)
12590 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
12591 context_die);
12592
12593 /* For a named type, use the typedef. */
12594 gen_type_die (qualified_type, context_die);
12595 return lookup_type_die (qualified_type);
12596 }
12597 else
12598 {
12599 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12600 dquals &= cv_qual_mask;
12601 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12602 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12603 /* cv-unqualified version of named type. Just use
12604 the unnamed type to which it refers. */
12605 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12606 reverse, context_die);
12607 /* Else cv-qualified version of named type; fall through. */
12608 }
12609 }
12610
12611 mod_scope = scope_die_for (type, context_die);
12612
12613 if (cv_quals)
12614 {
12615 int sub_quals = 0, first_quals = 0;
12616 unsigned i;
12617 dw_die_ref first = NULL, last = NULL;
12618
12619 /* Determine a lesser qualified type that most closely matches
12620 this one. Then generate DW_TAG_* entries for the remaining
12621 qualifiers. */
12622 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12623 cv_qual_mask);
12624 if (sub_quals && use_debug_types)
12625 {
12626 bool needed = false;
12627 /* If emitting type units, make sure the order of qualifiers
12628 is canonical. Thus, start from unqualified type if
12629 an earlier qualifier is missing in sub_quals, but some later
12630 one is present there. */
12631 for (i = 0; i < dwarf_qual_info_size; i++)
12632 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12633 needed = true;
12634 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12635 {
12636 sub_quals = 0;
12637 break;
12638 }
12639 }
12640 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12641 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12642 {
12643 /* As not all intermediate qualified DIEs have corresponding
12644 tree types, ensure that qualified DIEs in the same scope
12645 as their DW_AT_type are emitted after their DW_AT_type,
12646 only with other qualified DIEs for the same type possibly
12647 in between them. Determine the range of such qualified
12648 DIEs now (first being the base type, last being corresponding
12649 last qualified DIE for it). */
12650 unsigned int count = 0;
12651 first = qualified_die_p (mod_type_die, &first_quals,
12652 dwarf_qual_info_size);
12653 if (first == NULL)
12654 first = mod_type_die;
12655 gcc_assert ((first_quals & ~sub_quals) == 0);
12656 for (count = 0, last = first;
12657 count < (1U << dwarf_qual_info_size);
12658 count++, last = last->die_sib)
12659 {
12660 int quals = 0;
12661 if (last == mod_scope->die_child)
12662 break;
12663 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12664 != first)
12665 break;
12666 }
12667 }
12668
12669 for (i = 0; i < dwarf_qual_info_size; i++)
12670 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12671 {
12672 dw_die_ref d;
12673 if (first && first != last)
12674 {
12675 for (d = first->die_sib; ; d = d->die_sib)
12676 {
12677 int quals = 0;
12678 qualified_die_p (d, &quals, dwarf_qual_info_size);
12679 if (quals == (first_quals | dwarf_qual_info[i].q))
12680 break;
12681 if (d == last)
12682 {
12683 d = NULL;
12684 break;
12685 }
12686 }
12687 if (d)
12688 {
12689 mod_type_die = d;
12690 continue;
12691 }
12692 }
12693 if (first)
12694 {
12695 d = new_die_raw (dwarf_qual_info[i].t);
12696 add_child_die_after (mod_scope, d, last);
12697 last = d;
12698 }
12699 else
12700 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12701 if (mod_type_die)
12702 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12703 mod_type_die = d;
12704 first_quals |= dwarf_qual_info[i].q;
12705 }
12706 }
12707 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12708 {
12709 dwarf_tag tag = DW_TAG_pointer_type;
12710 if (code == REFERENCE_TYPE)
12711 {
12712 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12713 tag = DW_TAG_rvalue_reference_type;
12714 else
12715 tag = DW_TAG_reference_type;
12716 }
12717 mod_type_die = new_die (tag, mod_scope, type);
12718
12719 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12720 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12721 add_alignment_attribute (mod_type_die, type);
12722 item_type = TREE_TYPE (type);
12723
12724 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12725 if (!ADDR_SPACE_GENERIC_P (as))
12726 {
12727 int action = targetm.addr_space.debug (as);
12728 if (action >= 0)
12729 {
12730 /* Positive values indicate an address_class. */
12731 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12732 }
12733 else
12734 {
12735 /* Negative values indicate an (inverted) segment base reg. */
12736 dw_loc_descr_ref d
12737 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12738 add_AT_loc (mod_type_die, DW_AT_segment, d);
12739 }
12740 }
12741 }
12742 else if (code == INTEGER_TYPE
12743 && TREE_TYPE (type) != NULL_TREE
12744 && subrange_type_for_debug_p (type, &low, &high))
12745 {
12746 tree bias = NULL_TREE;
12747 if (lang_hooks.types.get_type_bias)
12748 bias = lang_hooks.types.get_type_bias (type);
12749 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12750 item_type = TREE_TYPE (type);
12751 }
12752 else if (is_base_type (type))
12753 {
12754 mod_type_die = base_type_die (type, reverse);
12755
12756 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
12757 if (reverse_base_type)
12758 {
12759 dw_die_ref after_die
12760 = modified_type_die (type, cv_quals, false, context_die);
12761 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
12762 }
12763 else
12764 add_child_die (comp_unit_die (), mod_type_die);
12765
12766 add_pubtype (type, mod_type_die);
12767 }
12768 else
12769 {
12770 gen_type_die (type, context_die);
12771
12772 /* We have to get the type_main_variant here (and pass that to the
12773 `lookup_type_die' routine) because the ..._TYPE node we have
12774 might simply be a *copy* of some original type node (where the
12775 copy was created to help us keep track of typedef names) and
12776 that copy might have a different TYPE_UID from the original
12777 ..._TYPE node. */
12778 if (TREE_CODE (type) == FUNCTION_TYPE
12779 || TREE_CODE (type) == METHOD_TYPE)
12780 {
12781 /* For function/method types, can't just use type_main_variant here,
12782 because that can have different ref-qualifiers for C++,
12783 but try to canonicalize. */
12784 tree main = TYPE_MAIN_VARIANT (type);
12785 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12786 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
12787 && check_base_type (t, main)
12788 && check_lang_type (t, type))
12789 return lookup_type_die (t);
12790 return lookup_type_die (type);
12791 }
12792 else if (TREE_CODE (type) != VECTOR_TYPE
12793 && TREE_CODE (type) != ARRAY_TYPE)
12794 return lookup_type_die (type_main_variant (type));
12795 else
12796 /* Vectors have the debugging information in the type,
12797 not the main variant. */
12798 return lookup_type_die (type);
12799 }
12800
12801 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12802 don't output a DW_TAG_typedef, since there isn't one in the
12803 user's program; just attach a DW_AT_name to the type.
12804 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12805 if the base type already has the same name. */
12806 if (name
12807 && ((TREE_CODE (name) != TYPE_DECL
12808 && (qualified_type == TYPE_MAIN_VARIANT (type)
12809 || (cv_quals == TYPE_UNQUALIFIED)))
12810 || (TREE_CODE (name) == TYPE_DECL
12811 && TREE_TYPE (name) == qualified_type
12812 && DECL_NAME (name))))
12813 {
12814 if (TREE_CODE (name) == TYPE_DECL)
12815 /* Could just call add_name_and_src_coords_attributes here,
12816 but since this is a builtin type it doesn't have any
12817 useful source coordinates anyway. */
12818 name = DECL_NAME (name);
12819 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12820 }
12821 /* This probably indicates a bug. */
12822 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12823 {
12824 name = TYPE_IDENTIFIER (type);
12825 add_name_attribute (mod_type_die,
12826 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12827 }
12828
12829 if (qualified_type && !reverse_base_type)
12830 equate_type_number_to_die (qualified_type, mod_type_die);
12831
12832 if (item_type)
12833 /* We must do this after the equate_type_number_to_die call, in case
12834 this is a recursive type. This ensures that the modified_type_die
12835 recursion will terminate even if the type is recursive. Recursive
12836 types are possible in Ada. */
12837 sub_die = modified_type_die (item_type,
12838 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12839 reverse,
12840 context_die);
12841
12842 if (sub_die != NULL)
12843 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12844
12845 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12846 if (TYPE_ARTIFICIAL (type))
12847 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12848
12849 return mod_type_die;
12850 }
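
/* For illustration, for a declaration such as "const char *p" the recursion
   above typically builds the chain

     DW_TAG_pointer_type -> DW_TAG_const_type -> DW_TAG_base_type "char"

   where each arrow is a DW_AT_type reference added by the sub_die handling
   at the end of the function.  */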
12851
12852 /* Generate DIEs for the generic parameters of T.
12853 T must be either a generic type or a generic function.
12854 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12855
12856 static void
12857 gen_generic_params_dies (tree t)
12858 {
12859 tree parms, args;
12860 int parms_num, i;
12861 dw_die_ref die = NULL;
12862 int non_default;
12863
12864 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12865 return;
12866
12867 if (TYPE_P (t))
12868 die = lookup_type_die (t);
12869 else if (DECL_P (t))
12870 die = lookup_decl_die (t);
12871
12872 gcc_assert (die);
12873
12874 parms = lang_hooks.get_innermost_generic_parms (t);
12875 if (!parms)
12876 /* T has no generic parameter. It means T is neither a generic type
12877 nor a generic function. End of story. */
12878 return;
12879
12880 parms_num = TREE_VEC_LENGTH (parms);
12881 args = lang_hooks.get_innermost_generic_args (t);
12882 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12883 non_default = int_cst_value (TREE_CHAIN (args));
12884 else
12885 non_default = TREE_VEC_LENGTH (args);
12886 for (i = 0; i < parms_num; i++)
12887 {
12888 tree parm, arg, arg_pack_elems;
12889 dw_die_ref parm_die;
12890
12891 parm = TREE_VEC_ELT (parms, i);
12892 arg = TREE_VEC_ELT (args, i);
12893 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12894 gcc_assert (parm && TREE_VALUE (parm) && arg);
12895
12896 if (parm && TREE_VALUE (parm) && arg)
12897 {
12898 /* If PARM represents a template parameter pack,
12899 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12900 by DW_TAG_template_*_parameter DIEs for the argument
12901 pack elements of ARG. Note that ARG would then be
12902 an argument pack. */
12903 if (arg_pack_elems)
12904 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12905 arg_pack_elems,
12906 die);
12907 else
12908 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12909 true /* emit name */, die);
12910 if (i >= non_default)
12911 add_AT_flag (parm_die, DW_AT_default_value, 1);
12912 }
12913 }
12914 }
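
/* For illustration, for an instantiation such as S<int, 3> of
   "template <typename T, int N> struct S" the loop above would typically
   emit, as children of the DIE for S<int, 3>, something like

     DW_TAG_template_type_param   DW_AT_name "T"  DW_AT_type <int>
     DW_TAG_template_value_param  DW_AT_name "N"  DW_AT_type <int>

   with the DW_AT_const_value 3 of "N" filled in later, once cgraph is
   ready (see generic_parameter_die below).  */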
12915
12916 /* Create and return a DIE for PARM which should be
12917 the representation of a generic type parameter.
12918 For instance, in the C++ front end, PARM would be a template parameter.
12919 ARG is the argument to PARM.
12920 EMIT_NAME_P, if true, the DIE will have a DW_AT_name attribute set to the
12921 name of the PARM.
12922 PARENT_DIE is the parent DIE which the new created DIE should be added to,
12923 as a child node. */
12924
12925 static dw_die_ref
12926 generic_parameter_die (tree parm, tree arg,
12927 bool emit_name_p,
12928 dw_die_ref parent_die)
12929 {
12930 dw_die_ref tmpl_die = NULL;
12931 const char *name = NULL;
12932
12933 if (!parm || !DECL_NAME (parm) || !arg)
12934 return NULL;
12935
12936 /* We support non-type generic parameters and arguments,
12937 type generic parameters and arguments, as well as
12938 generic generic parameters (a.k.a. template template parameters in C++)
12939 and arguments. */
12940 if (TREE_CODE (parm) == PARM_DECL)
12941 /* PARM is a nontype generic parameter */
12942 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12943 else if (TREE_CODE (parm) == TYPE_DECL)
12944 /* PARM is a type generic parameter. */
12945 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12946 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12947 /* PARM is a generic generic parameter.
12948 Its DIE is a GNU extension. It shall have a
12949 DW_AT_name attribute to represent the name of the template template
12950 parameter, and a DW_AT_GNU_template_name attribute to represent the
12951 name of the template template argument. */
12952 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12953 parent_die, parm);
12954 else
12955 gcc_unreachable ();
12956
12957 if (tmpl_die)
12958 {
12959 tree tmpl_type;
12960
12961 /* If PARM is a generic parameter pack, it means we are
12962 emitting debug info for a template argument pack element.
12963 In other terms, ARG is a template argument pack element.
12964 In that case, we don't emit any DW_AT_name attribute for
12965 the die. */
12966 if (emit_name_p)
12967 {
12968 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12969 gcc_assert (name);
12970 add_AT_string (tmpl_die, DW_AT_name, name);
12971 }
12972
12973 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12974 {
12975 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12976 TMPL_DIE should have a child DW_AT_type attribute that is set
12977 to the type of the argument to PARM, which is ARG.
12978 If PARM is a type generic parameter, TMPL_DIE should have a
12979 child DW_AT_type that is set to ARG. */
12980 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12981 add_type_attribute (tmpl_die, tmpl_type,
12982 (TREE_THIS_VOLATILE (tmpl_type)
12983 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12984 false, parent_die);
12985 }
12986 else
12987 {
12988 /* So TMPL_DIE is a DIE representing a generic generic template
12989 parameter, a.k.a. a template template parameter in C++,
12990 and ARG is a template. */
12991
12992 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12993 to the name of the argument. */
12994 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12995 if (name)
12996 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12997 }
12998
12999 if (TREE_CODE (parm) == PARM_DECL)
13000 /* So PARM is a non-type generic parameter.
13001 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13002 attribute of TMPL_DIE whose value represents the value
13003 of ARG.
13004 We must be careful here:
13005 the value of ARG might reference some function decls.
13006 We might currently be emitting debug info for a generic
13007 type, and types are emitted before function decls, so we
13008 don't know whether the function decls referenced by ARG will
13009 actually be emitted until cgraph computations are done.
13010 So we must defer the generation of the DW_AT_const_value to
13011 after cgraph is ready. */
13012 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13013 }
13014
13015 return tmpl_die;
13016 }
13017
13018 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13019 PARM_PACK, which must be a template parameter pack. The returned DIE
13020 will be a child DIE of PARENT_DIE. */
13021
13022 static dw_die_ref
13023 template_parameter_pack_die (tree parm_pack,
13024 tree parm_pack_args,
13025 dw_die_ref parent_die)
13026 {
13027 dw_die_ref die;
13028 int j;
13029
13030 gcc_assert (parent_die && parm_pack);
13031
13032 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13033 add_name_and_src_coords_attributes (die, parm_pack);
13034 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13035 generic_parameter_die (parm_pack,
13036 TREE_VEC_ELT (parm_pack_args, j),
13037 false /* Don't emit DW_AT_name */,
13038 die);
13039 return die;
13040 }
13041
13042 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13043 an enumerated type. */
13044
13045 static inline int
13046 type_is_enum (const_tree type)
13047 {
13048 return TREE_CODE (type) == ENUMERAL_TYPE;
13049 }
13050
13051 /* Return the DBX register number described by a given RTL node. */
13052
13053 static unsigned int
13054 dbx_reg_number (const_rtx rtl)
13055 {
13056 unsigned regno = REGNO (rtl);
13057
13058 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13059
13060 #ifdef LEAF_REG_REMAP
13061 if (crtl->uses_only_leaf_regs)
13062 {
13063 int leaf_reg = LEAF_REG_REMAP (regno);
13064 if (leaf_reg != -1)
13065 regno = (unsigned) leaf_reg;
13066 }
13067 #endif
13068
13069 regno = DBX_REGISTER_NUMBER (regno);
13070 gcc_assert (regno != INVALID_REGNUM);
13071 return regno;
13072 }
13073
13074 /* Optionally add a DW_OP_piece term to a location description expression.
13075 DW_OP_piece is only added if the location description expression doesn't
13076 already end with DW_OP_piece. */
13077
13078 static void
13079 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13080 {
13081 dw_loc_descr_ref loc;
13082
13083 if (*list_head != NULL)
13084 {
13085 /* Find the end of the chain. */
13086 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13087 ;
13088
13089 if (loc->dw_loc_opc != DW_OP_piece)
13090 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13091 }
13092 }
13093
13094 /* Return a location descriptor that designates a machine register or
13095 zero if there is none. */
13096
13097 static dw_loc_descr_ref
13098 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13099 {
13100 rtx regs;
13101
13102 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13103 return 0;
13104
13105 /* We only use "frame base" when we're sure we're talking about the
13106 post-prologue local stack frame. We do this by *not* running
13107 register elimination until this point, and recognizing the special
13108 argument pointer and soft frame pointer rtx's.
13109 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13110 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13111 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13112 {
13113 dw_loc_descr_ref result = NULL;
13114
13115 if (dwarf_version >= 4 || !dwarf_strict)
13116 {
13117 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13118 initialized);
13119 if (result)
13120 add_loc_descr (&result,
13121 new_loc_descr (DW_OP_stack_value, 0, 0));
13122 }
13123 return result;
13124 }
13125
13126 regs = targetm.dwarf_register_span (rtl);
13127
13128 if (REG_NREGS (rtl) > 1 || regs)
13129 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13130 else
13131 {
13132 unsigned int dbx_regnum = dbx_reg_number (rtl);
13133 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13134 return 0;
13135 return one_reg_loc_descriptor (dbx_regnum, initialized);
13136 }
13137 }
13138
13139 /* Return a location descriptor that designates a machine register for
13140 a given hard register number. */
13141
13142 static dw_loc_descr_ref
13143 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13144 {
13145 dw_loc_descr_ref reg_loc_descr;
13146
13147 if (regno <= 31)
13148 reg_loc_descr
13149 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13150 else
13151 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13152
13153 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13154 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13155
13156 return reg_loc_descr;
13157 }
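
/* For illustration, a value living in DWARF register 3 is described by the
   single one-byte opcode DW_OP_reg3, while register 40 needs the two-byte
   form DW_OP_regx 40.  */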
13158
13159 /* Given an RTL of a register, return a location descriptor that
13160 designates a value that spans more than one register. */
13161
13162 static dw_loc_descr_ref
13163 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13164 enum var_init_status initialized)
13165 {
13166 int size, i;
13167 dw_loc_descr_ref loc_result = NULL;
13168
13169 /* Simple, contiguous registers. */
13170 if (regs == NULL_RTX)
13171 {
13172 unsigned reg = REGNO (rtl);
13173 int nregs;
13174
13175 #ifdef LEAF_REG_REMAP
13176 if (crtl->uses_only_leaf_regs)
13177 {
13178 int leaf_reg = LEAF_REG_REMAP (reg);
13179 if (leaf_reg != -1)
13180 reg = (unsigned) leaf_reg;
13181 }
13182 #endif
13183
13184 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13185 nregs = REG_NREGS (rtl);
13186
13187 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
13188
13189 loc_result = NULL;
13190 while (nregs--)
13191 {
13192 dw_loc_descr_ref t;
13193
13194 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13195 VAR_INIT_STATUS_INITIALIZED);
13196 add_loc_descr (&loc_result, t);
13197 add_loc_descr_op_piece (&loc_result, size);
13198 ++reg;
13199 }
13200 return loc_result;
13201 }
13202
13203 /* Now onto stupid register sets in non-contiguous locations. */
13204
13205 gcc_assert (GET_CODE (regs) == PARALLEL);
13206
13207 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
13208 loc_result = NULL;
13209
13210 for (i = 0; i < XVECLEN (regs, 0); ++i)
13211 {
13212 dw_loc_descr_ref t;
13213
13214 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13215 VAR_INIT_STATUS_INITIALIZED);
13216 add_loc_descr (&loc_result, t);
13217 add_loc_descr_op_piece (&loc_result, size);
13218 }
13219
13220 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13221 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13222 return loc_result;
13223 }
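
/* For illustration, a 16-byte value living in two contiguous 8-byte
   registers would typically come out as

     DW_OP_reg4 DW_OP_piece 8 DW_OP_reg5 DW_OP_piece 8

   (register numbers are made up here); each DW_OP_piece gives the size in
   bytes of the part described by the preceding operation.  */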
13224
13225 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13226
13227 /* Return a location descriptor that designates a constant i,
13228 as a compound operation from constant (i >> shift), constant shift
13229 and DW_OP_shl. */
13230
13231 static dw_loc_descr_ref
13232 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13233 {
13234 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13235 add_loc_descr (&ret, int_loc_descriptor (shift));
13236 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13237 return ret;
13238 }
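
/* For instance, int_shift_loc_descriptor (0x30000000, 28) yields
   DW_OP_lit3 DW_OP_lit28 DW_OP_shl, i.e. 3 bytes, whereas the direct
   DW_OP_const4u 0x30000000 encoding needs 5 bytes.  */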
13239
13240 /* Return a location descriptor that designates constant POLY_I. */
13241
13242 static dw_loc_descr_ref
13243 int_loc_descriptor (poly_int64 poly_i)
13244 {
13245 enum dwarf_location_atom op;
13246
13247 HOST_WIDE_INT i;
13248 if (!poly_i.is_constant (&i))
13249 {
13250 /* Create location descriptions for the non-constant part and
13251 add any constant offset at the end. */
13252 dw_loc_descr_ref ret = NULL;
13253 HOST_WIDE_INT constant = poly_i.coeffs[0];
13254 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13255 {
13256 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13257 if (coeff != 0)
13258 {
13259 dw_loc_descr_ref start = ret;
13260 unsigned int factor;
13261 int bias;
13262 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13263 (j, &factor, &bias);
13264
13265 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13266 add COEFF * (REGNO / FACTOR) now and subtract
13267 COEFF * BIAS from the final constant part. */
13268 constant -= coeff * bias;
13269 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13270 if (coeff % factor == 0)
13271 coeff /= factor;
13272 else
13273 {
13274 int amount = exact_log2 (factor);
13275 gcc_assert (amount >= 0);
13276 add_loc_descr (&ret, int_loc_descriptor (amount));
13277 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13278 }
13279 if (coeff != 1)
13280 {
13281 add_loc_descr (&ret, int_loc_descriptor (coeff));
13282 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13283 }
13284 if (start)
13285 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13286 }
13287 }
13288 loc_descr_plus_const (&ret, constant);
13289 return ret;
13290 }
13291
13292 /* Pick the smallest representation of a constant, rather than just
13293 defaulting to the LEB encoding. */
13294 if (i >= 0)
13295 {
13296 int clz = clz_hwi (i);
13297 int ctz = ctz_hwi (i);
13298 if (i <= 31)
13299 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13300 else if (i <= 0xff)
13301 op = DW_OP_const1u;
13302 else if (i <= 0xffff)
13303 op = DW_OP_const2u;
13304 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13305 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13306 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13307 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13308 while DW_OP_const4u is 5 bytes. */
13309 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13310 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13311 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13312 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13313 while DW_OP_const4u is 5 bytes. */
13314 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13315
13316 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13317 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13318 <= 4)
13319 {
13320 /* As i >= 2**31, the double cast above will yield a negative number.
13321 Since wrapping is defined in DWARF expressions we can output big
13322 positive integers as small negative ones, regardless of the size
13323 of host wide ints.
13324
13325 Here, since the evaluator will handle 32-bit values and since i >=
13326 2**31, we know it's going to be interpreted as a negative literal:
13327 store it this way if we can do better than 5 bytes this way. */
13328 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13329 }
13330 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13331 op = DW_OP_const4u;
13332
13333 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13334 least 6 bytes: see if we can do better before falling back to it. */
13335 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13336 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13337 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13338 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13339 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13340 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13341 >= HOST_BITS_PER_WIDE_INT)
13342 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13343 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13344 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13345 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13346 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13347 && size_of_uleb128 (i) > 6)
13348 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13349 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13350 else
13351 op = DW_OP_constu;
13352 }
13353 else
13354 {
13355 if (i >= -0x80)
13356 op = DW_OP_const1s;
13357 else if (i >= -0x8000)
13358 op = DW_OP_const2s;
13359 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13360 {
13361 if (size_of_int_loc_descriptor (i) < 5)
13362 {
13363 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13364 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13365 return ret;
13366 }
13367 op = DW_OP_const4s;
13368 }
13369 else
13370 {
13371 if (size_of_int_loc_descriptor (i)
13372 < (unsigned long) 1 + size_of_sleb128 (i))
13373 {
13374 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13375 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13376 return ret;
13377 }
13378 op = DW_OP_consts;
13379 }
13380 }
13381
13382 return new_loc_descr (op, i, 0);
13383 }
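
/* For instance, with a 4-byte address size the positive constant 0xfffffff0
   is emitted as DW_OP_const1s -16 (2 bytes): DWARF evaluation wraps modulo
   2**32, so the short negative literal denotes the same 32-bit value as the
   5-byte DW_OP_const4u 0xfffffff0 encoding.  */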
13384
13385 /* Likewise, for unsigned constants. */
13386
13387 static dw_loc_descr_ref
13388 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13389 {
13390 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13391 const unsigned HOST_WIDE_INT max_uint
13392 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13393
13394 /* If possible, use the clever signed constants handling. */
13395 if (i <= max_int)
13396 return int_loc_descriptor ((HOST_WIDE_INT) i);
13397
13398 /* Here, we are left with positive numbers that cannot be represented as
13399 HOST_WIDE_INT, i.e.:
13400 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13401
13402 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
13403 bytes, whereas it may be better to output a negative integer: thanks to
13404 integer wrapping, we know that:
13405 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13406 = x - 2 * (max (HOST_WIDE_INT) + 1)
13407 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13408 small negative integers. Let's try that in cases where it will clearly improve
13409 the encoding: there is no gain turning DW_OP_const4u into
13410 DW_OP_const4s. */
13411 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13412 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13413 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13414 {
13415 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13416
13417 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13418 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13419 const HOST_WIDE_INT second_shift
13420 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13421
13422 /* So we finally have:
13423 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13424 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13425 return int_loc_descriptor (second_shift);
13426 }
13427
13428 /* Last chance: fallback to a simple constant operation. */
13429 return new_loc_descr
13430 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13431 ? DW_OP_const4u
13432 : DW_OP_const8u,
13433 i, 0);
13434 }
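
/* For instance, with 8-byte addresses and a 64-bit HOST_WIDE_INT, the
   constant 0xffffffffffffff00 is emitted as DW_OP_const2s -256 (3 bytes)
   rather than DW_OP_const8u (9 bytes), again relying on the modulo 2**64
   wrapping of DWARF arithmetic.  */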
13435
13436 /* Generate and return a location description that computes the unsigned
13437 comparison of the two stack top entries (a OP b where b is the top-most
13438 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13439 LE_EXPR, GT_EXPR or GE_EXPR. */
13440
13441 static dw_loc_descr_ref
13442 uint_comparison_loc_list (enum tree_code kind)
13443 {
13444 enum dwarf_location_atom op, flip_op;
13445 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13446
13447 switch (kind)
13448 {
13449 case LT_EXPR:
13450 op = DW_OP_lt;
13451 break;
13452 case LE_EXPR:
13453 op = DW_OP_le;
13454 break;
13455 case GT_EXPR:
13456 op = DW_OP_gt;
13457 break;
13458 case GE_EXPR:
13459 op = DW_OP_ge;
13460 break;
13461 default:
13462 gcc_unreachable ();
13463 }
13464
13465 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13466 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13467
13468 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13469 possible to perform unsigned comparisons: we just have to distinguish
13470 two cases:
13471
13472 1. when a and b have the same sign (as signed integers); then we should
13473 return: a OP(signed) b;
13474
13475 2. when a is a negative signed integer while b is a positive one, then a
13476 is a greater unsigned integer than b; likewise when a and b's roles
13477 are flipped.
13478
13479 So first, compare the sign of the two operands. */
13480 ret = new_loc_descr (DW_OP_over, 0, 0);
13481 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13482 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13483 /* If they have different signs (i.e. they have different sign bits), then
13484 the stack top value now has the sign bit set and thus it's smaller than
13485 zero. */
13486 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13487 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13488 add_loc_descr (&ret, bra_node);
13489
13490 /* We are in case 1. At this point, we know both operands have the same
13491 sign, so it's safe to use the built-in signed comparison. */
13492 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13493 add_loc_descr (&ret, jmp_node);
13494
13495 /* We are in case 2. Here, we know both operands do not have the same sign,
13496 so we have to flip the signed comparison. */
13497 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13498 tmp = new_loc_descr (flip_op, 0, 0);
13499 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13500 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13501 add_loc_descr (&ret, tmp);
13502
13503 /* This dummy operation is necessary to make the two branches join. */
13504 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13505 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13506 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13507 add_loc_descr (&ret, tmp);
13508
13509 return ret;
13510 }
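
/* For illustration, for KIND == LT_EXPR the expression built above is
   roughly

       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra L1
       DW_OP_lt DW_OP_skip L2
   L1: DW_OP_gt
   L2: DW_OP_nop

   i.e. a signed DW_OP_lt when the operands have the same sign, and a
   flipped DW_OP_gt when they do not.  */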
13511
13512 /* Likewise, but takes the location description lists (might be destructive on
13513 them). Return NULL if either is NULL or if concatenation fails. */
13514
13515 static dw_loc_list_ref
13516 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13517 enum tree_code kind)
13518 {
13519 if (left == NULL || right == NULL)
13520 return NULL;
13521
13522 add_loc_list (&left, right);
13523 if (left == NULL)
13524 return NULL;
13525
13526 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13527 return left;
13528 }
13529
13530 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13531 without actually allocating it. */
13532
13533 static unsigned long
13534 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13535 {
13536 return size_of_int_loc_descriptor (i >> shift)
13537 + size_of_int_loc_descriptor (shift)
13538 + 1;
13539 }
13540
13541 /* Return size_of_locs (int_loc_descriptor (i)) without
13542 actually allocating it. */
13543
13544 static unsigned long
13545 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13546 {
13547 unsigned long s;
13548
13549 if (i >= 0)
13550 {
13551 int clz, ctz;
13552 if (i <= 31)
13553 return 1;
13554 else if (i <= 0xff)
13555 return 2;
13556 else if (i <= 0xffff)
13557 return 3;
13558 clz = clz_hwi (i);
13559 ctz = ctz_hwi (i);
13560 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13561 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13562 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13563 - clz - 5);
13564 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13565 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13566 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13567 - clz - 8);
13568 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13569 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13570 <= 4)
13571 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13572 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13573 return 5;
13574 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13575 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13576 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13577 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13578 - clz - 8);
13579 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13580 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13581 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13582 - clz - 16);
13583 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13584 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13585 && s > 6)
13586 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13587 - clz - 32);
13588 else
13589 return 1 + s;
13590 }
13591 else
13592 {
13593 if (i >= -0x80)
13594 return 2;
13595 else if (i >= -0x8000)
13596 return 3;
13597 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13598 {
13599 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13600 {
13601 s = size_of_int_loc_descriptor (-i) + 1;
13602 if (s < 5)
13603 return s;
13604 }
13605 return 5;
13606 }
13607 else
13608 {
13609 unsigned long r = 1 + size_of_sleb128 (i);
13610 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13611 {
13612 s = size_of_int_loc_descriptor (-i) + 1;
13613 if (s < r)
13614 return s;
13615 }
13616 return r;
13617 }
13618 }
13619 }
13620
13621 /* Return a location description representing the "address" of an integer
13622 value. This can appear only as a top-level expression. */
13623
13624 static dw_loc_descr_ref
13625 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13626 {
13627 int litsize;
13628 dw_loc_descr_ref loc_result = NULL;
13629
13630 if (!(dwarf_version >= 4 || !dwarf_strict))
13631 return NULL;
13632
13633 litsize = size_of_int_loc_descriptor (i);
13634 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13635 is more compact. For DW_OP_stack_value we need:
13636 litsize + 1 (DW_OP_stack_value)
13637 and for DW_OP_implicit_value:
13638 1 (DW_OP_implicit_value) + 1 (length) + size. */
13639 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13640 {
13641 loc_result = int_loc_descriptor (i);
13642 add_loc_descr (&loc_result,
13643 new_loc_descr (DW_OP_stack_value, 0, 0));
13644 return loc_result;
13645 }
13646
13647 loc_result = new_loc_descr (DW_OP_implicit_value,
13648 size, 0);
13649 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13650 loc_result->dw_loc_oprnd2.v.val_int = i;
13651 return loc_result;
13652 }
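
/* For instance, with 8-byte addresses the value 5 of an 8-byte object is
   better described by DW_OP_lit5 DW_OP_stack_value (2 bytes) than by
   DW_OP_implicit_value 8 <eight data bytes> (10 bytes), so the
   DW_OP_stack_value form is chosen above.  */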
13653
13654 /* Return a location descriptor that designates a base+offset location. */
13655
13656 static dw_loc_descr_ref
13657 based_loc_descr (rtx reg, poly_int64 offset,
13658 enum var_init_status initialized)
13659 {
13660 unsigned int regno;
13661 dw_loc_descr_ref result;
13662 dw_fde_ref fde = cfun->fde;
13663
13664 /* We only use "frame base" when we're sure we're talking about the
13665 post-prologue local stack frame. We do this by *not* running
13666 register elimination until this point, and recognizing the special
13667 argument pointer and soft frame pointer rtx's. */
13668 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13669 {
13670 rtx elim = (ira_use_lra_p
13671 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13672 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13673
13674 if (elim != reg)
13675 {
13676 elim = strip_offset_and_add (elim, &offset);
13677 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13678 && (elim == hard_frame_pointer_rtx
13679 || elim == stack_pointer_rtx))
13680 || elim == (frame_pointer_needed
13681 ? hard_frame_pointer_rtx
13682 : stack_pointer_rtx));
13683
13684 /* If drap register is used to align stack, use frame
13685 pointer + offset to access stack variables. If stack
13686 is aligned without drap, use stack pointer + offset to
13687 access stack variables. */
13688 if (crtl->stack_realign_tried
13689 && reg == frame_pointer_rtx)
13690 {
13691 int base_reg
13692 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13693 ? HARD_FRAME_POINTER_REGNUM
13694 : REGNO (elim));
13695 return new_reg_loc_descr (base_reg, offset);
13696 }
13697
13698 gcc_assert (frame_pointer_fb_offset_valid);
13699 offset += frame_pointer_fb_offset;
13700 HOST_WIDE_INT const_offset;
13701 if (offset.is_constant (&const_offset))
13702 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13703 else
13704 {
13705 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
13706 loc_descr_plus_const (&ret, offset);
13707 return ret;
13708 }
13709 }
13710 }
13711
13712 regno = REGNO (reg);
13713 #ifdef LEAF_REG_REMAP
13714 if (crtl->uses_only_leaf_regs)
13715 {
13716 int leaf_reg = LEAF_REG_REMAP (regno);
13717 if (leaf_reg != -1)
13718 regno = (unsigned) leaf_reg;
13719 }
13720 #endif
13721 regno = DWARF_FRAME_REGNUM (regno);
13722
13723 HOST_WIDE_INT const_offset;
13724 if (!optimize && fde
13725 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
13726 && offset.is_constant (&const_offset))
13727 {
13728 /* Use cfa+offset to represent the location of arguments passed
13729 on the stack when drap is used to align the stack.
13730 Only do this when not optimizing; for optimized code var-tracking
13731 is supposed to track where the arguments live, and the register
13732 used as vdrap or drap in some spot might be used for something
13733 else in other parts of the routine. */
13734 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13735 }
13736
13737 result = new_reg_loc_descr (regno, offset);
13738
13739 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13740 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13741
13742 return result;
13743 }
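
/* For illustration, a local variable sitting 16 bytes below the frame base
   typically ends up as DW_OP_fbreg -16, while a variable addressed off a
   plain base register, say DWARF register 6, would be DW_OP_breg6 <offset>.  */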
13744
13745 /* Return true if this RTL expression describes a base+offset calculation. */
13746
13747 static inline int
13748 is_based_loc (const_rtx rtl)
13749 {
13750 return (GET_CODE (rtl) == PLUS
13751 && ((REG_P (XEXP (rtl, 0))
13752 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13753 && CONST_INT_P (XEXP (rtl, 1)))));
13754 }
13755
13756 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13757 failed. */
13758
13759 static dw_loc_descr_ref
13760 tls_mem_loc_descriptor (rtx mem)
13761 {
13762 tree base;
13763 dw_loc_descr_ref loc_result;
13764
13765 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13766 return NULL;
13767
13768 base = get_base_address (MEM_EXPR (mem));
13769 if (base == NULL
13770 || !VAR_P (base)
13771 || !DECL_THREAD_LOCAL_P (base))
13772 return NULL;
13773
13774 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13775 if (loc_result == NULL)
13776 return NULL;
13777
13778 if (maybe_ne (MEM_OFFSET (mem), 0))
13779 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13780
13781 return loc_result;
13782 }
13783
13784 /* Output debug info about the reason why we failed to expand an expression
13785 as a DWARF expression. */
13786
13787 static void
13788 expansion_failed (tree expr, rtx rtl, char const *reason)
13789 {
13790 if (dump_file && (dump_flags & TDF_DETAILS))
13791 {
13792 fprintf (dump_file, "Failed to expand as dwarf: ");
13793 if (expr)
13794 print_generic_expr (dump_file, expr, dump_flags);
13795 if (rtl)
13796 {
13797 fprintf (dump_file, "\n");
13798 print_rtl (dump_file, rtl);
13799 }
13800 fprintf (dump_file, "\nReason: %s\n", reason);
13801 }
13802 }
13803
13804 /* Helper function for const_ok_for_output. */
13805
13806 static bool
13807 const_ok_for_output_1 (rtx rtl)
13808 {
13809 if (targetm.const_not_ok_for_debug_p (rtl))
13810 {
13811 if (GET_CODE (rtl) != UNSPEC)
13812 {
13813 expansion_failed (NULL_TREE, rtl,
13814 "Expression rejected for debug by the backend.\n");
13815 return false;
13816 }
13817
13818 /* If delegitimize_address couldn't do anything with the UNSPEC, and
13819 the target hook doesn't explicitly allow it in debug info, assume
13820 we can't express it in the debug info. */
13821 /* Don't complain about TLS UNSPECs, those are just too hard to
13822 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13823 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13824 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13825 if (flag_checking
13826 && (XVECLEN (rtl, 0) == 0
13827 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13828 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13829 inform (current_function_decl
13830 ? DECL_SOURCE_LOCATION (current_function_decl)
13831 : UNKNOWN_LOCATION,
13832 #if NUM_UNSPEC_VALUES > 0
13833 "non-delegitimized UNSPEC %s (%d) found in variable location",
13834 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13835 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13836 XINT (rtl, 1));
13837 #else
13838 "non-delegitimized UNSPEC %d found in variable location",
13839 XINT (rtl, 1));
13840 #endif
13841 expansion_failed (NULL_TREE, rtl,
13842 "UNSPEC hasn't been delegitimized.\n");
13843 return false;
13844 }
13845
13846 if (CONST_POLY_INT_P (rtl))
13847 return false;
13848
13849 if (targetm.const_not_ok_for_debug_p (rtl))
13850 {
13851 expansion_failed (NULL_TREE, rtl,
13852 "Expression rejected for debug by the backend.\n");
13853 return false;
13854 }
13855
13856 /* FIXME: Refer to PR60655. It is possible for simplification
13857 of rtl expressions in var tracking to produce such expressions.
13858 We should really identify / validate expressions
13859 enclosed in CONST that can be handled by assemblers on various
13860 targets and only handle legitimate cases here. */
13861 switch (GET_CODE (rtl))
13862 {
13863 case SYMBOL_REF:
13864 break;
13865 case NOT:
13866 case NEG:
13867 return false;
13868 default:
13869 return true;
13870 }
13871
13872 if (CONSTANT_POOL_ADDRESS_P (rtl))
13873 {
13874 bool marked;
13875 get_pool_constant_mark (rtl, &marked);
13876 /* If all references to this pool constant were optimized away,
13877 it was not output and thus we can't represent it. */
13878 if (!marked)
13879 {
13880 expansion_failed (NULL_TREE, rtl,
13881 "Constant was removed from constant pool.\n");
13882 return false;
13883 }
13884 }
13885
13886 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13887 return false;
13888
13889 /* Avoid references to external symbols in debug info: on several targets
13890 the linker might even refuse to link when linking a shared library,
13891 and in many other cases the relocations for .debug_info/.debug_loc are
13892 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
13893 to be defined within the same shared library or executable, are fine. */
13894 if (SYMBOL_REF_EXTERNAL_P (rtl))
13895 {
13896 tree decl = SYMBOL_REF_DECL (rtl);
13897
13898 if (decl == NULL || !targetm.binds_local_p (decl))
13899 {
13900 expansion_failed (NULL_TREE, rtl,
13901 "Symbol not defined in current TU.\n");
13902 return false;
13903 }
13904 }
13905
13906 return true;
13907 }
13908
13909 /* Return true if constant RTL can be emitted in DW_OP_addr or
13910 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13911 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13912
13913 static bool
13914 const_ok_for_output (rtx rtl)
13915 {
13916 if (GET_CODE (rtl) == SYMBOL_REF)
13917 return const_ok_for_output_1 (rtl);
13918
13919 if (GET_CODE (rtl) == CONST)
13920 {
13921 subrtx_var_iterator::array_type array;
13922 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13923 if (!const_ok_for_output_1 (*iter))
13924 return false;
13925 return true;
13926 }
13927
13928 return true;
13929 }
13930
13931 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13932 if possible, NULL otherwise. */
13933
13934 static dw_die_ref
13935 base_type_for_mode (machine_mode mode, bool unsignedp)
13936 {
13937 dw_die_ref type_die;
13938 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13939
13940 if (type == NULL)
13941 return NULL;
13942 switch (TREE_CODE (type))
13943 {
13944 case INTEGER_TYPE:
13945 case REAL_TYPE:
13946 break;
13947 default:
13948 return NULL;
13949 }
13950 type_die = lookup_type_die (type);
13951 if (!type_die)
13952 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13953 comp_unit_die ());
13954 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13955 return NULL;
13956 return type_die;
13957 }
13958
13959 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
13960 type matching MODE, or, if MODE is narrower than or as wide as
13961 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
13962 possible. */
13963
13964 static dw_loc_descr_ref
13965 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
13966 {
13967 machine_mode outer_mode = mode;
13968 dw_die_ref type_die;
13969 dw_loc_descr_ref cvt;
13970
13971 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13972 {
13973 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13974 return op;
13975 }
13976 type_die = base_type_for_mode (outer_mode, 1);
13977 if (type_die == NULL)
13978 return NULL;
13979 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13980 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13981 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13982 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13983 add_loc_descr (&op, cvt);
13984 return op;
13985 }
13986
13987 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13988
13989 static dw_loc_descr_ref
13990 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13991 dw_loc_descr_ref op1)
13992 {
13993 dw_loc_descr_ref ret = op0;
13994 add_loc_descr (&ret, op1);
13995 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13996 if (STORE_FLAG_VALUE != 1)
13997 {
13998 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
13999 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14000 }
14001 return ret;
14002 }
14003
14004 /* Subroutine of scompare_loc_descriptor for the case in which we're
14005 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14006 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14007
14008 static dw_loc_descr_ref
14009 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14010 scalar_int_mode op_mode,
14011 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14012 {
14013 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14014 dw_loc_descr_ref cvt;
14015
14016 if (type_die == NULL)
14017 return NULL;
14018 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14019 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14020 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14021 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14022 add_loc_descr (&op0, cvt);
14023 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14024 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14025 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14026 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14027 add_loc_descr (&op1, cvt);
14028 return compare_loc_descriptor (op, op0, op1);
14029 }
14030
14031 /* Subroutine of scompare_loc_descriptor for the case in which we're
14032 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14033 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14034
14035 static dw_loc_descr_ref
14036 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14037 scalar_int_mode op_mode,
14038 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14039 {
14040 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14041 /* For eq/ne, if the operands are known to be zero-extended,
14042 there is no need to do the fancy shifting up. */
14043 if (op == DW_OP_eq || op == DW_OP_ne)
14044 {
14045 dw_loc_descr_ref last0, last1;
14046 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14047 ;
14048 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14049 ;
14050 /* deref_size zero extends, and for constants we can check
14051 whether they are zero extended or not. */
14052 if (((last0->dw_loc_opc == DW_OP_deref_size
14053 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14054 || (CONST_INT_P (XEXP (rtl, 0))
14055 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14056 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14057 && ((last1->dw_loc_opc == DW_OP_deref_size
14058 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14059 || (CONST_INT_P (XEXP (rtl, 1))
14060 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14061 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14062 return compare_loc_descriptor (op, op0, op1);
14063
14064 /* EQ/NE comparison against constant in narrower type than
14065 DWARF2_ADDR_SIZE can be performed either as
14066 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14067 DW_OP_{eq,ne}
14068 or
14069 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14070 DW_OP_{eq,ne}. Pick whatever is shorter. */
14071 if (CONST_INT_P (XEXP (rtl, 1))
14072 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14073 && (size_of_int_loc_descriptor (shift) + 1
14074 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14075 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14076 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14077 & GET_MODE_MASK (op_mode))))
14078 {
14079 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14080 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14081 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14082 & GET_MODE_MASK (op_mode));
14083 return compare_loc_descriptor (op, op0, op1);
14084 }
14085 }
14086 add_loc_descr (&op0, int_loc_descriptor (shift));
14087 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14088 if (CONST_INT_P (XEXP (rtl, 1)))
14089 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14090 else
14091 {
14092 add_loc_descr (&op1, int_loc_descriptor (shift));
14093 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14094 }
14095 return compare_loc_descriptor (op, op0, op1);
14096 }
14097
14098 /* Return location descriptor for signed comparison OP RTL. */
14099
14100 static dw_loc_descr_ref
14101 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14102 machine_mode mem_mode)
14103 {
14104 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14105 dw_loc_descr_ref op0, op1;
14106
14107 if (op_mode == VOIDmode)
14108 op_mode = GET_MODE (XEXP (rtl, 1));
14109 if (op_mode == VOIDmode)
14110 return NULL;
14111
14112 scalar_int_mode int_op_mode;
14113 if (dwarf_strict
14114 && dwarf_version < 5
14115 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14116 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14117 return NULL;
14118
14119 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14120 VAR_INIT_STATUS_INITIALIZED);
14121 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14122 VAR_INIT_STATUS_INITIALIZED);
14123
14124 if (op0 == NULL || op1 == NULL)
14125 return NULL;
14126
14127 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14128 {
14129 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14130 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14131
14132 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14133 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14134 }
14135 return compare_loc_descriptor (op, op0, op1);
14136 }
14137
14138 /* Return location descriptor for unsigned comparison OP RTL. */
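/* Illustrative note: when OP_MODE is exactly DWARF2_ADDR_SIZE wide, the
   code below biases both operands by 2**(N-1) (modulo 2**N, N being the
   address size in bits) so that the signed DWARF comparison operators
   give the unsigned answer.  E.g. for N == 32, 0xffffffff <u 1 is false;
   the biased values 0x7fffffff and 0x80000001 compare the same way under
   the signed DW_OP_lt.  */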
14139
14140 static dw_loc_descr_ref
14141 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14142 machine_mode mem_mode)
14143 {
14144 dw_loc_descr_ref op0, op1;
14145
14146 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14147 if (test_op_mode == VOIDmode)
14148 test_op_mode = GET_MODE (XEXP (rtl, 1));
14149
14150 scalar_int_mode op_mode;
14151 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14152 return NULL;
14153
14154 if (dwarf_strict
14155 && dwarf_version < 5
14156 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14157 return NULL;
14158
14159 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14160 VAR_INIT_STATUS_INITIALIZED);
14161 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14162 VAR_INIT_STATUS_INITIALIZED);
14163
14164 if (op0 == NULL || op1 == NULL)
14165 return NULL;
14166
14167 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14168 {
14169 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14170 dw_loc_descr_ref last0, last1;
14171 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14172 ;
14173 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14174 ;
14175 if (CONST_INT_P (XEXP (rtl, 0)))
14176 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14177 /* deref_size zero extends, so no need to mask it again. */
14178 else if (last0->dw_loc_opc != DW_OP_deref_size
14179 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14180 {
14181 add_loc_descr (&op0, int_loc_descriptor (mask));
14182 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14183 }
14184 if (CONST_INT_P (XEXP (rtl, 1)))
14185 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14186 /* deref_size zero extends, so no need to mask it again. */
14187 else if (last1->dw_loc_opc != DW_OP_deref_size
14188 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14189 {
14190 add_loc_descr (&op1, int_loc_descriptor (mask));
14191 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14192 }
14193 }
14194 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14195 {
14196 HOST_WIDE_INT bias = 1;
14197 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14198 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14199 if (CONST_INT_P (XEXP (rtl, 1)))
14200 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14201 + INTVAL (XEXP (rtl, 1)));
14202 else
14203 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14204 bias, 0));
14205 }
14206 return compare_loc_descriptor (op, op0, op1);
14207 }
14208
14209 /* Return location descriptor for {U,S}{MIN,MAX}. */
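/* Sketch of the expression built below (illustrative): for SMIN of A and
   B in a mode as wide as DWARF2_ADDR_SIZE the sequence is roughly
     <A> DW_OP_dup <B> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L>
     DW_OP_swap L: DW_OP_drop
   which leaves A on the stack when A < B and B otherwise.  For the
   narrow, wide and unsigned variants the copies consumed by the
   comparison are first masked, shifted, biased or converted as
   appropriate, while the original values remain on the stack as the
   candidates to be selected.  */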
14210
14211 static dw_loc_descr_ref
14212 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14213 machine_mode mem_mode)
14214 {
14215 enum dwarf_location_atom op;
14216 dw_loc_descr_ref op0, op1, ret;
14217 dw_loc_descr_ref bra_node, drop_node;
14218
14219 scalar_int_mode int_mode;
14220 if (dwarf_strict
14221 && dwarf_version < 5
14222 && (!is_a <scalar_int_mode> (mode, &int_mode)
14223 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14224 return NULL;
14225
14226 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14227 VAR_INIT_STATUS_INITIALIZED);
14228 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14229 VAR_INIT_STATUS_INITIALIZED);
14230
14231 if (op0 == NULL || op1 == NULL)
14232 return NULL;
14233
14234 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14235 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14236 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14237 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14238 {
14239 /* Checked by the caller. */
14240 int_mode = as_a <scalar_int_mode> (mode);
14241 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14242 {
14243 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14244 add_loc_descr (&op0, int_loc_descriptor (mask));
14245 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14246 add_loc_descr (&op1, int_loc_descriptor (mask));
14247 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14248 }
14249 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14250 {
14251 HOST_WIDE_INT bias = 1;
14252 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14253 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14254 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14255 }
14256 }
14257 else if (is_a <scalar_int_mode> (mode, &int_mode)
14258 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14259 {
14260 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14261 add_loc_descr (&op0, int_loc_descriptor (shift));
14262 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14263 add_loc_descr (&op1, int_loc_descriptor (shift));
14264 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14265 }
14266 else if (is_a <scalar_int_mode> (mode, &int_mode)
14267 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14268 {
14269 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14270 dw_loc_descr_ref cvt;
14271 if (type_die == NULL)
14272 return NULL;
14273 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14274 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14275 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14276 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14277 add_loc_descr (&op0, cvt);
14278 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14279 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14280 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14281 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14282 add_loc_descr (&op1, cvt);
14283 }
14284
14285 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14286 op = DW_OP_lt;
14287 else
14288 op = DW_OP_gt;
14289 ret = op0;
14290 add_loc_descr (&ret, op1);
14291 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14292 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14293 add_loc_descr (&ret, bra_node);
14294 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14295 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14296 add_loc_descr (&ret, drop_node);
14297 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14298 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14299 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14300 && is_a <scalar_int_mode> (mode, &int_mode)
14301 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14302 ret = convert_descriptor_to_mode (int_mode, ret);
14303 return ret;
14304 }
14305
14306 /* Helper function for mem_loc_descriptor. Perform binary operation OP
14307 after converting both arguments to TYPE_DIE, then convert the result
14308 back to unsigned via convert_descriptor_to_mode. */
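/* The generated sequence is therefore roughly
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
   followed by the conversion back emitted by convert_descriptor_to_mode
   (illustrative summary only).  */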
14309
14310 static dw_loc_descr_ref
14311 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14312 scalar_int_mode mode, machine_mode mem_mode)
14313 {
14314 dw_loc_descr_ref cvt, op0, op1;
14315
14316 if (type_die == NULL)
14317 return NULL;
14318 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14319 VAR_INIT_STATUS_INITIALIZED);
14320 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14321 VAR_INIT_STATUS_INITIALIZED);
14322 if (op0 == NULL || op1 == NULL)
14323 return NULL;
14324 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14325 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14326 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14327 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14328 add_loc_descr (&op0, cvt);
14329 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14330 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14331 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14332 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14333 add_loc_descr (&op1, cvt);
14334 add_loc_descr (&op0, op1);
14335 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14336 return convert_descriptor_to_mode (mode, op0);
14337 }
14338
14339 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14340 const0 is DW_OP_lit0 or corresponding typed constant,
14341 const1 is DW_OP_lit1 or corresponding typed constant
14342 and constMSB is constant with just the MSB bit set
14343 for the mode):
14344 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14345 L1: const0 DW_OP_swap
14346 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14347 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14348 L3: DW_OP_drop
14349 L4: DW_OP_nop
14350
14351 CTZ is similar:
14352 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14353 L1: const0 DW_OP_swap
14354 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14355 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14356 L3: DW_OP_drop
14357 L4: DW_OP_nop
14358
14359 FFS is similar:
14360 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14361 L1: const1 DW_OP_swap
14362 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14363 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14364 L3: DW_OP_drop
14365 L4: DW_OP_nop */
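/* Worked example (illustrative): CLZ of the 8-bit value 0x10.  The value
   is nonzero, so the initial DW_OP_bra branches to L1 and the counter
   starts at const0.  Each pass through the L2 loop tests the MSB (0x80
   here) of the working copy: 0x10, 0x20 and 0x40 fail the test, so the
   copy is shifted left and the counter incremented three times; once the
   copy reaches 0x80 the DW_OP_bra to L3 fires, the copy is dropped and
   the result 3 is left on the stack.  */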
14366
14367 static dw_loc_descr_ref
14368 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14369 machine_mode mem_mode)
14370 {
14371 dw_loc_descr_ref op0, ret, tmp;
14372 HOST_WIDE_INT valv;
14373 dw_loc_descr_ref l1jump, l1label;
14374 dw_loc_descr_ref l2jump, l2label;
14375 dw_loc_descr_ref l3jump, l3label;
14376 dw_loc_descr_ref l4jump, l4label;
14377 rtx msb;
14378
14379 if (GET_MODE (XEXP (rtl, 0)) != mode)
14380 return NULL;
14381
14382 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14383 VAR_INIT_STATUS_INITIALIZED);
14384 if (op0 == NULL)
14385 return NULL;
14386 ret = op0;
14387 if (GET_CODE (rtl) == CLZ)
14388 {
14389 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14390 valv = GET_MODE_BITSIZE (mode);
14391 }
14392 else if (GET_CODE (rtl) == FFS)
14393 valv = 0;
14394 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14395 valv = GET_MODE_BITSIZE (mode);
14396 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14397 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14398 add_loc_descr (&ret, l1jump);
14399 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14400 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14401 VAR_INIT_STATUS_INITIALIZED);
14402 if (tmp == NULL)
14403 return NULL;
14404 add_loc_descr (&ret, tmp);
14405 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14406 add_loc_descr (&ret, l4jump);
14407 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14408 ? const1_rtx : const0_rtx,
14409 mode, mem_mode,
14410 VAR_INIT_STATUS_INITIALIZED);
14411 if (l1label == NULL)
14412 return NULL;
14413 add_loc_descr (&ret, l1label);
14414 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14415 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14416 add_loc_descr (&ret, l2label);
14417 if (GET_CODE (rtl) != CLZ)
14418 msb = const1_rtx;
14419 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14420 msb = GEN_INT (HOST_WIDE_INT_1U
14421 << (GET_MODE_BITSIZE (mode) - 1));
14422 else
14423 msb = immed_wide_int_const
14424 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14425 GET_MODE_PRECISION (mode)), mode);
14426 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14427 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14428 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14429 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14430 else
14431 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14432 VAR_INIT_STATUS_INITIALIZED);
14433 if (tmp == NULL)
14434 return NULL;
14435 add_loc_descr (&ret, tmp);
14436 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14437 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14438 add_loc_descr (&ret, l3jump);
14439 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14440 VAR_INIT_STATUS_INITIALIZED);
14441 if (tmp == NULL)
14442 return NULL;
14443 add_loc_descr (&ret, tmp);
14444 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14445 ? DW_OP_shl : DW_OP_shr, 0, 0));
14446 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14447 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14448 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14449 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14450 add_loc_descr (&ret, l2jump);
14451 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14452 add_loc_descr (&ret, l3label);
14453 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14454 add_loc_descr (&ret, l4label);
14455 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14456 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14457 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14458 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14459 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14460 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14461 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14462 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14463 return ret;
14464 }
14465
14466 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14467 const1 is DW_OP_lit1 or corresponding typed constant):
14468 const0 DW_OP_swap
14469 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14470 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14471 L2: DW_OP_drop
14472
14473 PARITY is similar:
14474 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14475 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14476 L2: DW_OP_drop */
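/* Illustrative example: POPCOUNT (0b1011) is 3 and PARITY (0b1011) is 1.
   Each iteration of the loop adds (for POPCOUNT) or XORs (for PARITY)
   the low bit of the working value into the accumulator and then shifts
   the working value right by one, so the accumulator ends up holding the
   population count or its parity.  */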
14477
14478 static dw_loc_descr_ref
14479 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14480 machine_mode mem_mode)
14481 {
14482 dw_loc_descr_ref op0, ret, tmp;
14483 dw_loc_descr_ref l1jump, l1label;
14484 dw_loc_descr_ref l2jump, l2label;
14485
14486 if (GET_MODE (XEXP (rtl, 0)) != mode)
14487 return NULL;
14488
14489 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14490 VAR_INIT_STATUS_INITIALIZED);
14491 if (op0 == NULL)
14492 return NULL;
14493 ret = op0;
14494 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14495 VAR_INIT_STATUS_INITIALIZED);
14496 if (tmp == NULL)
14497 return NULL;
14498 add_loc_descr (&ret, tmp);
14499 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14500 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14501 add_loc_descr (&ret, l1label);
14502 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14503 add_loc_descr (&ret, l2jump);
14504 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14505 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14506 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14507 VAR_INIT_STATUS_INITIALIZED);
14508 if (tmp == NULL)
14509 return NULL;
14510 add_loc_descr (&ret, tmp);
14511 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14512 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14513 ? DW_OP_plus : DW_OP_xor, 0, 0));
14514 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14515 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14516 VAR_INIT_STATUS_INITIALIZED);
14517 add_loc_descr (&ret, tmp);
14518 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14519 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14520 add_loc_descr (&ret, l1jump);
14521 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14522 add_loc_descr (&ret, l2label);
14523 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14524 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14525 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14526 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14527 return ret;
14528 }
14529
14530 /* BSWAP (constS is initial shift count, either 56 or 24):
14531 constS const0
14532 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14533 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14534 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14535 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14536 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
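/* Illustrative example: BSWAP of the 32-bit value 0x11223344 evaluates
   to 0x44332211.  Each iteration extracts one byte of the source value,
   starting with the least significant one, ORs it into the mirrored
   position of the accumulator, and decreases the shift counter by 8
   until it reaches zero.  */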
14537
14538 static dw_loc_descr_ref
14539 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
14540 machine_mode mem_mode)
14541 {
14542 dw_loc_descr_ref op0, ret, tmp;
14543 dw_loc_descr_ref l1jump, l1label;
14544 dw_loc_descr_ref l2jump, l2label;
14545
14546 if (BITS_PER_UNIT != 8
14547 || (GET_MODE_BITSIZE (mode) != 32
14548 && GET_MODE_BITSIZE (mode) != 64))
14549 return NULL;
14550
14551 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14552 VAR_INIT_STATUS_INITIALIZED);
14553 if (op0 == NULL)
14554 return NULL;
14555
14556 ret = op0;
14557 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14558 mode, mem_mode,
14559 VAR_INIT_STATUS_INITIALIZED);
14560 if (tmp == NULL)
14561 return NULL;
14562 add_loc_descr (&ret, tmp);
14563 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14564 VAR_INIT_STATUS_INITIALIZED);
14565 if (tmp == NULL)
14566 return NULL;
14567 add_loc_descr (&ret, tmp);
14568 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14569 add_loc_descr (&ret, l1label);
14570 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14571 mode, mem_mode,
14572 VAR_INIT_STATUS_INITIALIZED);
14573 add_loc_descr (&ret, tmp);
14574 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14575 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14576 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14577 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14578 VAR_INIT_STATUS_INITIALIZED);
14579 if (tmp == NULL)
14580 return NULL;
14581 add_loc_descr (&ret, tmp);
14582 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14583 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14584 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14585 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14586 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14587 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14588 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14589 VAR_INIT_STATUS_INITIALIZED);
14590 add_loc_descr (&ret, tmp);
14591 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14592 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14593 add_loc_descr (&ret, l2jump);
14594 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14595 VAR_INIT_STATUS_INITIALIZED);
14596 add_loc_descr (&ret, tmp);
14597 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14598 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14599 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14600 add_loc_descr (&ret, l1jump);
14601 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14602 add_loc_descr (&ret, l2label);
14603 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14604 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14605 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14606 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14607 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14608 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14609 return ret;
14610 }
14611
14612 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14613 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14614 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14615 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14616
14617 ROTATERT is similar:
14618 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14619 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14620 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
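/* Illustrative example: an 8-bit ROTATE of 0xa1 by 4 yields 0x1a.  The
   sequence keeps copies of the value and the rotate count on the stack,
   forms (X << N) masked to the mode, then (X >> (BITSIZE - N)) where the
   subtraction is expressed as DW_OP_neg DW_OP_plus_uconst <BITSIZE>, and
   ORs the two halves together.  */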
14621
14622 static dw_loc_descr_ref
14623 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
14624 machine_mode mem_mode)
14625 {
14626 rtx rtlop1 = XEXP (rtl, 1);
14627 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14628 int i;
14629
14630 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
14631 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14632 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14633 VAR_INIT_STATUS_INITIALIZED);
14634 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14635 VAR_INIT_STATUS_INITIALIZED);
14636 if (op0 == NULL || op1 == NULL)
14637 return NULL;
14638 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14639 for (i = 0; i < 2; i++)
14640 {
14641 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14642 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14643 mode, mem_mode,
14644 VAR_INIT_STATUS_INITIALIZED);
14645 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14646 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14647 ? DW_OP_const4u
14648 : HOST_BITS_PER_WIDE_INT == 64
14649 ? DW_OP_const8u : DW_OP_constu,
14650 GET_MODE_MASK (mode), 0);
14651 else
14652 mask[i] = NULL;
14653 if (mask[i] == NULL)
14654 return NULL;
14655 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14656 }
14657 ret = op0;
14658 add_loc_descr (&ret, op1);
14659 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14660 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14661 if (GET_CODE (rtl) == ROTATERT)
14662 {
14663 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14664 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14665 GET_MODE_BITSIZE (mode), 0));
14666 }
14667 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14668 if (mask[0] != NULL)
14669 add_loc_descr (&ret, mask[0]);
14670 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14671 if (mask[1] != NULL)
14672 {
14673 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14674 add_loc_descr (&ret, mask[1]);
14675 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14676 }
14677 if (GET_CODE (rtl) == ROTATE)
14678 {
14679 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14680 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14681 GET_MODE_BITSIZE (mode), 0));
14682 }
14683 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14684 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14685 return ret;
14686 }
14687
14688 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14689 for DEBUG_PARAMETER_REF RTL. */
14690
14691 static dw_loc_descr_ref
14692 parameter_ref_descriptor (rtx rtl)
14693 {
14694 dw_loc_descr_ref ret;
14695 dw_die_ref ref;
14696
14697 if (dwarf_strict)
14698 return NULL;
14699 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14700 /* With LTO during LTRANS we get the late DIE that refers to the early
14701 DIE, thus we add another indirection here. This seems to confuse
14702 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
14703 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14704 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14705 if (ref)
14706 {
14707 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14708 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14709 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14710 }
14711 else
14712 {
14713 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14714 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14715 }
14716 return ret;
14717 }
14718
14719 /* The following routine converts the RTL for a variable or parameter
14720 (resident in memory) into an equivalent Dwarf representation of a
14721 mechanism for getting the address of that same variable onto the top of a
14722 hypothetical "address evaluation" stack.
14723
14724 When creating memory location descriptors, we are effectively transforming
14725 the RTL for a memory-resident object into its Dwarf postfix expression
14726 equivalent. This routine recursively descends an RTL tree, turning
14727 it into Dwarf postfix code as it goes.
14728
14729 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14730
14731 MEM_MODE is the mode of the memory reference, needed to handle some
14732 autoincrement addressing modes.
14733
14734 Return 0 if we can't represent the location. */
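/* For instance (illustrative only), the address
     (plus:DI (reg:DI <fp>) (const_int 8))
   typically becomes the single operation DW_OP_breg<fp> <8>, and
     (mem:SI (plus:DI (reg:DI <sp>) (const_int 16)))
   becomes DW_OP_breg<sp> <16> DW_OP_deref_size <4> on a 64-bit target,
   the deref size being the size of the narrower loaded mode.  */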
14735
14736 dw_loc_descr_ref
14737 mem_loc_descriptor (rtx rtl, machine_mode mode,
14738 machine_mode mem_mode,
14739 enum var_init_status initialized)
14740 {
14741 dw_loc_descr_ref mem_loc_result = NULL;
14742 enum dwarf_location_atom op;
14743 dw_loc_descr_ref op0, op1;
14744 rtx inner = NULL_RTX;
14745 poly_int64 offset;
14746
14747 if (mode == VOIDmode)
14748 mode = GET_MODE (rtl);
14749
14750 /* Note that for a dynamically sized array, the location we will generate a
14751 description of here will be the lowest numbered location which is
14752 actually within the array. That's *not* necessarily the same as the
14753 zeroth element of the array. */
14754
14755 rtl = targetm.delegitimize_address (rtl);
14756
14757 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14758 return NULL;
14759
14760 scalar_int_mode int_mode, inner_mode, op1_mode;
14761 switch (GET_CODE (rtl))
14762 {
14763 case POST_INC:
14764 case POST_DEC:
14765 case POST_MODIFY:
14766 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14767
14768 case SUBREG:
14769 /* The case of a subreg may arise when we have a local (register)
14770 variable or a formal (register) parameter which doesn't quite fill
14771 up an entire register. For now, just assume that it is
14772 legitimate to make the Dwarf info refer to the whole register which
14773 contains the given subreg. */
14774 if (!subreg_lowpart_p (rtl))
14775 break;
14776 inner = SUBREG_REG (rtl);
14777 /* FALLTHRU */
14778 case TRUNCATE:
14779 if (inner == NULL_RTX)
14780 inner = XEXP (rtl, 0);
14781 if (is_a <scalar_int_mode> (mode, &int_mode)
14782 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14783 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14784 #ifdef POINTERS_EXTEND_UNSIGNED
14785 || (int_mode == Pmode && mem_mode != VOIDmode)
14786 #endif
14787 )
14788 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
14789 {
14790 mem_loc_result = mem_loc_descriptor (inner,
14791 inner_mode,
14792 mem_mode, initialized);
14793 break;
14794 }
14795 if (dwarf_strict && dwarf_version < 5)
14796 break;
14797 if (is_a <scalar_int_mode> (mode, &int_mode)
14798 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14799 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
14800 : GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (inner)))
14801 {
14802 dw_die_ref type_die;
14803 dw_loc_descr_ref cvt;
14804
14805 mem_loc_result = mem_loc_descriptor (inner,
14806 GET_MODE (inner),
14807 mem_mode, initialized);
14808 if (mem_loc_result == NULL)
14809 break;
14810 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14811 if (type_die == NULL)
14812 {
14813 mem_loc_result = NULL;
14814 break;
14815 }
14816 if (GET_MODE_SIZE (mode)
14817 != GET_MODE_SIZE (GET_MODE (inner)))
14818 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14819 else
14820 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14821 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14822 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14823 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14824 add_loc_descr (&mem_loc_result, cvt);
14825 if (is_a <scalar_int_mode> (mode, &int_mode)
14826 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14827 {
14828 /* Convert it to untyped afterwards. */
14829 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14830 add_loc_descr (&mem_loc_result, cvt);
14831 }
14832 }
14833 break;
14834
14835 case REG:
14836 if (!is_a <scalar_int_mode> (mode, &int_mode)
14837 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14838 && rtl != arg_pointer_rtx
14839 && rtl != frame_pointer_rtx
14840 #ifdef POINTERS_EXTEND_UNSIGNED
14841 && (int_mode != Pmode || mem_mode == VOIDmode)
14842 #endif
14843 ))
14844 {
14845 dw_die_ref type_die;
14846 unsigned int dbx_regnum;
14847
14848 if (dwarf_strict && dwarf_version < 5)
14849 break;
14850 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14851 break;
14852 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14853 if (type_die == NULL)
14854 break;
14855
14856 dbx_regnum = dbx_reg_number (rtl);
14857 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14858 break;
14859 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14860 dbx_regnum, 0);
14861 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14862 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14863 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14864 break;
14865 }
14866 /* Whenever a register number forms a part of the description of the
14867 method for calculating the (dynamic) address of a memory resident
14868 object, DWARF rules require the register number be referred to as
14869 a "base register". This distinction is not based in any way upon
14870 what category of register the hardware believes the given register
14871 belongs to. This is strictly DWARF terminology we're dealing with
14872 here. Note that in cases where the location of a memory-resident
14873 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14874 OP_CONST (0)) the actual DWARF location descriptor that we generate
14875 may just be OP_BASEREG (basereg). This may look deceptively like
14876 the object in question was allocated to a register (rather than in
14877 memory) so DWARF consumers need to be aware of the subtle
14878 distinction between OP_REG and OP_BASEREG. */
14879 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14880 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14881 else if (stack_realign_drap
14882 && crtl->drap_reg
14883 && crtl->args.internal_arg_pointer == rtl
14884 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14885 {
14886 /* If RTL is internal_arg_pointer, which has been optimized
14887 out, use DRAP instead. */
14888 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14889 VAR_INIT_STATUS_INITIALIZED);
14890 }
14891 break;
14892
14893 case SIGN_EXTEND:
14894 case ZERO_EXTEND:
14895 if (!is_a <scalar_int_mode> (mode, &int_mode)
14896 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
14897 break;
14898 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
14899 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14900 if (op0 == 0)
14901 break;
14902 else if (GET_CODE (rtl) == ZERO_EXTEND
14903 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14904 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
14905 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14906 to expand zero extend as two shifts instead of
14907 masking. */
14908 && GET_MODE_SIZE (inner_mode) <= 4)
14909 {
14910 mem_loc_result = op0;
14911 add_loc_descr (&mem_loc_result,
14912 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
14913 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14914 }
14915 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14916 {
14917 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
14918 shift *= BITS_PER_UNIT;
14919 if (GET_CODE (rtl) == SIGN_EXTEND)
14920 op = DW_OP_shra;
14921 else
14922 op = DW_OP_shr;
14923 mem_loc_result = op0;
14924 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14925 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14926 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14927 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14928 }
14929 else if (!dwarf_strict || dwarf_version >= 5)
14930 {
14931 dw_die_ref type_die1, type_die2;
14932 dw_loc_descr_ref cvt;
14933
14934 type_die1 = base_type_for_mode (inner_mode,
14935 GET_CODE (rtl) == ZERO_EXTEND);
14936 if (type_die1 == NULL)
14937 break;
14938 type_die2 = base_type_for_mode (int_mode, 1);
14939 if (type_die2 == NULL)
14940 break;
14941 mem_loc_result = op0;
14942 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14943 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14944 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14945 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14946 add_loc_descr (&mem_loc_result, cvt);
14947 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14948 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14949 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14950 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14951 add_loc_descr (&mem_loc_result, cvt);
14952 }
14953 break;
14954
14955 case MEM:
14956 {
14957 rtx new_rtl = avoid_constant_pool_reference (rtl);
14958 if (new_rtl != rtl)
14959 {
14960 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14961 initialized);
14962 if (mem_loc_result != NULL)
14963 return mem_loc_result;
14964 }
14965 }
14966 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14967 get_address_mode (rtl), mode,
14968 VAR_INIT_STATUS_INITIALIZED);
14969 if (mem_loc_result == NULL)
14970 mem_loc_result = tls_mem_loc_descriptor (rtl);
14971 if (mem_loc_result != NULL)
14972 {
14973 if (!is_a <scalar_int_mode> (mode, &int_mode)
14974 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14975 {
14976 dw_die_ref type_die;
14977 dw_loc_descr_ref deref;
14978
14979 if (dwarf_strict && dwarf_version < 5)
14980 return NULL;
14981 type_die
14982 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14983 if (type_die == NULL)
14984 return NULL;
14985 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type),
14986 GET_MODE_SIZE (mode), 0);
14987 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14988 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14989 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14990 add_loc_descr (&mem_loc_result, deref);
14991 }
14992 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14993 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
14994 else
14995 add_loc_descr (&mem_loc_result,
14996 new_loc_descr (DW_OP_deref_size,
14997 GET_MODE_SIZE (int_mode), 0));
14998 }
14999 break;
15000
15001 case LO_SUM:
15002 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15003
15004 case LABEL_REF:
15005 /* Some ports can transform a symbol ref into a label ref, because
15006 the symbol ref is too far away and has to be dumped into a constant
15007 pool. */
15008 case CONST:
15009 case SYMBOL_REF:
15010 if (!is_a <scalar_int_mode> (mode, &int_mode)
15011 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15012 #ifdef POINTERS_EXTEND_UNSIGNED
15013 && (int_mode != Pmode || mem_mode == VOIDmode)
15014 #endif
15015 ))
15016 break;
15017 if (GET_CODE (rtl) == SYMBOL_REF
15018 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15019 {
15020 dw_loc_descr_ref temp;
15021
15022 /* If this is not defined, we have no way to emit the data. */
15023 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15024 break;
15025
15026 temp = new_addr_loc_descr (rtl, dtprel_true);
15027
15028 /* We check for DWARF 5 here because gdb did not implement
15029 DW_OP_form_tls_address until after 7.12. */
15030 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15031 ? DW_OP_form_tls_address
15032 : DW_OP_GNU_push_tls_address),
15033 0, 0);
15034 add_loc_descr (&mem_loc_result, temp);
15035
15036 break;
15037 }
15038
15039 if (!const_ok_for_output (rtl))
15040 {
15041 if (GET_CODE (rtl) == CONST)
15042 switch (GET_CODE (XEXP (rtl, 0)))
15043 {
15044 case NOT:
15045 op = DW_OP_not;
15046 goto try_const_unop;
15047 case NEG:
15048 op = DW_OP_neg;
15049 goto try_const_unop;
15050 try_const_unop:
15051 rtx arg;
15052 arg = XEXP (XEXP (rtl, 0), 0);
15053 if (!CONSTANT_P (arg))
15054 arg = gen_rtx_CONST (int_mode, arg);
15055 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15056 initialized);
15057 if (op0)
15058 {
15059 mem_loc_result = op0;
15060 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15061 }
15062 break;
15063 default:
15064 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15065 mem_mode, initialized);
15066 break;
15067 }
15068 break;
15069 }
15070
15071 symref:
15072 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15073 vec_safe_push (used_rtx_array, rtl);
15074 break;
15075
15076 case CONCAT:
15077 case CONCATN:
15078 case VAR_LOCATION:
15079 case DEBUG_IMPLICIT_PTR:
15080 expansion_failed (NULL_TREE, rtl,
15081 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15082 return 0;
15083
15084 case ENTRY_VALUE:
15085 if (dwarf_strict && dwarf_version < 5)
15086 return NULL;
15087 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15088 {
15089 if (!is_a <scalar_int_mode> (mode, &int_mode)
15090 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15091 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15092 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15093 else
15094 {
15095 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15096 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15097 return NULL;
15098 op0 = one_reg_loc_descriptor (dbx_regnum,
15099 VAR_INIT_STATUS_INITIALIZED);
15100 }
15101 }
15102 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15103 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15104 {
15105 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15106 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15107 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15108 return NULL;
15109 }
15110 else
15111 gcc_unreachable ();
15112 if (op0 == NULL)
15113 return NULL;
15114 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15115 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15116 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15117 break;
15118
15119 case DEBUG_PARAMETER_REF:
15120 mem_loc_result = parameter_ref_descriptor (rtl);
15121 break;
15122
15123 case PRE_MODIFY:
15124 /* Extract the PLUS expression nested inside and fall into
15125 PLUS code below. */
15126 rtl = XEXP (rtl, 1);
15127 goto plus;
15128
15129 case PRE_INC:
15130 case PRE_DEC:
15131 /* Turn these into a PLUS expression and fall into the PLUS code
15132 below. */
15133 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15134 gen_int_mode (GET_CODE (rtl) == PRE_INC
15135 ? GET_MODE_UNIT_SIZE (mem_mode)
15136 : -GET_MODE_UNIT_SIZE (mem_mode),
15137 mode));
15138
15139 /* fall through */
15140
15141 case PLUS:
15142 plus:
15143 if (is_based_loc (rtl)
15144 && is_a <scalar_int_mode> (mode, &int_mode)
15145 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15146 || XEXP (rtl, 0) == arg_pointer_rtx
15147 || XEXP (rtl, 0) == frame_pointer_rtx))
15148 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15149 INTVAL (XEXP (rtl, 1)),
15150 VAR_INIT_STATUS_INITIALIZED);
15151 else
15152 {
15153 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15154 VAR_INIT_STATUS_INITIALIZED);
15155 if (mem_loc_result == 0)
15156 break;
15157
15158 if (CONST_INT_P (XEXP (rtl, 1))
15159 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15160 <= DWARF2_ADDR_SIZE))
15161 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15162 else
15163 {
15164 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15165 VAR_INIT_STATUS_INITIALIZED);
15166 if (op1 == 0)
15167 return NULL;
15168 add_loc_descr (&mem_loc_result, op1);
15169 add_loc_descr (&mem_loc_result,
15170 new_loc_descr (DW_OP_plus, 0, 0));
15171 }
15172 }
15173 break;
15174
15175 /* If a pseudo-reg is optimized away, it is possible for it to
15176 be replaced with a MEM containing a multiply or shift. */
15177 case MINUS:
15178 op = DW_OP_minus;
15179 goto do_binop;
15180
15181 case MULT:
15182 op = DW_OP_mul;
15183 goto do_binop;
15184
15185 case DIV:
15186 if ((!dwarf_strict || dwarf_version >= 5)
15187 && is_a <scalar_int_mode> (mode, &int_mode)
15188 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15189 {
15190 mem_loc_result = typed_binop (DW_OP_div, rtl,
15191 base_type_for_mode (mode, 0),
15192 int_mode, mem_mode);
15193 break;
15194 }
15195 op = DW_OP_div;
15196 goto do_binop;
15197
15198 case UMOD:
15199 op = DW_OP_mod;
15200 goto do_binop;
15201
15202 case ASHIFT:
15203 op = DW_OP_shl;
15204 goto do_shift;
15205
15206 case ASHIFTRT:
15207 op = DW_OP_shra;
15208 goto do_shift;
15209
15210 case LSHIFTRT:
15211 op = DW_OP_shr;
15212 goto do_shift;
15213
15214 do_shift:
15215 if (!is_a <scalar_int_mode> (mode, &int_mode))
15216 break;
15217 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15218 VAR_INIT_STATUS_INITIALIZED);
15219 {
15220 rtx rtlop1 = XEXP (rtl, 1);
15221 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15222 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15223 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15224 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 }
15227
15228 if (op0 == 0 || op1 == 0)
15229 break;
15230
15231 mem_loc_result = op0;
15232 add_loc_descr (&mem_loc_result, op1);
15233 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15234 break;
15235
15236 case AND:
15237 op = DW_OP_and;
15238 goto do_binop;
15239
15240 case IOR:
15241 op = DW_OP_or;
15242 goto do_binop;
15243
15244 case XOR:
15245 op = DW_OP_xor;
15246 goto do_binop;
15247
15248 do_binop:
15249 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15250 VAR_INIT_STATUS_INITIALIZED);
15251 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15252 VAR_INIT_STATUS_INITIALIZED);
15253
15254 if (op0 == 0 || op1 == 0)
15255 break;
15256
15257 mem_loc_result = op0;
15258 add_loc_descr (&mem_loc_result, op1);
15259 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15260 break;
15261
15262 case MOD:
15263 if ((!dwarf_strict || dwarf_version >= 5)
15264 && is_a <scalar_int_mode> (mode, &int_mode)
15265 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15266 {
15267 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15268 base_type_for_mode (mode, 0),
15269 int_mode, mem_mode);
15270 break;
15271 }
15272
15273 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277
15278 if (op0 == 0 || op1 == 0)
15279 break;
15280
15281 mem_loc_result = op0;
15282 add_loc_descr (&mem_loc_result, op1);
15283 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15284 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15285 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15286 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15287 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15288 break;
15289
15290 case UDIV:
15291 if ((!dwarf_strict || dwarf_version >= 5)
15292 && is_a <scalar_int_mode> (mode, &int_mode))
15293 {
15294 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15295 {
15296 op = DW_OP_div;
15297 goto do_binop;
15298 }
15299 mem_loc_result = typed_binop (DW_OP_div, rtl,
15300 base_type_for_mode (int_mode, 1),
15301 int_mode, mem_mode);
15302 }
15303 break;
15304
15305 case NOT:
15306 op = DW_OP_not;
15307 goto do_unop;
15308
15309 case ABS:
15310 op = DW_OP_abs;
15311 goto do_unop;
15312
15313 case NEG:
15314 op = DW_OP_neg;
15315 goto do_unop;
15316
15317 do_unop:
15318 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15319 VAR_INIT_STATUS_INITIALIZED);
15320
15321 if (op0 == 0)
15322 break;
15323
15324 mem_loc_result = op0;
15325 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15326 break;
15327
15328 case CONST_INT:
15329 if (!is_a <scalar_int_mode> (mode, &int_mode)
15330 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15331 #ifdef POINTERS_EXTEND_UNSIGNED
15332 || (int_mode == Pmode
15333 && mem_mode != VOIDmode
15334 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15335 #endif
15336 )
15337 {
15338 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15339 break;
15340 }
15341 if ((!dwarf_strict || dwarf_version >= 5)
15342 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15343 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15344 {
15345 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15346 scalar_int_mode amode;
15347 if (type_die == NULL)
15348 return NULL;
15349 if (INTVAL (rtl) >= 0
15350 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15351 .exists (&amode))
15352 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15353 /* const DW_OP_convert <XXX> vs.
15354 DW_OP_const_type <XXX, 1, const>. */
15355 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15356 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15357 {
15358 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15359 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15360 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15361 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15362 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15363 add_loc_descr (&mem_loc_result, op0);
15364 return mem_loc_result;
15365 }
15366 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15367 INTVAL (rtl));
15368 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15369 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15370 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15371 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15372 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15373 else
15374 {
15375 mem_loc_result->dw_loc_oprnd2.val_class
15376 = dw_val_class_const_double;
15377 mem_loc_result->dw_loc_oprnd2.v.val_double
15378 = double_int::from_shwi (INTVAL (rtl));
15379 }
15380 }
15381 break;
15382
15383 case CONST_DOUBLE:
15384 if (!dwarf_strict || dwarf_version >= 5)
15385 {
15386 dw_die_ref type_die;
15387
15388 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15389 CONST_DOUBLE rtx could represent either a large integer
15390 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15391 the value is always a floating point constant.
15392
15393 When it is an integer, a CONST_DOUBLE is used whenever
15394 the constant requires 2 HWIs to be adequately represented.
15395 We output CONST_DOUBLEs as blocks. */
15396 if (mode == VOIDmode
15397 || (GET_MODE (rtl) == VOIDmode
15398 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
15399 break;
15400 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15401 if (type_die == NULL)
15402 return NULL;
15403 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15404 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15405 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15406 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15407 #if TARGET_SUPPORTS_WIDE_INT == 0
15408 if (!SCALAR_FLOAT_MODE_P (mode))
15409 {
15410 mem_loc_result->dw_loc_oprnd2.val_class
15411 = dw_val_class_const_double;
15412 mem_loc_result->dw_loc_oprnd2.v.val_double
15413 = rtx_to_double_int (rtl);
15414 }
15415 else
15416 #endif
15417 {
15418 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15419 unsigned int length = GET_MODE_SIZE (float_mode);
15420 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15421
15422 insert_float (rtl, array);
15423 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15424 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15425 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15426 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15427 }
15428 }
15429 break;
15430
15431 case CONST_WIDE_INT:
15432 if (!dwarf_strict || dwarf_version >= 5)
15433 {
15434 dw_die_ref type_die;
15435
15436 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15437 if (type_die == NULL)
15438 return NULL;
15439 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15440 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15441 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15442 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15443 mem_loc_result->dw_loc_oprnd2.val_class
15444 = dw_val_class_wide_int;
15445 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15446 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15447 }
15448 break;
15449
15450 case CONST_POLY_INT:
15451 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
15452 break;
15453
15454 case EQ:
15455 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15456 break;
15457
15458 case GE:
15459 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15460 break;
15461
15462 case GT:
15463 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15464 break;
15465
15466 case LE:
15467 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15468 break;
15469
15470 case LT:
15471 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15472 break;
15473
15474 case NE:
15475 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15476 break;
15477
15478 case GEU:
15479 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15480 break;
15481
15482 case GTU:
15483 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15484 break;
15485
15486 case LEU:
15487 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15488 break;
15489
15490 case LTU:
15491 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15492 break;
15493
15494 case UMIN:
15495 case UMAX:
15496 if (!SCALAR_INT_MODE_P (mode))
15497 break;
15498 /* FALLTHRU */
15499 case SMIN:
15500 case SMAX:
15501 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15502 break;
15503
15504 case ZERO_EXTRACT:
15505 case SIGN_EXTRACT:
15506 if (CONST_INT_P (XEXP (rtl, 1))
15507 && CONST_INT_P (XEXP (rtl, 2))
15508 && is_a <scalar_int_mode> (mode, &int_mode)
15509 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
15510 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15511 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
15512 && ((unsigned) INTVAL (XEXP (rtl, 1))
15513 + (unsigned) INTVAL (XEXP (rtl, 2))
15514 <= GET_MODE_BITSIZE (int_mode)))
15515 {
15516 int shift, size;
15517 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15518 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15519 if (op0 == 0)
15520 break;
15521 if (GET_CODE (rtl) == SIGN_EXTRACT)
15522 op = DW_OP_shra;
15523 else
15524 op = DW_OP_shr;
15525 mem_loc_result = op0;
15526 size = INTVAL (XEXP (rtl, 1));
15527 shift = INTVAL (XEXP (rtl, 2));
15528 if (BITS_BIG_ENDIAN)
15529 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
15530 if (shift + size != (int) DWARF2_ADDR_SIZE)
15531 {
15532 add_loc_descr (&mem_loc_result,
15533 int_loc_descriptor (DWARF2_ADDR_SIZE
15534 - shift - size));
15535 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15536 }
15537 if (size != (int) DWARF2_ADDR_SIZE)
15538 {
15539 add_loc_descr (&mem_loc_result,
15540 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
15541 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15542 }
15543 }
15544 break;
15545
15546 case IF_THEN_ELSE:
15547 {
15548 dw_loc_descr_ref op2, bra_node, drop_node;
15549 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15550 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15551 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15552 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15553 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15554 VAR_INIT_STATUS_INITIALIZED);
15555 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15556 VAR_INIT_STATUS_INITIALIZED);
15557 if (op0 == NULL || op1 == NULL || op2 == NULL)
15558 break;
15559
15560 mem_loc_result = op1;
15561 add_loc_descr (&mem_loc_result, op2);
15562 add_loc_descr (&mem_loc_result, op0);
15563 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15564 add_loc_descr (&mem_loc_result, bra_node);
15565 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15566 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15567 add_loc_descr (&mem_loc_result, drop_node);
15568 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15569 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15570 }
15571 break;
15572
15573 case FLOAT_EXTEND:
15574 case FLOAT_TRUNCATE:
15575 case FLOAT:
15576 case UNSIGNED_FLOAT:
15577 case FIX:
15578 case UNSIGNED_FIX:
15579 if (!dwarf_strict || dwarf_version >= 5)
15580 {
15581 dw_die_ref type_die;
15582 dw_loc_descr_ref cvt;
15583
15584 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15585 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15586 if (op0 == NULL)
15587 break;
15588 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
15589 && (GET_CODE (rtl) == FLOAT
15590 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
15591 {
15592 type_die = base_type_for_mode (int_mode,
15593 GET_CODE (rtl) == UNSIGNED_FLOAT);
15594 if (type_die == NULL)
15595 break;
15596 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15597 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15598 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15599 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15600 add_loc_descr (&op0, cvt);
15601 }
15602 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15603 if (type_die == NULL)
15604 break;
15605 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15606 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15607 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15608 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15609 add_loc_descr (&op0, cvt);
15610 if (is_a <scalar_int_mode> (mode, &int_mode)
15611 && (GET_CODE (rtl) == FIX
15612 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
15613 {
15614 op0 = convert_descriptor_to_mode (int_mode, op0);
15615 if (op0 == NULL)
15616 break;
15617 }
15618 mem_loc_result = op0;
15619 }
15620 break;
15621
15622 case CLZ:
15623 case CTZ:
15624 case FFS:
15625 if (is_a <scalar_int_mode> (mode, &int_mode))
15626 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
15627 break;
15628
15629 case POPCOUNT:
15630 case PARITY:
15631 if (is_a <scalar_int_mode> (mode, &int_mode))
15632 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
15633 break;
15634
15635 case BSWAP:
15636 if (is_a <scalar_int_mode> (mode, &int_mode))
15637 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
15638 break;
15639
15640 case ROTATE:
15641 case ROTATERT:
15642 if (is_a <scalar_int_mode> (mode, &int_mode))
15643 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
15644 break;
15645
15646 case COMPARE:
15647 /* In theory, we could implement the above. */
15648 /* DWARF cannot represent the unsigned compare operations
15649 natively. */
15650 case SS_MULT:
15651 case US_MULT:
15652 case SS_DIV:
15653 case US_DIV:
15654 case SS_PLUS:
15655 case US_PLUS:
15656 case SS_MINUS:
15657 case US_MINUS:
15658 case SS_NEG:
15659 case US_NEG:
15660 case SS_ABS:
15661 case SS_ASHIFT:
15662 case US_ASHIFT:
15663 case SS_TRUNCATE:
15664 case US_TRUNCATE:
15665 case UNORDERED:
15666 case ORDERED:
15667 case UNEQ:
15668 case UNGE:
15669 case UNGT:
15670 case UNLE:
15671 case UNLT:
15672 case LTGT:
15673 case FRACT_CONVERT:
15674 case UNSIGNED_FRACT_CONVERT:
15675 case SAT_FRACT:
15676 case UNSIGNED_SAT_FRACT:
15677 case SQRT:
15678 case ASM_OPERANDS:
15679 case VEC_MERGE:
15680 case VEC_SELECT:
15681 case VEC_CONCAT:
15682 case VEC_DUPLICATE:
15683 case VEC_SERIES:
15684 case UNSPEC:
15685 case HIGH:
15686 case FMA:
15687 case STRICT_LOW_PART:
15688 case CONST_VECTOR:
15689 case CONST_FIXED:
15690 case CLRSB:
15691 case CLOBBER:
15692 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15693 can't express it in the debug info. This can happen e.g. with some
15694 TLS UNSPECs. */
15695 break;
15696
15697 case CONST_STRING:
15698 resolve_one_addr (&rtl);
15699 goto symref;
15700
15701 /* RTL sequences inside PARALLEL record a series of DWARF operations for
15702 the expression. An UNSPEC rtx represents a raw DWARF operation;
15703 new_loc_descr is called for it to build the operation directly.
15704 Otherwise mem_loc_descriptor is called recursively. */
15705 case PARALLEL:
15706 {
15707 int index = 0;
15708 dw_loc_descr_ref exp_result = NULL;
15709
15710 for (; index < XVECLEN (rtl, 0); index++)
15711 {
15712 rtx elem = XVECEXP (rtl, 0, index);
15713 if (GET_CODE (elem) == UNSPEC)
15714 {
15715 /* Each DWARF operation UNSPEC contains two operands; if
15716 an operand is not used for the operation, const0_rtx is
15717 passed. */
15718 gcc_assert (XVECLEN (elem, 0) == 2);
15719
15720 HOST_WIDE_INT dw_op = XINT (elem, 1);
15721 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15722 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15723 exp_result
15724 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15725 oprnd2);
15726 }
15727 else
15728 exp_result
15729 = mem_loc_descriptor (elem, mode, mem_mode,
15730 VAR_INIT_STATUS_INITIALIZED);
15731
15732 if (!mem_loc_result)
15733 mem_loc_result = exp_result;
15734 else
15735 add_loc_descr (&mem_loc_result, exp_result);
15736 }
15737
15738 break;
15739 }
15740
15741 default:
15742 if (flag_checking)
15743 {
15744 print_rtl (stderr, rtl);
15745 gcc_unreachable ();
15746 }
15747 break;
15748 }
15749
15750 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15751 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15752
15753 return mem_loc_result;
15754 }
15755
15756 /* Return a descriptor that describes the concatenation of two locations.
15757 This is typically a complex variable. */
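 /* For example (an illustrative sketch): a complex double split across two
 registers could be described roughly as
 DW_OP_regX DW_OP_piece 8 DW_OP_regY DW_OP_piece 8
 i.e. each half is described on its own and then covered by a DW_OP_piece
 of its size in bytes, which is what the code below emits. */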
15758
15759 static dw_loc_descr_ref
15760 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15761 {
15762 dw_loc_descr_ref cc_loc_result = NULL;
15763 dw_loc_descr_ref x0_ref
15764 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15765 dw_loc_descr_ref x1_ref
15766 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15767
15768 if (x0_ref == 0 || x1_ref == 0)
15769 return 0;
15770
15771 cc_loc_result = x0_ref;
15772 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
15773
15774 add_loc_descr (&cc_loc_result, x1_ref);
15775 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
15776
15777 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15778 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15779
15780 return cc_loc_result;
15781 }
15782
15783 /* Return a descriptor that describes the concatenation of N
15784 locations. */
15785
15786 static dw_loc_descr_ref
15787 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15788 {
15789 unsigned int i;
15790 dw_loc_descr_ref cc_loc_result = NULL;
15791 unsigned int n = XVECLEN (concatn, 0);
15792
15793 for (i = 0; i < n; ++i)
15794 {
15795 dw_loc_descr_ref ref;
15796 rtx x = XVECEXP (concatn, 0, i);
15797
15798 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15799 if (ref == NULL)
15800 return NULL;
15801
15802 add_loc_descr (&cc_loc_result, ref);
15803 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
15804 }
15805
15806 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15807 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15808
15809 return cc_loc_result;
15810 }
15811
15812 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15813 for DEBUG_IMPLICIT_PTR RTL. */
15814
15815 static dw_loc_descr_ref
15816 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15817 {
15818 dw_loc_descr_ref ret;
15819 dw_die_ref ref;
15820
15821 if (dwarf_strict && dwarf_version < 5)
15822 return NULL;
15823 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15824 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15825 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15826 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15827 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15828 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15829 if (ref)
15830 {
15831 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15832 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15833 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15834 }
15835 else
15836 {
15837 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15838 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15839 }
15840 return ret;
15841 }
15842
15843 /* Output a proper Dwarf location descriptor for a variable or parameter
15844 which is either allocated in a register or in a memory location. For a
15845 register, we just generate an OP_REG and the register number. For a
15846 memory location we provide a Dwarf postfix expression describing how to
15847 generate the (dynamic) address of the object onto the address stack.
15848
15849 MODE is mode of the decl if this loc_descriptor is going to be used in
15850 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
15851 allowed, VOIDmode otherwise.
15852
15853 If we don't know how to describe it, return 0. */
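 /* For instance (illustrative only): a value living in hardware register 5
 would be described by the single operation DW_OP_reg5, while a stack slot
 at offset -16 from the frame base would be described by an address
 computation such as DW_OP_fbreg -16. */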
15854
15855 static dw_loc_descr_ref
15856 loc_descriptor (rtx rtl, machine_mode mode,
15857 enum var_init_status initialized)
15858 {
15859 dw_loc_descr_ref loc_result = NULL;
15860 scalar_int_mode int_mode;
15861
15862 switch (GET_CODE (rtl))
15863 {
15864 case SUBREG:
15865 /* The case of a subreg may arise when we have a local (register)
15866 variable or a formal (register) parameter which doesn't quite fill
15867 up an entire register. For now, just assume that it is
15868 legitimate to make the Dwarf info refer to the whole register which
15869 contains the given subreg. */
15870 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15871 loc_result = loc_descriptor (SUBREG_REG (rtl),
15872 GET_MODE (SUBREG_REG (rtl)), initialized);
15873 else
15874 goto do_default;
15875 break;
15876
15877 case REG:
15878 loc_result = reg_loc_descriptor (rtl, initialized);
15879 break;
15880
15881 case MEM:
15882 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15883 GET_MODE (rtl), initialized);
15884 if (loc_result == NULL)
15885 loc_result = tls_mem_loc_descriptor (rtl);
15886 if (loc_result == NULL)
15887 {
15888 rtx new_rtl = avoid_constant_pool_reference (rtl);
15889 if (new_rtl != rtl)
15890 loc_result = loc_descriptor (new_rtl, mode, initialized);
15891 }
15892 break;
15893
15894 case CONCAT:
15895 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15896 initialized);
15897 break;
15898
15899 case CONCATN:
15900 loc_result = concatn_loc_descriptor (rtl, initialized);
15901 break;
15902
15903 case VAR_LOCATION:
15904 /* Single part. */
15905 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15906 {
15907 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15908 if (GET_CODE (loc) == EXPR_LIST)
15909 loc = XEXP (loc, 0);
15910 loc_result = loc_descriptor (loc, mode, initialized);
15911 break;
15912 }
15913
15914 rtl = XEXP (rtl, 1);
15915 /* FALLTHRU */
15916
15917 case PARALLEL:
15918 {
15919 rtvec par_elems = XVEC (rtl, 0);
15920 int num_elem = GET_NUM_ELEM (par_elems);
15921 machine_mode mode;
15922 int i;
15923
15924 /* Create the first one, so we have something to add to. */
15925 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15926 VOIDmode, initialized);
15927 if (loc_result == NULL)
15928 return NULL;
15929 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15930 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15931 for (i = 1; i < num_elem; i++)
15932 {
15933 dw_loc_descr_ref temp;
15934
15935 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15936 VOIDmode, initialized);
15937 if (temp == NULL)
15938 return NULL;
15939 add_loc_descr (&loc_result, temp);
15940 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15941 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15942 }
15943 }
15944 break;
15945
15946 case CONST_INT:
15947 if (mode != VOIDmode && mode != BLKmode)
15948 {
15949 int_mode = as_a <scalar_int_mode> (mode);
15950 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
15951 INTVAL (rtl));
15952 }
15953 break;
15954
15955 case CONST_DOUBLE:
15956 if (mode == VOIDmode)
15957 mode = GET_MODE (rtl);
15958
15959 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15960 {
15961 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15962
15963 /* Note that a CONST_DOUBLE rtx could represent either an integer
15964 or a floating-point constant. A CONST_DOUBLE is used whenever
15965 the constant requires more than one word in order to be
15966 adequately represented. We output CONST_DOUBLEs as blocks. */
15967 scalar_mode smode = as_a <scalar_mode> (mode);
15968 loc_result = new_loc_descr (DW_OP_implicit_value,
15969 GET_MODE_SIZE (smode), 0);
15970 #if TARGET_SUPPORTS_WIDE_INT == 0
15971 if (!SCALAR_FLOAT_MODE_P (smode))
15972 {
15973 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15974 loc_result->dw_loc_oprnd2.v.val_double
15975 = rtx_to_double_int (rtl);
15976 }
15977 else
15978 #endif
15979 {
15980 unsigned int length = GET_MODE_SIZE (smode);
15981 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15982
15983 insert_float (rtl, array);
15984 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15985 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15986 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15987 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15988 }
15989 }
15990 break;
15991
15992 case CONST_WIDE_INT:
15993 if (mode == VOIDmode)
15994 mode = GET_MODE (rtl);
15995
15996 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15997 {
15998 int_mode = as_a <scalar_int_mode> (mode);
15999 loc_result = new_loc_descr (DW_OP_implicit_value,
16000 GET_MODE_SIZE (int_mode), 0);
16001 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16002 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16003 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16004 }
16005 break;
16006
16007 case CONST_VECTOR:
16008 if (mode == VOIDmode)
16009 mode = GET_MODE (rtl);
16010
16011 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16012 {
16013 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16014 unsigned int length = CONST_VECTOR_NUNITS (rtl);
16015 unsigned char *array
16016 = ggc_vec_alloc<unsigned char> (length * elt_size);
16017 unsigned int i;
16018 unsigned char *p;
16019 machine_mode imode = GET_MODE_INNER (mode);
16020
16021 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16022 switch (GET_MODE_CLASS (mode))
16023 {
16024 case MODE_VECTOR_INT:
16025 for (i = 0, p = array; i < length; i++, p += elt_size)
16026 {
16027 rtx elt = CONST_VECTOR_ELT (rtl, i);
16028 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16029 }
16030 break;
16031
16032 case MODE_VECTOR_FLOAT:
16033 for (i = 0, p = array; i < length; i++, p += elt_size)
16034 {
16035 rtx elt = CONST_VECTOR_ELT (rtl, i);
16036 insert_float (elt, p);
16037 }
16038 break;
16039
16040 default:
16041 gcc_unreachable ();
16042 }
16043
16044 loc_result = new_loc_descr (DW_OP_implicit_value,
16045 length * elt_size, 0);
16046 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16047 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16048 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16049 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16050 }
16051 break;
16052
16053 case CONST:
16054 if (mode == VOIDmode
16055 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16056 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16057 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16058 {
16059 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16060 break;
16061 }
16062 /* FALLTHROUGH */
16063 case SYMBOL_REF:
16064 if (!const_ok_for_output (rtl))
16065 break;
16066 /* FALLTHROUGH */
16067 case LABEL_REF:
16068 if (is_a <scalar_int_mode> (mode, &int_mode)
16069 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16070 && (dwarf_version >= 4 || !dwarf_strict))
16071 {
16072 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16073 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16074 vec_safe_push (used_rtx_array, rtl);
16075 }
16076 break;
16077
16078 case DEBUG_IMPLICIT_PTR:
16079 loc_result = implicit_ptr_descriptor (rtl, 0);
16080 break;
16081
16082 case PLUS:
16083 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16084 && CONST_INT_P (XEXP (rtl, 1)))
16085 {
16086 loc_result
16087 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16088 break;
16089 }
16090 /* FALLTHRU */
16091 do_default:
16092 default:
16093 if ((is_a <scalar_int_mode> (mode, &int_mode)
16094 && GET_MODE (rtl) == int_mode
16095 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16096 && dwarf_version >= 4)
16097 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16098 {
16099 /* Value expression. */
16100 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16101 if (loc_result)
16102 add_loc_descr (&loc_result,
16103 new_loc_descr (DW_OP_stack_value, 0, 0));
16104 }
16105 break;
16106 }
16107
16108 return loc_result;
16109 }
16110
16111 /* We need to figure out what section we should use as the base for the
16112 address ranges where a given location is valid.
16113 1. If this particular DECL has a section associated with it, use that.
16114 2. If this function has a section associated with it, use that.
16115 3. Otherwise, use the text section.
16116 XXX: If you split a variable across multiple sections, we won't notice. */
16117
16118 static const char *
16119 secname_for_decl (const_tree decl)
16120 {
16121 const char *secname;
16122
16123 if (VAR_OR_FUNCTION_DECL_P (decl)
16124 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16125 && DECL_SECTION_NAME (decl))
16126 secname = DECL_SECTION_NAME (decl);
16127 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16128 secname = DECL_SECTION_NAME (current_function_decl);
16129 else if (cfun && in_cold_section_p)
16130 secname = crtl->subsections.cold_section_label;
16131 else
16132 secname = text_section_label;
16133
16134 return secname;
16135 }
16136
16137 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16138
16139 static bool
16140 decl_by_reference_p (tree decl)
16141 {
16142 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16143 || VAR_P (decl))
16144 && DECL_BY_REFERENCE (decl));
16145 }
16146
16147 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16148 for VARLOC. */
16149
16150 static dw_loc_descr_ref
16151 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16152 enum var_init_status initialized)
16153 {
16154 int have_address = 0;
16155 dw_loc_descr_ref descr;
16156 machine_mode mode;
16157
16158 if (want_address != 2)
16159 {
16160 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16161 /* Single part. */
16162 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16163 {
16164 varloc = PAT_VAR_LOCATION_LOC (varloc);
16165 if (GET_CODE (varloc) == EXPR_LIST)
16166 varloc = XEXP (varloc, 0);
16167 mode = GET_MODE (varloc);
16168 if (MEM_P (varloc))
16169 {
16170 rtx addr = XEXP (varloc, 0);
16171 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16172 mode, initialized);
16173 if (descr)
16174 have_address = 1;
16175 else
16176 {
16177 rtx x = avoid_constant_pool_reference (varloc);
16178 if (x != varloc)
16179 descr = mem_loc_descriptor (x, mode, VOIDmode,
16180 initialized);
16181 }
16182 }
16183 else
16184 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16185 }
16186 else
16187 return 0;
16188 }
16189 else
16190 {
16191 if (GET_CODE (varloc) == VAR_LOCATION)
16192 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16193 else
16194 mode = DECL_MODE (loc);
16195 descr = loc_descriptor (varloc, mode, initialized);
16196 have_address = 1;
16197 }
16198
16199 if (!descr)
16200 return 0;
16201
16202 if (want_address == 2 && !have_address
16203 && (dwarf_version >= 4 || !dwarf_strict))
16204 {
16205 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16206 {
16207 expansion_failed (loc, NULL_RTX,
16208 "DWARF address size mismatch");
16209 return 0;
16210 }
16211 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16212 have_address = 1;
16213 }
16214 /* Show if we can't fill the request for an address. */
16215 if (want_address && !have_address)
16216 {
16217 expansion_failed (loc, NULL_RTX,
16218 "Want address and only have value");
16219 return 0;
16220 }
16221
16222 /* If we've got an address and don't want one, dereference. */
16223 if (!want_address && have_address)
16224 {
16225 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16226 enum dwarf_location_atom op;
16227
16228 if (size > DWARF2_ADDR_SIZE || size == -1)
16229 {
16230 expansion_failed (loc, NULL_RTX,
16231 "DWARF address size mismatch");
16232 return 0;
16233 }
16234 else if (size == DWARF2_ADDR_SIZE)
16235 op = DW_OP_deref;
16236 else
16237 op = DW_OP_deref_size;
16238
16239 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16240 }
16241
16242 return descr;
16243 }
16244
16245 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16246 if it is not possible. */
16247
16248 static dw_loc_descr_ref
16249 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16250 {
16251 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16252 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16253 else if (dwarf_version >= 3 || !dwarf_strict)
16254 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16255 else
16256 return NULL;
16257 }
16258
16259 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16260 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16261
16262 static dw_loc_descr_ref
16263 dw_sra_loc_expr (tree decl, rtx loc)
16264 {
16265 rtx p;
16266 unsigned HOST_WIDE_INT padsize = 0;
16267 dw_loc_descr_ref descr, *descr_tail;
16268 unsigned HOST_WIDE_INT decl_size;
16269 rtx varloc;
16270 enum var_init_status initialized;
16271
16272 if (DECL_SIZE (decl) == NULL
16273 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16274 return NULL;
16275
16276 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16277 descr = NULL;
16278 descr_tail = &descr;
16279
16280 for (p = loc; p; p = XEXP (p, 1))
16281 {
16282 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16283 rtx loc_note = *decl_piece_varloc_ptr (p);
16284 dw_loc_descr_ref cur_descr;
16285 dw_loc_descr_ref *tail, last = NULL;
16286 unsigned HOST_WIDE_INT opsize = 0;
16287
16288 if (loc_note == NULL_RTX
16289 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16290 {
16291 padsize += bitsize;
16292 continue;
16293 }
16294 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16295 varloc = NOTE_VAR_LOCATION (loc_note);
16296 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16297 if (cur_descr == NULL)
16298 {
16299 padsize += bitsize;
16300 continue;
16301 }
16302
16303 /* Check that cur_descr either doesn't use
16304 DW_OP_*piece operations, or their sum is equal
16305 to bitsize. Otherwise we can't embed it. */
16306 for (tail = &cur_descr; *tail != NULL;
16307 tail = &(*tail)->dw_loc_next)
16308 if ((*tail)->dw_loc_opc == DW_OP_piece)
16309 {
16310 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16311 * BITS_PER_UNIT;
16312 last = *tail;
16313 }
16314 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16315 {
16316 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16317 last = *tail;
16318 }
16319
16320 if (last != NULL && opsize != bitsize)
16321 {
16322 padsize += bitsize;
16323 /* Discard the current piece of the descriptor and release any
16324 addr_table entries it uses. */
16325 remove_loc_list_addr_table_entries (cur_descr);
16326 continue;
16327 }
16328
16329 /* If there is a hole, add DW_OP_*piece after empty DWARF
16330 expression, which means that those bits are optimized out. */
16331 if (padsize)
16332 {
16333 if (padsize > decl_size)
16334 {
16335 remove_loc_list_addr_table_entries (cur_descr);
16336 goto discard_descr;
16337 }
16338 decl_size -= padsize;
16339 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16340 if (*descr_tail == NULL)
16341 {
16342 remove_loc_list_addr_table_entries (cur_descr);
16343 goto discard_descr;
16344 }
16345 descr_tail = &(*descr_tail)->dw_loc_next;
16346 padsize = 0;
16347 }
16348 *descr_tail = cur_descr;
16349 descr_tail = tail;
16350 if (bitsize > decl_size)
16351 goto discard_descr;
16352 decl_size -= bitsize;
16353 if (last == NULL)
16354 {
16355 HOST_WIDE_INT offset = 0;
16356 if (GET_CODE (varloc) == VAR_LOCATION
16357 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16358 {
16359 varloc = PAT_VAR_LOCATION_LOC (varloc);
16360 if (GET_CODE (varloc) == EXPR_LIST)
16361 varloc = XEXP (varloc, 0);
16362 }
16363 do
16364 {
16365 if (GET_CODE (varloc) == CONST
16366 || GET_CODE (varloc) == SIGN_EXTEND
16367 || GET_CODE (varloc) == ZERO_EXTEND)
16368 varloc = XEXP (varloc, 0);
16369 else if (GET_CODE (varloc) == SUBREG)
16370 varloc = SUBREG_REG (varloc);
16371 else
16372 break;
16373 }
16374 while (1);
16375 /* The DW_OP_bit_piece offset should be zero for register
16376 or implicit location descriptions and empty location
16377 descriptions, but for memory addresses it needs big-endian
16378 adjustment. */
16379 if (MEM_P (varloc))
16380 {
16381 unsigned HOST_WIDE_INT memsize;
16382 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16383 goto discard_descr;
16384 memsize *= BITS_PER_UNIT;
16385 if (memsize != bitsize)
16386 {
16387 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16388 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16389 goto discard_descr;
16390 if (memsize < bitsize)
16391 goto discard_descr;
16392 if (BITS_BIG_ENDIAN)
16393 offset = memsize - bitsize;
16394 }
16395 }
16396
16397 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16398 if (*descr_tail == NULL)
16399 goto discard_descr;
16400 descr_tail = &(*descr_tail)->dw_loc_next;
16401 }
16402 }
16403
16404 /* If there were any non-empty expressions, add padding till the end of
16405 the decl. */
16406 if (descr != NULL && decl_size != 0)
16407 {
16408 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16409 if (*descr_tail == NULL)
16410 goto discard_descr;
16411 }
16412 return descr;
16413
16414 discard_descr:
16415 /* Discard the descriptor and release any addr_table entries it uses. */
16416 remove_loc_list_addr_table_entries (descr);
16417 return NULL;
16418 }
16419
16420 /* Return the dwarf representation of the location list LOC_LIST of
16421 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16422 function. */
16423
16424 static dw_loc_list_ref
16425 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16426 {
16427 const char *endname, *secname;
16428 rtx varloc;
16429 enum var_init_status initialized;
16430 struct var_loc_node *node;
16431 dw_loc_descr_ref descr;
16432 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16433 dw_loc_list_ref list = NULL;
16434 dw_loc_list_ref *listp = &list;
16435
16436 /* Now that we know what section we are using for a base,
16437 actually construct the list of locations.
16438 The first location information is what is passed to the
16439 function that creates the location list, and the remaining
16440 locations just get added on to that list.
16441 Note that we only know the start address for a location
16442 (i.e. location changes), so to build the range, we use
16443 the range [current location start, next location start].
16444 This means we have to special case the last node, and generate
16445 a range of [last location start, end of function label]. */
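 /* As a small illustration (with hypothetical labels): three location
 notes starting at labels L1, L2 and L3 in a function that ends at LFE
 would produce the ranges [L1, L2), [L2, L3) and [L3, LFE). */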
16446
16447 if (cfun && crtl->has_bb_partition)
16448 {
16449 bool save_in_cold_section_p = in_cold_section_p;
16450 in_cold_section_p = first_function_block_is_cold;
16451 if (loc_list->last_before_switch == NULL)
16452 in_cold_section_p = !in_cold_section_p;
16453 secname = secname_for_decl (decl);
16454 in_cold_section_p = save_in_cold_section_p;
16455 }
16456 else
16457 secname = secname_for_decl (decl);
16458
16459 for (node = loc_list->first; node; node = node->next)
16460 {
16461 bool range_across_switch = false;
16462 if (GET_CODE (node->loc) == EXPR_LIST
16463 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16464 {
16465 if (GET_CODE (node->loc) == EXPR_LIST)
16466 {
16467 descr = NULL;
16468 /* This requires DW_OP_{,bit_}piece, which is not usable
16469 inside DWARF expressions. */
16470 if (want_address == 2)
16471 descr = dw_sra_loc_expr (decl, node->loc);
16472 }
16473 else
16474 {
16475 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16476 varloc = NOTE_VAR_LOCATION (node->loc);
16477 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16478 }
16479 if (descr)
16480 {
16481 /* If section switch happens in between node->label
16482 and node->next->label (or end of function) and
16483 we can't emit it as a single entry list,
16484 emit two ranges, first one ending at the end
16485 of first partition and second one starting at the
16486 beginning of second partition. */
16487 if (node == loc_list->last_before_switch
16488 && (node != loc_list->first || loc_list->first->next)
16489 && current_function_decl)
16490 {
16491 endname = cfun->fde->dw_fde_end;
16492 range_across_switch = true;
16493 }
16494 /* The variable has a location between NODE->LABEL and
16495 NODE->NEXT->LABEL. */
16496 else if (node->next)
16497 endname = node->next->label;
16498 /* If the variable has a location at the last label
16499 it keeps its location until the end of function. */
16500 else if (!current_function_decl)
16501 endname = text_end_label;
16502 else
16503 {
16504 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16505 current_function_funcdef_no);
16506 endname = ggc_strdup (label_id);
16507 }
16508
16509 *listp = new_loc_list (descr, node->label, endname, secname);
16510 if (TREE_CODE (decl) == PARM_DECL
16511 && node == loc_list->first
16512 && NOTE_P (node->loc)
16513 && strcmp (node->label, endname) == 0)
16514 (*listp)->force = true;
16515 listp = &(*listp)->dw_loc_next;
16516 }
16517 }
16518
16519 if (cfun
16520 && crtl->has_bb_partition
16521 && node == loc_list->last_before_switch)
16522 {
16523 bool save_in_cold_section_p = in_cold_section_p;
16524 in_cold_section_p = !first_function_block_is_cold;
16525 secname = secname_for_decl (decl);
16526 in_cold_section_p = save_in_cold_section_p;
16527 }
16528
16529 if (range_across_switch)
16530 {
16531 if (GET_CODE (node->loc) == EXPR_LIST)
16532 descr = dw_sra_loc_expr (decl, node->loc);
16533 else
16534 {
16535 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16536 varloc = NOTE_VAR_LOCATION (node->loc);
16537 descr = dw_loc_list_1 (decl, varloc, want_address,
16538 initialized);
16539 }
16540 gcc_assert (descr);
16541 /* The variable has a location between NODE->LABEL and
16542 NODE->NEXT->LABEL. */
16543 if (node->next)
16544 endname = node->next->label;
16545 else
16546 endname = cfun->fde->dw_fde_second_end;
16547 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin,
16548 endname, secname);
16549 listp = &(*listp)->dw_loc_next;
16550 }
16551 }
16552
16553 /* Try to avoid the overhead of a location list by emitting a location
16554 expression instead, but only if we didn't have more than one
16555 location entry in the first place. If some entries were not
16556 representable, we don't want to pretend that a single representable
16557 entry applies to the entire scope in which the variable is
16558 available. */
16559 if (list && loc_list->first->next)
16560 gen_llsym (list);
16561
16562 return list;
16563 }
16564
16565 /* Return true if the loc_list has only a single element and thus can be
16566 represented as a location description. */
16567
16568 static bool
16569 single_element_loc_list_p (dw_loc_list_ref list)
16570 {
16571 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16572 return !list->ll_symbol;
16573 }
16574
16575 /* Duplicate a single element of location list. */
16576
16577 static inline dw_loc_descr_ref
16578 copy_loc_descr (dw_loc_descr_ref ref)
16579 {
16580 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
16581 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16582 return copy;
16583 }
16584
16585 /* To each location in list LIST append loc descr REF. */
16586
16587 static void
16588 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16589 {
16590 dw_loc_descr_ref copy;
16591 add_loc_descr (&list->expr, ref);
16592 list = list->dw_loc_next;
16593 while (list)
16594 {
16595 copy = copy_loc_descr (ref);
16596 add_loc_descr (&list->expr, copy);
16597 while (copy->dw_loc_next)
16598 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16599 list = list->dw_loc_next;
16600 }
16601 }
16602
16603 /* To each location in list LIST prepend loc descr REF. */
16604
16605 static void
16606 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16607 {
16608 dw_loc_descr_ref copy;
16609 dw_loc_descr_ref ref_end = list->expr;
16610 add_loc_descr (&ref, list->expr);
16611 list->expr = ref;
16612 list = list->dw_loc_next;
16613 while (list)
16614 {
16615 dw_loc_descr_ref end = list->expr;
16616 list->expr = copy = copy_loc_descr (ref);
16617 while (copy->dw_loc_next != ref_end)
16618 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16619 copy->dw_loc_next = end;
16620 list = list->dw_loc_next;
16621 }
16622 }
16623
16624 /* Given two lists RET and LIST
16625 produce location list that is result of adding expression in LIST
16626 to expression in RET on each position in program.
16627 Might be destructive on both RET and LIST.
16628
16629 TODO: We handle only the simple cases of RET or LIST having at most one
16630 element. The general case would involve sorting the lists in program order
16631 and merging them, which will need some additional work.
16632 Adding that will improve the quality of debug info, especially for SRA-ed
16633 structures. */
16634
16635 static void
16636 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16637 {
16638 if (!list)
16639 return;
16640 if (!*ret)
16641 {
16642 *ret = list;
16643 return;
16644 }
16645 if (!list->dw_loc_next)
16646 {
16647 add_loc_descr_to_each (*ret, list->expr);
16648 return;
16649 }
16650 if (!(*ret)->dw_loc_next)
16651 {
16652 prepend_loc_descr_to_each (list, (*ret)->expr);
16653 *ret = list;
16654 return;
16655 }
16656 expansion_failed (NULL_TREE, NULL_RTX,
16657 "Don't know how to merge two non-trivial"
16658 " location lists.\n");
16659 *ret = NULL;
16660 return;
16661 }
16662
16663 /* LOC is a constant expression. Try our luck: look it up in the constant
16664 pool and return a loc_descr for its address. */
16665
16666 static dw_loc_descr_ref
16667 cst_pool_loc_descr (tree loc)
16668 {
16669 /* Get an RTL for this, if something has been emitted. */
16670 rtx rtl = lookup_constant_def (loc);
16671
16672 if (!rtl || !MEM_P (rtl))
16673 {
16674 gcc_assert (!rtl);
16675 return 0;
16676 }
16677 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16678
16679 /* TODO: We might get more coverage if we were actually delaying expansion
16680 of all expressions until the end of compilation, when constant pools are
16681 fully populated. */
16682 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16683 {
16684 expansion_failed (loc, NULL_RTX,
16685 "CST value in contant pool but not marked.");
16686 return 0;
16687 }
16688 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16689 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16690 }
16691
16692 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
16693 by looking for an inner INDIRECT_REF expression and turning
16694 it into simple arithmetic.
16695
16696 See loc_list_from_tree for the meaning of CONTEXT. */
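 /* For example (an illustrative sketch): for &ptr->field the inner
 reference is *ptr, so the address can be computed as the value of ptr
 plus the constant byte offset of field, e.g. the location of ptr
 followed by DW_OP_plus_uconst <offset>, as done below. */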
16697
16698 static dw_loc_list_ref
16699 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16700 loc_descr_context *context)
16701 {
16702 tree obj, offset;
16703 poly_int64 bitsize, bitpos, bytepos;
16704 machine_mode mode;
16705 int unsignedp, reversep, volatilep = 0;
16706 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16707
16708 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16709 &bitsize, &bitpos, &offset, &mode,
16710 &unsignedp, &reversep, &volatilep);
16711 STRIP_NOPS (obj);
16712 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
16713 {
16714 expansion_failed (loc, NULL_RTX, "bitfield access");
16715 return 0;
16716 }
16717 if (!INDIRECT_REF_P (obj))
16718 {
16719 expansion_failed (obj,
16720 NULL_RTX, "no indirect ref in inner refrence");
16721 return 0;
16722 }
16723 if (!offset && known_eq (bitpos, 0))
16724 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16725 context);
16726 else if (toplev
16727 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16728 && (dwarf_version >= 4 || !dwarf_strict))
16729 {
16730 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16731 if (!list_ret)
16732 return 0;
16733 if (offset)
16734 {
16735 /* Variable offset. */
16736 list_ret1 = loc_list_from_tree (offset, 0, context);
16737 if (list_ret1 == 0)
16738 return 0;
16739 add_loc_list (&list_ret, list_ret1);
16740 if (!list_ret)
16741 return 0;
16742 add_loc_descr_to_each (list_ret,
16743 new_loc_descr (DW_OP_plus, 0, 0));
16744 }
16745 HOST_WIDE_INT value;
16746 if (bytepos.is_constant (&value) && value > 0)
16747 add_loc_descr_to_each (list_ret,
16748 new_loc_descr (DW_OP_plus_uconst, value, 0));
16749 else if (maybe_ne (bytepos, 0))
16750 loc_list_plus_const (list_ret, bytepos);
16751 add_loc_descr_to_each (list_ret,
16752 new_loc_descr (DW_OP_stack_value, 0, 0));
16753 }
16754 return list_ret;
16755 }
16756
16757 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
16758 all operations from LOC are nops, move to the last one. Insert in NOPS all
16759 operations that are skipped. */
16760
16761 static void
16762 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16763 hash_set<dw_loc_descr_ref> &nops)
16764 {
16765 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16766 {
16767 nops.add (loc);
16768 loc = loc->dw_loc_next;
16769 }
16770 }
16771
16772 /* Helper for loc_descr_without_nops: free the location description operation
16773 P. */
16774
16775 bool
16776 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16777 {
16778 ggc_free (loc);
16779 return true;
16780 }
16781
16782 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16783 finishes LOC. */
16784
16785 static void
16786 loc_descr_without_nops (dw_loc_descr_ref &loc)
16787 {
16788 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16789 return;
16790
16791 /* Set of all DW_OP_nop operations we remove. */
16792 hash_set<dw_loc_descr_ref> nops;
16793
16794 /* First, strip all prefix NOP operations in order to keep the head of the
16795 operations list. */
16796 loc_descr_to_next_no_nop (loc, nops);
16797
16798 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16799 {
16800 /* For control flow operations: strip "prefix" nops in destination
16801 labels. */
16802 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16803 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16804 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16805 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16806
16807 /* Do the same for the operations that follow, then move to the next
16808 iteration. */
16809 if (cur->dw_loc_next != NULL)
16810 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16811 cur = cur->dw_loc_next;
16812 }
16813
16814 nops.traverse<void *, free_loc_descr> (NULL);
16815 }
16816
16817
16818 struct dwarf_procedure_info;
16819
16820 /* Helper structure for location descriptions generation. */
16821 struct loc_descr_context
16822 {
16823 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16824 NULL_TREE if DW_OP_push_object_address is invalid for this location
16825 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16826 tree context_type;
16827 /* The ..._DECL node that should be translated as a
16828 DW_OP_push_object_address operation. */
16829 tree base_decl;
16830 /* Information about the DWARF procedure we are currently generating. NULL if
16831 we are not generating a DWARF procedure. */
16832 struct dwarf_procedure_info *dpi;
16833 /* True if an integral PLACEHOLDER_EXPR stands for the first argument passed
16834 by the consumer. Used for DW_TAG_generic_subrange attributes. */
16835 bool placeholder_arg;
16836 /* True if PLACEHOLDER_EXPR has been seen. */
16837 bool placeholder_seen;
16838 };
16839
16840 /* DWARF procedures generation
16841
16842 DWARF expressions (aka. location descriptions) are used to encode variable
16843 things such as sizes or offsets. Such computations can have redundant parts
16844 that can be factorized in order to reduce the size of the output debug
16845 information. This is the whole point of DWARF procedures.
16846
16847 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16848 already factorized into functions ("size functions") in order to handle very
16849 big and complex types. Such functions are quite simple: they have integral
16850 arguments, they return an integral result and their body contains only a
16851 return statement with arithmetic expressions. This is the only kind of
16852 function we are interested in translating into DWARF procedures, here.
16853
16854 DWARF expressions and DWARF procedures are executed using a stack, so we have
16855 to define some calling convention for them to interact. Let's say that:
16856
16857 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16858 all arguments in reverse order (right-to-left) so that when the DWARF
16859 procedure execution starts, the first argument is the top of the stack.
16860
16861 - Then, when returning, the DWARF procedure must have consumed all arguments
16862 on the stack, must have pushed the result and touched nothing else.
16863
16864 - Each integral argument and the integral result can be held in a
16865 single stack slot.
16866
16867 - We call "frame offset" the number of stack slots that are "under DWARF
16868 procedure control": it includes the arguments slots, the temporaries and
16869 the result slot. Thus, it is equal to the number of arguments when the
16870 procedure execution starts and must be equal to one (the result) when it
16871 returns. */
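 /* As a worked example (illustrative only): calling a two-argument DWARF
 procedure P with arguments A and B is emitted as
 <push B> <push A> DW_OP_call4 <P>
 so that A is on top of the stack when P starts executing. P consumes
 both argument slots and leaves a single slot holding the result, so its
 net effect on the stack depth is 1 - 2 = -1, which is the value recorded
 for it in dwarf_proc_stack_usage_map below. */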
16872
16873 /* Helper structure used when generating operations for a DWARF procedure. */
16874 struct dwarf_procedure_info
16875 {
16876 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16877 currently translated. */
16878 tree fndecl;
16879 /* The number of arguments FNDECL takes. */
16880 unsigned args_count;
16881 };
16882
16883 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16884 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16885 equate it to this DIE. */
16886
16887 static dw_die_ref
16888 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16889 dw_die_ref parent_die)
16890 {
16891 dw_die_ref dwarf_proc_die;
16892
16893 if ((dwarf_version < 3 && dwarf_strict)
16894 || location == NULL)
16895 return NULL;
16896
16897 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16898 if (fndecl)
16899 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16900 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16901 return dwarf_proc_die;
16902 }
16903
16904 /* Return whether TYPE is a supported type as a DWARF procedure argument
16905 type or return type (we handle only scalar types and pointer types that
16906 aren't wider than the DWARF expression evaluation stack). */
16907
16908 static bool
16909 is_handled_procedure_type (tree type)
16910 {
16911 return ((INTEGRAL_TYPE_P (type)
16912 || TREE_CODE (type) == OFFSET_TYPE
16913 || TREE_CODE (type) == POINTER_TYPE)
16914 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16915 }
16916
16917 /* Helper for resolve_args_picking: do the same but stop when coming across
16918 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16919 offset *before* evaluating the corresponding operation. */
16920
16921 static bool
16922 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16923 struct dwarf_procedure_info *dpi,
16924 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16925 {
16926 /* The "frame_offset" identifier is already used to name a macro... */
16927 unsigned frame_offset_ = initial_frame_offset;
16928 dw_loc_descr_ref l;
16929
16930 for (l = loc; l != NULL;)
16931 {
16932 bool existed;
16933 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16934
16935 /* If we already met this node, there is nothing to compute anymore. */
16936 if (existed)
16937 {
16938 /* Make sure that the stack size is consistent wherever the execution
16939 flow comes from. */
16940 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16941 break;
16942 }
16943 l_frame_offset = frame_offset_;
16944
16945 /* If needed, relocate the picking offset with respect to the frame
16946 offset. */
16947 if (l->frame_offset_rel)
16948 {
16949 unsigned HOST_WIDE_INT off;
16950 switch (l->dw_loc_opc)
16951 {
16952 case DW_OP_pick:
16953 off = l->dw_loc_oprnd1.v.val_unsigned;
16954 break;
16955 case DW_OP_dup:
16956 off = 0;
16957 break;
16958 case DW_OP_over:
16959 off = 1;
16960 break;
16961 default:
16962 gcc_unreachable ();
16963 }
16964 /* frame_offset_ is the size of the current stack frame, including
16965 incoming arguments. Besides, the arguments are pushed
16966 right-to-left. Thus, in order to access the Nth argument from
16967 this operation node, the picking has to skip temporaries *plus*
16968 one stack slot per argument (0 for the first one, 1 for the second
16969 one, etc.).
16970
16971 The targeted argument number (N) is already set as the operand,
16972 and the number of temporaries can be computed with:
16973 frame_offset_ - dpi->args_count */
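 /* A small worked example (numbers purely illustrative): with
 dpi->args_count == 3 and frame_offset_ == 5 (three arguments plus
 two temporaries), accessing argument N == 1 gives
 off = 1 + (5 - 3) = 3, i.e. DW_OP_pick 3. */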
16974 off += frame_offset_ - dpi->args_count;
16975
16976 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
16977 if (off > 255)
16978 return false;
16979
16980 if (off == 0)
16981 {
16982 l->dw_loc_opc = DW_OP_dup;
16983 l->dw_loc_oprnd1.v.val_unsigned = 0;
16984 }
16985 else if (off == 1)
16986 {
16987 l->dw_loc_opc = DW_OP_over;
16988 l->dw_loc_oprnd1.v.val_unsigned = 0;
16989 }
16990 else
16991 {
16992 l->dw_loc_opc = DW_OP_pick;
16993 l->dw_loc_oprnd1.v.val_unsigned = off;
16994 }
16995 }
16996
16997 /* Update frame_offset according to the effect the current operation has
16998 on the stack. */
16999 switch (l->dw_loc_opc)
17000 {
17001 case DW_OP_deref:
17002 case DW_OP_swap:
17003 case DW_OP_rot:
17004 case DW_OP_abs:
17005 case DW_OP_neg:
17006 case DW_OP_not:
17007 case DW_OP_plus_uconst:
17008 case DW_OP_skip:
17009 case DW_OP_reg0:
17010 case DW_OP_reg1:
17011 case DW_OP_reg2:
17012 case DW_OP_reg3:
17013 case DW_OP_reg4:
17014 case DW_OP_reg5:
17015 case DW_OP_reg6:
17016 case DW_OP_reg7:
17017 case DW_OP_reg8:
17018 case DW_OP_reg9:
17019 case DW_OP_reg10:
17020 case DW_OP_reg11:
17021 case DW_OP_reg12:
17022 case DW_OP_reg13:
17023 case DW_OP_reg14:
17024 case DW_OP_reg15:
17025 case DW_OP_reg16:
17026 case DW_OP_reg17:
17027 case DW_OP_reg18:
17028 case DW_OP_reg19:
17029 case DW_OP_reg20:
17030 case DW_OP_reg21:
17031 case DW_OP_reg22:
17032 case DW_OP_reg23:
17033 case DW_OP_reg24:
17034 case DW_OP_reg25:
17035 case DW_OP_reg26:
17036 case DW_OP_reg27:
17037 case DW_OP_reg28:
17038 case DW_OP_reg29:
17039 case DW_OP_reg30:
17040 case DW_OP_reg31:
17041 case DW_OP_bregx:
17042 case DW_OP_piece:
17043 case DW_OP_deref_size:
17044 case DW_OP_nop:
17045 case DW_OP_bit_piece:
17046 case DW_OP_implicit_value:
17047 case DW_OP_stack_value:
17048 break;
17049
17050 case DW_OP_addr:
17051 case DW_OP_const1u:
17052 case DW_OP_const1s:
17053 case DW_OP_const2u:
17054 case DW_OP_const2s:
17055 case DW_OP_const4u:
17056 case DW_OP_const4s:
17057 case DW_OP_const8u:
17058 case DW_OP_const8s:
17059 case DW_OP_constu:
17060 case DW_OP_consts:
17061 case DW_OP_dup:
17062 case DW_OP_over:
17063 case DW_OP_pick:
17064 case DW_OP_lit0:
17065 case DW_OP_lit1:
17066 case DW_OP_lit2:
17067 case DW_OP_lit3:
17068 case DW_OP_lit4:
17069 case DW_OP_lit5:
17070 case DW_OP_lit6:
17071 case DW_OP_lit7:
17072 case DW_OP_lit8:
17073 case DW_OP_lit9:
17074 case DW_OP_lit10:
17075 case DW_OP_lit11:
17076 case DW_OP_lit12:
17077 case DW_OP_lit13:
17078 case DW_OP_lit14:
17079 case DW_OP_lit15:
17080 case DW_OP_lit16:
17081 case DW_OP_lit17:
17082 case DW_OP_lit18:
17083 case DW_OP_lit19:
17084 case DW_OP_lit20:
17085 case DW_OP_lit21:
17086 case DW_OP_lit22:
17087 case DW_OP_lit23:
17088 case DW_OP_lit24:
17089 case DW_OP_lit25:
17090 case DW_OP_lit26:
17091 case DW_OP_lit27:
17092 case DW_OP_lit28:
17093 case DW_OP_lit29:
17094 case DW_OP_lit30:
17095 case DW_OP_lit31:
17096 case DW_OP_breg0:
17097 case DW_OP_breg1:
17098 case DW_OP_breg2:
17099 case DW_OP_breg3:
17100 case DW_OP_breg4:
17101 case DW_OP_breg5:
17102 case DW_OP_breg6:
17103 case DW_OP_breg7:
17104 case DW_OP_breg8:
17105 case DW_OP_breg9:
17106 case DW_OP_breg10:
17107 case DW_OP_breg11:
17108 case DW_OP_breg12:
17109 case DW_OP_breg13:
17110 case DW_OP_breg14:
17111 case DW_OP_breg15:
17112 case DW_OP_breg16:
17113 case DW_OP_breg17:
17114 case DW_OP_breg18:
17115 case DW_OP_breg19:
17116 case DW_OP_breg20:
17117 case DW_OP_breg21:
17118 case DW_OP_breg22:
17119 case DW_OP_breg23:
17120 case DW_OP_breg24:
17121 case DW_OP_breg25:
17122 case DW_OP_breg26:
17123 case DW_OP_breg27:
17124 case DW_OP_breg28:
17125 case DW_OP_breg29:
17126 case DW_OP_breg30:
17127 case DW_OP_breg31:
17128 case DW_OP_fbreg:
17129 case DW_OP_push_object_address:
17130 case DW_OP_call_frame_cfa:
17131 case DW_OP_GNU_variable_value:
17132 ++frame_offset_;
17133 break;
17134
17135 case DW_OP_drop:
17136 case DW_OP_xderef:
17137 case DW_OP_and:
17138 case DW_OP_div:
17139 case DW_OP_minus:
17140 case DW_OP_mod:
17141 case DW_OP_mul:
17142 case DW_OP_or:
17143 case DW_OP_plus:
17144 case DW_OP_shl:
17145 case DW_OP_shr:
17146 case DW_OP_shra:
17147 case DW_OP_xor:
17148 case DW_OP_bra:
17149 case DW_OP_eq:
17150 case DW_OP_ge:
17151 case DW_OP_gt:
17152 case DW_OP_le:
17153 case DW_OP_lt:
17154 case DW_OP_ne:
17155 case DW_OP_regx:
17156 case DW_OP_xderef_size:
17157 --frame_offset_;
17158 break;
17159
17160 case DW_OP_call2:
17161 case DW_OP_call4:
17162 case DW_OP_call_ref:
17163 {
17164 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17165 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17166
17167 if (stack_usage == NULL)
17168 return false;
17169 frame_offset_ += *stack_usage;
17170 break;
17171 }
17172
17173 case DW_OP_implicit_pointer:
17174 case DW_OP_entry_value:
17175 case DW_OP_const_type:
17176 case DW_OP_regval_type:
17177 case DW_OP_deref_type:
17178 case DW_OP_convert:
17179 case DW_OP_reinterpret:
17180 case DW_OP_form_tls_address:
17181 case DW_OP_GNU_push_tls_address:
17182 case DW_OP_GNU_uninit:
17183 case DW_OP_GNU_encoded_addr:
17184 case DW_OP_GNU_implicit_pointer:
17185 case DW_OP_GNU_entry_value:
17186 case DW_OP_GNU_const_type:
17187 case DW_OP_GNU_regval_type:
17188 case DW_OP_GNU_deref_type:
17189 case DW_OP_GNU_convert:
17190 case DW_OP_GNU_reinterpret:
17191 case DW_OP_GNU_parameter_ref:
17192 /* loc_list_from_tree will probably not output these operations for
17193 size functions, so assume they will not appear here. */
17194 /* Fall through... */
17195
17196 default:
17197 gcc_unreachable ();
17198 }
17199
17200 /* Now, follow the control flow (except subroutine calls). */
17201 switch (l->dw_loc_opc)
17202 {
17203 case DW_OP_bra:
17204 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17205 frame_offsets))
17206 return false;
17207 /* Fall through. */
17208
17209 case DW_OP_skip:
17210 l = l->dw_loc_oprnd1.v.val_loc;
17211 break;
17212
17213 case DW_OP_stack_value:
17214 return true;
17215
17216 default:
17217 l = l->dw_loc_next;
17218 break;
17219 }
17220 }
17221
17222 return true;
17223 }
17224
17225 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17226 operations) in order to resolve the operand of DW_OP_pick operations that
17227 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17228 offset *before* LOC is executed. Return whether all relocations were
17229 successful. */
17230
17231 static bool
17232 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17233 struct dwarf_procedure_info *dpi)
17234 {
17235 /* Associate to all visited operations the frame offset *before* evaluating
17236 this operation. */
17237 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17238
17239 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17240 frame_offsets);
17241 }
17242
17243 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17244 Return NULL if it is not possible. */
17245
17246 static dw_die_ref
17247 function_to_dwarf_procedure (tree fndecl)
17248 {
17249 struct loc_descr_context ctx;
17250 struct dwarf_procedure_info dpi;
17251 dw_die_ref dwarf_proc_die;
17252 tree tree_body = DECL_SAVED_TREE (fndecl);
17253 dw_loc_descr_ref loc_body, epilogue;
17254
17255 tree cursor;
17256 unsigned i;
17257
17258 /* Do not generate multiple DWARF procedures for the same function
17259 declaration. */
17260 dwarf_proc_die = lookup_decl_die (fndecl);
17261 if (dwarf_proc_die != NULL)
17262 return dwarf_proc_die;
17263
17264 /* DWARF procedures are available starting with the DWARFv3 standard. */
17265 if (dwarf_version < 3 && dwarf_strict)
17266 return NULL;
17267
17268 /* We handle only functions for which we still have a body, that return a
17269 supported type and that take arguments with supported types. Note that
17270 there is no point translating functions that return nothing. */
17271 if (tree_body == NULL_TREE
17272 || DECL_RESULT (fndecl) == NULL_TREE
17273 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17274 return NULL;
17275
17276 for (cursor = DECL_ARGUMENTS (fndecl);
17277 cursor != NULL_TREE;
17278 cursor = TREE_CHAIN (cursor))
17279 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17280 return NULL;
17281
17282 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17283 if (TREE_CODE (tree_body) != RETURN_EXPR)
17284 return NULL;
17285 tree_body = TREE_OPERAND (tree_body, 0);
17286 if (TREE_CODE (tree_body) != MODIFY_EXPR
17287 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17288 return NULL;
17289 tree_body = TREE_OPERAND (tree_body, 1);
17290
17291 /* Try to translate the body expression itself. Note that this will probably
17292 cause an infinite recursion if its call graph has a cycle. This is very
17293 unlikely for size functions, however, so don't bother with such things at
17294 the moment. */
17295 ctx.context_type = NULL_TREE;
17296 ctx.base_decl = NULL_TREE;
17297 ctx.dpi = &dpi;
17298 ctx.placeholder_arg = false;
17299 ctx.placeholder_seen = false;
17300 dpi.fndecl = fndecl;
17301 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17302 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17303 if (!loc_body)
17304 return NULL;
17305
17306 /* After evaluating all operands in "loc_body", we should still have on the
17307 stack all arguments plus the desired function result (top of the stack).
17308 Generate code in order to keep only the result in our stack frame. */
17309 epilogue = NULL;
17310 for (i = 0; i < dpi.args_count; ++i)
17311 {
17312 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17313 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17314 op_couple->dw_loc_next->dw_loc_next = epilogue;
17315 epilogue = op_couple;
17316 }
17317 add_loc_descr (&loc_body, epilogue);
17318 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17319 return NULL;
17320
17321 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17322 because they are considered useful. Now that there is an epilogue, they
17323 are not useful anymore, so give it another try. */
17324 loc_descr_without_nops (loc_body);
17325
17326 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17327 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17328 though, given that size functions do not come from source, so they should
17329 not have a dedicated DW_TAG_subprogram DIE. */
17330 dwarf_proc_die
17331 = new_dwarf_proc_die (loc_body, fndecl,
17332 get_context_die (DECL_CONTEXT (fndecl)));
17333
17334 /* The called DWARF procedure consumes one stack slot per argument and
17335 returns one stack slot. */
17336 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17337
17338 return dwarf_proc_die;
17339 }
17340
17341
17342 /* Generate a Dwarf location list representing LOC.
17343 If WANT_ADDRESS is false, an expression computing the value of LOC will be returned.
17344 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17345 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17346 description will be returned (i.e. DW_OP_reg can be used
17347 to refer to register values).
17348
17349 CONTEXT provides information to customize the location descriptions
17350 generation. Its context_type field specifies what type is implicitly
17351 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17352 will not be generated.
17353
17354 Its DPI field determines whether we are generating a DWARF expression for a
17355 DWARF procedure, so PARM_DECL references are processed specifically.
17356
17357 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17358 and dpi fields were null. */
17359
17360 static dw_loc_list_ref
17361 loc_list_from_tree_1 (tree loc, int want_address,
17362 struct loc_descr_context *context)
17363 {
17364 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17365 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17366 int have_address = 0;
17367 enum dwarf_location_atom op;
17368
17369 /* ??? Most of the time we do not take proper care of sign/zero
17370 extending the values. Hopefully this won't be a real
17371 problem... */
17372
17373 if (context != NULL
17374 && context->base_decl == loc
17375 && want_address == 0)
17376 {
17377 if (dwarf_version >= 3 || !dwarf_strict)
17378 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17379 NULL, NULL, NULL);
17380 else
17381 return NULL;
17382 }
17383
17384 switch (TREE_CODE (loc))
17385 {
17386 case ERROR_MARK:
17387 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17388 return 0;
17389
17390 case PLACEHOLDER_EXPR:
17391 /* This case involves extracting fields from an object to determine the
17392 position of other fields. It is supposed to appear only as the first
17393 operand of COMPONENT_REF nodes and to reference precisely the type
17394 that the context allows. */
17395 if (context != NULL
17396 && TREE_TYPE (loc) == context->context_type
17397 && want_address >= 1)
17398 {
17399 if (dwarf_version >= 3 || !dwarf_strict)
17400 {
17401 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17402 have_address = 1;
17403 break;
17404 }
17405 else
17406 return NULL;
17407 }
17408 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17409 the single argument passed by the consumer. */
17410 else if (context != NULL
17411 && context->placeholder_arg
17412 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17413 && want_address == 0)
17414 {
17415 ret = new_loc_descr (DW_OP_pick, 0, 0);
17416 ret->frame_offset_rel = 1;
17417 context->placeholder_seen = true;
17418 break;
17419 }
17420 else
17421 expansion_failed (loc, NULL_RTX,
17422 "PLACEHOLDER_EXPR for an unexpected type");
17423 break;
17424
17425 case CALL_EXPR:
17426 {
17427 const int nargs = call_expr_nargs (loc);
17428 tree callee = get_callee_fndecl (loc);
17429 int i;
17430 dw_die_ref dwarf_proc;
17431
17432 if (callee == NULL_TREE)
17433 goto call_expansion_failed;
17434
17435 /* We handle only functions that return an integer. */
17436 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17437 goto call_expansion_failed;
17438
17439 dwarf_proc = function_to_dwarf_procedure (callee);
17440 if (dwarf_proc == NULL)
17441 goto call_expansion_failed;
17442
17443 /* Evaluate arguments right-to-left so that the first argument will
17444 be the top-most one on the stack. */
17445 for (i = nargs - 1; i >= 0; --i)
17446 {
17447 dw_loc_descr_ref loc_descr
17448 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17449 context);
17450
17451 if (loc_descr == NULL)
17452 goto call_expansion_failed;
17453
17454 add_loc_descr (&ret, loc_descr);
17455 }
17456
17457 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17458 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17459 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17460 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17461 add_loc_descr (&ret, ret1);
17462 break;
17463
17464 call_expansion_failed:
17465 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17466 	/* We could not translate the call into a DWARF expression.  */
17467 return 0;
17468 }
17469
17470 case PREINCREMENT_EXPR:
17471 case PREDECREMENT_EXPR:
17472 case POSTINCREMENT_EXPR:
17473 case POSTDECREMENT_EXPR:
17474       expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17475 /* There are no opcodes for these operations. */
17476 return 0;
17477
17478 case ADDR_EXPR:
17479       /* If we already want an address, see if there is an INDIRECT_REF
17480 	 inside, e.g. for &this->field.  */
17481 if (want_address)
17482 {
17483 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17484 (loc, want_address == 2, context);
17485 if (list_ret)
17486 have_address = 1;
17487 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17488 && (ret = cst_pool_loc_descr (loc)))
17489 have_address = 1;
17490 }
17491 /* Otherwise, process the argument and look for the address. */
17492 if (!list_ret && !ret)
17493 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17494 else
17495 {
17496 if (want_address)
17497 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17498 return NULL;
17499 }
17500 break;
17501
17502 case VAR_DECL:
17503 if (DECL_THREAD_LOCAL_P (loc))
17504 {
17505 rtx rtl;
17506 enum dwarf_location_atom tls_op;
17507 enum dtprel_bool dtprel = dtprel_false;
17508
17509 if (targetm.have_tls)
17510 {
17511 /* If this is not defined, we have no way to emit the
17512 data. */
17513 if (!targetm.asm_out.output_dwarf_dtprel)
17514 return 0;
17515
17516 /* The way DW_OP_GNU_push_tls_address is specified, we
17517 can only look up addresses of objects in the current
17518 		 module.  We used DW_OP_addr as the first op, but that's
17519 wrong, because DW_OP_addr is relocated by the debug
17520 info consumer, while DW_OP_GNU_push_tls_address
17521 operand shouldn't be. */
17522 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17523 return 0;
17524 dtprel = dtprel_true;
17525 /* We check for DWARF 5 here because gdb did not implement
17526 DW_OP_form_tls_address until after 7.12. */
17527 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17528 : DW_OP_GNU_push_tls_address);
17529 }
17530 else
17531 {
17532 if (!targetm.emutls.debug_form_tls_address
17533 || !(dwarf_version >= 3 || !dwarf_strict))
17534 return 0;
17535 /* We stuffed the control variable into the DECL_VALUE_EXPR
17536 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17537 no longer appear in gimple code. We used the control
17538 	       variable specifically so that we could pick it up here.  */
17539 loc = DECL_VALUE_EXPR (loc);
17540 tls_op = DW_OP_form_tls_address;
17541 }
17542
17543 rtl = rtl_for_decl_location (loc);
17544 if (rtl == NULL_RTX)
17545 return 0;
17546
17547 if (!MEM_P (rtl))
17548 return 0;
17549 rtl = XEXP (rtl, 0);
17550 if (! CONSTANT_P (rtl))
17551 return 0;
17552
17553 ret = new_addr_loc_descr (rtl, dtprel);
17554 ret1 = new_loc_descr (tls_op, 0, 0);
17555 add_loc_descr (&ret, ret1);
17556
17557 have_address = 1;
17558 break;
17559 }
17560 /* FALLTHRU */
17561
17562 case PARM_DECL:
17563 if (context != NULL && context->dpi != NULL
17564 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17565 {
17566 /* We are generating code for a DWARF procedure and we want to access
17567 one of its arguments: find the appropriate argument offset and let
17568 the resolve_args_picking pass compute the offset that complies
17569 with the stack frame size. */
17570 unsigned i = 0;
17571 tree cursor;
17572
17573 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17574 cursor != NULL_TREE && cursor != loc;
17575 cursor = TREE_CHAIN (cursor), ++i)
17576 ;
17577 /* If we are translating a DWARF procedure, all referenced parameters
17578 must belong to the current function. */
17579 gcc_assert (cursor != NULL_TREE);
17580
17581 ret = new_loc_descr (DW_OP_pick, i, 0);
17582 ret->frame_offset_rel = 1;
17583 break;
17584 }
17585 /* FALLTHRU */
17586
17587 case RESULT_DECL:
17588 if (DECL_HAS_VALUE_EXPR_P (loc))
17589 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17590 want_address, context);
17591 /* FALLTHRU */
17592
17593 case FUNCTION_DECL:
17594 {
17595 rtx rtl;
17596 var_loc_list *loc_list = lookup_decl_loc (loc);
17597
17598 if (loc_list && loc_list->first)
17599 {
17600 list_ret = dw_loc_list (loc_list, loc, want_address);
17601 have_address = want_address != 0;
17602 break;
17603 }
17604 rtl = rtl_for_decl_location (loc);
17605 if (rtl == NULL_RTX)
17606 {
17607 if (TREE_CODE (loc) != FUNCTION_DECL
17608 && early_dwarf
17609 && current_function_decl
17610 && want_address != 1
17611 && ! DECL_IGNORED_P (loc)
17612 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
17613 || POINTER_TYPE_P (TREE_TYPE (loc)))
17614 && DECL_CONTEXT (loc) == current_function_decl
17615 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
17616 <= DWARF2_ADDR_SIZE))
17617 {
17618 dw_die_ref ref = lookup_decl_die (loc);
17619 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
17620 if (ref)
17621 {
17622 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17623 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
17624 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
17625 }
17626 else
17627 {
17628 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
17629 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
17630 }
17631 break;
17632 }
17633 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17634 return 0;
17635 }
17636 else if (CONST_INT_P (rtl))
17637 {
17638 HOST_WIDE_INT val = INTVAL (rtl);
17639 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17640 val &= GET_MODE_MASK (DECL_MODE (loc));
17641 ret = int_loc_descriptor (val);
17642 }
17643 else if (GET_CODE (rtl) == CONST_STRING)
17644 {
17645 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17646 return 0;
17647 }
17648 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17649 ret = new_addr_loc_descr (rtl, dtprel_false);
17650 else
17651 {
17652 machine_mode mode, mem_mode;
17653
17654 /* Certain constructs can only be represented at top-level. */
17655 if (want_address == 2)
17656 {
17657 ret = loc_descriptor (rtl, VOIDmode,
17658 VAR_INIT_STATUS_INITIALIZED);
17659 have_address = 1;
17660 }
17661 else
17662 {
17663 mode = GET_MODE (rtl);
17664 mem_mode = VOIDmode;
17665 if (MEM_P (rtl))
17666 {
17667 mem_mode = mode;
17668 mode = get_address_mode (rtl);
17669 rtl = XEXP (rtl, 0);
17670 have_address = 1;
17671 }
17672 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17673 VAR_INIT_STATUS_INITIALIZED);
17674 }
17675 if (!ret)
17676 expansion_failed (loc, rtl,
17677 "failed to produce loc descriptor for rtl");
17678 }
17679 }
17680 break;
17681
17682 case MEM_REF:
17683 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17684 {
17685 have_address = 1;
17686 goto do_plus;
17687 }
17688 /* Fallthru. */
17689 case INDIRECT_REF:
17690 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17691 have_address = 1;
17692 break;
17693
17694 case TARGET_MEM_REF:
17695 case SSA_NAME:
17696 case DEBUG_EXPR_DECL:
17697 return NULL;
17698
17699 case COMPOUND_EXPR:
17700 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17701 context);
17702
17703 CASE_CONVERT:
17704 case VIEW_CONVERT_EXPR:
17705 case SAVE_EXPR:
17706 case MODIFY_EXPR:
17707 case NON_LVALUE_EXPR:
17708 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17709 context);
17710
17711 case COMPONENT_REF:
17712 case BIT_FIELD_REF:
17713 case ARRAY_REF:
17714 case ARRAY_RANGE_REF:
17715 case REALPART_EXPR:
17716 case IMAGPART_EXPR:
17717 {
17718 tree obj, offset;
17719 poly_int64 bitsize, bitpos, bytepos;
17720 machine_mode mode;
17721 int unsignedp, reversep, volatilep = 0;
17722
17723 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17724 &unsignedp, &reversep, &volatilep);
17725
17726 gcc_assert (obj != loc);
17727
17728 list_ret = loc_list_from_tree_1 (obj,
17729 want_address == 2
17730 && known_eq (bitpos, 0)
17731 && !offset ? 2 : 1,
17732 context);
17733 	/* TODO: We can extract the value of a small expression via shifting even
17734 for nonzero bitpos. */
17735 if (list_ret == 0)
17736 return 0;
17737 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
17738 || !multiple_p (bitsize, BITS_PER_UNIT))
17739 {
17740 expansion_failed (loc, NULL_RTX,
17741 "bitfield access");
17742 return 0;
17743 }
17744
17745 if (offset != NULL_TREE)
17746 {
17747 /* Variable offset. */
17748 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17749 if (list_ret1 == 0)
17750 return 0;
17751 add_loc_list (&list_ret, list_ret1);
17752 if (!list_ret)
17753 return 0;
17754 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17755 }
17756
17757 HOST_WIDE_INT value;
17758 if (bytepos.is_constant (&value) && value > 0)
17759 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
17760 value, 0));
17761 else if (maybe_ne (bytepos, 0))
17762 loc_list_plus_const (list_ret, bytepos);
17763
17764 have_address = 1;
17765 break;
17766 }
17767
17768 case INTEGER_CST:
17769 if ((want_address || !tree_fits_shwi_p (loc))
17770 && (ret = cst_pool_loc_descr (loc)))
17771 have_address = 1;
17772 else if (want_address == 2
17773 && tree_fits_shwi_p (loc)
17774 && (ret = address_of_int_loc_descriptor
17775 (int_size_in_bytes (TREE_TYPE (loc)),
17776 tree_to_shwi (loc))))
17777 have_address = 1;
17778 else if (tree_fits_shwi_p (loc))
17779 ret = int_loc_descriptor (tree_to_shwi (loc));
17780 else if (tree_fits_uhwi_p (loc))
17781 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17782 else
17783 {
17784 expansion_failed (loc, NULL_RTX,
17785 "Integer operand is not host integer");
17786 return 0;
17787 }
17788 break;
17789
17790 case CONSTRUCTOR:
17791 case REAL_CST:
17792 case STRING_CST:
17793 case COMPLEX_CST:
17794 if ((ret = cst_pool_loc_descr (loc)))
17795 have_address = 1;
17796 else if (TREE_CODE (loc) == CONSTRUCTOR)
17797 {
17798 tree type = TREE_TYPE (loc);
17799 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17800 unsigned HOST_WIDE_INT offset = 0;
17801 unsigned HOST_WIDE_INT cnt;
17802 constructor_elt *ce;
17803
17804 if (TREE_CODE (type) == RECORD_TYPE)
17805 {
17806 /* This is very limited, but it's enough to output
17807 pointers to member functions, as long as the
17808 referenced function is defined in the current
17809 translation unit. */
17810 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17811 {
17812 tree val = ce->value;
17813
17814 tree field = ce->index;
17815
17816 if (val)
17817 STRIP_NOPS (val);
17818
17819 if (!field || DECL_BIT_FIELD (field))
17820 {
17821 expansion_failed (loc, NULL_RTX,
17822 "bitfield in record type constructor");
17823 size = offset = (unsigned HOST_WIDE_INT)-1;
17824 ret = NULL;
17825 break;
17826 }
17827
17828 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17829 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17830 gcc_assert (pos + fieldsize <= size);
17831 if (pos < offset)
17832 {
17833 expansion_failed (loc, NULL_RTX,
17834 "out-of-order fields in record constructor");
17835 size = offset = (unsigned HOST_WIDE_INT)-1;
17836 ret = NULL;
17837 break;
17838 }
17839 if (pos > offset)
17840 {
17841 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17842 add_loc_descr (&ret, ret1);
17843 offset = pos;
17844 }
17845 if (val && fieldsize != 0)
17846 {
17847 ret1 = loc_descriptor_from_tree (val, want_address, context);
17848 if (!ret1)
17849 {
17850 expansion_failed (loc, NULL_RTX,
17851 "unsupported expression in field");
17852 size = offset = (unsigned HOST_WIDE_INT)-1;
17853 ret = NULL;
17854 break;
17855 }
17856 add_loc_descr (&ret, ret1);
17857 }
17858 if (fieldsize)
17859 {
17860 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17861 add_loc_descr (&ret, ret1);
17862 offset = pos + fieldsize;
17863 }
17864 }
17865
17866 if (offset != size)
17867 {
17868 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17869 add_loc_descr (&ret, ret1);
17870 offset = size;
17871 }
17872
17873 have_address = !!want_address;
17874 }
17875 else
17876 expansion_failed (loc, NULL_RTX,
17877 "constructor of non-record type");
17878 }
17879 else
17880 	/* TODO: We could construct small constants here using int_loc_descriptor.  */
17881 expansion_failed (loc, NULL_RTX,
17882 "constructor or constant not in constant pool");
17883 break;
17884
17885 case TRUTH_AND_EXPR:
17886 case TRUTH_ANDIF_EXPR:
17887 case BIT_AND_EXPR:
17888 op = DW_OP_and;
17889 goto do_binop;
17890
17891 case TRUTH_XOR_EXPR:
17892 case BIT_XOR_EXPR:
17893 op = DW_OP_xor;
17894 goto do_binop;
17895
17896 case TRUTH_OR_EXPR:
17897 case TRUTH_ORIF_EXPR:
17898 case BIT_IOR_EXPR:
17899 op = DW_OP_or;
17900 goto do_binop;
17901
17902 case FLOOR_DIV_EXPR:
17903 case CEIL_DIV_EXPR:
17904 case ROUND_DIV_EXPR:
17905 case TRUNC_DIV_EXPR:
17906 case EXACT_DIV_EXPR:
17907 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17908 return 0;
17909 op = DW_OP_div;
17910 goto do_binop;
17911
17912 case MINUS_EXPR:
17913 op = DW_OP_minus;
17914 goto do_binop;
17915
17916 case FLOOR_MOD_EXPR:
17917 case CEIL_MOD_EXPR:
17918 case ROUND_MOD_EXPR:
17919 case TRUNC_MOD_EXPR:
17920 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17921 {
17922 op = DW_OP_mod;
17923 goto do_binop;
17924 }
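      /* For signed types, compute the remainder as a - (a / b) * b using the
	 signed DW_OP_div: with a and b on the stack, emit DW_OP_over;
	 DW_OP_over; DW_OP_div; DW_OP_mul; DW_OP_minus.  */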
17925 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17926 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17927 if (list_ret == 0 || list_ret1 == 0)
17928 return 0;
17929
17930 add_loc_list (&list_ret, list_ret1);
17931 if (list_ret == 0)
17932 return 0;
17933 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17934 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17935 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17936 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17937 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17938 break;
17939
17940 case MULT_EXPR:
17941 op = DW_OP_mul;
17942 goto do_binop;
17943
17944 case LSHIFT_EXPR:
17945 op = DW_OP_shl;
17946 goto do_binop;
17947
17948 case RSHIFT_EXPR:
17949 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17950 goto do_binop;
17951
17952 case POINTER_PLUS_EXPR:
17953 case PLUS_EXPR:
17954 do_plus:
17955 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17956 {
17957 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17958 smarter to encode their opposite. The DW_OP_plus_uconst operation
17959 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17960 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17961 bytes, Y being the size of the operation that pushes the opposite
17962 of the addend. So let's choose the smallest representation. */
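	  /* For instance, with DWARF2_ADDR_SIZE == 8, an addend of -1 would
	     need a 10-byte ULEB128 operand for DW_OP_plus_uconst (11 bytes
	     in total), whereas DW_OP_lit1; DW_OP_minus takes only 2 bytes.  */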
17963 const tree tree_addend = TREE_OPERAND (loc, 1);
17964 offset_int wi_addend;
17965 HOST_WIDE_INT shwi_addend;
17966 dw_loc_descr_ref loc_naddend;
17967
17968 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17969 if (list_ret == 0)
17970 return 0;
17971
17972 /* Try to get the literal to push. It is the opposite of the addend,
17973 so as we rely on wrapping during DWARF evaluation, first decode
17974 the literal as a "DWARF-sized" signed number. */
17975 wi_addend = wi::to_offset (tree_addend);
17976 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
17977 shwi_addend = wi_addend.to_shwi ();
17978 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
17979 ? int_loc_descriptor (-shwi_addend)
17980 : NULL;
17981
17982 if (loc_naddend != NULL
17983 && ((unsigned) size_of_uleb128 (shwi_addend)
17984 > size_of_loc_descr (loc_naddend)))
17985 {
17986 add_loc_descr_to_each (list_ret, loc_naddend);
17987 add_loc_descr_to_each (list_ret,
17988 new_loc_descr (DW_OP_minus, 0, 0));
17989 }
17990 else
17991 {
17992 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
17993 {
17994 loc_naddend = loc_cur;
17995 loc_cur = loc_cur->dw_loc_next;
17996 ggc_free (loc_naddend);
17997 }
17998 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
17999 }
18000 break;
18001 }
18002
18003 op = DW_OP_plus;
18004 goto do_binop;
18005
18006 case LE_EXPR:
18007 op = DW_OP_le;
18008 goto do_comp_binop;
18009
18010 case GE_EXPR:
18011 op = DW_OP_ge;
18012 goto do_comp_binop;
18013
18014 case LT_EXPR:
18015 op = DW_OP_lt;
18016 goto do_comp_binop;
18017
18018 case GT_EXPR:
18019 op = DW_OP_gt;
18020 goto do_comp_binop;
18021
18022 do_comp_binop:
18023 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18024 {
18025 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18026 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18027 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18028 TREE_CODE (loc));
18029 break;
18030 }
18031 else
18032 goto do_binop;
18033
18034 case EQ_EXPR:
18035 op = DW_OP_eq;
18036 goto do_binop;
18037
18038 case NE_EXPR:
18039 op = DW_OP_ne;
18040 goto do_binop;
18041
18042 do_binop:
18043 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18044 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18045 if (list_ret == 0 || list_ret1 == 0)
18046 return 0;
18047
18048 add_loc_list (&list_ret, list_ret1);
18049 if (list_ret == 0)
18050 return 0;
18051 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18052 break;
18053
18054 case TRUTH_NOT_EXPR:
18055 case BIT_NOT_EXPR:
18056 op = DW_OP_not;
18057 goto do_unop;
18058
18059 case ABS_EXPR:
18060 op = DW_OP_abs;
18061 goto do_unop;
18062
18063 case NEGATE_EXPR:
18064 op = DW_OP_neg;
18065 goto do_unop;
18066
18067 do_unop:
18068 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18069 if (list_ret == 0)
18070 return 0;
18071
18072 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18073 break;
18074
18075 case MIN_EXPR:
18076 case MAX_EXPR:
18077 {
18078 const enum tree_code code =
18079 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18080
18081 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18082 build2 (code, integer_type_node,
18083 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18084 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18085 }
18086
18087 /* fall through */
18088
18089 case COND_EXPR:
18090 {
18091 dw_loc_descr_ref lhs
18092 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18093 dw_loc_list_ref rhs
18094 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18095 dw_loc_descr_ref bra_node, jump_node, tmp;
18096
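	/* The expression built below has the layout:
	   <cond>; DW_OP_bra L1; <else-value>; DW_OP_skip L2;
	   L1: <then-value>; L2: DW_OP_nop.  */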
18097 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18098 if (list_ret == 0 || lhs == 0 || rhs == 0)
18099 return 0;
18100
18101 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18102 add_loc_descr_to_each (list_ret, bra_node);
18103
18104 add_loc_list (&list_ret, rhs);
18105 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18106 add_loc_descr_to_each (list_ret, jump_node);
18107
18108 add_loc_descr_to_each (list_ret, lhs);
18109 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18110 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18111
18112 /* ??? Need a node to point the skip at. Use a nop. */
18113 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18114 add_loc_descr_to_each (list_ret, tmp);
18115 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18116 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18117 }
18118 break;
18119
18120 case FIX_TRUNC_EXPR:
18121 return 0;
18122
18123 default:
18124 /* Leave front-end specific codes as simply unknown. This comes
18125 up, for instance, with the C STMT_EXPR. */
18126 if ((unsigned int) TREE_CODE (loc)
18127 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18128 {
18129 expansion_failed (loc, NULL_RTX,
18130 "language specific tree node");
18131 return 0;
18132 }
18133
18134       /* Otherwise this is a generic code; we should just list all of
18135 these explicitly. We forgot one. */
18136 if (flag_checking)
18137 gcc_unreachable ();
18138
18139 /* In a release build, we want to degrade gracefully: better to
18140 generate incomplete debugging information than to crash. */
18141 return NULL;
18142 }
18143
18144 if (!ret && !list_ret)
18145 return 0;
18146
18147 if (want_address == 2 && !have_address
18148 && (dwarf_version >= 4 || !dwarf_strict))
18149 {
18150 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18151 {
18152 expansion_failed (loc, NULL_RTX,
18153 "DWARF address size mismatch");
18154 return 0;
18155 }
18156 if (ret)
18157 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18158 else
18159 add_loc_descr_to_each (list_ret,
18160 new_loc_descr (DW_OP_stack_value, 0, 0));
18161 have_address = 1;
18162 }
18163 /* Show if we can't fill the request for an address. */
18164 if (want_address && !have_address)
18165 {
18166 expansion_failed (loc, NULL_RTX,
18167 "Want address and only have value");
18168 return 0;
18169 }
18170
18171 gcc_assert (!ret || !list_ret);
18172
18173 /* If we've got an address and don't want one, dereference. */
18174 if (!want_address && have_address)
18175 {
18176 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18177
18178 if (size > DWARF2_ADDR_SIZE || size == -1)
18179 {
18180 expansion_failed (loc, NULL_RTX,
18181 "DWARF address size mismatch");
18182 return 0;
18183 }
18184 else if (size == DWARF2_ADDR_SIZE)
18185 op = DW_OP_deref;
18186 else
18187 op = DW_OP_deref_size;
18188
18189 if (ret)
18190 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18191 else
18192 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18193 }
18194 if (ret)
18195 list_ret = new_loc_list (ret, NULL, NULL, NULL);
18196
18197 return list_ret;
18198 }
18199
18200 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18201 expressions. */
18202
18203 static dw_loc_list_ref
18204 loc_list_from_tree (tree loc, int want_address,
18205 struct loc_descr_context *context)
18206 {
18207 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18208
18209 for (dw_loc_list_ref loc_cur = result;
18210 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18211 loc_descr_without_nops (loc_cur->expr);
18212 return result;
18213 }
18214
18215 /* Same as above, but return only a single location expression.  */
18216 static dw_loc_descr_ref
18217 loc_descriptor_from_tree (tree loc, int want_address,
18218 struct loc_descr_context *context)
18219 {
18220 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18221 if (!ret)
18222 return NULL;
18223 if (ret->dw_loc_next)
18224 {
18225 expansion_failed (loc, NULL_RTX,
18226 "Location list where only loc descriptor needed");
18227 return NULL;
18228 }
18229 return ret->expr;
18230 }
18231
18232 /* Given a value, round it up to the lowest multiple of `boundary'
18233 which is not less than the value itself. */
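/* For instance, ceiling (10, 8) and ceiling (16, 8) both yield 16.  */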
18234
18235 static inline HOST_WIDE_INT
18236 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18237 {
18238 return (((value + boundary - 1) / boundary) * boundary);
18239 }
18240
18241 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18242 pointer to the declared type for the relevant field variable, or return
18243 `integer_type_node' if the given node turns out to be an
18244 ERROR_MARK node. */
18245
18246 static inline tree
18247 field_type (const_tree decl)
18248 {
18249 tree type;
18250
18251 if (TREE_CODE (decl) == ERROR_MARK)
18252 return integer_type_node;
18253
18254 type = DECL_BIT_FIELD_TYPE (decl);
18255 if (type == NULL_TREE)
18256 type = TREE_TYPE (decl);
18257
18258 return type;
18259 }
18260
18261 /* Given a pointer to a tree node, return the alignment in bits for
18262 it, or else return BITS_PER_WORD if the node actually turns out to
18263 be an ERROR_MARK node. */
18264
18265 static inline unsigned
18266 simple_type_align_in_bits (const_tree type)
18267 {
18268 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18269 }
18270
18271 static inline unsigned
18272 simple_decl_align_in_bits (const_tree decl)
18273 {
18274 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18275 }
18276
18277 /* Return the result of rounding T up to ALIGN. */
18278
18279 static inline offset_int
18280 round_up_to_align (const offset_int &t, unsigned int align)
18281 {
18282 return wi::udiv_trunc (t + align - 1, align) * align;
18283 }
18284
18285 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18286 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18287 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18288 if we fail to return the size in one of these two forms. */
18289
18290 static dw_loc_descr_ref
18291 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18292 {
18293 tree tree_size;
18294 struct loc_descr_context ctx;
18295
18296   /* Prefer to return a constant integer, when possible.  */
18297 *cst_size = int_size_in_bytes (type);
18298 if (*cst_size != -1)
18299 return NULL;
18300
18301 ctx.context_type = const_cast<tree> (type);
18302 ctx.base_decl = NULL_TREE;
18303 ctx.dpi = NULL;
18304 ctx.placeholder_arg = false;
18305 ctx.placeholder_seen = false;
18306
18307 type = TYPE_MAIN_VARIANT (type);
18308 tree_size = TYPE_SIZE_UNIT (type);
18309 return ((tree_size != NULL_TREE)
18310 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18311 : NULL);
18312 }
18313
18314 /* Helper structure for RECORD_TYPE processing. */
18315 struct vlr_context
18316 {
18317 /* Root RECORD_TYPE. It is needed to generate data member location
18318 descriptions in variable-length records (VLR), but also to cope with
18319 variants, which are composed of nested structures multiplexed with
18320 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18321 function processing a FIELD_DECL, it is required to be non null. */
18322 tree struct_type;
18323 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18324 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18325 this variant part as part of the root record (in storage units). For
18326 regular records, it must be NULL_TREE. */
18327 tree variant_part_offset;
18328 };
18329
18330 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18331 addressed byte of the "containing object" for the given FIELD_DECL. If
18332 possible, return a native constant through CST_OFFSET (in which case NULL is
18333 returned); otherwise return a DWARF expression that computes the offset.
18334
18335 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18336 that offset is, either because the argument turns out to be a pointer to an
18337 ERROR_MARK node, or because the offset expression is too complex for us.
18338
18339 CTX is required: see the comment for VLR_CONTEXT. */
18340
18341 static dw_loc_descr_ref
18342 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18343 HOST_WIDE_INT *cst_offset)
18344 {
18345 tree tree_result;
18346 dw_loc_list_ref loc_result;
18347
18348 *cst_offset = 0;
18349
18350 if (TREE_CODE (decl) == ERROR_MARK)
18351 return NULL;
18352 else
18353 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18354
18355   /* We cannot handle variable bit offsets at the moment, so give up if that
18356      is the case.  */
18357 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18358 return NULL;
18359
18360 #ifdef PCC_BITFIELD_TYPE_MATTERS
18361   /* We used to handle only constant offsets in all cases.  Now, we properly
18362      handle dynamic byte offsets, but only when PCC bitfield type doesn't
18363      matter.  */
18364 if (PCC_BITFIELD_TYPE_MATTERS
18365 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18366 {
18367 offset_int object_offset_in_bits;
18368 offset_int object_offset_in_bytes;
18369 offset_int bitpos_int;
18370 tree type;
18371 tree field_size_tree;
18372 offset_int deepest_bitpos;
18373 offset_int field_size_in_bits;
18374 unsigned int type_align_in_bits;
18375 unsigned int decl_align_in_bits;
18376 offset_int type_size_in_bits;
18377
18378 bitpos_int = wi::to_offset (bit_position (decl));
18379 type = field_type (decl);
18380 type_size_in_bits = offset_int_type_size_in_bits (type);
18381 type_align_in_bits = simple_type_align_in_bits (type);
18382
18383 field_size_tree = DECL_SIZE (decl);
18384
18385 /* The size could be unspecified if there was an error, or for
18386 a flexible array member. */
18387 if (!field_size_tree)
18388 field_size_tree = bitsize_zero_node;
18389
18390 /* If the size of the field is not constant, use the type size. */
18391 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18392 field_size_in_bits = wi::to_offset (field_size_tree);
18393 else
18394 field_size_in_bits = type_size_in_bits;
18395
18396 decl_align_in_bits = simple_decl_align_in_bits (decl);
18397
18398 /* The GCC front-end doesn't make any attempt to keep track of the
18399 starting bit offset (relative to the start of the containing
18400 structure type) of the hypothetical "containing object" for a
18401 bit-field. Thus, when computing the byte offset value for the
18402 start of the "containing object" of a bit-field, we must deduce
18403 this information on our own. This can be rather tricky to do in
18404 some cases. For example, handling the following structure type
18405 definition when compiling for an i386/i486 target (which only
18406 aligns long long's to 32-bit boundaries) can be very tricky:
18407
18408 struct S { int field1; long long field2:31; };
18409
18410 Fortunately, there is a simple rule-of-thumb which can be used
18411 in such cases. When compiling for an i386/i486, GCC will
18412 allocate 8 bytes for the structure shown above. It decides to
18413 do this based upon one simple rule for bit-field allocation.
18414 GCC allocates each "containing object" for each bit-field at
18415 the first (i.e. lowest addressed) legitimate alignment boundary
18416 (based upon the required minimum alignment for the declared
18417 type of the field) which it can possibly use, subject to the
18418 condition that there is still enough available space remaining
18419 in the containing object (when allocated at the selected point)
18420 to fully accommodate all of the bits of the bit-field itself.
18421
18422 This simple rule makes it obvious why GCC allocates 8 bytes for
18423 each object of the structure type shown above. When looking
18424 for a place to allocate the "containing object" for `field2',
18425 the compiler simply tries to allocate a 64-bit "containing
18426 object" at each successive 32-bit boundary (starting at zero)
18427 	 until it finds a place to allocate that 64-bit field such that
18428 at least 31 contiguous (and previously unallocated) bits remain
18429 	 within that selected 64-bit field.  (As it turns out, for the
18430 example above, the compiler finds it is OK to allocate the
18431 "containing object" 64-bit field at bit-offset zero within the
18432 structure type.)
18433
18434 Here we attempt to work backwards from the limited set of facts
18435 we're given, and we try to deduce from those facts, where GCC
18436 must have believed that the containing object started (within
18437 the structure type). The value we deduce is then used (by the
18438 callers of this routine) to generate DW_AT_location and
18439 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18440 the case of DW_AT_location, regular fields as well). */
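    /* Working the struct S example above through the code below:
       bit_position (field2) is 32 and the field is 31 bits wide, so
       deepest_bitpos is 63; subtracting the 64-bit type size gives -1,
       which rounds up to 0 at the 32-bit type alignment.  The containing
       object is thus deduced to start at byte 0 of the structure.  */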
18441
18442 /* Figure out the bit-distance from the start of the structure to
18443 the "deepest" bit of the bit-field. */
18444 deepest_bitpos = bitpos_int + field_size_in_bits;
18445
18446 /* This is the tricky part. Use some fancy footwork to deduce
18447 where the lowest addressed bit of the containing object must
18448 be. */
18449 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18450
18451 /* Round up to type_align by default. This works best for
18452 bitfields. */
18453 object_offset_in_bits
18454 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18455
18456 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18457 {
18458 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18459
18460 /* Round up to decl_align instead. */
18461 object_offset_in_bits
18462 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18463 }
18464
18465 object_offset_in_bytes
18466 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18467 if (ctx->variant_part_offset == NULL_TREE)
18468 {
18469 *cst_offset = object_offset_in_bytes.to_shwi ();
18470 return NULL;
18471 }
18472 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18473 }
18474 else
18475 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18476 tree_result = byte_position (decl);
18477
18478 if (ctx->variant_part_offset != NULL_TREE)
18479 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18480 ctx->variant_part_offset, tree_result);
18481
18482   /* If the byte offset is a constant, it's simpler to handle a native
18483 constant rather than a DWARF expression. */
18484 if (TREE_CODE (tree_result) == INTEGER_CST)
18485 {
18486 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18487 return NULL;
18488 }
18489 struct loc_descr_context loc_ctx = {
18490 ctx->struct_type, /* context_type */
18491 NULL_TREE, /* base_decl */
18492 NULL, /* dpi */
18493 false, /* placeholder_arg */
18494 false /* placeholder_seen */
18495 };
18496 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18497
18498 /* We want a DWARF expression: abort if we only have a location list with
18499 multiple elements. */
18500 if (!loc_result || !single_element_loc_list_p (loc_result))
18501 return NULL;
18502 else
18503 return loc_result->expr;
18504 }
18505 \f
18506 /* The following routines define various Dwarf attributes and any data
18507 associated with them. */
18508
18509 /* Add a location description attribute value to a DIE.
18510
18511 This emits location attributes suitable for whole variables and
18512 whole parameters. Note that the location attributes for struct fields are
18513 generated by the routine `data_member_location_attribute' below. */
18514
18515 static inline void
18516 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18517 dw_loc_list_ref descr)
18518 {
18519 if (descr == 0)
18520 return;
18521 if (single_element_loc_list_p (descr))
18522 add_AT_loc (die, attr_kind, descr->expr);
18523 else
18524 add_AT_loc_list (die, attr_kind, descr);
18525 }
18526
18527 /* Add DW_AT_accessibility attribute to DIE if needed. */
18528
18529 static void
18530 add_accessibility_attribute (dw_die_ref die, tree decl)
18531 {
18532 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18533 children, otherwise the default is DW_ACCESS_public. In DWARF2
18534 the default has always been DW_ACCESS_public. */
18535 if (TREE_PROTECTED (decl))
18536 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18537 else if (TREE_PRIVATE (decl))
18538 {
18539 if (dwarf_version == 2
18540 || die->die_parent == NULL
18541 || die->die_parent->die_tag != DW_TAG_class_type)
18542 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18543 }
18544 else if (dwarf_version > 2
18545 && die->die_parent
18546 && die->die_parent->die_tag == DW_TAG_class_type)
18547 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18548 }
18549
18550 /* Attach the specialized form of location attribute used for data members of
18551 struct and union types. In the special case of a FIELD_DECL node which
18552 represents a bit-field, the "offset" part of this special location
18553 descriptor must indicate the distance in bytes from the lowest-addressed
18554 byte of the containing struct or union type to the lowest-addressed byte of
18555 the "containing object" for the bit-field. (See the `field_byte_offset'
18556 function above).
18557
18558 For any given bit-field, the "containing object" is a hypothetical object
18559 (of some integral or enum type) within which the given bit-field lives. The
18560 type of this hypothetical "containing object" is always the same as the
18561 declared type of the individual bit-field itself (for GCC anyway... the
18562 DWARF spec doesn't actually mandate this). Note that it is the size (in
18563 bytes) of the hypothetical "containing object" which will be given in the
18564 DW_AT_byte_size attribute for this bit-field. (See the
18565 `byte_size_attribute' function below.) It is also used when calculating the
18566 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18567 function below.)
18568
18569 CTX is required: see the comment for VLR_CONTEXT. */
18570
18571 static void
18572 add_data_member_location_attribute (dw_die_ref die,
18573 tree decl,
18574 struct vlr_context *ctx)
18575 {
18576 HOST_WIDE_INT offset;
18577 dw_loc_descr_ref loc_descr = 0;
18578
18579 if (TREE_CODE (decl) == TREE_BINFO)
18580 {
18581 /* We're working on the TAG_inheritance for a base class. */
18582 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18583 {
18584 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18585 aren't at a fixed offset from all (sub)objects of the same
18586 type. We need to extract the appropriate offset from our
18587 vtable. The following dwarf expression means
18588
18589 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18590
18591 This is specific to the V3 ABI, of course. */
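	  /* Concretely, the sequence built below is:
	     DW_OP_dup; DW_OP_deref; <push Offset>; DW_OP_minus;
	     DW_OP_deref; DW_OP_plus, with Offset the value used in the
	     formula above.  */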
18592
18593 dw_loc_descr_ref tmp;
18594
18595 /* Make a copy of the object address. */
18596 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18597 add_loc_descr (&loc_descr, tmp);
18598
18599 /* Extract the vtable address. */
18600 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18601 add_loc_descr (&loc_descr, tmp);
18602
18603 /* Calculate the address of the offset. */
18604 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18605 gcc_assert (offset < 0);
18606
18607 tmp = int_loc_descriptor (-offset);
18608 add_loc_descr (&loc_descr, tmp);
18609 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18610 add_loc_descr (&loc_descr, tmp);
18611
18612 /* Extract the offset. */
18613 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18614 add_loc_descr (&loc_descr, tmp);
18615
18616 /* Add it to the object address. */
18617 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18618 add_loc_descr (&loc_descr, tmp);
18619 }
18620 else
18621 offset = tree_to_shwi (BINFO_OFFSET (decl));
18622 }
18623 else
18624 {
18625 loc_descr = field_byte_offset (decl, ctx, &offset);
18626
18627 /* If loc_descr is available then we know the field offset is dynamic.
18628 However, GDB does not handle dynamic field offsets very well at the
18629 moment. */
18630 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18631 {
18632 loc_descr = NULL;
18633 offset = 0;
18634 }
18635
18636       /* Data member location evaluation starts with the base address on the
18637 stack. Compute the field offset and add it to this base address. */
18638 else if (loc_descr != NULL)
18639 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18640 }
18641
18642 if (! loc_descr)
18643 {
18644       /* While DW_AT_data_bit_offset was already added in DWARF4,
18645 	 GDB, for example, only added support for it in November 2016.  For
18646 	 DWARF5 we need newer debug info consumers anyway.  We might change
18647 	 this to dwarf_version >= 4 once most consumers have caught up.
18648 if (dwarf_version >= 5
18649 && TREE_CODE (decl) == FIELD_DECL
18650 && DECL_BIT_FIELD_TYPE (decl))
18651 {
18652 tree off = bit_position (decl);
18653 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18654 {
18655 remove_AT (die, DW_AT_byte_size);
18656 remove_AT (die, DW_AT_bit_offset);
18657 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18658 return;
18659 }
18660 }
18661 if (dwarf_version > 2)
18662 {
18663 /* Don't need to output a location expression, just the constant. */
18664 if (offset < 0)
18665 add_AT_int (die, DW_AT_data_member_location, offset);
18666 else
18667 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18668 return;
18669 }
18670 else
18671 {
18672 enum dwarf_location_atom op;
18673
18674 /* The DWARF2 standard says that we should assume that the structure
18675 address is already on the stack, so we can specify a structure
18676 field address by using DW_OP_plus_uconst. */
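	  /* For instance, a member at byte offset 8 is described simply as
	     DW_OP_plus_uconst 8.  */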
18677 op = DW_OP_plus_uconst;
18678 loc_descr = new_loc_descr (op, offset, 0);
18679 }
18680 }
18681
18682 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18683 }
18684
18685 /* Writes integer values to dw_vec_const array. */
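/* For example, insert_int (0x1234, 2, dest) stores the bytes 0x34, 0x12:
   least significant byte first, regardless of the host byte order.  */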
18686
18687 static void
18688 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18689 {
18690 while (size != 0)
18691 {
18692 *dest++ = val & 0xff;
18693 val >>= 8;
18694 --size;
18695 }
18696 }
18697
18698 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18699
18700 static HOST_WIDE_INT
18701 extract_int (const unsigned char *src, unsigned int size)
18702 {
18703 HOST_WIDE_INT val = 0;
18704
18705 src += size;
18706 while (size != 0)
18707 {
18708 val <<= 8;
18709 val |= *--src & 0xff;
18710 --size;
18711 }
18712 return val;
18713 }
18714
18715 /* Writes wide_int values to dw_vec_const array. */
18716
18717 static void
18718 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18719 {
18720 int i;
18721
18722 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18723 {
18724 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18725 return;
18726 }
18727
18728 /* We'd have to extend this code to support odd sizes. */
18729 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18730
18731 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18732
18733 if (WORDS_BIG_ENDIAN)
18734 for (i = n - 1; i >= 0; i--)
18735 {
18736 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18737 dest += sizeof (HOST_WIDE_INT);
18738 }
18739 else
18740 for (i = 0; i < n; i++)
18741 {
18742 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18743 dest += sizeof (HOST_WIDE_INT);
18744 }
18745 }
18746
18747 /* Writes floating point values to dw_vec_const array. */
18748
18749 static void
18750 insert_float (const_rtx rtl, unsigned char *array)
18751 {
18752 long val[4];
18753 int i;
18754 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18755
18756 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
18757
18758 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18759 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
18760 {
18761 insert_int (val[i], 4, array);
18762 array += 4;
18763 }
18764 }
18765
18766 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18767 does not have a "location" either in memory or in a register. These
18768 things can arise in GNU C when a constant is passed as an actual parameter
18769 to an inlined function. They can also arise in C++ where declared
18770 constants do not necessarily get memory "homes". */
18771
18772 static bool
18773 add_const_value_attribute (dw_die_ref die, rtx rtl)
18774 {
18775 switch (GET_CODE (rtl))
18776 {
18777 case CONST_INT:
18778 {
18779 HOST_WIDE_INT val = INTVAL (rtl);
18780
18781 if (val < 0)
18782 add_AT_int (die, DW_AT_const_value, val);
18783 else
18784 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18785 }
18786 return true;
18787
18788 case CONST_WIDE_INT:
18789 {
18790 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18791 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18792 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18793 wide_int w = wi::zext (w1, prec);
18794 add_AT_wide (die, DW_AT_const_value, w);
18795 }
18796 return true;
18797
18798 case CONST_DOUBLE:
18799 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18800 floating-point constant. A CONST_DOUBLE is used whenever the
18801 constant requires more than one word in order to be adequately
18802 represented. */
18803 if (TARGET_SUPPORTS_WIDE_INT == 0
18804 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
18805 add_AT_double (die, DW_AT_const_value,
18806 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18807 else
18808 {
18809 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18810 unsigned int length = GET_MODE_SIZE (mode);
18811 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18812
18813 insert_float (rtl, array);
18814 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18815 }
18816 return true;
18817
18818 case CONST_VECTOR:
18819 {
18820 machine_mode mode = GET_MODE (rtl);
18821 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18822 unsigned int length = CONST_VECTOR_NUNITS (rtl);
18823 unsigned char *array
18824 = ggc_vec_alloc<unsigned char> (length * elt_size);
18825 unsigned int i;
18826 unsigned char *p;
18827 machine_mode imode = GET_MODE_INNER (mode);
18828
18829 switch (GET_MODE_CLASS (mode))
18830 {
18831 case MODE_VECTOR_INT:
18832 for (i = 0, p = array; i < length; i++, p += elt_size)
18833 {
18834 rtx elt = CONST_VECTOR_ELT (rtl, i);
18835 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18836 }
18837 break;
18838
18839 case MODE_VECTOR_FLOAT:
18840 for (i = 0, p = array; i < length; i++, p += elt_size)
18841 {
18842 rtx elt = CONST_VECTOR_ELT (rtl, i);
18843 insert_float (elt, p);
18844 }
18845 break;
18846
18847 default:
18848 gcc_unreachable ();
18849 }
18850
18851 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18852 }
18853 return true;
18854
18855 case CONST_STRING:
18856 if (dwarf_version >= 4 || !dwarf_strict)
18857 {
18858 dw_loc_descr_ref loc_result;
18859 resolve_one_addr (&rtl);
18860 rtl_addr:
18861 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18862 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18863 add_AT_loc (die, DW_AT_location, loc_result);
18864 vec_safe_push (used_rtx_array, rtl);
18865 return true;
18866 }
18867 return false;
18868
18869 case CONST:
18870 if (CONSTANT_P (XEXP (rtl, 0)))
18871 return add_const_value_attribute (die, XEXP (rtl, 0));
18872 /* FALLTHROUGH */
18873 case SYMBOL_REF:
18874 if (!const_ok_for_output (rtl))
18875 return false;
18876 /* FALLTHROUGH */
18877 case LABEL_REF:
18878 if (dwarf_version >= 4 || !dwarf_strict)
18879 goto rtl_addr;
18880 return false;
18881
18882 case PLUS:
18883 /* In cases where an inlined instance of an inline function is passed
18884 the address of an `auto' variable (which is local to the caller) we
18885 can get a situation where the DECL_RTL of the artificial local
18886 variable (for the inlining) which acts as a stand-in for the
18887 corresponding formal parameter (of the inline function) will look
18888 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18889 exactly a compile-time constant expression, but it isn't the address
18890 of the (artificial) local variable either. Rather, it represents the
18891 *value* which the artificial local variable always has during its
18892 lifetime. We currently have no way to represent such quasi-constant
18893 values in Dwarf, so for now we just punt and generate nothing. */
18894 return false;
18895
18896 case HIGH:
18897 case CONST_FIXED:
18898 return false;
18899
18900 case MEM:
18901 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18902 && MEM_READONLY_P (rtl)
18903 && GET_MODE (rtl) == BLKmode)
18904 {
18905 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18906 return true;
18907 }
18908 return false;
18909
18910 default:
18911 /* No other kinds of rtx should be possible here. */
18912 gcc_unreachable ();
18913 }
18914 return false;
18915 }
18916
18917 /* Determine whether the evaluation of EXPR references any variables
18918 or functions which aren't otherwise used (and therefore may not be
18919 output). */
18920 static tree
18921 reference_to_unused (tree * tp, int * walk_subtrees,
18922 void * data ATTRIBUTE_UNUSED)
18923 {
18924 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18925 *walk_subtrees = 0;
18926
18927 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18928 && ! TREE_ASM_WRITTEN (*tp))
18929 return *tp;
18930 /* ??? The C++ FE emits debug information for using decls, so
18931 putting gcc_unreachable here falls over. See PR31899. For now
18932 be conservative. */
18933 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18934 return *tp;
18935 else if (VAR_P (*tp))
18936 {
18937 varpool_node *node = varpool_node::get (*tp);
18938 if (!node || !node->definition)
18939 return *tp;
18940 }
18941 else if (TREE_CODE (*tp) == FUNCTION_DECL
18942 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18943 {
18944 /* The call graph machinery must have finished analyzing,
18945 optimizing and gimplifying the CU by now.
18946 So if *TP has no call graph node associated
18947 to it, it means *TP will not be emitted. */
18948 if (!cgraph_node::get (*tp))
18949 return *tp;
18950 }
18951 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18952 return *tp;
18953
18954 return NULL_TREE;
18955 }
18956
18957 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18958 for use in a later add_const_value_attribute call. */
18959
18960 static rtx
18961 rtl_for_decl_init (tree init, tree type)
18962 {
18963 rtx rtl = NULL_RTX;
18964
18965 STRIP_NOPS (init);
18966
18967 /* If a variable is initialized with a string constant without embedded
18968 zeros, build CONST_STRING. */
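  /* For example, `static const char msg[] = "hi";' passes the checks below
     and yields a read-only BLKmode MEM wrapping a CONST_STRING.  */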
18969 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
18970 {
18971 tree enttype = TREE_TYPE (type);
18972 tree domain = TYPE_DOMAIN (type);
18973 scalar_int_mode mode;
18974
18975 if (is_int_mode (TYPE_MODE (enttype), &mode)
18976 && GET_MODE_SIZE (mode) == 1
18977 && domain
18978 && integer_zerop (TYPE_MIN_VALUE (domain))
18979 && compare_tree_int (TYPE_MAX_VALUE (domain),
18980 TREE_STRING_LENGTH (init) - 1) == 0
18981 && ((size_t) TREE_STRING_LENGTH (init)
18982 == strlen (TREE_STRING_POINTER (init)) + 1))
18983 {
18984 rtl = gen_rtx_CONST_STRING (VOIDmode,
18985 ggc_strdup (TREE_STRING_POINTER (init)));
18986 rtl = gen_rtx_MEM (BLKmode, rtl);
18987 MEM_READONLY_P (rtl) = 1;
18988 }
18989 }
18990 /* Other aggregates, and complex values, could be represented using
18991 CONCAT: FIXME! */
18992 else if (AGGREGATE_TYPE_P (type)
18993 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
18994 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
18995 || TREE_CODE (type) == COMPLEX_TYPE)
18996 ;
18997 /* Vectors only work if their mode is supported by the target.
18998 FIXME: generic vectors ought to work too. */
18999 else if (TREE_CODE (type) == VECTOR_TYPE
19000 && !VECTOR_MODE_P (TYPE_MODE (type)))
19001 ;
19002 /* If the initializer is something that we know will expand into an
19003 immediate RTL constant, expand it now. We must be careful not to
19004 reference variables which won't be output. */
19005 else if (initializer_constant_valid_p (init, type)
19006 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19007 {
19008 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19009 possible. */
19010 if (TREE_CODE (type) == VECTOR_TYPE)
19011 switch (TREE_CODE (init))
19012 {
19013 case VECTOR_CST:
19014 break;
19015 case CONSTRUCTOR:
19016 if (TREE_CONSTANT (init))
19017 {
19018 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19019 bool constant_p = true;
19020 tree value;
19021 unsigned HOST_WIDE_INT ix;
19022
19023 /* Even when ctor is constant, it might contain non-*_CST
19024 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19025 		 belong in VECTOR_CST nodes.  */
19026 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19027 if (!CONSTANT_CLASS_P (value))
19028 {
19029 constant_p = false;
19030 break;
19031 }
19032
19033 if (constant_p)
19034 {
19035 init = build_vector_from_ctor (type, elts);
19036 break;
19037 }
19038 }
19039 /* FALLTHRU */
19040
19041 default:
19042 return NULL;
19043 }
19044
19045 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19046
19047 /* If expand_expr returns a MEM, it wasn't immediate. */
19048 gcc_assert (!rtl || !MEM_P (rtl));
19049 }
19050
19051 return rtl;
19052 }
19053
19054 /* Generate RTL for the variable DECL to represent its location. */
19055
19056 static rtx
19057 rtl_for_decl_location (tree decl)
19058 {
19059 rtx rtl;
19060
19061 /* Here we have to decide where we are going to say the parameter "lives"
19062 (as far as the debugger is concerned). We only have a couple of
19063 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19064
19065 DECL_RTL normally indicates where the parameter lives during most of the
19066 activation of the function. If optimization is enabled however, this
19067 could be either NULL or else a pseudo-reg. Both of those cases indicate
19068 that the parameter doesn't really live anywhere (as far as the code
19069 generation parts of GCC are concerned) during most of the function's
19070 activation. That will happen (for example) if the parameter is never
19071 referenced within the function.
19072
19073 We could just generate a location descriptor here for all non-NULL
19074 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19075 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19076 where DECL_RTL is NULL or is a pseudo-reg.
19077
19078 Note however that we can only get away with using DECL_INCOMING_RTL as
19079 a backup substitute for DECL_RTL in certain limited cases. In cases
19080 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19081 we can be sure that the parameter was passed using the same type as it is
19082 declared to have within the function, and that its DECL_INCOMING_RTL
19083 points us to a place where a value of that type is passed.
19084
19085 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19086 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19087 because in these cases DECL_INCOMING_RTL points us to a value of some
19088 type which is *different* from the type of the parameter itself. Thus,
19089 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19090 such cases, the debugger would end up (for example) trying to fetch a
19091 `float' from a place which actually contains the first part of a
19092 `double'. That would lead to really incorrect and confusing
19093 output at debug-time.
19094
19095 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19096 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19097 are a couple of exceptions however. On little-endian machines we can
19098 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19099 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19100 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19101 when (on a little-endian machine) a non-prototyped function has a
19102 parameter declared to be of type `short' or `char'. In such cases,
19103 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19104 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19105 passed `int' value. If the debugger then uses that address to fetch
19106 a `short' or a `char' (on a little-endian machine) the result will be
19107 the correct data, so we allow for such exceptional cases below.
19108
19109 Note that our goal here is to describe the place where the given formal
19110 parameter lives during most of the function's activation (i.e. between the
19111 end of the prologue and the start of the epilogue). We'll do that as best
19112 as we can. Note however that if the given formal parameter is modified
19113 sometime during the execution of the function, then a stack backtrace (at
19114 debug-time) will show the function as having been called with the *new*
19115 value rather than the value which was originally passed in. This happens
19116 rarely enough that it is not a major problem, but it *is* a problem, and
19117 I'd like to fix it.
19118
19119 A future version of dwarf2out.c may generate two additional attributes for
19120 any given DW_TAG_formal_parameter DIE which will describe the "passed
19121 type" and the "passed location" for the given formal parameter in addition
19122 to the attributes we now generate to indicate the "declared type" and the
19123 "active location" for each parameter. This additional set of attributes
19124 could be used by debuggers for stack backtraces. Separately, note that
19125 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19126 This happens (for example) for inlined-instances of inline function formal
19127 parameters which are never referenced. This really shouldn't be
19128 happening. All PARM_DECL nodes should get valid non-NULL
19129 DECL_INCOMING_RTL values. FIXME. */
19130
19131 /* Use DECL_RTL as the "location" unless we find something better. */
19132 rtl = DECL_RTL_IF_SET (decl);
19133
19134 /* When generating abstract instances, ignore everything except
19135 constants, symbols living in memory, and symbols living in
19136 fixed registers. */
19137 if (! reload_completed)
19138 {
19139 if (rtl
19140 && (CONSTANT_P (rtl)
19141 || (MEM_P (rtl)
19142 && CONSTANT_P (XEXP (rtl, 0)))
19143 || (REG_P (rtl)
19144 && VAR_P (decl)
19145 && TREE_STATIC (decl))))
19146 {
19147 rtl = targetm.delegitimize_address (rtl);
19148 return rtl;
19149 }
19150 rtl = NULL_RTX;
19151 }
19152 else if (TREE_CODE (decl) == PARM_DECL)
19153 {
19154 if (rtl == NULL_RTX
19155 || is_pseudo_reg (rtl)
19156 || (MEM_P (rtl)
19157 && is_pseudo_reg (XEXP (rtl, 0))
19158 && DECL_INCOMING_RTL (decl)
19159 && MEM_P (DECL_INCOMING_RTL (decl))
19160 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19161 {
19162 tree declared_type = TREE_TYPE (decl);
19163 tree passed_type = DECL_ARG_TYPE (decl);
19164 machine_mode dmode = TYPE_MODE (declared_type);
19165 machine_mode pmode = TYPE_MODE (passed_type);
19166
19167 /* This decl represents a formal parameter which was optimized out.
19168 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19169 all cases where (rtl == NULL_RTX) just below. */
19170 if (dmode == pmode)
19171 rtl = DECL_INCOMING_RTL (decl);
19172 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19173 && SCALAR_INT_MODE_P (dmode)
19174 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
19175 && DECL_INCOMING_RTL (decl))
19176 {
19177 rtx inc = DECL_INCOMING_RTL (decl);
19178 if (REG_P (inc))
19179 rtl = inc;
19180 else if (MEM_P (inc))
19181 {
19182 if (BYTES_BIG_ENDIAN)
19183 rtl = adjust_address_nv (inc, dmode,
19184 GET_MODE_SIZE (pmode)
19185 - GET_MODE_SIZE (dmode));
19186 else
19187 rtl = inc;
19188 }
19189 }
19190 }
19191
19192 /* If the parm was passed in registers, but lives on the stack, then
19193 make a big endian correction if the mode of the type of the
19194 parameter is not the same as the mode of the rtl. */
19195 /* ??? This is the same series of checks that are made in dbxout.c before
19196 we reach the big endian correction code there. It isn't clear if all
19197 of these checks are necessary here, but keeping them all is the safe
19198 thing to do. */
19199 else if (MEM_P (rtl)
19200 && XEXP (rtl, 0) != const0_rtx
19201 && ! CONSTANT_P (XEXP (rtl, 0))
19202 /* Not passed in memory. */
19203 && !MEM_P (DECL_INCOMING_RTL (decl))
19204 /* Not passed by invisible reference. */
19205 && (!REG_P (XEXP (rtl, 0))
19206 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19207 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19208 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19209 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19210 #endif
19211 )
19212 /* Big endian correction check. */
19213 && BYTES_BIG_ENDIAN
19214 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19215 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
19216 < UNITS_PER_WORD))
19217 {
19218 machine_mode addr_mode = get_address_mode (rtl);
19219 int offset = (UNITS_PER_WORD
19220 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19221
19222 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19223 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19224 }
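/* Worked example of the correction just above, using assumed target
   parameters (not taken from any particular port): with BYTES_BIG_ENDIAN,
   UNITS_PER_WORD == 8 and a 4-byte `int' parameter spilled to an 8-byte
   stack slot, offset is 8 - 4 = 4, so the rebuilt MEM addresses the
   high-address half of the slot, which is where the `int' value actually
   resides on a big-endian machine.  */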
19225 }
19226 else if (VAR_P (decl)
19227 && rtl
19228 && MEM_P (rtl)
19229 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19230 {
19231 machine_mode addr_mode = get_address_mode (rtl);
19232 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19233 GET_MODE (rtl));
19234
19235 /* If a variable is declared "register" yet is smaller than
19236 a register, then if we store the variable to memory, it
19237 looks like we're storing a register-sized value, when in
19238 fact we are not. We need to adjust the offset of the
19239 storage location to reflect the actual value's bytes,
19240 else gdb will not be able to display it. */
19241 if (maybe_ne (offset, 0))
19242 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19243 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19244 }
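/* For illustration (assumed modes, big-endian target): if a variable
   declared `register' has SImode type but its memory RTL has DImode,
   byte_lowpart_offset (SImode, DImode) is 4, so the MEM is rebuilt to
   point at the four bytes that really hold the value; on a little-endian
   target the offset would be 0 and the RTL is left untouched.  */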
19245
19246 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19247 and will have been substituted directly into all expressions that use it.
19248 C does not have such a concept, but C++ and other languages do. */
19249 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19250 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19251
19252 if (rtl)
19253 rtl = targetm.delegitimize_address (rtl);
19254
19255 /* If we don't look past the constant pool, we risk emitting a
19256 reference to a constant pool entry that isn't referenced from
19257 code, and thus is not emitted. */
19258 if (rtl)
19259 rtl = avoid_constant_pool_reference (rtl);
19260
19261 /* Try harder to get a rtl. If this symbol ends up not being emitted
19262 in the current CU, resolve_addr will remove the expression referencing
19263 it. */
19264 if (rtl == NULL_RTX
19265 && VAR_P (decl)
19266 && !DECL_EXTERNAL (decl)
19267 && TREE_STATIC (decl)
19268 && DECL_NAME (decl)
19269 && !DECL_HARD_REGISTER (decl)
19270 && DECL_MODE (decl) != VOIDmode)
19271 {
19272 rtl = make_decl_rtl_for_debug (decl);
19273 if (!MEM_P (rtl)
19274 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19275 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19276 rtl = NULL_RTX;
19277 }
19278
19279 return rtl;
19280 }
19281
19282 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19283 returned. If so, the decl for the COMMON block is returned, and the
19284 value is the offset into the common block for the symbol. */
19285
19286 static tree
19287 fortran_common (tree decl, HOST_WIDE_INT *value)
19288 {
19289 tree val_expr, cvar;
19290 machine_mode mode;
19291 poly_int64 bitsize, bitpos;
19292 tree offset;
19293 HOST_WIDE_INT cbitpos;
19294 int unsignedp, reversep, volatilep = 0;
19295
19296 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it does
19297 not have a value (the offset into the common area), or if we are not
19298 compiling Fortran, then it isn't a COMMON symbol and shouldn't be
19299 handled as such. */
19300 if (!VAR_P (decl)
19301 || !TREE_STATIC (decl)
19302 || !DECL_HAS_VALUE_EXPR_P (decl)
19303 || !is_fortran ())
19304 return NULL_TREE;
19305
19306 val_expr = DECL_VALUE_EXPR (decl);
19307 if (TREE_CODE (val_expr) != COMPONENT_REF)
19308 return NULL_TREE;
19309
19310 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19311 &unsignedp, &reversep, &volatilep);
19312
19313 if (cvar == NULL_TREE
19314 || !VAR_P (cvar)
19315 || DECL_ARTIFICIAL (cvar)
19316 || !TREE_PUBLIC (cvar)
19317 /* We don't expect to have to cope with variable offsets,
19318 since at present all static data must have a constant size. */
19319 || !bitpos.is_constant (&cbitpos))
19320 return NULL_TREE;
19321
19322 *value = 0;
19323 if (offset != NULL)
19324 {
19325 if (!tree_fits_shwi_p (offset))
19326 return NULL_TREE;
19327 *value = tree_to_shwi (offset);
19328 }
19329 if (cbitpos != 0)
19330 *value += cbitpos / BITS_PER_UNIT;
19331
19332 return cvar;
19333 }
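/* A hedged sketch of how a caller might use the helper above (the real
   callers appear elsewhere in this file; `decl' is just a placeholder):

     HOST_WIDE_INT off;
     tree block = fortran_common (decl, &off);
     if (block != NULL_TREE)
       ... describe DECL as living OFF bytes into the DIE generated
	   for the COMMON block BLOCK ...  */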
19334
19335 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19336 data attribute for a variable or a parameter. We generate the
19337 DW_AT_const_value attribute only in those cases where the given variable
19338 or parameter does not have a true "location" either in memory or in a
19339 register. This can happen (for example) when a constant is passed as an
19340 actual argument in a call to an inline function. (It's possible that
19341 these things can crop up in other ways also.) Note that one type of
19342 constant value which can be passed into an inlined function is a constant
19343 pointer. This can happen for example if an actual argument in an inlined
19344 function call evaluates to a compile-time constant address.
19345
19346 CACHE_P is true if it is worth caching the location list for DECL,
19347 so that future calls can reuse it rather than regenerate it from scratch.
19348 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19349 since we will need to refer to them each time the function is inlined. */
19350
19351 static bool
19352 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19353 {
19354 rtx rtl;
19355 dw_loc_list_ref list;
19356 var_loc_list *loc_list;
19357 cached_dw_loc_list *cache;
19358
19359 if (early_dwarf)
19360 return false;
19361
19362 if (TREE_CODE (decl) == ERROR_MARK)
19363 return false;
19364
19365 if (get_AT (die, DW_AT_location)
19366 || get_AT (die, DW_AT_const_value))
19367 return true;
19368
19369 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19370 || TREE_CODE (decl) == RESULT_DECL);
19371
19372 /* Try to get some constant RTL for this decl, and use that as the value of
19373 the location. */
19374
19375 rtl = rtl_for_decl_location (decl);
19376 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19377 && add_const_value_attribute (die, rtl))
19378 return true;
19379
19380 /* See if we have a single-element location list that is equivalent to
19381 a constant value. In that case it is better to use add_const_value_attribute
19382 rather than expanding the equivalent constant value. */
19383 loc_list = lookup_decl_loc (decl);
19384 if (loc_list
19385 && loc_list->first
19386 && loc_list->first->next == NULL
19387 && NOTE_P (loc_list->first->loc)
19388 && NOTE_VAR_LOCATION (loc_list->first->loc)
19389 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19390 {
19391 struct var_loc_node *node;
19392
19393 node = loc_list->first;
19394 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19395 if (GET_CODE (rtl) == EXPR_LIST)
19396 rtl = XEXP (rtl, 0);
19397 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19398 && add_const_value_attribute (die, rtl))
19399 return true;
19400 }
19401 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19402 list several times. See if we've already cached the contents. */
19403 list = NULL;
19404 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19405 cache_p = false;
19406 if (cache_p)
19407 {
19408 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19409 if (cache)
19410 list = cache->loc_list;
19411 }
19412 if (list == NULL)
19413 {
19414 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19415 NULL);
19416 /* It is usually worth caching this result if the decl is from
19417 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19418 if (cache_p && list && list->dw_loc_next)
19419 {
19420 cached_dw_loc_list **slot
19421 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19422 DECL_UID (decl),
19423 INSERT);
19424 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19425 cache->decl_id = DECL_UID (decl);
19426 cache->loc_list = list;
19427 *slot = cache;
19428 }
19429 }
19430 if (list)
19431 {
19432 add_AT_location_description (die, DW_AT_location, list);
19433 return true;
19434 }
19435 /* None of that worked, so it must not really have a location;
19436 try adding a constant value attribute from the DECL_INITIAL. */
19437 return tree_add_const_value_attribute_for_decl (die, decl);
19438 }
19439
19440 /* Helper function for tree_add_const_value_attribute. Natively encode
19441 initializer INIT into an array. Return true if successful. */
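/* For illustration (assumed 32-bit little-endian target): encoding the
   initializer of `int a[2] = { 1, 2 };' into an 8-byte array would
   produce the bytes 01 00 00 00 02 00 00 00, which is what ends up in
   the DW_AT_const_value block.  */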
19442
19443 static bool
19444 native_encode_initializer (tree init, unsigned char *array, int size)
19445 {
19446 tree type;
19447
19448 if (init == NULL_TREE)
19449 return false;
19450
19451 STRIP_NOPS (init);
19452 switch (TREE_CODE (init))
19453 {
19454 case STRING_CST:
19455 type = TREE_TYPE (init);
19456 if (TREE_CODE (type) == ARRAY_TYPE)
19457 {
19458 tree enttype = TREE_TYPE (type);
19459 scalar_int_mode mode;
19460
19461 if (!is_int_mode (TYPE_MODE (enttype), &mode)
19462 || GET_MODE_SIZE (mode) != 1)
19463 return false;
19464 if (int_size_in_bytes (type) != size)
19465 return false;
19466 if (size > TREE_STRING_LENGTH (init))
19467 {
19468 memcpy (array, TREE_STRING_POINTER (init),
19469 TREE_STRING_LENGTH (init));
19470 memset (array + TREE_STRING_LENGTH (init),
19471 '\0', size - TREE_STRING_LENGTH (init));
19472 }
19473 else
19474 memcpy (array, TREE_STRING_POINTER (init), size);
19475 return true;
19476 }
19477 return false;
19478 case CONSTRUCTOR:
19479 type = TREE_TYPE (init);
19480 if (int_size_in_bytes (type) != size)
19481 return false;
19482 if (TREE_CODE (type) == ARRAY_TYPE)
19483 {
19484 HOST_WIDE_INT min_index;
19485 unsigned HOST_WIDE_INT cnt;
19486 int curpos = 0, fieldsize;
19487 constructor_elt *ce;
19488
19489 if (TYPE_DOMAIN (type) == NULL_TREE
19490 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19491 return false;
19492
19493 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19494 if (fieldsize <= 0)
19495 return false;
19496
19497 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19498 memset (array, '\0', size);
19499 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19500 {
19501 tree val = ce->value;
19502 tree index = ce->index;
19503 int pos = curpos;
19504 if (index && TREE_CODE (index) == RANGE_EXPR)
19505 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19506 * fieldsize;
19507 else if (index)
19508 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19509
19510 if (val)
19511 {
19512 STRIP_NOPS (val);
19513 if (!native_encode_initializer (val, array + pos, fieldsize))
19514 return false;
19515 }
19516 curpos = pos + fieldsize;
19517 if (index && TREE_CODE (index) == RANGE_EXPR)
19518 {
19519 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19520 - tree_to_shwi (TREE_OPERAND (index, 0));
19521 while (count-- > 0)
19522 {
19523 if (val)
19524 memcpy (array + curpos, array + pos, fieldsize);
19525 curpos += fieldsize;
19526 }
19527 }
19528 gcc_assert (curpos <= size);
19529 }
19530 return true;
19531 }
19532 else if (TREE_CODE (type) == RECORD_TYPE
19533 || TREE_CODE (type) == UNION_TYPE)
19534 {
19535 tree field = NULL_TREE;
19536 unsigned HOST_WIDE_INT cnt;
19537 constructor_elt *ce;
19538
19539 if (int_size_in_bytes (type) != size)
19540 return false;
19541
19542 if (TREE_CODE (type) == RECORD_TYPE)
19543 field = TYPE_FIELDS (type);
19544
19545 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19546 {
19547 tree val = ce->value;
19548 int pos, fieldsize;
19549
19550 if (ce->index != 0)
19551 field = ce->index;
19552
19553 if (val)
19554 STRIP_NOPS (val);
19555
19556 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19557 return false;
19558
19559 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19560 && TYPE_DOMAIN (TREE_TYPE (field))
19561 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19562 return false;
19563 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19564 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19565 return false;
19566 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19567 pos = int_byte_position (field);
19568 gcc_assert (pos + fieldsize <= size);
19569 if (val && fieldsize != 0
19570 && !native_encode_initializer (val, array + pos, fieldsize))
19571 return false;
19572 }
19573 return true;
19574 }
19575 return false;
19576 case VIEW_CONVERT_EXPR:
19577 case NON_LVALUE_EXPR:
19578 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19579 default:
19580 return native_encode_expr (init, array, size) == size;
19581 }
19582 }
19583
19584 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19585 attribute is the const value T. */
19586
19587 static bool
19588 tree_add_const_value_attribute (dw_die_ref die, tree t)
19589 {
19590 tree init;
19591 tree type = TREE_TYPE (t);
19592 rtx rtl;
19593
19594 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19595 return false;
19596
19597 init = t;
19598 gcc_assert (!DECL_P (init));
19599
19600 if (TREE_CODE (init) == INTEGER_CST)
19601 {
19602 if (tree_fits_uhwi_p (init))
19603 {
19604 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
19605 return true;
19606 }
19607 if (tree_fits_shwi_p (init))
19608 {
19609 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
19610 return true;
19611 }
19612 }
19613 if (! early_dwarf)
19614 {
19615 rtl = rtl_for_decl_init (init, type);
19616 if (rtl)
19617 return add_const_value_attribute (die, rtl);
19618 }
19619 /* If the host and target are sane, try harder. */
19620 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19621 && initializer_constant_valid_p (init, type))
19622 {
19623 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19624 if (size > 0 && (int) size == size)
19625 {
19626 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19627
19628 if (native_encode_initializer (init, array, size))
19629 {
19630 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19631 return true;
19632 }
19633 ggc_free (array);
19634 }
19635 }
19636 return false;
19637 }
19638
19639 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19640 attribute is the const value of T, where T is an integral constant
19641 variable with static storage duration
19642 (so it can't be a PARM_DECL or a RESULT_DECL). */
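/* For example (hedged, C++ source assumed): a namespace-scope declaration
   such as `static const int answer = 42;' that never gets a memory
   location can still be described by giving its DIE DW_AT_const_value 42
   through this function.  */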
19643
19644 static bool
19645 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19646 {
19647
19648 if (!decl
19649 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19650 || (VAR_P (decl) && !TREE_STATIC (decl)))
19651 return false;
19652
19653 if (TREE_READONLY (decl)
19654 && ! TREE_THIS_VOLATILE (decl)
19655 && DECL_INITIAL (decl))
19656 /* OK */;
19657 else
19658 return false;
19659
19660 /* Don't add DW_AT_const_value if abstract origin already has one. */
19661 if (get_AT (var_die, DW_AT_const_value))
19662 return false;
19663
19664 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19665 }
19666
19667 /* Convert the CFI instructions for the current function into a
19668 location list. This is used for DW_AT_frame_base when we are targeting
19669 a dwarf2 consumer that does not support the dwarf3
19670 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19671 expressions. */
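/* A hedged illustration of the output (the register numbers and labels
   are assumptions in the style of x86-64 DWARF numbering, not literal
   output of this file): the resulting list might read

     [.LFB0, .LCFI0)  DW_OP_breg7 (rsp) + 8
     [.LCFI0, .LFE0)  DW_OP_breg6 (rbp) + 16

   i.e. one entry per address range over which the CFA expression stays
   constant.  */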
19672
19673 static dw_loc_list_ref
19674 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19675 {
19676 int ix;
19677 dw_fde_ref fde;
19678 dw_loc_list_ref list, *list_tail;
19679 dw_cfi_ref cfi;
19680 dw_cfa_location last_cfa, next_cfa;
19681 const char *start_label, *last_label, *section;
19682 dw_cfa_location remember;
19683
19684 fde = cfun->fde;
19685 gcc_assert (fde != NULL);
19686
19687 section = secname_for_decl (current_function_decl);
19688 list_tail = &list;
19689 list = NULL;
19690
19691 memset (&next_cfa, 0, sizeof (next_cfa));
19692 next_cfa.reg = INVALID_REGNUM;
19693 remember = next_cfa;
19694
19695 start_label = fde->dw_fde_begin;
19696
19697 /* ??? Bald assumption that the CIE opcode list does not contain
19698 advance opcodes. */
19699 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19700 lookup_cfa_1 (cfi, &next_cfa, &remember);
19701
19702 last_cfa = next_cfa;
19703 last_label = start_label;
19704
19705 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19706 {
19707 /* If the first partition contained no CFI adjustments, the
19708 CIE opcodes apply to the whole first partition. */
19709 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19710 fde->dw_fde_begin, fde->dw_fde_end, section);
19711 list_tail = &(*list_tail)->dw_loc_next;
19712 start_label = last_label = fde->dw_fde_second_begin;
19713 }
19714
19715 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19716 {
19717 switch (cfi->dw_cfi_opc)
19718 {
19719 case DW_CFA_set_loc:
19720 case DW_CFA_advance_loc1:
19721 case DW_CFA_advance_loc2:
19722 case DW_CFA_advance_loc4:
19723 if (!cfa_equal_p (&last_cfa, &next_cfa))
19724 {
19725 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19726 start_label, last_label, section);
19727
19728 list_tail = &(*list_tail)->dw_loc_next;
19729 last_cfa = next_cfa;
19730 start_label = last_label;
19731 }
19732 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19733 break;
19734
19735 case DW_CFA_advance_loc:
19736 /* The encoding is complex enough that we should never emit this. */
19737 gcc_unreachable ();
19738
19739 default:
19740 lookup_cfa_1 (cfi, &next_cfa, &remember);
19741 break;
19742 }
19743 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19744 {
19745 if (!cfa_equal_p (&last_cfa, &next_cfa))
19746 {
19747 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19748 start_label, last_label, section);
19749
19750 list_tail = &(*list_tail)->dw_loc_next;
19751 last_cfa = next_cfa;
19752 start_label = last_label;
19753 }
19754 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19755 start_label, fde->dw_fde_end, section);
19756 list_tail = &(*list_tail)->dw_loc_next;
19757 start_label = last_label = fde->dw_fde_second_begin;
19758 }
19759 }
19760
19761 if (!cfa_equal_p (&last_cfa, &next_cfa))
19762 {
19763 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19764 start_label, last_label, section);
19765 list_tail = &(*list_tail)->dw_loc_next;
19766 start_label = last_label;
19767 }
19768
19769 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19770 start_label,
19771 fde->dw_fde_second_begin
19772 ? fde->dw_fde_second_end : fde->dw_fde_end,
19773 section);
19774
19775 if (list && list->dw_loc_next)
19776 gen_llsym (list);
19777
19778 return list;
19779 }
19780
19781 /* Compute a displacement from the "steady-state frame pointer" to the
19782 frame base (often the same as the CFA), and store it in
19783 frame_pointer_fb_offset. OFFSET is added to the displacement
19784 before the latter is negated. */
19785
19786 static void
19787 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
19788 {
19789 rtx reg, elim;
19790
19791 #ifdef FRAME_POINTER_CFA_OFFSET
19792 reg = frame_pointer_rtx;
19793 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19794 #else
19795 reg = arg_pointer_rtx;
19796 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19797 #endif
19798
19799 elim = (ira_use_lra_p
19800 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19801 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19802 elim = strip_offset_and_add (elim, &offset);
19803
19804 frame_pointer_fb_offset = -offset;
19805
19806 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19807 in which to eliminate. This is because its stack pointer isn't
19808 directly accessible as a register within the ISA. To work around
19809 this, assume that while we cannot provide a proper value for
19810 frame_pointer_fb_offset, we won't need one either. */
19811 frame_pointer_fb_offset_valid
19812 = ((SUPPORTS_STACK_ALIGNMENT
19813 && (elim == hard_frame_pointer_rtx
19814 || elim == stack_pointer_rtx))
19815 || elim == (frame_pointer_needed
19816 ? hard_frame_pointer_rtx
19817 : stack_pointer_rtx));
19818 }
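/* Numeric illustration (assumed values only): if the register chosen above
   eliminates to `sp + 32' and the CFA offset macro contributes 16, OFFSET
   ends up as 48 and frame_pointer_fb_offset is set to -48.  */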
19819
19820 /* Generate a DW_AT_name attribute given some string value to be included as
19821 the value of the attribute. */
19822
19823 static void
19824 add_name_attribute (dw_die_ref die, const char *name_string)
19825 {
19826 if (name_string != NULL && *name_string != 0)
19827 {
19828 if (demangle_name_func)
19829 name_string = (*demangle_name_func) (name_string);
19830
19831 add_AT_string (die, DW_AT_name, name_string);
19832 }
19833 }
19834
19835 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
19836 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19837 of TYPE accordingly.
19838
19839 ??? This is a temporary measure until after we're able to generate
19840 regular DWARF for the complex Ada type system. */
19841
19842 static void
19843 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19844 dw_die_ref context_die)
19845 {
19846 tree dtype;
19847 dw_die_ref dtype_die;
19848
19849 if (!lang_hooks.types.descriptive_type)
19850 return;
19851
19852 dtype = lang_hooks.types.descriptive_type (type);
19853 if (!dtype)
19854 return;
19855
19856 dtype_die = lookup_type_die (dtype);
19857 if (!dtype_die)
19858 {
19859 gen_type_die (dtype, context_die);
19860 dtype_die = lookup_type_die (dtype);
19861 gcc_assert (dtype_die);
19862 }
19863
19864 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19865 }
19866
19867 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19868
19869 static const char *
19870 comp_dir_string (void)
19871 {
19872 const char *wd;
19873 char *wd1;
19874 static const char *cached_wd = NULL;
19875
19876 if (cached_wd != NULL)
19877 return cached_wd;
19878
19879 wd = get_src_pwd ();
19880 if (wd == NULL)
19881 return NULL;
19882
19883 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19884 {
19885 int wdlen;
19886
19887 wdlen = strlen (wd);
19888 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19889 strcpy (wd1, wd);
19890 wd1 [wdlen] = DIR_SEPARATOR;
19891 wd1 [wdlen + 1] = 0;
19892 wd = wd1;
19893 }
19894
19895 cached_wd = remap_debug_filename (wd);
19896 return cached_wd;
19897 }
19898
19899 /* Generate a DW_AT_comp_dir attribute for DIE. */
19900
19901 static void
19902 add_comp_dir_attribute (dw_die_ref die)
19903 {
19904 const char * wd = comp_dir_string ();
19905 if (wd != NULL)
19906 add_AT_string (die, DW_AT_comp_dir, wd);
19907 }
19908
19909 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19910 pointer computation, ...), output a representation for that bound according
19911 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19912 loc_list_from_tree for the meaning of CONTEXT. */
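/* For instance (hedged): a constant bound such as 9 is emitted directly
   when dw_scalar_form_constant is among FORMS; a bound stored in a
   discriminant field of an enclosing record (an Ada case) may be emitted
   as a reference to that field's DIE when dw_scalar_form_reference is
   allowed; and a computed bound falls back to a DWARF expression when
   dw_scalar_form_exprloc is allowed.  */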
19913
19914 static void
19915 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19916 int forms, struct loc_descr_context *context)
19917 {
19918 dw_die_ref context_die, decl_die;
19919 dw_loc_list_ref list;
19920 bool strip_conversions = true;
19921 bool placeholder_seen = false;
19922
19923 while (strip_conversions)
19924 switch (TREE_CODE (value))
19925 {
19926 case ERROR_MARK:
19927 case SAVE_EXPR:
19928 return;
19929
19930 CASE_CONVERT:
19931 case VIEW_CONVERT_EXPR:
19932 value = TREE_OPERAND (value, 0);
19933 break;
19934
19935 default:
19936 strip_conversions = false;
19937 break;
19938 }
19939
19940 /* If possible and permitted, output the attribute as a constant. */
19941 if ((forms & dw_scalar_form_constant) != 0
19942 && TREE_CODE (value) == INTEGER_CST)
19943 {
19944 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19945
19946 /* If HOST_WIDE_INT is big enough then represent the bound as
19947 a constant value. We need to choose a form based on
19948 whether the type is signed or unsigned. We cannot just
19949 call add_AT_unsigned if the value itself is positive
19950 (add_AT_unsigned might add the unsigned value encoded as
19951 DW_FORM_data[1248]). Some DWARF consumers will lookup the
19952 bounds type and then sign extend any unsigned values found
19953 for signed types. This is needed only for
19954 DW_AT_{lower,upper}_bound, since for most other attributes,
19955 consumers will treat DW_FORM_data[1248] as unsigned values,
19956 regardless of the underlying type. */
19957 if (prec <= HOST_BITS_PER_WIDE_INT
19958 || tree_fits_uhwi_p (value))
19959 {
19960 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19961 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19962 else
19963 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19964 }
19965 else
19966 /* Otherwise represent the bound as an unsigned value with
19967 the precision of its type. The precision and signedness
19968 of the type will be necessary to re-interpret it
19969 unambiguously. */
19970 add_AT_wide (die, attr, wi::to_wide (value));
19971 return;
19972 }
19973
19974 /* Otherwise, if it's possible and permitted too, output a reference to
19975 another DIE. */
19976 if ((forms & dw_scalar_form_reference) != 0)
19977 {
19978 tree decl = NULL_TREE;
19979
19980 /* Some type attributes reference an outer type. For instance, the upper
19981 bound of an array may reference an embedding record (this happens in
19982 Ada). */
19983 if (TREE_CODE (value) == COMPONENT_REF
19984 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
19985 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
19986 decl = TREE_OPERAND (value, 1);
19987
19988 else if (VAR_P (value)
19989 || TREE_CODE (value) == PARM_DECL
19990 || TREE_CODE (value) == RESULT_DECL)
19991 decl = value;
19992
19993 if (decl != NULL_TREE)
19994 {
19995 dw_die_ref decl_die = lookup_decl_die (decl);
19996
19997 /* ??? Can this happen, or should the variable have been bound
19998 first? Probably it can, since I imagine that we try to create
19999 the types of parameters in the order in which they exist in
20000 the list, and won't have created a forward reference to a
20001 later parameter. */
20002 if (decl_die != NULL)
20003 {
20004 add_AT_die_ref (die, attr, decl_die);
20005 return;
20006 }
20007 }
20008 }
20009
20010 /* Last chance: try to create a stack operation procedure to evaluate the
20011 value. Do nothing if even that is not possible or permitted. */
20012 if ((forms & dw_scalar_form_exprloc) == 0)
20013 return;
20014
20015 list = loc_list_from_tree (value, 2, context);
20016 if (context && context->placeholder_arg)
20017 {
20018 placeholder_seen = context->placeholder_seen;
20019 context->placeholder_seen = false;
20020 }
20021 if (list == NULL || single_element_loc_list_p (list))
20022 {
20023 /* If this attribute is neither a reference nor a constant, it is
20024 a DWARF expression rather than a location description. For that,
20025 loc_list_from_tree (value, 0, &context) is needed. */
20026 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20027 if (list2 && single_element_loc_list_p (list2))
20028 {
20029 if (placeholder_seen)
20030 {
20031 struct dwarf_procedure_info dpi;
20032 dpi.fndecl = NULL_TREE;
20033 dpi.args_count = 1;
20034 if (!resolve_args_picking (list2->expr, 1, &dpi))
20035 return;
20036 }
20037 add_AT_loc (die, attr, list2->expr);
20038 return;
20039 }
20040 }
20041
20042 /* If that failed to give a single-element location list, fall back to
20043 outputting this as a reference... still only if permitted. */
20044 if (list == NULL
20045 || (forms & dw_scalar_form_reference) == 0
20046 || placeholder_seen)
20047 return;
20048
20049 if (current_function_decl == 0)
20050 context_die = comp_unit_die ();
20051 else
20052 context_die = lookup_decl_die (current_function_decl);
20053
20054 decl_die = new_die (DW_TAG_variable, context_die, value);
20055 add_AT_flag (decl_die, DW_AT_artificial, 1);
20056 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20057 context_die);
20058 add_AT_location_description (decl_die, DW_AT_location, list);
20059 add_AT_die_ref (die, attr, decl_die);
20060 }
20061
20062 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20063 default. */
20064
20065 static int
20066 lower_bound_default (void)
20067 {
20068 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20069 {
20070 case DW_LANG_C:
20071 case DW_LANG_C89:
20072 case DW_LANG_C99:
20073 case DW_LANG_C11:
20074 case DW_LANG_C_plus_plus:
20075 case DW_LANG_C_plus_plus_11:
20076 case DW_LANG_C_plus_plus_14:
20077 case DW_LANG_ObjC:
20078 case DW_LANG_ObjC_plus_plus:
20079 return 0;
20080 case DW_LANG_Fortran77:
20081 case DW_LANG_Fortran90:
20082 case DW_LANG_Fortran95:
20083 case DW_LANG_Fortran03:
20084 case DW_LANG_Fortran08:
20085 return 1;
20086 case DW_LANG_UPC:
20087 case DW_LANG_D:
20088 case DW_LANG_Python:
20089 return dwarf_version >= 4 ? 0 : -1;
20090 case DW_LANG_Ada95:
20091 case DW_LANG_Ada83:
20092 case DW_LANG_Cobol74:
20093 case DW_LANG_Cobol85:
20094 case DW_LANG_Modula2:
20095 case DW_LANG_PLI:
20096 return dwarf_version >= 4 ? 1 : -1;
20097 default:
20098 return -1;
20099 }
20100 }
20101
20102 /* Given a tree node describing an array bound (either lower or upper) output
20103 a representation for that bound. */
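/* For example (hedged): for the C declaration `int a[10]' the lower bound
   is 0, which matches the C default returned by lower_bound_default and
   is therefore omitted, while DW_AT_upper_bound 9 is emitted.  */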
20104
20105 static void
20106 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20107 tree bound, struct loc_descr_context *context)
20108 {
20109 int dflt;
20110
20111 while (1)
20112 switch (TREE_CODE (bound))
20113 {
20114 /* Strip all conversions. */
20115 CASE_CONVERT:
20116 case VIEW_CONVERT_EXPR:
20117 bound = TREE_OPERAND (bound, 0);
20118 break;
20119
20120 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20121 are even omitted when they are the default. */
20122 case INTEGER_CST:
20123 /* If the value for this bound is the default one, we can even omit the
20124 attribute. */
20125 if (bound_attr == DW_AT_lower_bound
20126 && tree_fits_shwi_p (bound)
20127 && (dflt = lower_bound_default ()) != -1
20128 && tree_to_shwi (bound) == dflt)
20129 return;
20130
20131 /* FALLTHRU */
20132
20133 default:
20134 /* Because of the complex interactions there can be with other GNAT
20135 encodings, GDB isn't ready yet to handle a proper DWARF description
20136 for self-referential subrange bounds: let GNAT encodings do the
20137 magic in such a case. */
20138 if (is_ada ()
20139 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20140 && contains_placeholder_p (bound))
20141 return;
20142
20143 add_scalar_info (subrange_die, bound_attr, bound,
20144 dw_scalar_form_constant
20145 | dw_scalar_form_exprloc
20146 | dw_scalar_form_reference,
20147 context);
20148 return;
20149 }
20150 }
20151
20152 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20153 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20154 Note that the block of subscript information for an array type also
20155 includes information about the element type of the given array type.
20156
20157 This function reuses previously set type and bound information if
20158 available. */
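/* For example (hedged): for the C declaration `int a[2][3]' with
   COLLAPSE_P true, TYPE_DIE receives two DW_TAG_subrange_type children,
   one with bounds 0..1 and one with bounds 0..2, describing the two
   dimensions in a flat sequence.  */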
20159
20160 static void
20161 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20162 {
20163 unsigned dimension_number;
20164 tree lower, upper;
20165 dw_die_ref child = type_die->die_child;
20166
20167 for (dimension_number = 0;
20168 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20169 type = TREE_TYPE (type), dimension_number++)
20170 {
20171 tree domain = TYPE_DOMAIN (type);
20172
20173 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20174 break;
20175
20176 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20177 and (in GNU C only) variable bounds. Handle all three forms
20178 here. */
20179
20180 /* Find and reuse a previously generated DW_TAG_subrange_type if
20181 available.
20182
20183 For multi-dimensional arrays, as we iterate through the
20184 various dimensions in the enclosing for loop above, we also
20185 iterate through the DIE children and pick at each
20186 DW_TAG_subrange_type previously generated (if available).
20187 Each child DW_TAG_subrange_type DIE describes the range of
20188 the current dimension. At this point we should have as many
20189 DW_TAG_subrange_type's as we have dimensions in the
20190 array. */
20191 dw_die_ref subrange_die = NULL;
20192 if (child)
20193 while (1)
20194 {
20195 child = child->die_sib;
20196 if (child->die_tag == DW_TAG_subrange_type)
20197 subrange_die = child;
20198 if (child == type_die->die_child)
20199 {
20200 /* If we wrapped around, stop looking next time. */
20201 child = NULL;
20202 break;
20203 }
20204 if (child->die_tag == DW_TAG_subrange_type)
20205 break;
20206 }
20207 if (!subrange_die)
20208 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20209
20210 if (domain)
20211 {
20212 /* We have an array type with specified bounds. */
20213 lower = TYPE_MIN_VALUE (domain);
20214 upper = TYPE_MAX_VALUE (domain);
20215
20216 /* Define the index type. */
20217 if (TREE_TYPE (domain)
20218 && !get_AT (subrange_die, DW_AT_type))
20219 {
20220 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20221 TREE_TYPE field. We can't emit debug info for this
20222 because it is an unnamed integral type. */
20223 if (TREE_CODE (domain) == INTEGER_TYPE
20224 && TYPE_NAME (domain) == NULL_TREE
20225 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20226 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20227 ;
20228 else
20229 add_type_attribute (subrange_die, TREE_TYPE (domain),
20230 TYPE_UNQUALIFIED, false, type_die);
20231 }
20232
20233 /* ??? If upper is NULL, the array has unspecified length,
20234 but it does have a lower bound. This happens with Fortran
20235 dimension arr(N:*)
20236 Since the debugger is definitely going to need to know N
20237 to produce useful results, go ahead and output the lower
20238 bound solo, and hope the debugger can cope. */
20239
20240 if (!get_AT (subrange_die, DW_AT_lower_bound))
20241 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20242 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20243 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20244 }
20245
20246 /* Otherwise we have an array type with an unspecified length. The
20247 DWARF-2 spec does not say how to handle this; let's just leave out the
20248 bounds. */
20249 }
20250 }
20251
20252 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20253
20254 static void
20255 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20256 {
20257 dw_die_ref decl_die;
20258 HOST_WIDE_INT size;
20259 dw_loc_descr_ref size_expr = NULL;
20260
20261 switch (TREE_CODE (tree_node))
20262 {
20263 case ERROR_MARK:
20264 size = 0;
20265 break;
20266 case ENUMERAL_TYPE:
20267 case RECORD_TYPE:
20268 case UNION_TYPE:
20269 case QUAL_UNION_TYPE:
20270 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20271 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20272 {
20273 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20274 return;
20275 }
20276 size_expr = type_byte_size (tree_node, &size);
20277 break;
20278 case FIELD_DECL:
20279 /* For a data member of a struct or union, the DW_AT_byte_size is
20280 generally given as the number of bytes normally allocated for an
20281 object of the *declared* type of the member itself. This is true
20282 even for bit-fields. */
20283 size = int_size_in_bytes (field_type (tree_node));
20284 break;
20285 default:
20286 gcc_unreachable ();
20287 }
20288
20289 /* Support for dynamically-sized objects was introduced by DWARFv3.
20290 At the moment, GDB does not handle variable byte sizes very well,
20291 though. */
20292 if ((dwarf_version >= 3 || !dwarf_strict)
20293 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20294 && size_expr != NULL)
20295 add_AT_loc (die, DW_AT_byte_size, size_expr);
20296
20297 /* Note that `size' might be -1 when we get to this point. If it is, that
20298 indicates that the byte size of the entity in question is variable and
20299 that we could not generate a DWARF expression that computes it. */
20300 if (size >= 0)
20301 add_AT_unsigned (die, DW_AT_byte_size, size);
20302 }
20303
20304 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20305 alignment. */
20306
20307 static void
20308 add_alignment_attribute (dw_die_ref die, tree tree_node)
20309 {
20310 if (dwarf_version < 5 && dwarf_strict)
20311 return;
20312
20313 unsigned align;
20314
20315 if (DECL_P (tree_node))
20316 {
20317 if (!DECL_USER_ALIGN (tree_node))
20318 return;
20319
20320 align = DECL_ALIGN_UNIT (tree_node);
20321 }
20322 else if (TYPE_P (tree_node))
20323 {
20324 if (!TYPE_USER_ALIGN (tree_node))
20325 return;
20326
20327 align = TYPE_ALIGN_UNIT (tree_node);
20328 }
20329 else
20330 gcc_unreachable ();
20331
20332 add_AT_unsigned (die, DW_AT_alignment, align);
20333 }
20334
20335 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20336 which specifies the distance in bits from the highest order bit of the
20337 "containing object" for the bit-field to the highest order bit of the
20338 bit-field itself.
20339
20340 For any given bit-field, the "containing object" is a hypothetical object
20341 (of some integral or enum type) within which the given bit-field lives. The
20342 type of this hypothetical "containing object" is always the same as the
20343 declared type of the individual bit-field itself. The determination of the
20344 exact location of the "containing object" for a bit-field is rather
20345 complicated. It's handled by the `field_byte_offset' function (above).
20346
20347 CTX is required: see the comment for VLR_CONTEXT.
20348
20349 Note that it is the size (in bytes) of the hypothetical "containing object"
20350 which will be given in the DW_AT_byte_size attribute for this bit-field.
20351 (See `add_byte_size_attribute' above.) */
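/* Worked example (assumed little-endian layout, and assuming
   field_byte_offset reports a containing-object offset of 0): for

     struct s { int a : 3; int b : 5; };

   field `b' has int_bit_position 3 and DECL_SIZE 5, and its containing
   object is the declared 32-bit `int', so the computation below yields
   DW_AT_bit_offset = (0 * 8 + 32) - (3 + 5) = 24, i.e. the bit-field's
   highest-order bit sits 24 bits below the highest-order bit of the
   containing `int'.  */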
20352
20353 static inline void
20354 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20355 {
20356 HOST_WIDE_INT object_offset_in_bytes;
20357 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20358 HOST_WIDE_INT bitpos_int;
20359 HOST_WIDE_INT highest_order_object_bit_offset;
20360 HOST_WIDE_INT highest_order_field_bit_offset;
20361 HOST_WIDE_INT bit_offset;
20362
20363 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20364
20365 /* Must be a field and a bit field. */
20366 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20367
20368 /* We can't yet handle bit-fields whose offsets are variable, so if we
20369 encounter such things, just return without generating any attribute
20370 whatsoever. Likewise if the size is variable or too large. */
20371 if (! tree_fits_shwi_p (bit_position (decl))
20372 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20373 return;
20374
20375 bitpos_int = int_bit_position (decl);
20376
20377 /* Note that the bit offset is always the distance (in bits) from the
20378 highest-order bit of the "containing object" to the highest-order bit of
20379 the bit-field itself. Since the "high-order end" of any object or field
20380 is different on big-endian and little-endian machines, the computation
20381 below must take account of these differences. */
20382 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20383 highest_order_field_bit_offset = bitpos_int;
20384
20385 if (! BYTES_BIG_ENDIAN)
20386 {
20387 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20388 highest_order_object_bit_offset +=
20389 simple_type_size_in_bits (original_type);
20390 }
20391
20392 bit_offset
20393 = (! BYTES_BIG_ENDIAN
20394 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20395 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20396
20397 if (bit_offset < 0)
20398 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20399 else
20400 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20401 }
20402
20403 /* For a FIELD_DECL node which represents a bit field, output an attribute
20404 which specifies the length in bits of the given field. */
20405
20406 static inline void
20407 add_bit_size_attribute (dw_die_ref die, tree decl)
20408 {
20409 /* Must be a field and a bit field. */
20410 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20411 && DECL_BIT_FIELD_TYPE (decl));
20412
20413 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20414 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20415 }
20416
20417 /* If the compiled language is ANSI C, then add a 'prototyped'
20418 attribute, if arg types are given for the parameters of a function. */
20419
20420 static inline void
20421 add_prototyped_attribute (dw_die_ref die, tree func_type)
20422 {
20423 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20424 {
20425 case DW_LANG_C:
20426 case DW_LANG_C89:
20427 case DW_LANG_C99:
20428 case DW_LANG_C11:
20429 case DW_LANG_ObjC:
20430 if (prototype_p (func_type))
20431 add_AT_flag (die, DW_AT_prototyped, 1);
20432 break;
20433 default:
20434 break;
20435 }
20436 }
20437
20438 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20439 by looking in the type declaration, the object declaration equate table or
20440 the block mapping. */
20441
20442 static inline dw_die_ref
20443 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20444 {
20445 dw_die_ref origin_die = NULL;
20446
20447 if (DECL_P (origin))
20448 {
20449 dw_die_ref c;
20450 origin_die = lookup_decl_die (origin);
20451 /* "Unwrap" the decls DIE which we put in the imported unit context.
20452 We are looking for the abstract copy here. */
20453 if (in_lto_p
20454 && origin_die
20455 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20456 /* ??? Identify this better. */
20457 && c->with_offset)
20458 origin_die = c;
20459 }
20460 else if (TYPE_P (origin))
20461 origin_die = lookup_type_die (origin);
20462 else if (TREE_CODE (origin) == BLOCK)
20463 origin_die = BLOCK_DIE (origin);
20464
20465 /* XXX: Functions that are never lowered don't always have correct block
20466 trees (in the case of Java they simply have no block tree; the same is true
20467 in some other languages). For these functions, there is nothing we can really do to
20468 output correct debug info for inlined functions in all cases. Rather
20469 than die, we'll just produce deficient debug info now, in that we will
20470 have variables without a proper abstract origin. In the future, when all
20471 functions are lowered, we should re-add a gcc_assert (origin_die)
20472 here. */
20473
20474 if (origin_die)
20475 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20476 return origin_die;
20477 }
20478
20479 /* We do not currently support the pure_virtual attribute. */
20480
20481 static inline void
20482 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20483 {
20484 if (DECL_VINDEX (func_decl))
20485 {
20486 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20487
20488 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20489 add_AT_loc (die, DW_AT_vtable_elem_location,
20490 new_loc_descr (DW_OP_constu,
20491 tree_to_shwi (DECL_VINDEX (func_decl)),
20492 0));
20493
20494 /* GNU extension: Record what type this method came from originally. */
20495 if (debug_info_level > DINFO_LEVEL_TERSE
20496 && DECL_CONTEXT (func_decl))
20497 add_AT_die_ref (die, DW_AT_containing_type,
20498 lookup_type_die (DECL_CONTEXT (func_decl)));
20499 }
20500 }
20501 \f
20502 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20503 given decl. This used to be a vendor extension until after DWARF 4
20504 standardized it. */
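/* For example (hedged, Itanium C++ ABI mangling assumed): for a C++
   function `void foo ()' whose assembler name is "_Z3foov", this adds
   DW_AT_linkage_name "_Z3foov" (or DW_AT_MIPS_linkage_name before
   DWARF 4).  */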
20505
20506 static void
20507 add_linkage_attr (dw_die_ref die, tree decl)
20508 {
20509 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20510
20511 /* Mimic what assemble_name_raw does with a leading '*'. */
20512 if (name[0] == '*')
20513 name = &name[1];
20514
20515 if (dwarf_version >= 4)
20516 add_AT_string (die, DW_AT_linkage_name, name);
20517 else
20518 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20519 }
20520
20521 /* Add source coordinate attributes for the given decl. */
20522
20523 static void
20524 add_src_coords_attributes (dw_die_ref die, tree decl)
20525 {
20526 expanded_location s;
20527
20528 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20529 return;
20530 s = expand_location (DECL_SOURCE_LOCATION (decl));
20531 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20532 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20533 if (debug_column_info && s.column)
20534 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20535 }
20536
20537 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20538
20539 static void
20540 add_linkage_name_raw (dw_die_ref die, tree decl)
20541 {
20542 /* Defer until we have an assembler name set. */
20543 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20544 {
20545 limbo_die_node *asm_name;
20546
20547 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20548 asm_name->die = die;
20549 asm_name->created_for = decl;
20550 asm_name->next = deferred_asm_name;
20551 deferred_asm_name = asm_name;
20552 }
20553 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20554 add_linkage_attr (die, decl);
20555 }
20556
20557 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20558
20559 static void
20560 add_linkage_name (dw_die_ref die, tree decl)
20561 {
20562 if (debug_info_level > DINFO_LEVEL_NONE
20563 && VAR_OR_FUNCTION_DECL_P (decl)
20564 && TREE_PUBLIC (decl)
20565 && !(VAR_P (decl) && DECL_REGISTER (decl))
20566 && die->die_tag != DW_TAG_member)
20567 add_linkage_name_raw (die, decl);
20568 }
20569
20570 /* Add a DW_AT_name attribute and source coordinate attribute for the
20571 given decl, but only if it actually has a name. */
20572
20573 static void
20574 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20575 bool no_linkage_name)
20576 {
20577 tree decl_name;
20578
20579 decl_name = DECL_NAME (decl);
20580 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20581 {
20582 const char *name = dwarf2_name (decl, 0);
20583 if (name)
20584 add_name_attribute (die, name);
20585 if (! DECL_ARTIFICIAL (decl))
20586 add_src_coords_attributes (die, decl);
20587
20588 if (!no_linkage_name)
20589 add_linkage_name (die, decl);
20590 }
20591
20592 #ifdef VMS_DEBUGGING_INFO
20593 /* Get the function's name, as described by its RTL. This may be different
20594 from the DECL_NAME name used in the source file. */
20595 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20596 {
20597 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20598 XEXP (DECL_RTL (decl), 0), false);
20599 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20600 }
20601 #endif /* VMS_DEBUGGING_INFO */
20602 }
20603
20604 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20605
20606 static void
20607 add_discr_value (dw_die_ref die, dw_discr_value *value)
20608 {
20609 dw_attr_node attr;
20610
20611 attr.dw_attr = DW_AT_discr_value;
20612 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20613 attr.dw_attr_val.val_entry = NULL;
20614 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20615 if (value->pos)
20616 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20617 else
20618 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20619 add_dwarf_attr (die, &attr);
20620 }
20621
20622 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20623
20624 static void
20625 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20626 {
20627 dw_attr_node attr;
20628
20629 attr.dw_attr = DW_AT_discr_list;
20630 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20631 attr.dw_attr_val.val_entry = NULL;
20632 attr.dw_attr_val.v.val_discr_list = discr_list;
20633 add_dwarf_attr (die, &attr);
20634 }
20635
20636 static inline dw_discr_list_ref
20637 AT_discr_list (dw_attr_node *attr)
20638 {
20639 return attr->dw_attr_val.v.val_discr_list;
20640 }
20641
20642 #ifdef VMS_DEBUGGING_INFO
20643 /* Output the debug main pointer die for VMS */
20644
20645 void
20646 dwarf2out_vms_debug_main_pointer (void)
20647 {
20648 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20649 dw_die_ref die;
20650
20651 /* Allocate the VMS debug main subprogram die. */
20652 die = new_die_raw (DW_TAG_subprogram);
20653 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20654 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20655 current_function_funcdef_no);
20656 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20657
20658 /* Make it the first child of comp_unit_die (). */
20659 die->die_parent = comp_unit_die ();
20660 if (comp_unit_die ()->die_child)
20661 {
20662 die->die_sib = comp_unit_die ()->die_child->die_sib;
20663 comp_unit_die ()->die_child->die_sib = die;
20664 }
20665 else
20666 {
20667 die->die_sib = die;
20668 comp_unit_die ()->die_child = die;
20669 }
20670 }
20671 #endif /* VMS_DEBUGGING_INFO */
20672
20673 /* Push a new declaration scope. */
20674
20675 static void
20676 push_decl_scope (tree scope)
20677 {
20678 vec_safe_push (decl_scope_table, scope);
20679 }
20680
20681 /* Pop a declaration scope. */
20682
20683 static inline void
20684 pop_decl_scope (void)
20685 {
20686 decl_scope_table->pop ();
20687 }
20688
20689 /* walk_tree helper function for uses_local_type, below. */
20690
20691 static tree
20692 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20693 {
20694 if (!TYPE_P (*tp))
20695 *walk_subtrees = 0;
20696 else
20697 {
20698 tree name = TYPE_NAME (*tp);
20699 if (name && DECL_P (name) && decl_function_context (name))
20700 return *tp;
20701 }
20702 return NULL_TREE;
20703 }
20704
20705 /* If TYPE involves a function-local type (including a local typedef to a
20706 non-local type), returns that type; otherwise returns NULL_TREE. */
20707
20708 static tree
20709 uses_local_type (tree type)
20710 {
20711 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20712 return used;
20713 }
20714
20715 /* Return the DIE for the scope that immediately contains this type.
20716 Non-named types that do not involve a function-local type get global
20717 scope. Named types nested in namespaces or other types get their
20718 containing scope. All other types (i.e. function-local named types) get
20719 the current active scope. */
20720
20721 static dw_die_ref
20722 scope_die_for (tree t, dw_die_ref context_die)
20723 {
20724 dw_die_ref scope_die = NULL;
20725 tree containing_scope;
20726
20727 /* Non-types always go in the current scope. */
20728 gcc_assert (TYPE_P (t));
20729
20730 /* Use the scope of the typedef, rather than the scope of the type
20731 it refers to. */
20732 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20733 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20734 else
20735 containing_scope = TYPE_CONTEXT (t);
20736
20737 /* Use the containing namespace if there is one. */
20738 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20739 {
20740 if (context_die == lookup_decl_die (containing_scope))
20741 /* OK */;
20742 else if (debug_info_level > DINFO_LEVEL_TERSE)
20743 context_die = get_context_die (containing_scope);
20744 else
20745 containing_scope = NULL_TREE;
20746 }
20747
20748 /* Ignore function type "scopes" from the C frontend. They mean that
20749 a tagged type is local to a parmlist of a function declarator, but
20750 that isn't useful to DWARF. */
20751 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20752 containing_scope = NULL_TREE;
20753
20754 if (SCOPE_FILE_SCOPE_P (containing_scope))
20755 {
20756 /* If T uses a local type keep it local as well, to avoid references
20757 to function-local DIEs from outside the function. */
20758 if (current_function_decl && uses_local_type (t))
20759 scope_die = context_die;
20760 else
20761 scope_die = comp_unit_die ();
20762 }
20763 else if (TYPE_P (containing_scope))
20764 {
20765 /* For types, we can just look up the appropriate DIE. */
20766 if (debug_info_level > DINFO_LEVEL_TERSE)
20767 scope_die = get_context_die (containing_scope);
20768 else
20769 {
20770 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20771 if (scope_die == NULL)
20772 scope_die = comp_unit_die ();
20773 }
20774 }
20775 else
20776 scope_die = context_die;
20777
20778 return scope_die;
20779 }
20780
20781 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20782
20783 static inline int
20784 local_scope_p (dw_die_ref context_die)
20785 {
20786 for (; context_die; context_die = context_die->die_parent)
20787 if (context_die->die_tag == DW_TAG_inlined_subroutine
20788 || context_die->die_tag == DW_TAG_subprogram)
20789 return 1;
20790
20791 return 0;
20792 }
20793
20794 /* Returns nonzero if CONTEXT_DIE is a class. */
20795
20796 static inline int
20797 class_scope_p (dw_die_ref context_die)
20798 {
20799 return (context_die
20800 && (context_die->die_tag == DW_TAG_structure_type
20801 || context_die->die_tag == DW_TAG_class_type
20802 || context_die->die_tag == DW_TAG_interface_type
20803 || context_die->die_tag == DW_TAG_union_type));
20804 }
20805
20806 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20807 whether or not to treat a DIE in this context as a declaration. */
20808
20809 static inline int
20810 class_or_namespace_scope_p (dw_die_ref context_die)
20811 {
20812 return (class_scope_p (context_die)
20813 || (context_die && context_die->die_tag == DW_TAG_namespace));
20814 }
20815
20816 /* Many forms of DIEs require a "type description" attribute. This
20817 routine locates the proper "type descriptor" die for the type given
20818 by 'type' plus any additional qualifiers given by 'cv_quals', and
20819 adds a DW_AT_type attribute below the given die. */
20820
20821 static void
20822 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20823 bool reverse, dw_die_ref context_die)
20824 {
20825 enum tree_code code = TREE_CODE (type);
20826 dw_die_ref type_die = NULL;
20827
20828 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20829 or fixed-point type, use the inner type. This is because we have no
20830 support for unnamed types in base_type_die. This can happen if this is
20831 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
20832 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20833 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20834 type = TREE_TYPE (type), code = TREE_CODE (type);
20835
20836 if (code == ERROR_MARK
20837 /* Handle a special case. For functions whose return type is void, we
20838 generate *no* type attribute. (Note that no object may have type
20839 `void', so this only applies to function return types). */
20840 || code == VOID_TYPE)
20841 return;
20842
20843 type_die = modified_type_die (type,
20844 cv_quals | TYPE_QUALS (type),
20845 reverse,
20846 context_die);
20847
20848 if (type_die != NULL)
20849 add_AT_die_ref (object_die, DW_AT_type, type_die);
20850 }
20851
20852 /* Given an object die, add the calling convention attribute for the
20853 function call type. */
20854 static void
20855 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20856 {
20857 enum dwarf_calling_convention value = DW_CC_normal;
20858
20859 value = ((enum dwarf_calling_convention)
20860 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20861
20862 if (is_fortran ()
20863 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
20864 {
20865 /* DWARF 2 doesn't provide a way to identify a program's source-level
20866 entry point. DW_AT_calling_convention attributes are only meant
20867 to describe functions' calling conventions. However, lacking a
20868 better way to signal the Fortran main program, we used this for
20869 a long time, following existing custom. Now, DWARF 4 has
20870 DW_AT_main_subprogram, which we add below, but some tools still
20871 rely on the old way, which we thus keep. */
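/* Sketch of the intended consumer-visible result for a Fortran main
   program (illustrative only, exact output varies):
     DW_TAG_subprogram
       DW_AT_calling_convention : DW_CC_program
       DW_AT_main_subprogram    : 1   (DWARF 4+, or without -gstrict-dwarf)  */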
20872 value = DW_CC_program;
20873
20874 if (dwarf_version >= 4 || !dwarf_strict)
20875 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20876 }
20877
20878 /* Only add the attribute if the backend requests it, and the value
20879 is not DW_CC_normal. */
20880 if (value && (value != DW_CC_normal))
20881 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20882 }
20883
20884 /* Given a tree pointer to a struct, class, union, or enum type node, return
20885 a pointer to the (string) tag name for the given type, or zero if the type
20886 was declared without a tag. */
20887
20888 static const char *
20889 type_tag (const_tree type)
20890 {
20891 const char *name = 0;
20892
20893 if (TYPE_NAME (type) != 0)
20894 {
20895 tree t = 0;
20896
20897 /* Find the IDENTIFIER_NODE for the type name. */
20898 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20899 && !TYPE_NAMELESS (type))
20900 t = TYPE_NAME (type);
20901
20902 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20903 a TYPE_DECL node, regardless of whether or not a `typedef' was
20904 involved. */
20905 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20906 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20907 {
20908 /* We want to be extra verbose. Don't call dwarf_name if
20909 DECL_NAME isn't set. The default hook for decl_printable_name
20910 doesn't like that, and in this context it's correct to return
20911 0, instead of "<anonymous>" or the like. */
20912 if (DECL_NAME (TYPE_NAME (type))
20913 && !DECL_NAMELESS (TYPE_NAME (type)))
20914 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20915 }
20916
20917 /* Now get the name as a string, or invent one. */
20918 if (!name && t != 0)
20919 name = IDENTIFIER_POINTER (t);
20920 }
20921
20922 return (name == 0 || *name == '\0') ? 0 : name;
20923 }
20924
20925 /* Return the type associated with a data member, making a special check
20926 for bit-field types. */
20927
20928 static inline tree
20929 member_declared_type (const_tree member)
20930 {
20931 return (DECL_BIT_FIELD_TYPE (member)
20932 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20933 }
20934
20935 /* Get the decl's label, as described by its RTL. This may be different
20936 from the DECL_NAME name used in the source file. */
20937
20938 #if 0
20939 static const char *
20940 decl_start_label (tree decl)
20941 {
20942 rtx x;
20943 const char *fnname;
20944
20945 x = DECL_RTL (decl);
20946 gcc_assert (MEM_P (x));
20947
20948 x = XEXP (x, 0);
20949 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20950
20951 fnname = XSTR (x, 0);
20952 return fnname;
20953 }
20954 #endif
20955 \f
20956 /* For variable-length arrays that have been previously generated, but
20957 may be incomplete due to missing subscript info, fill the subscript
20958 info. Return TRUE if this is one of those cases. */
20959 static bool
20960 fill_variable_array_bounds (tree type)
20961 {
20962 if (TREE_ASM_WRITTEN (type)
20963 && TREE_CODE (type) == ARRAY_TYPE
20964 && variably_modified_type_p (type, NULL))
20965 {
20966 dw_die_ref array_die = lookup_type_die (type);
20967 if (!array_die)
20968 return false;
20969 add_subscript_info (array_die, type, !is_ada ());
20970 return true;
20971 }
20972 return false;
20973 }
20974
20975 /* These routines generate the internal representation of the DIEs for
20976 the compilation unit. Debugging information is collected by walking
20977 the declaration trees passed in from dwarf2out_decl(). */
20978
20979 static void
20980 gen_array_type_die (tree type, dw_die_ref context_die)
20981 {
20982 dw_die_ref array_die;
20983
20984 /* GNU compilers represent multidimensional array types as sequences of one
20985 dimensional array types whose element types are themselves array types.
20986 We sometimes squish that down to a single array_type DIE with multiple
20987 subscripts in the Dwarf debugging info. The draft Dwarf specification
20988 says that we are allowed to do this kind of compression in C, because
20989 there is no difference between an array of arrays and a multidimensional
20990 array. We don't do this for Ada, to remain as close as possible to the
20991 actual representation, which is especially important given the language's
20992 flexibility with respect to arrays of variable size. */
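/* For instance (an illustrative sketch, not literal output): for the C
   declaration "int a[2][3]" a single DW_TAG_array_type DIE is emitted with
   two DW_TAG_subrange_type children (DW_AT_upper_bound 1 and 2), whereas
   for Ada each dimension keeps its own DW_TAG_array_type DIE.  */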
20993
20994 bool collapse_nested_arrays = !is_ada ();
20995
20996 if (fill_variable_array_bounds (type))
20997 return;
20998
20999 dw_die_ref scope_die = scope_die_for (type, context_die);
21000 tree element_type;
21001
21002 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21003 DW_TAG_string_type doesn't have DW_AT_type attribute). */
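/* Rough sketch of the expected shape (illustrative only): for the Fortran
   declaration "character(len=10) :: s" the type is described by a
   DW_TAG_string_type DIE carrying DW_AT_byte_size 10; variable-length
   strings instead get a DW_AT_string_length location, handled below.  */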
21004 if (TYPE_STRING_FLAG (type)
21005 && TREE_CODE (type) == ARRAY_TYPE
21006 && is_fortran ()
21007 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21008 {
21009 HOST_WIDE_INT size;
21010
21011 array_die = new_die (DW_TAG_string_type, scope_die, type);
21012 add_name_attribute (array_die, type_tag (type));
21013 equate_type_number_to_die (type, array_die);
21014 size = int_size_in_bytes (type);
21015 if (size >= 0)
21016 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21017 /* ??? We can't annotate types late, but for LTO we may not
21018 generate a location early either (gfortran.dg/save_6.f90). */
21019 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21020 && TYPE_DOMAIN (type) != NULL_TREE
21021 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21022 {
21023 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21024 tree rszdecl = szdecl;
21025
21026 size = int_size_in_bytes (TREE_TYPE (szdecl));
21027 if (!DECL_P (szdecl))
21028 {
21029 if (TREE_CODE (szdecl) == INDIRECT_REF
21030 && DECL_P (TREE_OPERAND (szdecl, 0)))
21031 {
21032 rszdecl = TREE_OPERAND (szdecl, 0);
21033 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21034 != DWARF2_ADDR_SIZE)
21035 size = 0;
21036 }
21037 else
21038 size = 0;
21039 }
21040 if (size > 0)
21041 {
21042 dw_loc_list_ref loc
21043 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21044 NULL);
21045 if (loc)
21046 {
21047 add_AT_location_description (array_die, DW_AT_string_length,
21048 loc);
21049 if (size != DWARF2_ADDR_SIZE)
21050 add_AT_unsigned (array_die, dwarf_version >= 5
21051 ? DW_AT_string_length_byte_size
21052 : DW_AT_byte_size, size);
21053 }
21054 }
21055 }
21056 return;
21057 }
21058
21059 array_die = new_die (DW_TAG_array_type, scope_die, type);
21060 add_name_attribute (array_die, type_tag (type));
21061 equate_type_number_to_die (type, array_die);
21062
21063 if (TREE_CODE (type) == VECTOR_TYPE)
21064 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21065
21066 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21067 if (is_fortran ()
21068 && TREE_CODE (type) == ARRAY_TYPE
21069 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21070 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21071 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21072
21073 #if 0
21074 /* We default the array ordering. Debuggers will probably do the right
21075 things even if DW_AT_ordering is not present. It's not even an issue
21076 until we start to get into multidimensional arrays anyway. If a debugger
21077 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21078 then we'll have to put the DW_AT_ordering attribute back in. (But if
21079 and when we find out that we need to put these in, we will only do so
21080 for multidimensional arrays.) */
21081 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21082 #endif
21083
21084 if (TREE_CODE (type) == VECTOR_TYPE)
21085 {
21086 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21087 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21088 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21089 add_bound_info (subrange_die, DW_AT_upper_bound,
21090 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21091 }
21092 else
21093 add_subscript_info (array_die, type, collapse_nested_arrays);
21094
21095 /* Add representation of the type of the elements of this array type and
21096 emit the corresponding DIE if we haven't done it already. */
21097 element_type = TREE_TYPE (type);
21098 if (collapse_nested_arrays)
21099 while (TREE_CODE (element_type) == ARRAY_TYPE)
21100 {
21101 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21102 break;
21103 element_type = TREE_TYPE (element_type);
21104 }
21105
21106 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21107 TREE_CODE (type) == ARRAY_TYPE
21108 && TYPE_REVERSE_STORAGE_ORDER (type),
21109 context_die);
21110
21111 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21112 if (TYPE_ARTIFICIAL (type))
21113 add_AT_flag (array_die, DW_AT_artificial, 1);
21114
21115 if (get_AT (array_die, DW_AT_name))
21116 add_pubtype (type, array_die);
21117
21118 add_alignment_attribute (array_die, type);
21119 }
21120
21121 /* This routine generates a DIE for an array with a hidden descriptor;
21122 the details are filled into *info by a langhook. */
21123
21124 static void
21125 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21126 dw_die_ref context_die)
21127 {
21128 const dw_die_ref scope_die = scope_die_for (type, context_die);
21129 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21130 struct loc_descr_context context = { type, info->base_decl, NULL,
21131 false, false };
21132 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21133 int dim;
21134
21135 add_name_attribute (array_die, type_tag (type));
21136 equate_type_number_to_die (type, array_die);
21137
21138 if (info->ndimensions > 1)
21139 switch (info->ordering)
21140 {
21141 case array_descr_ordering_row_major:
21142 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21143 break;
21144 case array_descr_ordering_column_major:
21145 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21146 break;
21147 default:
21148 break;
21149 }
21150
21151 if (dwarf_version >= 3 || !dwarf_strict)
21152 {
21153 if (info->data_location)
21154 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21155 dw_scalar_form_exprloc, &context);
21156 if (info->associated)
21157 add_scalar_info (array_die, DW_AT_associated, info->associated,
21158 dw_scalar_form_constant
21159 | dw_scalar_form_exprloc
21160 | dw_scalar_form_reference, &context);
21161 if (info->allocated)
21162 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21163 dw_scalar_form_constant
21164 | dw_scalar_form_exprloc
21165 | dw_scalar_form_reference, &context);
21166 if (info->stride)
21167 {
21168 const enum dwarf_attribute attr
21169 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21170 const int forms
21171 = (info->stride_in_bits)
21172 ? dw_scalar_form_constant
21173 : (dw_scalar_form_constant
21174 | dw_scalar_form_exprloc
21175 | dw_scalar_form_reference);
21176
21177 add_scalar_info (array_die, attr, info->stride, forms, &context);
21178 }
21179 }
21180 if (dwarf_version >= 5)
21181 {
21182 if (info->rank)
21183 {
21184 add_scalar_info (array_die, DW_AT_rank, info->rank,
21185 dw_scalar_form_constant
21186 | dw_scalar_form_exprloc, &context);
21187 subrange_tag = DW_TAG_generic_subrange;
21188 context.placeholder_arg = true;
21189 }
21190 }
21191
21192 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21193
21194 for (dim = 0; dim < info->ndimensions; dim++)
21195 {
21196 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21197
21198 if (info->dimen[dim].bounds_type)
21199 add_type_attribute (subrange_die,
21200 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21201 false, context_die);
21202 if (info->dimen[dim].lower_bound)
21203 add_bound_info (subrange_die, DW_AT_lower_bound,
21204 info->dimen[dim].lower_bound, &context);
21205 if (info->dimen[dim].upper_bound)
21206 add_bound_info (subrange_die, DW_AT_upper_bound,
21207 info->dimen[dim].upper_bound, &context);
21208 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21209 add_scalar_info (subrange_die, DW_AT_byte_stride,
21210 info->dimen[dim].stride,
21211 dw_scalar_form_constant
21212 | dw_scalar_form_exprloc
21213 | dw_scalar_form_reference,
21214 &context);
21215 }
21216
21217 gen_type_die (info->element_type, context_die);
21218 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21219 TREE_CODE (type) == ARRAY_TYPE
21220 && TYPE_REVERSE_STORAGE_ORDER (type),
21221 context_die);
21222
21223 if (get_AT (array_die, DW_AT_name))
21224 add_pubtype (type, array_die);
21225
21226 add_alignment_attribute (array_die, type);
21227 }
21228
21229 #if 0
21230 static void
21231 gen_entry_point_die (tree decl, dw_die_ref context_die)
21232 {
21233 tree origin = decl_ultimate_origin (decl);
21234 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21235
21236 if (origin != NULL)
21237 add_abstract_origin_attribute (decl_die, origin);
21238 else
21239 {
21240 add_name_and_src_coords_attributes (decl_die, decl);
21241 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21242 TYPE_UNQUALIFIED, false, context_die);
21243 }
21244
21245 if (DECL_ABSTRACT_P (decl))
21246 equate_decl_number_to_die (decl, decl_die);
21247 else
21248 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21249 }
21250 #endif
21251
21252 /* Walk through the list of incomplete types again, trying once more to
21253 emit full debugging info for them. */
21254
21255 static void
21256 retry_incomplete_types (void)
21257 {
21258 set_early_dwarf s;
21259 int i;
21260
21261 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21262 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21263 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21264 vec_safe_truncate (incomplete_types, 0);
21265 }
21266
21267 /* Determine what tag to use for a record type. */
21268
21269 static enum dwarf_tag
21270 record_type_tag (tree type)
21271 {
21272 if (! lang_hooks.types.classify_record)
21273 return DW_TAG_structure_type;
21274
21275 switch (lang_hooks.types.classify_record (type))
21276 {
21277 case RECORD_IS_STRUCT:
21278 return DW_TAG_structure_type;
21279
21280 case RECORD_IS_CLASS:
21281 return DW_TAG_class_type;
21282
21283 case RECORD_IS_INTERFACE:
21284 if (dwarf_version >= 3 || !dwarf_strict)
21285 return DW_TAG_interface_type;
21286 return DW_TAG_structure_type;
21287
21288 default:
21289 gcc_unreachable ();
21290 }
21291 }
21292
21293 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21294 also include all of the information about the enumeration values. Each
21295 enumerated type name/value is listed as a child of the enumerated type
21296 DIE. */
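/* For example (a sketch of the intended shape, not literal output): the C
   declaration "enum color { RED = 1, BLUE = 2 };" yields roughly
     DW_TAG_enumeration_type   DW_AT_name "color"  DW_AT_byte_size ...
       DW_TAG_enumerator       DW_AT_name "RED"    DW_AT_const_value 1
       DW_TAG_enumerator       DW_AT_name "BLUE"   DW_AT_const_value 2  */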
21297
21298 static dw_die_ref
21299 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21300 {
21301 dw_die_ref type_die = lookup_type_die (type);
21302
21303 if (type_die == NULL)
21304 {
21305 type_die = new_die (DW_TAG_enumeration_type,
21306 scope_die_for (type, context_die), type);
21307 equate_type_number_to_die (type, type_die);
21308 add_name_attribute (type_die, type_tag (type));
21309 if (dwarf_version >= 4 || !dwarf_strict)
21310 {
21311 if (ENUM_IS_SCOPED (type))
21312 add_AT_flag (type_die, DW_AT_enum_class, 1);
21313 if (ENUM_IS_OPAQUE (type))
21314 add_AT_flag (type_die, DW_AT_declaration, 1);
21315 }
21316 if (!dwarf_strict)
21317 add_AT_unsigned (type_die, DW_AT_encoding,
21318 TYPE_UNSIGNED (type)
21319 ? DW_ATE_unsigned
21320 : DW_ATE_signed);
21321 }
21322 else if (! TYPE_SIZE (type))
21323 return type_die;
21324 else
21325 remove_AT (type_die, DW_AT_declaration);
21326
21327 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21328 given enum type is incomplete, do not generate the DW_AT_byte_size
21329 attribute or the DW_AT_element_list attribute. */
21330 if (TYPE_SIZE (type))
21331 {
21332 tree link;
21333
21334 TREE_ASM_WRITTEN (type) = 1;
21335 add_byte_size_attribute (type_die, type);
21336 add_alignment_attribute (type_die, type);
21337 if (dwarf_version >= 3 || !dwarf_strict)
21338 {
21339 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21340 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21341 context_die);
21342 }
21343 if (TYPE_STUB_DECL (type) != NULL_TREE)
21344 {
21345 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21346 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21347 }
21348
21349 /* If the first reference to this type was as the return type of an
21350 inline function, then it may not have a parent. Fix this now. */
21351 if (type_die->die_parent == NULL)
21352 add_child_die (scope_die_for (type, context_die), type_die);
21353
21354 for (link = TYPE_VALUES (type);
21355 link != NULL; link = TREE_CHAIN (link))
21356 {
21357 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21358 tree value = TREE_VALUE (link);
21359
21360 add_name_attribute (enum_die,
21361 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21362
21363 if (TREE_CODE (value) == CONST_DECL)
21364 value = DECL_INITIAL (value);
21365
21366 if (simple_type_size_in_bits (TREE_TYPE (value))
21367 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21368 {
21369 /* For constant forms created by add_AT_unsigned DWARF
21370 consumers (GDB, elfutils, etc.) always zero extend
21371 the value. Only when the actual value is negative
21372 do we need to use add_AT_int to generate a constant
21373 form that can represent negative values. */
21374 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21375 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21376 add_AT_unsigned (enum_die, DW_AT_const_value,
21377 (unsigned HOST_WIDE_INT) val);
21378 else
21379 add_AT_int (enum_die, DW_AT_const_value, val);
21380 }
21381 else
21382 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21383 that here. TODO: This should be re-worked to use correct
21384 signed/unsigned double tags for all cases. */
21385 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21386 }
21387
21388 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21389 if (TYPE_ARTIFICIAL (type))
21390 add_AT_flag (type_die, DW_AT_artificial, 1);
21391 }
21392 else
21393 add_AT_flag (type_die, DW_AT_declaration, 1);
21394
21395 add_pubtype (type, type_die);
21396
21397 return type_die;
21398 }
21399
21400 /* Generate a DIE to represent either a real live formal parameter decl or to
21401 represent just the type of some formal parameter position in some function
21402 type.
21403
21404 Note that this routine is a bit unusual because its argument may be a
21405 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21406 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21407 node. If it's the former then this function is being called to output a
21408 DIE to represent a formal parameter object (or some inlining thereof). If
21409 it's the latter, then this function is only being called to output a
21410 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21411 argument type of some subprogram type.
21412 If EMIT_NAME_P is true, name and source coordinate attributes
21413 are emitted. */
21414
21415 static dw_die_ref
21416 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21417 dw_die_ref context_die)
21418 {
21419 tree node_or_origin = node ? node : origin;
21420 tree ultimate_origin;
21421 dw_die_ref parm_die = NULL;
21422
21423 if (DECL_P (node_or_origin))
21424 {
21425 parm_die = lookup_decl_die (node);
21426
21427 /* If the contexts differ, we may not be talking about the same
21428 thing.
21429 ??? When in LTO the DIE parent is the "abstract" copy and the
21430 context_die is the specification "copy". But this whole block
21431 should eventually be no longer needed. */
21432 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21433 {
21434 if (!DECL_ABSTRACT_P (node))
21435 {
21436 /* This can happen when creating an inlined instance, in
21437 which case we need to create a new DIE that will get
21438 annotated with DW_AT_abstract_origin. */
21439 parm_die = NULL;
21440 }
21441 else
21442 gcc_unreachable ();
21443 }
21444
21445 if (parm_die && parm_die->die_parent == NULL)
21446 {
21447 /* Check that parm_die already has the right attributes that
21448 we would have added below. If any attributes are
21449 missing, fall through to add them. */
21450 if (! DECL_ABSTRACT_P (node_or_origin)
21451 && !get_AT (parm_die, DW_AT_location)
21452 && !get_AT (parm_die, DW_AT_const_value))
21453 /* We are missing location info, and are about to add it. */
21454 ;
21455 else
21456 {
21457 add_child_die (context_die, parm_die);
21458 return parm_die;
21459 }
21460 }
21461 }
21462
21463 /* If we have a previously generated DIE, use it, unless this is a
21464 concrete instance (origin != NULL), in which case we need a new
21465 DIE with a corresponding DW_AT_abstract_origin. */
21466 bool reusing_die;
21467 if (parm_die && origin == NULL)
21468 reusing_die = true;
21469 else
21470 {
21471 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21472 reusing_die = false;
21473 }
21474
21475 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21476 {
21477 case tcc_declaration:
21478 ultimate_origin = decl_ultimate_origin (node_or_origin);
21479 if (node || ultimate_origin)
21480 origin = ultimate_origin;
21481
21482 if (reusing_die)
21483 goto add_location;
21484
21485 if (origin != NULL)
21486 add_abstract_origin_attribute (parm_die, origin);
21487 else if (emit_name_p)
21488 add_name_and_src_coords_attributes (parm_die, node);
21489 if (origin == NULL
21490 || (! DECL_ABSTRACT_P (node_or_origin)
21491 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21492 decl_function_context
21493 (node_or_origin))))
21494 {
21495 tree type = TREE_TYPE (node_or_origin);
21496 if (decl_by_reference_p (node_or_origin))
21497 add_type_attribute (parm_die, TREE_TYPE (type),
21498 TYPE_UNQUALIFIED,
21499 false, context_die);
21500 else
21501 add_type_attribute (parm_die, type,
21502 decl_quals (node_or_origin),
21503 false, context_die);
21504 }
21505 if (origin == NULL && DECL_ARTIFICIAL (node))
21506 add_AT_flag (parm_die, DW_AT_artificial, 1);
21507 add_location:
21508 if (node && node != origin)
21509 equate_decl_number_to_die (node, parm_die);
21510 if (! DECL_ABSTRACT_P (node_or_origin))
21511 add_location_or_const_value_attribute (parm_die, node_or_origin,
21512 node == NULL);
21513
21514 break;
21515
21516 case tcc_type:
21517 /* We were called with some kind of a ..._TYPE node. */
21518 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21519 context_die);
21520 break;
21521
21522 default:
21523 gcc_unreachable ();
21524 }
21525
21526 return parm_die;
21527 }
21528
21529 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21530 children DW_TAG_formal_parameter DIEs representing the arguments of the
21531 parameter pack.
21532
21533 PARM_PACK must be a function parameter pack.
21534 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21535 must point to the subsequent arguments of the function PACK_ARG belongs to.
21536 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21537 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
21538 following the last one for which a DIE was generated. */
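/* Illustrative sketch (GNU extension tags): for a C++ function template
     template <typename... T> void f (T... args);
   instantiated as f<int, long>, the subprogram DIE gets a
   DW_TAG_GNU_formal_parameter_pack child whose own children are the
   DW_TAG_formal_parameter DIEs for the "int" and "long" arguments of the
   pack; the pack may also be empty, in which case there are no children.  */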
21539
21540 static dw_die_ref
21541 gen_formal_parameter_pack_die (tree parm_pack,
21542 tree pack_arg,
21543 dw_die_ref subr_die,
21544 tree *next_arg)
21545 {
21546 tree arg;
21547 dw_die_ref parm_pack_die;
21548
21549 gcc_assert (parm_pack
21550 && lang_hooks.function_parameter_pack_p (parm_pack)
21551 && subr_die);
21552
21553 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21554 add_src_coords_attributes (parm_pack_die, parm_pack);
21555
21556 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21557 {
21558 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21559 parm_pack))
21560 break;
21561 gen_formal_parameter_die (arg, NULL,
21562 false /* Don't emit name attribute. */,
21563 parm_pack_die);
21564 }
21565 if (next_arg)
21566 *next_arg = arg;
21567 return parm_pack_die;
21568 }
21569
21570 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21571 at the end of an (ANSI prototyped) formal parameter list. */
21572
21573 static void
21574 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21575 {
21576 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21577 }
21578
21579 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21580 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21581 parameters as specified in some function type specification (except for
21582 those which appear as part of a function *definition*). */
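/* Sketch of the expected result (illustration only): for a prototype such
   as "int f (int, ...)" the context DIE (e.g. a DW_TAG_subroutine_type or a
   subprogram declaration) gets one nameless DW_TAG_formal_parameter child
   for the "int" argument, followed by a DW_TAG_unspecified_parameters child
   standing for the trailing ellipsis.  */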
21583
21584 static void
21585 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21586 {
21587 tree link;
21588 tree formal_type = NULL;
21589 tree first_parm_type;
21590 tree arg;
21591
21592 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21593 {
21594 arg = DECL_ARGUMENTS (function_or_method_type);
21595 function_or_method_type = TREE_TYPE (function_or_method_type);
21596 }
21597 else
21598 arg = NULL_TREE;
21599
21600 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21601
21602 /* Make our first pass over the list of formal parameter types and output a
21603 DW_TAG_formal_parameter DIE for each one. */
21604 for (link = first_parm_type; link; )
21605 {
21606 dw_die_ref parm_die;
21607
21608 formal_type = TREE_VALUE (link);
21609 if (formal_type == void_type_node)
21610 break;
21611
21612 /* Output a (nameless) DIE to represent the formal parameter itself. */
21613 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21614 {
21615 parm_die = gen_formal_parameter_die (formal_type, NULL,
21616 true /* Emit name attribute. */,
21617 context_die);
21618 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21619 && link == first_parm_type)
21620 {
21621 add_AT_flag (parm_die, DW_AT_artificial, 1);
21622 if (dwarf_version >= 3 || !dwarf_strict)
21623 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21624 }
21625 else if (arg && DECL_ARTIFICIAL (arg))
21626 add_AT_flag (parm_die, DW_AT_artificial, 1);
21627 }
21628
21629 link = TREE_CHAIN (link);
21630 if (arg)
21631 arg = DECL_CHAIN (arg);
21632 }
21633
21634 /* If this function type has an ellipsis, add a
21635 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21636 if (formal_type != void_type_node)
21637 gen_unspecified_parameters_die (function_or_method_type, context_die);
21638
21639 /* Make our second (and final) pass over the list of formal parameter types
21640 and output DIEs to represent those types (as necessary). */
21641 for (link = TYPE_ARG_TYPES (function_or_method_type);
21642 link && TREE_VALUE (link);
21643 link = TREE_CHAIN (link))
21644 gen_type_die (TREE_VALUE (link), context_die);
21645 }
21646
21647 /* We want to generate the DIE for TYPE so that we can generate the
21648 die for MEMBER, which has been defined; we will need to refer back
21649 to the member declaration nested within TYPE. If we're trying to
21650 generate minimal debug info for TYPE, processing TYPE won't do the
21651 trick; we need to attach the member declaration by hand. */
21652
21653 static void
21654 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21655 {
21656 gen_type_die (type, context_die);
21657
21658 /* If we're trying to avoid duplicate debug info, we may not have
21659 emitted the member decl for this function. Emit it now. */
21660 if (TYPE_STUB_DECL (type)
21661 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21662 && ! lookup_decl_die (member))
21663 {
21664 dw_die_ref type_die;
21665 gcc_assert (!decl_ultimate_origin (member));
21666
21667 push_decl_scope (type);
21668 type_die = lookup_type_die_strip_naming_typedef (type);
21669 if (TREE_CODE (member) == FUNCTION_DECL)
21670 gen_subprogram_die (member, type_die);
21671 else if (TREE_CODE (member) == FIELD_DECL)
21672 {
21673 /* Ignore the nameless fields that are used to skip bits but handle
21674 C++ anonymous unions and structs. */
21675 if (DECL_NAME (member) != NULL_TREE
21676 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21677 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21678 {
21679 struct vlr_context vlr_ctx = {
21680 DECL_CONTEXT (member), /* struct_type */
21681 NULL_TREE /* variant_part_offset */
21682 };
21683 gen_type_die (member_declared_type (member), type_die);
21684 gen_field_die (member, &vlr_ctx, type_die);
21685 }
21686 }
21687 else
21688 gen_variable_die (member, NULL_TREE, type_die);
21689
21690 pop_decl_scope ();
21691 }
21692 }
21693 \f
21694 /* Forward declare these functions, because they are mutually recursive
21695 with their set_block_* pairing functions. */
21696 static void set_decl_origin_self (tree);
21697
21698 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21699 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21700 that it points to the node itself, thus indicating that the node is its
21701 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21702 the given node is NULL, recursively descend the decl/block tree which
21703 it is the root of, and for each other ..._DECL or BLOCK node contained
21704 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21705 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21706 values to point to themselves. */
21707
21708 static void
21709 set_block_origin_self (tree stmt)
21710 {
21711 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21712 {
21713 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21714
21715 {
21716 tree local_decl;
21717
21718 for (local_decl = BLOCK_VARS (stmt);
21719 local_decl != NULL_TREE;
21720 local_decl = DECL_CHAIN (local_decl))
21721 /* Do not recurse on nested functions since the inlining status
21722 of parent and child can be different as per the DWARF spec. */
21723 if (TREE_CODE (local_decl) != FUNCTION_DECL
21724 && !DECL_EXTERNAL (local_decl))
21725 set_decl_origin_self (local_decl);
21726 }
21727
21728 {
21729 tree subblock;
21730
21731 for (subblock = BLOCK_SUBBLOCKS (stmt);
21732 subblock != NULL_TREE;
21733 subblock = BLOCK_CHAIN (subblock))
21734 set_block_origin_self (subblock); /* Recurse. */
21735 }
21736 }
21737 }
21738
21739 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21740 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21741 node so that it points to the node itself, thus indicating that the
21742 node represents its own (abstract) origin. Additionally, if the
21743 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21744 the decl/block tree of which the given node is the root, and for
21745 each other ..._DECL or BLOCK node contained therein whose
21746 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21747 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21748 point to themselves. */
21749
21750 static void
21751 set_decl_origin_self (tree decl)
21752 {
21753 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21754 {
21755 DECL_ABSTRACT_ORIGIN (decl) = decl;
21756 if (TREE_CODE (decl) == FUNCTION_DECL)
21757 {
21758 tree arg;
21759
21760 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21761 DECL_ABSTRACT_ORIGIN (arg) = arg;
21762 if (DECL_INITIAL (decl) != NULL_TREE
21763 && DECL_INITIAL (decl) != error_mark_node)
21764 set_block_origin_self (DECL_INITIAL (decl));
21765 }
21766 }
21767 }
21768 \f
21769 /* Mark the early DIE for DECL as the abstract instance. */
21770
21771 static void
21772 dwarf2out_abstract_function (tree decl)
21773 {
21774 dw_die_ref old_die;
21775
21776 /* Make sure we have the actual abstract inline, not a clone. */
21777 decl = DECL_ORIGIN (decl);
21778
21779 if (DECL_IGNORED_P (decl))
21780 return;
21781
21782 old_die = lookup_decl_die (decl);
21783 /* With early debug we always have an old DIE unless we are in LTO
21784 and the user did not compile but only link with debug. */
21785 if (in_lto_p && ! old_die)
21786 return;
21787 gcc_assert (old_die != NULL);
21788 if (get_AT (old_die, DW_AT_inline)
21789 || get_AT (old_die, DW_AT_abstract_origin))
21790 /* We've already generated the abstract instance. */
21791 return;
21792
21793 /* Go ahead and put DW_AT_inline on the DIE. */
21794 if (DECL_DECLARED_INLINE_P (decl))
21795 {
21796 if (cgraph_function_possibly_inlined_p (decl))
21797 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
21798 else
21799 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
21800 }
21801 else
21802 {
21803 if (cgraph_function_possibly_inlined_p (decl))
21804 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
21805 else
21806 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
21807 }
21808
21809 if (DECL_DECLARED_INLINE_P (decl)
21810 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
21811 add_AT_flag (old_die, DW_AT_artificial, 1);
21812
21813 set_decl_origin_self (decl);
21814 }
21815
21816 /* Helper function of premark_used_types() which gets called through
21817 htab_traverse.
21818
21819 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21820 marked as unused by prune_unused_types. */
21821
21822 bool
21823 premark_used_types_helper (tree const &type, void *)
21824 {
21825 dw_die_ref die;
21826
21827 die = lookup_type_die (type);
21828 if (die != NULL)
21829 die->die_perennial_p = 1;
21830 return true;
21831 }
21832
21833 /* Helper function of premark_types_used_by_global_vars which gets called
21834 through htab_traverse.
21835
21836 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21837 marked as unused by prune_unused_types. The DIE of the type is marked
21838 only if the global variable using the type will actually be emitted. */
21839
21840 int
21841 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21842 void *)
21843 {
21844 struct types_used_by_vars_entry *entry;
21845 dw_die_ref die;
21846
21847 entry = (struct types_used_by_vars_entry *) *slot;
21848 gcc_assert (entry->type != NULL
21849 && entry->var_decl != NULL);
21850 die = lookup_type_die (entry->type);
21851 if (die)
21852 {
21853 /* Ask cgraph if the global variable really is to be emitted.
21854 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21855 varpool_node *node = varpool_node::get (entry->var_decl);
21856 if (node && node->definition)
21857 {
21858 die->die_perennial_p = 1;
21859 /* Keep the parent DIEs as well. */
21860 while ((die = die->die_parent) && die->die_perennial_p == 0)
21861 die->die_perennial_p = 1;
21862 }
21863 }
21864 return 1;
21865 }
21866
21867 /* Mark all members of used_types_hash as perennial. */
21868
21869 static void
21870 premark_used_types (struct function *fun)
21871 {
21872 if (fun && fun->used_types_hash)
21873 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21874 }
21875
21876 /* Mark all members of types_used_by_vars_entry as perennial. */
21877
21878 static void
21879 premark_types_used_by_global_vars (void)
21880 {
21881 if (types_used_by_vars_hash)
21882 types_used_by_vars_hash
21883 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21884 }
21885
21886 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21887 for CA_LOC call arg loc node. */
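/* Shape of the DIE created here (rough sketch): a DW_TAG_call_site (or
   DW_TAG_GNU_call_site before DWARF 5) child of the innermost enclosing
   block or subprogram DIE, with DW_AT_call_return_pc pointing at the label
   following the call and, when the callee is known, DW_AT_call_origin
   referring to the callee's DIE.  */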
21888
21889 static dw_die_ref
21890 gen_call_site_die (tree decl, dw_die_ref subr_die,
21891 struct call_arg_loc_node *ca_loc)
21892 {
21893 dw_die_ref stmt_die = NULL, die;
21894 tree block = ca_loc->block;
21895
21896 while (block
21897 && block != DECL_INITIAL (decl)
21898 && TREE_CODE (block) == BLOCK)
21899 {
21900 stmt_die = BLOCK_DIE (block);
21901 if (stmt_die)
21902 break;
21903 block = BLOCK_SUPERCONTEXT (block);
21904 }
21905 if (stmt_die == NULL)
21906 stmt_die = subr_die;
21907 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21908 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21909 if (ca_loc->tail_call_p)
21910 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21911 if (ca_loc->symbol_ref)
21912 {
21913 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21914 if (tdie)
21915 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21916 else
21917 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21918 false);
21919 }
21920 return die;
21921 }
21922
21923 /* Generate a DIE to represent a declared function (either file-scope or
21924 block-local). */
21925
21926 static void
21927 gen_subprogram_die (tree decl, dw_die_ref context_die)
21928 {
21929 tree origin = decl_ultimate_origin (decl);
21930 dw_die_ref subr_die;
21931 dw_die_ref old_die = lookup_decl_die (decl);
21932
21933 /* This function gets called multiple times for different stages of
21934 the debug process. For example, for func() in this code:
21935
21936 namespace S
21937 {
21938 void func() { ... }
21939 }
21940
21941 ...we get called 4 times. Twice in early debug and twice in
21942 late debug:
21943
21944 Early debug
21945 -----------
21946
21947 1. Once while generating func() within the namespace. This is
21948 the declaration. The declaration bit below is set, as the
21949 context is the namespace.
21950
21951 A new DIE will be generated with DW_AT_declaration set.
21952
21953 2. Once for func() itself. This is the specification. The
21954 declaration bit below is clear as the context is the CU.
21955
21956 We will use the cached DIE from (1) to create a new DIE with
21957 DW_AT_specification pointing to the declaration in (1).
21958
21959 Late debug via rest_of_handle_final()
21960 -------------------------------------
21961
21962 3. Once generating func() within the namespace. This is also the
21963 declaration, as in (1), but this time we will early exit below
21964 as we have a cached DIE and a declaration needs no additional
21965 annotations (no locations), as the source declaration line
21966 info is enough.
21967
21968 4. Once for func() itself. As in (2), this is the specification,
21969 but this time we will re-use the cached DIE, and just annotate
21970 it with the location information that should now be available.
21971
21972 For something without namespaces, but with abstract instances, we
21973 are also called multiple times:
21974
21975 class Base
21976 {
21977 public:
21978 Base (); // constructor declaration (1)
21979 };
21980
21981 Base::Base () { } // constructor specification (2)
21982
21983 Early debug
21984 -----------
21985
21986 1. Once for the Base() constructor by virtue of it being a
21987 member of the Base class. This is done via
21988 rest_of_type_compilation.
21989
21990 This is a declaration, so a new DIE will be created with
21991 DW_AT_declaration.
21992
21993 2. Once for the Base() constructor definition, but this time
21994 while generating the abstract instance of the base
21995 constructor (__base_ctor) which is being generated via early
21996 debug of reachable functions.
21997
21998 Even though we have a cached version of the declaration (1),
21999 we will create a DW_AT_specification of the declaration DIE
22000 in (1).
22001
22002 3. Once for the __base_ctor itself, but this time, we generate
22003 a DW_AT_abstract_origin version of the DW_AT_specification in
22004 (2).
22005
22006 Late debug via rest_of_handle_final
22007 -----------------------------------
22008
22009 4. One final time for the __base_ctor (which will have a cached
22010 DIE with DW_AT_abstract_origin created in (3). This time,
22011 we will just annotate the location information now
22012 available.
22013 */
22014 int declaration = (current_function_decl != decl
22015 || class_or_namespace_scope_p (context_die));
22016
22017 /* Now that the C++ front end lazily declares artificial member fns, we
22018 might need to retrofit the declaration into its class. */
22019 if (!declaration && !origin && !old_die
22020 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22021 && !class_or_namespace_scope_p (context_die)
22022 && debug_info_level > DINFO_LEVEL_TERSE)
22023 old_die = force_decl_die (decl);
22024
22025 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22026 if (origin != NULL)
22027 {
22028 gcc_assert (!declaration || local_scope_p (context_die));
22029
22030 /* Fixup die_parent for the abstract instance of a nested
22031 inline function. */
22032 if (old_die && old_die->die_parent == NULL)
22033 add_child_die (context_die, old_die);
22034
22035 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22036 {
22037 /* If we have a DW_AT_abstract_origin we have a working
22038 cached version. */
22039 subr_die = old_die;
22040 }
22041 else
22042 {
22043 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22044 add_abstract_origin_attribute (subr_die, origin);
22045 /* This is where the actual code for a cloned function is.
22046 Let's emit linkage name attribute for it. This helps
22047 debuggers to e.g, set breakpoints into
22048 constructors/destructors when the user asks "break
22049 K::K". */
22050 add_linkage_name (subr_die, decl);
22051 }
22052 }
22053 /* A cached copy, possibly from early dwarf generation. Reuse as
22054 much as possible. */
22055 else if (old_die)
22056 {
22057 /* A declaration that has been previously dumped needs no
22058 additional information. */
22059 if (declaration)
22060 return;
22061
22062 if (!get_AT_flag (old_die, DW_AT_declaration)
22063 /* We can have a normal definition following an inline one in the
22064 case of redefinition of GNU C extern inlines.
22065 It seems reasonable to use AT_specification in this case. */
22066 && !get_AT (old_die, DW_AT_inline))
22067 {
22068 /* Detect and ignore this case, where we are trying to output
22069 something we have already output. */
22070 if (get_AT (old_die, DW_AT_low_pc)
22071 || get_AT (old_die, DW_AT_ranges))
22072 return;
22073
22074 /* If we have no location information, this must be a
22075 partially generated DIE from early dwarf generation.
22076 Fall through and generate it. */
22077 }
22078
22079 /* If the definition comes from the same place as the declaration,
22080 maybe use the old DIE. We always want the DIE for this function
22081 that has the *_pc attributes to be under comp_unit_die so the
22082 debugger can find it. We also need to do this for abstract
22083 instances of inlines, since the spec requires the out-of-line copy
22084 to have the same parent. For local class methods, this doesn't
22085 apply; we just use the old DIE. */
22086 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22087 struct dwarf_file_data * file_index = lookup_filename (s.file);
22088 if ((is_cu_die (old_die->die_parent)
22089 /* This condition fixes the inconsistency/ICE with the
22090 following Fortran test (or some derivative thereof) while
22091 building libgfortran:
22092
22093 module some_m
22094 contains
22095 logical function funky (FLAG)
22096 funky = .true.
22097 end function
22098 end module
22099 */
22100 || (old_die->die_parent
22101 && old_die->die_parent->die_tag == DW_TAG_module)
22102 || context_die == NULL)
22103 && (DECL_ARTIFICIAL (decl)
22104 /* The location attributes may be in the abstract origin
22105 which in the case of LTO might not be available to
22106 look at. */
22107 || get_AT (old_die, DW_AT_abstract_origin)
22108 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22109 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22110 == (unsigned) s.line)
22111 && (!debug_column_info
22112 || s.column == 0
22113 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22114 == (unsigned) s.column)))))
22115 {
22116 subr_die = old_die;
22117
22118 /* Clear out the declaration attribute, but leave the
22119 parameters so they can be augmented with location
22120 information later. Unless this was a declaration, in
22121 which case, wipe out the nameless parameters and recreate
22122 them further down. */
22123 if (remove_AT (subr_die, DW_AT_declaration))
22124 {
22125
22126 remove_AT (subr_die, DW_AT_object_pointer);
22127 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22128 }
22129 }
22130 /* Make a specification pointing to the previously built
22131 declaration. */
22132 else
22133 {
22134 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22135 add_AT_specification (subr_die, old_die);
22136 add_pubname (decl, subr_die);
22137 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22138 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22139 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22140 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22141 if (debug_column_info
22142 && s.column
22143 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22144 != (unsigned) s.column))
22145 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22146
22147 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22148 emit the real type on the definition die. */
22149 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22150 {
22151 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22152 if (die == auto_die || die == decltype_auto_die)
22153 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22154 TYPE_UNQUALIFIED, false, context_die);
22155 }
22156
22157 /* When we process the method declaration, we haven't seen
22158 the out-of-class defaulted definition yet, so we have to
22159 recheck now. */
22160 if ((dwarf_version >= 5 || ! dwarf_strict)
22161 && !get_AT (subr_die, DW_AT_defaulted))
22162 {
22163 int defaulted
22164 = lang_hooks.decls.decl_dwarf_attribute (decl,
22165 DW_AT_defaulted);
22166 if (defaulted != -1)
22167 {
22168 /* Other values must have been handled before. */
22169 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22170 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22171 }
22172 }
22173 }
22174 }
22175 /* Create a fresh DIE for anything else. */
22176 else
22177 {
22178 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22179
22180 if (TREE_PUBLIC (decl))
22181 add_AT_flag (subr_die, DW_AT_external, 1);
22182
22183 add_name_and_src_coords_attributes (subr_die, decl);
22184 add_pubname (decl, subr_die);
22185 if (debug_info_level > DINFO_LEVEL_TERSE)
22186 {
22187 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22188 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22189 TYPE_UNQUALIFIED, false, context_die);
22190 }
22191
22192 add_pure_or_virtual_attribute (subr_die, decl);
22193 if (DECL_ARTIFICIAL (decl))
22194 add_AT_flag (subr_die, DW_AT_artificial, 1);
22195
22196 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22197 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22198
22199 add_alignment_attribute (subr_die, decl);
22200
22201 add_accessibility_attribute (subr_die, decl);
22202 }
22203
22204 /* Unless we have an existing non-declaration DIE, equate the new
22205 DIE. */
22206 if (!old_die || is_declaration_die (old_die))
22207 equate_decl_number_to_die (decl, subr_die);
22208
22209 if (declaration)
22210 {
22211 if (!old_die || !get_AT (old_die, DW_AT_inline))
22212 {
22213 add_AT_flag (subr_die, DW_AT_declaration, 1);
22214
22215 /* If this is an explicit function declaration then generate
22216 a DW_AT_explicit attribute. */
22217 if ((dwarf_version >= 3 || !dwarf_strict)
22218 && lang_hooks.decls.decl_dwarf_attribute (decl,
22219 DW_AT_explicit) == 1)
22220 add_AT_flag (subr_die, DW_AT_explicit, 1);
22221
22222 /* If this is a C++11 deleted special function member then generate
22223 a DW_AT_deleted attribute. */
22224 if ((dwarf_version >= 5 || !dwarf_strict)
22225 && lang_hooks.decls.decl_dwarf_attribute (decl,
22226 DW_AT_deleted) == 1)
22227 add_AT_flag (subr_die, DW_AT_deleted, 1);
22228
22229 /* If this is a C++11 defaulted special function member then
22230 generate a DW_AT_defaulted attribute. */
22231 if (dwarf_version >= 5 || !dwarf_strict)
22232 {
22233 int defaulted
22234 = lang_hooks.decls.decl_dwarf_attribute (decl,
22235 DW_AT_defaulted);
22236 if (defaulted != -1)
22237 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22238 }
22239
22240 /* If this is a C++11 non-static member function with & ref-qualifier
22241 then generate a DW_AT_reference attribute. */
22242 if ((dwarf_version >= 5 || !dwarf_strict)
22243 && lang_hooks.decls.decl_dwarf_attribute (decl,
22244 DW_AT_reference) == 1)
22245 add_AT_flag (subr_die, DW_AT_reference, 1);
22246
22247 /* If this is a C++11 non-static member function with &&
22248 ref-qualifier then generate a DW_AT_rvalue_reference attribute. */
22249 if ((dwarf_version >= 5 || !dwarf_strict)
22250 && lang_hooks.decls.decl_dwarf_attribute (decl,
22251 DW_AT_rvalue_reference)
22252 == 1)
22253 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22254 }
22255 }
22256 /* For non DECL_EXTERNALs, if range information is available, fill
22257 the DIE with it. */
22258 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22259 {
22260 HOST_WIDE_INT cfa_fb_offset;
22261
22262 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22263
22264 if (!crtl->has_bb_partition)
22265 {
22266 dw_fde_ref fde = fun->fde;
22267 if (fde->dw_fde_begin)
22268 {
22269 /* We have already generated the labels. */
22270 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22271 fde->dw_fde_end, false);
22272 }
22273 else
22274 {
22275 /* Create start/end labels and add the range. */
22276 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22277 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22278 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22279 current_function_funcdef_no);
22280 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22281 current_function_funcdef_no);
22282 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22283 false);
22284 }
22285
22286 #if VMS_DEBUGGING_INFO
22287 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22288 Section 2.3 Prologue and Epilogue Attributes:
22289 When a breakpoint is set on entry to a function, it is generally
22290 desirable for execution to be suspended, not on the very first
22291 instruction of the function, but rather at a point after the
22292 function's frame has been set up, after any language defined local
22293 declaration processing has been completed, and before execution of
22294 the first statement of the function begins. Debuggers generally
22295 cannot properly determine where this point is. Similarly for a
22296 breakpoint set on exit from a function. The prologue and epilogue
22297 attributes allow a compiler to communicate the location(s) to use. */
22298
22299 {
22300 if (fde->dw_fde_vms_end_prologue)
22301 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22302 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22303
22304 if (fde->dw_fde_vms_begin_epilogue)
22305 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22306 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22307 }
22308 #endif
22309
22310 }
22311 else
22312 {
22313 /* Generate pubnames entries for the split function code ranges. */
22314 dw_fde_ref fde = fun->fde;
22315
22316 if (fde->dw_fde_second_begin)
22317 {
22318 if (dwarf_version >= 3 || !dwarf_strict)
22319 {
22320 /* We should use ranges for non-contiguous code section
22321 addresses. Use the actual code range for the initial
22322 section, since the HOT/COLD labels might precede an
22323 alignment offset. */
22324 bool range_list_added = false;
22325 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22326 fde->dw_fde_end, &range_list_added,
22327 false);
22328 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22329 fde->dw_fde_second_end,
22330 &range_list_added, false);
22331 if (range_list_added)
22332 add_ranges (NULL);
22333 }
22334 else
22335 {
22336 /* There is no real support in DWARF 2 for this, so we make
22337 a work-around. First, emit the pub name for the segment
22338 containing the function label. Then make and emit a
22339 simplified subprogram DIE for the second segment with the
22340 name prefixed by __second_sect_of_. We use the same
22341 linkage name for the second die so that gdb will find both
22342 sections when given "b foo". */
22343 const char *name = NULL;
22344 tree decl_name = DECL_NAME (decl);
22345 dw_die_ref seg_die;
22346
22347 /* Do the 'primary' section. */
22348 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22349 fde->dw_fde_end, false);
22350
22351 /* Build a minimal DIE for the secondary section. */
22352 seg_die = new_die (DW_TAG_subprogram,
22353 subr_die->die_parent, decl);
22354
22355 if (TREE_PUBLIC (decl))
22356 add_AT_flag (seg_die, DW_AT_external, 1);
22357
22358 if (decl_name != NULL
22359 && IDENTIFIER_POINTER (decl_name) != NULL)
22360 {
22361 name = dwarf2_name (decl, 1);
22362 if (! DECL_ARTIFICIAL (decl))
22363 add_src_coords_attributes (seg_die, decl);
22364
22365 add_linkage_name (seg_die, decl);
22366 }
22367 gcc_assert (name != NULL);
22368 add_pure_or_virtual_attribute (seg_die, decl);
22369 if (DECL_ARTIFICIAL (decl))
22370 add_AT_flag (seg_die, DW_AT_artificial, 1);
22371
22372 name = concat ("__second_sect_of_", name, NULL);
22373 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22374 fde->dw_fde_second_end, false);
22375 add_name_attribute (seg_die, name);
22376 if (want_pubnames ())
22377 add_pubname_string (name, seg_die);
22378 }
22379 }
22380 else
22381 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22382 false);
22383 }
22384
22385 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22386
22387 /* We define the "frame base" as the function's CFA. This is more
22388 convenient for several reasons: (1) It's stable across the prologue
22389 and epilogue, which makes it better than just a frame pointer,
22390 (2) With dwarf3, there exists a one-byte encoding that allows us
22391 to reference the .debug_frame data by proxy, but failing that,
22392 (3) We can at least reuse the code inspection and interpretation
22393 code that determines the CFA position at various points in the
22394 function. */
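/* Concretely (a sketch of the two cases handled below): with DWARF 3+ and
   DWARF-based unwind info the frame base is the one-byte expression
   DW_OP_call_frame_cfa; otherwise convert_cfa_to_fb_loc_list builds a
   location list (or a single expression) that tracks the CFA explicitly.  */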
22395 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22396 {
22397 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22398 add_AT_loc (subr_die, DW_AT_frame_base, op);
22399 }
22400 else
22401 {
22402 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22403 if (list->dw_loc_next)
22404 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22405 else
22406 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22407 }
22408
22409 /* Compute a displacement from the "steady-state frame pointer" to
22410 the CFA. The former is what all stack slots and argument slots
22411 will reference in the rtl; the latter is what we've told the
22412 debugger about. We'll need to adjust all frame_base references
22413 by this displacement. */
22414 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22415
22416 if (fun->static_chain_decl)
22417 {
22418 /* DWARF requires here a location expression that computes the
22419 address of the enclosing subprogram's frame base. The machinery
22420 in tree-nested.c is supposed to store this specific address in the
22421 last field of the FRAME record. */
22422 const tree frame_type
22423 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22424 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22425
22426 tree fb_expr
22427 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22428 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22429 fb_expr, fb_decl, NULL_TREE);
22430
22431 add_AT_location_description (subr_die, DW_AT_static_link,
22432 loc_list_from_tree (fb_expr, 0, NULL));
22433 }
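      /* For illustration, for a GNU C nested function such as

	   int outer (int x)
	   {
	     int inner (void) { return x; }
	     return inner ();
	   }

	 the DIE for 'inner' gets a DW_AT_static_link whose location
	 expression computes the enclosing frame base by reading the last
	 field of outer's FRAME record through inner's static chain, as
	 described above.  */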
22434
22435 resolve_variable_values ();
22436 }
22437
22438   /* Generate child DIEs for template parameters. */
22439 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22440 gen_generic_params_dies (decl);
22441
22442   /* Now output descriptions of the arguments for this function. This gets
22443      (unnecessarily?) complex because the DECL_ARGUMENTS list
22444      for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22445      `...' at the end of the formal parameter list. To find out whether
22446      there was a trailing ellipsis, we must instead look at the type
22447      associated with the FUNCTION_DECL. This will be a node of type
22448      FUNCTION_TYPE. If the chain of type nodes hanging off of this
22449      FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22450      an ellipsis at the end. */
22451
22452 /* In the case where we are describing a mere function declaration, all we
22453 need to do here (and all we *can* do here) is to describe the *types* of
22454 its formal parameters. */
22455 if (debug_info_level <= DINFO_LEVEL_TERSE)
22456 ;
22457 else if (declaration)
22458 gen_formal_types_die (decl, subr_die);
22459 else
22460 {
22461 /* Generate DIEs to represent all known formal parameters. */
22462 tree parm = DECL_ARGUMENTS (decl);
22463 tree generic_decl = early_dwarf
22464 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22465 tree generic_decl_parm = generic_decl
22466 ? DECL_ARGUMENTS (generic_decl)
22467 : NULL;
22468
22469 /* Now we want to walk the list of parameters of the function and
22470 emit their relevant DIEs.
22471
22472 We consider the case of DECL being an instance of a generic function
22473 as well as it being a normal function.
22474
22475 If DECL is an instance of a generic function we walk the
22476 parameters of the generic function declaration _and_ the parameters of
22477 DECL itself. This is useful because we want to emit specific DIEs for
22478 function parameter packs and those are declared as part of the
22479 generic function declaration. In that particular case,
22480 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22481 That DIE has children DIEs representing the set of arguments
22482 of the pack. Note that the set of pack arguments can be empty.
22483 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22484 children DIE.
22485
22486 Otherwise, we just consider the parameters of DECL. */
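	 As an illustrative example, for

	   template <typename... Args> void f (Args... args) {}

	 instantiated with <int, char>, the instance DIE gets a
	 DW_TAG_GNU_formal_parameter_pack child whose own children describe
	 the 'int' and 'char' pack arguments; an empty pack yields the pack
	 DIE with no children.  */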
22487 while (generic_decl_parm || parm)
22488 {
22489 if (generic_decl_parm
22490 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22491 gen_formal_parameter_pack_die (generic_decl_parm,
22492 parm, subr_die,
22493 &parm);
22494 else if (parm && !POINTER_BOUNDS_P (parm))
22495 {
22496 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22497
22498 if (early_dwarf
22499 && parm == DECL_ARGUMENTS (decl)
22500 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22501 && parm_die
22502 && (dwarf_version >= 3 || !dwarf_strict))
22503 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22504
22505 parm = DECL_CHAIN (parm);
22506 }
22507 else if (parm)
22508 parm = DECL_CHAIN (parm);
22509
22510 if (generic_decl_parm)
22511 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22512 }
22513
22514 /* Decide whether we need an unspecified_parameters DIE at the end.
22515 	 There are two more cases to do this for: 1) the ANSI `...' declaration,
22516 	 which is detectable when the end of the arg list is not a
22517 	 void_type_node; and 2) an unprototyped function declaration (not a
22518 	 definition), which just means that we have no info about the
22519 	 parameters at all. */
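      /* For instance, a prototyped 'int f (int a, ...)' gets a
	 DW_TAG_unspecified_parameters child after the DIE for 'a', and so
	 does an unprototyped 'int g ();' declaration (illustrative names
	 only).  */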
22520 if (early_dwarf)
22521 {
22522 if (prototype_p (TREE_TYPE (decl)))
22523 {
22524 	      /* This is the prototyped case: check for a trailing `...'. */
22525 if (stdarg_p (TREE_TYPE (decl)))
22526 gen_unspecified_parameters_die (decl, subr_die);
22527 }
22528 else if (DECL_INITIAL (decl) == NULL_TREE)
22529 gen_unspecified_parameters_die (decl, subr_die);
22530 }
22531 }
22532
22533 if (subr_die != old_die)
22534 /* Add the calling convention attribute if requested. */
22535 add_calling_convention_attribute (subr_die, decl);
22536
22537 /* Output Dwarf info for all of the stuff within the body of the function
22538 (if it has one - it may be just a declaration).
22539
22540 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22541 a function. This BLOCK actually represents the outermost binding contour
22542 for the function, i.e. the contour in which the function's formal
22543      parameters and labels get declared. Curiously, it appears that the front
22544      end doesn't actually put the PARM_DECL nodes for the current function onto
22545      the BLOCK_VARS list for this outer scope; instead they are strung off the
22546      DECL_ARGUMENTS list for the function.
22547
22548      The BLOCK_VARS list for the `outer_scope' does, however, provide us with a
22549      list of the LABEL_DECL nodes for the function, and we output DWARF info
22550      for those in decls_for_scope. Just within the `outer_scope' there will be
22551 a BLOCK node representing the function's outermost pair of curly braces,
22552 and any blocks used for the base and member initializers of a C++
22553 constructor function. */
22554 tree outer_scope = DECL_INITIAL (decl);
22555 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22556 {
22557 int call_site_note_count = 0;
22558 int tail_call_site_note_count = 0;
22559
22560 /* Emit a DW_TAG_variable DIE for a named return value. */
22561 if (DECL_NAME (DECL_RESULT (decl)))
22562 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22563
22564 /* The first time through decls_for_scope we will generate the
22565 DIEs for the locals. The second time, we fill in the
22566 location info. */
22567 decls_for_scope (outer_scope, subr_die);
22568
22569 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22570 {
22571 struct call_arg_loc_node *ca_loc;
22572 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22573 {
22574 dw_die_ref die = NULL;
22575 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22576 rtx arg, next_arg;
22577
22578 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22579 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22580 : NULL_RTX);
22581 arg; arg = next_arg)
22582 {
22583 dw_loc_descr_ref reg, val;
22584 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22585 dw_die_ref cdie, tdie = NULL;
22586
22587 next_arg = XEXP (arg, 1);
22588 if (REG_P (XEXP (XEXP (arg, 0), 0))
22589 && next_arg
22590 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22591 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22592 && REGNO (XEXP (XEXP (arg, 0), 0))
22593 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22594 next_arg = XEXP (next_arg, 1);
22595 if (mode == VOIDmode)
22596 {
22597 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22598 if (mode == VOIDmode)
22599 mode = GET_MODE (XEXP (arg, 0));
22600 }
22601 if (mode == VOIDmode || mode == BLKmode)
22602 continue;
22603 /* Get dynamic information about call target only if we
22604 have no static information: we cannot generate both
22605 DW_AT_call_origin and DW_AT_call_target
22606 attributes. */
22607 if (ca_loc->symbol_ref == NULL_RTX)
22608 {
22609 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22610 {
22611 tloc = XEXP (XEXP (arg, 0), 1);
22612 continue;
22613 }
22614 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22615 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22616 {
22617 tlocc = XEXP (XEXP (arg, 0), 1);
22618 continue;
22619 }
22620 }
22621 reg = NULL;
22622 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22623 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22624 VAR_INIT_STATUS_INITIALIZED);
22625 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22626 {
22627 rtx mem = XEXP (XEXP (arg, 0), 0);
22628 reg = mem_loc_descriptor (XEXP (mem, 0),
22629 get_address_mode (mem),
22630 GET_MODE (mem),
22631 VAR_INIT_STATUS_INITIALIZED);
22632 }
22633 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22634 == DEBUG_PARAMETER_REF)
22635 {
22636 tree tdecl
22637 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22638 tdie = lookup_decl_die (tdecl);
22639 if (tdie == NULL)
22640 continue;
22641 }
22642 else
22643 continue;
22644 if (reg == NULL
22645 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22646 != DEBUG_PARAMETER_REF)
22647 continue;
22648 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22649 VOIDmode,
22650 VAR_INIT_STATUS_INITIALIZED);
22651 if (val == NULL)
22652 continue;
22653 if (die == NULL)
22654 die = gen_call_site_die (decl, subr_die, ca_loc);
22655 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22656 NULL_TREE);
22657 if (reg != NULL)
22658 add_AT_loc (cdie, DW_AT_location, reg);
22659 else if (tdie != NULL)
22660 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22661 tdie);
22662 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22663 if (next_arg != XEXP (arg, 1))
22664 {
22665 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22666 if (mode == VOIDmode)
22667 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22668 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22669 0), 1),
22670 mode, VOIDmode,
22671 VAR_INIT_STATUS_INITIALIZED);
22672 if (val != NULL)
22673 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22674 val);
22675 }
22676 }
22677 if (die == NULL
22678 && (ca_loc->symbol_ref || tloc))
22679 die = gen_call_site_die (decl, subr_die, ca_loc);
22680 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22681 {
22682 dw_loc_descr_ref tval = NULL;
22683
22684 if (tloc != NULL_RTX)
22685 tval = mem_loc_descriptor (tloc,
22686 GET_MODE (tloc) == VOIDmode
22687 ? Pmode : GET_MODE (tloc),
22688 VOIDmode,
22689 VAR_INIT_STATUS_INITIALIZED);
22690 if (tval)
22691 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22692 else if (tlocc != NULL_RTX)
22693 {
22694 tval = mem_loc_descriptor (tlocc,
22695 GET_MODE (tlocc) == VOIDmode
22696 ? Pmode : GET_MODE (tlocc),
22697 VOIDmode,
22698 VAR_INIT_STATUS_INITIALIZED);
22699 if (tval)
22700 add_AT_loc (die,
22701 dwarf_AT (DW_AT_call_target_clobbered),
22702 tval);
22703 }
22704 }
22705 if (die != NULL)
22706 {
22707 call_site_note_count++;
22708 if (ca_loc->tail_call_p)
22709 tail_call_site_note_count++;
22710 }
22711 }
22712 }
22713 call_arg_locations = NULL;
22714 call_arg_loc_last = NULL;
22715 if (tail_call_site_count >= 0
22716 && tail_call_site_count == tail_call_site_note_count
22717 && (!dwarf_strict || dwarf_version >= 5))
22718 {
22719 if (call_site_count >= 0
22720 && call_site_count == call_site_note_count)
22721 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22722 else
22723 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22724 }
22725 call_site_count = -1;
22726 tail_call_site_count = -1;
22727 }
22728
22729   /* Mark used types after we have created DIEs for the function's scopes. */
22730 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22731 }
22732
22733 /* Returns a hash value for X (which really is a die_struct). */
22734
22735 hashval_t
22736 block_die_hasher::hash (die_struct *d)
22737 {
22738 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22739 }
22740
22741 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
22742    as the decl_id and die_parent of die_struct Y. */
22743
22744 bool
22745 block_die_hasher::equal (die_struct *x, die_struct *y)
22746 {
22747 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22748 }
22749
22750 /* Return TRUE if DECL, which may have been previously generated as
22751 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22752 true if decl (or its origin) is either an extern declaration or a
22753 class/namespace scoped declaration.
22754
22755 The declare_in_namespace support causes us to get two DIEs for one
22756 variable, both of which are declarations. We want to avoid
22757 considering one to be a specification, so we must test for
22758 DECLARATION and DW_AT_declaration. */
22759 static inline bool
22760 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22761 {
22762 return (old_die && TREE_STATIC (decl) && !declaration
22763 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22764 }
22765
22766 /* Return true if DECL is a local static. */
22767
22768 static inline bool
22769 local_function_static (tree decl)
22770 {
22771 gcc_assert (VAR_P (decl));
22772 return TREE_STATIC (decl)
22773 && DECL_CONTEXT (decl)
22774 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22775 }
22776
22777 /* Generate a DIE to represent a declared data object.
22778 Either DECL or ORIGIN must be non-null. */
22779
22780 static void
22781 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22782 {
22783 HOST_WIDE_INT off = 0;
22784 tree com_decl;
22785 tree decl_or_origin = decl ? decl : origin;
22786 tree ultimate_origin;
22787 dw_die_ref var_die;
22788 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22789 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22790 || class_or_namespace_scope_p (context_die));
22791 bool specialization_p = false;
22792 bool no_linkage_name = false;
22793
22794 /* While C++ inline static data members have definitions inside of the
22795 class, force the first DIE to be a declaration, then let gen_member_die
22796 reparent it to the class context and call gen_variable_die again
22797 to create the outside of the class DIE for the definition. */
22798 if (!declaration
22799 && old_die == NULL
22800 && decl
22801 && DECL_CONTEXT (decl)
22802 && TYPE_P (DECL_CONTEXT (decl))
22803 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22804 {
22805 declaration = true;
22806 if (dwarf_version < 5)
22807 no_linkage_name = true;
22808 }
22809
22810 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22811 if (decl || ultimate_origin)
22812 origin = ultimate_origin;
22813 com_decl = fortran_common (decl_or_origin, &off);
22814
22815   /* A symbol in a Fortran common block gets emitted as a child of the common
22816      block DIE, in the form of a data member. */
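  /* For illustration, a Fortran 'COMMON /blk/ a, b' comes out roughly as

       DW_TAG_common_block
         DW_AT_name "blk"
         DW_AT_location       <address of the block>
         DW_TAG_variable "a"  DW_AT_location <block address>
         DW_TAG_variable "b"  DW_AT_location <block address + offset of b>

     which is what the code below builds.  */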
22817 if (com_decl)
22818 {
22819 dw_die_ref com_die;
22820 dw_loc_list_ref loc = NULL;
22821 die_node com_die_arg;
22822
22823 var_die = lookup_decl_die (decl_or_origin);
22824 if (var_die)
22825 {
22826 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22827 {
22828 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22829 if (loc)
22830 {
22831 if (off)
22832 {
22833 /* Optimize the common case. */
22834 if (single_element_loc_list_p (loc)
22835 && loc->expr->dw_loc_opc == DW_OP_addr
22836 && loc->expr->dw_loc_next == NULL
22837 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22838 == SYMBOL_REF)
22839 {
22840 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22841 loc->expr->dw_loc_oprnd1.v.val_addr
22842 = plus_constant (GET_MODE (x), x , off);
22843 }
22844 else
22845 loc_list_plus_const (loc, off);
22846 }
22847 add_AT_location_description (var_die, DW_AT_location, loc);
22848 remove_AT (var_die, DW_AT_declaration);
22849 }
22850 }
22851 return;
22852 }
22853
22854 if (common_block_die_table == NULL)
22855 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22856
22857 com_die_arg.decl_id = DECL_UID (com_decl);
22858 com_die_arg.die_parent = context_die;
22859 com_die = common_block_die_table->find (&com_die_arg);
22860 if (! early_dwarf)
22861 loc = loc_list_from_tree (com_decl, 2, NULL);
22862 if (com_die == NULL)
22863 {
22864 const char *cnam
22865 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22866 die_node **slot;
22867
22868 com_die = new_die (DW_TAG_common_block, context_die, decl);
22869 add_name_and_src_coords_attributes (com_die, com_decl);
22870 if (loc)
22871 {
22872 add_AT_location_description (com_die, DW_AT_location, loc);
22873 /* Avoid sharing the same loc descriptor between
22874 DW_TAG_common_block and DW_TAG_variable. */
22875 loc = loc_list_from_tree (com_decl, 2, NULL);
22876 }
22877 else if (DECL_EXTERNAL (decl_or_origin))
22878 add_AT_flag (com_die, DW_AT_declaration, 1);
22879 if (want_pubnames ())
22880 add_pubname_string (cnam, com_die); /* ??? needed? */
22881 com_die->decl_id = DECL_UID (com_decl);
22882 slot = common_block_die_table->find_slot (com_die, INSERT);
22883 *slot = com_die;
22884 }
22885 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22886 {
22887 add_AT_location_description (com_die, DW_AT_location, loc);
22888 loc = loc_list_from_tree (com_decl, 2, NULL);
22889 remove_AT (com_die, DW_AT_declaration);
22890 }
22891 var_die = new_die (DW_TAG_variable, com_die, decl);
22892 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22893 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22894 decl_quals (decl_or_origin), false,
22895 context_die);
22896 add_alignment_attribute (var_die, decl);
22897 add_AT_flag (var_die, DW_AT_external, 1);
22898 if (loc)
22899 {
22900 if (off)
22901 {
22902 /* Optimize the common case. */
22903 if (single_element_loc_list_p (loc)
22904 && loc->expr->dw_loc_opc == DW_OP_addr
22905 && loc->expr->dw_loc_next == NULL
22906 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22907 {
22908 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22909 loc->expr->dw_loc_oprnd1.v.val_addr
22910 = plus_constant (GET_MODE (x), x, off);
22911 }
22912 else
22913 loc_list_plus_const (loc, off);
22914 }
22915 add_AT_location_description (var_die, DW_AT_location, loc);
22916 }
22917 else if (DECL_EXTERNAL (decl_or_origin))
22918 add_AT_flag (var_die, DW_AT_declaration, 1);
22919 if (decl)
22920 equate_decl_number_to_die (decl, var_die);
22921 return;
22922 }
22923
22924 if (old_die)
22925 {
22926 if (declaration)
22927 {
22928 	  /* A declaration that has been previously dumped needs no
22929 	     further annotations, since it doesn't need location info on
22930 	     the second pass. */
22931 return;
22932 }
22933 else if (decl_will_get_specification_p (old_die, decl, declaration)
22934 && !get_AT (old_die, DW_AT_specification))
22935 {
22936 /* Fall-thru so we can make a new variable die along with a
22937 DW_AT_specification. */
22938 }
22939 else if (origin && old_die->die_parent != context_die)
22940 {
22941 /* If we will be creating an inlined instance, we need a
22942 new DIE that will get annotated with
22943 DW_AT_abstract_origin. Clear things so we can get a
22944 new DIE. */
22945 gcc_assert (!DECL_ABSTRACT_P (decl));
22946 old_die = NULL;
22947 }
22948 else
22949 {
22950 /* If a DIE was dumped early, it still needs location info.
22951 Skip to where we fill the location bits. */
22952 var_die = old_die;
22953
22954 	  /* ??? In LTRANS we cannot annotate early created variably
22955 	     modified type DIEs without copying them and adjusting all
22956 	     references to them. Thus we dumped them again; also add a
22957 	     reference to them here. */
22958 tree type = TREE_TYPE (decl_or_origin);
22959 if (in_lto_p
22960 && variably_modified_type_p
22961 (type, decl_function_context (decl_or_origin)))
22962 {
22963 if (decl_by_reference_p (decl_or_origin))
22964 add_type_attribute (var_die, TREE_TYPE (type),
22965 TYPE_UNQUALIFIED, false, context_die);
22966 else
22967 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
22968 false, context_die);
22969 }
22970
22971 goto gen_variable_die_location;
22972 }
22973 }
22974
22975 /* For static data members, the declaration in the class is supposed
22976 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
22977 also in DWARF2; the specification should still be DW_TAG_variable
22978 referencing the DW_TAG_member DIE. */
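  /* For example, given

       struct S { static int x; };
       int S::x;

     the in-class declaration becomes a DW_TAG_member (before DWARF 5)
     with DW_AT_declaration set, and the namespace-scope definition
     becomes a DW_TAG_variable whose DW_AT_specification refers back to
     that DW_TAG_member.  */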
22979 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
22980 var_die = new_die (DW_TAG_member, context_die, decl);
22981 else
22982 var_die = new_die (DW_TAG_variable, context_die, decl);
22983
22984 if (origin != NULL)
22985 add_abstract_origin_attribute (var_die, origin);
22986
22987 /* Loop unrolling can create multiple blocks that refer to the same
22988 static variable, so we must test for the DW_AT_declaration flag.
22989
22990 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
22991 copy decls and set the DECL_ABSTRACT_P flag on them instead of
22992 sharing them.
22993
22994 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
22995 else if (decl_will_get_specification_p (old_die, decl, declaration))
22996 {
22997 /* This is a definition of a C++ class level static. */
22998 add_AT_specification (var_die, old_die);
22999 specialization_p = true;
23000 if (DECL_NAME (decl))
23001 {
23002 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23003 struct dwarf_file_data * file_index = lookup_filename (s.file);
23004
23005 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23006 add_AT_file (var_die, DW_AT_decl_file, file_index);
23007
23008 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23009 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23010
23011 if (debug_column_info
23012 && s.column
23013 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23014 != (unsigned) s.column))
23015 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23016
23017 if (old_die->die_tag == DW_TAG_member)
23018 add_linkage_name (var_die, decl);
23019 }
23020 }
23021 else
23022 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23023
23024 if ((origin == NULL && !specialization_p)
23025 || (origin != NULL
23026 && !DECL_ABSTRACT_P (decl_or_origin)
23027 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23028 decl_function_context
23029 (decl_or_origin))))
23030 {
23031 tree type = TREE_TYPE (decl_or_origin);
23032
23033 if (decl_by_reference_p (decl_or_origin))
23034 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23035 context_die);
23036 else
23037 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23038 context_die);
23039 }
23040
23041 if (origin == NULL && !specialization_p)
23042 {
23043 if (TREE_PUBLIC (decl))
23044 add_AT_flag (var_die, DW_AT_external, 1);
23045
23046 if (DECL_ARTIFICIAL (decl))
23047 add_AT_flag (var_die, DW_AT_artificial, 1);
23048
23049 add_alignment_attribute (var_die, decl);
23050
23051 add_accessibility_attribute (var_die, decl);
23052 }
23053
23054 if (declaration)
23055 add_AT_flag (var_die, DW_AT_declaration, 1);
23056
23057 if (decl && (DECL_ABSTRACT_P (decl)
23058 || !old_die || is_declaration_die (old_die)))
23059 equate_decl_number_to_die (decl, var_die);
23060
23061 gen_variable_die_location:
23062 if (! declaration
23063 && (! DECL_ABSTRACT_P (decl_or_origin)
23064 /* Local static vars are shared between all clones/inlines,
23065 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23066 already set. */
23067 || (VAR_P (decl_or_origin)
23068 && TREE_STATIC (decl_or_origin)
23069 && DECL_RTL_SET_P (decl_or_origin))))
23070 {
23071 if (early_dwarf)
23072 add_pubname (decl_or_origin, var_die);
23073 else
23074 add_location_or_const_value_attribute (var_die, decl_or_origin,
23075 decl == NULL);
23076 }
23077 else
23078 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23079
23080 if ((dwarf_version >= 4 || !dwarf_strict)
23081 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23082 DW_AT_const_expr) == 1
23083 && !get_AT (var_die, DW_AT_const_expr)
23084 && !specialization_p)
23085 add_AT_flag (var_die, DW_AT_const_expr, 1);
23086
23087 if (!dwarf_strict)
23088 {
23089 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23090 DW_AT_inline);
23091 if (inl != -1
23092 && !get_AT (var_die, DW_AT_inline)
23093 && !specialization_p)
23094 add_AT_unsigned (var_die, DW_AT_inline, inl);
23095 }
23096 }
23097
23098 /* Generate a DIE to represent a named constant. */
23099
23100 static void
23101 gen_const_die (tree decl, dw_die_ref context_die)
23102 {
23103 dw_die_ref const_die;
23104 tree type = TREE_TYPE (decl);
23105
23106 const_die = lookup_decl_die (decl);
23107 if (const_die)
23108 return;
23109
23110 const_die = new_die (DW_TAG_constant, context_die, decl);
23111 equate_decl_number_to_die (decl, const_die);
23112 add_name_and_src_coords_attributes (const_die, decl);
23113 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23114 if (TREE_PUBLIC (decl))
23115 add_AT_flag (const_die, DW_AT_external, 1);
23116 if (DECL_ARTIFICIAL (decl))
23117 add_AT_flag (const_die, DW_AT_artificial, 1);
23118 tree_add_const_value_attribute_for_decl (const_die, decl);
23119 }
23120
23121 /* Generate a DIE to represent a label identifier. */
23122
23123 static void
23124 gen_label_die (tree decl, dw_die_ref context_die)
23125 {
23126 tree origin = decl_ultimate_origin (decl);
23127 dw_die_ref lbl_die = lookup_decl_die (decl);
23128 rtx insn;
23129 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23130
23131 if (!lbl_die)
23132 {
23133 lbl_die = new_die (DW_TAG_label, context_die, decl);
23134 equate_decl_number_to_die (decl, lbl_die);
23135
23136 if (origin != NULL)
23137 add_abstract_origin_attribute (lbl_die, origin);
23138 else
23139 add_name_and_src_coords_attributes (lbl_die, decl);
23140 }
23141
23142 if (DECL_ABSTRACT_P (decl))
23143 equate_decl_number_to_die (decl, lbl_die);
23144 else if (! early_dwarf)
23145 {
23146 insn = DECL_RTL_IF_SET (decl);
23147
23148 /* Deleted labels are programmer specified labels which have been
23149 eliminated because of various optimizations. We still emit them
23150 here so that it is possible to put breakpoints on them. */
23151 if (insn
23152 && (LABEL_P (insn)
23153 || ((NOTE_P (insn)
23154 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23155 {
23156 /* When optimization is enabled (via -O) some parts of the compiler
23157 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23158 represent source-level labels which were explicitly declared by
23159 the user. This really shouldn't be happening though, so catch
23160 it if it ever does happen. */
23161 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23162
23163 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23164 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23165 }
23166 else if (insn
23167 && NOTE_P (insn)
23168 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23169 && CODE_LABEL_NUMBER (insn) != -1)
23170 {
23171 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23172 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23173 }
23174 }
23175 }
23176
23177 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23178 attributes to the DIE for a block STMT, to describe where the inlined
23179 function was called from. This is similar to add_src_coords_attributes. */
23180
23181 static inline void
23182 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23183 {
23184 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23185
23186 if (dwarf_version >= 3 || !dwarf_strict)
23187 {
23188 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23189 add_AT_unsigned (die, DW_AT_call_line, s.line);
23190 if (debug_column_info && s.column)
23191 add_AT_unsigned (die, DW_AT_call_column, s.column);
23192 }
23193 }
23194
23195
23196 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23197 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23198
23199 static inline void
23200 add_high_low_attributes (tree stmt, dw_die_ref die)
23201 {
23202 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23203
23204 if (BLOCK_FRAGMENT_CHAIN (stmt)
23205 && (dwarf_version >= 3 || !dwarf_strict))
23206 {
23207 tree chain, superblock = NULL_TREE;
23208 dw_die_ref pdie;
23209 dw_attr_node *attr = NULL;
23210
23211 if (inlined_function_outer_scope_p (stmt))
23212 {
23213 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23214 BLOCK_NUMBER (stmt));
23215 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23216 }
23217
23218       /* Optimize duplicate .debug_ranges lists or even tails of
23219 	 lists. If this BLOCK has the same ranges as its supercontext,
23220 	 look up the DW_AT_ranges attribute in the supercontext (and
23221 	 recursively so), verify that the ranges_table contains the
23222 	 right values and use it instead of adding a new .debug_ranges entry. */
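      /* Sketch of the sharing below: if the supercontext's range list covers
	 fragments <P0 P1 B0 B1> and this BLOCK's own fragments are the tail
	 <B0 B1>, DW_AT_ranges can point at offset + (supercnt - thiscnt)
	 within the already-emitted list instead of adding a new one.  */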
23223 for (chain = stmt, pdie = die;
23224 BLOCK_SAME_RANGE (chain);
23225 chain = BLOCK_SUPERCONTEXT (chain))
23226 {
23227 dw_attr_node *new_attr;
23228
23229 pdie = pdie->die_parent;
23230 if (pdie == NULL)
23231 break;
23232 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23233 break;
23234 new_attr = get_AT (pdie, DW_AT_ranges);
23235 if (new_attr == NULL
23236 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23237 break;
23238 attr = new_attr;
23239 superblock = BLOCK_SUPERCONTEXT (chain);
23240 }
23241 if (attr != NULL
23242 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23243 == BLOCK_NUMBER (superblock))
23244 && BLOCK_FRAGMENT_CHAIN (superblock))
23245 {
23246 unsigned long off = attr->dw_attr_val.v.val_offset;
23247 unsigned long supercnt = 0, thiscnt = 0;
23248 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23249 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23250 {
23251 ++supercnt;
23252 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23253 == BLOCK_NUMBER (chain));
23254 }
23255 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23256 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23257 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23258 ++thiscnt;
23259 gcc_assert (supercnt >= thiscnt);
23260 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23261 false);
23262 note_rnglist_head (off + supercnt - thiscnt);
23263 return;
23264 }
23265
23266 unsigned int offset = add_ranges (stmt, true);
23267 add_AT_range_list (die, DW_AT_ranges, offset, false);
23268 note_rnglist_head (offset);
23269
23270 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23271 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23272 do
23273 {
23274 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23275 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23276 chain = BLOCK_FRAGMENT_CHAIN (chain);
23277 }
23278 while (chain);
23279 add_ranges (NULL);
23280 }
23281 else
23282 {
23283 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23284 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23285 BLOCK_NUMBER (stmt));
23286 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23287 BLOCK_NUMBER (stmt));
23288 add_AT_low_high_pc (die, label, label_high, false);
23289 }
23290 }
23291
23292 /* Generate a DIE for a lexical block. */
23293
23294 static void
23295 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23296 {
23297 dw_die_ref old_die = BLOCK_DIE (stmt);
23298 dw_die_ref stmt_die = NULL;
23299 if (!old_die)
23300 {
23301 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23302 BLOCK_DIE (stmt) = stmt_die;
23303 }
23304
23305 if (BLOCK_ABSTRACT (stmt))
23306 {
23307 if (old_die)
23308 {
23309 /* This must have been generated early and it won't even
23310 need location information since it's a DW_AT_inline
23311 function. */
23312 if (flag_checking)
23313 for (dw_die_ref c = context_die; c; c = c->die_parent)
23314 if (c->die_tag == DW_TAG_inlined_subroutine
23315 || c->die_tag == DW_TAG_subprogram)
23316 {
23317 gcc_assert (get_AT (c, DW_AT_inline));
23318 break;
23319 }
23320 return;
23321 }
23322 }
23323 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23324 {
23325 /* If this is an inlined instance, create a new lexical die for
23326 anything below to attach DW_AT_abstract_origin to. */
23327 if (old_die)
23328 {
23329 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23330 BLOCK_DIE (stmt) = stmt_die;
23331 old_die = NULL;
23332 }
23333
23334 tree origin = block_ultimate_origin (stmt);
23335 if (origin != NULL_TREE && origin != stmt)
23336 add_abstract_origin_attribute (stmt_die, origin);
23337 }
23338
23339 if (old_die)
23340 stmt_die = old_die;
23341
23342   /* A non-abstract block whose blocks have already been reordered
23343 should have the instruction range for this block. If so, set the
23344 high/low attributes. */
23345 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23346 {
23347 gcc_assert (stmt_die);
23348 add_high_low_attributes (stmt, stmt_die);
23349 }
23350
23351 decls_for_scope (stmt, stmt_die);
23352 }
23353
23354 /* Generate a DIE for an inlined subprogram. */
23355
23356 static void
23357 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23358 {
23359 tree decl;
23360
23361   /* The instance of the function that is effectively being inlined must not
23362      be abstract. */
23363 gcc_assert (! BLOCK_ABSTRACT (stmt));
23364
23365 decl = block_ultimate_origin (stmt);
23366
23367 /* Make sure any inlined functions are known to be inlineable. */
23368 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23369 || cgraph_function_possibly_inlined_p (decl));
23370
23371 if (! BLOCK_ABSTRACT (stmt))
23372 {
23373 dw_die_ref subr_die
23374 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23375
23376 if (call_arg_locations)
23377 BLOCK_DIE (stmt) = subr_die;
23378 add_abstract_origin_attribute (subr_die, decl);
23379 if (TREE_ASM_WRITTEN (stmt))
23380 add_high_low_attributes (stmt, subr_die);
23381 add_call_src_coords_attributes (stmt, subr_die);
23382
23383 decls_for_scope (stmt, subr_die);
23384 }
23385 }
23386
23387 /* Generate a DIE for a field in a record or structure. CTX is required: see
23388 the comment for VLR_CONTEXT. */
23389
23390 static void
23391 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23392 {
23393 dw_die_ref decl_die;
23394
23395 if (TREE_TYPE (decl) == error_mark_node)
23396 return;
23397
23398 decl_die = new_die (DW_TAG_member, context_die, decl);
23399 add_name_and_src_coords_attributes (decl_die, decl);
23400 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23401 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23402 context_die);
23403
23404 if (DECL_BIT_FIELD_TYPE (decl))
23405 {
23406 add_byte_size_attribute (decl_die, decl);
23407 add_bit_size_attribute (decl_die, decl);
23408 add_bit_offset_attribute (decl_die, decl, ctx);
23409 }
23410
23411 add_alignment_attribute (decl_die, decl);
23412
23413 /* If we have a variant part offset, then we are supposed to process a member
23414 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23415 trees. */
23416 gcc_assert (ctx->variant_part_offset == NULL_TREE
23417 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23418 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23419 add_data_member_location_attribute (decl_die, decl, ctx);
23420
23421 if (DECL_ARTIFICIAL (decl))
23422 add_AT_flag (decl_die, DW_AT_artificial, 1);
23423
23424 add_accessibility_attribute (decl_die, decl);
23425
23426 /* Equate decl number to die, so that we can look up this decl later on. */
23427 equate_decl_number_to_die (decl, decl_die);
23428 }
23429
23430 /* Generate a DIE for a pointer to a member type. TYPE can be an
23431 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23432 pointer to member function. */
23433
23434 static void
23435 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23436 {
23437 if (lookup_type_die (type))
23438 return;
23439
23440 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23441 scope_die_for (type, context_die), type);
23442
23443 equate_type_number_to_die (type, ptr_die);
23444 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23445 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23446 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23447 context_die);
23448 add_alignment_attribute (ptr_die, type);
23449
23450 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23451 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23452 {
23453 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23454 add_AT_loc (ptr_die, DW_AT_use_location, op);
23455 }
23456 }
23457
23458 static char *producer_string;
23459
23460 /* Return a heap-allocated producer string, including the command-line
23461    options if -grecord-gcc-switches is in effect. */
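/* A typical (illustrative) result with -grecord-gcc-switches looks
   something like "GNU C11 7.3.0 -march=x86-64 -O2 -g": language and
   version followed by the interesting options, with the options
   filtered out below omitted.  */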
23462
23463 static char *
23464 gen_producer_string (void)
23465 {
23466 size_t j;
23467 auto_vec<const char *> switches;
23468 const char *language_string = lang_hooks.name;
23469 char *producer, *tail;
23470 const char *p;
23471 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23472 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23473
23474 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23475 switch (save_decoded_options[j].opt_index)
23476 {
23477 case OPT_o:
23478 case OPT_d:
23479 case OPT_dumpbase:
23480 case OPT_dumpdir:
23481 case OPT_auxbase:
23482 case OPT_auxbase_strip:
23483 case OPT_quiet:
23484 case OPT_version:
23485 case OPT_v:
23486 case OPT_w:
23487 case OPT_L:
23488 case OPT_D:
23489 case OPT_I:
23490 case OPT_U:
23491 case OPT_SPECIAL_unknown:
23492 case OPT_SPECIAL_ignore:
23493 case OPT_SPECIAL_program_name:
23494 case OPT_SPECIAL_input_file:
23495 case OPT_grecord_gcc_switches:
23496 case OPT__output_pch_:
23497 case OPT_fdiagnostics_show_location_:
23498 case OPT_fdiagnostics_show_option:
23499 case OPT_fdiagnostics_show_caret:
23500 case OPT_fdiagnostics_color_:
23501 case OPT_fverbose_asm:
23502 case OPT____:
23503 case OPT__sysroot_:
23504 case OPT_nostdinc:
23505 case OPT_nostdinc__:
23506 case OPT_fpreprocessed:
23507 case OPT_fltrans_output_list_:
23508 case OPT_fresolution_:
23509 case OPT_fdebug_prefix_map_:
23510 case OPT_fcompare_debug:
23511 /* Ignore these. */
23512 continue;
23513 default:
23514 if (cl_options[save_decoded_options[j].opt_index].flags
23515 & CL_NO_DWARF_RECORD)
23516 continue;
23517 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23518 == '-');
23519 switch (save_decoded_options[j].canonical_option[0][1])
23520 {
23521 case 'M':
23522 case 'i':
23523 case 'W':
23524 continue;
23525 case 'f':
23526 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23527 "dump", 4) == 0)
23528 continue;
23529 break;
23530 default:
23531 break;
23532 }
23533 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23534 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23535 break;
23536 }
23537
23538 producer = XNEWVEC (char, plen + 1 + len + 1);
23539 tail = producer;
23540 sprintf (tail, "%s %s", language_string, version_string);
23541 tail += plen;
23542
23543 FOR_EACH_VEC_ELT (switches, j, p)
23544 {
23545 len = strlen (p);
23546 *tail = ' ';
23547 memcpy (tail + 1, p, len);
23548 tail += len + 1;
23549 }
23550
23551 *tail = '\0';
23552 return producer;
23553 }
23554
23555 /* Given a C and/or C++ language/version string return the "highest".
23556 C++ is assumed to be "higher" than C in this case. Used for merging
23557 LTO translation unit languages. */
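/* For example, merging a "GNU C11" unit with a "GNU C++14" unit yields
   "GNU C++14", and two C-only units yield the newer of the two C
   dialects.  */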
23558 static const char *
23559 highest_c_language (const char *lang1, const char *lang2)
23560 {
23561 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
23562 return "GNU C++17";
23563 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23564 return "GNU C++14";
23565 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23566 return "GNU C++11";
23567 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23568 return "GNU C++98";
23569
23570 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
23571 return "GNU C17";
23572 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23573 return "GNU C11";
23574 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23575 return "GNU C99";
23576 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23577 return "GNU C89";
23578
23579 gcc_unreachable ();
23580 }
23581
23582
23583 /* Generate the DIE for the compilation unit. */
23584
23585 static dw_die_ref
23586 gen_compile_unit_die (const char *filename)
23587 {
23588 dw_die_ref die;
23589 const char *language_string = lang_hooks.name;
23590 int language;
23591
23592 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23593
23594 if (filename)
23595 {
23596 add_name_attribute (die, filename);
23597 /* Don't add cwd for <built-in>. */
23598 if (filename[0] != '<')
23599 add_comp_dir_attribute (die);
23600 }
23601
23602 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23603
23604   /* If our producer is LTO, try to figure out a common language to use
23605 from the global list of translation units. */
23606 if (strcmp (language_string, "GNU GIMPLE") == 0)
23607 {
23608 unsigned i;
23609 tree t;
23610 const char *common_lang = NULL;
23611
23612 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23613 {
23614 if (!TRANSLATION_UNIT_LANGUAGE (t))
23615 continue;
23616 if (!common_lang)
23617 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23618 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23619 ;
23620 else if (strncmp (common_lang, "GNU C", 5) == 0
23621 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23622 /* Mixing C and C++ is ok, use C++ in that case. */
23623 common_lang = highest_c_language (common_lang,
23624 TRANSLATION_UNIT_LANGUAGE (t));
23625 else
23626 {
23627 /* Fall back to C. */
23628 common_lang = NULL;
23629 break;
23630 }
23631 }
23632
23633 if (common_lang)
23634 language_string = common_lang;
23635 }
23636
23637 language = DW_LANG_C;
23638 if (strncmp (language_string, "GNU C", 5) == 0
23639 && ISDIGIT (language_string[5]))
23640 {
23641 language = DW_LANG_C89;
23642 if (dwarf_version >= 3 || !dwarf_strict)
23643 {
23644 if (strcmp (language_string, "GNU C89") != 0)
23645 language = DW_LANG_C99;
23646
23647 if (dwarf_version >= 5 /* || !dwarf_strict */)
23648 if (strcmp (language_string, "GNU C11") == 0
23649 || strcmp (language_string, "GNU C17") == 0)
23650 language = DW_LANG_C11;
23651 }
23652 }
23653 else if (strncmp (language_string, "GNU C++", 7) == 0)
23654 {
23655 language = DW_LANG_C_plus_plus;
23656 if (dwarf_version >= 5 /* || !dwarf_strict */)
23657 {
23658 if (strcmp (language_string, "GNU C++11") == 0)
23659 language = DW_LANG_C_plus_plus_11;
23660 else if (strcmp (language_string, "GNU C++14") == 0)
23661 language = DW_LANG_C_plus_plus_14;
23662 else if (strcmp (language_string, "GNU C++17") == 0)
23663 /* For now. */
23664 language = DW_LANG_C_plus_plus_14;
23665 }
23666 }
23667 else if (strcmp (language_string, "GNU F77") == 0)
23668 language = DW_LANG_Fortran77;
23669 else if (dwarf_version >= 3 || !dwarf_strict)
23670 {
23671 if (strcmp (language_string, "GNU Ada") == 0)
23672 language = DW_LANG_Ada95;
23673 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23674 {
23675 language = DW_LANG_Fortran95;
23676 if (dwarf_version >= 5 /* || !dwarf_strict */)
23677 {
23678 if (strcmp (language_string, "GNU Fortran2003") == 0)
23679 language = DW_LANG_Fortran03;
23680 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23681 language = DW_LANG_Fortran08;
23682 }
23683 }
23684 else if (strcmp (language_string, "GNU Objective-C") == 0)
23685 language = DW_LANG_ObjC;
23686 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23687 language = DW_LANG_ObjC_plus_plus;
23688 else if (dwarf_version >= 5 || !dwarf_strict)
23689 {
23690 if (strcmp (language_string, "GNU Go") == 0)
23691 language = DW_LANG_Go;
23692 }
23693 }
23694 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23695 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23696 language = DW_LANG_Fortran90;
23697
23698 add_AT_unsigned (die, DW_AT_language, language);
23699
23700 switch (language)
23701 {
23702 case DW_LANG_Fortran77:
23703 case DW_LANG_Fortran90:
23704 case DW_LANG_Fortran95:
23705 case DW_LANG_Fortran03:
23706 case DW_LANG_Fortran08:
23707 /* Fortran has case insensitive identifiers and the front-end
23708 lowercases everything. */
23709 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23710 break;
23711 default:
23712 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23713 break;
23714 }
23715 return die;
23716 }
23717
23718 /* Generate the DIE for a base class. */
23719
23720 static void
23721 gen_inheritance_die (tree binfo, tree access, tree type,
23722 dw_die_ref context_die)
23723 {
23724 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23725 struct vlr_context ctx = { type, NULL };
23726
23727 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23728 context_die);
23729 add_data_member_location_attribute (die, binfo, &ctx);
23730
23731 if (BINFO_VIRTUAL_P (binfo))
23732 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23733
23734 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23735 children, otherwise the default is DW_ACCESS_public. In DWARF2
23736 the default has always been DW_ACCESS_private. */
23737 if (access == access_public_node)
23738 {
23739 if (dwarf_version == 2
23740 || context_die->die_tag == DW_TAG_class_type)
23741 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23742 }
23743 else if (access == access_protected_node)
23744 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23745 else if (dwarf_version > 2
23746 && context_die->die_tag != DW_TAG_class_type)
23747 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23748 }
23749
23750 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23751 structure. */
23752 static bool
23753 is_variant_part (tree decl)
23754 {
23755 return (TREE_CODE (decl) == FIELD_DECL
23756 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23757 }
23758
23759 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23760 return the FIELD_DECL. Return NULL_TREE otherwise. */
23761
23762 static tree
23763 analyze_discr_in_predicate (tree operand, tree struct_type)
23764 {
23765 bool continue_stripping = true;
23766 while (continue_stripping)
23767 switch (TREE_CODE (operand))
23768 {
23769 CASE_CONVERT:
23770 operand = TREE_OPERAND (operand, 0);
23771 break;
23772 default:
23773 continue_stripping = false;
23774 break;
23775 }
23776
23777 /* Match field access to members of struct_type only. */
23778 if (TREE_CODE (operand) == COMPONENT_REF
23779 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23780 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23781 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23782 return TREE_OPERAND (operand, 1);
23783 else
23784 return NULL_TREE;
23785 }
23786
23787 /* Check that SRC is a constant integer that can be represented as a native
23788 integer constant (either signed or unsigned). If so, store it into DEST and
23789 return true. Return false otherwise. */
23790
23791 static bool
23792 get_discr_value (tree src, dw_discr_value *dest)
23793 {
23794 tree discr_type = TREE_TYPE (src);
23795
23796 if (lang_hooks.types.get_debug_type)
23797 {
23798 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
23799 if (debug_type != NULL)
23800 discr_type = debug_type;
23801 }
23802
23803 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
23804 return false;
23805
23806 /* Signedness can vary between the original type and the debug type. This
23807 can happen for character types in Ada for instance: the character type
23808 used for code generation can be signed, to be compatible with the C one,
23809 but from a debugger point of view, it must be unsigned. */
23810 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23811 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
23812
23813 if (is_orig_unsigned != is_debug_unsigned)
23814 src = fold_convert (discr_type, src);
23815
23816 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23817 return false;
23818
23819 dest->pos = is_debug_unsigned;
23820 if (is_debug_unsigned)
23821 dest->v.uval = tree_to_uhwi (src);
23822 else
23823 dest->v.sval = tree_to_shwi (src);
23824
23825 return true;
23826 }
23827
23828 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23829 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23830 store NULL_TREE in DISCR_DECL. Otherwise:
23831
23832 - store the discriminant field in STRUCT_TYPE that controls the variant
23833 part to *DISCR_DECL
23834
23835 - put in *DISCR_LISTS_P an array where for each variant, the item
23836 represents the corresponding matching list of discriminant values.
23837
23838 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23839 the above array.
23840
23841 Note that when the array is allocated (i.e. when the analysis is
23842 successful), it is up to the caller to free the array. */
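/* For illustration, the DECL_QUALIFIER predicates this analysis
   understands look like (with P standing for the PLACEHOLDER_EXPR of
   STRUCT_TYPE):

     P.discr == 1
     P.discr == 1 || (P.discr >= 3 && P.discr <= 5)
     boolean_true_node           (the "others"/default variant)

   Any other shape makes the analysis give up and report no
   discriminant.  */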
23843
23844 static void
23845 analyze_variants_discr (tree variant_part_decl,
23846 tree struct_type,
23847 tree *discr_decl,
23848 dw_discr_list_ref **discr_lists_p,
23849 unsigned *discr_lists_length)
23850 {
23851 tree variant_part_type = TREE_TYPE (variant_part_decl);
23852 tree variant;
23853 dw_discr_list_ref *discr_lists;
23854 unsigned i;
23855
23856 /* Compute how many variants there are in this variant part. */
23857 *discr_lists_length = 0;
23858 for (variant = TYPE_FIELDS (variant_part_type);
23859 variant != NULL_TREE;
23860 variant = DECL_CHAIN (variant))
23861 ++*discr_lists_length;
23862
23863 *discr_decl = NULL_TREE;
23864 *discr_lists_p
23865 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23866 sizeof (**discr_lists_p));
23867 discr_lists = *discr_lists_p;
23868
23869 /* And then analyze all variants to extract discriminant information for all
23870 of them. This analysis is conservative: as soon as we detect something we
23871 do not support, abort everything and pretend we found nothing. */
23872 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23873 variant != NULL_TREE;
23874 variant = DECL_CHAIN (variant), ++i)
23875 {
23876 tree match_expr = DECL_QUALIFIER (variant);
23877
23878 /* Now, try to analyze the predicate and deduce a discriminant for
23879 it. */
23880 if (match_expr == boolean_true_node)
23881 /* Typically happens for the default variant: it matches all cases that
23882 previous variants rejected. Don't output any matching value for
23883 this one. */
23884 continue;
23885
23886 /* The following loop tries to iterate over each discriminant
23887 possibility: single values or ranges. */
23888 while (match_expr != NULL_TREE)
23889 {
23890 tree next_round_match_expr;
23891 tree candidate_discr = NULL_TREE;
23892 dw_discr_list_ref new_node = NULL;
23893
23894 /* Possibilities are matched one after the other by nested
23895 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23896 continue with the rest at next iteration. */
23897 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23898 {
23899 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23900 match_expr = TREE_OPERAND (match_expr, 1);
23901 }
23902 else
23903 next_round_match_expr = NULL_TREE;
23904
23905 if (match_expr == boolean_false_node)
23906 /* This sub-expression matches nothing: just wait for the next
23907 one. */
23908 ;
23909
23910 else if (TREE_CODE (match_expr) == EQ_EXPR)
23911 {
23912 /* We are matching: <discr_field> == <integer_cst>
23913 This sub-expression matches a single value. */
23914 tree integer_cst = TREE_OPERAND (match_expr, 1);
23915
23916 candidate_discr
23917 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23918 struct_type);
23919
23920 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23921 if (!get_discr_value (integer_cst,
23922 &new_node->dw_discr_lower_bound))
23923 goto abort;
23924 new_node->dw_discr_range = false;
23925 }
23926
23927 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23928 {
23929 /* We are matching:
23930 <discr_field> > <integer_cst>
23931 && <discr_field> < <integer_cst>.
23932 This sub-expression matches the range of values between the
23933 two matched integer constants. Note that comparisons can be
23934 inclusive or exclusive. */
23935 tree candidate_discr_1, candidate_discr_2;
23936 tree lower_cst, upper_cst;
23937 bool lower_cst_included, upper_cst_included;
23938 tree lower_op = TREE_OPERAND (match_expr, 0);
23939 tree upper_op = TREE_OPERAND (match_expr, 1);
23940
23941 /* When the comparison is exclusive, the integer constant is not
23942 the discriminant range bound we are looking for: we will have
23943 to increment or decrement it. */
23944 if (TREE_CODE (lower_op) == GE_EXPR)
23945 lower_cst_included = true;
23946 else if (TREE_CODE (lower_op) == GT_EXPR)
23947 lower_cst_included = false;
23948 else
23949 goto abort;
23950
23951 if (TREE_CODE (upper_op) == LE_EXPR)
23952 upper_cst_included = true;
23953 else if (TREE_CODE (upper_op) == LT_EXPR)
23954 upper_cst_included = false;
23955 else
23956 goto abort;
23957
23958 /* Extract the discriminant from the first operand and check it
23959 		 is consistent with the same analysis in the second
23960 operand. */
23961 candidate_discr_1
23962 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23963 struct_type);
23964 candidate_discr_2
23965 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23966 struct_type);
23967 if (candidate_discr_1 == candidate_discr_2)
23968 candidate_discr = candidate_discr_1;
23969 else
23970 goto abort;
23971
23972 /* Extract bounds from both. */
23973 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23974 lower_cst = TREE_OPERAND (lower_op, 1);
23975 upper_cst = TREE_OPERAND (upper_op, 1);
23976
23977 if (!lower_cst_included)
23978 lower_cst
23979 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
23980 build_int_cst (TREE_TYPE (lower_cst), 1));
23981 if (!upper_cst_included)
23982 upper_cst
23983 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
23984 build_int_cst (TREE_TYPE (upper_cst), 1));
23985
23986 if (!get_discr_value (lower_cst,
23987 &new_node->dw_discr_lower_bound)
23988 || !get_discr_value (upper_cst,
23989 &new_node->dw_discr_upper_bound))
23990 goto abort;
23991
23992 new_node->dw_discr_range = true;
23993 }
23994
23995 else
23996 /* Unsupported sub-expression: we cannot determine the set of
23997 matching discriminant values. Abort everything. */
23998 goto abort;
23999
24000 	  /* If the discriminant info is not consistent with what we saw so
24001 far, consider the analysis failed and abort everything. */
24002 if (candidate_discr == NULL_TREE
24003 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24004 goto abort;
24005 else
24006 *discr_decl = candidate_discr;
24007
24008 if (new_node != NULL)
24009 {
24010 new_node->dw_discr_next = discr_lists[i];
24011 discr_lists[i] = new_node;
24012 }
24013 match_expr = next_round_match_expr;
24014 }
24015 }
24016
24017 /* If we reach this point, we could match everything we were interested
24018 in. */
24019 return;
24020
24021 abort:
24022   /* Clean up all data structures and return no result. */
24023 free (*discr_lists_p);
24024 *discr_lists_p = NULL;
24025 *discr_decl = NULL_TREE;
24026 }
24027
24028 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24029 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24030 under CONTEXT_DIE.
24031
24032 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24033 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24034 this type, which are record types, represent the available variants and each
24035 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24036 values are inferred from these attributes.
24037
24038 In trees, the offsets for the fields inside these sub-records are relative
24039 to the variant part itself, whereas the corresponding DIEs should have
24040 offset attributes that are relative to the embedding record base address.
24041 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24042 must be an expression that computes the offset of the variant part to
24043 describe in DWARF. */
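/* As a rough illustration, an Ada-style record with a discriminant
   DISCR and two alternatives ("when 1 .. 4" and "when others") comes
   out as

     DW_TAG_variant_part
       DW_AT_discr          <reference to DISCR's DIE>
       DW_TAG_variant
         DW_AT_discr_list   <1 .. 4>
         DW_TAG_member ...  (the fields of that alternative)
       DW_TAG_variant
         DW_TAG_member ...  (the default alternative, no list)

   with member offsets rewritten to be relative to the embedding
   record, as explained above.  */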
24044
24045 static void
24046 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24047 dw_die_ref context_die)
24048 {
24049 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24050 tree variant_part_offset = vlr_ctx->variant_part_offset;
24051 struct loc_descr_context ctx = {
24052 vlr_ctx->struct_type, /* context_type */
24053 NULL_TREE, /* base_decl */
24054 NULL, /* dpi */
24055 false, /* placeholder_arg */
24056 false /* placeholder_seen */
24057 };
24058
24059 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24060 NULL_TREE if there is no such field. */
24061 tree discr_decl = NULL_TREE;
24062 dw_discr_list_ref *discr_lists;
24063 unsigned discr_lists_length = 0;
24064 unsigned i;
24065
24066 dw_die_ref dwarf_proc_die = NULL;
24067 dw_die_ref variant_part_die
24068 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24069
24070 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24071
24072 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24073 &discr_decl, &discr_lists, &discr_lists_length);
24074
24075 if (discr_decl != NULL_TREE)
24076 {
24077 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24078
24079 if (discr_die)
24080 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24081 else
24082 /* We have no DIE for the discriminant, so just discard all
24083 discriminant information in the output. */
24084 discr_decl = NULL_TREE;
24085 }
24086
24087 /* If the offset for this variant part is more complex than a constant,
24088 create a DWARF procedure for it so that we will not have to generate DWARF
24089 expressions for it for each member. */
24090 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24091 && (dwarf_version >= 3 || !dwarf_strict))
24092 {
24093 const tree dwarf_proc_fndecl
24094 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24095 build_function_type (TREE_TYPE (variant_part_offset),
24096 NULL_TREE));
24097 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24098 const dw_loc_descr_ref dwarf_proc_body
24099 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24100
24101 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24102 dwarf_proc_fndecl, context_die);
24103 if (dwarf_proc_die != NULL)
24104 variant_part_offset = dwarf_proc_call;
24105 }
24106
24107 /* Output DIEs for all variants. */
24108 i = 0;
24109 for (tree variant = TYPE_FIELDS (variant_part_type);
24110 variant != NULL_TREE;
24111 variant = DECL_CHAIN (variant), ++i)
24112 {
24113 tree variant_type = TREE_TYPE (variant);
24114 dw_die_ref variant_die;
24115
24116 /* All variants (i.e. members of a variant part) are supposed to be
24117 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24118 under these records. */
24119 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24120
24121 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24122 equate_decl_number_to_die (variant, variant_die);
24123
24124 /* Output discriminant values this variant matches, if any. */
24125 if (discr_decl == NULL || discr_lists[i] == NULL)
24126 /* In the case we have no discriminant information at all, this is
24127 probably the default variant: as the standard says, don't
24128 output any discriminant value/list attribute. */
24129 ;
24130 else if (discr_lists[i]->dw_discr_next == NULL
24131 && !discr_lists[i]->dw_discr_range)
24132 /* If there is only one accepted value, don't bother outputting a
24133 list. */
24134 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24135 else
24136 add_discr_list (variant_die, discr_lists[i]);
24137
24138 for (tree member = TYPE_FIELDS (variant_type);
24139 member != NULL_TREE;
24140 member = DECL_CHAIN (member))
24141 {
24142 struct vlr_context vlr_sub_ctx = {
24143 vlr_ctx->struct_type, /* struct_type */
24144 NULL /* variant_part_offset */
24145 };
24146 if (is_variant_part (member))
24147 {
24148 /* All offsets for fields inside variant parts are relative to
24149 the top-level embedding RECORD_TYPE's base address. On the
24150 other hand, offsets in GCC's types are relative to the
24151 nested-most variant part. So we have to sum offsets each time
24152 we recurse. */
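 /* Illustrative example: if this variant part sits at byte 8 of the
 record and MEMBER is a nested variant part at byte 4 of this one,
 the nested recursion gets VARIANT_PART_OFFSET 8 + 4 = 12, i.e. an
 offset relative to the record base address. */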
24153
24154 vlr_sub_ctx.variant_part_offset
24155 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24156 variant_part_offset, byte_position (member));
24157 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24158 }
24159 else
24160 {
24161 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24162 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24163 }
24164 }
24165 }
24166
24167 free (discr_lists);
24168 }
24169
24170 /* Generate a DIE for a class member. */
24171
24172 static void
24173 gen_member_die (tree type, dw_die_ref context_die)
24174 {
24175 tree member;
24176 tree binfo = TYPE_BINFO (type);
24177
24178 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24179
24180 /* If this is not an incomplete type, output descriptions of each of its
24181 members. Note that as we output the DIEs necessary to represent the
24182 members of this record or union type, we will also be trying to output
24183 DIEs to represent the *types* of those members. However the `type'
24184 function (above) will specifically avoid generating type DIEs for member
24185 types *within* the list of member DIEs for this (containing) type except
24186 for those types (of members) which are explicitly marked as also being
24187 members of this (containing) type themselves. The g++ front end can
24188 force any given type to be treated as a member of some other (containing)
24189 type by setting the TYPE_CONTEXT of the given (member) type to point to
24190 the TREE node representing the appropriate (containing) type. */
24191
24192 /* First output info about the base classes. */
24193 if (binfo)
24194 {
24195 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24196 int i;
24197 tree base;
24198
24199 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24200 gen_inheritance_die (base,
24201 (accesses ? (*accesses)[i] : access_public_node),
24202 type,
24203 context_die);
24204 }
24205
24206 /* Now output info about the data members and type members. */
24207 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24208 {
24209 struct vlr_context vlr_ctx = { type, NULL_TREE };
24210 bool static_inline_p
24211 = (TREE_STATIC (member)
24212 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24213 != -1));
24214
24215 /* Ignore clones. */
24216 if (DECL_ABSTRACT_ORIGIN (member))
24217 continue;
24218
24219 /* If we thought we were generating minimal debug info for TYPE
24220 and then changed our minds, some of the member declarations
24221 may have already been defined. Don't define them again, but
24222 do put them in the right order. */
24223
24224 if (dw_die_ref child = lookup_decl_die (member))
24225 {
24226 /* Handle inline static data members, which only have in-class
24227 declarations. */
24228 dw_die_ref ref = NULL;
24229 if (child->die_tag == DW_TAG_variable
24230 && child->die_parent == comp_unit_die ())
24231 {
24232 ref = get_AT_ref (child, DW_AT_specification);
24233 /* For C++17 inline static data members followed by redundant
24234 out-of-class redeclaration, we might get here with
24235 child being the DIE created for the out-of-class
24236 redeclaration and with its DW_AT_specification being
24237 the DIE created for in-class definition. We want to
24238 reparent the latter, and don't want to create another
24239 DIE with DW_AT_specification in that case, because
24240 we already have one. */
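 /* A hypothetical C++17 example of this situation:
 struct S { static constexpr int m = 0; };
 constexpr int S::m; // redundant out-of-class redeclaration
 in which case child may be the DIE for the redeclaration and
 ref the DIE for the in-class definition. */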
24241 if (ref
24242 && static_inline_p
24243 && ref->die_tag == DW_TAG_variable
24244 && ref->die_parent == comp_unit_die ()
24245 && get_AT (ref, DW_AT_specification) == NULL)
24246 {
24247 child = ref;
24248 ref = NULL;
24249 static_inline_p = false;
24250 }
24251 }
24252
24253 if (child->die_tag == DW_TAG_variable
24254 && child->die_parent == comp_unit_die ()
24255 && ref == NULL)
24256 {
24257 reparent_child (child, context_die);
24258 if (dwarf_version < 5)
24259 child->die_tag = DW_TAG_member;
24260 }
24261 else
24262 splice_child_die (context_die, child);
24263 }
24264
24265 /* Do not generate standard DWARF for variant parts if we are generating
24266 the corresponding GNAT encodings: DIEs generated for both would
24267 conflict in our mappings. */
24268 else if (is_variant_part (member)
24269 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24270 {
24271 vlr_ctx.variant_part_offset = byte_position (member);
24272 gen_variant_part (member, &vlr_ctx, context_die);
24273 }
24274 else
24275 {
24276 vlr_ctx.variant_part_offset = NULL_TREE;
24277 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24278 }
24279
24280 /* For C++ inline static data members, immediately emit a DW_TAG_variable
24281 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24282 DW_AT_specification. */
24283 if (static_inline_p)
24284 {
24285 int old_extern = DECL_EXTERNAL (member);
24286 DECL_EXTERNAL (member) = 0;
24287 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24288 DECL_EXTERNAL (member) = old_extern;
24289 }
24290 }
24291 }
24292
24293 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24294 is set, we pretend that the type was never defined, so we only get the
24295 member DIEs needed by later specification DIEs. */
24296
24297 static void
24298 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24299 enum debug_info_usage usage)
24300 {
24301 if (TREE_ASM_WRITTEN (type))
24302 {
24303 /* Fill in the bounds of variable-length fields in late dwarf if
24304 still incomplete. */
24305 if (!early_dwarf && variably_modified_type_p (type, NULL))
24306 for (tree member = TYPE_FIELDS (type);
24307 member;
24308 member = DECL_CHAIN (member))
24309 fill_variable_array_bounds (TREE_TYPE (member));
24310 return;
24311 }
24312
24313 dw_die_ref type_die = lookup_type_die (type);
24314 dw_die_ref scope_die = 0;
24315 int nested = 0;
24316 int complete = (TYPE_SIZE (type)
24317 && (! TYPE_STUB_DECL (type)
24318 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24319 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24320 complete = complete && should_emit_struct_debug (type, usage);
24321
24322 if (type_die && ! complete)
24323 return;
24324
24325 if (TYPE_CONTEXT (type) != NULL_TREE
24326 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24327 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24328 nested = 1;
24329
24330 scope_die = scope_die_for (type, context_die);
24331
24332 /* Generate child DIEs for template parameters. */
24333 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24334 schedule_generic_params_dies_gen (type);
24335
24336 if (! type_die || (nested && is_cu_die (scope_die)))
24337 /* First occurrence of type or toplevel definition of nested class. */
24338 {
24339 dw_die_ref old_die = type_die;
24340
24341 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24342 ? record_type_tag (type) : DW_TAG_union_type,
24343 scope_die, type);
24344 equate_type_number_to_die (type, type_die);
24345 if (old_die)
24346 add_AT_specification (type_die, old_die);
24347 else
24348 add_name_attribute (type_die, type_tag (type));
24349 }
24350 else
24351 remove_AT (type_die, DW_AT_declaration);
24352
24353 /* If this type has been completed, then give it a byte_size attribute and
24354 then give a list of members. */
24355 if (complete && !ns_decl)
24356 {
24357 /* Prevent infinite recursion in cases where the type of some member of
24358 this type is expressed in terms of this type itself. */
24359 TREE_ASM_WRITTEN (type) = 1;
24360 add_byte_size_attribute (type_die, type);
24361 add_alignment_attribute (type_die, type);
24362 if (TYPE_STUB_DECL (type) != NULL_TREE)
24363 {
24364 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24365 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24366 }
24367
24368 /* If the first reference to this type was as the return type of an
24369 inline function, then it may not have a parent. Fix this now. */
24370 if (type_die->die_parent == NULL)
24371 add_child_die (scope_die, type_die);
24372
24373 push_decl_scope (type);
24374 gen_member_die (type, type_die);
24375 pop_decl_scope ();
24376
24377 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24378 if (TYPE_ARTIFICIAL (type))
24379 add_AT_flag (type_die, DW_AT_artificial, 1);
24380
24381 /* GNU extension: Record what type our vtable lives in. */
24382 if (TYPE_VFIELD (type))
24383 {
24384 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24385
24386 gen_type_die (vtype, context_die);
24387 add_AT_die_ref (type_die, DW_AT_containing_type,
24388 lookup_type_die (vtype));
24389 }
24390 }
24391 else
24392 {
24393 add_AT_flag (type_die, DW_AT_declaration, 1);
24394
24395 /* We don't need to do this for function-local types. */
24396 if (TYPE_STUB_DECL (type)
24397 && ! decl_function_context (TYPE_STUB_DECL (type)))
24398 vec_safe_push (incomplete_types, type);
24399 }
24400
24401 if (get_AT (type_die, DW_AT_name))
24402 add_pubtype (type, type_die);
24403 }
24404
24405 /* Generate a DIE for a subroutine _type_. */
24406
24407 static void
24408 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24409 {
24410 tree return_type = TREE_TYPE (type);
24411 dw_die_ref subr_die
24412 = new_die (DW_TAG_subroutine_type,
24413 scope_die_for (type, context_die), type);
24414
24415 equate_type_number_to_die (type, subr_die);
24416 add_prototyped_attribute (subr_die, type);
24417 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24418 context_die);
24419 add_alignment_attribute (subr_die, type);
24420 gen_formal_types_die (type, subr_die);
24421
24422 if (get_AT (subr_die, DW_AT_name))
24423 add_pubtype (type, subr_die);
24424 if ((dwarf_version >= 5 || !dwarf_strict)
24425 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24426 add_AT_flag (subr_die, DW_AT_reference, 1);
24427 if ((dwarf_version >= 5 || !dwarf_strict)
24428 && lang_hooks.types.type_dwarf_attribute (type,
24429 DW_AT_rvalue_reference) != -1)
24430 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24431 }
24432
24433 /* Generate a DIE for a type definition. */
24434
24435 static void
24436 gen_typedef_die (tree decl, dw_die_ref context_die)
24437 {
24438 dw_die_ref type_die;
24439 tree type;
24440
24441 if (TREE_ASM_WRITTEN (decl))
24442 {
24443 if (DECL_ORIGINAL_TYPE (decl))
24444 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24445 return;
24446 }
24447
24448 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
24449 checks in process_scope_var and modified_type_die), this should be called
24450 only for original types. */
24451 gcc_assert (decl_ultimate_origin (decl) == NULL
24452 || decl_ultimate_origin (decl) == decl);
24453
24454 TREE_ASM_WRITTEN (decl) = 1;
24455 type_die = new_die (DW_TAG_typedef, context_die, decl);
24456
24457 add_name_and_src_coords_attributes (type_die, decl);
24458 if (DECL_ORIGINAL_TYPE (decl))
24459 {
24460 type = DECL_ORIGINAL_TYPE (decl);
24461 if (type == error_mark_node)
24462 return;
24463
24464 gcc_assert (type != TREE_TYPE (decl));
24465 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24466 }
24467 else
24468 {
24469 type = TREE_TYPE (decl);
24470 if (type == error_mark_node)
24471 return;
24472
24473 if (is_naming_typedef_decl (TYPE_NAME (type)))
24474 {
24475 /* Here, we are in the case of decl being a typedef naming
24476 an anonymous type, e.g.:
24477 typedef struct {...} foo;
24478 In that case TREE_TYPE (decl) is not a typedef variant
24479 type and TYPE_NAME of the anonymous type is set to the
24480 TYPE_DECL of the typedef. This construct is emitted by
24481 the C++ FE.
24482
24483 TYPE is the anonymous struct named by the typedef
24484 DECL. As we need the DW_AT_type attribute of the
24485 DW_TAG_typedef to point to the DIE of TYPE, let's
24486 generate that DIE right away. add_type_attribute
24487 called below will then pick (via lookup_type_die) that
24488 anonymous struct DIE. */
24489 if (!TREE_ASM_WRITTEN (type))
24490 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24491
24492 /* This is a GNU Extension. We are adding a
24493 DW_AT_linkage_name attribute to the DIE of the
24494 anonymous struct TYPE. The value of that attribute
24495 is the name of the typedef decl naming the anonymous
24496 struct. This greatly eases the work of consumers of
24497 this debug info. */
24498 add_linkage_name_raw (lookup_type_die (type), decl);
24499 }
24500 }
24501
24502 add_type_attribute (type_die, type, decl_quals (decl), false,
24503 context_die);
24504
24505 if (is_naming_typedef_decl (decl))
24506 /* We want that all subsequent calls to lookup_type_die with
24507 TYPE in argument yield the DW_TAG_typedef we have just
24508 created. */
24509 equate_type_number_to_die (type, type_die);
24510
24511 add_alignment_attribute (type_die, TREE_TYPE (decl));
24512
24513 add_accessibility_attribute (type_die, decl);
24514
24515 if (DECL_ABSTRACT_P (decl))
24516 equate_decl_number_to_die (decl, type_die);
24517
24518 if (get_AT (type_die, DW_AT_name))
24519 add_pubtype (decl, type_die);
24520 }
24521
24522 /* Generate a DIE for a struct, class, enum or union type. */
24523
24524 static void
24525 gen_tagged_type_die (tree type,
24526 dw_die_ref context_die,
24527 enum debug_info_usage usage)
24528 {
24529 int need_pop;
24530
24531 if (type == NULL_TREE
24532 || !is_tagged_type (type))
24533 return;
24534
24535 if (TREE_ASM_WRITTEN (type))
24536 need_pop = 0;
24537 /* If this is a nested type whose containing class hasn't been written
24538 out yet, writing it out will cover this one, too. This does not apply
24539 to instantiations of member class templates; they need to be added to
24540 the containing class as they are generated. FIXME: This hurts the
24541 idea of combining type decls from multiple TUs, since we can't predict
24542 what set of template instantiations we'll get. */
24543 else if (TYPE_CONTEXT (type)
24544 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24545 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24546 {
24547 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24548
24549 if (TREE_ASM_WRITTEN (type))
24550 return;
24551
24552 /* If that failed, attach ourselves to the stub. */
24553 push_decl_scope (TYPE_CONTEXT (type));
24554 context_die = lookup_type_die (TYPE_CONTEXT (type));
24555 need_pop = 1;
24556 }
24557 else if (TYPE_CONTEXT (type) != NULL_TREE
24558 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24559 {
24560 /* If this type is local to a function that hasn't been written
24561 out yet, use a NULL context for now; it will be fixed up in
24562 decls_for_scope. */
24563 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24564 /* A declaration DIE doesn't count; nested types need to go in the
24565 specification. */
24566 if (context_die && is_declaration_die (context_die))
24567 context_die = NULL;
24568 need_pop = 0;
24569 }
24570 else
24571 {
24572 context_die = declare_in_namespace (type, context_die);
24573 need_pop = 0;
24574 }
24575
24576 if (TREE_CODE (type) == ENUMERAL_TYPE)
24577 {
24578 /* This might have been written out by the call to
24579 declare_in_namespace. */
24580 if (!TREE_ASM_WRITTEN (type))
24581 gen_enumeration_type_die (type, context_die);
24582 }
24583 else
24584 gen_struct_or_union_type_die (type, context_die, usage);
24585
24586 if (need_pop)
24587 pop_decl_scope ();
24588
24589 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24590 it up if it is ever completed. gen_*_type_die will set it for us
24591 when appropriate. */
24592 }
24593
24594 /* Generate a type description DIE. */
24595
24596 static void
24597 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24598 enum debug_info_usage usage)
24599 {
24600 struct array_descr_info info;
24601
24602 if (type == NULL_TREE || type == error_mark_node)
24603 return;
24604
24605 if (flag_checking && type)
24606 verify_type (type);
24607
24608 if (TYPE_NAME (type) != NULL_TREE
24609 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24610 && is_redundant_typedef (TYPE_NAME (type))
24611 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24612 /* The DECL of this type is a typedef we don't want to emit debug
24613 info for, but we do want debug info for its underlying type.
24614 This can happen, e.g., for the injected-class-name of a C++
24615 type. */
24616 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
24617
24618 /* If TYPE is a typedef type variant, let's generate debug info
24619 for the parent typedef which TYPE is a type of. */
24620 if (typedef_variant_p (type))
24621 {
24622 if (TREE_ASM_WRITTEN (type))
24623 return;
24624
24625 tree name = TYPE_NAME (type);
24626 tree origin = decl_ultimate_origin (name);
24627 if (origin != NULL && origin != name)
24628 {
24629 gen_decl_die (origin, NULL, NULL, context_die);
24630 return;
24631 }
24632
24633 /* Prevent broken recursion; we can't hand off to the same type. */
24634 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
24635
24636 /* Give typedefs the right scope. */
24637 context_die = scope_die_for (type, context_die);
24638
24639 TREE_ASM_WRITTEN (type) = 1;
24640
24641 gen_decl_die (name, NULL, NULL, context_die);
24642 return;
24643 }
24644
24645 /* If type is an anonymous tagged type named by a typedef, let's
24646 generate debug info for the typedef. */
24647 if (is_naming_typedef_decl (TYPE_NAME (type)))
24648 {
24649 /* Use the DIE of the containing namespace as the parent DIE of
24650 the type description DIE we want to generate. */
24651 if (DECL_CONTEXT (TYPE_NAME (type))
24652 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24653 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24654
24655 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24656 return;
24657 }
24658
24659 if (lang_hooks.types.get_debug_type)
24660 {
24661 tree debug_type = lang_hooks.types.get_debug_type (type);
24662
24663 if (debug_type != NULL_TREE && debug_type != type)
24664 {
24665 gen_type_die_with_usage (debug_type, context_die, usage);
24666 return;
24667 }
24668 }
24669
24670 /* We are going to output a DIE to represent the unqualified version
24671 of this type (i.e. without any const or volatile qualifiers) so
24672 get the main variant (i.e. the unqualified version) of this type
24673 now. (Vectors and arrays are special because the debugging info is in the
24674 cloned type itself. Similarly, function/method types can contain extra
24675 ref-qualification). */
24676 if (TREE_CODE (type) == FUNCTION_TYPE
24677 || TREE_CODE (type) == METHOD_TYPE)
24678 {
24679 /* For function/method types, can't use type_main_variant here,
24680 because that can have different ref-qualifiers for C++,
24681 but try to canonicalize. */
24682 tree main = TYPE_MAIN_VARIANT (type);
24683 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24684 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
24685 && check_base_type (t, main)
24686 && check_lang_type (t, type))
24687 {
24688 type = t;
24689 break;
24690 }
24691 }
24692 else if (TREE_CODE (type) != VECTOR_TYPE
24693 && TREE_CODE (type) != ARRAY_TYPE)
24694 type = type_main_variant (type);
24695
24696 /* If this is an array type with hidden descriptor, handle it first. */
24697 if (!TREE_ASM_WRITTEN (type)
24698 && lang_hooks.types.get_array_descr_info)
24699 {
24700 memset (&info, 0, sizeof (info));
24701 if (lang_hooks.types.get_array_descr_info (type, &info))
24702 {
24703 /* Fortran sometimes emits array types with no dimension. */
24704 gcc_assert (info.ndimensions >= 0
24705 && (info.ndimensions
24706 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24707 gen_descr_array_type_die (type, &info, context_die);
24708 TREE_ASM_WRITTEN (type) = 1;
24709 return;
24710 }
24711 }
24712
24713 if (TREE_ASM_WRITTEN (type))
24714 {
24715 /* Variable-length types may be incomplete even if
24716 TREE_ASM_WRITTEN. For such types, fall through to
24717 gen_array_type_die() and possibly fill in
24718 DW_AT_{upper,lower}_bound attributes. */
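 /* A C-like illustration: the type of a VLA such as "int a[n]" is
 variably modified, and its DW_AT_upper_bound may only be filled
 in during late DWARF generation. */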
24719 if ((TREE_CODE (type) != ARRAY_TYPE
24720 && TREE_CODE (type) != RECORD_TYPE
24721 && TREE_CODE (type) != UNION_TYPE
24722 && TREE_CODE (type) != QUAL_UNION_TYPE)
24723 || !variably_modified_type_p (type, NULL))
24724 return;
24725 }
24726
24727 switch (TREE_CODE (type))
24728 {
24729 case ERROR_MARK:
24730 break;
24731
24732 case POINTER_TYPE:
24733 case REFERENCE_TYPE:
24734 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24735 ensures that the gen_type_die recursion will terminate even if the
24736 type is recursive. Recursive types are possible in Ada. */
24737 /* ??? We could perhaps do this for all types before the switch
24738 statement. */
24739 TREE_ASM_WRITTEN (type) = 1;
24740
24741 /* For these types, all that is required is that we output a DIE (or a
24742 set of DIEs) to represent the "basis" type. */
24743 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24744 DINFO_USAGE_IND_USE);
24745 break;
24746
24747 case OFFSET_TYPE:
24748 /* This code is used for C++ pointer-to-data-member types.
24749 Output a description of the relevant class type. */
24750 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24751 DINFO_USAGE_IND_USE);
24752
24753 /* Output a description of the type of the object pointed to. */
24754 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24755 DINFO_USAGE_IND_USE);
24756
24757 /* Now output a DIE to represent this pointer-to-data-member type
24758 itself. */
24759 gen_ptr_to_mbr_type_die (type, context_die);
24760 break;
24761
24762 case FUNCTION_TYPE:
24763 /* Force out return type (in case it wasn't forced out already). */
24764 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24765 DINFO_USAGE_DIR_USE);
24766 gen_subroutine_type_die (type, context_die);
24767 break;
24768
24769 case METHOD_TYPE:
24770 /* Force out return type (in case it wasn't forced out already). */
24771 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24772 DINFO_USAGE_DIR_USE);
24773 gen_subroutine_type_die (type, context_die);
24774 break;
24775
24776 case ARRAY_TYPE:
24777 case VECTOR_TYPE:
24778 gen_array_type_die (type, context_die);
24779 break;
24780
24781 case ENUMERAL_TYPE:
24782 case RECORD_TYPE:
24783 case UNION_TYPE:
24784 case QUAL_UNION_TYPE:
24785 gen_tagged_type_die (type, context_die, usage);
24786 return;
24787
24788 case VOID_TYPE:
24789 case INTEGER_TYPE:
24790 case REAL_TYPE:
24791 case FIXED_POINT_TYPE:
24792 case COMPLEX_TYPE:
24793 case BOOLEAN_TYPE:
24794 case POINTER_BOUNDS_TYPE:
24795 /* No DIEs needed for fundamental types. */
24796 break;
24797
24798 case NULLPTR_TYPE:
24799 case LANG_TYPE:
24800 /* Just use DW_TAG_unspecified_type. */
24801 {
24802 dw_die_ref type_die = lookup_type_die (type);
24803 if (type_die == NULL)
24804 {
24805 tree name = TYPE_IDENTIFIER (type);
24806 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24807 type);
24808 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24809 equate_type_number_to_die (type, type_die);
24810 }
24811 }
24812 break;
24813
24814 default:
24815 if (is_cxx_auto (type))
24816 {
24817 tree name = TYPE_IDENTIFIER (type);
24818 dw_die_ref *die = (name == get_identifier ("auto")
24819 ? &auto_die : &decltype_auto_die);
24820 if (!*die)
24821 {
24822 *die = new_die (DW_TAG_unspecified_type,
24823 comp_unit_die (), NULL_TREE);
24824 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24825 }
24826 equate_type_number_to_die (type, *die);
24827 break;
24828 }
24829 gcc_unreachable ();
24830 }
24831
24832 TREE_ASM_WRITTEN (type) = 1;
24833 }
24834
24835 static void
24836 gen_type_die (tree type, dw_die_ref context_die)
24837 {
24838 if (type != error_mark_node)
24839 {
24840 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24841 if (flag_checking)
24842 {
24843 dw_die_ref die = lookup_type_die (type);
24844 if (die)
24845 check_die (die);
24846 }
24847 }
24848 }
24849
24850 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24851 things which are local to the given block. */
24852
24853 static void
24854 gen_block_die (tree stmt, dw_die_ref context_die)
24855 {
24856 int must_output_die = 0;
24857 bool inlined_func;
24858
24859 /* Ignore blocks that are NULL. */
24860 if (stmt == NULL_TREE)
24861 return;
24862
24863 inlined_func = inlined_function_outer_scope_p (stmt);
24864
24865 /* If the block is one fragment of a non-contiguous block, do not
24866 process the variables, since they will have been done by the
24867 origin block. Do process subblocks. */
24868 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24869 {
24870 tree sub;
24871
24872 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24873 gen_block_die (sub, context_die);
24874
24875 return;
24876 }
24877
24878 /* Determine if we need to output any Dwarf DIEs at all to represent this
24879 block. */
24880 if (inlined_func)
24881 /* The outer scopes for inlinings *must* always be represented. We
24882 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24883 must_output_die = 1;
24884 else
24885 {
24886 /* Determine if this block directly contains any "significant"
24887 local declarations which we will need to output DIEs for. */
24888 if (debug_info_level > DINFO_LEVEL_TERSE)
24889 /* We are not in terse mode so *any* local declaration counts
24890 as being a "significant" one. */
24891 must_output_die = ((BLOCK_VARS (stmt) != NULL
24892 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24893 && (TREE_USED (stmt)
24894 || TREE_ASM_WRITTEN (stmt)
24895 || BLOCK_ABSTRACT (stmt)));
24896 else if ((TREE_USED (stmt)
24897 || TREE_ASM_WRITTEN (stmt)
24898 || BLOCK_ABSTRACT (stmt))
24899 && !dwarf2out_ignore_block (stmt))
24900 must_output_die = 1;
24901 }
24902
24903 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24904 DIE for any block which contains no significant local declarations at
24905 all. Rather, in such cases we just call `decls_for_scope' so that any
24906 needed Dwarf info for any sub-blocks will get properly generated. Note
24907 that in terse mode, our definition of what constitutes a "significant"
24908 local declaration gets restricted to include only inlined function
24909 instances and local (nested) function definitions. */
24910 if (must_output_die)
24911 {
24912 if (inlined_func)
24913 {
24914 /* If STMT block is abstract, that means we have been called
24915 indirectly from dwarf2out_abstract_function.
24916 That function rightfully marks the descendant blocks (of
24917 the abstract function it is dealing with) as being abstract,
24918 precisely to prevent us from emitting any
24919 DW_TAG_inlined_subroutine DIE as a descendant
24920 of an abstract function instance. So in that case, we should
24921 not call gen_inlined_subroutine_die.
24922
24923 Later though, when cgraph asks dwarf2out to emit info
24924 for the concrete instance of the function decl into which
24925 the concrete instance of STMT got inlined, the latter will lead
24926 to the generation of a DW_TAG_inlined_subroutine DIE. */
24927 if (! BLOCK_ABSTRACT (stmt))
24928 gen_inlined_subroutine_die (stmt, context_die);
24929 }
24930 else
24931 gen_lexical_block_die (stmt, context_die);
24932 }
24933 else
24934 decls_for_scope (stmt, context_die);
24935 }
24936
24937 /* Process variable DECL (or variable with origin ORIGIN) within
24938 block STMT and add it to CONTEXT_DIE. */
24939 static void
24940 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24941 {
24942 dw_die_ref die;
24943 tree decl_or_origin = decl ? decl : origin;
24944
24945 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24946 die = lookup_decl_die (decl_or_origin);
24947 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24948 {
24949 if (TYPE_DECL_IS_STUB (decl_or_origin))
24950 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24951 else
24952 die = lookup_decl_die (decl_or_origin);
24953 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24954 if (! die && ! early_dwarf)
24955 return;
24956 }
24957 else
24958 die = NULL;
24959
24960 /* Avoid creating DIEs for local typedefs and concrete static variables that
24961 will only be pruned later. */
24962 if ((origin || decl_ultimate_origin (decl))
24963 && (TREE_CODE (decl_or_origin) == TYPE_DECL
24964 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
24965 {
24966 origin = decl_ultimate_origin (decl_or_origin);
24967 if (decl && VAR_P (decl) && die != NULL)
24968 {
24969 die = lookup_decl_die (origin);
24970 if (die != NULL)
24971 equate_decl_number_to_die (decl, die);
24972 }
24973 return;
24974 }
24975
24976 if (die != NULL && die->die_parent == NULL)
24977 add_child_die (context_die, die);
24978 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
24979 {
24980 if (early_dwarf)
24981 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
24982 stmt, context_die);
24983 }
24984 else
24985 {
24986 if (decl && DECL_P (decl))
24987 {
24988 die = lookup_decl_die (decl);
24989
24990 /* Early created DIEs do not have a parent as the decls refer
24991 to the function as DECL_CONTEXT rather than the BLOCK. */
24992 if (die && die->die_parent == NULL)
24993 {
24994 gcc_assert (in_lto_p);
24995 add_child_die (context_die, die);
24996 }
24997 }
24998
24999 gen_decl_die (decl, origin, NULL, context_die);
25000 }
25001 }
25002
25003 /* Generate all of the decls declared within a given scope and (recursively)
25004 all of its sub-blocks. */
25005
25006 static void
25007 decls_for_scope (tree stmt, dw_die_ref context_die)
25008 {
25009 tree decl;
25010 unsigned int i;
25011 tree subblocks;
25012
25013 /* Ignore NULL blocks. */
25014 if (stmt == NULL_TREE)
25015 return;
25016
25017 /* Output the DIEs to represent all of the data objects and typedefs
25018 declared directly within this block but not within any nested
25019 sub-blocks. Also, nested function and tag DIEs have been
25020 generated with a parent of NULL; fix that up now. We don't
25021 have to do this if we're at -g1. */
25022 if (debug_info_level > DINFO_LEVEL_TERSE)
25023 {
25024 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25025 process_scope_var (stmt, decl, NULL_TREE, context_die);
25026 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25027 origin - avoid doing this twice as we have no good way to see
25028 if we've done it once already. */
25029 if (! early_dwarf)
25030 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25031 {
25032 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25033 if (decl == current_function_decl)
25034 /* Ignore declarations of the current function: although they
25035 are declarations, gen_subprogram_die would treat them
25036 as definitions again because they are equal to
25037 current_function_decl, and would endlessly recurse. */;
25038 else if (TREE_CODE (decl) == FUNCTION_DECL)
25039 process_scope_var (stmt, decl, NULL_TREE, context_die);
25040 else
25041 process_scope_var (stmt, NULL_TREE, decl, context_die);
25042 }
25043 }
25044
25045 /* Even if we're at -g1, we need to process the subblocks in order to get
25046 inlined call information. */
25047
25048 /* Output the DIEs to represent all sub-blocks (and the items declared
25049 therein) of this block. */
25050 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25051 subblocks != NULL;
25052 subblocks = BLOCK_CHAIN (subblocks))
25053 gen_block_die (subblocks, context_die);
25054 }
25055
25056 /* Is this a typedef we can avoid emitting? */
25057
25058 bool
25059 is_redundant_typedef (const_tree decl)
25060 {
25061 if (TYPE_DECL_IS_STUB (decl))
25062 return true;
25063
25064 if (DECL_ARTIFICIAL (decl)
25065 && DECL_CONTEXT (decl)
25066 && is_tagged_type (DECL_CONTEXT (decl))
25067 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25068 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25069 /* Also ignore the artificial member typedef for the class name. */
25070 return true;
25071
25072 return false;
25073 }
25074
25075 /* Return TRUE if TYPE is a typedef that names a type for linkage
25076 purposes. This kind of typedefs is produced by the C++ FE for
25077 constructs like:
25078
25079 typedef struct {...} foo;
25080
25081 In that case, there is no typedef variant type produced for foo.
25082 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25083 struct type. */
25084
25085 static bool
25086 is_naming_typedef_decl (const_tree decl)
25087 {
25088 if (decl == NULL_TREE
25089 || TREE_CODE (decl) != TYPE_DECL
25090 || DECL_NAMELESS (decl)
25091 || !is_tagged_type (TREE_TYPE (decl))
25092 || DECL_IS_BUILTIN (decl)
25093 || is_redundant_typedef (decl)
25094 /* It looks like Ada produces TYPE_DECLs that are very similar
25095 to C++ naming typedefs but that have different
25096 semantics. Let's be specific to C++ for now. */
25097 || !is_cxx (decl))
25098 return FALSE;
25099
25100 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25101 && TYPE_NAME (TREE_TYPE (decl)) == decl
25102 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25103 != TYPE_NAME (TREE_TYPE (decl))));
25104 }
25105
25106 /* Looks up the DIE for a context. */
25107
25108 static inline dw_die_ref
25109 lookup_context_die (tree context)
25110 {
25111 if (context)
25112 {
25113 /* Find die that represents this context. */
25114 if (TYPE_P (context))
25115 {
25116 context = TYPE_MAIN_VARIANT (context);
25117 dw_die_ref ctx = lookup_type_die (context);
25118 if (!ctx)
25119 return NULL;
25120 return strip_naming_typedef (context, ctx);
25121 }
25122 else
25123 return lookup_decl_die (context);
25124 }
25125 return comp_unit_die ();
25126 }
25127
25128 /* Returns the DIE for a context. */
25129
25130 static inline dw_die_ref
25131 get_context_die (tree context)
25132 {
25133 if (context)
25134 {
25135 /* Find die that represents this context. */
25136 if (TYPE_P (context))
25137 {
25138 context = TYPE_MAIN_VARIANT (context);
25139 return strip_naming_typedef (context, force_type_die (context));
25140 }
25141 else
25142 return force_decl_die (context);
25143 }
25144 return comp_unit_die ();
25145 }
25146
25147 /* Returns the DIE for decl. A DIE will always be returned. */
25148
25149 static dw_die_ref
25150 force_decl_die (tree decl)
25151 {
25152 dw_die_ref decl_die;
25153 unsigned saved_external_flag;
25154 tree save_fn = NULL_TREE;
25155 decl_die = lookup_decl_die (decl);
25156 if (!decl_die)
25157 {
25158 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25159
25160 decl_die = lookup_decl_die (decl);
25161 if (decl_die)
25162 return decl_die;
25163
25164 switch (TREE_CODE (decl))
25165 {
25166 case FUNCTION_DECL:
25167 /* Clear current_function_decl, so that gen_subprogram_die thinks
25168 that this is a declaration. At this point, we just want to force
25169 a declaration DIE. */
25170 save_fn = current_function_decl;
25171 current_function_decl = NULL_TREE;
25172 gen_subprogram_die (decl, context_die);
25173 current_function_decl = save_fn;
25174 break;
25175
25176 case VAR_DECL:
25177 /* Set the external flag to force a declaration DIE. Restore it after
25178 the gen_decl_die() call. */
25179 saved_external_flag = DECL_EXTERNAL (decl);
25180 DECL_EXTERNAL (decl) = 1;
25181 gen_decl_die (decl, NULL, NULL, context_die);
25182 DECL_EXTERNAL (decl) = saved_external_flag;
25183 break;
25184
25185 case NAMESPACE_DECL:
25186 if (dwarf_version >= 3 || !dwarf_strict)
25187 dwarf2out_decl (decl);
25188 else
25189 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25190 decl_die = comp_unit_die ();
25191 break;
25192
25193 case TRANSLATION_UNIT_DECL:
25194 decl_die = comp_unit_die ();
25195 break;
25196
25197 default:
25198 gcc_unreachable ();
25199 }
25200
25201 /* We should be able to find the DIE now. */
25202 if (!decl_die)
25203 decl_die = lookup_decl_die (decl);
25204 gcc_assert (decl_die);
25205 }
25206
25207 return decl_die;
25208 }
25209
25210 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25211 always returned. */
25212
25213 static dw_die_ref
25214 force_type_die (tree type)
25215 {
25216 dw_die_ref type_die;
25217
25218 type_die = lookup_type_die (type);
25219 if (!type_die)
25220 {
25221 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25222
25223 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25224 false, context_die);
25225 gcc_assert (type_die);
25226 }
25227 return type_die;
25228 }
25229
25230 /* Force out any required namespaces to be able to output DECL,
25231 and return the new context_die for it, if it's changed. */
25232
25233 static dw_die_ref
25234 setup_namespace_context (tree thing, dw_die_ref context_die)
25235 {
25236 tree context = (DECL_P (thing)
25237 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25238 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25239 /* Force out the namespace. */
25240 context_die = force_decl_die (context);
25241
25242 return context_die;
25243 }
25244
25245 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25246 type) within its namespace, if appropriate.
25247
25248 For compatibility with older debuggers, namespace DIEs only contain
25249 declarations; all definitions are emitted at CU scope, with
25250 DW_AT_specification pointing to the declaration (like with class
25251 members). */
25252
25253 static dw_die_ref
25254 declare_in_namespace (tree thing, dw_die_ref context_die)
25255 {
25256 dw_die_ref ns_context;
25257
25258 if (debug_info_level <= DINFO_LEVEL_TERSE)
25259 return context_die;
25260
25261 /* External declarations in the local scope only need to be emitted
25262 once, not once in the namespace and once in the scope.
25263
25264 This avoids declaring the `extern' below in the
25265 namespace DIE as well as in the innermost scope:
25266
25267 namespace S
25268 {
25269 int i=5;
25270 int foo()
25271 {
25272 int i=8;
25273 extern int i;
25274 return i;
25275 }
25276 }
25277 */
25278 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25279 return context_die;
25280
25281 /* If this decl is from an inlined function, then don't try to emit it in its
25282 namespace, as we will get confused. It would have already been emitted
25283 when the abstract instance of the inline function was emitted anyway. */
25284 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25285 return context_die;
25286
25287 ns_context = setup_namespace_context (thing, context_die);
25288
25289 if (ns_context != context_die)
25290 {
25291 if (is_fortran ())
25292 return ns_context;
25293 if (DECL_P (thing))
25294 gen_decl_die (thing, NULL, NULL, ns_context);
25295 else
25296 gen_type_die (thing, ns_context);
25297 }
25298 return context_die;
25299 }
25300
25301 /* Generate a DIE for a namespace or namespace alias. */
25302
25303 static void
25304 gen_namespace_die (tree decl, dw_die_ref context_die)
25305 {
25306 dw_die_ref namespace_die;
25307
25308 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25309 they are an alias of. */
25310 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25311 {
25312 /* Output a real namespace or module. */
25313 context_die = setup_namespace_context (decl, comp_unit_die ());
25314 namespace_die = new_die (is_fortran ()
25315 ? DW_TAG_module : DW_TAG_namespace,
25316 context_die, decl);
25317 /* For Fortran modules defined in a different CU, don't add src coords. */
25318 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25319 {
25320 const char *name = dwarf2_name (decl, 0);
25321 if (name)
25322 add_name_attribute (namespace_die, name);
25323 }
25324 else
25325 add_name_and_src_coords_attributes (namespace_die, decl);
25326 if (DECL_EXTERNAL (decl))
25327 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25328 equate_decl_number_to_die (decl, namespace_die);
25329 }
25330 else
25331 {
25332 /* Output a namespace alias. */
25333
25334 /* Force out the namespace we are an alias of, if necessary. */
25335 dw_die_ref origin_die
25336 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25337
25338 if (DECL_FILE_SCOPE_P (decl)
25339 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25340 context_die = setup_namespace_context (decl, comp_unit_die ());
25341 /* Now create the namespace alias DIE. */
25342 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25343 add_name_and_src_coords_attributes (namespace_die, decl);
25344 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25345 equate_decl_number_to_die (decl, namespace_die);
25346 }
25347 if ((dwarf_version >= 5 || !dwarf_strict)
25348 && lang_hooks.decls.decl_dwarf_attribute (decl,
25349 DW_AT_export_symbols) == 1)
25350 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25351
25352 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25353 if (want_pubnames ())
25354 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25355 }
25356
25357 /* Generate Dwarf debug information for a decl described by DECL.
25358 The return value is currently only meaningful for PARM_DECLs;
25359 for all other decls it returns NULL.
25360
25361 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25362 It can be NULL otherwise. */
25363
25364 static dw_die_ref
25365 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25366 dw_die_ref context_die)
25367 {
25368 tree decl_or_origin = decl ? decl : origin;
25369 tree class_origin = NULL, ultimate_origin;
25370
25371 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25372 return NULL;
25373
25374 /* Ignore pointer bounds decls. */
25375 if (DECL_P (decl_or_origin)
25376 && TREE_TYPE (decl_or_origin)
25377 && POINTER_BOUNDS_P (decl_or_origin))
25378 return NULL;
25379
25380 switch (TREE_CODE (decl_or_origin))
25381 {
25382 case ERROR_MARK:
25383 break;
25384
25385 case CONST_DECL:
25386 if (!is_fortran () && !is_ada ())
25387 {
25388 /* The individual enumerators of an enum type get output when we output
25389 the Dwarf representation of the relevant enum type itself. */
25390 break;
25391 }
25392
25393 /* Emit its type. */
25394 gen_type_die (TREE_TYPE (decl), context_die);
25395
25396 /* And its containing namespace. */
25397 context_die = declare_in_namespace (decl, context_die);
25398
25399 gen_const_die (decl, context_die);
25400 break;
25401
25402 case FUNCTION_DECL:
25403 #if 0
25404 /* FIXME */
25405 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25406 on local redeclarations of global functions. That seems broken. */
25407 if (current_function_decl != decl)
25408 /* This is only a declaration. */;
25409 #endif
25410
25411 /* We should have abstract copies already and should not generate
25412 stray type DIEs in late LTO dumping. */
25413 if (! early_dwarf)
25414 ;
25415
25416 /* If we're emitting a clone, emit info for the abstract instance. */
25417 else if (origin || DECL_ORIGIN (decl) != decl)
25418 dwarf2out_abstract_function (origin
25419 ? DECL_ORIGIN (origin)
25420 : DECL_ABSTRACT_ORIGIN (decl));
25421
25422 /* If we're emitting a possibly inlined function emit it as
25423 abstract instance. */
25424 else if (cgraph_function_possibly_inlined_p (decl)
25425 && ! DECL_ABSTRACT_P (decl)
25426 && ! class_or_namespace_scope_p (context_die)
25427 /* dwarf2out_abstract_function won't emit a die if this is just
25428 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25429 that case, because that works only if we have a die. */
25430 && DECL_INITIAL (decl) != NULL_TREE)
25431 dwarf2out_abstract_function (decl);
25432
25433 /* Otherwise we're emitting the primary DIE for this decl. */
25434 else if (debug_info_level > DINFO_LEVEL_TERSE)
25435 {
25436 /* Before we describe the FUNCTION_DECL itself, make sure that we
25437 have its containing type. */
25438 if (!origin)
25439 origin = decl_class_context (decl);
25440 if (origin != NULL_TREE)
25441 gen_type_die (origin, context_die);
25442
25443 /* And its return type. */
25444 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25445
25446 /* And its virtual context. */
25447 if (DECL_VINDEX (decl) != NULL_TREE)
25448 gen_type_die (DECL_CONTEXT (decl), context_die);
25449
25450 /* Make sure we have a member DIE for decl. */
25451 if (origin != NULL_TREE)
25452 gen_type_die_for_member (origin, decl, context_die);
25453
25454 /* And its containing namespace. */
25455 context_die = declare_in_namespace (decl, context_die);
25456 }
25457
25458 /* Now output a DIE to represent the function itself. */
25459 if (decl)
25460 gen_subprogram_die (decl, context_die);
25461 break;
25462
25463 case TYPE_DECL:
25464 /* If we are in terse mode, don't generate any DIEs to represent any
25465 actual typedefs. */
25466 if (debug_info_level <= DINFO_LEVEL_TERSE)
25467 break;
25468
25469 /* In the special case of a TYPE_DECL node representing the declaration
25470 of some type tag, if the given TYPE_DECL is marked as having been
25471 instantiated from some other (original) TYPE_DECL node (e.g. one which
25472 was generated within the original definition of an inline function) we
25473 used to generate a special (abbreviated) DW_TAG_structure_type,
25474 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25475 should be actually referencing those DIEs, as variable DIEs with that
25476 type would be emitted already in the abstract origin, so it was always
25477 removed during unused type pruning. Don't add anything in this
25478 case. */
25479 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25480 break;
25481
25482 if (is_redundant_typedef (decl))
25483 gen_type_die (TREE_TYPE (decl), context_die);
25484 else
25485 /* Output a DIE to represent the typedef itself. */
25486 gen_typedef_die (decl, context_die);
25487 break;
25488
25489 case LABEL_DECL:
25490 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25491 gen_label_die (decl, context_die);
25492 break;
25493
25494 case VAR_DECL:
25495 case RESULT_DECL:
25496 /* If we are in terse mode, don't generate any DIEs to represent any
25497 variable declarations or definitions. */
25498 if (debug_info_level <= DINFO_LEVEL_TERSE)
25499 break;
25500
25501 /* Avoid generating stray type DIEs during late dwarf dumping.
25502 All types have been dumped early. */
25503 if (early_dwarf
25504 /* ??? But in LTRANS we cannot annotate early created variably
25505 modified type DIEs without copying them and adjusting all
25506 references to them. Dump them again as happens for inlining
25507 which copies both the decl and the types. */
25508 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25509 in VLA bound information for example. */
25510 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25511 current_function_decl)))
25512 {
25513 /* Output any DIEs that are needed to specify the type of this data
25514 object. */
25515 if (decl_by_reference_p (decl_or_origin))
25516 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25517 else
25518 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25519 }
25520
25521 if (early_dwarf)
25522 {
25523 /* And its containing type. */
25524 class_origin = decl_class_context (decl_or_origin);
25525 if (class_origin != NULL_TREE)
25526 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25527
25528 /* And its containing namespace. */
25529 context_die = declare_in_namespace (decl_or_origin, context_die);
25530 }
25531
25532 /* Now output the DIE to represent the data object itself. This gets
25533 complicated because of the possibility that the VAR_DECL really
25534 represents an inlined instance of a formal parameter for an inline
25535 function. */
25536 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25537 if (ultimate_origin != NULL_TREE
25538 && TREE_CODE (ultimate_origin) == PARM_DECL)
25539 gen_formal_parameter_die (decl, origin,
25540 true /* Emit name attribute. */,
25541 context_die);
25542 else
25543 gen_variable_die (decl, origin, context_die);
25544 break;
25545
25546 case FIELD_DECL:
25547 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
25548 /* Ignore the nameless fields that are used to skip bits, but handle C++
25549 anonymous unions and structs. */
25550 if (DECL_NAME (decl) != NULL_TREE
25551 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25552 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25553 {
25554 gen_type_die (member_declared_type (decl), context_die);
25555 gen_field_die (decl, ctx, context_die);
25556 }
25557 break;
25558
25559 case PARM_DECL:
25560 /* Avoid generating stray type DIEs during late dwarf dumping.
25561 All types have been dumped early. */
25562 if (early_dwarf
25563 /* ??? But in LTRANS we cannot annotate early created variably
25564 modified type DIEs without copying them and adjusting all
25565 references to them. Dump them again as happens for inlining
25566 which copies both the decl and the types. */
25567 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25568 in VLA bound information for example. */
25569 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25570 current_function_decl)))
25571 {
25572 if (DECL_BY_REFERENCE (decl_or_origin))
25573 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25574 else
25575 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25576 }
25577 return gen_formal_parameter_die (decl, origin,
25578 true /* Emit name attribute. */,
25579 context_die);
25580
25581 case NAMESPACE_DECL:
25582 if (dwarf_version >= 3 || !dwarf_strict)
25583 gen_namespace_die (decl, context_die);
25584 break;
25585
25586 case IMPORTED_DECL:
25587 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25588 DECL_CONTEXT (decl), context_die);
25589 break;
25590
25591 case NAMELIST_DECL:
25592 gen_namelist_decl (DECL_NAME (decl), context_die,
25593 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25594 break;
25595
25596 default:
25597 /* Probably some frontend-internal decl. Assume we don't care. */
25598 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25599 break;
25600 }
25601
25602 return NULL;
25603 }
25604 \f
25605 /* Output initial debug information for global DECL. Called at the
25606 end of the parsing process.
25607
25608 This is the initial debug generation process. As such, the DIEs
25609 generated may be incomplete. A later debug generation pass
25610 (dwarf2out_late_global_decl) will augment the information generated
25611 in this pass (e.g., with complete location info). */
25612
25613 static void
25614 dwarf2out_early_global_decl (tree decl)
25615 {
25616 set_early_dwarf s;
25617
25618 /* gen_decl_die() will set DECL_ABSTRACT because
25619 cgraph_function_possibly_inlined_p() returns true. This is in
25620 turn will cause DW_AT_inline attributes to be set.
25621
25622 This happens because at early dwarf generation, there is no
25623 cgraph information, causing cgraph_function_possibly_inlined_p()
25624 to return true. Trick cgraph_function_possibly_inlined_p()
25625 while we generate dwarf early. */
25626 bool save = symtab->global_info_ready;
25627 symtab->global_info_ready = true;
25628
25629 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25630 other DECLs and they can point to template types or other things
25631 that dwarf2out can't handle when done via dwarf2out_decl. */
25632 if (TREE_CODE (decl) != TYPE_DECL
25633 && TREE_CODE (decl) != PARM_DECL)
25634 {
25635 if (TREE_CODE (decl) == FUNCTION_DECL)
25636 {
25637 tree save_fndecl = current_function_decl;
25638
25639 /* For nested functions, make sure we have DIEs for the parents first
25640 so that all nested DIEs are generated at the proper scope in the
25641 first shot. */
25642 tree context = decl_function_context (decl);
25643 if (context != NULL)
25644 {
25645 dw_die_ref context_die = lookup_decl_die (context);
25646 current_function_decl = context;
25647
25648 /* Avoid emitting DIEs multiple times, but still process CONTEXT
25649 enough so that it lands in its own context. This avoids type
25650 pruning issues later on. */
25651 if (context_die == NULL || is_declaration_die (context_die))
25652 dwarf2out_decl (context);
25653 }
25654
25655 /* Emit an abstract origin of a function first. This happens
25656 with C++ constructor clones for example and makes
25657 dwarf2out_abstract_function happy which requires the early
25658 DIE of the abstract instance to be present. */
25659 tree origin = DECL_ABSTRACT_ORIGIN (decl);
25660 dw_die_ref origin_die;
25661 if (origin != NULL
25662 /* Do not emit the DIE multiple times but make sure to
25663 process it fully here in case we just saw a declaration. */
25664 && ((origin_die = lookup_decl_die (origin)) == NULL
25665 || is_declaration_die (origin_die)))
25666 {
25667 current_function_decl = origin;
25668 dwarf2out_decl (origin);
25669 }
25670
25671 /* Emit the DIE for decl but avoid doing that multiple times. */
25672 dw_die_ref old_die;
25673 if ((old_die = lookup_decl_die (decl)) == NULL
25674 || is_declaration_die (old_die))
25675 {
25676 current_function_decl = decl;
25677 dwarf2out_decl (decl);
25678 }
25679
25680 current_function_decl = save_fndecl;
25681 }
25682 else
25683 dwarf2out_decl (decl);
25684 }
25685 symtab->global_info_ready = save;
25686 }
25687
25688 /* Output debug information for global decl DECL. Called from
25689 toplev.c after compilation proper has finished. */
25690
25691 static void
25692 dwarf2out_late_global_decl (tree decl)
25693 {
25694 /* Fill-in any location information we were unable to determine
25695 on the first pass. */
25696 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25697 {
25698 dw_die_ref die = lookup_decl_die (decl);
25699
25700 /* We may have to generate early debug late for LTO in case debug
25701 was not enabled at compile-time or the target doesn't support
25702 the LTO early debug scheme. */
25703 if (! die && in_lto_p)
25704 {
25705 dwarf2out_decl (decl);
25706 die = lookup_decl_die (decl);
25707 }
25708
25709 if (die)
25710 {
25711 /* We get called via the symtab code invoking late_global_decl
25712 for symbols that are optimized out. Do not add locations
25713 for those, except if they have a DECL_VALUE_EXPR, in which case
25714 they are relevant for debuggers. */
25715 varpool_node *node = varpool_node::get (decl);
25716 if ((! node || ! node->definition) && ! DECL_HAS_VALUE_EXPR_P (decl))
25717 tree_add_const_value_attribute_for_decl (die, decl);
25718 else
25719 add_location_or_const_value_attribute (die, decl, false);
25720 }
25721 }
25722 }
25723
25724 /* Output debug information for type decl DECL. Called from toplev.c
25725 and from language front ends (to record built-in types). */
25726 static void
25727 dwarf2out_type_decl (tree decl, int local)
25728 {
25729 if (!local)
25730 {
25731 set_early_dwarf s;
25732 dwarf2out_decl (decl);
25733 }
25734 }
25735
25736 /* Output debug information for imported module or decl DECL.
25737 NAME is the non-NULL name in the lexical block if the decl has been renamed.
25738 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
25739 that DECL belongs to.
25740 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
25741 static void
25742 dwarf2out_imported_module_or_decl_1 (tree decl,
25743 tree name,
25744 tree lexical_block,
25745 dw_die_ref lexical_block_die)
25746 {
25747 expanded_location xloc;
25748 dw_die_ref imported_die = NULL;
25749 dw_die_ref at_import_die;
25750
25751 if (TREE_CODE (decl) == IMPORTED_DECL)
25752 {
25753 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25754 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25755 gcc_assert (decl);
25756 }
25757 else
25758 xloc = expand_location (input_location);
25759
25760 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25761 {
25762 at_import_die = force_type_die (TREE_TYPE (decl));
25763 /* For namespace N { typedef void T; } using N::T; base_type_die
25764 returns NULL, but DW_TAG_imported_declaration requires
25765 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
25766 if (!at_import_die)
25767 {
25768 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25769 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25770 at_import_die = lookup_type_die (TREE_TYPE (decl));
25771 gcc_assert (at_import_die);
25772 }
25773 }
25774 else
25775 {
25776 at_import_die = lookup_decl_die (decl);
25777 if (!at_import_die)
25778 {
25779 /* If we're trying to avoid duplicate debug info, we may not have
25780 emitted the member decl for this field. Emit it now. */
25781 if (TREE_CODE (decl) == FIELD_DECL)
25782 {
25783 tree type = DECL_CONTEXT (decl);
25784
25785 if (TYPE_CONTEXT (type)
25786 && TYPE_P (TYPE_CONTEXT (type))
25787 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25788 DINFO_USAGE_DIR_USE))
25789 return;
25790 gen_type_die_for_member (type, decl,
25791 get_context_die (TYPE_CONTEXT (type)));
25792 }
25793 if (TREE_CODE (decl) == NAMELIST_DECL)
25794 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25795 get_context_die (DECL_CONTEXT (decl)),
25796 NULL_TREE);
25797 else
25798 at_import_die = force_decl_die (decl);
25799 }
25800 }
25801
25802 if (TREE_CODE (decl) == NAMESPACE_DECL)
25803 {
25804 if (dwarf_version >= 3 || !dwarf_strict)
25805 imported_die = new_die (DW_TAG_imported_module,
25806 lexical_block_die,
25807 lexical_block);
25808 else
25809 return;
25810 }
25811 else
25812 imported_die = new_die (DW_TAG_imported_declaration,
25813 lexical_block_die,
25814 lexical_block);
25815
25816 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25817 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25818 if (debug_column_info && xloc.column)
25819 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25820 if (name)
25821 add_AT_string (imported_die, DW_AT_name,
25822 IDENTIFIER_POINTER (name));
25823 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25824 }
25825
25826 /* Output debug information for imported module or decl DECL.
25827 NAME is non-NULL name in context if the decl has been renamed.
25828 CHILD is true if decl is one of the renamed decls as part of
25829 importing whole module.
25830 IMPLICIT is set if this hook is called for an implicit import
25831 such as inline namespace. */
25832
25833 static void
25834 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25835 bool child, bool implicit)
25836 {
25837 /* dw_die_ref at_import_die; */
25838 dw_die_ref scope_die;
25839
25840 if (debug_info_level <= DINFO_LEVEL_TERSE)
25841 return;
25842
25843 gcc_assert (decl);
25844
25845 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
25846 be enough; for DWARF4 and older, even if we emit DW_AT_export_symbols
25847 as an extension, add the implicit DW_TAG_imported_module anyway for
25848 the benefit of consumers unaware of DW_AT_export_symbols. */
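/* For instance, given a hypothetical translation unit containing
   "namespace A { inline namespace B { int i; } }", the C++ front end calls
   this hook with IMPLICIT set for B; with DWARF 5 the DW_AT_export_symbols
   attribute on B's DW_TAG_namespace DIE already expresses the implicit
   import, so the check below returns without adding a
   DW_TAG_imported_module. */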
25849 if (implicit
25850 && dwarf_version >= 5
25851 && lang_hooks.decls.decl_dwarf_attribute (decl,
25852 DW_AT_export_symbols) == 1)
25853 return;
25854
25855 set_early_dwarf s;
25856
25857 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
25858 DIEs: the DIE of the decl being imported and the scope DIE it is imported
25859 into. First, get the DIE for the decl itself. */
25860
25861 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
25862 module or decl. If no DIE is found for non-globals, force a new one. */
25863 if (context
25864 && TYPE_P (context)
25865 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25866 return;
25867
25868 scope_die = get_context_die (context);
25869
25870 if (child)
25871 {
25872 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25873 there is nothing we can do here. */
25874 if (dwarf_version < 3 && dwarf_strict)
25875 return;
25876
25877 gcc_assert (scope_die->die_child);
25878 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25879 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25880 scope_die = scope_die->die_child;
25881 }
25882
25883 /* OK, now we have DIEs for the decl and the scope. Emit the imported DIE. */
25884 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25885 }
25886
25887 /* Output debug information for namelists. */
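/* For instance, a hypothetical Fortran "NAMELIST /CASEDATA/ A, B" would yield
   a DW_TAG_namelist DIE for CASEDATA whose two DW_TAG_namelist_item children
   carry DW_AT_namelist_items references to the DIEs of A and B, as built by
   the loop in gen_namelist_decl below. */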
25888
25889 static dw_die_ref
25890 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25891 {
25892 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25893 tree value;
25894 unsigned i;
25895
25896 if (debug_info_level <= DINFO_LEVEL_TERSE)
25897 return NULL;
25898
25899 gcc_assert (scope_die != NULL);
25900 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25901 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25902
25903 /* If there are no item_decls, we have a nondefining namelist, e.g.
25904 with USE association; hence, set DW_AT_declaration. */
25905 if (item_decls == NULL_TREE)
25906 {
25907 add_AT_flag (nml_die, DW_AT_declaration, 1);
25908 return nml_die;
25909 }
25910
25911 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25912 {
25913 nml_item_ref_die = lookup_decl_die (value);
25914 if (!nml_item_ref_die)
25915 nml_item_ref_die = force_decl_die (value);
25916
25917 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25918 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25919 }
25920 return nml_die;
25921 }
25922
25923
25924 /* Write the debugging output for DECL and return the DIE. */
25925
25926 static void
25927 dwarf2out_decl (tree decl)
25928 {
25929 dw_die_ref context_die = comp_unit_die ();
25930
25931 switch (TREE_CODE (decl))
25932 {
25933 case ERROR_MARK:
25934 return;
25935
25936 case FUNCTION_DECL:
25937 /* If we're a nested function, initially use a parent of NULL; if we're
25938 a plain function, this will be fixed up in decls_for_scope. If
25939 we're a method, it will be ignored, since we already have a DIE. */
25940 if (decl_function_context (decl)
25941 /* But if we're in terse mode, we don't care about scope. */
25942 && debug_info_level > DINFO_LEVEL_TERSE)
25943 context_die = NULL;
25944 break;
25945
25946 case VAR_DECL:
25947 /* For local statics, look up the proper context DIE. */
25948 if (local_function_static (decl))
25949 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25950
25951 /* If we are in terse mode, don't generate any DIEs to represent any
25952 variable declarations or definitions. */
25953 if (debug_info_level <= DINFO_LEVEL_TERSE)
25954 return;
25955 break;
25956
25957 case CONST_DECL:
25958 if (debug_info_level <= DINFO_LEVEL_TERSE)
25959 return;
25960 if (!is_fortran () && !is_ada ())
25961 return;
25962 if (TREE_STATIC (decl) && decl_function_context (decl))
25963 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25964 break;
25965
25966 case NAMESPACE_DECL:
25967 case IMPORTED_DECL:
25968 if (debug_info_level <= DINFO_LEVEL_TERSE)
25969 return;
25970 if (lookup_decl_die (decl) != NULL)
25971 return;
25972 break;
25973
25974 case TYPE_DECL:
25975 /* Don't emit stubs for types unless they are needed by other DIEs. */
25976 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
25977 return;
25978
25979 /* Don't bother trying to generate any DIEs to represent any of the
25980 normal built-in types for the language we are compiling. */
25981 if (DECL_IS_BUILTIN (decl))
25982 return;
25983
25984 /* If we are in terse mode, don't generate any DIEs for types. */
25985 if (debug_info_level <= DINFO_LEVEL_TERSE)
25986 return;
25987
25988 /* If we're a function-scope tag, initially use a parent of NULL;
25989 this will be fixed up in decls_for_scope. */
25990 if (decl_function_context (decl))
25991 context_die = NULL;
25992
25993 break;
25994
25995 case NAMELIST_DECL:
25996 break;
25997
25998 default:
25999 return;
26000 }
26001
26002 gen_decl_die (decl, NULL, NULL, context_die);
26003
26004 if (flag_checking)
26005 {
26006 dw_die_ref die = lookup_decl_die (decl);
26007 if (die)
26008 check_die (die);
26009 }
26010 }
26011
26012 /* Write the debugging output for DECL. */
26013
26014 static void
26015 dwarf2out_function_decl (tree decl)
26016 {
26017 dwarf2out_decl (decl);
26018 call_arg_locations = NULL;
26019 call_arg_loc_last = NULL;
26020 call_site_count = -1;
26021 tail_call_site_count = -1;
26022 decl_loc_table->empty ();
26023 cached_dw_loc_list_table->empty ();
26024 }
26025
26026 /* Output a marker (i.e. a label) for the beginning of the generated code for
26027 a lexical block. */
26028
26029 static void
26030 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26031 unsigned int blocknum)
26032 {
26033 switch_to_section (current_function_section ());
26034 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
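/* With the default BLOCK_BEGIN_LABEL of "LBB", this typically emits an
   assembler label along the lines of ".LBB42" for block number 42; the exact
   spelling of the label is target-dependent. */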
26035 }
26036
26037 /* Output a marker (i.e. a label) for the end of the generated code for a
26038 lexical block. */
26039
26040 static void
26041 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26042 {
26043 switch_to_section (current_function_section ());
26044 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26045 }
26046
26047 /* Returns nonzero if it is appropriate not to emit any debugging
26048 information for BLOCK, because it doesn't contain any instructions.
26049
26050 Don't allow this for blocks with nested functions or local classes
26051 as we would end up with orphans, and in the presence of scheduling
26052 we may end up calling them anyway. */
26053
26054 static bool
26055 dwarf2out_ignore_block (const_tree block)
26056 {
26057 tree decl;
26058 unsigned int i;
26059
26060 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26061 if (TREE_CODE (decl) == FUNCTION_DECL
26062 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26063 return 0;
26064 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26065 {
26066 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26067 if (TREE_CODE (decl) == FUNCTION_DECL
26068 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26069 return 0;
26070 }
26071
26072 return 1;
26073 }
26074
26075 /* Hash table routines for file_hash. */
26076
26077 bool
26078 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26079 {
26080 return filename_cmp (p1->filename, p2) == 0;
26081 }
26082
26083 hashval_t
26084 dwarf_file_hasher::hash (dwarf_file_data *p)
26085 {
26086 return htab_hash_string (p->filename);
26087 }
26088
26089 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26090 dwarf2out.c) and return its "index". The index of each (known) filename is
26091 just a unique number which is associated with only that one filename. We
26092 need such numbers for the sake of generating labels (in the .debug_sfnames
26093 section) and references to those files numbers (in the .debug_srcinfo
26094 and .debug_macinfo sections). If the filename given as an argument is not
26095 found in our current list, add it to the list and assign it the next
26096 available unique index number. */
26097
26098 static struct dwarf_file_data *
26099 lookup_filename (const char *file_name)
26100 {
26101 struct dwarf_file_data * created;
26102
26103 if (!file_name)
26104 return NULL;
26105
26106 dwarf_file_data **slot
26107 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26108 INSERT);
26109 if (*slot)
26110 return *slot;
26111
26112 created = ggc_alloc<dwarf_file_data> ();
26113 created->filename = file_name;
26114 created->emitted_number = 0;
26115 *slot = created;
26116 return created;
26117 }
26118
26119 /* If the assembler will construct the file table, then translate the compiler
26120 internal file table number into the assembler file table number, and emit
26121 a .file directive if we haven't already emitted one yet. The file table
26122 numbers are different because we prune debug info for unused variables and
26123 types, which may include filenames. */
26124
26125 static int
26126 maybe_emit_file (struct dwarf_file_data * fd)
26127 {
26128 if (! fd->emitted_number)
26129 {
26130 if (last_emitted_file)
26131 fd->emitted_number = last_emitted_file->emitted_number + 1;
26132 else
26133 fd->emitted_number = 1;
26134 last_emitted_file = fd;
26135
26136 if (DWARF2_ASM_LINE_DEBUG_INFO)
26137 {
26138 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26139 output_quoted_string (asm_out_file,
26140 remap_debug_filename (fd->filename));
26141 fputc ('\n', asm_out_file);
26142 }
26143 }
26144
26145 return fd->emitted_number;
26146 }
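/* A typical use, as in dwarf2out_source_line further below, is
     file_num = maybe_emit_file (lookup_filename (filename));
   which both interns the filename and lazily emits the corresponding .file
   directive when the assembler constructs the line table. */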
26147
26148 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26149 That generation should happen after function debug info has been
26150 generated. The value of the attribute is the constant value of ARG. */
26151
26152 static void
26153 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26154 {
26155 die_arg_entry entry;
26156
26157 if (!die || !arg)
26158 return;
26159
26160 gcc_assert (early_dwarf);
26161
26162 if (!tmpl_value_parm_die_table)
26163 vec_alloc (tmpl_value_parm_die_table, 32);
26164
26165 entry.die = die;
26166 entry.arg = arg;
26167 vec_safe_push (tmpl_value_parm_die_table, entry);
26168 }
26169
26170 /* Return TRUE if T is an instance of generic type, FALSE
26171 otherwise. */
26172
26173 static bool
26174 generic_type_p (tree t)
26175 {
26176 if (t == NULL_TREE || !TYPE_P (t))
26177 return false;
26178 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26179 }
26180
26181 /* Schedule the generation of the generic parameter dies for the
26182 instance of generic type T. The proper generation itself is later
26183 done by gen_scheduled_generic_parms_dies. */
26184
26185 static void
26186 schedule_generic_params_dies_gen (tree t)
26187 {
26188 if (!generic_type_p (t))
26189 return;
26190
26191 gcc_assert (early_dwarf);
26192
26193 if (!generic_type_instances)
26194 vec_alloc (generic_type_instances, 256);
26195
26196 vec_safe_push (generic_type_instances, t);
26197 }
26198
26199 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26200 by append_entry_to_tmpl_value_parm_die_table. This function must
26201 be called after function DIEs have been generated. */
26202
26203 static void
26204 gen_remaining_tmpl_value_param_die_attribute (void)
26205 {
26206 if (tmpl_value_parm_die_table)
26207 {
26208 unsigned i, j;
26209 die_arg_entry *e;
26210
26211 /* We do this in two phases - first get the cases we can
26212 handle during early-finish, preserving those we cannot
26213 (containing symbolic constants where we don't yet know
26214 whether we are going to output the referenced symbols).
26215 For those we try again at late-finish. */
26216 j = 0;
26217 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26218 {
26219 if (!e->die->removed
26220 && !tree_add_const_value_attribute (e->die, e->arg))
26221 {
26222 dw_loc_descr_ref loc = NULL;
26223 if (! early_dwarf
26224 && (dwarf_version >= 5 || !dwarf_strict))
26225 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26226 if (loc)
26227 add_AT_loc (e->die, DW_AT_location, loc);
26228 else
26229 (*tmpl_value_parm_die_table)[j++] = *e;
26230 }
26231 }
26232 tmpl_value_parm_die_table->truncate (j);
26233 }
26234 }
26235
26236 /* Generate generic parameters DIEs for instances of generic types
26237 that have been previously scheduled by
26238 schedule_generic_params_dies_gen. This function must be called
26239 after all the types of the CU have been laid out. */
26240
26241 static void
26242 gen_scheduled_generic_parms_dies (void)
26243 {
26244 unsigned i;
26245 tree t;
26246
26247 if (!generic_type_instances)
26248 return;
26249
26250 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26251 if (COMPLETE_TYPE_P (t))
26252 gen_generic_params_dies (t);
26253
26254 generic_type_instances = NULL;
26255 }
26256
26257
26258 /* Replace the DW_AT_name attribute of the decl with NAME. */
26259
26260 static void
26261 dwarf2out_set_name (tree decl, tree name)
26262 {
26263 dw_die_ref die;
26264 dw_attr_node *attr;
26265 const char *dname;
26266
26267 die = TYPE_SYMTAB_DIE (decl);
26268 if (!die)
26269 return;
26270
26271 dname = dwarf2_name (name, 0);
26272 if (!dname)
26273 return;
26274
26275 attr = get_AT (die, DW_AT_name);
26276 if (attr)
26277 {
26278 struct indirect_string_node *node;
26279
26280 node = find_AT_string (dname);
26281 /* Replace the string. */
26282 attr->dw_attr_val.v.val_str = node;
26283 }
26284
26285 else
26286 add_name_attribute (die, dname);
26287 }
26288
26289 /* True if before or during processing of the first function being emitted. */
26290 static bool in_first_function_p = true;
26291 /* True if loc_note during dwarf2out_var_location call might still be
26292 before first real instruction at address equal to .Ltext0. */
26293 static bool maybe_at_text_label_p = true;
26294 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26295 static unsigned int first_loclabel_num_not_at_text_label;
26296
26297 /* Look ahead for a real insn, or for a begin stmt marker. */
26298
26299 static rtx_insn *
26300 dwarf2out_next_real_insn (rtx_insn *loc_note)
26301 {
26302 rtx_insn *next_real = NEXT_INSN (loc_note);
26303
26304 while (next_real)
26305 if (INSN_P (next_real))
26306 break;
26307 else
26308 next_real = NEXT_INSN (next_real);
26309
26310 return next_real;
26311 }
26312
26313 /* Called by the final INSN scan whenever we see a var location. We
26314 use it to drop labels in the right places, and throw the location in
26315 our lookup table. */
26316
26317 static void
26318 dwarf2out_var_location (rtx_insn *loc_note)
26319 {
26320 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26321 struct var_loc_node *newloc;
26322 rtx_insn *next_real, *next_note;
26323 rtx_insn *call_insn = NULL;
26324 static const char *last_label;
26325 static const char *last_postcall_label;
26326 static bool last_in_cold_section_p;
26327 static rtx_insn *expected_next_loc_note;
26328 tree decl;
26329 bool var_loc_p;
26330
26331 if (!NOTE_P (loc_note))
26332 {
26333 if (CALL_P (loc_note))
26334 {
26335 call_site_count++;
26336 if (SIBLING_CALL_P (loc_note))
26337 tail_call_site_count++;
26338 if (optimize == 0 && !flag_var_tracking)
26339 {
26340 /* When the var-tracking pass is not running, there is no note
26341 for indirect calls whose target is compile-time known. In this
26342 case, process such calls specifically so that we generate call
26343 sites for them anyway. */
26344 rtx x = PATTERN (loc_note);
26345 if (GET_CODE (x) == PARALLEL)
26346 x = XVECEXP (x, 0, 0);
26347 if (GET_CODE (x) == SET)
26348 x = SET_SRC (x);
26349 if (GET_CODE (x) == CALL)
26350 x = XEXP (x, 0);
26351 if (!MEM_P (x)
26352 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26353 || !SYMBOL_REF_DECL (XEXP (x, 0))
26354 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26355 != FUNCTION_DECL))
26356 {
26357 call_insn = loc_note;
26358 loc_note = NULL;
26359 var_loc_p = false;
26360
26361 next_real = dwarf2out_next_real_insn (call_insn);
26362 next_note = NULL;
26363 cached_next_real_insn = NULL;
26364 goto create_label;
26365 }
26366 }
26367 }
26368 return;
26369 }
26370
26371 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26372 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26373 return;
26374
26375 /* Optimize processing a large consecutive sequence of location
26376 notes so we don't spend too much time in next_real_insn. If the
26377 next insn is another location note, remember the next_real_insn
26378 calculation for next time. */
26379 next_real = cached_next_real_insn;
26380 if (next_real)
26381 {
26382 if (expected_next_loc_note != loc_note)
26383 next_real = NULL;
26384 }
26385
26386 next_note = NEXT_INSN (loc_note);
26387 if (! next_note
26388 || next_note->deleted ()
26389 || ! NOTE_P (next_note)
26390 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26391 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
26392 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26393 next_note = NULL;
26394
26395 if (! next_real)
26396 next_real = dwarf2out_next_real_insn (loc_note);
26397
26398 if (next_note)
26399 {
26400 expected_next_loc_note = next_note;
26401 cached_next_real_insn = next_real;
26402 }
26403 else
26404 cached_next_real_insn = NULL;
26405
26406 /* If there are no instructions which would be affected by this note,
26407 don't do anything. */
26408 if (var_loc_p
26409 && next_real == NULL_RTX
26410 && !NOTE_DURING_CALL_P (loc_note))
26411 return;
26412
26413 create_label:
26414
26415 if (next_real == NULL_RTX)
26416 next_real = get_last_insn ();
26417
26418 /* If there were any real insns between the note we processed last time
26419 and this note (or if it is the first note), clear
26420 last_{,postcall_}label so that they are not reused this time. */
26421 if (last_var_location_insn == NULL_RTX
26422 || last_var_location_insn != next_real
26423 || last_in_cold_section_p != in_cold_section_p)
26424 {
26425 last_label = NULL;
26426 last_postcall_label = NULL;
26427 }
26428
26429 if (var_loc_p)
26430 {
26431 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26432 newloc = add_var_loc_to_decl (decl, loc_note,
26433 NOTE_DURING_CALL_P (loc_note)
26434 ? last_postcall_label : last_label);
26435 if (newloc == NULL)
26436 return;
26437 }
26438 else
26439 {
26440 decl = NULL_TREE;
26441 newloc = NULL;
26442 }
26443
26444 /* If there were no real insns between the note we processed last time
26445 and this note, use the label we emitted last time. Otherwise
26446 create a new label and emit it. */
26447 if (last_label == NULL)
26448 {
26449 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26450 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26451 loclabel_num++;
26452 last_label = ggc_strdup (loclabel);
26453 /* See if loclabel might be equal to .Ltext0. If yes,
26454 bump first_loclabel_num_not_at_text_label. */
26455 if (!have_multiple_function_sections
26456 && in_first_function_p
26457 && maybe_at_text_label_p)
26458 {
26459 static rtx_insn *last_start;
26460 rtx_insn *insn;
26461 for (insn = loc_note; insn; insn = previous_insn (insn))
26462 if (insn == last_start)
26463 break;
26464 else if (!NONDEBUG_INSN_P (insn))
26465 continue;
26466 else
26467 {
26468 rtx body = PATTERN (insn);
26469 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26470 continue;
26471 /* Inline asm could occupy zero bytes. */
26472 else if (GET_CODE (body) == ASM_INPUT
26473 || asm_noperands (body) >= 0)
26474 continue;
26475 #ifdef HAVE_attr_length
26476 else if (get_attr_min_length (insn) == 0)
26477 continue;
26478 #endif
26479 else
26480 {
26481 /* Assume insn has non-zero length. */
26482 maybe_at_text_label_p = false;
26483 break;
26484 }
26485 }
26486 if (maybe_at_text_label_p)
26487 {
26488 last_start = loc_note;
26489 first_loclabel_num_not_at_text_label = loclabel_num;
26490 }
26491 }
26492 }
26493
26494 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26495 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26496
26497 if (!var_loc_p)
26498 {
26499 struct call_arg_loc_node *ca_loc
26500 = ggc_cleared_alloc<call_arg_loc_node> ();
26501 rtx_insn *prev
26502 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26503
26504 ca_loc->call_arg_loc_note = loc_note;
26505 ca_loc->next = NULL;
26506 ca_loc->label = last_label;
26507 gcc_assert (prev
26508 && (CALL_P (prev)
26509 || (NONJUMP_INSN_P (prev)
26510 && GET_CODE (PATTERN (prev)) == SEQUENCE
26511 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26512 if (!CALL_P (prev))
26513 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26514 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26515
26516 /* Look for a SYMBOL_REF in the "prev" instruction. */
26517 rtx x = get_call_rtx_from (PATTERN (prev));
26518 if (x)
26519 {
26520 /* Try to get the call symbol, if any. */
26521 if (MEM_P (XEXP (x, 0)))
26522 x = XEXP (x, 0);
26523 /* First, look for a memory access to a symbol_ref. */
26524 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26525 && SYMBOL_REF_DECL (XEXP (x, 0))
26526 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26527 ca_loc->symbol_ref = XEXP (x, 0);
26528 /* Otherwise, look at a compile-time known user-level function
26529 declaration. */
26530 else if (MEM_P (x)
26531 && MEM_EXPR (x)
26532 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26533 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26534 }
26535
26536 ca_loc->block = insn_scope (prev);
26537 if (call_arg_locations)
26538 call_arg_loc_last->next = ca_loc;
26539 else
26540 call_arg_locations = ca_loc;
26541 call_arg_loc_last = ca_loc;
26542 }
26543 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26544 newloc->label = last_label;
26545 else
26546 {
26547 if (!last_postcall_label)
26548 {
26549 sprintf (loclabel, "%s-1", last_label);
26550 last_postcall_label = ggc_strdup (loclabel);
26551 }
26552 newloc->label = last_postcall_label;
26553 }
26554
26555 if (var_loc_p && flag_debug_asm)
26556 {
26557 const char *name = NULL, *sep = " => ", *patstr = NULL;
26558 if (decl && DECL_NAME (decl))
26559 name = IDENTIFIER_POINTER (DECL_NAME (decl));
26560 if (NOTE_VAR_LOCATION_LOC (loc_note))
26561 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
26562 else
26563 {
26564 sep = " ";
26565 patstr = "RESET";
26566 }
26567 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
26568 name, sep, patstr);
26569 }
26570
26571 last_var_location_insn = next_real;
26572 last_in_cold_section_p = in_cold_section_p;
26573 }
26574
26575 /* Called from finalize_size_functions for size functions so that their body
26576 can be encoded in the debug info to describe the layout of variable-length
26577 structures. */
26578
26579 static void
26580 dwarf2out_size_function (tree decl)
26581 {
26582 function_to_dwarf_procedure (decl);
26583 }
26584
26585 /* Note in one location list that text section has changed. */
26586
26587 int
26588 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26589 {
26590 var_loc_list *list = *slot;
26591 if (list->first)
26592 list->last_before_switch
26593 = list->last->next ? list->last->next : list->last;
26594 return 1;
26595 }
26596
26597 /* Note in all location lists that text section has changed. */
26598
26599 static void
26600 var_location_switch_text_section (void)
26601 {
26602 if (decl_loc_table == NULL)
26603 return;
26604
26605 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26606 }
26607
26608 /* Create a new line number table. */
26609
26610 static dw_line_info_table *
26611 new_line_info_table (void)
26612 {
26613 dw_line_info_table *table;
26614
26615 table = ggc_cleared_alloc<dw_line_info_table> ();
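/* The DWARF line number state machine starts at file 1, line 1 with the
   target's default is_stmt setting, so mirror that initial state here. */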
26616 table->file_num = 1;
26617 table->line_num = 1;
26618 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26619
26620 return table;
26621 }
26622
26623 /* Look up the "current" table into which we emit line info, so
26624 that we don't have to do it for every source line. */
26625
26626 static void
26627 set_cur_line_info_table (section *sec)
26628 {
26629 dw_line_info_table *table;
26630
26631 if (sec == text_section)
26632 table = text_section_line_info;
26633 else if (sec == cold_text_section)
26634 {
26635 table = cold_text_section_line_info;
26636 if (!table)
26637 {
26638 cold_text_section_line_info = table = new_line_info_table ();
26639 table->end_label = cold_end_label;
26640 }
26641 }
26642 else
26643 {
26644 const char *end_label;
26645
26646 if (crtl->has_bb_partition)
26647 {
26648 if (in_cold_section_p)
26649 end_label = crtl->subsections.cold_section_end_label;
26650 else
26651 end_label = crtl->subsections.hot_section_end_label;
26652 }
26653 else
26654 {
26655 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26656 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26657 current_function_funcdef_no);
26658 end_label = ggc_strdup (label);
26659 }
26660
26661 table = new_line_info_table ();
26662 table->end_label = end_label;
26663
26664 vec_safe_push (separate_line_info, table);
26665 }
26666
26667 if (DWARF2_ASM_LINE_DEBUG_INFO)
26668 table->is_stmt = (cur_line_info_table
26669 ? cur_line_info_table->is_stmt
26670 : DWARF_LINE_DEFAULT_IS_STMT_START);
26671 cur_line_info_table = table;
26672 }
26673
26674
26675 /* We need to reset the locations at the beginning of each
26676 function. We can't do this in the end_function hook, because the
26677 declarations that use the locations won't have been output when
26678 that hook is called. Also compute have_multiple_function_sections here. */
26679
26680 static void
26681 dwarf2out_begin_function (tree fun)
26682 {
26683 section *sec = function_section (fun);
26684
26685 if (sec != text_section)
26686 have_multiple_function_sections = true;
26687
26688 if (crtl->has_bb_partition && !cold_text_section)
26689 {
26690 gcc_assert (current_function_decl == fun);
26691 cold_text_section = unlikely_text_section ();
26692 switch_to_section (cold_text_section);
26693 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26694 switch_to_section (sec);
26695 }
26696
26697 dwarf2out_note_section_used ();
26698 call_site_count = 0;
26699 tail_call_site_count = 0;
26700
26701 set_cur_line_info_table (sec);
26702 }
26703
26704 /* Helper function of dwarf2out_end_function, called only after emitting
26705 the very first function into assembly. Check if some .debug_loc range
26706 might end with a .LVL* label that could be equal to .Ltext0.
26707 In that case we must force using absolute addresses in .debug_loc ranges,
26708 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26709 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26710 list terminator.
26711 Set have_multiple_function_sections to true in that case and
26712 terminate htab traversal. */
26713
26714 int
26715 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26716 {
26717 var_loc_list *entry = *slot;
26718 struct var_loc_node *node;
26719
26720 node = entry->first;
26721 if (node && node->next && node->next->label)
26722 {
26723 unsigned int i;
26724 const char *label = node->next->label;
26725 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26726
26727 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26728 {
26729 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26730 if (strcmp (label, loclabel) == 0)
26731 {
26732 have_multiple_function_sections = true;
26733 return 0;
26734 }
26735 }
26736 }
26737 return 1;
26738 }
26739
26740 /* Hook called after emitting a function into assembly.
26741 This does something only for the very first function emitted. */
26742
26743 static void
26744 dwarf2out_end_function (unsigned int)
26745 {
26746 if (in_first_function_p
26747 && !have_multiple_function_sections
26748 && first_loclabel_num_not_at_text_label
26749 && decl_loc_table)
26750 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26751 in_first_function_p = false;
26752 maybe_at_text_label_p = false;
26753 }
26754
26755 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26756 front-ends register a translation unit even before dwarf2out_init is
26757 called. */
26758 static tree main_translation_unit = NULL_TREE;
26759
26760 /* Hook called by front-ends after they built their main translation unit.
26761 Associate comp_unit_die to UNIT. */
26762
26763 static void
26764 dwarf2out_register_main_translation_unit (tree unit)
26765 {
26766 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26767 && main_translation_unit == NULL_TREE);
26768 main_translation_unit = unit;
26769 /* If dwarf2out_init has not been called yet, it will perform the association
26770 itself looking at main_translation_unit. */
26771 if (decl_die_table != NULL)
26772 equate_decl_number_to_die (unit, comp_unit_die ());
26773 }
26774
26775 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26776
26777 static void
26778 push_dw_line_info_entry (dw_line_info_table *table,
26779 enum dw_line_info_opcode opcode, unsigned int val)
26780 {
26781 dw_line_info_entry e;
26782 e.opcode = opcode;
26783 e.val = val;
26784 vec_safe_push (table->entries, e);
26785 }
26786
26787 /* Output a label to mark the beginning of a source code line entry
26788 and record information relating to this source line in
26789 'line_info_table' for later output of the .debug_line section. */
26790 /* ??? The discriminator parameter ought to be unsigned. */
26791
26792 static void
26793 dwarf2out_source_line (unsigned int line, unsigned int column,
26794 const char *filename,
26795 int discriminator, bool is_stmt)
26796 {
26797 unsigned int file_num;
26798 dw_line_info_table *table;
26799
26800 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26801 return;
26802
26803 /* The discriminator column was added in DWARF 4. Simplify the code below
26804 by removing the discriminator if we're not supposed to output it. */
26805 if (dwarf_version < 4 && dwarf_strict)
26806 discriminator = 0;
26807
26808 if (!debug_column_info)
26809 column = 0;
26810
26811 table = cur_line_info_table;
26812 file_num = maybe_emit_file (lookup_filename (filename));
26813
26814 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26815 the debugger has used the second (possibly duplicate) line number
26816 at the beginning of the function to mark the end of the prologue.
26817 We could eliminate any other duplicates within the function. For
26818 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26819 that second line number entry. */
26820 /* Recall that this end-of-prologue indication is *not* the same thing
26821 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26822 to which the hook corresponds, follows the last insn that was
26823 emitted by gen_prologue. What we need is to precede the first insn
26824 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26825 insn that corresponds to something the user wrote. These may be
26826 very different locations once scheduling is enabled. */
26827
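/* Note that the duplicate-entry elision below is deliberately disabled (the
   "0 &&" in the condition); see the ??? note above about marking the end of
   the prologue. */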
26828 if (0 && file_num == table->file_num
26829 && line == table->line_num
26830 && column == table->column_num
26831 && discriminator == table->discrim_num
26832 && is_stmt == table->is_stmt)
26833 return;
26834
26835 switch_to_section (current_function_section ());
26836
26837 /* If requested, emit something human-readable. */
26838 if (flag_debug_asm)
26839 {
26840 if (debug_column_info)
26841 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
26842 filename, line, column);
26843 else
26844 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
26845 filename, line);
26846 }
26847
26848 if (DWARF2_ASM_LINE_DEBUG_INFO)
26849 {
26850 /* Emit the .loc directive understood by GNU as. */
26851 /* "\t.loc %u %u %u [is_stmt %u] [discriminator %u]",
26852 file_num, line, column, is_stmt, discriminator */
26853 fputs ("\t.loc ", asm_out_file);
26854 fprint_ul (asm_out_file, file_num);
26855 putc (' ', asm_out_file);
26856 fprint_ul (asm_out_file, line);
26857 putc (' ', asm_out_file);
26858 fprint_ul (asm_out_file, column);
26859
26860 if (is_stmt != table->is_stmt)
26861 {
26862 fputs (" is_stmt ", asm_out_file);
26863 putc (is_stmt ? '1' : '0', asm_out_file);
26864 }
26865 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26866 {
26867 gcc_assert (discriminator > 0);
26868 fputs (" discriminator ", asm_out_file);
26869 fprint_ul (asm_out_file, (unsigned long) discriminator);
26870 }
26871 putc ('\n', asm_out_file);
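/* With hypothetical values (file 1, line 42, column 7) the directive emitted
   above would read roughly ".loc 1 42 7", followed by " is_stmt 0" when the
   is_stmt setting changes and by " discriminator 3" when a nonzero
   discriminator is present. */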
26872 }
26873 else
26874 {
26875 unsigned int label_num = ++line_info_label_num;
26876
26877 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26878
26879 push_dw_line_info_entry (table, LI_set_address, label_num);
26880 if (file_num != table->file_num)
26881 push_dw_line_info_entry (table, LI_set_file, file_num);
26882 if (discriminator != table->discrim_num)
26883 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26884 if (is_stmt != table->is_stmt)
26885 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26886 push_dw_line_info_entry (table, LI_set_line, line);
26887 if (debug_column_info)
26888 push_dw_line_info_entry (table, LI_set_column, column);
26889 }
26890
26891 table->file_num = file_num;
26892 table->line_num = line;
26893 table->column_num = column;
26894 table->discrim_num = discriminator;
26895 table->is_stmt = is_stmt;
26896 table->in_use = true;
26897 }
26898
26899 /* Record the beginning of a new source file. */
26900
26901 static void
26902 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26903 {
26904 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26905 {
26906 macinfo_entry e;
26907 e.code = DW_MACINFO_start_file;
26908 e.lineno = lineno;
26909 e.info = ggc_strdup (filename);
26910 vec_safe_push (macinfo_table, e);
26911 }
26912 }
26913
26914 /* Record the end of a source file. */
26915
26916 static void
26917 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26918 {
26919 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26920 {
26921 macinfo_entry e;
26922 e.code = DW_MACINFO_end_file;
26923 e.lineno = lineno;
26924 e.info = NULL;
26925 vec_safe_push (macinfo_table, e);
26926 }
26927 }
26928
26929 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26930 the tail part of the directive line, i.e. the part which is past the
26931 initial whitespace, #, whitespace, directive-name, whitespace part. */
26932
26933 static void
26934 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26935 const char *buffer ATTRIBUTE_UNUSED)
26936 {
26937 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26938 {
26939 macinfo_entry e;
26940 /* Insert a dummy first entry to be able to optimize the whole
26941 predefined macro block using DW_MACRO_import. */
26942 if (macinfo_table->is_empty () && lineno <= 1)
26943 {
26944 e.code = 0;
26945 e.lineno = 0;
26946 e.info = NULL;
26947 vec_safe_push (macinfo_table, e);
26948 }
26949 e.code = DW_MACINFO_define;
26950 e.lineno = lineno;
26951 e.info = ggc_strdup (buffer);
26952 vec_safe_push (macinfo_table, e);
26953 }
26954 }
26955
26956 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26957 the tail part of the directive line, i.e. the part which is past the
26958 initial whitespace, #, whitespace, directive-name, whitespace part. */
26959
26960 static void
26961 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26962 const char *buffer ATTRIBUTE_UNUSED)
26963 {
26964 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26965 {
26966 macinfo_entry e;
26967 /* Insert a dummy first entry to be able to optimize the whole
26968 predefined macro block using DW_MACRO_import. */
26969 if (macinfo_table->is_empty () && lineno <= 1)
26970 {
26971 e.code = 0;
26972 e.lineno = 0;
26973 e.info = NULL;
26974 vec_safe_push (macinfo_table, e);
26975 }
26976 e.code = DW_MACINFO_undef;
26977 e.lineno = lineno;
26978 e.info = ggc_strdup (buffer);
26979 vec_safe_push (macinfo_table, e);
26980 }
26981 }
26982
26983 /* Helpers to manipulate hash table of CUs. */
26984
26985 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
26986 {
26987 static inline hashval_t hash (const macinfo_entry *);
26988 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
26989 };
26990
26991 inline hashval_t
26992 macinfo_entry_hasher::hash (const macinfo_entry *entry)
26993 {
26994 return htab_hash_string (entry->info);
26995 }
26996
26997 inline bool
26998 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
26999 const macinfo_entry *entry2)
27000 {
27001 return !strcmp (entry1->info, entry2->info);
27002 }
27003
27004 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27005
27006 /* Output a single .debug_macinfo entry. */
27007
27008 static void
27009 output_macinfo_op (macinfo_entry *ref)
27010 {
27011 int file_num;
27012 size_t len;
27013 struct indirect_string_node *node;
27014 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27015 struct dwarf_file_data *fd;
27016
27017 switch (ref->code)
27018 {
27019 case DW_MACINFO_start_file:
27020 fd = lookup_filename (ref->info);
27021 file_num = maybe_emit_file (fd);
27022 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
27023 dw2_asm_output_data_uleb128 (ref->lineno,
27024 "Included from line number %lu",
27025 (unsigned long) ref->lineno);
27026 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
27027 break;
27028 case DW_MACINFO_end_file:
27029 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
27030 break;
27031 case DW_MACINFO_define:
27032 case DW_MACINFO_undef:
27033 len = strlen (ref->info) + 1;
27034 if (!dwarf_strict
27035 && len > DWARF_OFFSET_SIZE
27036 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27037 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27038 {
27039 ref->code = ref->code == DW_MACINFO_define
27040 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
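/* Switch to the indirect-string form and re-dispatch so the entry is emitted
   by the DW_MACRO_define_strp/DW_MACRO_undef_strp case below. */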
27041 output_macinfo_op (ref);
27042 return;
27043 }
27044 dw2_asm_output_data (1, ref->code,
27045 ref->code == DW_MACINFO_define
27046 ? "Define macro" : "Undefine macro");
27047 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27048 (unsigned long) ref->lineno);
27049 dw2_asm_output_nstring (ref->info, -1, "The macro");
27050 break;
27051 case DW_MACRO_define_strp:
27052 case DW_MACRO_undef_strp:
27053 node = find_AT_string (ref->info);
27054 gcc_assert (node
27055 && (node->form == DW_FORM_strp
27056 || node->form == DW_FORM_GNU_str_index));
27057 dw2_asm_output_data (1, ref->code,
27058 ref->code == DW_MACRO_define_strp
27059 ? "Define macro strp"
27060 : "Undefine macro strp");
27061 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27062 (unsigned long) ref->lineno);
27063 if (node->form == DW_FORM_strp)
27064 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
27065 debug_str_section, "The macro: \"%s\"",
27066 ref->info);
27067 else
27068 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
27069 ref->info);
27070 break;
27071 case DW_MACRO_import:
27072 dw2_asm_output_data (1, ref->code, "Import");
27073 ASM_GENERATE_INTERNAL_LABEL (label,
27074 DEBUG_MACRO_SECTION_LABEL,
27075 ref->lineno + macinfo_label_base);
27076 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
27077 break;
27078 default:
27079 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
27080 ASM_COMMENT_START, (unsigned long) ref->code);
27081 break;
27082 }
27083 }
27084
27085 /* Attempt to make a sequence of define/undef macinfo ops shareable with
27086 other compilation unit .debug_macinfo sections. IDX is the first
27087 index of a define/undef. Return the number of ops that should be
27088 emitted in a comdat .debug_macinfo section and emit
27089 a DW_MACRO_import entry referencing it.
27090 If the define/undef entry should be emitted normally, return 0. */
27091
27092 static unsigned
27093 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
27094 macinfo_hash_type **macinfo_htab)
27095 {
27096 macinfo_entry *first, *second, *cur, *inc;
27097 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
27098 unsigned char checksum[16];
27099 struct md5_ctx ctx;
27100 char *grp_name, *tail;
27101 const char *base;
27102 unsigned int i, count, encoded_filename_len, linebuf_len;
27103 macinfo_entry **slot;
27104
27105 first = &(*macinfo_table)[idx];
27106 second = &(*macinfo_table)[idx + 1];
27107
27108 /* Optimize only if there are at least two consecutive define/undef ops,
27109 and either all of them are before the first DW_MACINFO_start_file
27110 with lineno {0,1} (i.e. predefined macro block), or all of them are
27111 in some included header file. */
27112 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
27113 return 0;
27114 if (vec_safe_is_empty (files))
27115 {
27116 if (first->lineno > 1 || second->lineno > 1)
27117 return 0;
27118 }
27119 else if (first->lineno == 0)
27120 return 0;
27121
27122 /* Find the last define/undef entry that can be grouped together
27123 with first and at the same time compute md5 checksum of their
27124 codes, line numbers and strings. */
27125 md5_init_ctx (&ctx);
27126 for (i = idx; macinfo_table->iterate (i, &cur); i++)
27127 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
27128 break;
27129 else if (vec_safe_is_empty (files) && cur->lineno > 1)
27130 break;
27131 else
27132 {
27133 unsigned char code = cur->code;
27134 md5_process_bytes (&code, 1, &ctx);
27135 checksum_uleb128 (cur->lineno, &ctx);
27136 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
27137 }
27138 md5_finish_ctx (&ctx, checksum);
27139 count = i - idx;
27140
27141 /* From the containing include filename (if any) pick up just
27142 usable characters from its basename. */
27143 if (vec_safe_is_empty (files))
27144 base = "";
27145 else
27146 base = lbasename (files->last ().info);
27147 for (encoded_filename_len = 0, i = 0; base[i]; i++)
27148 if (ISIDNUM (base[i]) || base[i] == '.')
27149 encoded_filename_len++;
27150 /* Count . at the end. */
27151 if (encoded_filename_len)
27152 encoded_filename_len++;
27153
27154 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
27155 linebuf_len = strlen (linebuf);
27156
27157 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
27158 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
27159 + 16 * 2 + 1);
27160 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
27161 tail = grp_name + 4;
27162 if (encoded_filename_len)
27163 {
27164 for (i = 0; base[i]; i++)
27165 if (ISIDNUM (base[i]) || base[i] == '.')
27166 *tail++ = base[i];
27167 *tail++ = '.';
27168 }
27169 memcpy (tail, linebuf, linebuf_len);
27170 tail += linebuf_len;
27171 *tail++ = '.';
27172 for (i = 0; i < 16; i++)
27173 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
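/* As a hypothetical illustration, a define/undef block starting at line 1 of
   an included header config.h could produce a group name such as
   "wm4.config.h.1.<32 hex md5 digits>" when 4-byte DWARF offsets are in
   use. */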
27174
27175 /* Construct a macinfo_entry for DW_MACRO_import
27176 in the empty vector entry before the first define/undef. */
27177 inc = &(*macinfo_table)[idx - 1];
27178 inc->code = DW_MACRO_import;
27179 inc->lineno = 0;
27180 inc->info = ggc_strdup (grp_name);
27181 if (!*macinfo_htab)
27182 *macinfo_htab = new macinfo_hash_type (10);
27183 /* Avoid emitting duplicates. */
27184 slot = (*macinfo_htab)->find_slot (inc, INSERT);
27185 if (*slot != NULL)
27186 {
27187 inc->code = 0;
27188 inc->info = NULL;
27189 /* If such an entry has been used before, just emit
27190 a DW_MACRO_import op. */
27191 inc = *slot;
27192 output_macinfo_op (inc);
27193 /* And clear all macinfo_entry in the range to avoid emitting them
27194 in the second pass. */
27195 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
27196 {
27197 cur->code = 0;
27198 cur->info = NULL;
27199 }
27200 }
27201 else
27202 {
27203 *slot = inc;
27204 inc->lineno = (*macinfo_htab)->elements ();
27205 output_macinfo_op (inc);
27206 }
27207 return count;
27208 }
27209
27210 /* Save any strings needed by the macinfo table in the debug str
27211 table. All strings must be collected into the table by the time
27212 index_string is called. */
27213
27214 static void
27215 save_macinfo_strings (void)
27216 {
27217 unsigned len;
27218 unsigned i;
27219 macinfo_entry *ref;
27220
27221 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
27222 {
27223 switch (ref->code)
27224 {
27225 /* Match the logic in output_macinfo_op to decide on
27226 indirect strings. */
27227 case DW_MACINFO_define:
27228 case DW_MACINFO_undef:
27229 len = strlen (ref->info) + 1;
27230 if (!dwarf_strict
27231 && len > DWARF_OFFSET_SIZE
27232 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27233 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27234 set_indirect_string (find_AT_string (ref->info));
27235 break;
27236 case DW_MACRO_define_strp:
27237 case DW_MACRO_undef_strp:
27238 set_indirect_string (find_AT_string (ref->info));
27239 break;
27240 default:
27241 break;
27242 }
27243 }
27244 }
27245
27246 /* Output macinfo section(s). */
27247
27248 static void
27249 output_macinfo (const char *debug_line_label, bool early_lto_debug)
27250 {
27251 unsigned i;
27252 unsigned long length = vec_safe_length (macinfo_table);
27253 macinfo_entry *ref;
27254 vec<macinfo_entry, va_gc> *files = NULL;
27255 macinfo_hash_type *macinfo_htab = NULL;
27256 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
27257
27258 if (! length)
27259 return;
27260
27261 /* output_macinfo* uses these interchangeably. */
27262 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27263 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27264 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27265 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27266
27267 /* AIX Assembler inserts the length, so adjust the reference to match the
27268 offset expected by debuggers. */
27269 strcpy (dl_section_ref, debug_line_label);
27270 if (XCOFF_DEBUGGING_INFO)
27271 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
27272
27273 /* For .debug_macro emit the section header. */
27274 if (!dwarf_strict || dwarf_version >= 5)
27275 {
27276 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27277 "DWARF macro version number");
27278 if (DWARF_OFFSET_SIZE == 8)
27279 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27280 else
27281 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27282 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
27283 debug_line_section, NULL);
27284 }
27285
27286 /* The first loop emits the primary .debug_macinfo section and clears each
27287 macinfo_entry after it has been emitted. If a longer range of define/undef
27288 ops can be optimized using DW_MACRO_import, the DW_MACRO_import op is
27289 emitted and also kept in the vector entry just before the first define/undef
27290 of the range, while the range of define/undef ops itself is kept, unemitted,
27291 for the second loop below. */
27292 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27293 {
27294 switch (ref->code)
27295 {
27296 case DW_MACINFO_start_file:
27297 vec_safe_push (files, *ref);
27298 break;
27299 case DW_MACINFO_end_file:
27300 if (!vec_safe_is_empty (files))
27301 files->pop ();
27302 break;
27303 case DW_MACINFO_define:
27304 case DW_MACINFO_undef:
27305 if ((!dwarf_strict || dwarf_version >= 5)
27306 && HAVE_COMDAT_GROUP
27307 && vec_safe_length (files) != 1
27308 && i > 0
27309 && i + 1 < length
27310 && (*macinfo_table)[i - 1].code == 0)
27311 {
27312 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27313 if (count)
27314 {
27315 i += count - 1;
27316 continue;
27317 }
27318 }
27319 break;
27320 case 0:
27321 /* A dummy entry may be inserted at the beginning to be able
27322 to optimize the whole block of predefined macros. */
27323 if (i == 0)
27324 continue;
27325 default:
27326 break;
27327 }
27328 output_macinfo_op (ref);
27329 ref->info = NULL;
27330 ref->code = 0;
27331 }
27332
27333 if (!macinfo_htab)
27334 return;
27335
27336 /* Save the number of transparent includes so we can adjust the
27337 label number for the fat LTO object DWARF. */
27338 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
27339
27340 delete macinfo_htab;
27341 macinfo_htab = NULL;
27342
27343 /* If any DW_MACRO_import ops were used, terminate the current chain at each
27344 of them, switch to a new comdat .debug_macinfo section, and emit the
27345 define/undef entries within it. */
27346 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27347 switch (ref->code)
27348 {
27349 case 0:
27350 continue;
27351 case DW_MACRO_import:
27352 {
27353 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27354 tree comdat_key = get_identifier (ref->info);
27355 /* Terminate the previous .debug_macinfo section. */
27356 dw2_asm_output_data (1, 0, "End compilation unit");
27357 targetm.asm_out.named_section (debug_macinfo_section_name,
27358 SECTION_DEBUG
27359 | SECTION_LINKONCE
27360 | (early_lto_debug
27361 ? SECTION_EXCLUDE : 0),
27362 comdat_key);
27363 ASM_GENERATE_INTERNAL_LABEL (label,
27364 DEBUG_MACRO_SECTION_LABEL,
27365 ref->lineno + macinfo_label_base);
27366 ASM_OUTPUT_LABEL (asm_out_file, label);
27367 ref->code = 0;
27368 ref->info = NULL;
27369 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27370 "DWARF macro version number");
27371 if (DWARF_OFFSET_SIZE == 8)
27372 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27373 else
27374 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27375 }
27376 break;
27377 case DW_MACINFO_define:
27378 case DW_MACINFO_undef:
27379 output_macinfo_op (ref);
27380 ref->code = 0;
27381 ref->info = NULL;
27382 break;
27383 default:
27384 gcc_unreachable ();
27385 }
27386
27387 macinfo_label_base += macinfo_label_base_adj;
27388 }
27389
27390 /* Initialize the various sections and labels for dwarf output, for either
27391 normal or early LTO debug info according to EARLY_LTO_DEBUG. Returns the
27392 generation (zero-based number of times the function was called). */
27393
27394 static unsigned
27395 init_sections_and_labels (bool early_lto_debug)
27396 {
27397 /* As we may get called multiple times, keep a generation count for
27398 labels. */
27399 static unsigned generation = 0;
27400
27401 if (early_lto_debug)
27402 {
27403 if (!dwarf_split_debug_info)
27404 {
27405 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27406 SECTION_DEBUG | SECTION_EXCLUDE,
27407 NULL);
27408 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
27409 SECTION_DEBUG | SECTION_EXCLUDE,
27410 NULL);
27411 debug_macinfo_section_name
27412 = ((dwarf_strict && dwarf_version < 5)
27413 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
27414 debug_macinfo_section = get_section (debug_macinfo_section_name,
27415 SECTION_DEBUG
27416 | SECTION_EXCLUDE, NULL);
27417 /* For macro info we have to refer to a debug_line section, so,
27418 similarly to split DWARF, emit a skeleton one for early debug. */
27419 debug_skeleton_line_section
27420 = get_section (DEBUG_LTO_LINE_SECTION,
27421 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27422 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27423 DEBUG_SKELETON_LINE_SECTION_LABEL,
27424 generation);
27425 }
27426 else
27427 {
27428 /* ??? Which of the following do we need early? */
27429 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
27430 SECTION_DEBUG | SECTION_EXCLUDE,
27431 NULL);
27432 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
27433 SECTION_DEBUG | SECTION_EXCLUDE,
27434 NULL);
27435 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27436 SECTION_DEBUG
27437 | SECTION_EXCLUDE, NULL);
27438 debug_skeleton_abbrev_section
27439 = get_section (DEBUG_LTO_ABBREV_SECTION,
27440 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27441 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27442 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27443 generation);
27444
27445 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27446 stay in the main .o, but the skeleton_line goes into the split
27447 off dwo. */
27448 debug_skeleton_line_section
27449 = get_section (DEBUG_LTO_LINE_SECTION,
27450 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27451 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27452 DEBUG_SKELETON_LINE_SECTION_LABEL,
27453 generation);
27454 debug_str_offsets_section
27455 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
27456 SECTION_DEBUG | SECTION_EXCLUDE,
27457 NULL);
27458 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27459 DEBUG_SKELETON_INFO_SECTION_LABEL,
27460 generation);
27461 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
27462 DEBUG_STR_DWO_SECTION_FLAGS,
27463 NULL);
27464 debug_macinfo_section_name
27465 = ((dwarf_strict && dwarf_version < 5)
27466 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
27467 debug_macinfo_section = get_section (debug_macinfo_section_name,
27468 SECTION_DEBUG | SECTION_EXCLUDE,
27469 NULL);
27470 }
27471 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
27472 DEBUG_STR_SECTION_FLAGS
27473 | SECTION_EXCLUDE, NULL);
27474 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27475 debug_line_str_section
27476 = get_section (DEBUG_LTO_LINE_STR_SECTION,
27477 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
27478 }
27479 else
27480 {
27481 if (!dwarf_split_debug_info)
27482 {
27483 debug_info_section = get_section (DEBUG_INFO_SECTION,
27484 SECTION_DEBUG, NULL);
27485 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27486 SECTION_DEBUG, NULL);
27487 debug_loc_section = get_section (dwarf_version >= 5
27488 ? DEBUG_LOCLISTS_SECTION
27489 : DEBUG_LOC_SECTION,
27490 SECTION_DEBUG, NULL);
27491 debug_macinfo_section_name
27492 = ((dwarf_strict && dwarf_version < 5)
27493 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
27494 debug_macinfo_section = get_section (debug_macinfo_section_name,
27495 SECTION_DEBUG, NULL);
27496 }
27497 else
27498 {
27499 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27500 SECTION_DEBUG | SECTION_EXCLUDE,
27501 NULL);
27502 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27503 SECTION_DEBUG | SECTION_EXCLUDE,
27504 NULL);
27505 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27506 SECTION_DEBUG, NULL);
27507 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27508 SECTION_DEBUG, NULL);
27509 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27510 SECTION_DEBUG, NULL);
27511 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27512 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27513 generation);
27514
27515 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27516 stay in the main .o, but the skeleton_line goes into the
27517 split off dwo. */
27518 debug_skeleton_line_section
27519 = get_section (DEBUG_DWO_LINE_SECTION,
27520 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27521 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27522 DEBUG_SKELETON_LINE_SECTION_LABEL,
27523 generation);
27524 debug_str_offsets_section
27525 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27526 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27527 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27528 DEBUG_SKELETON_INFO_SECTION_LABEL,
27529 generation);
27530 debug_loc_section = get_section (dwarf_version >= 5
27531 ? DEBUG_DWO_LOCLISTS_SECTION
27532 : DEBUG_DWO_LOC_SECTION,
27533 SECTION_DEBUG | SECTION_EXCLUDE,
27534 NULL);
27535 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27536 DEBUG_STR_DWO_SECTION_FLAGS,
27537 NULL);
27538 debug_macinfo_section_name
27539 = ((dwarf_strict && dwarf_version < 5)
27540 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
27541 debug_macinfo_section = get_section (debug_macinfo_section_name,
27542 SECTION_DEBUG | SECTION_EXCLUDE,
27543 NULL);
27544 }
27545 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27546 SECTION_DEBUG, NULL);
27547 debug_line_section = get_section (DEBUG_LINE_SECTION,
27548 SECTION_DEBUG, NULL);
27549 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27550 SECTION_DEBUG, NULL);
27551 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27552 SECTION_DEBUG, NULL);
27553 debug_str_section = get_section (DEBUG_STR_SECTION,
27554 DEBUG_STR_SECTION_FLAGS, NULL);
27555 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27556 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27557 DEBUG_STR_SECTION_FLAGS, NULL);
27558 debug_ranges_section = get_section (dwarf_version >= 5
27559 ? DEBUG_RNGLISTS_SECTION
27560 : DEBUG_RANGES_SECTION,
27561 SECTION_DEBUG, NULL);
27562 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27563 SECTION_DEBUG, NULL);
27564 }
27565
27566 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27567 DEBUG_ABBREV_SECTION_LABEL, generation);
27568 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27569 DEBUG_INFO_SECTION_LABEL, generation);
27570 info_section_emitted = false;
27571 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27572 DEBUG_LINE_SECTION_LABEL, generation);
27573 /* There are up to 4 unique ranges labels per generation.
27574 See also output_rnglists. */
27575 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27576 DEBUG_RANGES_SECTION_LABEL, generation * 4);
27577 if (dwarf_version >= 5 && dwarf_split_debug_info)
27578 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27579 DEBUG_RANGES_SECTION_LABEL,
27580 1 + generation * 4);
27581 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27582 DEBUG_ADDR_SECTION_LABEL, generation);
27583 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27584 (dwarf_strict && dwarf_version < 5)
27585 ? DEBUG_MACINFO_SECTION_LABEL
27586 : DEBUG_MACRO_SECTION_LABEL, generation);
27587 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
27588 generation);
27589
27590 ++generation;
27591 return generation - 1;
27592 }
27593
27594 /* Set up for Dwarf output at the start of compilation. */
27595
27596 static void
27597 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27598 {
27599 /* Allocate the file_table. */
27600 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27601
27602 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27603 /* Allocate the decl_die_table. */
27604 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27605
27606 /* Allocate the decl_loc_table. */
27607 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27608
27609 /* Allocate the cached_dw_loc_list_table. */
27610 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27611
27612 /* Allocate the initial hunk of the decl_scope_table. */
27613 vec_alloc (decl_scope_table, 256);
27614
27615 /* Allocate the initial hunk of the abbrev_die_table. */
27616 vec_alloc (abbrev_die_table, 256);
27617 /* Zero-th entry is allocated, but unused. */
27618 abbrev_die_table->quick_push (NULL);
27619
27620 /* Allocate the dwarf_proc_stack_usage_map. */
27621 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27622
27623 /* Allocate the pubtypes and pubnames vectors. */
27624 vec_alloc (pubname_table, 32);
27625 vec_alloc (pubtype_table, 32);
27626
27627 vec_alloc (incomplete_types, 64);
27628
27629 vec_alloc (used_rtx_array, 32);
27630
27631 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27632 vec_alloc (macinfo_table, 64);
27633 #endif
27634
27635 /* If front-ends already registered a main translation unit but we were not
27636 ready to perform the association, do this now. */
27637 if (main_translation_unit != NULL_TREE)
27638 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27639 }
27640
27641 /* Called before compile () starts outputting functions, variables
27642 and toplevel asms into assembly. */
27643
27644 static void
27645 dwarf2out_assembly_start (void)
27646 {
27647 if (text_section_line_info)
27648 return;
27649
27650 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27651 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27652 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27653 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27654 COLD_TEXT_SECTION_LABEL, 0);
27655 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27656
27657 switch_to_section (text_section);
27658 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27659 #endif
27660
27661 /* Make sure the line number table for .text always exists. */
27662 text_section_line_info = new_line_info_table ();
27663 text_section_line_info->end_label = text_end_label;
27664
27665 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27666 cur_line_info_table = text_section_line_info;
27667 #endif
27668
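/* When the assembler supports .cfi_sections, we emit CFI via asm directives,
   and no .eh_frame is needed, direct the CFI output to .debug_frame.  */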
27669 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27670 && dwarf2out_do_cfi_asm ()
27671 && !dwarf2out_do_eh_frame ())
27672 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27673 }
27674
27675 /* A helper function for dwarf2out_finish called through
27676 htab_traverse. Assign a string its index. All strings must be
27677 collected into the table by the time index_string is called,
27678 because the indexing code relies on htab_traverse to traverse nodes
27679 in the same order for each run. */
27680
27681 int
27682 index_string (indirect_string_node **h, unsigned int *index)
27683 {
27684 indirect_string_node *node = *h;
27685
27686 find_string_form (node);
27687 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27688 {
27689 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27690 node->index = *index;
27691 *index += 1;
27692 }
27693 return 1;
27694 }
27695
27696 /* A helper function for output_indirect_strings called through
27697 htab_traverse. Output the offset to a string and update the
27698 current offset. */
27699
27700 int
27701 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27702 {
27703 indirect_string_node *node = *h;
27704
27705 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27706 {
27707 /* Assert that this node has been assigned an index. */
27708 gcc_assert (node->index != NO_INDEX_ASSIGNED
27709 && node->index != NOT_INDEXED);
27710 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27711 "indexed string 0x%x: %s", node->index, node->str);
27712 *offset += strlen (node->str) + 1;
27713 }
27714 return 1;
27715 }
27716
27717 /* A helper function for dwarf2out_finish called through
27718 htab_traverse. Output the indexed string. */
27719
27720 int
27721 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27722 {
27723 struct indirect_string_node *node = *h;
27724
27725 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27726 {
27727 /* Assert that the strings are output in the same order as their
27728 indexes were assigned. */
27729 gcc_assert (*cur_idx == node->index);
27730 assemble_string (node->str, strlen (node->str) + 1);
27731 *cur_idx += 1;
27732 }
27733 return 1;
27734 }
27735
27736 /* A helper function for dwarf2out_finish called through
27737 htab_traverse. Emit one queued .debug_str string. */
27738
27739 int
27740 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27741 {
27742 struct indirect_string_node *node = *h;
27743
27744 node->form = find_string_form (node);
27745 if (node->form == form && node->refcount > 0)
27746 {
27747 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27748 assemble_string (node->str, strlen (node->str) + 1);
27749 }
27750
27751 return 1;
27752 }
27753
27754 /* Output the indexed string table. */
27755
27756 static void
27757 output_indirect_strings (void)
27758 {
27759 switch_to_section (debug_str_section);
27760 if (!dwarf_split_debug_info)
27761 debug_str_hash->traverse<enum dwarf_form,
27762 output_indirect_string> (DW_FORM_strp);
27763 else
27764 {
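/* With split DWARF the skeleton strings still go to .debug_str, while the
   indexed strings get an offset table in .debug_str_offsets and their
   bodies in the .dwo string section.  */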
27765 unsigned int offset = 0;
27766 unsigned int cur_idx = 0;
27767
27768 skeleton_debug_str_hash->traverse<enum dwarf_form,
27769 output_indirect_string> (DW_FORM_strp);
27770
27771 switch_to_section (debug_str_offsets_section);
27772 debug_str_hash->traverse_noresize
27773 <unsigned int *, output_index_string_offset> (&offset);
27774 switch_to_section (debug_str_dwo_section);
27775 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27776 (&cur_idx);
27777 }
27778 }
27779
27780 /* Callback for htab_traverse to assign an index to an entry in the
27781 table, and to write that entry to the .debug_addr section. */
27782
27783 int
27784 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27785 {
27786 addr_table_entry *entry = *slot;
27787
27788 if (entry->refcount == 0)
27789 {
27790 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27791 || entry->index == NOT_INDEXED);
27792 return 1;
27793 }
27794
27795 gcc_assert (entry->index == *cur_index);
27796 (*cur_index)++;
27797
27798 switch (entry->kind)
27799 {
27800 case ate_kind_rtx:
27801 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27802 "0x%x", entry->index);
27803 break;
27804 case ate_kind_rtx_dtprel:
27805 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27806 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27807 DWARF2_ADDR_SIZE,
27808 entry->addr.rtl);
27809 fputc ('\n', asm_out_file);
27810 break;
27811 case ate_kind_label:
27812 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27813 "0x%x", entry->index);
27814 break;
27815 default:
27816 gcc_unreachable ();
27817 }
27818 return 1;
27819 }
27820
27821 /* Produce the .debug_addr section. */
27822
27823 static void
27824 output_addr_table (void)
27825 {
27826 unsigned int index = 0;
27827 if (addr_index_table == NULL || addr_index_table->size () == 0)
27828 return;
27829
27830 switch_to_section (debug_addr_section);
27831 addr_index_table
27832 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27833 }
27834
27835 #if ENABLE_ASSERT_CHECKING
27836 /* Verify that all marks are clear. */
27837
27838 static void
27839 verify_marks_clear (dw_die_ref die)
27840 {
27841 dw_die_ref c;
27842
27843 gcc_assert (! die->die_mark);
27844 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27845 }
27846 #endif /* ENABLE_ASSERT_CHECKING */
27847
27848 /* Clear the marks for a die and its children.
27849 Don't complain if the mark isn't set.  */
27850
27851 static void
27852 prune_unmark_dies (dw_die_ref die)
27853 {
27854 dw_die_ref c;
27855
27856 if (die->die_mark)
27857 die->die_mark = 0;
27858 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27859 }
27860
27861 /* Given LOC that is referenced by a DIE we're marking as used, find all
27862 DWARF procedures it references and mark them as used.  */
27863
27864 static void
27865 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27866 {
27867 for (; loc != NULL; loc = loc->dw_loc_next)
27868 switch (loc->dw_loc_opc)
27869 {
27870 case DW_OP_implicit_pointer:
27871 case DW_OP_convert:
27872 case DW_OP_reinterpret:
27873 case DW_OP_GNU_implicit_pointer:
27874 case DW_OP_GNU_convert:
27875 case DW_OP_GNU_reinterpret:
27876 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27877 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27878 break;
27879 case DW_OP_GNU_variable_value:
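/* Resolve a decl reference to its DIE first so the fall-through code
   below can mark that DIE as used.  */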
27880 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
27881 {
27882 dw_die_ref ref
27883 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
27884 if (ref == NULL)
27885 break;
27886 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
27887 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
27888 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
27889 }
27890 /* FALLTHRU */
27891 case DW_OP_call2:
27892 case DW_OP_call4:
27893 case DW_OP_call_ref:
27894 case DW_OP_const_type:
27895 case DW_OP_GNU_const_type:
27896 case DW_OP_GNU_parameter_ref:
27897 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27898 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27899 break;
27900 case DW_OP_regval_type:
27901 case DW_OP_deref_type:
27902 case DW_OP_GNU_regval_type:
27903 case DW_OP_GNU_deref_type:
27904 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27905 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27906 break;
27907 case DW_OP_entry_value:
27908 case DW_OP_GNU_entry_value:
27909 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27910 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27911 break;
27912 default:
27913 break;
27914 }
27915 }
27916
27917 /* Given DIE that we're marking as used, find any other dies
27918 it references as attributes and mark them as used. */
27919
27920 static void
27921 prune_unused_types_walk_attribs (dw_die_ref die)
27922 {
27923 dw_attr_node *a;
27924 unsigned ix;
27925
27926 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27927 {
27928 switch (AT_class (a))
27929 {
27930 /* Make sure DWARF procedures referenced by location descriptions will
27931 get emitted. */
27932 case dw_val_class_loc:
27933 prune_unused_types_walk_loc_descr (AT_loc (a));
27934 break;
27935 case dw_val_class_loc_list:
27936 for (dw_loc_list_ref list = AT_loc_list (a);
27937 list != NULL;
27938 list = list->dw_loc_next)
27939 prune_unused_types_walk_loc_descr (list->expr);
27940 break;
27941
27942 case dw_val_class_die_ref:
27943 /* A reference to another DIE.
27944 Make sure that it will get emitted.
27945 If it was broken out into a comdat group, don't follow it. */
27946 if (! AT_ref (a)->comdat_type_p
27947 || a->dw_attr == DW_AT_specification)
27948 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27949 break;
27950
27951 case dw_val_class_str:
27952 /* Set the string's refcount to 0 so that prune_unused_types_mark
27953 accounts properly for it. */
27954 a->dw_attr_val.v.val_str->refcount = 0;
27955 break;
27956
27957 default:
27958 break;
27959 }
27960 }
27961 }
27962
27963 /* Mark the children DIEs of DIE that describe its generic parameters and arguments.  */
27964
27965 static void
27966 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
27967 {
27968 dw_die_ref c;
27969
27970 if (die == NULL || die->die_child == NULL)
27971 return;
27972 c = die->die_child;
27973 do
27974 {
27975 if (is_template_parameter (c))
27976 prune_unused_types_mark (c, 1);
27977 c = c->die_sib;
27978 } while (c && c != die->die_child);
27979 }
27980
27981 /* Mark DIE as being used. If DOKIDS is true, then walk down
27982 to DIE's children. */
27983
27984 static void
27985 prune_unused_types_mark (dw_die_ref die, int dokids)
27986 {
27987 dw_die_ref c;
27988
27989 if (die->die_mark == 0)
27990 {
27991 /* We haven't done this node yet. Mark it as used. */
27992 die->die_mark = 1;
27993 /* If this is the DIE of a generic type instantiation,
27994 mark the children DIEs that describe its generic parms and
27995 args. */
27996 prune_unused_types_mark_generic_parms_dies (die);
27997
27998 /* We also have to mark its parents as used.
27999 (But we don't want to mark our parent's kids due to this,
28000 unless it is a class.) */
28001 if (die->die_parent)
28002 prune_unused_types_mark (die->die_parent,
28003 class_scope_p (die->die_parent));
28004
28005 /* Mark any referenced nodes. */
28006 prune_unused_types_walk_attribs (die);
28007
28008 /* If this node is a specification,
28009 also mark the definition, if it exists. */
28010 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
28011 prune_unused_types_mark (die->die_definition, 1);
28012 }
28013
28014 if (dokids && die->die_mark != 2)
28015 {
28016 /* We need to walk the children, but haven't done so yet.
28017 Remember that we've walked the kids. */
28018 die->die_mark = 2;
28019
28020 /* If this is an array type, we need to make sure our
28021 kids get marked, even if they're types. If we're
28022 breaking out types into comdat sections, do this
28023 for all type definitions. */
28024 if (die->die_tag == DW_TAG_array_type
28025 || (use_debug_types
28026 && is_type_die (die) && ! is_declaration_die (die)))
28027 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
28028 else
28029 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28030 }
28031 }
28032
28033 /* For local classes, see whether any static member functions were emitted
28034 and if so, mark them. */
28035
28036 static void
28037 prune_unused_types_walk_local_classes (dw_die_ref die)
28038 {
28039 dw_die_ref c;
28040
28041 if (die->die_mark == 2)
28042 return;
28043
28044 switch (die->die_tag)
28045 {
28046 case DW_TAG_structure_type:
28047 case DW_TAG_union_type:
28048 case DW_TAG_class_type:
28049 break;
28050
28051 case DW_TAG_subprogram:
28052 if (!get_AT_flag (die, DW_AT_declaration)
28053 || die->die_definition != NULL)
28054 prune_unused_types_mark (die, 1);
28055 return;
28056
28057 default:
28058 return;
28059 }
28060
28061 /* Mark children. */
28062 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
28063 }
28064
28065 /* Walk the tree DIE and mark types that we actually use. */
28066
28067 static void
28068 prune_unused_types_walk (dw_die_ref die)
28069 {
28070 dw_die_ref c;
28071
28072 /* Don't do anything if this node is already marked and
28073 children have been marked as well. */
28074 if (die->die_mark == 2)
28075 return;
28076
28077 switch (die->die_tag)
28078 {
28079 case DW_TAG_structure_type:
28080 case DW_TAG_union_type:
28081 case DW_TAG_class_type:
28082 if (die->die_perennial_p)
28083 break;
28084
28085 for (c = die->die_parent; c; c = c->die_parent)
28086 if (c->die_tag == DW_TAG_subprogram)
28087 break;
28088
28089 /* Finding used static member functions inside of classes
28090 is needed just for local classes, because for other classes
28091 static member function DIEs with DW_AT_specification
28092 are emitted outside of the DW_TAG_*_type. If we ever change
28093 it, we'd need to call this even for non-local classes. */
28094 if (c)
28095 prune_unused_types_walk_local_classes (die);
28096
28097 /* It's a type node --- don't mark it. */
28098 return;
28099
28100 case DW_TAG_const_type:
28101 case DW_TAG_packed_type:
28102 case DW_TAG_pointer_type:
28103 case DW_TAG_reference_type:
28104 case DW_TAG_rvalue_reference_type:
28105 case DW_TAG_volatile_type:
28106 case DW_TAG_typedef:
28107 case DW_TAG_array_type:
28108 case DW_TAG_interface_type:
28109 case DW_TAG_friend:
28110 case DW_TAG_enumeration_type:
28111 case DW_TAG_subroutine_type:
28112 case DW_TAG_string_type:
28113 case DW_TAG_set_type:
28114 case DW_TAG_subrange_type:
28115 case DW_TAG_ptr_to_member_type:
28116 case DW_TAG_file_type:
28117 /* Type nodes are useful only when other DIEs reference them --- don't
28118 mark them. */
28119 /* FALLTHROUGH */
28120
28121 case DW_TAG_dwarf_procedure:
28122 /* Likewise for DWARF procedures. */
28123
28124 if (die->die_perennial_p)
28125 break;
28126
28127 return;
28128
28129 default:
28130 /* Mark everything else. */
28131 break;
28132 }
28133
28134 if (die->die_mark == 0)
28135 {
28136 die->die_mark = 1;
28137
28138 /* Now, mark any dies referenced from here. */
28139 prune_unused_types_walk_attribs (die);
28140 }
28141
28142 die->die_mark = 2;
28143
28144 /* Mark children. */
28145 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28146 }
28147
28148 /* Increment the string counts on strings referred to from DIE's
28149 attributes. */
28150
28151 static void
28152 prune_unused_types_update_strings (dw_die_ref die)
28153 {
28154 dw_attr_node *a;
28155 unsigned ix;
28156
28157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28158 if (AT_class (a) == dw_val_class_str)
28159 {
28160 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
28161 s->refcount++;
28162 /* Avoid unnecessarily putting strings into the hash table when they
28163 are used less than twice.  */
28164 if (s->refcount
28165 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
28166 {
28167 indirect_string_node **slot
28168 = debug_str_hash->find_slot_with_hash (s->str,
28169 htab_hash_string (s->str),
28170 INSERT);
28171 gcc_assert (*slot == NULL);
28172 *slot = s;
28173 }
28174 }
28175 }
28176
28177 /* Mark DIE and its children as removed. */
28178
28179 static void
28180 mark_removed (dw_die_ref die)
28181 {
28182 dw_die_ref c;
28183 die->removed = true;
28184 FOR_EACH_CHILD (die, c, mark_removed (c));
28185 }
28186
28187 /* Remove from the tree DIE any dies that aren't marked. */
28188
28189 static void
28190 prune_unused_types_prune (dw_die_ref die)
28191 {
28192 dw_die_ref c;
28193
28194 gcc_assert (die->die_mark);
28195 prune_unused_types_update_strings (die);
28196
28197 if (! die->die_child)
28198 return;
28199
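/* DIE's children form a circular list linked through die_sib, with
   die_child pointing at the last child; walk it and splice out any runs
   of unmarked children.  */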
28200 c = die->die_child;
28201 do {
28202 dw_die_ref prev = c, next;
28203 for (c = c->die_sib; ! c->die_mark; c = next)
28204 if (c == die->die_child)
28205 {
28206 /* No marked children between 'prev' and the end of the list. */
28207 if (prev == c)
28208 /* No marked children at all. */
28209 die->die_child = NULL;
28210 else
28211 {
28212 prev->die_sib = c->die_sib;
28213 die->die_child = prev;
28214 }
28215 c->die_sib = NULL;
28216 mark_removed (c);
28217 return;
28218 }
28219 else
28220 {
28221 next = c->die_sib;
28222 c->die_sib = NULL;
28223 mark_removed (c);
28224 }
28225
28226 if (c != prev->die_sib)
28227 prev->die_sib = c;
28228 prune_unused_types_prune (c);
28229 } while (c != die->die_child);
28230 }
28231
28232 /* Remove dies representing declarations that we never use. */
28233
28234 static void
28235 prune_unused_types (void)
28236 {
28237 unsigned int i;
28238 limbo_die_node *node;
28239 comdat_type_node *ctnode;
28240 pubname_entry *pub;
28241 dw_die_ref base_type;
28242
28243 #if ENABLE_ASSERT_CHECKING
28244 /* All the marks should already be clear. */
28245 verify_marks_clear (comp_unit_die ());
28246 for (node = limbo_die_list; node; node = node->next)
28247 verify_marks_clear (node->die);
28248 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28249 verify_marks_clear (ctnode->root_die);
28250 #endif /* ENABLE_ASSERT_CHECKING */
28251
28252 /* Mark types that are used in global variables. */
28253 premark_types_used_by_global_vars ();
28254
28255 /* Set the mark on nodes that are actually used. */
28256 prune_unused_types_walk (comp_unit_die ());
28257 for (node = limbo_die_list; node; node = node->next)
28258 prune_unused_types_walk (node->die);
28259 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28260 {
28261 prune_unused_types_walk (ctnode->root_die);
28262 prune_unused_types_mark (ctnode->type_die, 1);
28263 }
28264
28265 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
28266 are unusual in that they are pubnames that are the children of pubtypes.
28267 They should only be marked via their parent DW_TAG_enumeration_type die,
28268 not as roots in themselves. */
28269 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
28270 if (pub->die->die_tag != DW_TAG_enumerator)
28271 prune_unused_types_mark (pub->die, 1);
28272 for (i = 0; base_types.iterate (i, &base_type); i++)
28273 prune_unused_types_mark (base_type, 1);
28274
28275 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
28276 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
28277 callees). */
28278 cgraph_node *cnode;
28279 FOR_EACH_FUNCTION (cnode)
28280 if (cnode->referred_to_p (false))
28281 {
28282 dw_die_ref die = lookup_decl_die (cnode->decl);
28283 if (die == NULL || die->die_mark)
28284 continue;
28285 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
28286 if (e->caller != cnode
28287 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
28288 {
28289 prune_unused_types_mark (die, 1);
28290 break;
28291 }
28292 }
28293
28294 if (debug_str_hash)
28295 debug_str_hash->empty ();
28296 if (skeleton_debug_str_hash)
28297 skeleton_debug_str_hash->empty ();
28298 prune_unused_types_prune (comp_unit_die ());
28299 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
28300 {
28301 node = *pnode;
28302 if (!node->die->die_mark)
28303 *pnode = node->next;
28304 else
28305 {
28306 prune_unused_types_prune (node->die);
28307 pnode = &node->next;
28308 }
28309 }
28310 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28311 prune_unused_types_prune (ctnode->root_die);
28312
28313 /* Leave the marks clear. */
28314 prune_unmark_dies (comp_unit_die ());
28315 for (node = limbo_die_list; node; node = node->next)
28316 prune_unmark_dies (node->die);
28317 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28318 prune_unmark_dies (ctnode->root_die);
28319 }
28320
28321 /* Helpers to manipulate hash table of comdat type units. */
28322
28323 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
28324 {
28325 static inline hashval_t hash (const comdat_type_node *);
28326 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
28327 };
28328
28329 inline hashval_t
28330 comdat_type_hasher::hash (const comdat_type_node *type_node)
28331 {
28332 hashval_t h;
28333 memcpy (&h, type_node->signature, sizeof (h));
28334 return h;
28335 }
28336
28337 inline bool
28338 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
28339 const comdat_type_node *type_node_2)
28340 {
28341 return (! memcmp (type_node_1->signature, type_node_2->signature,
28342 DWARF_TYPE_SIGNATURE_SIZE));
28343 }
28344
28345 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
28346 to the location where it would have been added had we known its
28347 DECL_ASSEMBLER_NAME when the other attributes were added.  This will
28348 probably improve compactness of debug info, removing equivalent
28349 abbrevs, and hide any differences caused by deferring the
28350 computation of the assembler name, triggered by e.g. PCH. */
28351
28352 static inline void
28353 move_linkage_attr (dw_die_ref die)
28354 {
28355 unsigned ix = vec_safe_length (die->die_attr);
28356 dw_attr_node linkage = (*die->die_attr)[ix - 1];
28357
28358 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
28359 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
28360
28361 while (--ix > 0)
28362 {
28363 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
28364
28365 if (prev->dw_attr == DW_AT_decl_line
28366 || prev->dw_attr == DW_AT_decl_column
28367 || prev->dw_attr == DW_AT_name)
28368 break;
28369 }
28370
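/* Unless the linkage name already sits right after the name and
   decl_line/decl_column attributes found above, move it to that spot.  */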
28371 if (ix != vec_safe_length (die->die_attr) - 1)
28372 {
28373 die->die_attr->pop ();
28374 die->die_attr->quick_insert (ix, linkage);
28375 }
28376 }
28377
28378 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28379 referenced from typed stack ops and count how often they are used. */
28380
28381 static void
28382 mark_base_types (dw_loc_descr_ref loc)
28383 {
28384 dw_die_ref base_type = NULL;
28385
28386 for (; loc; loc = loc->dw_loc_next)
28387 {
28388 switch (loc->dw_loc_opc)
28389 {
28390 case DW_OP_regval_type:
28391 case DW_OP_deref_type:
28392 case DW_OP_GNU_regval_type:
28393 case DW_OP_GNU_deref_type:
28394 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28395 break;
28396 case DW_OP_convert:
28397 case DW_OP_reinterpret:
28398 case DW_OP_GNU_convert:
28399 case DW_OP_GNU_reinterpret:
28400 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28401 continue;
28402 /* FALLTHRU */
28403 case DW_OP_const_type:
28404 case DW_OP_GNU_const_type:
28405 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28406 break;
28407 case DW_OP_entry_value:
28408 case DW_OP_GNU_entry_value:
28409 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28410 continue;
28411 default:
28412 continue;
28413 }
28414 gcc_assert (base_type->die_parent == comp_unit_die ());
28415 if (base_type->die_mark)
28416 base_type->die_mark++;
28417 else
28418 {
28419 base_types.safe_push (base_type);
28420 base_type->die_mark = 1;
28421 }
28422 }
28423 }
28424
28425 /* Comparison function for sorting marked base types. */
28426
28427 static int
28428 base_type_cmp (const void *x, const void *y)
28429 {
28430 dw_die_ref dx = *(const dw_die_ref *) x;
28431 dw_die_ref dy = *(const dw_die_ref *) y;
28432 unsigned int byte_size1, byte_size2;
28433 unsigned int encoding1, encoding2;
28434 unsigned int align1, align2;
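/* Sort primarily by decreasing usage count (die_mark); break ties on byte
   size, encoding and alignment so the order is deterministic.  */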
28435 if (dx->die_mark > dy->die_mark)
28436 return -1;
28437 if (dx->die_mark < dy->die_mark)
28438 return 1;
28439 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28440 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28441 if (byte_size1 < byte_size2)
28442 return 1;
28443 if (byte_size1 > byte_size2)
28444 return -1;
28445 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28446 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28447 if (encoding1 < encoding2)
28448 return 1;
28449 if (encoding1 > encoding2)
28450 return -1;
28451 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28452 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28453 if (align1 < align2)
28454 return 1;
28455 if (align1 > align2)
28456 return -1;
28457 return 0;
28458 }
28459
28460 /* Move base types marked by mark_base_types as early as possible
28461 in the CU, sorted by decreasing usage count both to make the
28462 uleb128 references as small as possible and to make sure they
28463 will have die_offset already computed by calc_die_sizes when
28464 sizes of typed stack loc ops are computed.  */
28465
28466 static void
28467 move_marked_base_types (void)
28468 {
28469 unsigned int i;
28470 dw_die_ref base_type, die, c;
28471
28472 if (base_types.is_empty ())
28473 return;
28474
28475 /* Sort by decreasing usage count, they will be added again in that
28476 order later on. */
28477 base_types.qsort (base_type_cmp);
28478 die = comp_unit_die ();
28479 c = die->die_child;
28480 do
28481 {
28482 dw_die_ref prev = c;
28483 c = c->die_sib;
28484 while (c->die_mark)
28485 {
28486 remove_child_with_prev (c, prev);
28487 /* As base types got marked, there must be at least
28488 one node other than DW_TAG_base_type. */
28489 gcc_assert (die->die_child != NULL);
28490 c = prev->die_sib;
28491 }
28492 }
28493 while (c != die->die_child);
28494 gcc_assert (die->die_child);
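/* Re-insert the sorted base types as the first children of the CU DIE,
   clearing the usage-count marks as we go.  */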
28495 c = die->die_child;
28496 for (i = 0; base_types.iterate (i, &base_type); i++)
28497 {
28498 base_type->die_mark = 0;
28499 base_type->die_sib = c->die_sib;
28500 c->die_sib = base_type;
28501 c = base_type;
28502 }
28503 }
28504
28505 /* Helper function for resolve_addr, attempt to resolve
28506 one CONST_STRING, return true if successful. Similarly verify that
28507 SYMBOL_REFs refer to variables emitted in the current CU. */
28508
28509 static bool
28510 resolve_one_addr (rtx *addr)
28511 {
28512 rtx rtl = *addr;
28513
28514 if (GET_CODE (rtl) == CONST_STRING)
28515 {
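/* Rebuild the STRING_CST and check whether it has been emitted into the
   constant pool; if so, use the pool entry's SYMBOL_REF.  */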
28516 size_t len = strlen (XSTR (rtl, 0)) + 1;
28517 tree t = build_string (len, XSTR (rtl, 0));
28518 tree tlen = size_int (len - 1);
28519 TREE_TYPE (t)
28520 = build_array_type (char_type_node, build_index_type (tlen));
28521 rtl = lookup_constant_def (t);
28522 if (!rtl || !MEM_P (rtl))
28523 return false;
28524 rtl = XEXP (rtl, 0);
28525 if (GET_CODE (rtl) == SYMBOL_REF
28526 && SYMBOL_REF_DECL (rtl)
28527 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28528 return false;
28529 vec_safe_push (used_rtx_array, rtl);
28530 *addr = rtl;
28531 return true;
28532 }
28533
28534 if (GET_CODE (rtl) == SYMBOL_REF
28535 && SYMBOL_REF_DECL (rtl))
28536 {
28537 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28538 {
28539 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28540 return false;
28541 }
28542 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28543 return false;
28544 }
28545
28546 if (GET_CODE (rtl) == CONST)
28547 {
28548 subrtx_ptr_iterator::array_type array;
28549 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28550 if (!resolve_one_addr (*iter))
28551 return false;
28552 }
28553
28554 return true;
28555 }
28556
28557 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
28558 if possible, and create DW_TAG_dwarf_procedure that can be referenced
28559 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
28560
28561 static rtx
28562 string_cst_pool_decl (tree t)
28563 {
28564 rtx rtl = output_constant_def (t, 1);
28565 unsigned char *array;
28566 dw_loc_descr_ref l;
28567 tree decl;
28568 size_t len;
28569 dw_die_ref ref;
28570
28571 if (!rtl || !MEM_P (rtl))
28572 return NULL_RTX;
28573 rtl = XEXP (rtl, 0);
28574 if (GET_CODE (rtl) != SYMBOL_REF
28575 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28576 return NULL_RTX;
28577
28578 decl = SYMBOL_REF_DECL (rtl);
28579 if (!lookup_decl_die (decl))
28580 {
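/* No DIE for the pool decl yet: create a DW_TAG_dwarf_procedure whose
   DW_AT_location is a DW_OP_implicit_value holding the string bytes, so
   that DW_OP_implicit_pointer can refer to it.  */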
28581 len = TREE_STRING_LENGTH (t);
28582 vec_safe_push (used_rtx_array, rtl);
28583 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28584 array = ggc_vec_alloc<unsigned char> (len);
28585 memcpy (array, TREE_STRING_POINTER (t), len);
28586 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28587 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28588 l->dw_loc_oprnd2.v.val_vec.length = len;
28589 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28590 l->dw_loc_oprnd2.v.val_vec.array = array;
28591 add_AT_loc (ref, DW_AT_location, l);
28592 equate_decl_number_to_die (decl, ref);
28593 }
28594 return rtl;
28595 }
28596
28597 /* Helper function of resolve_addr_in_expr. LOC is
28598 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28599 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28600 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
28601 with DW_OP_implicit_pointer if possible
28602 and return true; if unsuccessful, return false.  */
28603
28604 static bool
28605 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28606 {
28607 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28608 HOST_WIDE_INT offset = 0;
28609 dw_die_ref ref = NULL;
28610 tree decl;
28611
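/* Peel off a constant offset added to the symbol; it becomes the offset
   operand of the DW_OP_implicit_pointer emitted below.  */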
28612 if (GET_CODE (rtl) == CONST
28613 && GET_CODE (XEXP (rtl, 0)) == PLUS
28614 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28615 {
28616 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28617 rtl = XEXP (XEXP (rtl, 0), 0);
28618 }
28619 if (GET_CODE (rtl) == CONST_STRING)
28620 {
28621 size_t len = strlen (XSTR (rtl, 0)) + 1;
28622 tree t = build_string (len, XSTR (rtl, 0));
28623 tree tlen = size_int (len - 1);
28624
28625 TREE_TYPE (t)
28626 = build_array_type (char_type_node, build_index_type (tlen));
28627 rtl = string_cst_pool_decl (t);
28628 if (!rtl)
28629 return false;
28630 }
28631 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28632 {
28633 decl = SYMBOL_REF_DECL (rtl);
28634 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28635 {
28636 ref = lookup_decl_die (decl);
28637 if (ref && (get_AT (ref, DW_AT_location)
28638 || get_AT (ref, DW_AT_const_value)))
28639 {
28640 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28641 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28642 loc->dw_loc_oprnd1.val_entry = NULL;
28643 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28644 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28645 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28646 loc->dw_loc_oprnd2.v.val_int = offset;
28647 return true;
28648 }
28649 }
28650 }
28651 return false;
28652 }
28653
28654 /* Helper function for resolve_addr; handle one location
28655 expression.  Return false if at least one CONST_STRING or SYMBOL_REF in
28656 the location list couldn't be resolved. */
28657
28658 static bool
28659 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
28660 {
28661 dw_loc_descr_ref keep = NULL;
28662 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28663 switch (loc->dw_loc_opc)
28664 {
28665 case DW_OP_addr:
28666 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28667 {
28668 if ((prev == NULL
28669 || prev->dw_loc_opc == DW_OP_piece
28670 || prev->dw_loc_opc == DW_OP_bit_piece)
28671 && loc->dw_loc_next
28672 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28673 && (!dwarf_strict || dwarf_version >= 5)
28674 && optimize_one_addr_into_implicit_ptr (loc))
28675 break;
28676 return false;
28677 }
28678 break;
28679 case DW_OP_GNU_addr_index:
28680 case DW_OP_GNU_const_index:
28681 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28682 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28683 {
28684 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28685 if (!resolve_one_addr (&rtl))
28686 return false;
28687 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28688 loc->dw_loc_oprnd1.val_entry
28689 = add_addr_table_entry (rtl, ate_kind_rtx);
28690 }
28691 break;
28692 case DW_OP_const4u:
28693 case DW_OP_const8u:
28694 if (loc->dtprel
28695 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28696 return false;
28697 break;
28698 case DW_OP_plus_uconst:
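/* If a DW_OP_litN/DW_OP_constN followed by DW_OP_plus encodes the addend
   in fewer bytes than DW_OP_plus_uconst, use that sequence instead.  */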
28699 if (size_of_loc_descr (loc)
28700 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28701 + 1
28702 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28703 {
28704 dw_loc_descr_ref repl
28705 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28706 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28707 add_loc_descr (&repl, loc->dw_loc_next);
28708 *loc = *repl;
28709 }
28710 break;
28711 case DW_OP_implicit_value:
28712 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28713 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28714 return false;
28715 break;
28716 case DW_OP_implicit_pointer:
28717 case DW_OP_GNU_implicit_pointer:
28718 case DW_OP_GNU_parameter_ref:
28719 case DW_OP_GNU_variable_value:
28720 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28721 {
28722 dw_die_ref ref
28723 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28724 if (ref == NULL)
28725 return false;
28726 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28727 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28728 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28729 }
28730 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
28731 {
28732 if (prev == NULL
28733 && loc->dw_loc_next == NULL
28734 && AT_class (a) == dw_val_class_loc)
28735 switch (a->dw_attr)
28736 {
28737 /* Following attributes allow both exprloc and reference,
28738 so if the whole expression is DW_OP_GNU_variable_value
28739 alone we could transform it into reference. */
28740 case DW_AT_byte_size:
28741 case DW_AT_bit_size:
28742 case DW_AT_lower_bound:
28743 case DW_AT_upper_bound:
28744 case DW_AT_bit_stride:
28745 case DW_AT_count:
28746 case DW_AT_allocated:
28747 case DW_AT_associated:
28748 case DW_AT_byte_stride:
28749 a->dw_attr_val.val_class = dw_val_class_die_ref;
28750 a->dw_attr_val.val_entry = NULL;
28751 a->dw_attr_val.v.val_die_ref.die
28752 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28753 a->dw_attr_val.v.val_die_ref.external = 0;
28754 return true;
28755 default:
28756 break;
28757 }
28758 if (dwarf_strict)
28759 return false;
28760 }
28761 break;
28762 case DW_OP_const_type:
28763 case DW_OP_regval_type:
28764 case DW_OP_deref_type:
28765 case DW_OP_convert:
28766 case DW_OP_reinterpret:
28767 case DW_OP_GNU_const_type:
28768 case DW_OP_GNU_regval_type:
28769 case DW_OP_GNU_deref_type:
28770 case DW_OP_GNU_convert:
28771 case DW_OP_GNU_reinterpret:
28772 while (loc->dw_loc_next
28773 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28774 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28775 {
28776 dw_die_ref base1, base2;
28777 unsigned enc1, enc2, size1, size2;
28778 if (loc->dw_loc_opc == DW_OP_regval_type
28779 || loc->dw_loc_opc == DW_OP_deref_type
28780 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28781 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28782 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28783 else if (loc->dw_loc_oprnd1.val_class
28784 == dw_val_class_unsigned_const)
28785 break;
28786 else
28787 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28788 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28789 == dw_val_class_unsigned_const)
28790 break;
28791 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28792 gcc_assert (base1->die_tag == DW_TAG_base_type
28793 && base2->die_tag == DW_TAG_base_type);
28794 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28795 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28796 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28797 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28798 if (size1 == size2
28799 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28800 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28801 && loc != keep)
28802 || enc1 == enc2))
28803 {
28804 /* Optimize away next DW_OP_convert after
28805 adjusting LOC's base type die reference. */
28806 if (loc->dw_loc_opc == DW_OP_regval_type
28807 || loc->dw_loc_opc == DW_OP_deref_type
28808 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28809 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28810 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28811 else
28812 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28813 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28814 continue;
28815 }
28816 /* Don't change integer DW_OP_convert after e.g. floating
28817 point typed stack entry. */
28818 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28819 keep = loc->dw_loc_next;
28820 break;
28821 }
28822 break;
28823 default:
28824 break;
28825 }
28826 return true;
28827 }
28828
28829 /* Helper function of resolve_addr. DIE had DW_AT_location of
28830 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
28831 and DW_OP_addr couldn't be resolved. resolve_addr has already
28832 removed the DW_AT_location attribute. This function attempts to
28833 add either a new DW_AT_location attribute using DW_OP_implicit_pointer
28834 or a DW_AT_const_value attribute to it, if possible.  */
28835
28836 static void
28837 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28838 {
28839 if (!VAR_P (decl)
28840 || lookup_decl_die (decl) != die
28841 || DECL_EXTERNAL (decl)
28842 || !TREE_STATIC (decl)
28843 || DECL_INITIAL (decl) == NULL_TREE
28844 || DECL_P (DECL_INITIAL (decl))
28845 || get_AT (die, DW_AT_const_value))
28846 return;
28847
28848 tree init = DECL_INITIAL (decl);
28849 HOST_WIDE_INT offset = 0;
28850 /* For variables that have been optimized away and thus
28851 don't have a memory location, see if we can emit
28852 DW_AT_const_value instead. */
28853 if (tree_add_const_value_attribute (die, init))
28854 return;
28855 if (dwarf_strict && dwarf_version < 5)
28856 return;
28857 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28858 and ADDR_EXPR refers to a decl that has DW_AT_location or
28859 DW_AT_const_value (but isn't addressable, otherwise
28860 resolving the original DW_OP_addr wouldn't fail), see if
28861 we can add DW_OP_implicit_pointer. */
28862 STRIP_NOPS (init);
28863 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28864 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28865 {
28866 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28867 init = TREE_OPERAND (init, 0);
28868 STRIP_NOPS (init);
28869 }
28870 if (TREE_CODE (init) != ADDR_EXPR)
28871 return;
28872 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28873 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28874 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28875 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28876 && TREE_OPERAND (init, 0) != decl))
28877 {
28878 dw_die_ref ref;
28879 dw_loc_descr_ref l;
28880
28881 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28882 {
28883 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28884 if (!rtl)
28885 return;
28886 decl = SYMBOL_REF_DECL (rtl);
28887 }
28888 else
28889 decl = TREE_OPERAND (init, 0);
28890 ref = lookup_decl_die (decl);
28891 if (ref == NULL
28892 || (!get_AT (ref, DW_AT_location)
28893 && !get_AT (ref, DW_AT_const_value)))
28894 return;
28895 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28896 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28897 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28898 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28899 add_AT_loc (die, DW_AT_location, l);
28900 }
28901 }
28902
28903 /* Return NULL if L is a valid DWARF expression, otherwise return the
28904 first op that is not valid in a DWARF expression.  */
28905
28906 static dw_loc_descr_ref
28907 non_dwarf_expression (dw_loc_descr_ref l)
28908 {
28909 while (l)
28910 {
28911 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28912 return l;
28913 switch (l->dw_loc_opc)
28914 {
28915 case DW_OP_regx:
28916 case DW_OP_implicit_value:
28917 case DW_OP_stack_value:
28918 case DW_OP_implicit_pointer:
28919 case DW_OP_GNU_implicit_pointer:
28920 case DW_OP_GNU_parameter_ref:
28921 case DW_OP_piece:
28922 case DW_OP_bit_piece:
28923 return l;
28924 default:
28925 break;
28926 }
28927 l = l->dw_loc_next;
28928 }
28929 return NULL;
28930 }
28931
28932 /* Return adjusted copy of EXPR:
28933 If it is empty DWARF expression, return it.
28934 If it is valid non-empty DWARF expression,
28935 return copy of EXPR with DW_OP_deref appended to it.
28936 If it is DWARF expression followed by DW_OP_reg{N,x}, return
28937 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
28938 If it is DWARF expression followed by DW_OP_stack_value, return
28939 copy of the DWARF expression without anything appended.
28940 Otherwise, return NULL. */
28941
28942 static dw_loc_descr_ref
28943 copy_deref_exprloc (dw_loc_descr_ref expr)
28944 {
28945 dw_loc_descr_ref tail = NULL;
28946
28947 if (expr == NULL)
28948 return NULL;
28949
28950 dw_loc_descr_ref l = non_dwarf_expression (expr);
28951 if (l && l->dw_loc_next)
28952 return NULL;
28953
28954 if (l)
28955 {
28956 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28957 tail = new_loc_descr ((enum dwarf_location_atom)
28958 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28959 0, 0);
28960 else
28961 switch (l->dw_loc_opc)
28962 {
28963 case DW_OP_regx:
28964 tail = new_loc_descr (DW_OP_bregx,
28965 l->dw_loc_oprnd1.v.val_unsigned, 0);
28966 break;
28967 case DW_OP_stack_value:
28968 break;
28969 default:
28970 return NULL;
28971 }
28972 }
28973 else
28974 tail = new_loc_descr (DW_OP_deref, 0, 0);
28975
28976 dw_loc_descr_ref ret = NULL, *p = &ret;
28977 while (expr != l)
28978 {
28979 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
28980 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
28981 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
28982 p = &(*p)->dw_loc_next;
28983 expr = expr->dw_loc_next;
28984 }
28985 *p = tail;
28986 return ret;
28987 }
28988
28989 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
28990 reference to a variable or argument, adjust it if needed and return:
28991 -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size
28992 attribute if present should be removed
28993 0 keep the attribute perhaps with minor modifications, no need to rescan
28994 1 if the attribute has been successfully adjusted. */
28995
28996 static int
28997 optimize_string_length (dw_attr_node *a)
28998 {
28999 dw_loc_descr_ref l = AT_loc (a), lv;
29000 dw_die_ref die;
29001 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29002 {
29003 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29004 die = lookup_decl_die (decl);
29005 if (die)
29006 {
29007 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29008 l->dw_loc_oprnd1.v.val_die_ref.die = die;
29009 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29010 }
29011 else
29012 return -1;
29013 }
29014 else
29015 die = l->dw_loc_oprnd1.v.val_die_ref.die;
29016
29017 /* DWARF5 allows reference class, so we can then reference the DIE.
29018 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
29019 if (l->dw_loc_next != NULL && dwarf_version >= 5)
29020 {
29021 a->dw_attr_val.val_class = dw_val_class_die_ref;
29022 a->dw_attr_val.val_entry = NULL;
29023 a->dw_attr_val.v.val_die_ref.die = die;
29024 a->dw_attr_val.v.val_die_ref.external = 0;
29025 return 0;
29026 }
29027
29028 dw_attr_node *av = get_AT (die, DW_AT_location);
29029 dw_loc_list_ref d;
29030 bool non_dwarf_expr = false;
29031
29032 if (av == NULL)
29033 return dwarf_strict ? -1 : 0;
29034 switch (AT_class (av))
29035 {
29036 case dw_val_class_loc_list:
29037 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29038 if (d->expr && non_dwarf_expression (d->expr))
29039 non_dwarf_expr = true;
29040 break;
29041 case dw_val_class_loc:
29042 lv = AT_loc (av);
29043 if (lv == NULL)
29044 return dwarf_strict ? -1 : 0;
29045 if (non_dwarf_expression (lv))
29046 non_dwarf_expr = true;
29047 break;
29048 default:
29049 return dwarf_strict ? -1 : 0;
29050 }
29051
29052 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
29053 into DW_OP_call4 or DW_OP_GNU_variable_value into
29054 DW_OP_call4 DW_OP_deref, do so. */
29055 if (!non_dwarf_expr
29056 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
29057 {
29058 l->dw_loc_opc = DW_OP_call4;
29059 if (l->dw_loc_next)
29060 l->dw_loc_next = NULL;
29061 else
29062 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
29063 return 0;
29064 }
29065
29066 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
29067 copy over the DW_AT_location attribute from die to a. */
29068 if (l->dw_loc_next != NULL)
29069 {
29070 a->dw_attr_val = av->dw_attr_val;
29071 return 1;
29072 }
29073
29074 dw_loc_list_ref list, *p;
29075 switch (AT_class (av))
29076 {
29077 case dw_val_class_loc_list:
29078 p = &list;
29079 list = NULL;
29080 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29081 {
29082 lv = copy_deref_exprloc (d->expr);
29083 if (lv)
29084 {
29085 *p = new_loc_list (lv, d->begin, d->end, d->section);
29086 p = &(*p)->dw_loc_next;
29087 }
29088 else if (!dwarf_strict && d->expr)
29089 return 0;
29090 }
29091 if (list == NULL)
29092 return dwarf_strict ? -1 : 0;
29093 a->dw_attr_val.val_class = dw_val_class_loc_list;
29094 gen_llsym (list);
29095 *AT_loc_list_ptr (a) = list;
29096 return 1;
29097 case dw_val_class_loc:
29098 lv = copy_deref_exprloc (AT_loc (av));
29099 if (lv == NULL)
29100 return dwarf_strict ? -1 : 0;
29101 a->dw_attr_val.v.val_loc = lv;
29102 return 1;
29103 default:
29104 gcc_unreachable ();
29105 }
29106 }
29107
29108 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
29109 an address in .rodata section if the string literal is emitted there,
29110 or remove the containing location list or replace DW_AT_const_value
29111 with DW_AT_location and empty location expression, if it isn't found
29112 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
29113 to something that has been emitted in the current CU. */
29114
29115 static void
29116 resolve_addr (dw_die_ref die)
29117 {
29118 dw_die_ref c;
29119 dw_attr_node *a;
29120 dw_loc_list_ref *curr, *start, loc;
29121 unsigned ix;
29122 bool remove_AT_byte_size = false;
29123
29124 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29125 switch (AT_class (a))
29126 {
29127 case dw_val_class_loc_list:
29128 start = curr = AT_loc_list_ptr (a);
29129 loc = *curr;
29130 gcc_assert (loc);
29131 /* The same list can be referenced more than once. See if we have
29132 already recorded the result from a previous pass. */
29133 if (loc->replaced)
29134 *curr = loc->dw_loc_next;
29135 else if (!loc->resolved_addr)
29136 {
29137 /* As things stand, we do not expect or allow one die to
29138 reference a suffix of another die's location list chain.
29139 References must be identical or completely separate.
29140 There is therefore no need to cache the result of this
29141 pass on any list other than the first; doing so
29142 would lead to unnecessary writes. */
29143 while (*curr)
29144 {
29145 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
29146 if (!resolve_addr_in_expr (a, (*curr)->expr))
29147 {
29148 dw_loc_list_ref next = (*curr)->dw_loc_next;
29149 dw_loc_descr_ref l = (*curr)->expr;
29150
29151 if (next && (*curr)->ll_symbol)
29152 {
29153 gcc_assert (!next->ll_symbol);
29154 next->ll_symbol = (*curr)->ll_symbol;
29155 }
29156 if (dwarf_split_debug_info)
29157 remove_loc_list_addr_table_entries (l);
29158 *curr = next;
29159 }
29160 else
29161 {
29162 mark_base_types ((*curr)->expr);
29163 curr = &(*curr)->dw_loc_next;
29164 }
29165 }
29166 if (loc == *start)
29167 loc->resolved_addr = 1;
29168 else
29169 {
29170 loc->replaced = 1;
29171 loc->dw_loc_next = *start;
29172 }
29173 }
29174 if (!*start)
29175 {
29176 remove_AT (die, a->dw_attr);
29177 ix--;
29178 }
29179 break;
29180 case dw_val_class_loc:
29181 {
29182 dw_loc_descr_ref l = AT_loc (a);
29183 /* DW_OP_GNU_variable_value DW_OP_stack_value or
29184 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
29185 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
29186 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
29187 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
29188 with DW_FORM_ref referencing the same DIE as
29189 DW_OP_GNU_variable_value used to reference. */
29190 if (a->dw_attr == DW_AT_string_length
29191 && l
29192 && l->dw_loc_opc == DW_OP_GNU_variable_value
29193 && (l->dw_loc_next == NULL
29194 || (l->dw_loc_next->dw_loc_next == NULL
29195 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
29196 {
29197 switch (optimize_string_length (a))
29198 {
29199 case -1:
29200 remove_AT (die, a->dw_attr);
29201 ix--;
29202 /* If we drop DW_AT_string_length, we also need to drop
29203 DW_AT_{string_length_,}byte_size. */
29204 remove_AT_byte_size = true;
29205 continue;
29206 default:
29207 break;
29208 case 1:
29209 /* Even if we keep the optimized DW_AT_string_length,
29210 it might have changed AT_class, so process it again. */
29211 ix--;
29212 continue;
29213 }
29214 }
29215 /* For -gdwarf-2 don't attempt to optimize
29216 DW_AT_data_member_location containing
29217 DW_OP_plus_uconst - older consumers might
29218 rely on it being that op instead of a more complex,
29219 but shorter, location description. */
29220 if ((dwarf_version > 2
29221 || a->dw_attr != DW_AT_data_member_location
29222 || l == NULL
29223 || l->dw_loc_opc != DW_OP_plus_uconst
29224 || l->dw_loc_next != NULL)
29225 && !resolve_addr_in_expr (a, l))
29226 {
29227 if (dwarf_split_debug_info)
29228 remove_loc_list_addr_table_entries (l);
29229 if (l != NULL
29230 && l->dw_loc_next == NULL
29231 && l->dw_loc_opc == DW_OP_addr
29232 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
29233 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
29234 && a->dw_attr == DW_AT_location)
29235 {
29236 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
29237 remove_AT (die, a->dw_attr);
29238 ix--;
29239 optimize_location_into_implicit_ptr (die, decl);
29240 break;
29241 }
29242 if (a->dw_attr == DW_AT_string_length)
29243 /* If we drop DW_AT_string_length, we also need to drop
29244 DW_AT_{string_length_,}byte_size. */
29245 remove_AT_byte_size = true;
29246 remove_AT (die, a->dw_attr);
29247 ix--;
29248 }
29249 else
29250 mark_base_types (l);
29251 }
29252 break;
29253 case dw_val_class_addr:
29254 if (a->dw_attr == DW_AT_const_value
29255 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
29256 {
29257 if (AT_index (a) != NOT_INDEXED)
29258 remove_addr_table_entry (a->dw_attr_val.val_entry);
29259 remove_AT (die, a->dw_attr);
29260 ix--;
29261 }
29262 if ((die->die_tag == DW_TAG_call_site
29263 && a->dw_attr == DW_AT_call_origin)
29264 || (die->die_tag == DW_TAG_GNU_call_site
29265 && a->dw_attr == DW_AT_abstract_origin))
29266 {
29267 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
29268 dw_die_ref tdie = lookup_decl_die (tdecl);
29269 dw_die_ref cdie;
29270 if (tdie == NULL
29271 && DECL_EXTERNAL (tdecl)
29272 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
29273 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
29274 {
29275 dw_die_ref pdie = cdie;
29276 /* Make sure we don't add these DIEs into type units.
29277 We could emit skeleton DIEs for context (namespaces,
29278 outer structs/classes) and a skeleton DIE for the
29279 innermost context with DW_AT_signature pointing to the
29280 type unit. See PR78835. */
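/* The stub created below is deliberately minimal: an external,
   declaration-only DW_TAG_subprogram carrying just linkage and
   name/source-coordinate attributes, which is enough for the call
   site reference without dragging new type DIEs into this unit.  */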
29281 while (pdie && pdie->die_tag != DW_TAG_type_unit)
29282 pdie = pdie->die_parent;
29283 if (pdie == NULL)
29284 {
29285 /* Creating a full DIE for tdecl is overly expensive, and
29286 at this point even wrong when in the LTO phase,
29287 as it can end up generating new type DIEs we didn't
29288 output and thus optimize_external_refs will crash. */
29289 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
29290 add_AT_flag (tdie, DW_AT_external, 1);
29291 add_AT_flag (tdie, DW_AT_declaration, 1);
29292 add_linkage_attr (tdie, tdecl);
29293 add_name_and_src_coords_attributes (tdie, tdecl, true);
29294 equate_decl_number_to_die (tdecl, tdie);
29295 }
29296 }
29297 if (tdie)
29298 {
29299 a->dw_attr_val.val_class = dw_val_class_die_ref;
29300 a->dw_attr_val.v.val_die_ref.die = tdie;
29301 a->dw_attr_val.v.val_die_ref.external = 0;
29302 }
29303 else
29304 {
29305 if (AT_index (a) != NOT_INDEXED)
29306 remove_addr_table_entry (a->dw_attr_val.val_entry);
29307 remove_AT (die, a->dw_attr);
29308 ix--;
29309 }
29310 }
29311 break;
29312 default:
29313 break;
29314 }
29315
29316 if (remove_AT_byte_size)
29317 remove_AT (die, dwarf_version >= 5
29318 ? DW_AT_string_length_byte_size
29319 : DW_AT_byte_size);
29320
29321 FOR_EACH_CHILD (die, c, resolve_addr (c));
29322 }
29323 \f
29324 /* Helper routines for optimize_location_lists.
29325 This pass tries to share identical local lists in .debug_loc
29326 section. */
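/* Roughly: every location list reachable from the CU DIE is hashed and
   entered into a hash table; the first list with a given contents
   becomes the canonical copy and later equal lists are simply
   redirected to it, so .debug_loc emits each distinct list once.  */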
29327
29328 /* Iteratively hash operands of LOC opcode into HSTATE. */
29329
29330 static void
29331 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
29332 {
29333 dw_val_ref val1 = &loc->dw_loc_oprnd1;
29334 dw_val_ref val2 = &loc->dw_loc_oprnd2;
29335
29336 switch (loc->dw_loc_opc)
29337 {
29338 case DW_OP_const4u:
29339 case DW_OP_const8u:
29340 if (loc->dtprel)
29341 goto hash_addr;
29342 /* FALLTHRU */
29343 case DW_OP_const1u:
29344 case DW_OP_const1s:
29345 case DW_OP_const2u:
29346 case DW_OP_const2s:
29347 case DW_OP_const4s:
29348 case DW_OP_const8s:
29349 case DW_OP_constu:
29350 case DW_OP_consts:
29351 case DW_OP_pick:
29352 case DW_OP_plus_uconst:
29353 case DW_OP_breg0:
29354 case DW_OP_breg1:
29355 case DW_OP_breg2:
29356 case DW_OP_breg3:
29357 case DW_OP_breg4:
29358 case DW_OP_breg5:
29359 case DW_OP_breg6:
29360 case DW_OP_breg7:
29361 case DW_OP_breg8:
29362 case DW_OP_breg9:
29363 case DW_OP_breg10:
29364 case DW_OP_breg11:
29365 case DW_OP_breg12:
29366 case DW_OP_breg13:
29367 case DW_OP_breg14:
29368 case DW_OP_breg15:
29369 case DW_OP_breg16:
29370 case DW_OP_breg17:
29371 case DW_OP_breg18:
29372 case DW_OP_breg19:
29373 case DW_OP_breg20:
29374 case DW_OP_breg21:
29375 case DW_OP_breg22:
29376 case DW_OP_breg23:
29377 case DW_OP_breg24:
29378 case DW_OP_breg25:
29379 case DW_OP_breg26:
29380 case DW_OP_breg27:
29381 case DW_OP_breg28:
29382 case DW_OP_breg29:
29383 case DW_OP_breg30:
29384 case DW_OP_breg31:
29385 case DW_OP_regx:
29386 case DW_OP_fbreg:
29387 case DW_OP_piece:
29388 case DW_OP_deref_size:
29389 case DW_OP_xderef_size:
29390 hstate.add_object (val1->v.val_int);
29391 break;
29392 case DW_OP_skip:
29393 case DW_OP_bra:
29394 {
29395 int offset;
29396
29397 gcc_assert (val1->val_class == dw_val_class_loc);
29398 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
29399 hstate.add_object (offset);
29400 }
29401 break;
29402 case DW_OP_implicit_value:
29403 hstate.add_object (val1->v.val_unsigned);
29404 switch (val2->val_class)
29405 {
29406 case dw_val_class_const:
29407 hstate.add_object (val2->v.val_int);
29408 break;
29409 case dw_val_class_vec:
29410 {
29411 unsigned int elt_size = val2->v.val_vec.elt_size;
29412 unsigned int len = val2->v.val_vec.length;
29413
29414 hstate.add_int (elt_size);
29415 hstate.add_int (len);
29416 hstate.add (val2->v.val_vec.array, len * elt_size);
29417 }
29418 break;
29419 case dw_val_class_const_double:
29420 hstate.add_object (val2->v.val_double.low);
29421 hstate.add_object (val2->v.val_double.high);
29422 break;
29423 case dw_val_class_wide_int:
29424 hstate.add (val2->v.val_wide->get_val (),
29425 get_full_len (*val2->v.val_wide)
29426 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29427 break;
29428 case dw_val_class_addr:
29429 inchash::add_rtx (val2->v.val_addr, hstate);
29430 break;
29431 default:
29432 gcc_unreachable ();
29433 }
29434 break;
29435 case DW_OP_bregx:
29436 case DW_OP_bit_piece:
29437 hstate.add_object (val1->v.val_int);
29438 hstate.add_object (val2->v.val_int);
29439 break;
29440 case DW_OP_addr:
29441 hash_addr:
29442 if (loc->dtprel)
29443 {
29444 unsigned char dtprel = 0xd1;
29445 hstate.add_object (dtprel);
29446 }
29447 inchash::add_rtx (val1->v.val_addr, hstate);
29448 break;
29449 case DW_OP_GNU_addr_index:
29450 case DW_OP_GNU_const_index:
29451 {
29452 if (loc->dtprel)
29453 {
29454 unsigned char dtprel = 0xd1;
29455 hstate.add_object (dtprel);
29456 }
29457 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29458 }
29459 break;
29460 case DW_OP_implicit_pointer:
29461 case DW_OP_GNU_implicit_pointer:
29462 hstate.add_int (val2->v.val_int);
29463 break;
29464 case DW_OP_entry_value:
29465 case DW_OP_GNU_entry_value:
29466 hstate.add_object (val1->v.val_loc);
29467 break;
29468 case DW_OP_regval_type:
29469 case DW_OP_deref_type:
29470 case DW_OP_GNU_regval_type:
29471 case DW_OP_GNU_deref_type:
29472 {
29473 unsigned int byte_size
29474 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29475 unsigned int encoding
29476 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29477 hstate.add_object (val1->v.val_int);
29478 hstate.add_object (byte_size);
29479 hstate.add_object (encoding);
29480 }
29481 break;
29482 case DW_OP_convert:
29483 case DW_OP_reinterpret:
29484 case DW_OP_GNU_convert:
29485 case DW_OP_GNU_reinterpret:
29486 if (val1->val_class == dw_val_class_unsigned_const)
29487 {
29488 hstate.add_object (val1->v.val_unsigned);
29489 break;
29490 }
29491 /* FALLTHRU */
29492 case DW_OP_const_type:
29493 case DW_OP_GNU_const_type:
29494 {
29495 unsigned int byte_size
29496 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29497 unsigned int encoding
29498 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29499 hstate.add_object (byte_size);
29500 hstate.add_object (encoding);
29501 if (loc->dw_loc_opc != DW_OP_const_type
29502 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29503 break;
29504 hstate.add_object (val2->val_class);
29505 switch (val2->val_class)
29506 {
29507 case dw_val_class_const:
29508 hstate.add_object (val2->v.val_int);
29509 break;
29510 case dw_val_class_vec:
29511 {
29512 unsigned int elt_size = val2->v.val_vec.elt_size;
29513 unsigned int len = val2->v.val_vec.length;
29514
29515 hstate.add_object (elt_size);
29516 hstate.add_object (len);
29517 hstate.add (val2->v.val_vec.array, len * elt_size);
29518 }
29519 break;
29520 case dw_val_class_const_double:
29521 hstate.add_object (val2->v.val_double.low);
29522 hstate.add_object (val2->v.val_double.high);
29523 break;
29524 case dw_val_class_wide_int:
29525 hstate.add (val2->v.val_wide->get_val (),
29526 get_full_len (*val2->v.val_wide)
29527 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29528 break;
29529 default:
29530 gcc_unreachable ();
29531 }
29532 }
29533 break;
29534
29535 default:
29536 /* Other codes have no operands. */
29537 break;
29538 }
29539 }
29540
29541 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29542
29543 static inline void
29544 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29545 {
29546 dw_loc_descr_ref l;
29547 bool sizes_computed = false;
29548 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
29549 size_of_locs (loc);
29550
29551 for (l = loc; l != NULL; l = l->dw_loc_next)
29552 {
29553 enum dwarf_location_atom opc = l->dw_loc_opc;
29554 hstate.add_object (opc);
29555 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29556 {
29557 size_of_locs (loc);
29558 sizes_computed = true;
29559 }
29560 hash_loc_operands (l, hstate);
29561 }
29562 }
29563
29564 /* Compute hash of the whole location list LIST_HEAD. */
29565
29566 static inline void
29567 hash_loc_list (dw_loc_list_ref list_head)
29568 {
29569 dw_loc_list_ref curr = list_head;
29570 inchash::hash hstate;
29571
29572 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29573 {
29574 hstate.add (curr->begin, strlen (curr->begin) + 1);
29575 hstate.add (curr->end, strlen (curr->end) + 1);
29576 if (curr->section)
29577 hstate.add (curr->section, strlen (curr->section) + 1);
29578 hash_locs (curr->expr, hstate);
29579 }
29580 list_head->hash = hstate.end ();
29581 }
29582
29583 /* Return true if X and Y opcodes have the same operands. */
29584
29585 static inline bool
29586 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29587 {
29588 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29589 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29590 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29591 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29592
29593 switch (x->dw_loc_opc)
29594 {
29595 case DW_OP_const4u:
29596 case DW_OP_const8u:
29597 if (x->dtprel)
29598 goto hash_addr;
29599 /* FALLTHRU */
29600 case DW_OP_const1u:
29601 case DW_OP_const1s:
29602 case DW_OP_const2u:
29603 case DW_OP_const2s:
29604 case DW_OP_const4s:
29605 case DW_OP_const8s:
29606 case DW_OP_constu:
29607 case DW_OP_consts:
29608 case DW_OP_pick:
29609 case DW_OP_plus_uconst:
29610 case DW_OP_breg0:
29611 case DW_OP_breg1:
29612 case DW_OP_breg2:
29613 case DW_OP_breg3:
29614 case DW_OP_breg4:
29615 case DW_OP_breg5:
29616 case DW_OP_breg6:
29617 case DW_OP_breg7:
29618 case DW_OP_breg8:
29619 case DW_OP_breg9:
29620 case DW_OP_breg10:
29621 case DW_OP_breg11:
29622 case DW_OP_breg12:
29623 case DW_OP_breg13:
29624 case DW_OP_breg14:
29625 case DW_OP_breg15:
29626 case DW_OP_breg16:
29627 case DW_OP_breg17:
29628 case DW_OP_breg18:
29629 case DW_OP_breg19:
29630 case DW_OP_breg20:
29631 case DW_OP_breg21:
29632 case DW_OP_breg22:
29633 case DW_OP_breg23:
29634 case DW_OP_breg24:
29635 case DW_OP_breg25:
29636 case DW_OP_breg26:
29637 case DW_OP_breg27:
29638 case DW_OP_breg28:
29639 case DW_OP_breg29:
29640 case DW_OP_breg30:
29641 case DW_OP_breg31:
29642 case DW_OP_regx:
29643 case DW_OP_fbreg:
29644 case DW_OP_piece:
29645 case DW_OP_deref_size:
29646 case DW_OP_xderef_size:
29647 return valx1->v.val_int == valy1->v.val_int;
29648 case DW_OP_skip:
29649 case DW_OP_bra:
29650 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29651 can cause irrelevant differences in dw_loc_addr. */
29652 gcc_assert (valx1->val_class == dw_val_class_loc
29653 && valy1->val_class == dw_val_class_loc
29654 && (dwarf_split_debug_info
29655 || x->dw_loc_addr == y->dw_loc_addr));
29656 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29657 case DW_OP_implicit_value:
29658 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29659 || valx2->val_class != valy2->val_class)
29660 return false;
29661 switch (valx2->val_class)
29662 {
29663 case dw_val_class_const:
29664 return valx2->v.val_int == valy2->v.val_int;
29665 case dw_val_class_vec:
29666 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29667 && valx2->v.val_vec.length == valy2->v.val_vec.length
29668 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29669 valx2->v.val_vec.elt_size
29670 * valx2->v.val_vec.length) == 0;
29671 case dw_val_class_const_double:
29672 return valx2->v.val_double.low == valy2->v.val_double.low
29673 && valx2->v.val_double.high == valy2->v.val_double.high;
29674 case dw_val_class_wide_int:
29675 return *valx2->v.val_wide == *valy2->v.val_wide;
29676 case dw_val_class_addr:
29677 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29678 default:
29679 gcc_unreachable ();
29680 }
29681 case DW_OP_bregx:
29682 case DW_OP_bit_piece:
29683 return valx1->v.val_int == valy1->v.val_int
29684 && valx2->v.val_int == valy2->v.val_int;
29685 case DW_OP_addr:
29686 hash_addr:
29687 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29688 case DW_OP_GNU_addr_index:
29689 case DW_OP_GNU_const_index:
29690 {
29691 rtx ax1 = valx1->val_entry->addr.rtl;
29692 rtx ay1 = valy1->val_entry->addr.rtl;
29693 return rtx_equal_p (ax1, ay1);
29694 }
29695 case DW_OP_implicit_pointer:
29696 case DW_OP_GNU_implicit_pointer:
29697 return valx1->val_class == dw_val_class_die_ref
29698 && valx1->val_class == valy1->val_class
29699 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29700 && valx2->v.val_int == valy2->v.val_int;
29701 case DW_OP_entry_value:
29702 case DW_OP_GNU_entry_value:
29703 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29704 case DW_OP_const_type:
29705 case DW_OP_GNU_const_type:
29706 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29707 || valx2->val_class != valy2->val_class)
29708 return false;
29709 switch (valx2->val_class)
29710 {
29711 case dw_val_class_const:
29712 return valx2->v.val_int == valy2->v.val_int;
29713 case dw_val_class_vec:
29714 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29715 && valx2->v.val_vec.length == valy2->v.val_vec.length
29716 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29717 valx2->v.val_vec.elt_size
29718 * valx2->v.val_vec.length) == 0;
29719 case dw_val_class_const_double:
29720 return valx2->v.val_double.low == valy2->v.val_double.low
29721 && valx2->v.val_double.high == valy2->v.val_double.high;
29722 case dw_val_class_wide_int:
29723 return *valx2->v.val_wide == *valy2->v.val_wide;
29724 default:
29725 gcc_unreachable ();
29726 }
29727 case DW_OP_regval_type:
29728 case DW_OP_deref_type:
29729 case DW_OP_GNU_regval_type:
29730 case DW_OP_GNU_deref_type:
29731 return valx1->v.val_int == valy1->v.val_int
29732 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29733 case DW_OP_convert:
29734 case DW_OP_reinterpret:
29735 case DW_OP_GNU_convert:
29736 case DW_OP_GNU_reinterpret:
29737 if (valx1->val_class != valy1->val_class)
29738 return false;
29739 if (valx1->val_class == dw_val_class_unsigned_const)
29740 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29741 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29742 case DW_OP_GNU_parameter_ref:
29743 return valx1->val_class == dw_val_class_die_ref
29744 && valx1->val_class == valy1->val_class
29745 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29746 default:
29747 /* Other codes have no operands. */
29748 return true;
29749 }
29750 }
29751
29752 /* Return true if DWARF location expressions X and Y are the same. */
29753
29754 static inline bool
29755 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29756 {
29757 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29758 if (x->dw_loc_opc != y->dw_loc_opc
29759 || x->dtprel != y->dtprel
29760 || !compare_loc_operands (x, y))
29761 break;
29762 return x == NULL && y == NULL;
29763 }
29764
29765 /* Hashtable helpers. */
29766
29767 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29768 {
29769 static inline hashval_t hash (const dw_loc_list_struct *);
29770 static inline bool equal (const dw_loc_list_struct *,
29771 const dw_loc_list_struct *);
29772 };
29773
29774 /* Return precomputed hash of location list X. */
29775
29776 inline hashval_t
29777 loc_list_hasher::hash (const dw_loc_list_struct *x)
29778 {
29779 return x->hash;
29780 }
29781
29782 /* Return true if location lists A and B are the same. */
29783
29784 inline bool
29785 loc_list_hasher::equal (const dw_loc_list_struct *a,
29786 const dw_loc_list_struct *b)
29787 {
29788 if (a == b)
29789 return 1;
29790 if (a->hash != b->hash)
29791 return 0;
29792 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29793 if (strcmp (a->begin, b->begin) != 0
29794 || strcmp (a->end, b->end) != 0
29795 || (a->section == NULL) != (b->section == NULL)
29796 || (a->section && strcmp (a->section, b->section) != 0)
29797 || !compare_locs (a->expr, b->expr))
29798 break;
29799 return a == NULL && b == NULL;
29800 }
29801
29802 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29803
29804
29805 /* Recursively optimize location lists referenced from DIE
29806 children and share them whenever possible. */
29807
29808 static void
29809 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29810 {
29811 dw_die_ref c;
29812 dw_attr_node *a;
29813 unsigned ix;
29814 dw_loc_list_struct **slot;
29815
29816 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29817 if (AT_class (a) == dw_val_class_loc_list)
29818 {
29819 dw_loc_list_ref list = AT_loc_list (a);
29820 /* TODO: perform some optimizations here, before hashing
29821 it and storing into the hash table. */
29822 hash_loc_list (list);
29823 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
29824 if (*slot == NULL)
29825 *slot = list;
29826 else
29827 a->dw_attr_val.v.val_loc_list = *slot;
29828 }
29829
29830 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29831 }
29832
29833
29834 /* Recursively assign each location list a unique index into the debug_addr
29835 section. */
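/* Each entry's begin label is given a slot in the address table via
   add_addr_table_entry, so that with -gsplit-dwarf the location list
   can refer to that address by index instead of by a relocation
   against the label.  */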
29836
29837 static void
29838 index_location_lists (dw_die_ref die)
29839 {
29840 dw_die_ref c;
29841 dw_attr_node *a;
29842 unsigned ix;
29843
29844 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29845 if (AT_class (a) == dw_val_class_loc_list)
29846 {
29847 dw_loc_list_ref list = AT_loc_list (a);
29848 dw_loc_list_ref curr;
29849 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29850 {
29851 /* Don't index an entry that has already been indexed
29852 or won't be output. */
29853 if (curr->begin_entry != NULL
29854 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29855 continue;
29856
29857 curr->begin_entry
29858 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29859 }
29860 }
29861
29862 FOR_EACH_CHILD (die, c, index_location_lists (c));
29863 }
29864
29865 /* Optimize location lists referenced from DIE
29866 children and share them whenever possible. */
29867
29868 static void
29869 optimize_location_lists (dw_die_ref die)
29870 {
29871 loc_list_hash_type htab (500);
29872 optimize_location_lists_1 (die, &htab);
29873 }
29874 \f
29875 /* Traverse the limbo die list, and add parent/child links. The only
29876 dies without parents that should be here are concrete instances of
29877 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29878 For concrete instances, we can get the parent die from the abstract
29879 instance. */
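/* A typical case: a nested function whose enclosing lexical block was
   optimized away ends up here without a parent; its parent DIE is then
   recovered from the abstract origin or, failing that, from the
   context of the decl or type the DIE was created for.  */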
29880
29881 static void
29882 flush_limbo_die_list (void)
29883 {
29884 limbo_die_node *node;
29885
29886 /* get_context_die calls force_decl_die, which can put new DIEs on the
29887 limbo list in LTO mode when nested functions are put in a different
29888 partition than that of their parent function. */
29889 while ((node = limbo_die_list))
29890 {
29891 dw_die_ref die = node->die;
29892 limbo_die_list = node->next;
29893
29894 if (die->die_parent == NULL)
29895 {
29896 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29897
29898 if (origin && origin->die_parent)
29899 add_child_die (origin->die_parent, die);
29900 else if (is_cu_die (die))
29901 ;
29902 else if (seen_error ())
29903 /* It's OK to be confused by errors in the input. */
29904 add_child_die (comp_unit_die (), die);
29905 else
29906 {
29907 /* In certain situations, the lexical block containing a
29908 nested function can be optimized away, which results
29909 in the nested function die being orphaned. Likewise
29910 with the return type of that nested function. Force
29911 this to be a child of the containing function.
29912
29913 It may happen that even the containing function got fully
29914 inlined and optimized out. In that case we are lost and
29915 assign the empty child. This should not be a big issue as
29916 the function is likely unreachable too. */
29917 gcc_assert (node->created_for);
29918
29919 if (DECL_P (node->created_for))
29920 origin = get_context_die (DECL_CONTEXT (node->created_for));
29921 else if (TYPE_P (node->created_for))
29922 origin = scope_die_for (node->created_for, comp_unit_die ());
29923 else
29924 origin = comp_unit_die ();
29925
29926 add_child_die (origin, die);
29927 }
29928 }
29929 }
29930 }
29931
29932 /* Reset DIEs so we can output them again. */
29933
29934 static void
29935 reset_dies (dw_die_ref die)
29936 {
29937 dw_die_ref c;
29938
29939 /* Remove stuff we re-generate. */
29940 die->die_mark = 0;
29941 die->die_offset = 0;
29942 die->die_abbrev = 0;
29943 remove_AT (die, DW_AT_sibling);
29944
29945 FOR_EACH_CHILD (die, c, reset_dies (c));
29946 }
29947
29948 /* Output stuff that dwarf requires at the end of every file,
29949 and generate the DWARF-2 debugging info. */
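/* Rough order of events below: flush the limbo list, resolve addresses,
   add sibling attributes, emit terminator labels for the code sections,
   then write out .debug_info, .debug_abbrev, .debug_loc, .debug_aranges,
   .debug_ranges (or .debug_rnglists), the macro section, .debug_line and
   the string sections.  */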
29950
29951 static void
29952 dwarf2out_finish (const char *)
29953 {
29954 comdat_type_node *ctnode;
29955 dw_die_ref main_comp_unit_die;
29956 unsigned char checksum[16];
29957 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
29958
29959 /* Flush out any latecomers to the limbo party. */
29960 flush_limbo_die_list ();
29961
29962 if (flag_checking)
29963 {
29964 verify_die (comp_unit_die ());
29965 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29966 verify_die (node->die);
29967 }
29968
29969 /* We shouldn't have any symbols with delayed asm names for
29970 DIEs generated after early finish. */
29971 gcc_assert (deferred_asm_name == NULL);
29972
29973 gen_remaining_tmpl_value_param_die_attribute ();
29974
29975 if (flag_generate_lto || flag_generate_offload)
29976 {
29977 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
29978
29979 /* Prune stuff so that dwarf2out_finish runs successfully
29980 for the fat part of the object. */
29981 reset_dies (comp_unit_die ());
29982 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29983 reset_dies (node->die);
29984
29985 hash_table<comdat_type_hasher> comdat_type_table (100);
29986 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29987 {
29988 comdat_type_node **slot
29989 = comdat_type_table.find_slot (ctnode, INSERT);
29990
29991 /* Don't reset types twice. */
29992 if (*slot != HTAB_EMPTY_ENTRY)
29993 continue;
29994
29995 /* Reset the DIEs of each comdat type unit as well, so that
29996 they too can be output again for the fat part of the
29997 object. */
29998 if (debug_info_level >= DINFO_LEVEL_TERSE)
29999 reset_dies (ctnode->root_die);
30000
30001 *slot = ctnode;
30002 }
30003
30004 /* Reset die CU symbol so we don't output it twice. */
30005 comp_unit_die ()->die_id.die_symbol = NULL;
30006
30007 /* Remove DW_AT_macro from the early output. */
30008 if (have_macinfo)
30009 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
30010
30011 /* Remove indirect string decisions. */
30012 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
30013 }
30014
30015 #if ENABLE_ASSERT_CHECKING
30016 {
30017 dw_die_ref die = comp_unit_die (), c;
30018 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
30019 }
30020 #endif
30021 resolve_addr (comp_unit_die ());
30022 move_marked_base_types ();
30023
30024 /* Initialize sections and labels used for actual assembler output. */
30025 unsigned generation = init_sections_and_labels (false);
30026
30027 /* Traverse the DIE's and add sibling attributes to those DIE's that
30028 have children. */
30029 add_sibling_attributes (comp_unit_die ());
30030 limbo_die_node *node;
30031 for (node = cu_die_list; node; node = node->next)
30032 add_sibling_attributes (node->die);
30033 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30034 add_sibling_attributes (ctnode->root_die);
30035
30036 /* When splitting DWARF info, we put some attributes in the
30037 skeleton compile_unit DIE that remains in the .o, while
30038 most attributes go in the DWO compile_unit_die. */
30039 if (dwarf_split_debug_info)
30040 {
30041 limbo_die_node *cu;
30042 main_comp_unit_die = gen_compile_unit_die (NULL);
30043 if (dwarf_version >= 5)
30044 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
30045 cu = limbo_die_list;
30046 gcc_assert (cu->die == main_comp_unit_die);
30047 limbo_die_list = limbo_die_list->next;
30048 cu->next = cu_die_list;
30049 cu_die_list = cu;
30050 }
30051 else
30052 main_comp_unit_die = comp_unit_die ();
30053
30054 /* Output a terminator label for the .text section. */
30055 switch_to_section (text_section);
30056 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
30057 if (cold_text_section)
30058 {
30059 switch_to_section (cold_text_section);
30060 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
30061 }
30062
30063 /* We can only use the low/high_pc attributes if all of the code was
30064 in .text. */
30065 if (!have_multiple_function_sections
30066 || (dwarf_version < 3 && dwarf_strict))
30067 {
30068 /* Don't add if the CU has no associated code. */
30069 if (text_section_used)
30070 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
30071 text_end_label, true);
30072 }
30073 else
30074 {
30075 unsigned fde_idx;
30076 dw_fde_ref fde;
30077 bool range_list_added = false;
30078
30079 if (text_section_used)
30080 add_ranges_by_labels (main_comp_unit_die, text_section_label,
30081 text_end_label, &range_list_added, true);
30082 if (cold_text_section_used)
30083 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
30084 cold_end_label, &range_list_added, true);
30085
30086 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
30087 {
30088 if (DECL_IGNORED_P (fde->decl))
30089 continue;
30090 if (!fde->in_std_section)
30091 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
30092 fde->dw_fde_end, &range_list_added,
30093 true);
30094 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
30095 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
30096 fde->dw_fde_second_end, &range_list_added,
30097 true);
30098 }
30099
30100 if (range_list_added)
30101 {
30102 /* We need to give .debug_loc and .debug_ranges an appropriate
30103 "base address". Use zero so that these addresses become
30104 absolute. Historically, we've emitted the unexpected
30105 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
30106 Emit both to give time for other tools to adapt. */
30107 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
30108 if (! dwarf_strict && dwarf_version < 4)
30109 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
30110
30111 add_ranges (NULL);
30112 }
30113 }
30114
30115 /* AIX Assembler inserts the length, so adjust the reference to match the
30116 offset expected by debuggers. */
30117 strcpy (dl_section_ref, debug_line_section_label);
30118 if (XCOFF_DEBUGGING_INFO)
30119 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
30120
30121 if (debug_info_level >= DINFO_LEVEL_TERSE)
30122 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
30123 dl_section_ref);
30124
30125 if (have_macinfo)
30126 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30127 macinfo_section_label);
30128
30129 if (dwarf_split_debug_info)
30130 {
30131 if (have_location_lists)
30132 {
30133 if (dwarf_version >= 5)
30134 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
30135 loc_section_label);
30136 /* optimize_location_lists calculates the size of the lists,
30137 so index them first, and assign indices to the entries.
30138 Although optimize_location_lists will remove entries from
30139 the table, it only does so for duplicates, and therefore
30140 only reduces ref_counts to 1. */
30141 index_location_lists (comp_unit_die ());
30142 }
30143
30144 if (addr_index_table != NULL)
30145 {
30146 unsigned int index = 0;
30147 addr_index_table
30148 ->traverse_noresize<unsigned int *, index_addr_table_entry>
30149 (&index);
30150 }
30151 }
30152
30153 loc_list_idx = 0;
30154 if (have_location_lists)
30155 {
30156 optimize_location_lists (comp_unit_die ());
30157 /* And finally assign indexes to the entries for -gsplit-dwarf. */
30158 if (dwarf_version >= 5 && dwarf_split_debug_info)
30159 assign_location_list_indexes (comp_unit_die ());
30160 }
30161
30162 save_macinfo_strings ();
30163
30164 if (dwarf_split_debug_info)
30165 {
30166 unsigned int index = 0;
30167
30168 /* Add attributes common to skeleton compile_units and
30169 type_units. Because these attributes include strings, it
30170 must be done before freezing the string table. Top-level
30171 skeleton die attrs are added when the skeleton type unit is
30172 created, so ensure it is created by this point. */
30173 add_top_level_skeleton_die_attrs (main_comp_unit_die);
30174 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30175 }
30176
30177 /* Output all of the compilation units. We put the main one last so that
30178 the offsets are available to output_pubnames. */
30179 for (node = cu_die_list; node; node = node->next)
30180 output_comp_unit (node->die, 0, NULL);
30181
30182 hash_table<comdat_type_hasher> comdat_type_table (100);
30183 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30184 {
30185 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30186
30187 /* Don't output duplicate types. */
30188 if (*slot != HTAB_EMPTY_ENTRY)
30189 continue;
30190
30191 /* Add a pointer to the line table for the main compilation unit
30192 so that the debugger can make sense of DW_AT_decl_file
30193 attributes. */
30194 if (debug_info_level >= DINFO_LEVEL_TERSE)
30195 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30196 (!dwarf_split_debug_info
30197 ? dl_section_ref
30198 : debug_skeleton_line_section_label));
30199
30200 output_comdat_type_unit (ctnode);
30201 *slot = ctnode;
30202 }
30203
30204 if (dwarf_split_debug_info)
30205 {
30206 int mark;
30207 struct md5_ctx ctx;
30208
30209 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
30210 index_rnglists ();
30211
30212 /* Compute a checksum of the comp_unit to use as the dwo_id. */
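/* die_checksum feeds the whole comp-unit DIE tree into the MD5 context;
   the digest serves as an identifier that lets consumers match the
   skeleton unit left in the object file with the corresponding
   split-out debug info.  */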
30213 md5_init_ctx (&ctx);
30214 mark = 0;
30215 die_checksum (comp_unit_die (), &ctx, &mark);
30216 unmark_all_dies (comp_unit_die ());
30217 md5_finish_ctx (&ctx, checksum);
30218
30219 if (dwarf_version < 5)
30220 {
30221 /* Use the first 8 bytes of the checksum as the dwo_id,
30222 and add it to both comp-unit DIEs. */
30223 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
30224 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
30225 }
30226
30227 /* Add the base offset of the ranges table to the skeleton
30228 comp-unit DIE. */
30229 if (!vec_safe_is_empty (ranges_table))
30230 {
30231 if (dwarf_version >= 5)
30232 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
30233 ranges_base_label);
30234 else
30235 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
30236 ranges_section_label);
30237 }
30238
30239 switch_to_section (debug_addr_section);
30240 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
30241 output_addr_table ();
30242 }
30243
30244 /* Output the main compilation unit if non-empty or if .debug_macinfo
30245 or .debug_macro will be emitted. */
30246 output_comp_unit (comp_unit_die (), have_macinfo,
30247 dwarf_split_debug_info ? checksum : NULL);
30248
30249 if (dwarf_split_debug_info && info_section_emitted)
30250 output_skeleton_debug_sections (main_comp_unit_die, checksum);
30251
30252 /* Output the abbreviation table. */
30253 if (vec_safe_length (abbrev_die_table) != 1)
30254 {
30255 switch_to_section (debug_abbrev_section);
30256 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30257 output_abbrev_section ();
30258 }
30259
30260 /* Output location list section if necessary. */
30261 if (have_location_lists)
30262 {
30263 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
30264 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
30265 /* Output the location lists info. */
30266 switch_to_section (debug_loc_section);
30267 if (dwarf_version >= 5)
30268 {
30269 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
30270 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
30271 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
30272 dw2_asm_output_data (4, 0xffffffff,
30273 "Initial length escape value indicating "
30274 "64-bit DWARF extension");
30275 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
30276 "Length of Location Lists");
30277 ASM_OUTPUT_LABEL (asm_out_file, l1);
30278 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
30279 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
30280 dw2_asm_output_data (1, 0, "Segment Size");
30281 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
30282 "Offset Entry Count");
30283 }
30284 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
30285 if (dwarf_version >= 5 && dwarf_split_debug_info)
30286 {
30287 unsigned int save_loc_list_idx = loc_list_idx;
30288 loc_list_idx = 0;
30289 output_loclists_offsets (comp_unit_die ());
30290 gcc_assert (save_loc_list_idx == loc_list_idx);
30291 }
30292 output_location_lists (comp_unit_die ());
30293 if (dwarf_version >= 5)
30294 ASM_OUTPUT_LABEL (asm_out_file, l2);
30295 }
30296
30297 output_pubtables ();
30298
30299 /* Output the address range information if a CU (.debug_info section)
30300 was emitted. We output an empty table even if we had no functions
30301 to put in it. This is because the consumer has no way to tell the
30302 difference between an empty table that we omitted and failure to
30303 generate a table that would have contained data. */
30304 if (info_section_emitted)
30305 {
30306 switch_to_section (debug_aranges_section);
30307 output_aranges ();
30308 }
30309
30310 /* Output ranges section if necessary. */
30311 if (!vec_safe_is_empty (ranges_table))
30312 {
30313 if (dwarf_version >= 5)
30314 output_rnglists (generation);
30315 else
30316 output_ranges ();
30317 }
30318
30319 /* Have to end the macro section. */
30320 if (have_macinfo)
30321 {
30322 switch_to_section (debug_macinfo_section);
30323 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30324 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
30325 : debug_skeleton_line_section_label, false);
30326 dw2_asm_output_data (1, 0, "End compilation unit");
30327 }
30328
30329 /* Output the source line correspondence table. We must do this
30330 even if there is no line information. Otherwise, on an empty
30331 translation unit, we will generate a present, but empty,
30332 .debug_info section. IRIX 6.5 `nm' will then complain when
30333 examining the file. This is done late so that any filenames
30334 used by the debug_info section are marked as 'used'. */
30335 switch_to_section (debug_line_section);
30336 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
30337 if (! DWARF2_ASM_LINE_DEBUG_INFO)
30338 output_line_info (false);
30339
30340 if (dwarf_split_debug_info && info_section_emitted)
30341 {
30342 switch_to_section (debug_skeleton_line_section);
30343 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30344 output_line_info (true);
30345 }
30346
30347 /* If we emitted any indirect strings, output the string table too. */
30348 if (debug_str_hash || skeleton_debug_str_hash)
30349 output_indirect_strings ();
30350 if (debug_line_str_hash)
30351 {
30352 switch_to_section (debug_line_str_section);
30353 const enum dwarf_form form = DW_FORM_line_strp;
30354 debug_line_str_hash->traverse<enum dwarf_form,
30355 output_indirect_string> (form);
30356 }
30357 }
30358
30359 /* Returns a hash value for X (which really is a variable_value_struct). */
30360
30361 inline hashval_t
30362 variable_value_hasher::hash (variable_value_struct *x)
30363 {
30364 return (hashval_t) x->decl_id;
30365 }
30366
30367 /* Return nonzero if decl_id of variable_value_struct X is the same as
30368 UID of decl Y. */
30369
30370 inline bool
30371 variable_value_hasher::equal (variable_value_struct *x, tree y)
30372 {
30373 return x->decl_id == DECL_UID (y);
30374 }
30375
30376 /* Helper function for resolve_variable_value, handle
30377 DW_OP_GNU_variable_value in one location expression.
30378 Return true if exprloc has been changed into loclist. */
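/* In outline: each DW_OP_GNU_variable_value that still has a decl
   operand is rewritten to reference that decl's DIE when one exists;
   otherwise a location for the decl is computed and either spliced
   into the expression in place or, if it is a multi-entry list and the
   attribute allows a loclist, the whole exprloc is converted into a
   location list.  */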
30379
30380 static bool
30381 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30382 {
30383 dw_loc_descr_ref next;
30384 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
30385 {
30386 next = loc->dw_loc_next;
30387 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
30388 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
30389 continue;
30390
30391 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30392 if (DECL_CONTEXT (decl) != current_function_decl)
30393 continue;
30394
30395 dw_die_ref ref = lookup_decl_die (decl);
30396 if (ref)
30397 {
30398 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30399 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30400 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30401 continue;
30402 }
30403 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
30404 if (l == NULL)
30405 continue;
30406 if (l->dw_loc_next)
30407 {
30408 if (AT_class (a) != dw_val_class_loc)
30409 continue;
30410 switch (a->dw_attr)
30411 {
30412 /* Following attributes allow both exprloc and loclist
30413 classes, so we can change them into a loclist. */
30414 case DW_AT_location:
30415 case DW_AT_string_length:
30416 case DW_AT_return_addr:
30417 case DW_AT_data_member_location:
30418 case DW_AT_frame_base:
30419 case DW_AT_segment:
30420 case DW_AT_static_link:
30421 case DW_AT_use_location:
30422 case DW_AT_vtable_elem_location:
30423 if (prev)
30424 {
30425 prev->dw_loc_next = NULL;
30426 prepend_loc_descr_to_each (l, AT_loc (a));
30427 }
30428 if (next)
30429 add_loc_descr_to_each (l, next);
30430 a->dw_attr_val.val_class = dw_val_class_loc_list;
30431 a->dw_attr_val.val_entry = NULL;
30432 a->dw_attr_val.v.val_loc_list = l;
30433 have_location_lists = true;
30434 return true;
30435 /* Following attributes allow both exprloc and reference,
30436 so if the whole expression is DW_OP_GNU_variable_value alone
30437 we could transform it into reference. */
30438 case DW_AT_byte_size:
30439 case DW_AT_bit_size:
30440 case DW_AT_lower_bound:
30441 case DW_AT_upper_bound:
30442 case DW_AT_bit_stride:
30443 case DW_AT_count:
30444 case DW_AT_allocated:
30445 case DW_AT_associated:
30446 case DW_AT_byte_stride:
30447 if (prev == NULL && next == NULL)
30448 break;
30449 /* FALLTHRU */
30450 default:
30451 if (dwarf_strict)
30452 continue;
30453 break;
30454 }
30455 /* Create DW_TAG_variable that we can refer to. */
30456 gen_decl_die (decl, NULL_TREE, NULL,
30457 lookup_decl_die (current_function_decl));
30458 ref = lookup_decl_die (decl);
30459 if (ref)
30460 {
30461 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30462 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30463 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30464 }
30465 continue;
30466 }
30467 if (prev)
30468 {
30469 prev->dw_loc_next = l->expr;
30470 add_loc_descr (&prev->dw_loc_next, next);
30471 free_loc_descr (loc, NULL);
30472 next = prev->dw_loc_next;
30473 }
30474 else
30475 {
30476 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
30477 add_loc_descr (&loc, next);
30478 next = loc;
30479 }
30480 loc = prev;
30481 }
30482 return false;
30483 }
30484
30485 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
30486
30487 static void
30488 resolve_variable_value (dw_die_ref die)
30489 {
30490 dw_attr_node *a;
30491 dw_loc_list_ref loc;
30492 unsigned ix;
30493
30494 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30495 switch (AT_class (a))
30496 {
30497 case dw_val_class_loc:
30498 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
30499 break;
30500 /* FALLTHRU */
30501 case dw_val_class_loc_list:
30502 loc = AT_loc_list (a);
30503 gcc_assert (loc);
30504 for (; loc; loc = loc->dw_loc_next)
30505 resolve_variable_value_in_expr (a, loc->expr);
30506 break;
30507 default:
30508 break;
30509 }
30510 }
30511
30512 /* Attempt to optimize DW_OP_GNU_variable_value referring to
30513 temporaries in the current function. */
30514
30515 static void
30516 resolve_variable_values (void)
30517 {
30518 if (!variable_value_hash || !current_function_decl)
30519 return;
30520
30521 struct variable_value_struct *node
30522 = variable_value_hash->find_with_hash (current_function_decl,
30523 DECL_UID (current_function_decl));
30524
30525 if (node == NULL)
30526 return;
30527
30528 unsigned int i;
30529 dw_die_ref die;
30530 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
30531 resolve_variable_value (die);
30532 }
30533
30534 /* Helper function for note_variable_value, handle one location
30535 expression. */
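/* Each DW_OP_GNU_variable_value still carrying a decl operand is either
   resolved to that decl's DIE right away, or the referencing DIE is
   recorded in a per-function table so that resolve_variable_values can
   retry once the containing function's DIEs have been generated.  */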
30536
30537 static void
30538 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
30539 {
30540 for (; loc; loc = loc->dw_loc_next)
30541 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
30542 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30543 {
30544 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30545 dw_die_ref ref = lookup_decl_die (decl);
30546 if (! ref && (flag_generate_lto || flag_generate_offload))
30547 {
30548 /* ??? This is somewhat a hack because we do not create DIEs
30549 for variables not in BLOCK trees early, but when generating
30550 early LTO output we need the dw_val_class_decl_ref to be
30551 fully resolved. For fat LTO objects we'd also like to
30552 undo this after LTO dwarf output. */
30553 gcc_assert (DECL_CONTEXT (decl));
30554 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
30555 gcc_assert (ctx != NULL);
30556 gen_decl_die (decl, NULL_TREE, NULL, ctx);
30557 ref = lookup_decl_die (decl);
30558 gcc_assert (ref != NULL);
30559 }
30560 if (ref)
30561 {
30562 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30563 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30564 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30565 continue;
30566 }
30567 if (VAR_P (decl)
30568 && DECL_CONTEXT (decl)
30569 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
30570 && lookup_decl_die (DECL_CONTEXT (decl)))
30571 {
30572 if (!variable_value_hash)
30573 variable_value_hash
30574 = hash_table<variable_value_hasher>::create_ggc (10);
30575
30576 tree fndecl = DECL_CONTEXT (decl);
30577 struct variable_value_struct *node;
30578 struct variable_value_struct **slot
30579 = variable_value_hash->find_slot_with_hash (fndecl,
30580 DECL_UID (fndecl),
30581 INSERT);
30582 if (*slot == NULL)
30583 {
30584 node = ggc_cleared_alloc<variable_value_struct> ();
30585 node->decl_id = DECL_UID (fndecl);
30586 *slot = node;
30587 }
30588 else
30589 node = *slot;
30590
30591 vec_safe_push (node->dies, die);
30592 }
30593 }
30594 }
30595
30596 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
30597 with dw_val_class_decl_ref operand. */
30598
30599 static void
30600 note_variable_value (dw_die_ref die)
30601 {
30602 dw_die_ref c;
30603 dw_attr_node *a;
30604 dw_loc_list_ref loc;
30605 unsigned ix;
30606
30607 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30608 switch (AT_class (a))
30609 {
30610 case dw_val_class_loc_list:
30611 loc = AT_loc_list (a);
30612 gcc_assert (loc);
30613 if (!loc->noted_variable_value)
30614 {
30615 loc->noted_variable_value = 1;
30616 for (; loc; loc = loc->dw_loc_next)
30617 note_variable_value_in_expr (die, loc->expr);
30618 }
30619 break;
30620 case dw_val_class_loc:
30621 note_variable_value_in_expr (die, AT_loc (a));
30622 break;
30623 default:
30624 break;
30625 }
30626
30627 /* Mark children. */
30628 FOR_EACH_CHILD (die, c, note_variable_value (c));
30629 }
30630
30631 /* Perform any cleanups needed after the early debug generation pass
30632 has run. */
30633
30634 static void
30635 dwarf2out_early_finish (const char *filename)
30636 {
30637 set_early_dwarf s;
30638
30639 /* PCH might result in DW_AT_producer string being restored from the
30640 header compilation, so always fill it with an empty string initially
30641 and overwrite only here. */
30642 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
30643 producer_string = gen_producer_string ();
30644 producer->dw_attr_val.v.val_str->refcount--;
30645 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
30646
30647 /* Add the name for the main input file now. We delayed this from
30648 dwarf2out_init to avoid complications with PCH. */
30649 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
30650 add_comp_dir_attribute (comp_unit_die ());
30651
30652 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
30653 DW_AT_comp_dir into .debug_line_str section. */
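/* Strings no longer than DWARF_OFFSET_SIZE (including the terminating
   NUL) are left inline below, since a DW_FORM_line_strp offset would
   take at least as much space as the string itself.  */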
30654 if (!DWARF2_ASM_LINE_DEBUG_INFO
30655 && dwarf_version >= 5
30656 && DWARF5_USE_DEBUG_LINE_STR)
30657 {
30658 for (int i = 0; i < 2; i++)
30659 {
30660 dw_attr_node *a = get_AT (comp_unit_die (),
30661 i ? DW_AT_comp_dir : DW_AT_name);
30662 if (a == NULL
30663 || AT_class (a) != dw_val_class_str
30664 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
30665 continue;
30666
30667 if (! debug_line_str_hash)
30668 debug_line_str_hash
30669 = hash_table<indirect_string_hasher>::create_ggc (10);
30670
30671 struct indirect_string_node *node
30672 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
30673 set_indirect_string (node);
30674 node->form = DW_FORM_line_strp;
30675 a->dw_attr_val.v.val_str->refcount--;
30676 a->dw_attr_val.v.val_str = node;
30677 }
30678 }
30679
30680 /* With LTO early dwarf was really finished at compile-time, so make
30681 sure to adjust the phase after annotating the LTRANS CU DIE. */
30682 if (in_lto_p)
30683 {
30684 early_dwarf_finished = true;
30685 return;
30686 }
30687
30688 /* Walk through the list of incomplete types again, trying once more to
30689 emit full debugging info for them. */
30690 retry_incomplete_types ();
30691
30692 /* The point here is to flush out the limbo list so that it is empty
30693 and we don't need to stream it for LTO. */
30694 flush_limbo_die_list ();
30695
30696 gen_scheduled_generic_parms_dies ();
30697 gen_remaining_tmpl_value_param_die_attribute ();
30698
30699 /* Add DW_AT_linkage_name for all deferred DIEs. */
30700 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
30701 {
30702 tree decl = node->created_for;
30703 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
30704 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
30705 ended up in deferred_asm_name before we knew it was
30706 constant and never written to disk. */
30707 && DECL_ASSEMBLER_NAME (decl))
30708 {
30709 add_linkage_attr (node->die, decl);
30710 move_linkage_attr (node->die);
30711 }
30712 }
30713 deferred_asm_name = NULL;
30714
30715 if (flag_eliminate_unused_debug_types)
30716 prune_unused_types ();
30717
30718 /* Generate separate COMDAT sections for type DIEs. */
30719 if (use_debug_types)
30720 {
30721 break_out_comdat_types (comp_unit_die ());
30722
30723 /* Each new type_unit DIE was added to the limbo die list when created.
30724 Since these have all been added to comdat_type_list, clear the
30725 limbo die list. */
30726 limbo_die_list = NULL;
30727
30728 /* For each new comdat type unit, copy declarations for incomplete
30729 types to make the new unit self-contained (i.e., no direct
30730 references to the main compile unit). */
30731 for (comdat_type_node *ctnode = comdat_type_list;
30732 ctnode != NULL; ctnode = ctnode->next)
30733 copy_decls_for_unworthy_types (ctnode->root_die);
30734 copy_decls_for_unworthy_types (comp_unit_die ());
30735
30736 /* In the process of copying declarations from one unit to another,
30737 we may have left some declarations behind that are no longer
30738 referenced. Prune them. */
30739 prune_unused_types ();
30740 }
30741
30742 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
30743 with dw_val_class_decl_ref operand. */
30744 note_variable_value (comp_unit_die ());
30745 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30746 note_variable_value (node->die);
30747 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
30748 ctnode = ctnode->next)
30749 note_variable_value (ctnode->root_die);
30750 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30751 note_variable_value (node->die);
30752
30753 /* The AT_pubnames attribute needs to go in all skeleton dies, including
30754 both the main_cu and all skeleton TUs. Making this call unconditional
30755 would end up either adding a second copy of the AT_pubnames attribute, or
30756 requiring a special case in add_top_level_skeleton_die_attrs. */
30757 if (!dwarf_split_debug_info)
30758 add_AT_pubnames (comp_unit_die ());
30759
30760 /* The early debug phase is now finished. */
30761 early_dwarf_finished = true;
30762
30763 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
30764 if (!flag_generate_lto && !flag_generate_offload)
30765 return;
30766
30767 /* Now that we are going to output for LTO, initialize sections and
30768 labels to the LTO variants. We don't need a random-seed postfix like
30769 other LTO sections have, as linking the LTO debug sections into one
30770 in a partial link is fine. */
30771 init_sections_and_labels (true);
30772
30773 /* The output below is modeled after dwarf2out_finish with all
30774 location related output removed and some LTO specific changes.
30775 Some refactoring might make both smaller and easier to match up. */
30776
30777 /* Traverse the DIE's and add sibling attributes to those DIE's
30778 that have children. */
30779 add_sibling_attributes (comp_unit_die ());
30780 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30781 add_sibling_attributes (node->die);
30782 for (comdat_type_node *ctnode = comdat_type_list;
30783 ctnode != NULL; ctnode = ctnode->next)
30784 add_sibling_attributes (ctnode->root_die);
30785
30786 if (have_macinfo)
30787 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30788 macinfo_section_label);
30789
30790 save_macinfo_strings ();
30791
30792 /* Output all of the compilation units. We put the main one last so that
30793 the offsets are available to output_pubnames. */
30794 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30795 output_comp_unit (node->die, 0, NULL);
30796
30797 hash_table<comdat_type_hasher> comdat_type_table (100);
30798 for (comdat_type_node *ctnode = comdat_type_list;
30799 ctnode != NULL; ctnode = ctnode->next)
30800 {
30801 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30802
30803 /* Don't output duplicate types. */
30804 if (*slot != HTAB_EMPTY_ENTRY)
30805 continue;
30806
30807 /* Add a pointer to the line table for the main compilation unit
30808 so that the debugger can make sense of DW_AT_decl_file
30809 attributes. */
30810 if (debug_info_level >= DINFO_LEVEL_TERSE)
30811 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30812 (!dwarf_split_debug_info
30813 ? debug_line_section_label
30814 : debug_skeleton_line_section_label));
30815
30816 output_comdat_type_unit (ctnode);
30817 *slot = ctnode;
30818 }
30819
30820 /* Stick a unique symbol to the main debuginfo section. */
30821 compute_comp_unit_symbol (comp_unit_die ());
30822
30823 /* Output the main compilation unit. We always need it if only for
30824 the CU symbol. */
30825 output_comp_unit (comp_unit_die (), true, NULL);
30826
30827 /* Output the abbreviation table. */
30828 if (vec_safe_length (abbrev_die_table) != 1)
30829 {
30830 switch_to_section (debug_abbrev_section);
30831 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30832 output_abbrev_section ();
30833 }
30834
30835 /* Have to end the macro section. */
30836 if (have_macinfo)
30837 {
30838 /* We have to save macinfo state if we need to output it again
30839 for the FAT part of the object. */
30840 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
30841 if (flag_fat_lto_objects)
30842 macinfo_table = macinfo_table->copy ();
30843
30844 switch_to_section (debug_macinfo_section);
30845 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30846 output_macinfo (debug_skeleton_line_section_label, true);
30847 dw2_asm_output_data (1, 0, "End compilation unit");
30848
30849 /* Emit a skeleton debug_line section. */
30850 switch_to_section (debug_skeleton_line_section);
30851 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30852 output_line_info (true);
30853
30854 if (flag_fat_lto_objects)
30855 {
30856 vec_free (macinfo_table);
30857 macinfo_table = saved_macinfo_table;
30858 }
30859 }
30860
30861
30862 /* If we emitted any indirect strings, output the string table too. */
30863 if (debug_str_hash || skeleton_debug_str_hash)
30864 output_indirect_strings ();
30865
30866 /* Switch back to the text section. */
30867 switch_to_section (text_section);
30868 }
30869
30870 /* Reset all state within dwarf2out.c so that we can rerun the compiler
30871 within the same process. For use by toplev::finalize. */
30872
30873 void
30874 dwarf2out_c_finalize (void)
30875 {
30876 last_var_location_insn = NULL;
30877 cached_next_real_insn = NULL;
30878 used_rtx_array = NULL;
30879 incomplete_types = NULL;
30880 decl_scope_table = NULL;
30881 debug_info_section = NULL;
30882 debug_skeleton_info_section = NULL;
30883 debug_abbrev_section = NULL;
30884 debug_skeleton_abbrev_section = NULL;
30885 debug_aranges_section = NULL;
30886 debug_addr_section = NULL;
30887 debug_macinfo_section = NULL;
30888 debug_line_section = NULL;
30889 debug_skeleton_line_section = NULL;
30890 debug_loc_section = NULL;
30891 debug_pubnames_section = NULL;
30892 debug_pubtypes_section = NULL;
30893 debug_str_section = NULL;
30894 debug_line_str_section = NULL;
30895 debug_str_dwo_section = NULL;
30896 debug_str_offsets_section = NULL;
30897 debug_ranges_section = NULL;
30898 debug_frame_section = NULL;
30899 fde_vec = NULL;
30900 debug_str_hash = NULL;
30901 debug_line_str_hash = NULL;
30902 skeleton_debug_str_hash = NULL;
30903 dw2_string_counter = 0;
30904 have_multiple_function_sections = false;
30905 text_section_used = false;
30906 cold_text_section_used = false;
30907 cold_text_section = NULL;
30908 current_unit_personality = NULL;
30909
30910 early_dwarf = false;
30911 early_dwarf_finished = false;
30912
30913 next_die_offset = 0;
30914 single_comp_unit_die = NULL;
30915 comdat_type_list = NULL;
30916 limbo_die_list = NULL;
30917 file_table = NULL;
30918 decl_die_table = NULL;
30919 common_block_die_table = NULL;
30920 decl_loc_table = NULL;
30921 call_arg_locations = NULL;
30922 call_arg_loc_last = NULL;
30923 call_site_count = -1;
30924 tail_call_site_count = -1;
30925 cached_dw_loc_list_table = NULL;
30926 abbrev_die_table = NULL;
30927 delete dwarf_proc_stack_usage_map;
30928 dwarf_proc_stack_usage_map = NULL;
30929 line_info_label_num = 0;
30930 cur_line_info_table = NULL;
30931 text_section_line_info = NULL;
30932 cold_text_section_line_info = NULL;
30933 separate_line_info = NULL;
30934 info_section_emitted = false;
30935 pubname_table = NULL;
30936 pubtype_table = NULL;
30937 macinfo_table = NULL;
30938 ranges_table = NULL;
30939 ranges_by_label = NULL;
30940 rnglist_idx = 0;
30941 have_location_lists = false;
30942 loclabel_num = 0;
30943 poc_label_num = 0;
30944 last_emitted_file = NULL;
30945 label_num = 0;
30946 tmpl_value_parm_die_table = NULL;
30947 generic_type_instances = NULL;
30948 frame_pointer_fb_offset = 0;
30949 frame_pointer_fb_offset_valid = false;
30950 base_types.release ();
30951 XDELETEVEC (producer_string);
30952 producer_string = NULL;
30953 }
30954
30955 #include "gt-dwarf2out.h"