gcc/dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
 24 the file numbers are used by .debug_info.  Alternatively, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
 47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
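
/* Illustrative example (not part of the original source): for a typical
   x86-64 prologue of "push %rbp; mov %rsp,%rbp", the CFI stream for the
   corresponding FDE amounts to roughly

     DW_CFA_def_cfa: rsp + 8           <- at entry, CFA is SP before the call
     DW_CFA_offset: rip at CFA-8       <- return address pushed by the call
     DW_CFA_advance_loc: past the push
     DW_CFA_def_cfa_offset: 16         <- the push moved SP down by 8
     DW_CFA_offset: rbp at CFA-16
     DW_CFA_advance_loc: past the mov
     DW_CFA_def_cfa_register: rbp      <- CFA now computed from the frame pointer

   i.e. the CFA rule changes as the prologue executes, which is what the
   "may change during the course of the function" note above refers to.  */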
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "debug.h"
87 #include "common/common-target.h"
88 #include "langhooks.h"
89 #include "lra.h"
90 #include "dumpfile.h"
91 #include "opts.h"
92 #include "tree-dfa.h"
93 #include "gdb/gdb-index.h"
94 #include "rtl-iter.h"
95 #include "stringpool.h"
96 #include "attribs.h"
97
98 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
99 int, bool);
100 static rtx_insn *last_var_location_insn;
101 static rtx_insn *cached_next_real_insn;
102 static void dwarf2out_decl (tree);
103
104 #ifndef XCOFF_DEBUGGING_INFO
105 #define XCOFF_DEBUGGING_INFO 0
106 #endif
107
108 #ifndef HAVE_XCOFF_DWARF_EXTRAS
109 #define HAVE_XCOFF_DWARF_EXTRAS 0
110 #endif
111
112 #ifdef VMS_DEBUGGING_INFO
113 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
114
115 /* Define this macro to be a nonzero value if the directory specifications
116 which are output in the debug info should end with a separator. */
117 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
118 /* Define this macro to evaluate to a nonzero value if GCC should refrain
119 from generating indirect strings in DWARF2 debug information, for instance
120 if your target is stuck with an old version of GDB that is unable to
121 process them properly or uses VMS Debug. */
122 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
123 #else
124 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
126 #endif
127
128 /* ??? Poison these here until it can be done generically. They've been
129 totally replaced in this file; make sure it stays that way. */
130 #undef DWARF2_UNWIND_INFO
131 #undef DWARF2_FRAME_INFO
132 #if (GCC_VERSION >= 3000)
133 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
134 #endif
135
136 /* The size of the target's pointer type. */
137 #ifndef PTR_SIZE
138 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
139 #endif
140
141 /* Array of RTXes referenced by the debugging information, which therefore
142 must be kept around forever. */
143 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
144
145 /* A pointer to the base of a list of incomplete types which might be
 146 completed at some later time.  incomplete_types needs to be a
 147 vec<tree, va_gc> * because we want to tell the garbage collector about
148 it. */
149 static GTY(()) vec<tree, va_gc> *incomplete_types;
150
151 /* A pointer to the base of a table of references to declaration
152 scopes. This table is a display which tracks the nesting
153 of declaration scopes at the current scope and containing
154 scopes. This table is used to find the proper place to
155 define type declaration DIE's. */
156 static GTY(()) vec<tree, va_gc> *decl_scope_table;
157
158 /* Pointers to various DWARF2 sections. */
159 static GTY(()) section *debug_info_section;
160 static GTY(()) section *debug_skeleton_info_section;
161 static GTY(()) section *debug_abbrev_section;
162 static GTY(()) section *debug_skeleton_abbrev_section;
163 static GTY(()) section *debug_aranges_section;
164 static GTY(()) section *debug_addr_section;
165 static GTY(()) section *debug_macinfo_section;
166 static const char *debug_macinfo_section_name;
167 static unsigned macinfo_label_base = 1;
168 static GTY(()) section *debug_line_section;
169 static GTY(()) section *debug_skeleton_line_section;
170 static GTY(()) section *debug_loc_section;
171 static GTY(()) section *debug_pubnames_section;
172 static GTY(()) section *debug_pubtypes_section;
173 static GTY(()) section *debug_str_section;
174 static GTY(()) section *debug_line_str_section;
175 static GTY(()) section *debug_str_dwo_section;
176 static GTY(()) section *debug_str_offsets_section;
177 static GTY(()) section *debug_ranges_section;
178 static GTY(()) section *debug_frame_section;
179
180 /* Maximum size (in bytes) of an artificially generated label. */
181 #define MAX_ARTIFICIAL_LABEL_BYTES 40
182
183 /* According to the (draft) DWARF 3 specification, the initial length
184 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
185 bytes are 0xffffffff, followed by the length stored in the next 8
186 bytes.
187
188 However, the SGI/MIPS ABI uses an initial length which is equal to
189 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
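
/* Illustrative sketch (not part of the original source) of the two
   initial-length encodings referred to above:

     32-bit DWARF:  [ 4-byte length ]
     64-bit DWARF:  [ 0xffffffff ] [ 8-byte length ]

   which is why DWARF_INITIAL_LENGTH_SIZE below is 4 or 12.  */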
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE
192 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
193 #endif
194
195 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
196 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
197 #endif
198
199 /* Round SIZE up to the nearest BOUNDARY. */
200 #define DWARF_ROUND(SIZE,BOUNDARY) \
201 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
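
/* For example (illustrative): DWARF_ROUND (10, 4) == 12 and
   DWARF_ROUND (16, 8) == 16; a SIZE that is already a multiple of
   BOUNDARY is left unchanged.  */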
202
203 /* CIE identifier. */
204 #if HOST_BITS_PER_WIDE_INT >= 64
205 #define DWARF_CIE_ID \
206 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
207 #else
208 #define DWARF_CIE_ID DW_CIE_ID
209 #endif
210
211
212 /* A vector for a table that contains frame description
213 information for each routine. */
214 #define NOT_INDEXED (-1U)
215 #define NO_INDEX_ASSIGNED (-2U)
216
217 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
218
219 struct GTY((for_user)) indirect_string_node {
220 const char *str;
221 unsigned int refcount;
222 enum dwarf_form form;
223 char *label;
224 unsigned int index;
225 };
226
227 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
228 {
229 typedef const char *compare_type;
230
231 static hashval_t hash (indirect_string_node *);
232 static bool equal (indirect_string_node *, const char *);
233 };
234
235 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
238
239 /* With split_debug_info, both the comp_dir and dwo_name go in the
240 main object file, rather than the dwo, similar to the force_direct
241 parameter elsewhere but with additional complications:
242
243 1) The string is needed in both the main object file and the dwo.
244 That is, the comp_dir and dwo_name will appear in both places.
245
246 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
247 DW_FORM_line_strp or DW_FORM_GNU_str_index.
248
249 3) GCC chooses the form to use late, depending on the size and
250 reference count.
251
 252 Rather than forcing all the debug string handling functions and
253 callers to deal with these complications, simply use a separate,
254 special-cased string table for any attribute that should go in the
255 main object file. This limits the complexity to just the places
256 that need it. */
257
258 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
259
260 static GTY(()) int dw2_string_counter;
261
262 /* True if the compilation unit places functions in more than one section. */
263 static GTY(()) bool have_multiple_function_sections = false;
264
265 /* Whether the default text and cold text sections have been used at all. */
266
267 static GTY(()) bool text_section_used = false;
268 static GTY(()) bool cold_text_section_used = false;
269
270 /* The default cold text section. */
271 static GTY(()) section *cold_text_section;
272
273 /* The DIE for C++14 'auto' in a function return type. */
274 static GTY(()) dw_die_ref auto_die;
275
276 /* The DIE for C++14 'decltype(auto)' in a function return type. */
277 static GTY(()) dw_die_ref decltype_auto_die;
278
279 /* Forward declarations for functions defined in this file. */
280
281 static void output_call_frame_info (int);
282 static void dwarf2out_note_section_used (void);
283
284 /* Personality decl of current unit. Used only when assembler does not support
285 personality CFI. */
286 static GTY(()) rtx current_unit_personality;
287
288 /* .debug_rnglists next index. */
289 static unsigned int rnglist_idx;
290
291 /* Data and reference forms for relocatable data. */
292 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
293 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
294
295 #ifndef DEBUG_FRAME_SECTION
296 #define DEBUG_FRAME_SECTION ".debug_frame"
297 #endif
298
299 #ifndef FUNC_BEGIN_LABEL
300 #define FUNC_BEGIN_LABEL "LFB"
301 #endif
302
303 #ifndef FUNC_END_LABEL
304 #define FUNC_END_LABEL "LFE"
305 #endif
306
307 #ifndef PROLOGUE_END_LABEL
308 #define PROLOGUE_END_LABEL "LPE"
309 #endif
310
311 #ifndef EPILOGUE_BEGIN_LABEL
312 #define EPILOGUE_BEGIN_LABEL "LEB"
313 #endif
314
315 #ifndef FRAME_BEGIN_LABEL
316 #define FRAME_BEGIN_LABEL "Lframe"
317 #endif
318 #define CIE_AFTER_SIZE_LABEL "LSCIE"
319 #define CIE_END_LABEL "LECIE"
320 #define FDE_LABEL "LSFDE"
321 #define FDE_AFTER_SIZE_LABEL "LASFDE"
322 #define FDE_END_LABEL "LEFDE"
323 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
324 #define LINE_NUMBER_END_LABEL "LELT"
325 #define LN_PROLOG_AS_LABEL "LASLTP"
326 #define LN_PROLOG_END_LABEL "LELTP"
327 #define DIE_LABEL_PREFIX "DW"
328 \f
329 /* Match the base name of a file to the base name of a compilation unit. */
330
331 static int
332 matches_main_base (const char *path)
333 {
334 /* Cache the last query. */
335 static const char *last_path = NULL;
336 static int last_match = 0;
337 if (path != last_path)
338 {
339 const char *base;
340 int length = base_of_path (path, &base);
341 last_path = path;
342 last_match = (length == main_input_baselength
343 && memcmp (base, main_input_basename, length) == 0);
344 }
345 return last_match;
346 }
347
348 #ifdef DEBUG_DEBUG_STRUCT
349
350 static int
351 dump_struct_debug (tree type, enum debug_info_usage usage,
352 enum debug_struct_file criterion, int generic,
353 int matches, int result)
354 {
355 /* Find the type name. */
356 tree type_decl = TYPE_STUB_DECL (type);
357 tree t = type_decl;
358 const char *name = 0;
359 if (TREE_CODE (t) == TYPE_DECL)
360 t = DECL_NAME (t);
361 if (t)
362 name = IDENTIFIER_POINTER (t);
363
364 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
365 criterion,
366 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
367 matches ? "bas" : "hdr",
368 generic ? "gen" : "ord",
369 usage == DINFO_USAGE_DFN ? ";" :
370 usage == DINFO_USAGE_DIR_USE ? "." : "*",
371 result,
372 (void*) type_decl, name);
373 return result;
374 }
375 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
376 dump_struct_debug (type, usage, criterion, generic, matches, result)
377
378 #else
379
380 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
381 (result)
382
383 #endif
384
385 /* Get the number of HOST_WIDE_INTs needed to represent the precision
386 of the number. Some constants have a large uniform precision, so
387 we get the precision needed for the actual value of the number. */
388
389 static unsigned int
390 get_full_len (const wide_int &op)
391 {
392 int prec = wi::min_precision (op, UNSIGNED);
393 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
394 / HOST_BITS_PER_WIDE_INT);
395 }
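
/* For example (illustrative, assuming a 64-bit HOST_WIDE_INT): the value 1
   has a minimum unsigned precision of 1 bit and needs a single
   HOST_WIDE_INT, while a 70-bit constant needs two, even if the wide_int
   object itself was created with a larger uniform precision.  */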
396
397 static bool
398 should_emit_struct_debug (tree type, enum debug_info_usage usage)
399 {
400 enum debug_struct_file criterion;
401 tree type_decl;
402 bool generic = lang_hooks.types.generic_p (type);
403
404 if (generic)
405 criterion = debug_struct_generic[usage];
406 else
407 criterion = debug_struct_ordinary[usage];
408
409 if (criterion == DINFO_STRUCT_FILE_NONE)
410 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
411 if (criterion == DINFO_STRUCT_FILE_ANY)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
413
414 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
415
416 if (type_decl != NULL)
417 {
418 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
420
421 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
422 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
423 }
424
425 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
426 }
427 \f
428 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
429 switch to the data section instead, and write out a synthetic start label
430 for collect2 the first time around. */
431
432 static void
433 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
434 {
435 if (eh_frame_section == 0)
436 {
437 int flags;
438
439 if (EH_TABLES_CAN_BE_READ_ONLY)
440 {
441 int fde_encoding;
442 int per_encoding;
443 int lsda_encoding;
444
445 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
446 /*global=*/0);
447 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
448 /*global=*/1);
449 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
450 /*global=*/0);
451 flags = ((! flag_pic
452 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
453 && (fde_encoding & 0x70) != DW_EH_PE_aligned
454 && (per_encoding & 0x70) != DW_EH_PE_absptr
455 && (per_encoding & 0x70) != DW_EH_PE_aligned
456 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
457 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
458 ? 0 : SECTION_WRITE);
459 }
460 else
461 flags = SECTION_WRITE;
462
463 #ifdef EH_FRAME_SECTION_NAME
464 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
465 #else
466 eh_frame_section = ((flags == SECTION_WRITE)
467 ? data_section : readonly_data_section);
468 #endif /* EH_FRAME_SECTION_NAME */
469 }
470
471 switch_to_section (eh_frame_section);
472
473 #ifdef EH_FRAME_THROUGH_COLLECT2
474 /* We have no special eh_frame section. Emit special labels to guide
475 collect2. */
476 if (!back)
477 {
478 tree label = get_file_function_name ("F");
479 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
480 targetm.asm_out.globalize_label (asm_out_file,
481 IDENTIFIER_POINTER (label));
482 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
483 }
484 #endif
485 }
486
487 /* Switch [BACK] to the eh or debug frame table section, depending on
488 FOR_EH. */
489
490 static void
491 switch_to_frame_table_section (int for_eh, bool back)
492 {
493 if (for_eh)
494 switch_to_eh_frame_section (back);
495 else
496 {
497 if (!debug_frame_section)
498 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
499 SECTION_DEBUG, NULL);
500 switch_to_section (debug_frame_section);
501 }
502 }
503
504 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
505
506 enum dw_cfi_oprnd_type
507 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
508 {
509 switch (cfi)
510 {
511 case DW_CFA_nop:
512 case DW_CFA_GNU_window_save:
513 case DW_CFA_remember_state:
514 case DW_CFA_restore_state:
515 return dw_cfi_oprnd_unused;
516
517 case DW_CFA_set_loc:
518 case DW_CFA_advance_loc1:
519 case DW_CFA_advance_loc2:
520 case DW_CFA_advance_loc4:
521 case DW_CFA_MIPS_advance_loc8:
522 return dw_cfi_oprnd_addr;
523
524 case DW_CFA_offset:
525 case DW_CFA_offset_extended:
526 case DW_CFA_def_cfa:
527 case DW_CFA_offset_extended_sf:
528 case DW_CFA_def_cfa_sf:
529 case DW_CFA_restore:
530 case DW_CFA_restore_extended:
531 case DW_CFA_undefined:
532 case DW_CFA_same_value:
533 case DW_CFA_def_cfa_register:
534 case DW_CFA_register:
535 case DW_CFA_expression:
536 case DW_CFA_val_expression:
537 return dw_cfi_oprnd_reg_num;
538
539 case DW_CFA_def_cfa_offset:
540 case DW_CFA_GNU_args_size:
541 case DW_CFA_def_cfa_offset_sf:
542 return dw_cfi_oprnd_offset;
543
544 case DW_CFA_def_cfa_expression:
545 return dw_cfi_oprnd_loc;
546
547 default:
548 gcc_unreachable ();
549 }
550 }
551
552 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
553
554 enum dw_cfi_oprnd_type
555 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
556 {
557 switch (cfi)
558 {
559 case DW_CFA_def_cfa:
560 case DW_CFA_def_cfa_sf:
561 case DW_CFA_offset:
562 case DW_CFA_offset_extended_sf:
563 case DW_CFA_offset_extended:
564 return dw_cfi_oprnd_offset;
565
566 case DW_CFA_register:
567 return dw_cfi_oprnd_reg_num;
568
569 case DW_CFA_expression:
570 case DW_CFA_val_expression:
571 return dw_cfi_oprnd_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
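
/* For example (illustrative): DW_CFA_offset means "register R was saved at
   CFA + N * data_alignment_factor"; its first operand is the register
   number (dw_cfi_oprnd_reg_num in dw_cfi_oprnd1_desc above) and its second
   operand is the factored offset N (dw_cfi_oprnd_offset here), which is how
   the two descriptors cooperate to tell the GTY machinery which union
   members are live.  */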
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically performed to make sure that tables
 691 associated with functions are dragged along with them and not discarded
 692 by link-time garbage collection.  We need to do this on a per-function basis to
693 cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
731
 732 /* Output the call frame information, which records how the frame
 733 address is calculated throughout the function and where the saved
 734 registers are located. */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
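
      /* For example (illustrative): a unit with a personality routine, LSDA
	 references and a non-absptr FDE encoding ends up with the
	 augmentation string "zPLR"; the augmentation data then carries the
	 personality encoding and pointer, the LSDA encoding, and the FDE
	 pointer encoding, in that order, matching the order in which the
	 characters are appended below.  */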
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
 935 /* Loop through all of the FDEs. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964 rtx personality = get_personality_function (current_function_decl);
965
966 fprintf (asm_out_file, "\t.cfi_startproc\n");
967
968 if (personality)
969 {
970 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
971 ref = personality;
972
973 /* ??? The GAS support isn't entirely consistent. We have to
974 handle indirect support ourselves, but PC-relative is done
975 in the assembler. Further, the assembler can't handle any
976 of the weirder relocation types. */
977 if (enc & DW_EH_PE_indirect)
978 ref = dw2_force_const_mem (ref, true);
979
980 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
981 output_addr_const (asm_out_file, ref);
982 fputc ('\n', asm_out_file);
983 }
984
985 if (crtl->uses_eh_lsda)
986 {
987 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
988
989 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
990 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
991 current_function_funcdef_no);
992 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
993 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
994
995 if (enc & DW_EH_PE_indirect)
996 ref = dw2_force_const_mem (ref, true);
997
998 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
999 output_addr_const (asm_out_file, ref);
1000 fputc ('\n', asm_out_file);
1001 }
1002 }
1003
1004 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1005 this allocation may be done before pass_final. */
1006
1007 dw_fde_ref
1008 dwarf2out_alloc_current_fde (void)
1009 {
1010 dw_fde_ref fde;
1011
1012 fde = ggc_cleared_alloc<dw_fde_node> ();
1013 fde->decl = current_function_decl;
1014 fde->funcdef_number = current_function_funcdef_no;
1015 fde->fde_index = vec_safe_length (fde_vec);
1016 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1017 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1018 fde->nothrow = crtl->nothrow;
1019 fde->drap_reg = INVALID_REGNUM;
1020 fde->vdrap_reg = INVALID_REGNUM;
1021
1022 /* Record the FDE associated with this function. */
1023 cfun->fde = fde;
1024 vec_safe_push (fde_vec, fde);
1025
1026 return fde;
1027 }
1028
1029 /* Output a marker (i.e. a label) for the beginning of a function, before
1030 the prologue. */
1031
1032 void
1033 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1034 unsigned int column ATTRIBUTE_UNUSED,
1035 const char *file ATTRIBUTE_UNUSED)
1036 {
1037 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1038 char * dup_label;
1039 dw_fde_ref fde;
1040 section *fnsec;
1041 bool do_frame;
1042
1043 current_function_func_begin_label = NULL;
1044
1045 do_frame = dwarf2out_do_frame ();
1046
1047 /* ??? current_function_func_begin_label is also used by except.c for
1048 call-site information. We must emit this label if it might be used. */
1049 if (!do_frame
1050 && (!flag_exceptions
1051 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1052 return;
1053
1054 fnsec = function_section (current_function_decl);
1055 switch_to_section (fnsec);
1056 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1057 current_function_funcdef_no);
1058 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1059 current_function_funcdef_no);
1060 dup_label = xstrdup (label);
1061 current_function_func_begin_label = dup_label;
1062
1063 /* We can elide the fde allocation if we're not emitting debug info. */
1064 if (!do_frame)
1065 return;
1066
1067 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1068 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1069 would include pass_dwarf2_frame. If we've not created the FDE yet,
1070 do so now. */
1071 fde = cfun->fde;
1072 if (fde == NULL)
1073 fde = dwarf2out_alloc_current_fde ();
1074
1075 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1076 fde->dw_fde_begin = dup_label;
1077 fde->dw_fde_current_label = dup_label;
1078 fde->in_std_section = (fnsec == text_section
1079 || (cold_text_section && fnsec == cold_text_section));
1080
1081 /* We only want to output line number information for the genuine dwarf2
1082 prologue case, not the eh frame case. */
1083 #ifdef DWARF2_DEBUGGING_INFO
1084 if (file)
1085 dwarf2out_source_line (line, column, file, 0, true);
1086 #endif
1087
1088 if (dwarf2out_do_cfi_asm ())
1089 dwarf2out_do_cfi_startproc (false);
1090 else
1091 {
1092 rtx personality = get_personality_function (current_function_decl);
1093 if (!current_unit_personality)
1094 current_unit_personality = personality;
1095
1096 /* We cannot keep a current personality per function because, without
1097 CFI asm, there is no current function anymore at the point where we
1098 emit the CFI data. */
1099 if (personality && current_unit_personality != personality)
1100 sorry ("multiple EH personalities are supported only with assemblers "
1101 "supporting .cfi_personality directive");
1102 }
1103 }
1104
1105 /* Output a marker (i.e. a label) for the end of the generated code
1106 for a function prologue. This gets called *after* the prologue code has
1107 been generated. */
1108
1109 void
1110 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1111 const char *file ATTRIBUTE_UNUSED)
1112 {
1113 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1114
1115 /* Output a label to mark the end of the prologue code generated for
1116 this function. */
1117 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1118 current_function_funcdef_no);
1119 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1120 current_function_funcdef_no);
1121 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1122 }
1123
1124 /* Output a marker (i.e. a label) for the beginning of the generated code
1125 for a function epilogue. This gets called *before* the epilogue code has
1126 been generated. */
1127
1128 void
1129 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1130 const char *file ATTRIBUTE_UNUSED)
1131 {
1132 dw_fde_ref fde = cfun->fde;
1133 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1134
1135 if (fde->dw_fde_vms_begin_epilogue)
1136 return;
1137
1138 /* Output a label to mark the beginning of the epilogue code generated
1139 for this function. */
1140 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1141 current_function_funcdef_no);
1142 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1143 current_function_funcdef_no);
1144 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1145 }
1146
1147 /* Output a marker (i.e. a label) for the absolute end of the generated code
1148 for a function definition. This gets called *after* the epilogue code has
1149 been generated. */
1150
1151 void
1152 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1153 const char *file ATTRIBUTE_UNUSED)
1154 {
1155 dw_fde_ref fde;
1156 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1157
1158 last_var_location_insn = NULL;
1159 cached_next_real_insn = NULL;
1160
1161 if (dwarf2out_do_cfi_asm ())
1162 fprintf (asm_out_file, "\t.cfi_endproc\n");
1163
1164 /* Output a label to mark the endpoint of the code generated for this
1165 function. */
1166 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1167 current_function_funcdef_no);
1168 ASM_OUTPUT_LABEL (asm_out_file, label);
1169 fde = cfun->fde;
1170 gcc_assert (fde != NULL);
1171 if (fde->dw_fde_second_begin == NULL)
1172 fde->dw_fde_end = xstrdup (label);
1173 }
1174
1175 void
1176 dwarf2out_frame_finish (void)
1177 {
1178 /* Output call frame information. */
1179 if (targetm.debug_unwind_info () == UI_DWARF2)
1180 output_call_frame_info (0);
1181
1182 /* Output another copy for the unwinder. */
1183 if ((flag_unwind_tables || flag_exceptions)
1184 && targetm_common.except_unwind_info (&global_options) == UI_DWARF2)
1185 output_call_frame_info (1);
1186 }
1187
1188 /* Note that the current function section is being used for code. */
1189
1190 static void
1191 dwarf2out_note_section_used (void)
1192 {
1193 section *sec = current_function_section ();
1194 if (sec == text_section)
1195 text_section_used = true;
1196 else if (sec == cold_text_section)
1197 cold_text_section_used = true;
1198 }
1199
1200 static void var_location_switch_text_section (void);
1201 static void set_cur_line_info_table (section *);
1202
1203 void
1204 dwarf2out_switch_text_section (void)
1205 {
1206 section *sect;
1207 dw_fde_ref fde = cfun->fde;
1208
1209 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1210
1211 if (!in_cold_section_p)
1212 {
1213 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1214 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1215 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1216 }
1217 else
1218 {
1219 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1220 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1221 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1222 }
1223 have_multiple_function_sections = true;
1224
1225 /* There is no need to mark used sections when not debugging. */
1226 if (cold_text_section != NULL)
1227 dwarf2out_note_section_used ();
1228
1229 if (dwarf2out_do_cfi_asm ())
1230 fprintf (asm_out_file, "\t.cfi_endproc\n");
1231
1232 /* Now do the real section switch. */
1233 sect = current_function_section ();
1234 switch_to_section (sect);
1235
1236 fde->second_in_std_section
1237 = (sect == text_section
1238 || (cold_text_section && sect == cold_text_section));
1239
1240 if (dwarf2out_do_cfi_asm ())
1241 dwarf2out_do_cfi_startproc (true);
1242
1243 var_location_switch_text_section ();
1244
1245 if (cold_text_section != NULL)
1246 set_cur_line_info_table (sect);
1247 }
1248 \f
1249 /* And now, the subset of the debugging information support code necessary
1250 for emitting location expressions. */
1251
1252 /* Data about a single source file. */
1253 struct GTY((for_user)) dwarf_file_data {
1254 const char * filename;
1255 int emitted_number;
1256 };
1257
1258 /* Describe an entry into the .debug_addr section. */
1259
1260 enum ate_kind {
1261 ate_kind_rtx,
1262 ate_kind_rtx_dtprel,
1263 ate_kind_label
1264 };
1265
1266 struct GTY((for_user)) addr_table_entry {
1267 enum ate_kind kind;
1268 unsigned int refcount;
1269 unsigned int index;
1270 union addr_table_entry_struct_union
1271 {
1272 rtx GTY ((tag ("0"))) rtl;
1273 char * GTY ((tag ("1"))) label;
1274 }
1275 GTY ((desc ("%1.kind"))) addr;
1276 };
1277
1278 /* Location lists are ranges paired with a location description for each
1279 range, so you can track variables that are in different places over
1280 their entire life. */
1281 typedef struct GTY(()) dw_loc_list_struct {
1282 dw_loc_list_ref dw_loc_next;
1283 const char *begin; /* Label and addr_entry for start of range */
1284 addr_table_entry *begin_entry;
1285 const char *end; /* Label for end of range */
1286 char *ll_symbol; /* Label for beginning of location list.
1287 Only on head of list */
1288 const char *section; /* Section this loclist is relative to */
1289 dw_loc_descr_ref expr;
1290 hashval_t hash;
1291 /* True if all addresses in this and subsequent lists are known to be
1292 resolved. */
1293 bool resolved_addr;
1294 /* True if this list has been replaced by dw_loc_next. */
1295 bool replaced;
1296 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1297 section. */
1298 unsigned char emitted : 1;
1299 /* True if hash field is index rather than hash value. */
1300 unsigned char num_assigned : 1;
1301 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1302 unsigned char offset_emitted : 1;
1303 /* True if note_variable_value_in_expr has been called on it. */
1304 unsigned char noted_variable_value : 1;
1305 /* True if the range should be emitted even if begin and end
1306 are the same. */
1307 bool force;
1308 } dw_loc_list_node;
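
/* Illustrative example (not part of the original source): a variable that
   starts out in a register and is later spilled to the stack would be
   described by a two-node list, roughly

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LFE0)  DW_OP_fbreg -16

   where each node's begin/end labels delimit the code range its expression
   covers, and ll_symbol on the head names the whole list.  */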
1309
1310 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1311 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1312
1313 /* Convert a DWARF stack opcode into its string name. */
1314
1315 static const char *
1316 dwarf_stack_op_name (unsigned int op)
1317 {
1318 const char *name = get_DW_OP_name (op);
1319
1320 if (name != NULL)
1321 return name;
1322
1323 return "OP_<unknown>";
1324 }
1325
1326 /* Return a pointer to a newly allocated location description. Location
1327 descriptions are simple expression terms that can be strung
1328 together to form more complicated location (address) descriptions. */
1329
1330 static inline dw_loc_descr_ref
1331 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1332 unsigned HOST_WIDE_INT oprnd2)
1333 {
1334 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1335
1336 descr->dw_loc_opc = op;
1337 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1338 descr->dw_loc_oprnd1.val_entry = NULL;
1339 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1340 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1341 descr->dw_loc_oprnd2.val_entry = NULL;
1342 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1343
1344 return descr;
1345 }
1346
1347 /* Return a pointer to a newly allocated location description for
1348 REG and OFFSET. */
1349
1350 static inline dw_loc_descr_ref
1351 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1352 {
1353 if (reg <= 31)
1354 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1355 offset, 0);
1356 else
1357 return new_loc_descr (DW_OP_bregx, reg, offset);
1358 }
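
/* For example (illustrative): new_reg_loc_descr (6, 8) produces the single
   operation DW_OP_breg6 8 ("contents of register 6, plus 8"), while
   new_reg_loc_descr (48, 8) must use DW_OP_bregx 48, 8 because only
   registers 0-31 have dedicated DW_OP_bregN opcodes.  */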
1359
1360 /* Add a location description term to a location description expression. */
1361
1362 static inline void
1363 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1364 {
1365 dw_loc_descr_ref *d;
1366
1367 /* Find the end of the chain. */
1368 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1369 ;
1370
1371 *d = descr;
1372 }
1373
1374 /* Compare two location operands for exact equality. */
1375
1376 static bool
1377 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1378 {
1379 if (a->val_class != b->val_class)
1380 return false;
1381 switch (a->val_class)
1382 {
1383 case dw_val_class_none:
1384 return true;
1385 case dw_val_class_addr:
1386 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1387
1388 case dw_val_class_offset:
1389 case dw_val_class_unsigned_const:
1390 case dw_val_class_const:
1391 case dw_val_class_unsigned_const_implicit:
1392 case dw_val_class_const_implicit:
1393 case dw_val_class_range_list:
1394 /* These are all HOST_WIDE_INT, signed or unsigned. */
1395 return a->v.val_unsigned == b->v.val_unsigned;
1396
1397 case dw_val_class_loc:
1398 return a->v.val_loc == b->v.val_loc;
1399 case dw_val_class_loc_list:
1400 return a->v.val_loc_list == b->v.val_loc_list;
1401 case dw_val_class_die_ref:
1402 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1403 case dw_val_class_fde_ref:
1404 return a->v.val_fde_index == b->v.val_fde_index;
1405 case dw_val_class_lbl_id:
1406 case dw_val_class_lineptr:
1407 case dw_val_class_macptr:
1408 case dw_val_class_loclistsptr:
1409 case dw_val_class_high_pc:
1410 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1411 case dw_val_class_str:
1412 return a->v.val_str == b->v.val_str;
1413 case dw_val_class_flag:
1414 return a->v.val_flag == b->v.val_flag;
1415 case dw_val_class_file:
1416 case dw_val_class_file_implicit:
1417 return a->v.val_file == b->v.val_file;
1418 case dw_val_class_decl_ref:
1419 return a->v.val_decl_ref == b->v.val_decl_ref;
1420
1421 case dw_val_class_const_double:
1422 return (a->v.val_double.high == b->v.val_double.high
1423 && a->v.val_double.low == b->v.val_double.low);
1424
1425 case dw_val_class_wide_int:
1426 return *a->v.val_wide == *b->v.val_wide;
1427
1428 case dw_val_class_vec:
1429 {
1430 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1431 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1432
1433 return (a_len == b_len
1434 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1435 }
1436
1437 case dw_val_class_data8:
1438 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1439
1440 case dw_val_class_vms_delta:
1441 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1442     && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1443
1444 case dw_val_class_discr_value:
1445 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1446 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1447 case dw_val_class_discr_list:
1448 /* It makes no sense comparing two discriminant value lists. */
1449 return false;
1450 }
1451 gcc_unreachable ();
1452 }
1453
1454 /* Compare two location atoms for exact equality. */
1455
1456 static bool
1457 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1458 {
1459 if (a->dw_loc_opc != b->dw_loc_opc)
1460 return false;
1461
1462 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1463 address size, but since we always allocate cleared storage it
1464 should be zero for other types of locations. */
1465 if (a->dtprel != b->dtprel)
1466 return false;
1467
1468 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1469 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1470 }
1471
1472 /* Compare two complete location expressions for exact equality. */
1473
1474 bool
1475 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1476 {
1477 while (1)
1478 {
1479 if (a == b)
1480 return true;
1481 if (a == NULL || b == NULL)
1482 return false;
1483 if (!loc_descr_equal_p_1 (a, b))
1484 return false;
1485
1486 a = a->dw_loc_next;
1487 b = b->dw_loc_next;
1488 }
1489 }
1490
1491
1492 /* Add a constant OFFSET to a location expression. */
1493
1494 static void
1495 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1496 {
1497 dw_loc_descr_ref loc;
1498 HOST_WIDE_INT *p;
1499
1500 gcc_assert (*list_head != NULL);
1501
1502 if (!offset)
1503 return;
1504
1505 /* Find the end of the chain. */
1506 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1507 ;
1508
1509 p = NULL;
1510 if (loc->dw_loc_opc == DW_OP_fbreg
1511 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1512 p = &loc->dw_loc_oprnd1.v.val_int;
1513 else if (loc->dw_loc_opc == DW_OP_bregx)
1514 p = &loc->dw_loc_oprnd2.v.val_int;
1515
1516 /* If the last operation is fbreg or breg{0..31,x}, optimize by adjusting its
1517 offset. Don't optimize if a signed integer overflow would happen. */
1518 if (p != NULL
1519 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1520 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1521 *p += offset;
1522
1523 else if (offset > 0)
1524 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1525
1526 else
1527 {
1528 loc->dw_loc_next
1529 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1530 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1531 }
1532 }
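
/* For example (illustrative): adding 8 to an expression ending in
   DW_OP_fbreg -24 simply rewrites that operation as DW_OP_fbreg -16;
   adding 8 to an expression ending in some other opcode appends
   DW_OP_plus_uconst 8; and adding -8 appends a constant 8 (built by
   uint_loc_descriptor) followed by DW_OP_minus.  */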
1533
1534 /* Add a constant OFFSET to a location list. */
1535
1536 static void
1537 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1538 {
1539 dw_loc_list_ref d;
1540 for (d = list_head; d != NULL; d = d->dw_loc_next)
1541 loc_descr_plus_const (&d->expr, offset);
1542 }
1543
1544 #define DWARF_REF_SIZE \
1545 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1546
1547 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1548 In DWARF 4 and earlier it is DW_FORM_data8 with 64 bits; in DWARF 5 it is
1549 DW_FORM_data16 with 128 bits. */
1550 #define DWARF_LARGEST_DATA_FORM_BITS \
1551 (dwarf_version >= 5 ? 128 : 64)
1552
1553 /* Utility inline function for constructing ops that were GNU extensions
1554 before DWARF 5. */
1555 static inline enum dwarf_location_atom
1556 dwarf_OP (enum dwarf_location_atom op)
1557 {
1558 switch (op)
1559 {
1560 case DW_OP_implicit_pointer:
1561 if (dwarf_version < 5)
1562 return DW_OP_GNU_implicit_pointer;
1563 break;
1564
1565 case DW_OP_entry_value:
1566 if (dwarf_version < 5)
1567 return DW_OP_GNU_entry_value;
1568 break;
1569
1570 case DW_OP_const_type:
1571 if (dwarf_version < 5)
1572 return DW_OP_GNU_const_type;
1573 break;
1574
1575 case DW_OP_regval_type:
1576 if (dwarf_version < 5)
1577 return DW_OP_GNU_regval_type;
1578 break;
1579
1580 case DW_OP_deref_type:
1581 if (dwarf_version < 5)
1582 return DW_OP_GNU_deref_type;
1583 break;
1584
1585 case DW_OP_convert:
1586 if (dwarf_version < 5)
1587 return DW_OP_GNU_convert;
1588 break;
1589
1590 case DW_OP_reinterpret:
1591 if (dwarf_version < 5)
1592 return DW_OP_GNU_reinterpret;
1593 break;
1594
1595 default:
1596 break;
1597 }
1598 return op;
1599 }
1600
1601 /* Similarly for attributes. */
1602 static inline enum dwarf_attribute
1603 dwarf_AT (enum dwarf_attribute at)
1604 {
1605 switch (at)
1606 {
1607 case DW_AT_call_return_pc:
1608 if (dwarf_version < 5)
1609 return DW_AT_low_pc;
1610 break;
1611
1612 case DW_AT_call_tail_call:
1613 if (dwarf_version < 5)
1614 return DW_AT_GNU_tail_call;
1615 break;
1616
1617 case DW_AT_call_origin:
1618 if (dwarf_version < 5)
1619 return DW_AT_abstract_origin;
1620 break;
1621
1622 case DW_AT_call_target:
1623 if (dwarf_version < 5)
1624 return DW_AT_GNU_call_site_target;
1625 break;
1626
1627 case DW_AT_call_target_clobbered:
1628 if (dwarf_version < 5)
1629 return DW_AT_GNU_call_site_target_clobbered;
1630 break;
1631
1632 case DW_AT_call_parameter:
1633 if (dwarf_version < 5)
1634 return DW_AT_abstract_origin;
1635 break;
1636
1637 case DW_AT_call_value:
1638 if (dwarf_version < 5)
1639 return DW_AT_GNU_call_site_value;
1640 break;
1641
1642 case DW_AT_call_data_value:
1643 if (dwarf_version < 5)
1644 return DW_AT_GNU_call_site_data_value;
1645 break;
1646
1647 case DW_AT_call_all_calls:
1648 if (dwarf_version < 5)
1649 return DW_AT_GNU_all_call_sites;
1650 break;
1651
1652 case DW_AT_call_all_tail_calls:
1653 if (dwarf_version < 5)
1654 return DW_AT_GNU_all_tail_call_sites;
1655 break;
1656
1657 case DW_AT_dwo_name:
1658 if (dwarf_version < 5)
1659 return DW_AT_GNU_dwo_name;
1660 break;
1661
1662 default:
1663 break;
1664 }
1665 return at;
1666 }
1667
1668 /* And similarly for tags. */
1669 static inline enum dwarf_tag
1670 dwarf_TAG (enum dwarf_tag tag)
1671 {
1672 switch (tag)
1673 {
1674 case DW_TAG_call_site:
1675 if (dwarf_version < 5)
1676 return DW_TAG_GNU_call_site;
1677 break;
1678
1679 case DW_TAG_call_site_parameter:
1680 if (dwarf_version < 5)
1681 return DW_TAG_GNU_call_site_parameter;
1682 break;
1683
1684 default:
1685 break;
1686 }
1687 return tag;
1688 }
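
/* For example (illustrative): a call-site DIE is created with
   dwarf_TAG (DW_TAG_call_site) and given its value via
   dwarf_AT (DW_AT_call_value); when emitting DWARF before version 5 these
   degrade to the DW_TAG_GNU_call_site and DW_AT_GNU_call_site_value
   extensions, while with -gdwarf-5 the standard codes are used directly.  */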
1689
1690 static unsigned long int get_base_type_offset (dw_die_ref);
1691
1692 /* Return the size of a location descriptor. */
1693
1694 static unsigned long
1695 size_of_loc_descr (dw_loc_descr_ref loc)
1696 {
1697 unsigned long size = 1;
1698
1699 switch (loc->dw_loc_opc)
1700 {
1701 case DW_OP_addr:
1702 size += DWARF2_ADDR_SIZE;
1703 break;
1704 case DW_OP_GNU_addr_index:
1705 case DW_OP_GNU_const_index:
1706 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1707 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1708 break;
1709 case DW_OP_const1u:
1710 case DW_OP_const1s:
1711 size += 1;
1712 break;
1713 case DW_OP_const2u:
1714 case DW_OP_const2s:
1715 size += 2;
1716 break;
1717 case DW_OP_const4u:
1718 case DW_OP_const4s:
1719 size += 4;
1720 break;
1721 case DW_OP_const8u:
1722 case DW_OP_const8s:
1723 size += 8;
1724 break;
1725 case DW_OP_constu:
1726 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1727 break;
1728 case DW_OP_consts:
1729 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1730 break;
1731 case DW_OP_pick:
1732 size += 1;
1733 break;
1734 case DW_OP_plus_uconst:
1735 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1736 break;
1737 case DW_OP_skip:
1738 case DW_OP_bra:
1739 size += 2;
1740 break;
1741 case DW_OP_breg0:
1742 case DW_OP_breg1:
1743 case DW_OP_breg2:
1744 case DW_OP_breg3:
1745 case DW_OP_breg4:
1746 case DW_OP_breg5:
1747 case DW_OP_breg6:
1748 case DW_OP_breg7:
1749 case DW_OP_breg8:
1750 case DW_OP_breg9:
1751 case DW_OP_breg10:
1752 case DW_OP_breg11:
1753 case DW_OP_breg12:
1754 case DW_OP_breg13:
1755 case DW_OP_breg14:
1756 case DW_OP_breg15:
1757 case DW_OP_breg16:
1758 case DW_OP_breg17:
1759 case DW_OP_breg18:
1760 case DW_OP_breg19:
1761 case DW_OP_breg20:
1762 case DW_OP_breg21:
1763 case DW_OP_breg22:
1764 case DW_OP_breg23:
1765 case DW_OP_breg24:
1766 case DW_OP_breg25:
1767 case DW_OP_breg26:
1768 case DW_OP_breg27:
1769 case DW_OP_breg28:
1770 case DW_OP_breg29:
1771 case DW_OP_breg30:
1772 case DW_OP_breg31:
1773 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1774 break;
1775 case DW_OP_regx:
1776 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1777 break;
1778 case DW_OP_fbreg:
1779 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1780 break;
1781 case DW_OP_bregx:
1782 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1783 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1784 break;
1785 case DW_OP_piece:
1786 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1787 break;
1788 case DW_OP_bit_piece:
1789 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1790 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1791 break;
1792 case DW_OP_deref_size:
1793 case DW_OP_xderef_size:
1794 size += 1;
1795 break;
1796 case DW_OP_call2:
1797 size += 2;
1798 break;
1799 case DW_OP_call4:
1800 size += 4;
1801 break;
1802 case DW_OP_call_ref:
1803 case DW_OP_GNU_variable_value:
1804 size += DWARF_REF_SIZE;
1805 break;
1806 case DW_OP_implicit_value:
1807 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1808 + loc->dw_loc_oprnd1.v.val_unsigned;
1809 break;
1810 case DW_OP_implicit_pointer:
1811 case DW_OP_GNU_implicit_pointer:
1812 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1813 break;
1814 case DW_OP_entry_value:
1815 case DW_OP_GNU_entry_value:
1816 {
1817 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1818 size += size_of_uleb128 (op_size) + op_size;
1819 break;
1820 }
1821 case DW_OP_const_type:
1822 case DW_OP_GNU_const_type:
1823 {
1824 unsigned long o
1825 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1826 size += size_of_uleb128 (o) + 1;
1827 switch (loc->dw_loc_oprnd2.val_class)
1828 {
1829 case dw_val_class_vec:
1830 size += loc->dw_loc_oprnd2.v.val_vec.length
1831 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1832 break;
1833 case dw_val_class_const:
1834 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1835 break;
1836 case dw_val_class_const_double:
1837 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1838 break;
1839 case dw_val_class_wide_int:
1840 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1841 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1842 break;
1843 default:
1844 gcc_unreachable ();
1845 }
1846 break;
1847 }
1848 case DW_OP_regval_type:
1849 case DW_OP_GNU_regval_type:
1850 {
1851 unsigned long o
1852 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1853 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1854 + size_of_uleb128 (o);
1855 }
1856 break;
1857 case DW_OP_deref_type:
1858 case DW_OP_GNU_deref_type:
1859 {
1860 unsigned long o
1861 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1862 size += 1 + size_of_uleb128 (o);
1863 }
1864 break;
1865 case DW_OP_convert:
1866 case DW_OP_reinterpret:
1867 case DW_OP_GNU_convert:
1868 case DW_OP_GNU_reinterpret:
1869 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1870 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1871 else
1872 {
1873 unsigned long o
1874 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1875 size += size_of_uleb128 (o);
1876 }
1877 break;
1878 case DW_OP_GNU_parameter_ref:
1879 size += 4;
1880 break;
1881 default:
1882 break;
1883 }
1884
1885 return size;
1886 }
1887
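/* Editor's note, for illustration only (not part of this file): the
   size_of_uleb128/size_of_sleb128 helpers used above count how many bytes a
   value occupies in the (S)LEB128 encoding, 7 payload bits per byte.  A
   minimal sketch of the unsigned case, assuming the same semantics as GCC's
   real helper defined elsewhere:

     static unsigned long
     example_size_of_uleb128 (unsigned HOST_WIDE_INT value)
     {
       unsigned long size = 0;
       do
         {
           value >>= 7;	/* One output byte per 7 bits of payload.  */
           size++;
         }
       while (value != 0);
       return size;
     }

   So, e.g., DW_OP_plus_uconst with operand 300 costs 1 opcode byte plus
   2 ULEB128 bytes, which is what the switch above accounts for.  */
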
1888 /* Return the size of a series of location descriptors. */
1889
1890 unsigned long
1891 size_of_locs (dw_loc_descr_ref loc)
1892 {
1893 dw_loc_descr_ref l;
1894 unsigned long size;
1895
1896 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1897 field, to avoid writing to a PCH file. */
1898 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1899 {
1900 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1901 break;
1902 size += size_of_loc_descr (l);
1903 }
1904 if (! l)
1905 return size;
1906
1907 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1908 {
1909 l->dw_loc_addr = size;
1910 size += size_of_loc_descr (l);
1911 }
1912
1913 return size;
1914 }
1915
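/* Editor's note, for illustration only: for a simple expression such as
   DW_OP_breg6 -8; DW_OP_deref, size_of_locs returns (1 + 1) + 1 = 3 bytes,
   and because there is no DW_OP_skip/DW_OP_bra it never writes the
   dw_loc_addr fields (the first loop exits with L == NULL).  Only when a
   branch opcode is present does the second loop record each operation's
   byte offset, which output_loc_operands later uses to compute the 2-byte
   branch operands.  */
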
1916 /* Return the size of the value in a DW_AT_discr_value attribute. */
1917
1918 static int
1919 size_of_discr_value (dw_discr_value *discr_value)
1920 {
1921 if (discr_value->pos)
1922 return size_of_uleb128 (discr_value->v.uval);
1923 else
1924 return size_of_sleb128 (discr_value->v.sval);
1925 }
1926
1927 /* Return the size of the value in a DW_AT_discr_list attribute. */
1928
1929 static int
1930 size_of_discr_list (dw_discr_list_ref discr_list)
1931 {
1932 int size = 0;
1933
1934 for (dw_discr_list_ref list = discr_list;
1935 list != NULL;
1936 list = list->dw_discr_next)
1937 {
1938 /* One byte for the discriminant value descriptor, and then one or two
1939 LEB128 numbers, depending on whether it's a single case label or a
1940 range label. */
1941 size += 1;
1942 size += size_of_discr_value (&list->dw_discr_lower_bound);
1943 if (list->dw_discr_range != 0)
1944 size += size_of_discr_value (&list->dw_discr_upper_bound);
1945 }
1946 return size;
1947 }
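
/* Editor's note, for illustration only: a DW_AT_discr_list holding one
   single-value label (say 3) and one range label (say 10..20) is sized as
   (1 + leb128(3)) + (1 + leb128(10) + leb128(20)), i.e. one descriptor byte
   per entry plus one LEB128 number for a plain label or two for a range,
   exactly as the loop above computes.  */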
1948
1949 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1950 static void get_ref_die_offset_label (char *, dw_die_ref);
1951 static unsigned long int get_ref_die_offset (dw_die_ref);
1952
1953 /* Output location description stack opcode's operands (if any).
1954 The for_eh_or_skip parameter controls whether register numbers are
1955 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1956 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1957 info). This should be suppressed for the cases that have not been converted
1958 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1959
1960 static void
1961 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1962 {
1963 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1964 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1965
1966 switch (loc->dw_loc_opc)
1967 {
1968 #ifdef DWARF2_DEBUGGING_INFO
1969 case DW_OP_const2u:
1970 case DW_OP_const2s:
1971 dw2_asm_output_data (2, val1->v.val_int, NULL);
1972 break;
1973 case DW_OP_const4u:
1974 if (loc->dtprel)
1975 {
1976 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1977 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1978 val1->v.val_addr);
1979 fputc ('\n', asm_out_file);
1980 break;
1981 }
1982 /* FALLTHRU */
1983 case DW_OP_const4s:
1984 dw2_asm_output_data (4, val1->v.val_int, NULL);
1985 break;
1986 case DW_OP_const8u:
1987 if (loc->dtprel)
1988 {
1989 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1990 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
1991 val1->v.val_addr);
1992 fputc ('\n', asm_out_file);
1993 break;
1994 }
1995 /* FALLTHRU */
1996 case DW_OP_const8s:
1997 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
1998 dw2_asm_output_data (8, val1->v.val_int, NULL);
1999 break;
2000 case DW_OP_skip:
2001 case DW_OP_bra:
2002 {
2003 int offset;
2004
2005 gcc_assert (val1->val_class == dw_val_class_loc);
2006 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
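	/* The operand of DW_OP_skip/DW_OP_bra is a 2-byte signed offset
	   measured from the end of the current operation: 1 byte of opcode
	   plus the 2-byte operand itself, hence the "+ 3" above.  */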
2007
2008 dw2_asm_output_data (2, offset, NULL);
2009 }
2010 break;
2011 case DW_OP_implicit_value:
2012 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2013 switch (val2->val_class)
2014 {
2015 case dw_val_class_const:
2016 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2017 break;
2018 case dw_val_class_vec:
2019 {
2020 unsigned int elt_size = val2->v.val_vec.elt_size;
2021 unsigned int len = val2->v.val_vec.length;
2022 unsigned int i;
2023 unsigned char *p;
2024
2025 if (elt_size > sizeof (HOST_WIDE_INT))
2026 {
2027 elt_size /= 2;
2028 len *= 2;
2029 }
2030 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2031 i < len;
2032 i++, p += elt_size)
2033 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2034 "fp or vector constant word %u", i);
2035 }
2036 break;
2037 case dw_val_class_const_double:
2038 {
2039 unsigned HOST_WIDE_INT first, second;
2040
2041 if (WORDS_BIG_ENDIAN)
2042 {
2043 first = val2->v.val_double.high;
2044 second = val2->v.val_double.low;
2045 }
2046 else
2047 {
2048 first = val2->v.val_double.low;
2049 second = val2->v.val_double.high;
2050 }
2051 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2052 first, NULL);
2053 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2054 second, NULL);
2055 }
2056 break;
2057 case dw_val_class_wide_int:
2058 {
2059 int i;
2060 int len = get_full_len (*val2->v.val_wide);
2061 if (WORDS_BIG_ENDIAN)
2062 for (i = len - 1; i >= 0; --i)
2063 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2064 val2->v.val_wide->elt (i), NULL);
2065 else
2066 for (i = 0; i < len; ++i)
2067 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2068 val2->v.val_wide->elt (i), NULL);
2069 }
2070 break;
2071 case dw_val_class_addr:
2072 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2073 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2074 break;
2075 default:
2076 gcc_unreachable ();
2077 }
2078 break;
2079 #else
2080 case DW_OP_const2u:
2081 case DW_OP_const2s:
2082 case DW_OP_const4u:
2083 case DW_OP_const4s:
2084 case DW_OP_const8u:
2085 case DW_OP_const8s:
2086 case DW_OP_skip:
2087 case DW_OP_bra:
2088 case DW_OP_implicit_value:
2089 /* We currently don't make any attempt to make sure these are
2090 aligned properly like we do for the main unwind info, so
2091 don't support emitting things larger than a byte if we're
2092 only doing unwinding. */
2093 gcc_unreachable ();
2094 #endif
2095 case DW_OP_const1u:
2096 case DW_OP_const1s:
2097 dw2_asm_output_data (1, val1->v.val_int, NULL);
2098 break;
2099 case DW_OP_constu:
2100 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2101 break;
2102 case DW_OP_consts:
2103 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2104 break;
2105 case DW_OP_pick:
2106 dw2_asm_output_data (1, val1->v.val_int, NULL);
2107 break;
2108 case DW_OP_plus_uconst:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 break;
2111 case DW_OP_breg0:
2112 case DW_OP_breg1:
2113 case DW_OP_breg2:
2114 case DW_OP_breg3:
2115 case DW_OP_breg4:
2116 case DW_OP_breg5:
2117 case DW_OP_breg6:
2118 case DW_OP_breg7:
2119 case DW_OP_breg8:
2120 case DW_OP_breg9:
2121 case DW_OP_breg10:
2122 case DW_OP_breg11:
2123 case DW_OP_breg12:
2124 case DW_OP_breg13:
2125 case DW_OP_breg14:
2126 case DW_OP_breg15:
2127 case DW_OP_breg16:
2128 case DW_OP_breg17:
2129 case DW_OP_breg18:
2130 case DW_OP_breg19:
2131 case DW_OP_breg20:
2132 case DW_OP_breg21:
2133 case DW_OP_breg22:
2134 case DW_OP_breg23:
2135 case DW_OP_breg24:
2136 case DW_OP_breg25:
2137 case DW_OP_breg26:
2138 case DW_OP_breg27:
2139 case DW_OP_breg28:
2140 case DW_OP_breg29:
2141 case DW_OP_breg30:
2142 case DW_OP_breg31:
2143 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2144 break;
2145 case DW_OP_regx:
2146 {
2147 unsigned r = val1->v.val_unsigned;
2148 if (for_eh_or_skip >= 0)
2149 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2150 gcc_assert (size_of_uleb128 (r)
2151 == size_of_uleb128 (val1->v.val_unsigned));
2152 dw2_asm_output_data_uleb128 (r, NULL);
2153 }
2154 break;
2155 case DW_OP_fbreg:
2156 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2157 break;
2158 case DW_OP_bregx:
2159 {
2160 unsigned r = val1->v.val_unsigned;
2161 if (for_eh_or_skip >= 0)
2162 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2163 gcc_assert (size_of_uleb128 (r)
2164 == size_of_uleb128 (val1->v.val_unsigned));
2165 dw2_asm_output_data_uleb128 (r, NULL);
2166 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2167 }
2168 break;
2169 case DW_OP_piece:
2170 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2171 break;
2172 case DW_OP_bit_piece:
2173 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2174 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2175 break;
2176 case DW_OP_deref_size:
2177 case DW_OP_xderef_size:
2178 dw2_asm_output_data (1, val1->v.val_int, NULL);
2179 break;
2180
2181 case DW_OP_addr:
2182 if (loc->dtprel)
2183 {
2184 if (targetm.asm_out.output_dwarf_dtprel)
2185 {
2186 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2187 DWARF2_ADDR_SIZE,
2188 val1->v.val_addr);
2189 fputc ('\n', asm_out_file);
2190 }
2191 else
2192 gcc_unreachable ();
2193 }
2194 else
2195 {
2196 #ifdef DWARF2_DEBUGGING_INFO
2197 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2198 #else
2199 gcc_unreachable ();
2200 #endif
2201 }
2202 break;
2203
2204 case DW_OP_GNU_addr_index:
2205 case DW_OP_GNU_const_index:
2206 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2207 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2208 "(index into .debug_addr)");
2209 break;
2210
2211 case DW_OP_call2:
2212 case DW_OP_call4:
2213 {
2214 unsigned long die_offset
2215 = get_ref_die_offset (val1->v.val_die_ref.die);
2216 /* Make sure the offset has been computed and that we can encode it as
2217 an operand. */
2218 gcc_assert (die_offset > 0
2219 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2220 ? 0xffff
2221 : 0xffffffff));
2222 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2223 die_offset, NULL);
2224 }
2225 break;
2226
2227 case DW_OP_call_ref:
2228 case DW_OP_GNU_variable_value:
2229 {
2230 char label[MAX_ARTIFICIAL_LABEL_BYTES
2231 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2232 gcc_assert (val1->val_class == dw_val_class_die_ref);
2233 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2234 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2235 }
2236 break;
2237
2238 case DW_OP_implicit_pointer:
2239 case DW_OP_GNU_implicit_pointer:
2240 {
2241 char label[MAX_ARTIFICIAL_LABEL_BYTES
2242 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2243 gcc_assert (val1->val_class == dw_val_class_die_ref);
2244 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2245 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2246 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2247 }
2248 break;
2249
2250 case DW_OP_entry_value:
2251 case DW_OP_GNU_entry_value:
2252 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2253 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2254 break;
2255
2256 case DW_OP_const_type:
2257 case DW_OP_GNU_const_type:
2258 {
2259 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2260 gcc_assert (o);
2261 dw2_asm_output_data_uleb128 (o, NULL);
2262 switch (val2->val_class)
2263 {
2264 case dw_val_class_const:
2265 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2266 dw2_asm_output_data (1, l, NULL);
2267 dw2_asm_output_data (l, val2->v.val_int, NULL);
2268 break;
2269 case dw_val_class_vec:
2270 {
2271 unsigned int elt_size = val2->v.val_vec.elt_size;
2272 unsigned int len = val2->v.val_vec.length;
2273 unsigned int i;
2274 unsigned char *p;
2275
2276 l = len * elt_size;
2277 dw2_asm_output_data (1, l, NULL);
2278 if (elt_size > sizeof (HOST_WIDE_INT))
2279 {
2280 elt_size /= 2;
2281 len *= 2;
2282 }
2283 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2284 i < len;
2285 i++, p += elt_size)
2286 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2287 "fp or vector constant word %u", i);
2288 }
2289 break;
2290 case dw_val_class_const_double:
2291 {
2292 unsigned HOST_WIDE_INT first, second;
2293 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2294
2295 dw2_asm_output_data (1, 2 * l, NULL);
2296 if (WORDS_BIG_ENDIAN)
2297 {
2298 first = val2->v.val_double.high;
2299 second = val2->v.val_double.low;
2300 }
2301 else
2302 {
2303 first = val2->v.val_double.low;
2304 second = val2->v.val_double.high;
2305 }
2306 dw2_asm_output_data (l, first, NULL);
2307 dw2_asm_output_data (l, second, NULL);
2308 }
2309 break;
2310 case dw_val_class_wide_int:
2311 {
2312 int i;
2313 int len = get_full_len (*val2->v.val_wide);
2314 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2315
2316 dw2_asm_output_data (1, len * l, NULL);
2317 if (WORDS_BIG_ENDIAN)
2318 for (i = len - 1; i >= 0; --i)
2319 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2320 else
2321 for (i = 0; i < len; ++i)
2322 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2323 }
2324 break;
2325 default:
2326 gcc_unreachable ();
2327 }
2328 }
2329 break;
2330 case DW_OP_regval_type:
2331 case DW_OP_GNU_regval_type:
2332 {
2333 unsigned r = val1->v.val_unsigned;
2334 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2335 gcc_assert (o);
2336 if (for_eh_or_skip >= 0)
2337 {
2338 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2339 gcc_assert (size_of_uleb128 (r)
2340 == size_of_uleb128 (val1->v.val_unsigned));
2341 }
2342 dw2_asm_output_data_uleb128 (r, NULL);
2343 dw2_asm_output_data_uleb128 (o, NULL);
2344 }
2345 break;
2346 case DW_OP_deref_type:
2347 case DW_OP_GNU_deref_type:
2348 {
2349 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2350 gcc_assert (o);
2351 dw2_asm_output_data (1, val1->v.val_int, NULL);
2352 dw2_asm_output_data_uleb128 (o, NULL);
2353 }
2354 break;
2355 case DW_OP_convert:
2356 case DW_OP_reinterpret:
2357 case DW_OP_GNU_convert:
2358 case DW_OP_GNU_reinterpret:
2359 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2360 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2361 else
2362 {
2363 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2364 gcc_assert (o);
2365 dw2_asm_output_data_uleb128 (o, NULL);
2366 }
2367 break;
2368
2369 case DW_OP_GNU_parameter_ref:
2370 {
2371 unsigned long o;
2372 gcc_assert (val1->val_class == dw_val_class_die_ref);
2373 o = get_ref_die_offset (val1->v.val_die_ref.die);
2374 dw2_asm_output_data (4, o, NULL);
2375 }
2376 break;
2377
2378 default:
2379 /* Other codes have no operands. */
2380 break;
2381 }
2382 }
2383
2384 /* Output a sequence of location operations.
2385 The for_eh_or_skip parameter controls whether register numbers are
2386 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2387 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2388 info). This should be suppressed for the cases that have not been converted
2389 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2390
2391 void
2392 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2393 {
2394 for (; loc != NULL; loc = loc->dw_loc_next)
2395 {
2396 enum dwarf_location_atom opc = loc->dw_loc_opc;
2397 /* Output the opcode. */
2398 if (for_eh_or_skip >= 0
2399 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2400 {
2401 unsigned r = (opc - DW_OP_breg0);
2402 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2403 gcc_assert (r <= 31);
2404 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2405 }
2406 else if (for_eh_or_skip >= 0
2407 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2408 {
2409 unsigned r = (opc - DW_OP_reg0);
2410 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2411 gcc_assert (r <= 31);
2412 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2413 }
2414
2415 dw2_asm_output_data (1, opc,
2416 "%s", dwarf_stack_op_name (opc));
2417
2418 /* Output the operand(s) (if any). */
2419 output_loc_operands (loc, for_eh_or_skip);
2420 }
2421 }
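
/* Editor's note, for illustration only: for the expression DW_OP_fbreg -16
   the loop above emits the opcode byte 0x91 followed by the single SLEB128
   operand byte 0x70, so the assembled location description is 0x91 0x70.
   The breg0..breg31/reg0..reg31 remapping is only applied when
   FOR_EH_OR_SKIP is non-negative, i.e. for unwind info output.  */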
2422
2423 /* Output location description stack opcode's operands (if any).
2424 The output is single bytes on a line, suitable for .cfi_escape. */
2425
2426 static void
2427 output_loc_operands_raw (dw_loc_descr_ref loc)
2428 {
2429 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2430 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2431
2432 switch (loc->dw_loc_opc)
2433 {
2434 case DW_OP_addr:
2435 case DW_OP_GNU_addr_index:
2436 case DW_OP_GNU_const_index:
2437 case DW_OP_implicit_value:
2438 /* We cannot output addresses in .cfi_escape, only bytes. */
2439 gcc_unreachable ();
2440
2441 case DW_OP_const1u:
2442 case DW_OP_const1s:
2443 case DW_OP_pick:
2444 case DW_OP_deref_size:
2445 case DW_OP_xderef_size:
2446 fputc (',', asm_out_file);
2447 dw2_asm_output_data_raw (1, val1->v.val_int);
2448 break;
2449
2450 case DW_OP_const2u:
2451 case DW_OP_const2s:
2452 fputc (',', asm_out_file);
2453 dw2_asm_output_data_raw (2, val1->v.val_int);
2454 break;
2455
2456 case DW_OP_const4u:
2457 case DW_OP_const4s:
2458 fputc (',', asm_out_file);
2459 dw2_asm_output_data_raw (4, val1->v.val_int);
2460 break;
2461
2462 case DW_OP_const8u:
2463 case DW_OP_const8s:
2464 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2465 fputc (',', asm_out_file);
2466 dw2_asm_output_data_raw (8, val1->v.val_int);
2467 break;
2468
2469 case DW_OP_skip:
2470 case DW_OP_bra:
2471 {
2472 int offset;
2473
2474 gcc_assert (val1->val_class == dw_val_class_loc);
2475 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2476
2477 fputc (',', asm_out_file);
2478 dw2_asm_output_data_raw (2, offset);
2479 }
2480 break;
2481
2482 case DW_OP_regx:
2483 {
2484 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2485 gcc_assert (size_of_uleb128 (r)
2486 == size_of_uleb128 (val1->v.val_unsigned));
2487 fputc (',', asm_out_file);
2488 dw2_asm_output_data_uleb128_raw (r);
2489 }
2490 break;
2491
2492 case DW_OP_constu:
2493 case DW_OP_plus_uconst:
2494 case DW_OP_piece:
2495 fputc (',', asm_out_file);
2496 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2497 break;
2498
2499 case DW_OP_bit_piece:
2500 fputc (',', asm_out_file);
2501 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2502 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2503 break;
2504
2505 case DW_OP_consts:
2506 case DW_OP_breg0:
2507 case DW_OP_breg1:
2508 case DW_OP_breg2:
2509 case DW_OP_breg3:
2510 case DW_OP_breg4:
2511 case DW_OP_breg5:
2512 case DW_OP_breg6:
2513 case DW_OP_breg7:
2514 case DW_OP_breg8:
2515 case DW_OP_breg9:
2516 case DW_OP_breg10:
2517 case DW_OP_breg11:
2518 case DW_OP_breg12:
2519 case DW_OP_breg13:
2520 case DW_OP_breg14:
2521 case DW_OP_breg15:
2522 case DW_OP_breg16:
2523 case DW_OP_breg17:
2524 case DW_OP_breg18:
2525 case DW_OP_breg19:
2526 case DW_OP_breg20:
2527 case DW_OP_breg21:
2528 case DW_OP_breg22:
2529 case DW_OP_breg23:
2530 case DW_OP_breg24:
2531 case DW_OP_breg25:
2532 case DW_OP_breg26:
2533 case DW_OP_breg27:
2534 case DW_OP_breg28:
2535 case DW_OP_breg29:
2536 case DW_OP_breg30:
2537 case DW_OP_breg31:
2538 case DW_OP_fbreg:
2539 fputc (',', asm_out_file);
2540 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2541 break;
2542
2543 case DW_OP_bregx:
2544 {
2545 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2546 gcc_assert (size_of_uleb128 (r)
2547 == size_of_uleb128 (val1->v.val_unsigned));
2548 fputc (',', asm_out_file);
2549 dw2_asm_output_data_uleb128_raw (r);
2550 fputc (',', asm_out_file);
2551 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2552 }
2553 break;
2554
2555 case DW_OP_implicit_pointer:
2556 case DW_OP_entry_value:
2557 case DW_OP_const_type:
2558 case DW_OP_regval_type:
2559 case DW_OP_deref_type:
2560 case DW_OP_convert:
2561 case DW_OP_reinterpret:
2562 case DW_OP_GNU_implicit_pointer:
2563 case DW_OP_GNU_entry_value:
2564 case DW_OP_GNU_const_type:
2565 case DW_OP_GNU_regval_type:
2566 case DW_OP_GNU_deref_type:
2567 case DW_OP_GNU_convert:
2568 case DW_OP_GNU_reinterpret:
2569 case DW_OP_GNU_parameter_ref:
2570 gcc_unreachable ();
2571 break;
2572
2573 default:
2574 /* Other codes have no operands. */
2575 break;
2576 }
2577 }
2578
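/* Output a whole sequence of location operations in the raw comma-separated
   byte form produced by output_loc_operands_raw above, suitable for use as
   the operand string of a .cfi_escape directive.  */
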
2579 void
2580 output_loc_sequence_raw (dw_loc_descr_ref loc)
2581 {
2582 while (1)
2583 {
2584 enum dwarf_location_atom opc = loc->dw_loc_opc;
2585 /* Output the opcode. */
2586 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2587 {
2588 unsigned r = (opc - DW_OP_breg0);
2589 r = DWARF2_FRAME_REG_OUT (r, 1);
2590 gcc_assert (r <= 31);
2591 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2592 }
2593 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2594 {
2595 unsigned r = (opc - DW_OP_reg0);
2596 r = DWARF2_FRAME_REG_OUT (r, 1);
2597 gcc_assert (r <= 31);
2598 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2599 }
2600 /* Output the opcode. */
2601 fprintf (asm_out_file, "%#x", opc);
2602 output_loc_operands_raw (loc);
2603
2604 if (!loc->dw_loc_next)
2605 break;
2606 loc = loc->dw_loc_next;
2607
2608 fputc (',', asm_out_file);
2609 }
2610 }
2611
2612 /* This function builds a dwarf location descriptor sequence from a
2613 dw_cfa_location, adding the given OFFSET to the result of the
2614 expression. */
2615
2616 struct dw_loc_descr_node *
2617 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2618 {
2619 struct dw_loc_descr_node *head, *tmp;
2620
2621 offset += cfa->offset;
2622
2623 if (cfa->indirect)
2624 {
2625 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2626 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2627 head->dw_loc_oprnd1.val_entry = NULL;
2628 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2629 add_loc_descr (&head, tmp);
2630 if (offset != 0)
2631 {
2632 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2633 add_loc_descr (&head, tmp);
2634 }
2635 }
2636 else
2637 head = new_reg_loc_descr (cfa->reg, offset);
2638
2639 return head;
2640 }
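
/* Editor's note, for illustration only: with a CFA of reg 7 plus 16 and an
   OFFSET argument of 8, the non-indirect branch above yields the single
   operation DW_OP_breg7 24.  In the indirect case the result is
   DW_OP_breg<reg> <base_offset>; DW_OP_deref; DW_OP_plus_uconst <offset>,
   with the DW_OP_plus_uconst omitted when the accumulated offset is 0.  */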
2641
2642 /* This function builds a dwarf location descriptor sequence for
2643 the address at OFFSET from the CFA when stack is aligned to
2644 ALIGNMENT byte. */
2645
2646 struct dw_loc_descr_node *
2647 build_cfa_aligned_loc (dw_cfa_location *cfa,
2648 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2649 {
2650 struct dw_loc_descr_node *head;
2651 unsigned int dwarf_fp
2652 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2653
2654 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2655 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2656 {
2657 head = new_reg_loc_descr (dwarf_fp, 0);
2658 add_loc_descr (&head, int_loc_descriptor (alignment));
2659 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2660 loc_descr_plus_const (&head, offset);
2661 }
2662 else
2663 head = new_reg_loc_descr (dwarf_fp, offset);
2664 return head;
2665 }
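
/* Editor's note, for illustration only: in the re-aligned case the sequence
   built above is DW_OP_breg<fp> 0; <constant ALIGNMENT>; DW_OP_and; followed
   by the addition of OFFSET via loc_descr_plus_const, i.e. the caller is
   expected to supply an ALIGNMENT value suitable for masking the frame
   pointer.  Otherwise the result is simply DW_OP_breg<fp> OFFSET.  */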
2666 \f
2667 /* And now, the support for symbolic debugging information. */
2668
2669 /* .debug_str support. */
2670
2671 static void dwarf2out_init (const char *);
2672 static void dwarf2out_finish (const char *);
2673 static void dwarf2out_early_finish (const char *);
2674 static void dwarf2out_assembly_start (void);
2675 static void dwarf2out_define (unsigned int, const char *);
2676 static void dwarf2out_undef (unsigned int, const char *);
2677 static void dwarf2out_start_source_file (unsigned, const char *);
2678 static void dwarf2out_end_source_file (unsigned);
2679 static void dwarf2out_function_decl (tree);
2680 static void dwarf2out_begin_block (unsigned, unsigned);
2681 static void dwarf2out_end_block (unsigned, unsigned);
2682 static bool dwarf2out_ignore_block (const_tree);
2683 static void dwarf2out_early_global_decl (tree);
2684 static void dwarf2out_late_global_decl (tree);
2685 static void dwarf2out_type_decl (tree, int);
2686 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2687 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2688 dw_die_ref);
2689 static void dwarf2out_abstract_function (tree);
2690 static void dwarf2out_var_location (rtx_insn *);
2691 static void dwarf2out_size_function (tree);
2692 static void dwarf2out_begin_function (tree);
2693 static void dwarf2out_end_function (unsigned int);
2694 static void dwarf2out_register_main_translation_unit (tree unit);
2695 static void dwarf2out_set_name (tree, tree);
2696 static void dwarf2out_register_external_die (tree decl, const char *sym,
2697 unsigned HOST_WIDE_INT off);
2698 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2699 unsigned HOST_WIDE_INT *off);
2700
2701 /* The debug hooks structure. */
2702
2703 const struct gcc_debug_hooks dwarf2_debug_hooks =
2704 {
2705 dwarf2out_init,
2706 dwarf2out_finish,
2707 dwarf2out_early_finish,
2708 dwarf2out_assembly_start,
2709 dwarf2out_define,
2710 dwarf2out_undef,
2711 dwarf2out_start_source_file,
2712 dwarf2out_end_source_file,
2713 dwarf2out_begin_block,
2714 dwarf2out_end_block,
2715 dwarf2out_ignore_block,
2716 dwarf2out_source_line,
2717 dwarf2out_begin_prologue,
2718 #if VMS_DEBUGGING_INFO
2719 dwarf2out_vms_end_prologue,
2720 dwarf2out_vms_begin_epilogue,
2721 #else
2722 debug_nothing_int_charstar,
2723 debug_nothing_int_charstar,
2724 #endif
2725 dwarf2out_end_epilogue,
2726 dwarf2out_begin_function,
2727 dwarf2out_end_function, /* end_function */
2728 dwarf2out_register_main_translation_unit,
2729 dwarf2out_function_decl, /* function_decl */
2730 dwarf2out_early_global_decl,
2731 dwarf2out_late_global_decl,
2732 dwarf2out_type_decl, /* type_decl */
2733 dwarf2out_imported_module_or_decl,
2734 dwarf2out_die_ref_for_decl,
2735 dwarf2out_register_external_die,
2736 debug_nothing_tree, /* deferred_inline_function */
2737 /* The DWARF 2 backend tries to reduce debugging bloat by not
2738 emitting the abstract description of inline functions until
2739 something tries to reference them. */
2740 dwarf2out_abstract_function, /* outlining_inline_function */
2741 debug_nothing_rtx_code_label, /* label */
2742 debug_nothing_int, /* handle_pch */
2743 dwarf2out_var_location,
2744 dwarf2out_size_function, /* size_function */
2745 dwarf2out_switch_text_section,
2746 dwarf2out_set_name,
2747 1, /* start_end_main_source_file */
2748 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2749 };
2750
2751 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2752 {
2753 dwarf2out_init,
2754 debug_nothing_charstar,
2755 debug_nothing_charstar,
2756 dwarf2out_assembly_start,
2757 debug_nothing_int_charstar,
2758 debug_nothing_int_charstar,
2759 debug_nothing_int_charstar,
2760 debug_nothing_int,
2761 debug_nothing_int_int, /* begin_block */
2762 debug_nothing_int_int, /* end_block */
2763 debug_true_const_tree, /* ignore_block */
2764 dwarf2out_source_line, /* source_line */
2765 debug_nothing_int_int_charstar, /* begin_prologue */
2766 debug_nothing_int_charstar, /* end_prologue */
2767 debug_nothing_int_charstar, /* begin_epilogue */
2768 debug_nothing_int_charstar, /* end_epilogue */
2769 debug_nothing_tree, /* begin_function */
2770 debug_nothing_int, /* end_function */
2771 debug_nothing_tree, /* register_main_translation_unit */
2772 debug_nothing_tree, /* function_decl */
2773 debug_nothing_tree, /* early_global_decl */
2774 debug_nothing_tree, /* late_global_decl */
2775 debug_nothing_tree_int, /* type_decl */
2776 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2777 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2778 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2779 debug_nothing_tree, /* deferred_inline_function */
2780 debug_nothing_tree, /* outlining_inline_function */
2781 debug_nothing_rtx_code_label, /* label */
2782 debug_nothing_int, /* handle_pch */
2783 debug_nothing_rtx_insn, /* var_location */
2784 debug_nothing_tree, /* size_function */
2785 debug_nothing_void, /* switch_text_section */
2786 debug_nothing_tree_tree, /* set_name */
2787 0, /* start_end_main_source_file */
2788 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2789 };
2790 \f
2791 /* NOTE: In the comments in this file, many references are made to
2792 "Debugging Information Entries". This term is abbreviated as `DIE'
2793 throughout the remainder of this file. */
2794
2795 /* An internal representation of the DWARF output is built, and then
2796 walked to generate the DWARF debugging info. The walk of the internal
2797 representation is done after the entire program has been compiled.
2798 The types below are used to describe the internal representation. */
2799
2800 /* Whether to put type DIEs into their own .debug_types section instead
2801 of making them part of the .debug_info section.  This is only done for
2802 DWARF version 4 or higher, and only when the user has not disabled it
2803 via -fno-debug-types-section.  It is more efficient to put them in
2804 separate comdat sections since the linker will then be able to
2805 remove duplicates.  But not all tools support .debug_types sections
2806 yet.  For DWARF version 5 or higher .debug_types no longer exists;
2807 a DW_UT_type unit is emitted into the .debug_info section instead.  */
2808
2809 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2810
2811 /* Various DIE's use offsets relative to the beginning of the
2812 .debug_info section to refer to each other. */
2813
2814 typedef long int dw_offset;
2815
2816 struct comdat_type_node;
2817
2818 /* The entries in the line_info table more-or-less mirror the opcodes
2819 that are used in the real dwarf line table. Arrays of these entries
2820 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2821 supported. */
2822
2823 enum dw_line_info_opcode {
2824 /* Emit DW_LNE_set_address; the operand is the label index. */
2825 LI_set_address,
2826
2827 /* Emit a row to the matrix with the given line. This may be done
2828 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2829 special opcodes. */
2830 LI_set_line,
2831
2832 /* Emit a DW_LNS_set_file. */
2833 LI_set_file,
2834
2835 /* Emit a DW_LNS_set_column. */
2836 LI_set_column,
2837
2838 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2839 LI_negate_stmt,
2840
2841 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2842 LI_set_prologue_end,
2843 LI_set_epilogue_begin,
2844
2845 /* Emit a DW_LNE_set_discriminator. */
2846 LI_set_discriminator
2847 };
2848
2849 typedef struct GTY(()) dw_line_info_struct {
2850 enum dw_line_info_opcode opcode;
2851 unsigned int val;
2852 } dw_line_info_entry;
2853
2854
2855 struct GTY(()) dw_line_info_table {
2856 /* The label that marks the end of this section. */
2857 const char *end_label;
2858
2859 /* The values for the last row of the matrix, as collected in the table.
2860 These are used to minimize the changes to the next row. */
2861 unsigned int file_num;
2862 unsigned int line_num;
2863 unsigned int column_num;
2864 int discrim_num;
2865 bool is_stmt;
2866 bool in_use;
2867
2868 vec<dw_line_info_entry, va_gc> *entries;
2869 };
2870
2871
2872 /* Each DIE attribute has a field specifying the attribute kind,
2873 a link to the next attribute in the chain, and an attribute value.
2874 Attributes are typically linked below the DIE they modify. */
2875
2876 typedef struct GTY(()) dw_attr_struct {
2877 enum dwarf_attribute dw_attr;
2878 dw_val_node dw_attr_val;
2879 }
2880 dw_attr_node;
2881
2882
2883 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2884 The children of each node form a circular list linked by
2885 die_sib. die_child points to the node *before* the "first" child node. */
2886
2887 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2888 union die_symbol_or_type_node
2889 {
2890 const char * GTY ((tag ("0"))) die_symbol;
2891 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2892 }
2893 GTY ((desc ("%0.comdat_type_p"))) die_id;
2894 vec<dw_attr_node, va_gc> *die_attr;
2895 dw_die_ref die_parent;
2896 dw_die_ref die_child;
2897 dw_die_ref die_sib;
2898 dw_die_ref die_definition; /* ref from a specification to its definition */
2899 dw_offset die_offset;
2900 unsigned long die_abbrev;
2901 int die_mark;
2902 unsigned int decl_id;
2903 enum dwarf_tag die_tag;
2904 /* Die is used and must not be pruned as unused. */
2905 BOOL_BITFIELD die_perennial_p : 1;
2906 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2907 /* Set for an external reference to die_symbol when die_offset contains
2908 an extra offset relative to that symbol.  */
2909 BOOL_BITFIELD with_offset : 1;
2910 /* Whether this DIE was removed from the DIE tree, for example via
2911 prune_unused_types.  Such DIEs are not considered present by the
2912 DIE lookup routines.  */
2913 BOOL_BITFIELD removed : 1;
2914 /* Lots of spare bits. */
2915 }
2916 die_node;
2917
2918 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2919 static bool early_dwarf;
2920 static bool early_dwarf_finished;
2921 struct set_early_dwarf {
2922 bool saved;
2923 set_early_dwarf () : saved(early_dwarf)
2924 {
2925 gcc_assert (! early_dwarf_finished);
2926 early_dwarf = true;
2927 }
2928 ~set_early_dwarf () { early_dwarf = saved; }
2929 };
2930
2931 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2932 #define FOR_EACH_CHILD(die, c, expr) do { \
2933 c = die->die_child; \
2934 if (c) do { \
2935 c = c->die_sib; \
2936 expr; \
2937 } while (c != die->die_child); \
2938 } while (0)
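
/* Editor's note, for illustration only (not part of this file): because the
   die_sib links form a circular list and die_child points at the last child,
   the macro above visits each child exactly once, starting with the first.
   A minimal sketch of a use, assuming the declarations in this file:

     static int
     example_count_children (dw_die_ref die)
     {
       dw_die_ref c;
       int n = 0;
       FOR_EACH_CHILD (die, c, n++);
       return n;
     }
*/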
2939
2940 /* The pubname structure */
2941
2942 typedef struct GTY(()) pubname_struct {
2943 dw_die_ref die;
2944 const char *name;
2945 }
2946 pubname_entry;
2947
2948
2949 struct GTY(()) dw_ranges {
2950 const char *label;
2951 /* If this is positive, it's a block number, otherwise it's a
2952 bitwise-negated index into dw_ranges_by_label. */
2953 int num;
2954 /* Index for the range list for DW_FORM_rnglistx. */
2955 unsigned int idx : 31;
2956 /* True if this range might possibly be in a different section
2957 from the previous entry.  */
2958 unsigned int maybe_new_sec : 1;
2959 };
2960
2961 /* A structure to hold a macinfo entry. */
2962
2963 typedef struct GTY(()) macinfo_struct {
2964 unsigned char code;
2965 unsigned HOST_WIDE_INT lineno;
2966 const char *info;
2967 }
2968 macinfo_entry;
2969
2970
2971 struct GTY(()) dw_ranges_by_label {
2972 const char *begin;
2973 const char *end;
2974 };
2975
2976 /* The comdat type node structure. */
2977 struct GTY(()) comdat_type_node
2978 {
2979 dw_die_ref root_die;
2980 dw_die_ref type_die;
2981 dw_die_ref skeleton_die;
2982 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2983 comdat_type_node *next;
2984 };
2985
2986 /* A list of DIEs for which we can't determine ancestry (parent_die
2987 field) just yet. Later in dwarf2out_finish we will fill in the
2988 missing bits. */
2989 typedef struct GTY(()) limbo_die_struct {
2990 dw_die_ref die;
2991 /* The tree for which this DIE was created. We use this to
2992 determine ancestry later. */
2993 tree created_for;
2994 struct limbo_die_struct *next;
2995 }
2996 limbo_die_node;
2997
2998 typedef struct skeleton_chain_struct
2999 {
3000 dw_die_ref old_die;
3001 dw_die_ref new_die;
3002 struct skeleton_chain_struct *parent;
3003 }
3004 skeleton_chain_node;
3005
3006 /* Define a macro which returns nonzero for a TYPE_DECL which was
3007 implicitly generated for a type.
3008
3009 Note that, unlike the C front-end (which generates a NULL-named
3010 TYPE_DECL node for each complete tagged type, each array type,
3011 and each function type node created), the C++ front-end generates
3012 a _named_ TYPE_DECL node for each tagged type node created.
3013 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3014 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3015 front-end, but for each type, tagged or not. */
3016
3017 #define TYPE_DECL_IS_STUB(decl) \
3018 (DECL_NAME (decl) == NULL_TREE \
3019 || (DECL_ARTIFICIAL (decl) \
3020 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3021 /* This is necessary for stub decls that \
3022 appear in nested inline functions. */ \
3023 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3024 && (decl_ultimate_origin (decl) \
3025 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3026
3027 /* Information concerning the compilation unit's programming
3028 language, and compiler version. */
3029
3030 /* Fixed size portion of the DWARF compilation unit header. */
3031 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3032 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3033 + (dwarf_version >= 5 ? 4 : 3))
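
/* Editor's note: the 4-vs-3 above reflects the fixed fields that follow the
   initial length and the .debug_abbrev offset: a 2-byte version plus a
   1-byte address size before DWARF 5, with an additional 1-byte unit type
   field from DWARF 5 onwards.  */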
3034
3035 /* Fixed size portion of the DWARF comdat type unit header. */
3036 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3037 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3038 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3039
3040 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3041 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3042 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3043
3044 /* Fixed size portion of public names info. */
3045 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3046
3047 /* Fixed size portion of the address range info. */
3048 #define DWARF_ARANGES_HEADER_SIZE \
3049 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3050 DWARF2_ADDR_SIZE * 2) \
3051 - DWARF_INITIAL_LENGTH_SIZE)
3052
3053 /* Size of the padding portion in the address range info.  It must be
3054 aligned to twice the pointer size.  */
3055 #define DWARF_ARANGES_PAD_SIZE \
3056 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3057 DWARF2_ADDR_SIZE * 2) \
3058 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3059
3060 /* Use assembler line directives if available. */
3061 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3062 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3063 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3064 #else
3065 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3066 #endif
3067 #endif
3068
3069 /* Minimum line offset in a special line info opcode.
3070 This value was chosen to give a reasonable range of values.  */
3071 #define DWARF_LINE_BASE -10
3072
3073 /* First special line opcode - leave room for the standard opcodes. */
3074 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3075
3076 /* Range of line offsets in a special line info opcode.  */
3077 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
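
/* Editor's note, for illustration only: per the DWARF line-number program
   encoding, a special opcode covers a line delta D in
   [DWARF_LINE_BASE, DWARF_LINE_BASE + DWARF_LINE_RANGE) together with an
   operation advance A whenever
     (D - DWARF_LINE_BASE) + DWARF_LINE_RANGE * A + DWARF_LINE_OPCODE_BASE
   still fits in a byte; e.g. with the values above, a line advance of 5
   with no operation advance encodes as the single special opcode
   (5 - -10) + 242 * 0 + 13 = 28.  */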
3078
3079 /* Flag that indicates the initial value of the is_stmt_start flag.
3080 In the present implementation, we do not mark any lines as
3081 the beginning of a source statement, because that information
3082 is not made available by the GCC front-end. */
3083 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3084
3085 /* Maximum number of operations per instruction bundle. */
3086 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3087 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3088 #endif
3089
3090 /* This location is used by calc_die_sizes() to keep track of
3091 the offset of each DIE within the .debug_info section.  */
3092 static unsigned long next_die_offset;
3093
3094 /* Record the root of the DIE's built for the current compilation unit. */
3095 static GTY(()) dw_die_ref single_comp_unit_die;
3096
3097 /* A list of type DIEs that have been separated into comdat sections. */
3098 static GTY(()) comdat_type_node *comdat_type_list;
3099
3100 /* A list of CU DIEs that have been separated. */
3101 static GTY(()) limbo_die_node *cu_die_list;
3102
3103 /* A list of DIEs with a NULL parent waiting to be relocated. */
3104 static GTY(()) limbo_die_node *limbo_die_list;
3105
3106 /* A list of DIEs for which we may have to generate
3107 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3108 static GTY(()) limbo_die_node *deferred_asm_name;
3109
3110 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3111 {
3112 typedef const char *compare_type;
3113
3114 static hashval_t hash (dwarf_file_data *);
3115 static bool equal (dwarf_file_data *, const char *);
3116 };
3117
3118 /* Filenames referenced by this compilation unit. */
3119 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3120
3121 struct decl_die_hasher : ggc_ptr_hash<die_node>
3122 {
3123 typedef tree compare_type;
3124
3125 static hashval_t hash (die_node *);
3126 static bool equal (die_node *, tree);
3127 };
3128 /* A hash table of references to DIE's that describe declarations.
3129 The key is a DECL_UID() which is a unique number identifying each decl. */
3130 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3131
3132 struct GTY ((for_user)) variable_value_struct {
3133 unsigned int decl_id;
3134 vec<dw_die_ref, va_gc> *dies;
3135 };
3136
3137 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3138 {
3139 typedef tree compare_type;
3140
3141 static hashval_t hash (variable_value_struct *);
3142 static bool equal (variable_value_struct *, tree);
3143 };
3144 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3145 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3146 the DECL_CONTEXT of the referenced VAR_DECLs.  */
3147 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3148
3149 struct block_die_hasher : ggc_ptr_hash<die_struct>
3150 {
3151 static hashval_t hash (die_struct *);
3152 static bool equal (die_struct *, die_struct *);
3153 };
3154
3155 /* A hash table of references to DIE's that describe COMMON blocks.
3156 The key is DECL_UID() ^ die_parent. */
3157 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3158
3159 typedef struct GTY(()) die_arg_entry_struct {
3160 dw_die_ref die;
3161 tree arg;
3162 } die_arg_entry;
3163
3164
3165 /* Node of the variable location list. */
3166 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3167 /* Either a NOTE_INSN_VAR_LOCATION, or, for SRA-optimized variables,
3168 an EXPR_LIST chain.  For small bitsizes, the bitsize is encoded
3169 in the mode of the EXPR_LIST node and the first EXPR_LIST operand
3170 is either a NOTE_INSN_VAR_LOCATION for a piece with a known
3171 location or NULL for padding.  For larger bitsizes, the
3172 mode is 0 and the first operand is a CONCAT with the bitsize
3173 as the first CONCAT operand and NOTE_INSN_VAR_LOCATION or
3174 NULL, respectively, as the second operand.  */
3175 rtx GTY (()) loc;
3176 const char * GTY (()) label;
3177 struct var_loc_node * GTY (()) next;
3178 };
3179
3180 /* Variable location list. */
3181 struct GTY ((for_user)) var_loc_list_def {
3182 struct var_loc_node * GTY (()) first;
3183
3184 /* Pointer to the last or last-but-one element of the
3185 chained list.  If the list is empty, both first and
3186 last are NULL.  If the list contains just one node,
3187 or if the last node is certainly not redundant, it points
3188 to the last node; otherwise it points to the last but one.
3189 Do not mark it for GC because it is marked through the chain.  */
3190 struct var_loc_node * GTY ((skip ("%h"))) last;
3191
3192 /* Pointer to the last element before a section switch;
3193 if NULL, either sections weren't switched or first
3194 is already after the section switch.  */
3195 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3196
3197 /* DECL_UID of the variable decl. */
3198 unsigned int decl_id;
3199 };
3200 typedef struct var_loc_list_def var_loc_list;
3201
3202 /* Call argument location list. */
3203 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3204 rtx GTY (()) call_arg_loc_note;
3205 const char * GTY (()) label;
3206 tree GTY (()) block;
3207 bool tail_call_p;
3208 rtx GTY (()) symbol_ref;
3209 struct call_arg_loc_node * GTY (()) next;
3210 };
3211
3212
3213 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3214 {
3215 typedef const_tree compare_type;
3216
3217 static hashval_t hash (var_loc_list *);
3218 static bool equal (var_loc_list *, const_tree);
3219 };
3220
3221 /* Table of decl location linked lists. */
3222 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3223
3224 /* Head and tail of call_arg_loc chain. */
3225 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3226 static struct call_arg_loc_node *call_arg_loc_last;
3227
3228 /* Number of call sites in the current function. */
3229 static int call_site_count = -1;
3230 /* Number of tail call sites in the current function. */
3231 static int tail_call_site_count = -1;
3232
3233 /* A cached location list. */
3234 struct GTY ((for_user)) cached_dw_loc_list_def {
3235 /* The DECL_UID of the decl that this entry describes. */
3236 unsigned int decl_id;
3237
3238 /* The cached location list. */
3239 dw_loc_list_ref loc_list;
3240 };
3241 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3242
3243 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3244 {
3245
3246 typedef const_tree compare_type;
3247
3248 static hashval_t hash (cached_dw_loc_list *);
3249 static bool equal (cached_dw_loc_list *, const_tree);
3250 };
3251
3252 /* Table of cached location lists. */
3253 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3254
3255 /* A vector of references to DIE's that are uniquely identified by their tag,
3256 presence/absence of children DIE's, and list of attribute/value pairs. */
3257 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3258
3259 /* A hash map to remember the stack usage for DWARF procedures.  The value
3260 stored is the difference in stack size from before the DWARF procedure
3261 invocation to after it returns.  In other words, for a DWARF procedure
3262 that consumes N stack slots and pushes M, this stores M - N.  */
3263 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3264
3265 /* A global counter for generating labels for line number data. */
3266 static unsigned int line_info_label_num;
3267
3268 /* The current table to which we should emit line number information
3269 for the current function. This will be set up at the beginning of
3270 assembly for the function. */
3271 static GTY(()) dw_line_info_table *cur_line_info_table;
3272
3273 /* The two default tables of line number info. */
3274 static GTY(()) dw_line_info_table *text_section_line_info;
3275 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3276
3277 /* The set of all non-default tables of line number info. */
3278 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3279
3280 /* A flag to tell pubnames/types export if there is an info section to
3281 refer to. */
3282 static bool info_section_emitted;
3283
3284 /* A pointer to the base of a table that contains a list of publicly
3285 accessible names. */
3286 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3287
3288 /* A pointer to the base of a table that contains a list of publicly
3289 accessible types. */
3290 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3291
3292 /* A pointer to the base of a table that contains a list of macro
3293 defines/undefines (and file start/end markers). */
3294 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3295
3296 /* True if .debug_macinfo or .debug_macros section is going to be
3297 emitted. */
3298 #define have_macinfo \
3299 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3300 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3301 && !macinfo_table->is_empty ())
3302
3303 /* Vector of dies for which we should generate .debug_ranges info. */
3304 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3305
3306 /* Vector of pairs of labels referenced in ranges_table. */
3307 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3308
3309 /* Whether we have location lists that need outputting.  */
3310 static GTY(()) bool have_location_lists;
3311
3312 /* Unique label counter. */
3313 static GTY(()) unsigned int loclabel_num;
3314
3315 /* Unique label counter for point-of-call tables. */
3316 static GTY(()) unsigned int poc_label_num;
3317
3318 /* The last file entry emitted by maybe_emit_file(). */
3319 static GTY(()) struct dwarf_file_data * last_emitted_file;
3320
3321 /* Number of internal labels generated by gen_internal_sym(). */
3322 static GTY(()) int label_num;
3323
3324 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3325
3326 /* Instances of generic types for which we need to generate debug
3327 info that describe their generic parameters and arguments. That
3328 generation needs to happen once all types are properly laid out so
3329 we do it at the end of compilation. */
3330 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3331
3332 /* Offset from the "steady-state frame pointer" to the frame base,
3333 within the current function. */
3334 static HOST_WIDE_INT frame_pointer_fb_offset;
3335 static bool frame_pointer_fb_offset_valid;
3336
3337 static vec<dw_die_ref> base_types;
3338
3339 /* Flags to represent a set of attribute classes for attributes that represent
3340 a scalar value (bounds, pointers, ...). */
3341 enum dw_scalar_form
3342 {
3343 dw_scalar_form_constant = 0x01,
3344 dw_scalar_form_exprloc = 0x02,
3345 dw_scalar_form_reference = 0x04
3346 };
3347
3348 /* Forward declarations for functions defined in this file. */
3349
3350 static int is_pseudo_reg (const_rtx);
3351 static tree type_main_variant (tree);
3352 static int is_tagged_type (const_tree);
3353 static const char *dwarf_tag_name (unsigned);
3354 static const char *dwarf_attr_name (unsigned);
3355 static const char *dwarf_form_name (unsigned);
3356 static tree decl_ultimate_origin (const_tree);
3357 static tree decl_class_context (tree);
3358 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3359 static inline enum dw_val_class AT_class (dw_attr_node *);
3360 static inline unsigned int AT_index (dw_attr_node *);
3361 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3362 static inline unsigned AT_flag (dw_attr_node *);
3363 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3364 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3365 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3366 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3367 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3368 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3369 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3370 unsigned int, unsigned char *);
3371 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3372 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3373 static inline const char *AT_string (dw_attr_node *);
3374 static enum dwarf_form AT_string_form (dw_attr_node *);
3375 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3376 static void add_AT_specification (dw_die_ref, dw_die_ref);
3377 static inline dw_die_ref AT_ref (dw_attr_node *);
3378 static inline int AT_ref_external (dw_attr_node *);
3379 static inline void set_AT_ref_external (dw_attr_node *, int);
3380 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3381 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3382 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3383 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3384 dw_loc_list_ref);
3385 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3386 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3387 static void remove_addr_table_entry (addr_table_entry *);
3388 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3389 static inline rtx AT_addr (dw_attr_node *);
3390 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3391 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3392 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3393 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3394 const char *);
3395 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3396 unsigned HOST_WIDE_INT);
3397 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3398 unsigned long, bool);
3399 static inline const char *AT_lbl (dw_attr_node *);
3400 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3401 static const char *get_AT_low_pc (dw_die_ref);
3402 static const char *get_AT_hi_pc (dw_die_ref);
3403 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3404 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3405 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3406 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3407 static bool is_cxx (void);
3408 static bool is_cxx (const_tree);
3409 static bool is_fortran (void);
3410 static bool is_ada (void);
3411 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3412 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3413 static void add_child_die (dw_die_ref, dw_die_ref);
3414 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3415 static dw_die_ref lookup_type_die (tree);
3416 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3417 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3418 static void equate_type_number_to_die (tree, dw_die_ref);
3419 static dw_die_ref lookup_decl_die (tree);
3420 static var_loc_list *lookup_decl_loc (const_tree);
3421 static void equate_decl_number_to_die (tree, dw_die_ref);
3422 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3423 static void print_spaces (FILE *);
3424 static void print_die (dw_die_ref, FILE *);
3425 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3426 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3427 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3428 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3429 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3430 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3431 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3432 struct md5_ctx *, int *);
3433 struct checksum_attributes;
3434 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3435 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3436 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3437 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3438 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3439 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3440 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3441 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3442 static int is_type_die (dw_die_ref);
3443 static int is_comdat_die (dw_die_ref);
3444 static inline bool is_template_instantiation (dw_die_ref);
3445 static int is_declaration_die (dw_die_ref);
3446 static int should_move_die_to_comdat (dw_die_ref);
3447 static dw_die_ref clone_as_declaration (dw_die_ref);
3448 static dw_die_ref clone_die (dw_die_ref);
3449 static dw_die_ref clone_tree (dw_die_ref);
3450 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3451 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3452 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3453 static dw_die_ref generate_skeleton (dw_die_ref);
3454 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3455 dw_die_ref,
3456 dw_die_ref);
3457 static void break_out_comdat_types (dw_die_ref);
3458 static void copy_decls_for_unworthy_types (dw_die_ref);
3459
3460 static void add_sibling_attributes (dw_die_ref);
3461 static void output_location_lists (dw_die_ref);
3462 static int constant_size (unsigned HOST_WIDE_INT);
3463 static unsigned long size_of_die (dw_die_ref);
3464 static void calc_die_sizes (dw_die_ref);
3465 static void calc_base_type_die_sizes (void);
3466 static void mark_dies (dw_die_ref);
3467 static void unmark_dies (dw_die_ref);
3468 static void unmark_all_dies (dw_die_ref);
3469 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3470 static unsigned long size_of_aranges (void);
3471 static enum dwarf_form value_format (dw_attr_node *);
3472 static void output_value_format (dw_attr_node *);
3473 static void output_abbrev_section (void);
3474 static void output_die_abbrevs (unsigned long, dw_die_ref);
3475 static void output_die (dw_die_ref);
3476 static void output_compilation_unit_header (enum dwarf_unit_type);
3477 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3478 static void output_comdat_type_unit (comdat_type_node *);
3479 static const char *dwarf2_name (tree, int);
3480 static void add_pubname (tree, dw_die_ref);
3481 static void add_enumerator_pubname (const char *, dw_die_ref);
3482 static void add_pubname_string (const char *, dw_die_ref);
3483 static void add_pubtype (tree, dw_die_ref);
3484 static void output_pubnames (vec<pubname_entry, va_gc> *);
3485 static void output_aranges (void);
3486 static unsigned int add_ranges (const_tree, bool = false);
3487 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3488 bool *, bool);
3489 static void output_ranges (void);
3490 static dw_line_info_table *new_line_info_table (void);
3491 static void output_line_info (bool);
3492 static void output_file_names (void);
3493 static dw_die_ref base_type_die (tree, bool);
3494 static int is_base_type (tree);
3495 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3496 static int decl_quals (const_tree);
3497 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3498 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3499 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3500 static int type_is_enum (const_tree);
3501 static unsigned int dbx_reg_number (const_rtx);
3502 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3503 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3504 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3505 enum var_init_status);
3506 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3507 enum var_init_status);
3508 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3509 enum var_init_status);
3510 static int is_based_loc (const_rtx);
3511 static bool resolve_one_addr (rtx *);
3512 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3513 enum var_init_status);
3514 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3515 enum var_init_status);
3516 struct loc_descr_context;
3517 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3518 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3519 static dw_loc_list_ref loc_list_from_tree (tree, int,
3520 struct loc_descr_context *);
3521 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3522 struct loc_descr_context *);
3523 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3524 static tree field_type (const_tree);
3525 static unsigned int simple_type_align_in_bits (const_tree);
3526 static unsigned int simple_decl_align_in_bits (const_tree);
3527 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3528 struct vlr_context;
3529 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3530 HOST_WIDE_INT *);
3531 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3532 dw_loc_list_ref);
3533 static void add_data_member_location_attribute (dw_die_ref, tree,
3534 struct vlr_context *);
3535 static bool add_const_value_attribute (dw_die_ref, rtx);
3536 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3537 static void insert_wide_int (const wide_int &, unsigned char *, int);
3538 static void insert_float (const_rtx, unsigned char *);
3539 static rtx rtl_for_decl_location (tree);
3540 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3541 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3542 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3543 static void add_name_attribute (dw_die_ref, const char *);
3544 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3545 static void add_comp_dir_attribute (dw_die_ref);
3546 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3547 struct loc_descr_context *);
3548 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3549 struct loc_descr_context *);
3550 static void add_subscript_info (dw_die_ref, tree, bool);
3551 static void add_byte_size_attribute (dw_die_ref, tree);
3552 static void add_alignment_attribute (dw_die_ref, tree);
3553 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3554 struct vlr_context *);
3555 static void add_bit_size_attribute (dw_die_ref, tree);
3556 static void add_prototyped_attribute (dw_die_ref, tree);
3557 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3558 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3559 static void add_src_coords_attributes (dw_die_ref, tree);
3560 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3561 static void add_discr_value (dw_die_ref, dw_discr_value *);
3562 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3563 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3564 static void push_decl_scope (tree);
3565 static void pop_decl_scope (void);
3566 static dw_die_ref scope_die_for (tree, dw_die_ref);
3567 static inline int local_scope_p (dw_die_ref);
3568 static inline int class_scope_p (dw_die_ref);
3569 static inline int class_or_namespace_scope_p (dw_die_ref);
3570 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3571 static void add_calling_convention_attribute (dw_die_ref, tree);
3572 static const char *type_tag (const_tree);
3573 static tree member_declared_type (const_tree);
3574 #if 0
3575 static const char *decl_start_label (tree);
3576 #endif
3577 static void gen_array_type_die (tree, dw_die_ref);
3578 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3579 #if 0
3580 static void gen_entry_point_die (tree, dw_die_ref);
3581 #endif
3582 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3583 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3584 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3585 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3586 static void gen_formal_types_die (tree, dw_die_ref);
3587 static void gen_subprogram_die (tree, dw_die_ref);
3588 static void gen_variable_die (tree, tree, dw_die_ref);
3589 static void gen_const_die (tree, dw_die_ref);
3590 static void gen_label_die (tree, dw_die_ref);
3591 static void gen_lexical_block_die (tree, dw_die_ref);
3592 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3593 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3594 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3595 static dw_die_ref gen_compile_unit_die (const char *);
3596 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3597 static void gen_member_die (tree, dw_die_ref);
3598 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3599 enum debug_info_usage);
3600 static void gen_subroutine_type_die (tree, dw_die_ref);
3601 static void gen_typedef_die (tree, dw_die_ref);
3602 static void gen_type_die (tree, dw_die_ref);
3603 static void gen_block_die (tree, dw_die_ref);
3604 static void decls_for_scope (tree, dw_die_ref);
3605 static bool is_naming_typedef_decl (const_tree);
3606 static inline dw_die_ref get_context_die (tree);
3607 static void gen_namespace_die (tree, dw_die_ref);
3608 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3609 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3610 static dw_die_ref force_decl_die (tree);
3611 static dw_die_ref force_type_die (tree);
3612 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3613 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3614 static struct dwarf_file_data * lookup_filename (const char *);
3615 static void retry_incomplete_types (void);
3616 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3617 static void gen_generic_params_dies (tree);
3618 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3619 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3620 static void splice_child_die (dw_die_ref, dw_die_ref);
3621 static int file_info_cmp (const void *, const void *);
3622 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3623 const char *, const char *);
3624 static void output_loc_list (dw_loc_list_ref);
3625 static char *gen_internal_sym (const char *);
3626 static bool want_pubnames (void);
3627
3628 static void prune_unmark_dies (dw_die_ref);
3629 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3630 static void prune_unused_types_mark (dw_die_ref, int);
3631 static void prune_unused_types_walk (dw_die_ref);
3632 static void prune_unused_types_walk_attribs (dw_die_ref);
3633 static void prune_unused_types_prune (dw_die_ref);
3634 static void prune_unused_types (void);
3635 static int maybe_emit_file (struct dwarf_file_data *fd);
3636 static inline const char *AT_vms_delta1 (dw_attr_node *);
3637 static inline const char *AT_vms_delta2 (dw_attr_node *);
3638 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3639 const char *, const char *);
3640 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3641 static void gen_remaining_tmpl_value_param_die_attribute (void);
3642 static bool generic_type_p (tree);
3643 static void schedule_generic_params_dies_gen (tree t);
3644 static void gen_scheduled_generic_parms_dies (void);
3645 static void resolve_variable_values (void);
3646
3647 static const char *comp_dir_string (void);
3648
3649 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3650
3651 /* enum for tracking thread-local variables whose address is really an offset
3652 relative to the TLS pointer, which will need link-time relocation, but will
3653 not need relocation by the DWARF consumer. */
3654
3655 enum dtprel_bool
3656 {
3657 dtprel_false = 0,
3658 dtprel_true = 1
3659 };
3660
3661 /* Return the operator to use for an address of a variable. For dtprel_true, we
3662 use DW_OP_const*. For regular variables, which need both link-time
3663 relocation and consumer-level relocation (e.g., to account for shared objects
3664 loaded at a random address), we use DW_OP_addr*. */
3665
3666 static inline enum dwarf_location_atom
3667 dw_addr_op (enum dtprel_bool dtprel)
3668 {
3669 if (dtprel == dtprel_true)
3670 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3671 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3672 else
3673 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3674 }
3675
3676 /* Return a pointer to a newly allocated address location description. If
3677 dwarf_split_debug_info is true, then record the address with the appropriate
3678 relocation. */
3679 static inline dw_loc_descr_ref
3680 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3681 {
3682 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3683
3684 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3685 ref->dw_loc_oprnd1.v.val_addr = addr;
3686 ref->dtprel = dtprel;
3687 if (dwarf_split_debug_info)
3688 ref->dw_loc_oprnd1.val_entry
3689 = add_addr_table_entry (addr,
3690 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3691 else
3692 ref->dw_loc_oprnd1.val_entry = NULL;
3693
3694 return ref;
3695 }
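
/* Illustrative sketch (not called anywhere in this file): building a
   location description for the address of a symbol `foo' might look like

     dw_loc_descr_ref loc
       = new_addr_loc_descr (gen_rtx_SYMBOL_REF (Pmode, "foo"), dtprel_false);
     add_AT_loc (die, DW_AT_location, loc);

   dw_addr_op then selects DW_OP_addr (or DW_OP_GNU_addr_index under
   -gsplit-dwarf), whereas dtprel_true would select a DW_OP_const* form
   holding a TLS offset.  The symbol name is a made-up example.  */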
3696
3697 /* Section names used to hold DWARF debugging information. */
3698
3699 #ifndef DEBUG_INFO_SECTION
3700 #define DEBUG_INFO_SECTION ".debug_info"
3701 #endif
3702 #ifndef DEBUG_DWO_INFO_SECTION
3703 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3704 #endif
3705 #ifndef DEBUG_LTO_INFO_SECTION
3706 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3707 #endif
3708 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3709 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3710 #endif
3711 #ifndef DEBUG_ABBREV_SECTION
3712 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3713 #endif
3714 #ifndef DEBUG_LTO_ABBREV_SECTION
3715 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3716 #endif
3717 #ifndef DEBUG_DWO_ABBREV_SECTION
3718 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3719 #endif
3720 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3721 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3722 #endif
3723 #ifndef DEBUG_ARANGES_SECTION
3724 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3725 #endif
3726 #ifndef DEBUG_ADDR_SECTION
3727 #define DEBUG_ADDR_SECTION ".debug_addr"
3728 #endif
3729 #ifndef DEBUG_MACINFO_SECTION
3730 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3731 #endif
3732 #ifndef DEBUG_LTO_MACINFO_SECTION
3733 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3734 #endif
3735 #ifndef DEBUG_DWO_MACINFO_SECTION
3736 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3737 #endif
3738 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3739 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3740 #endif
3741 #ifndef DEBUG_MACRO_SECTION
3742 #define DEBUG_MACRO_SECTION ".debug_macro"
3743 #endif
3744 #ifndef DEBUG_LTO_MACRO_SECTION
3745 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3746 #endif
3747 #ifndef DEBUG_DWO_MACRO_SECTION
3748 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3749 #endif
3750 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3751 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3752 #endif
3753 #ifndef DEBUG_LINE_SECTION
3754 #define DEBUG_LINE_SECTION ".debug_line"
3755 #endif
3756 #ifndef DEBUG_LTO_LINE_SECTION
3757 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3758 #endif
3759 #ifndef DEBUG_DWO_LINE_SECTION
3760 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3761 #endif
3762 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3763 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3764 #endif
3765 #ifndef DEBUG_LOC_SECTION
3766 #define DEBUG_LOC_SECTION ".debug_loc"
3767 #endif
3768 #ifndef DEBUG_DWO_LOC_SECTION
3769 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3770 #endif
3771 #ifndef DEBUG_LOCLISTS_SECTION
3772 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3773 #endif
3774 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3775 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3776 #endif
3777 #ifndef DEBUG_PUBNAMES_SECTION
3778 #define DEBUG_PUBNAMES_SECTION \
3779 ((debug_generate_pub_sections == 2) \
3780 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3781 #endif
3782 #ifndef DEBUG_PUBTYPES_SECTION
3783 #define DEBUG_PUBTYPES_SECTION \
3784 ((debug_generate_pub_sections == 2) \
3785 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3786 #endif
3787 #ifndef DEBUG_STR_OFFSETS_SECTION
3788 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3789 #endif
3790 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3791 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3792 #endif
3793 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
3794 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
3795 #endif
3796 #ifndef DEBUG_STR_SECTION
3797 #define DEBUG_STR_SECTION ".debug_str"
3798 #endif
3799 #ifndef DEBUG_LTO_STR_SECTION
3800 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
3801 #endif
3802 #ifndef DEBUG_STR_DWO_SECTION
3803 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3804 #endif
3805 #ifndef DEBUG_LTO_STR_DWO_SECTION
3806 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
3807 #endif
3808 #ifndef DEBUG_RANGES_SECTION
3809 #define DEBUG_RANGES_SECTION ".debug_ranges"
3810 #endif
3811 #ifndef DEBUG_RNGLISTS_SECTION
3812 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3813 #endif
3814 #ifndef DEBUG_LINE_STR_SECTION
3815 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3816 #endif
3817 #ifndef DEBUG_LTO_LINE_STR_SECTION
3818 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
3819 #endif
3820
3821 /* Standard ELF section names for compiled code and data. */
3822 #ifndef TEXT_SECTION_NAME
3823 #define TEXT_SECTION_NAME ".text"
3824 #endif
3825
3826 /* Section flags for .debug_str section. */
3827 #define DEBUG_STR_SECTION_FLAGS \
3828 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3829 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3830 : SECTION_DEBUG)
3831
3832 /* Section flags for .debug_str.dwo section. */
3833 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3834
3835 /* Attribute used to refer to the macro section. */
3836 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
3837 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
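
/* Illustrative reading of the macro above: -gdwarf-5 selects DW_AT_macros,
   -gdwarf-4 -gstrict-dwarf selects DW_AT_macro_info, and plain -gdwarf-4
   falls back to the GNU extension DW_AT_GNU_macros.  */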
3838
3839 /* Labels we insert at the beginning of sections, so that they can be
3840 referenced instead of the section names themselves.  */
3841
3842 #ifndef TEXT_SECTION_LABEL
3843 #define TEXT_SECTION_LABEL "Ltext"
3844 #endif
3845 #ifndef COLD_TEXT_SECTION_LABEL
3846 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3847 #endif
3848 #ifndef DEBUG_LINE_SECTION_LABEL
3849 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3850 #endif
3851 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3852 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3853 #endif
3854 #ifndef DEBUG_INFO_SECTION_LABEL
3855 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3856 #endif
3857 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3858 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3859 #endif
3860 #ifndef DEBUG_ABBREV_SECTION_LABEL
3861 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3862 #endif
3863 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3864 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3865 #endif
3866 #ifndef DEBUG_ADDR_SECTION_LABEL
3867 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3868 #endif
3869 #ifndef DEBUG_LOC_SECTION_LABEL
3870 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3871 #endif
3872 #ifndef DEBUG_RANGES_SECTION_LABEL
3873 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3874 #endif
3875 #ifndef DEBUG_MACINFO_SECTION_LABEL
3876 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3877 #endif
3878 #ifndef DEBUG_MACRO_SECTION_LABEL
3879 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3880 #endif
3881 #define SKELETON_COMP_DIE_ABBREV 1
3882 #define SKELETON_TYPE_DIE_ABBREV 2
3883
3884 /* Definitions of defaults for formats and names of various special
3885 (artificial) labels which may be generated within this file (when the -g
3886 option is used and DWARF2_DEBUGGING_INFO is in effect).
3887 If necessary, these may be overridden from within the tm.h file, but
3888 typically, overriding these defaults is unnecessary.  */
3889
3890 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3891 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3892 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3893 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3894 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3895 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3896 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3897 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3898 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3899 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3900 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3901 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3902 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3903 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3904 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3905
3906 #ifndef TEXT_END_LABEL
3907 #define TEXT_END_LABEL "Letext"
3908 #endif
3909 #ifndef COLD_END_LABEL
3910 #define COLD_END_LABEL "Letext_cold"
3911 #endif
3912 #ifndef BLOCK_BEGIN_LABEL
3913 #define BLOCK_BEGIN_LABEL "LBB"
3914 #endif
3915 #ifndef BLOCK_END_LABEL
3916 #define BLOCK_END_LABEL "LBE"
3917 #endif
3918 #ifndef LINE_CODE_LABEL
3919 #define LINE_CODE_LABEL "LM"
3920 #endif
3921
3922 \f
3923 /* Return the root of the DIEs built for the current compilation unit.  */
3924 static dw_die_ref
3925 comp_unit_die (void)
3926 {
3927 if (!single_comp_unit_die)
3928 single_comp_unit_die = gen_compile_unit_die (NULL);
3929 return single_comp_unit_die;
3930 }
3931
3932 /* We allow a language front-end to designate a function that is to be
3933 called to "demangle" any name before it is put into a DIE. */
3934
3935 static const char *(*demangle_name_func) (const char *);
3936
3937 void
3938 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3939 {
3940 demangle_name_func = func;
3941 }
3942
3943 /* Test if rtl node points to a pseudo register. */
3944
3945 static inline int
3946 is_pseudo_reg (const_rtx rtl)
3947 {
3948 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3949 || (GET_CODE (rtl) == SUBREG
3950 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3951 }
3952
3953 /* Return a reference to a type, with its const and volatile qualifiers
3954 removed. */
3955
3956 static inline tree
3957 type_main_variant (tree type)
3958 {
3959 type = TYPE_MAIN_VARIANT (type);
3960
3961 /* ??? There really should be only one main variant among any group of
3962 variants of a given type (and all of the MAIN_VARIANT values for all
3963 members of the group should point to that one type) but sometimes the C
3964 front-end messes this up for array types, so we work around that bug
3965 here. */
3966 if (TREE_CODE (type) == ARRAY_TYPE)
3967 while (type != TYPE_MAIN_VARIANT (type))
3968 type = TYPE_MAIN_VARIANT (type);
3969
3970 return type;
3971 }
3972
3973 /* Return nonzero if the given type node represents a tagged type. */
3974
3975 static inline int
3976 is_tagged_type (const_tree type)
3977 {
3978 enum tree_code code = TREE_CODE (type);
3979
3980 return (code == RECORD_TYPE || code == UNION_TYPE
3981 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3982 }
3983
3984 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
3985
3986 static void
3987 get_ref_die_offset_label (char *label, dw_die_ref ref)
3988 {
3989 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
3990 }
3991
3992 /* Return die_offset of a DIE reference to a base type. */
3993
3994 static unsigned long int
3995 get_base_type_offset (dw_die_ref ref)
3996 {
3997 if (ref->die_offset)
3998 return ref->die_offset;
3999 if (comp_unit_die ()->die_abbrev)
4000 {
4001 calc_base_type_die_sizes ();
4002 gcc_assert (ref->die_offset);
4003 }
4004 return ref->die_offset;
4005 }
4006
4007 /* Return die_offset of a DIE reference other than base type. */
4008
4009 static unsigned long int
4010 get_ref_die_offset (dw_die_ref ref)
4011 {
4012 gcc_assert (ref->die_offset);
4013 return ref->die_offset;
4014 }
4015
4016 /* Convert a DIE tag into its string name. */
4017
4018 static const char *
4019 dwarf_tag_name (unsigned int tag)
4020 {
4021 const char *name = get_DW_TAG_name (tag);
4022
4023 if (name != NULL)
4024 return name;
4025
4026 return "DW_TAG_<unknown>";
4027 }
4028
4029 /* Convert a DWARF attribute code into its string name. */
4030
4031 static const char *
4032 dwarf_attr_name (unsigned int attr)
4033 {
4034 const char *name;
4035
4036 switch (attr)
4037 {
4038 #if VMS_DEBUGGING_INFO
4039 case DW_AT_HP_prologue:
4040 return "DW_AT_HP_prologue";
4041 #else
4042 case DW_AT_MIPS_loop_unroll_factor:
4043 return "DW_AT_MIPS_loop_unroll_factor";
4044 #endif
4045
4046 #if VMS_DEBUGGING_INFO
4047 case DW_AT_HP_epilogue:
4048 return "DW_AT_HP_epilogue";
4049 #else
4050 case DW_AT_MIPS_stride:
4051 return "DW_AT_MIPS_stride";
4052 #endif
4053 }
4054
4055 name = get_DW_AT_name (attr);
4056
4057 if (name != NULL)
4058 return name;
4059
4060 return "DW_AT_<unknown>";
4061 }
4062
4063 /* Convert a DWARF value form code into its string name. */
4064
4065 static const char *
4066 dwarf_form_name (unsigned int form)
4067 {
4068 const char *name = get_DW_FORM_name (form);
4069
4070 if (name != NULL)
4071 return name;
4072
4073 return "DW_FORM_<unknown>";
4074 }
4075 \f
4076 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4077 instance of an inlined instance of a decl which is local to an inline
4078 function, so we have to trace all of the way back through the origin chain
4079 to find out what sort of node actually served as the original seed for the
4080 given block. */
4081
4082 static tree
4083 decl_ultimate_origin (const_tree decl)
4084 {
4085 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4086 return NULL_TREE;
4087
4088 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4089 we're trying to output the abstract instance of this function. */
4090 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4091 return NULL_TREE;
4092
4093 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4094 most distant ancestor, this should never happen. */
4095 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4096
4097 return DECL_ABSTRACT_ORIGIN (decl);
4098 }
4099
4100 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4101 of a virtual function may refer to a base class, so we check the 'this'
4102 parameter. */
4103
4104 static tree
4105 decl_class_context (tree decl)
4106 {
4107 tree context = NULL_TREE;
4108
4109 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4110 context = DECL_CONTEXT (decl);
4111 else
4112 context = TYPE_MAIN_VARIANT
4113 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4114
4115 if (context && !TYPE_P (context))
4116 context = NULL_TREE;
4117
4118 return context;
4119 }
4120 \f
4121 /* Add an attribute/value pair to a DIE. */
4122
4123 static inline void
4124 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4125 {
4126 /* Maybe this should be an assert? */
4127 if (die == NULL)
4128 return;
4129
4130 if (flag_checking)
4131 {
4132 /* Check we do not add duplicate attrs. Can't use get_AT here
4133 because that recurses to the specification/abstract origin DIE. */
4134 dw_attr_node *a;
4135 unsigned ix;
4136 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4137 gcc_assert (a->dw_attr != attr->dw_attr);
4138 }
4139
4140 vec_safe_reserve (die->die_attr, 1);
4141 vec_safe_push (die->die_attr, *attr);
4142 }
4143
4144 static inline enum dw_val_class
4145 AT_class (dw_attr_node *a)
4146 {
4147 return a->dw_attr_val.val_class;
4148 }
4149
4150 /* Return the index for any attribute that will be referenced with a
4151 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index.  String indices are
4152 stored in dw_attr_val.v.val_str so that they can be reference-counted
4153 for pruning.  */
4154
4155 static inline unsigned int
4156 AT_index (dw_attr_node *a)
4157 {
4158 if (AT_class (a) == dw_val_class_str)
4159 return a->dw_attr_val.v.val_str->index;
4160 else if (a->dw_attr_val.val_entry != NULL)
4161 return a->dw_attr_val.val_entry->index;
4162 return NOT_INDEXED;
4163 }
4164
4165 /* Add a flag value attribute to a DIE. */
4166
4167 static inline void
4168 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4169 {
4170 dw_attr_node attr;
4171
4172 attr.dw_attr = attr_kind;
4173 attr.dw_attr_val.val_class = dw_val_class_flag;
4174 attr.dw_attr_val.val_entry = NULL;
4175 attr.dw_attr_val.v.val_flag = flag;
4176 add_dwarf_attr (die, &attr);
4177 }
4178
4179 static inline unsigned
4180 AT_flag (dw_attr_node *a)
4181 {
4182 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4183 return a->dw_attr_val.v.val_flag;
4184 }
4185
4186 /* Add a signed integer attribute value to a DIE. */
4187
4188 static inline void
4189 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4190 {
4191 dw_attr_node attr;
4192
4193 attr.dw_attr = attr_kind;
4194 attr.dw_attr_val.val_class = dw_val_class_const;
4195 attr.dw_attr_val.val_entry = NULL;
4196 attr.dw_attr_val.v.val_int = int_val;
4197 add_dwarf_attr (die, &attr);
4198 }
4199
4200 static inline HOST_WIDE_INT
4201 AT_int (dw_attr_node *a)
4202 {
4203 gcc_assert (a && (AT_class (a) == dw_val_class_const
4204 || AT_class (a) == dw_val_class_const_implicit));
4205 return a->dw_attr_val.v.val_int;
4206 }
4207
4208 /* Add an unsigned integer attribute value to a DIE. */
4209
4210 static inline void
4211 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4212 unsigned HOST_WIDE_INT unsigned_val)
4213 {
4214 dw_attr_node attr;
4215
4216 attr.dw_attr = attr_kind;
4217 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4218 attr.dw_attr_val.val_entry = NULL;
4219 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4220 add_dwarf_attr (die, &attr);
4221 }
4222
4223 static inline unsigned HOST_WIDE_INT
4224 AT_unsigned (dw_attr_node *a)
4225 {
4226 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4227 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4228 return a->dw_attr_val.v.val_unsigned;
4229 }
4230
4231 /* Add an unsigned wide integer attribute value to a DIE. */
4232
4233 static inline void
4234 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4235 const wide_int& w)
4236 {
4237 dw_attr_node attr;
4238
4239 attr.dw_attr = attr_kind;
4240 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4241 attr.dw_attr_val.val_entry = NULL;
4242 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4243 *attr.dw_attr_val.v.val_wide = w;
4244 add_dwarf_attr (die, &attr);
4245 }
4246
4247 /* Add an unsigned double integer attribute value to a DIE. */
4248
4249 static inline void
4250 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4251 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4252 {
4253 dw_attr_node attr;
4254
4255 attr.dw_attr = attr_kind;
4256 attr.dw_attr_val.val_class = dw_val_class_const_double;
4257 attr.dw_attr_val.val_entry = NULL;
4258 attr.dw_attr_val.v.val_double.high = high;
4259 attr.dw_attr_val.v.val_double.low = low;
4260 add_dwarf_attr (die, &attr);
4261 }
4262
4263 /* Add an array of LENGTH elements, each ELT_SIZE bytes, as an attribute value to a DIE.  */
4264
4265 static inline void
4266 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4267 unsigned int length, unsigned int elt_size, unsigned char *array)
4268 {
4269 dw_attr_node attr;
4270
4271 attr.dw_attr = attr_kind;
4272 attr.dw_attr_val.val_class = dw_val_class_vec;
4273 attr.dw_attr_val.val_entry = NULL;
4274 attr.dw_attr_val.v.val_vec.length = length;
4275 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4276 attr.dw_attr_val.v.val_vec.array = array;
4277 add_dwarf_attr (die, &attr);
4278 }
4279
4280 /* Add an 8-byte data attribute value to a DIE. */
4281
4282 static inline void
4283 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4284 unsigned char data8[8])
4285 {
4286 dw_attr_node attr;
4287
4288 attr.dw_attr = attr_kind;
4289 attr.dw_attr_val.val_class = dw_val_class_data8;
4290 attr.dw_attr_val.val_entry = NULL;
4291 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4292 add_dwarf_attr (die, &attr);
4293 }
4294
4295 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4296 dwarf_split_debug_info, address attributes in dies destined for the
4297 final executable have force_direct set to avoid using indexed
4298 references. */
4299
4300 static inline void
4301 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4302 bool force_direct)
4303 {
4304 dw_attr_node attr;
4305 char * lbl_id;
4306
4307 lbl_id = xstrdup (lbl_low);
4308 attr.dw_attr = DW_AT_low_pc;
4309 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4310 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4311 if (dwarf_split_debug_info && !force_direct)
4312 attr.dw_attr_val.val_entry
4313 = add_addr_table_entry (lbl_id, ate_kind_label);
4314 else
4315 attr.dw_attr_val.val_entry = NULL;
4316 add_dwarf_attr (die, &attr);
4317
4318 attr.dw_attr = DW_AT_high_pc;
4319 if (dwarf_version < 4)
4320 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4321 else
4322 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4323 lbl_id = xstrdup (lbl_high);
4324 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4325 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4326 && dwarf_split_debug_info && !force_direct)
4327 attr.dw_attr_val.val_entry
4328 = add_addr_table_entry (lbl_id, ate_kind_label);
4329 else
4330 attr.dw_attr_val.val_entry = NULL;
4331 add_dwarf_attr (die, &attr);
4332 }
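
/* A minimal usage sketch (the labels are hypothetical): given the begin and
   end labels of a function's text,

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   DW_AT_low_pc is emitted as a label reference, and DW_AT_high_pc either as a
   second label (DWARF < 4) or as an offset from the low pc (DWARF >= 4),
   using .debug_addr indices under -gsplit-dwarf unless force_direct.  */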
4333
4334 /* Hash and equality functions for debug_str_hash. */
4335
4336 hashval_t
4337 indirect_string_hasher::hash (indirect_string_node *x)
4338 {
4339 return htab_hash_string (x->str);
4340 }
4341
4342 bool
4343 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4344 {
4345 return strcmp (x1->str, x2) == 0;
4346 }
4347
4348 /* Add STR to the given string hash table. */
4349
4350 static struct indirect_string_node *
4351 find_AT_string_in_table (const char *str,
4352 hash_table<indirect_string_hasher> *table)
4353 {
4354 struct indirect_string_node *node;
4355
4356 indirect_string_node **slot
4357 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4358 if (*slot == NULL)
4359 {
4360 node = ggc_cleared_alloc<indirect_string_node> ();
4361 node->str = ggc_strdup (str);
4362 *slot = node;
4363 }
4364 else
4365 node = *slot;
4366
4367 node->refcount++;
4368 return node;
4369 }
4370
4371 /* Add STR to the indirect string hash table. */
4372
4373 static struct indirect_string_node *
4374 find_AT_string (const char *str)
4375 {
4376 if (! debug_str_hash)
4377 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4378
4379 return find_AT_string_in_table (str, debug_str_hash);
4380 }
4381
4382 /* Add a string attribute value to a DIE. */
4383
4384 static inline void
4385 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4386 {
4387 dw_attr_node attr;
4388 struct indirect_string_node *node;
4389
4390 node = find_AT_string (str);
4391
4392 attr.dw_attr = attr_kind;
4393 attr.dw_attr_val.val_class = dw_val_class_str;
4394 attr.dw_attr_val.val_entry = NULL;
4395 attr.dw_attr_val.v.val_str = node;
4396 add_dwarf_attr (die, &attr);
4397 }
4398
4399 static inline const char *
4400 AT_string (dw_attr_node *a)
4401 {
4402 gcc_assert (a && AT_class (a) == dw_val_class_str);
4403 return a->dw_attr_val.v.val_str->str;
4404 }
4405
4406 /* Call this function directly to bypass AT_string_form's logic to put
4407 the string inline in the die. */
4408
4409 static void
4410 set_indirect_string (struct indirect_string_node *node)
4411 {
4412 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4413 /* Already indirect is a no op. */
4414 if (node->form == DW_FORM_strp
4415 || node->form == DW_FORM_line_strp
4416 || node->form == DW_FORM_GNU_str_index)
4417 {
4418 gcc_assert (node->label);
4419 return;
4420 }
4421 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4422 ++dw2_string_counter;
4423 node->label = xstrdup (label);
4424
4425 if (!dwarf_split_debug_info)
4426 {
4427 node->form = DW_FORM_strp;
4428 node->index = NOT_INDEXED;
4429 }
4430 else
4431 {
4432 node->form = DW_FORM_GNU_str_index;
4433 node->index = NO_INDEX_ASSIGNED;
4434 }
4435 }
4436
4437 /* A helper function for dwarf2out_finish, called to reset indirect
4438 string decisions done for early LTO dwarf output before fat object
4439 dwarf output. */
4440
4441 int
4442 reset_indirect_string (indirect_string_node **h, void *)
4443 {
4444 struct indirect_string_node *node = *h;
4445 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4446 {
4447 free (node->label);
4448 node->label = NULL;
4449 node->form = (dwarf_form) 0;
4450 node->index = 0;
4451 }
4452 return 1;
4453 }
4454
4455 /* Find out whether a string should be output inline in DIE
4456 or out-of-line in .debug_str section. */
4457
4458 static enum dwarf_form
4459 find_string_form (struct indirect_string_node *node)
4460 {
4461 unsigned int len;
4462
4463 if (node->form)
4464 return node->form;
4465
4466 len = strlen (node->str) + 1;
4467
4468 /* If the string is no longer than the size of the reference, it is
4469 always better to put it inline.  */
4470 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4471 return node->form = DW_FORM_string;
4472
4473 /* If we cannot expect the linker to merge strings in .debug_str
4474 section, only put it into .debug_str if it is worth even in this
4475 single module. */
4476 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4477 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4478 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4479 return node->form = DW_FORM_string;
4480
4481 set_indirect_string (node);
4482
4483 return node->form;
4484 }
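
/* Worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4 and
   no linker string merging: a 21-byte string referenced twice saves
   (21 - 4) * 2 = 34 bytes of inline copies against the 21 bytes it costs to
   store once, so it is moved to .debug_str; referenced only once it saves
   17 <= 21 and stays inline as DW_FORM_string.  */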
4485
4486 /* Find out whether the string referenced from the attribute should be
4487 output inline in DIE or out-of-line in .debug_str section. */
4488
4489 static enum dwarf_form
4490 AT_string_form (dw_attr_node *a)
4491 {
4492 gcc_assert (a && AT_class (a) == dw_val_class_str);
4493 return find_string_form (a->dw_attr_val.v.val_str);
4494 }
4495
4496 /* Add a DIE reference attribute value to a DIE. */
4497
4498 static inline void
4499 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4500 {
4501 dw_attr_node attr;
4502 gcc_checking_assert (targ_die != NULL);
4503
4504 /* With LTO we can end up trying to reference something we didn't create
4505 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4506 if (targ_die == NULL)
4507 return;
4508
4509 attr.dw_attr = attr_kind;
4510 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4511 attr.dw_attr_val.val_entry = NULL;
4512 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4513 attr.dw_attr_val.v.val_die_ref.external = 0;
4514 add_dwarf_attr (die, &attr);
4515 }
4516
4517 /* Change DIE reference REF to point to NEW_DIE instead. */
4518
4519 static inline void
4520 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4521 {
4522 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4523 ref->dw_attr_val.v.val_die_ref.die = new_die;
4524 ref->dw_attr_val.v.val_die_ref.external = 0;
4525 }
4526
4527 /* Add an AT_specification attribute to a DIE, and also make the back
4528 pointer from the specification to the definition. */
4529
4530 static inline void
4531 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4532 {
4533 add_AT_die_ref (die, DW_AT_specification, targ_die);
4534 gcc_assert (!targ_die->die_definition);
4535 targ_die->die_definition = die;
4536 }
4537
4538 static inline dw_die_ref
4539 AT_ref (dw_attr_node *a)
4540 {
4541 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4542 return a->dw_attr_val.v.val_die_ref.die;
4543 }
4544
4545 static inline int
4546 AT_ref_external (dw_attr_node *a)
4547 {
4548 if (a && AT_class (a) == dw_val_class_die_ref)
4549 return a->dw_attr_val.v.val_die_ref.external;
4550
4551 return 0;
4552 }
4553
4554 static inline void
4555 set_AT_ref_external (dw_attr_node *a, int i)
4556 {
4557 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4558 a->dw_attr_val.v.val_die_ref.external = i;
4559 }
4560
4561 /* Add an FDE reference attribute value to a DIE. */
4562
4563 static inline void
4564 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4565 {
4566 dw_attr_node attr;
4567
4568 attr.dw_attr = attr_kind;
4569 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4570 attr.dw_attr_val.val_entry = NULL;
4571 attr.dw_attr_val.v.val_fde_index = targ_fde;
4572 add_dwarf_attr (die, &attr);
4573 }
4574
4575 /* Add a location description attribute value to a DIE. */
4576
4577 static inline void
4578 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4579 {
4580 dw_attr_node attr;
4581
4582 attr.dw_attr = attr_kind;
4583 attr.dw_attr_val.val_class = dw_val_class_loc;
4584 attr.dw_attr_val.val_entry = NULL;
4585 attr.dw_attr_val.v.val_loc = loc;
4586 add_dwarf_attr (die, &attr);
4587 }
4588
4589 static inline dw_loc_descr_ref
4590 AT_loc (dw_attr_node *a)
4591 {
4592 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4593 return a->dw_attr_val.v.val_loc;
4594 }
4595
4596 static inline void
4597 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4598 {
4599 dw_attr_node attr;
4600
4601 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4602 return;
4603
4604 attr.dw_attr = attr_kind;
4605 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4606 attr.dw_attr_val.val_entry = NULL;
4607 attr.dw_attr_val.v.val_loc_list = loc_list;
4608 add_dwarf_attr (die, &attr);
4609 have_location_lists = true;
4610 }
4611
4612 static inline dw_loc_list_ref
4613 AT_loc_list (dw_attr_node *a)
4614 {
4615 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4616 return a->dw_attr_val.v.val_loc_list;
4617 }
4618
4619 static inline dw_loc_list_ref *
4620 AT_loc_list_ptr (dw_attr_node *a)
4621 {
4622 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4623 return &a->dw_attr_val.v.val_loc_list;
4624 }
4625
4626 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4627 {
4628 static hashval_t hash (addr_table_entry *);
4629 static bool equal (addr_table_entry *, addr_table_entry *);
4630 };
4631
4632 /* Table of entries into the .debug_addr section. */
4633
4634 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4635
4636 /* Hash an addr_table_entry.  */
4637
4638 hashval_t
4639 addr_hasher::hash (addr_table_entry *a)
4640 {
4641 inchash::hash hstate;
4642 switch (a->kind)
4643 {
4644 case ate_kind_rtx:
4645 hstate.add_int (0);
4646 break;
4647 case ate_kind_rtx_dtprel:
4648 hstate.add_int (1);
4649 break;
4650 case ate_kind_label:
4651 return htab_hash_string (a->addr.label);
4652 default:
4653 gcc_unreachable ();
4654 }
4655 inchash::add_rtx (a->addr.rtl, hstate);
4656 return hstate.end ();
4657 }
4658
4659 /* Determine equality for two addr_table_entry objects.  */
4660
4661 bool
4662 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4663 {
4664 if (a1->kind != a2->kind)
4665 return 0;
4666 switch (a1->kind)
4667 {
4668 case ate_kind_rtx:
4669 case ate_kind_rtx_dtprel:
4670 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4671 case ate_kind_label:
4672 return strcmp (a1->addr.label, a2->addr.label) == 0;
4673 default:
4674 gcc_unreachable ();
4675 }
4676 }
4677
4678 /* Initialize an addr_table_entry. */
4679
4680 void
4681 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4682 {
4683 e->kind = kind;
4684 switch (kind)
4685 {
4686 case ate_kind_rtx:
4687 case ate_kind_rtx_dtprel:
4688 e->addr.rtl = (rtx) addr;
4689 break;
4690 case ate_kind_label:
4691 e->addr.label = (char *) addr;
4692 break;
4693 }
4694 e->refcount = 0;
4695 e->index = NO_INDEX_ASSIGNED;
4696 }
4697
4698 /* Add an address table entry for ADDR of kind KIND to the table.  Defer
4699 setting an index until output time.  */
4700
4701 static addr_table_entry *
4702 add_addr_table_entry (void *addr, enum ate_kind kind)
4703 {
4704 addr_table_entry *node;
4705 addr_table_entry finder;
4706
4707 gcc_assert (dwarf_split_debug_info);
4708 if (! addr_index_table)
4709 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4710 init_addr_table_entry (&finder, kind, addr);
4711 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4712
4713 if (*slot == HTAB_EMPTY_ENTRY)
4714 {
4715 node = ggc_cleared_alloc<addr_table_entry> ();
4716 init_addr_table_entry (node, kind, addr);
4717 *slot = node;
4718 }
4719 else
4720 node = *slot;
4721
4722 node->refcount++;
4723 return node;
4724 }
4725
4726 /* Remove an entry from the addr table by decrementing its refcount.
4727 Strictly, decrementing the refcount would be enough, but the
4728 assertion that the entry is actually in the table has found
4729 bugs. */
4730
4731 static void
4732 remove_addr_table_entry (addr_table_entry *entry)
4733 {
4734 gcc_assert (dwarf_split_debug_info && addr_index_table);
4735 /* After an index is assigned, the table is frozen. */
4736 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4737 entry->refcount--;
4738 }
4739
4740 /* Given a location list, remove all addresses it refers to from the
4741 address_table. */
4742
4743 static void
4744 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4745 {
4746 for (; descr; descr = descr->dw_loc_next)
4747 if (descr->dw_loc_oprnd1.val_entry != NULL)
4748 {
4749 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4750 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4751 }
4752 }
4753
4754 /* A helper function for dwarf2out_finish called through
4755 htab_traverse. Assign an addr_table_entry its index. All entries
4756 must be collected into the table when this function is called,
4757 because the indexing code relies on htab_traverse to traverse nodes
4758 in the same order for each run. */
4759
4760 int
4761 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4762 {
4763 addr_table_entry *node = *h;
4764
4765 /* Don't index unreferenced nodes. */
4766 if (node->refcount == 0)
4767 return 1;
4768
4769 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4770 node->index = *index;
4771 *index += 1;
4772
4773 return 1;
4774 }
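
/* A sketch of how the indexing is driven (the exact call lives in
   dwarf2out_finish and may differ slightly):

     unsigned int index = 0;
     addr_index_table
       ->traverse_noresize<unsigned int *, index_addr_table_entry> (&index);

   after which every referenced entry has a stable index into .debug_addr.  */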
4775
4776 /* Add an address constant attribute value to a DIE. When using
4777 dwarf_split_debug_info, address attributes in dies destined for the
4778 final executable should be direct references--setting the parameter
4779 force_direct ensures this behavior. */
4780
4781 static inline void
4782 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4783 bool force_direct)
4784 {
4785 dw_attr_node attr;
4786
4787 attr.dw_attr = attr_kind;
4788 attr.dw_attr_val.val_class = dw_val_class_addr;
4789 attr.dw_attr_val.v.val_addr = addr;
4790 if (dwarf_split_debug_info && !force_direct)
4791 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4792 else
4793 attr.dw_attr_val.val_entry = NULL;
4794 add_dwarf_attr (die, &attr);
4795 }
4796
4797 /* Get the RTX from an address DIE attribute.  */
4798
4799 static inline rtx
4800 AT_addr (dw_attr_node *a)
4801 {
4802 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4803 return a->dw_attr_val.v.val_addr;
4804 }
4805
4806 /* Add a file attribute value to a DIE. */
4807
4808 static inline void
4809 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4810 struct dwarf_file_data *fd)
4811 {
4812 dw_attr_node attr;
4813
4814 attr.dw_attr = attr_kind;
4815 attr.dw_attr_val.val_class = dw_val_class_file;
4816 attr.dw_attr_val.val_entry = NULL;
4817 attr.dw_attr_val.v.val_file = fd;
4818 add_dwarf_attr (die, &attr);
4819 }
4820
4821 /* Get the dwarf_file_data from a file DIE attribute. */
4822
4823 static inline struct dwarf_file_data *
4824 AT_file (dw_attr_node *a)
4825 {
4826 gcc_assert (a && (AT_class (a) == dw_val_class_file
4827 || AT_class (a) == dw_val_class_file_implicit));
4828 return a->dw_attr_val.v.val_file;
4829 }
4830
4831 /* Add a vms delta attribute value to a DIE. */
4832
4833 static inline void
4834 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4835 const char *lbl1, const char *lbl2)
4836 {
4837 dw_attr_node attr;
4838
4839 attr.dw_attr = attr_kind;
4840 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4841 attr.dw_attr_val.val_entry = NULL;
4842 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4843 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4844 add_dwarf_attr (die, &attr);
4845 }
4846
4847 /* Add a label identifier attribute value to a DIE. */
4848
4849 static inline void
4850 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4851 const char *lbl_id)
4852 {
4853 dw_attr_node attr;
4854
4855 attr.dw_attr = attr_kind;
4856 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4857 attr.dw_attr_val.val_entry = NULL;
4858 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4859 if (dwarf_split_debug_info)
4860 attr.dw_attr_val.val_entry
4861 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4862 ate_kind_label);
4863 add_dwarf_attr (die, &attr);
4864 }
4865
4866 /* Add a section offset attribute value to a DIE, an offset into the
4867 debug_line section. */
4868
4869 static inline void
4870 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4871 const char *label)
4872 {
4873 dw_attr_node attr;
4874
4875 attr.dw_attr = attr_kind;
4876 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4877 attr.dw_attr_val.val_entry = NULL;
4878 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4879 add_dwarf_attr (die, &attr);
4880 }
4881
4882 /* Add a section offset attribute value to a DIE, an offset into the
4883 debug_loclists section. */
4884
4885 static inline void
4886 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4887 const char *label)
4888 {
4889 dw_attr_node attr;
4890
4891 attr.dw_attr = attr_kind;
4892 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4893 attr.dw_attr_val.val_entry = NULL;
4894 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4895 add_dwarf_attr (die, &attr);
4896 }
4897
4898 /* Add a section offset attribute value to a DIE, an offset into the
4899 debug_macinfo section. */
4900
4901 static inline void
4902 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4903 const char *label)
4904 {
4905 dw_attr_node attr;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_macptr;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4911 add_dwarf_attr (die, &attr);
4912 }
4913
4914 /* Add an offset attribute value to a DIE. */
4915
4916 static inline void
4917 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4918 unsigned HOST_WIDE_INT offset)
4919 {
4920 dw_attr_node attr;
4921
4922 attr.dw_attr = attr_kind;
4923 attr.dw_attr_val.val_class = dw_val_class_offset;
4924 attr.dw_attr_val.val_entry = NULL;
4925 attr.dw_attr_val.v.val_offset = offset;
4926 add_dwarf_attr (die, &attr);
4927 }
4928
4929 /* Add a range_list attribute value to a DIE. When using
4930 dwarf_split_debug_info, address attributes in dies destined for the
4931 final executable should be direct references--setting the parameter
4932 force_direct ensures this behavior. */
4933
4934 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4935 #define RELOCATED_OFFSET (NULL)
4936
4937 static void
4938 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4939 long unsigned int offset, bool force_direct)
4940 {
4941 dw_attr_node attr;
4942
4943 attr.dw_attr = attr_kind;
4944 attr.dw_attr_val.val_class = dw_val_class_range_list;
4945 /* For the range_list attribute, use val_entry to store whether the
4946 offset should follow split-debug-info or normal semantics. This
4947 value is read in output_range_list_offset. */
4948 if (dwarf_split_debug_info && !force_direct)
4949 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4950 else
4951 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4952 attr.dw_attr_val.v.val_offset = offset;
4953 add_dwarf_attr (die, &attr);
4954 }
4955
4956 /* Return the start label of a delta attribute. */
4957
4958 static inline const char *
4959 AT_vms_delta1 (dw_attr_node *a)
4960 {
4961 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4962 return a->dw_attr_val.v.val_vms_delta.lbl1;
4963 }
4964
4965 /* Return the end label of a delta attribute. */
4966
4967 static inline const char *
4968 AT_vms_delta2 (dw_attr_node *a)
4969 {
4970 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4971 return a->dw_attr_val.v.val_vms_delta.lbl2;
4972 }
4973
4974 static inline const char *
4975 AT_lbl (dw_attr_node *a)
4976 {
4977 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4978 || AT_class (a) == dw_val_class_lineptr
4979 || AT_class (a) == dw_val_class_macptr
4980 || AT_class (a) == dw_val_class_loclistsptr
4981 || AT_class (a) == dw_val_class_high_pc));
4982 return a->dw_attr_val.v.val_lbl_id;
4983 }
4984
4985 /* Get the attribute of type attr_kind. */
4986
4987 static dw_attr_node *
4988 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
4989 {
4990 dw_attr_node *a;
4991 unsigned ix;
4992 dw_die_ref spec = NULL;
4993
4994 if (! die)
4995 return NULL;
4996
4997 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4998 if (a->dw_attr == attr_kind)
4999 return a;
5000 else if (a->dw_attr == DW_AT_specification
5001 || a->dw_attr == DW_AT_abstract_origin)
5002 spec = AT_ref (a);
5003
5004 if (spec)
5005 return get_AT (spec, attr_kind);
5006
5007 return NULL;
5008 }
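
/* Note that the fallback through DW_AT_specification / DW_AT_abstract_origin
   means that, for instance, a query such as

     const char *name = get_AT_string (definition_die, DW_AT_name);

   can succeed even when only the declaration DIE carries DW_AT_name
   (definition_die here is just an illustrative variable name).  */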
5009
5010 /* Returns the parent of the declaration of DIE. */
5011
5012 static dw_die_ref
5013 get_die_parent (dw_die_ref die)
5014 {
5015 dw_die_ref t;
5016
5017 if (!die)
5018 return NULL;
5019
5020 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5021 || (t = get_AT_ref (die, DW_AT_specification)))
5022 die = t;
5023
5024 return die->die_parent;
5025 }
5026
5027 /* Return the "low pc" attribute value, typically associated with a subprogram
5028 DIE. Return null if the "low pc" attribute is either not present, or if it
5029 cannot be represented as an assembler label identifier. */
5030
5031 static inline const char *
5032 get_AT_low_pc (dw_die_ref die)
5033 {
5034 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5035
5036 return a ? AT_lbl (a) : NULL;
5037 }
5038
5039 /* Return the "high pc" attribute value, typically associated with a subprogram
5040 DIE. Return null if the "high pc" attribute is either not present, or if it
5041 cannot be represented as an assembler label identifier. */
5042
5043 static inline const char *
5044 get_AT_hi_pc (dw_die_ref die)
5045 {
5046 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5047
5048 return a ? AT_lbl (a) : NULL;
5049 }
5050
5051 /* Return the value of the string attribute designated by ATTR_KIND, or
5052 NULL if it is not present. */
5053
5054 static inline const char *
5055 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5056 {
5057 dw_attr_node *a = get_AT (die, attr_kind);
5058
5059 return a ? AT_string (a) : NULL;
5060 }
5061
5062 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5063 if it is not present.  */
5064
5065 static inline int
5066 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5067 {
5068 dw_attr_node *a = get_AT (die, attr_kind);
5069
5070 return a ? AT_flag (a) : 0;
5071 }
5072
5073 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5074 if it is not present. */
5075
5076 static inline unsigned
5077 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5078 {
5079 dw_attr_node *a = get_AT (die, attr_kind);
5080
5081 return a ? AT_unsigned (a) : 0;
5082 }
5083
5084 static inline dw_die_ref
5085 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5086 {
5087 dw_attr_node *a = get_AT (die, attr_kind);
5088
5089 return a ? AT_ref (a) : NULL;
5090 }
5091
5092 static inline struct dwarf_file_data *
5093 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5094 {
5095 dw_attr_node *a = get_AT (die, attr_kind);
5096
5097 return a ? AT_file (a) : NULL;
5098 }
5099
5100 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
5101
5102 static const_tree
5103 get_ultimate_context (const_tree decl)
5104 {
5105 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
5106 {
5107 if (TREE_CODE (decl) == BLOCK)
5108 decl = BLOCK_SUPERCONTEXT (decl);
5109 else
5110 decl = get_containing_scope (decl);
5111 }
5112 return decl;
5113 }
5114
5115 /* Return TRUE if the language is C++. */
5116
5117 static inline bool
5118 is_cxx (void)
5119 {
5120 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5121
5122 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5123 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5124 }
5125
5126 /* Return TRUE if DECL was created by the C++ frontend. */
5127
5128 static bool
5129 is_cxx (const_tree decl)
5130 {
5131 if (in_lto_p)
5132 {
5133 const_tree context = get_ultimate_context (decl);
5134 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5135 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5136 }
5137 return is_cxx ();
5138 }
5139
5140 /* Return TRUE if the language is Fortran. */
5141
5142 static inline bool
5143 is_fortran (void)
5144 {
5145 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5146
5147 return (lang == DW_LANG_Fortran77
5148 || lang == DW_LANG_Fortran90
5149 || lang == DW_LANG_Fortran95
5150 || lang == DW_LANG_Fortran03
5151 || lang == DW_LANG_Fortran08);
5152 }
5153
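/* Return TRUE if DECL was created by the Fortran frontend. */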
5154 static inline bool
5155 is_fortran (const_tree decl)
5156 {
5157 if (in_lto_p)
5158 {
5159 const_tree context = get_ultimate_context (decl);
5160 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5161 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5162 "GNU Fortran", 11) == 0
5163 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5164 "GNU F77") == 0);
5165 }
5166 return is_fortran ();
5167 }
5168
5169 /* Return TRUE if the language is Ada. */
5170
5171 static inline bool
5172 is_ada (void)
5173 {
5174 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5175
5176 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5177 }
5178
5179 /* Remove the specified attribute if present. Return TRUE if removal
5180 was successful. */
5181
5182 static bool
5183 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5184 {
5185 dw_attr_node *a;
5186 unsigned ix;
5187
5188 if (! die)
5189 return false;
5190
5191 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5192 if (a->dw_attr == attr_kind)
5193 {
5194 if (AT_class (a) == dw_val_class_str)
5195 if (a->dw_attr_val.v.val_str->refcount)
5196 a->dw_attr_val.v.val_str->refcount--;
5197
5198 /* vec::ordered_remove should help reduce the number of abbrevs
5199 that are needed. */
5200 die->die_attr->ordered_remove (ix);
5201 return true;
5202 }
5203 return false;
5204 }
5205
5206 /* Remove CHILD from its parent. PREV must have the property that
5207    PREV->DIE_SIB == CHILD. Clears CHILD's die_sib link but does not
   otherwise alter CHILD. */
5208
5209 static void
5210 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5211 {
5212 gcc_assert (child->die_parent == prev->die_parent);
5213 gcc_assert (prev->die_sib == child);
5214 if (prev == child)
5215 {
5216 gcc_assert (child->die_parent->die_child == child);
5217 prev = NULL;
5218 }
5219 else
5220 prev->die_sib = child->die_sib;
5221 if (child->die_parent->die_child == child)
5222 child->die_parent->die_child = prev;
5223 child->die_sib = NULL;
5224 }
5225
5226 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5227    PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib link but does
   not otherwise alter OLD_CHILD. */
5228
5229 static void
5230 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5231 {
5232 dw_die_ref parent = old_child->die_parent;
5233
5234 gcc_assert (parent == prev->die_parent);
5235 gcc_assert (prev->die_sib == old_child);
5236
5237 new_child->die_parent = parent;
5238 if (prev == old_child)
5239 {
5240 gcc_assert (parent->die_child == old_child);
5241 new_child->die_sib = new_child;
5242 }
5243 else
5244 {
5245 prev->die_sib = new_child;
5246 new_child->die_sib = old_child->die_sib;
5247 }
5248 if (old_child->die_parent->die_child == old_child)
5249 old_child->die_parent->die_child = new_child;
5250 old_child->die_sib = NULL;
5251 }
5252
5253 /* Move all children from OLD_PARENT to NEW_PARENT. */
5254
5255 static void
5256 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5257 {
5258 dw_die_ref c;
5259 new_parent->die_child = old_parent->die_child;
5260 old_parent->die_child = NULL;
5261 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5262 }
5263
5264 /* Remove any child DIEs of DIE whose die_tag is TAG. Do nothing if no
5265    child matches TAG. */
5266
5267 static void
5268 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5269 {
5270 dw_die_ref c;
5271
5272 c = die->die_child;
5273 if (c) do {
5274 dw_die_ref prev = c;
5275 c = c->die_sib;
5276 while (c->die_tag == tag)
5277 {
5278 remove_child_with_prev (c, prev);
5279 c->die_parent = NULL;
5280 /* Might have removed every child. */
5281 if (die->die_child == NULL)
5282 return;
5283 c = prev->die_sib;
5284 }
5285 } while (c != die->die_child);
5286 }
5287
5288 /* Add a CHILD_DIE as the last child of DIE. */
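/* DIE children are kept on a circular singly-linked list: DIE->die_child
   points to the most recently added (last) child, and each child's die_sib
   points to the next child, wrapping from the last child back to the first.
   For example, adding A and then B to a childless DIE leaves die_child = B,
   B->die_sib = A and A->die_sib = B. */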
5289
5290 static void
5291 add_child_die (dw_die_ref die, dw_die_ref child_die)
5292 {
5293 /* FIXME this should probably be an assert. */
5294 if (! die || ! child_die)
5295 return;
5296 gcc_assert (die != child_die);
5297
5298 child_die->die_parent = die;
5299 if (die->die_child)
5300 {
5301 child_die->die_sib = die->die_child->die_sib;
5302 die->die_child->die_sib = child_die;
5303 }
5304 else
5305 child_die->die_sib = child_die;
5306 die->die_child = child_die;
5307 }
5308
5309 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5310
5311 static void
5312 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5313 dw_die_ref after_die)
5314 {
5315 gcc_assert (die
5316 && child_die
5317 && after_die
5318 && die->die_child
5319 && die != child_die);
5320
5321 child_die->die_parent = die;
5322 child_die->die_sib = after_die->die_sib;
5323 after_die->die_sib = child_die;
5324 if (die->die_child == after_die)
5325 die->die_child = child_die;
5326 }
5327
5328 /* Unassociate CHILD from its parent, and make its parent be
5329 NEW_PARENT. */
5330
5331 static void
5332 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5333 {
5334 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5335 if (p->die_sib == child)
5336 {
5337 remove_child_with_prev (child, p);
5338 break;
5339 }
5340 add_child_die (new_parent, child);
5341 }
5342
5343 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5344 is the specification, to the end of PARENT's list of children.
5345 This is done by removing and re-adding it. */
5346
5347 static void
5348 splice_child_die (dw_die_ref parent, dw_die_ref child)
5349 {
5350 /* We want the declaration DIE from inside the class, not the
5351 specification DIE at toplevel. */
5352 if (child->die_parent != parent)
5353 {
5354 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5355
5356 if (tmp)
5357 child = tmp;
5358 }
5359
5360 gcc_assert (child->die_parent == parent
5361 || (child->die_parent
5362 == get_AT_ref (parent, DW_AT_specification)));
5363
5364 reparent_child (child, parent);
5365 }
5366
5367 /* Create and return a new die with TAG_VALUE as tag. */
5368
5369 static inline dw_die_ref
5370 new_die_raw (enum dwarf_tag tag_value)
5371 {
5372 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5373 die->die_tag = tag_value;
5374 return die;
5375 }
5376
5377 /* Create and return a new die with a parent of PARENT_DIE. If
5378 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5379 associated tree T must be supplied to determine parenthood
5380 later. */
5381
5382 static inline dw_die_ref
5383 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5384 {
5385 dw_die_ref die = new_die_raw (tag_value);
5386
5387 if (parent_die != NULL)
5388 add_child_die (parent_die, die);
5389 else
5390 {
5391 limbo_die_node *limbo_node;
5392
5393 /* No DIEs created after early dwarf should end up in limbo,
5394 because the limbo list should not persist past LTO
5395 streaming. */
5396 if (tag_value != DW_TAG_compile_unit
5397 /* These are allowed because they're generated while
5398 breaking out COMDAT units late. */
5399 && tag_value != DW_TAG_type_unit
5400 && tag_value != DW_TAG_skeleton_unit
5401 && !early_dwarf
5402 /* Allow nested functions to live in limbo because they will
5403 only temporarily live there, as decls_for_scope will fix
5404 them up. */
5405 && (TREE_CODE (t) != FUNCTION_DECL
5406 || !decl_function_context (t))
5407 /* Same as nested functions above but for types. Types that
5408 are local to a function will be fixed in
5409 decls_for_scope. */
5410 && (!RECORD_OR_UNION_TYPE_P (t)
5411 || !TYPE_CONTEXT (t)
5412 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5413 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5414 especially in the ltrans stage, but once we implement LTO
5415 dwarf streaming, we should remove this exception. */
5416 && !in_lto_p)
5417 {
5418 fprintf (stderr, "symbol ended up in limbo too late:");
5419 debug_generic_stmt (t);
5420 gcc_unreachable ();
5421 }
5422
5423 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5424 limbo_node->die = die;
5425 limbo_node->created_for = t;
5426 limbo_node->next = limbo_die_list;
5427 limbo_die_list = limbo_node;
5428 }
5429
5430 return die;
5431 }
5432
5433 /* Return the DIE associated with the given type specifier. */
5434
5435 static inline dw_die_ref
5436 lookup_type_die (tree type)
5437 {
5438 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5439 if (die && die->removed)
5440 {
5441 TYPE_SYMTAB_DIE (type) = NULL;
5442 return NULL;
5443 }
5444 return die;
5445 }
5446
5447 /* Given TYPE_DIE representing the type TYPE, if TYPE is an anonymous type
5448    named by a typedef (so TYPE_DIE is really the naming typedef's DIE),
5449    return the DIE of the anonymous type instead of that of the typedef. */
5450
5451 static inline dw_die_ref
5452 strip_naming_typedef (tree type, dw_die_ref type_die)
5453 {
5454 if (type
5455 && TREE_CODE (type) == RECORD_TYPE
5456 && type_die
5457 && type_die->die_tag == DW_TAG_typedef
5458 && is_naming_typedef_decl (TYPE_NAME (type)))
5459 type_die = get_AT_ref (type_die, DW_AT_type);
5460 return type_die;
5461 }
5462
5463 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5464    typedef[1], return the DIE of the anonymous type instead of the one of
5465    the naming typedef. This is because in gen_typedef_die, we equated
5466    the anonymous struct named by the typedef with the DIE of the naming
5467    typedef. So by default, lookup_type_die on an anonymous struct yields
5468    the DIE of the naming typedef.
5469
5470 [1]: Read the comment of is_naming_typedef_decl to learn about what
5471 a naming typedef is. */
5472
5473 static inline dw_die_ref
5474 lookup_type_die_strip_naming_typedef (tree type)
5475 {
5476 dw_die_ref die = lookup_type_die (type);
5477 return strip_naming_typedef (type, die);
5478 }
5479
5480 /* Equate a DIE to a given type specifier. */
5481
5482 static inline void
5483 equate_type_number_to_die (tree type, dw_die_ref type_die)
5484 {
5485 TYPE_SYMTAB_DIE (type) = type_die;
5486 }
5487
5488 /* Returns a hash value for X (which really is a die_struct). */
5489
5490 inline hashval_t
5491 decl_die_hasher::hash (die_node *x)
5492 {
5493 return (hashval_t) x->decl_id;
5494 }
5495
5496 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5497
5498 inline bool
5499 decl_die_hasher::equal (die_node *x, tree y)
5500 {
5501 return (x->decl_id == DECL_UID (y));
5502 }
5503
5504 /* Return the DIE associated with a given declaration. */
5505
5506 static inline dw_die_ref
5507 lookup_decl_die (tree decl)
5508 {
5509 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5510 NO_INSERT);
5511 if (!die)
5512 return NULL;
5513 if ((*die)->removed)
5514 {
5515 decl_die_table->clear_slot (die);
5516 return NULL;
5517 }
5518 return *die;
5519 }
5520
5521
5522 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5523    style reference. Return true if we found one referring to a DIE for
5524    DECL, otherwise return false. */
5525
5526 static bool
5527 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5528 unsigned HOST_WIDE_INT *off)
5529 {
5530 dw_die_ref die;
5531
5532 if (flag_wpa && !decl_die_table)
5533 return false;
5534
5535 if (TREE_CODE (decl) == BLOCK)
5536 die = BLOCK_DIE (decl);
5537 else
5538 die = lookup_decl_die (decl);
5539 if (!die)
5540 return false;
5541
5542 /* During WPA stage we currently use DIEs to store the
5543 decl <-> label + offset map. That's quite inefficient but it
5544 works for now. */
5545 if (flag_wpa)
5546 {
5547 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5548 if (!ref)
5549 {
5550 gcc_assert (die == comp_unit_die ());
5551 return false;
5552 }
5553 *off = ref->die_offset;
5554 *sym = ref->die_id.die_symbol;
5555 return true;
5556 }
5557
5558 /* Similar to get_ref_die_offset_label, but using the "correct"
5559 label. */
5560 *off = die->die_offset;
5561 while (die->die_parent)
5562 die = die->die_parent;
5563 /* For the containing CU DIE we compute a die_symbol in
5564 compute_comp_unit_symbol. */
5565 gcc_assert (die->die_tag == DW_TAG_compile_unit
5566 && die->die_id.die_symbol != NULL);
5567 *sym = die->die_id.die_symbol;
5568 return true;
5569 }
5570
5571 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5572
5573 static void
5574 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5575 const char *symbol, HOST_WIDE_INT offset)
5576 {
5577 /* Create a fake DIE that contains the reference. Don't use
5578 new_die because we don't want to end up in the limbo list. */
5579 dw_die_ref ref = new_die_raw (die->die_tag);
5580 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5581 ref->die_offset = offset;
5582 ref->with_offset = 1;
5583 add_AT_die_ref (die, attr_kind, ref);
5584 }
5585
5586 /* Create a DIE for DECL if required and add a reference to a DIE
5587 at SYMBOL + OFFSET which contains attributes dumped early. */
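/* The DIE created below is only a stub: its attributes were already emitted
   as part of the early debug info, so besides placing it under the proper
   parent we only attach a reference (DW_AT_abstract_origin, or DW_AT_import
   for whole compile units) to the early DIE found at SYM + OFF. */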
5588
5589 static void
5590 dwarf2out_register_external_die (tree decl, const char *sym,
5591 unsigned HOST_WIDE_INT off)
5592 {
5593 if (debug_info_level == DINFO_LEVEL_NONE)
5594 return;
5595
5596 if (flag_wpa && !decl_die_table)
5597 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5598
5599 dw_die_ref die
5600 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5601 gcc_assert (!die);
5602
5603 tree ctx;
5604 dw_die_ref parent = NULL;
5605   /* We need to look up a DIE for the decl's context - the containing
5606      function or translation unit. */
5607 if (TREE_CODE (decl) == BLOCK)
5608 {
5609 ctx = BLOCK_SUPERCONTEXT (decl);
5610       /* ??? We do not output DIEs for all scopes, thus skip as
5611	 many scopes as needed. */
5612 while (TREE_CODE (ctx) == BLOCK
5613 && !BLOCK_DIE (ctx))
5614 ctx = BLOCK_SUPERCONTEXT (ctx);
5615 }
5616 else
5617 ctx = DECL_CONTEXT (decl);
5618 while (ctx && TYPE_P (ctx))
5619 ctx = TYPE_CONTEXT (ctx);
5620 if (ctx)
5621 {
5622 if (TREE_CODE (ctx) == BLOCK)
5623 parent = BLOCK_DIE (ctx);
5624 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5625 /* Keep the 1:1 association during WPA. */
5626 && !flag_wpa)
5627 /* Otherwise all late annotations go to the main CU which
5628 imports the original CUs. */
5629 parent = comp_unit_die ();
5630 else if (TREE_CODE (ctx) == FUNCTION_DECL
5631 && TREE_CODE (decl) != PARM_DECL
5632 && TREE_CODE (decl) != BLOCK)
5633	    /* Leave the parent determination of function-local entities to
5634	       when we process scope vars. */
5635 ;
5636 else
5637 parent = lookup_decl_die (ctx);
5638 }
5639 else
5640 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5641 Handle this case gracefully by globalizing stuff. */
5642 parent = comp_unit_die ();
5643 /* Create a DIE "stub". */
5644 switch (TREE_CODE (decl))
5645 {
5646 case TRANSLATION_UNIT_DECL:
5647 if (! flag_wpa)
5648 {
5649 die = comp_unit_die ();
5650 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5651 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5652 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5653 to create a DIE for the original CUs. */
5654 return;
5655 }
5656 /* Keep the 1:1 association during WPA. */
5657 die = new_die (DW_TAG_compile_unit, NULL, decl);
5658 break;
5659 case NAMESPACE_DECL:
5660 if (is_fortran (decl))
5661 die = new_die (DW_TAG_module, parent, decl);
5662 else
5663 die = new_die (DW_TAG_namespace, parent, decl);
5664 break;
5665 case FUNCTION_DECL:
5666 die = new_die (DW_TAG_subprogram, parent, decl);
5667 break;
5668 case VAR_DECL:
5669 die = new_die (DW_TAG_variable, parent, decl);
5670 break;
5671 case RESULT_DECL:
5672 die = new_die (DW_TAG_variable, parent, decl);
5673 break;
5674 case PARM_DECL:
5675 die = new_die (DW_TAG_formal_parameter, parent, decl);
5676 break;
5677 case CONST_DECL:
5678 die = new_die (DW_TAG_constant, parent, decl);
5679 break;
5680 case LABEL_DECL:
5681 die = new_die (DW_TAG_label, parent, decl);
5682 break;
5683 case BLOCK:
5684 die = new_die (DW_TAG_lexical_block, parent, decl);
5685 break;
5686 default:
5687 gcc_unreachable ();
5688 }
5689 if (TREE_CODE (decl) == BLOCK)
5690 BLOCK_DIE (decl) = die;
5691 else
5692 equate_decl_number_to_die (decl, die);
5693
5694   /* Add a reference to the DIE providing early debug at SYM + OFF. */
5695 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5696 }
5697
5698 /* Returns a hash value for X (which really is a var_loc_list). */
5699
5700 inline hashval_t
5701 decl_loc_hasher::hash (var_loc_list *x)
5702 {
5703 return (hashval_t) x->decl_id;
5704 }
5705
5706 /* Return nonzero if decl_id of var_loc_list X is the same as
5707 UID of decl *Y. */
5708
5709 inline bool
5710 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5711 {
5712 return (x->decl_id == DECL_UID (y));
5713 }
5714
5715 /* Return the var_loc list associated with a given declaration. */
5716
5717 static inline var_loc_list *
5718 lookup_decl_loc (const_tree decl)
5719 {
5720 if (!decl_loc_table)
5721 return NULL;
5722 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5723 }
5724
5725 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5726
5727 inline hashval_t
5728 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5729 {
5730 return (hashval_t) x->decl_id;
5731 }
5732
5733 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5734 UID of decl *Y. */
5735
5736 inline bool
5737 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5738 {
5739 return (x->decl_id == DECL_UID (y));
5740 }
5741
5742 /* Equate a DIE to a particular declaration. */
5743
5744 static void
5745 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5746 {
5747 unsigned int decl_id = DECL_UID (decl);
5748
5749 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5750 decl_die->decl_id = decl_id;
5751 }
5752
5753 /* Return how many bits the PIECE EXPR_LIST covers. */
5754
5755 static HOST_WIDE_INT
5756 decl_piece_bitsize (rtx piece)
5757 {
5758 int ret = (int) GET_MODE (piece);
5759 if (ret)
5760 return ret;
5761 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5762 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5763 return INTVAL (XEXP (XEXP (piece, 0), 0));
5764 }
5765
5766 /* Return a pointer to the location note stored in PIECE EXPR_LIST. */
5767
5768 static rtx *
5769 decl_piece_varloc_ptr (rtx piece)
5770 {
5771 if ((int) GET_MODE (piece))
5772 return &XEXP (piece, 0);
5773 else
5774 return &XEXP (XEXP (piece, 0), 1);
5775 }
5776
5777 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5778 Next is the chain of following piece nodes. */
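/* A piece node stores its bitsize in the EXPR_LIST itself: a positive bitsize
   no larger than MAX_MACHINE_MODE goes directly into the EXPR_LIST's mode
   field, while a larger one wraps the note in (concat (const_int bitsize)
   loc_note) and leaves the mode field zero. decl_piece_bitsize and
   decl_piece_varloc_ptr above read this representation back. */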
5779
5780 static rtx_expr_list *
5781 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5782 {
5783 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5784 return alloc_EXPR_LIST (bitsize, loc_note, next);
5785 else
5786 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5787 GEN_INT (bitsize),
5788 loc_note), next);
5789 }
5790
5791 /* Return the rtx that should be stored into the loc field for
5792    LOC_NOTE and BITPOS/BITSIZE. */
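/* For example, a note describing bits 8..23 of a variable (BITPOS 8,
   BITSIZE 16) yields a two-node list: an 8-bit padding piece with a null
   location followed by a 16-bit piece holding LOC_NOTE. A BITSIZE of -1
   means the note covers the whole variable, and LOC_NOTE is returned
   unchanged. */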
5793
5794 static rtx
5795 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5796 HOST_WIDE_INT bitsize)
5797 {
5798 if (bitsize != -1)
5799 {
5800 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5801 if (bitpos != 0)
5802 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5803 }
5804 return loc_note;
5805 }
5806
5807 /* This function either modifies the location piece list *DEST in
5808    place (if SRC and INNER are NULL), or copies the location piece list
5809    *SRC to *DEST while modifying it. The location at BITPOS is changed
5810    to contain LOC_NOTE; any pieces overlapping it are removed (or, when
5811    copying, not copied) and, if needed, some padding around it is added.
5812    When modifying in place, DEST should point to the EXPR_LIST where
5813    earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
5814    to the start of the whole list and INNER points to the EXPR_LIST
5815    where earlier pieces cover PIECE_BITPOS bits. */
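/* A small in-place example: if the existing list has pieces covering bits
   0..31 and 32..63, and we are called with DEST pointing at the 32..63 piece
   (PIECE_BITPOS 32), BITPOS 32, BITSIZE 32 and a new LOC_NOTE, that piece
   keeps its size and simply has its location replaced by LOC_NOTE. */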
5816
5817 static void
5818 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5819 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5820 HOST_WIDE_INT bitsize, rtx loc_note)
5821 {
5822 HOST_WIDE_INT diff;
5823 bool copy = inner != NULL;
5824
5825 if (copy)
5826 {
5827 /* First copy all nodes preceding the current bitpos. */
5828 while (src != inner)
5829 {
5830 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5831 decl_piece_bitsize (*src), NULL_RTX);
5832 dest = &XEXP (*dest, 1);
5833 src = &XEXP (*src, 1);
5834 }
5835 }
5836 /* Add padding if needed. */
5837 if (bitpos != piece_bitpos)
5838 {
5839 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5840 copy ? NULL_RTX : *dest);
5841 dest = &XEXP (*dest, 1);
5842 }
5843 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5844 {
5845 gcc_assert (!copy);
5846       /* A piece with the correct bitpos and bitsize already exists;
5847	 just update its location and return. */
5848 *decl_piece_varloc_ptr (*dest) = loc_note;
5849 return;
5850 }
5851 /* Add the piece that changed. */
5852 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5853 dest = &XEXP (*dest, 1);
5854 /* Skip over pieces that overlap it. */
5855 diff = bitpos - piece_bitpos + bitsize;
5856 if (!copy)
5857 src = dest;
5858 while (diff > 0 && *src)
5859 {
5860 rtx piece = *src;
5861 diff -= decl_piece_bitsize (piece);
5862 if (copy)
5863 src = &XEXP (piece, 1);
5864 else
5865 {
5866 *src = XEXP (piece, 1);
5867 free_EXPR_LIST_node (piece);
5868 }
5869 }
5870 /* Add padding if needed. */
5871 if (diff < 0 && *src)
5872 {
5873 if (!copy)
5874 dest = src;
5875 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5876 dest = &XEXP (*dest, 1);
5877 }
5878 if (!copy)
5879 return;
5880 /* Finally copy all nodes following it. */
5881 while (*src)
5882 {
5883 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5884 decl_piece_bitsize (*src), NULL_RTX);
5885 dest = &XEXP (*dest, 1);
5886 src = &XEXP (*src, 1);
5887 }
5888 }
5889
5890 /* Add a variable location node to the linked list for DECL. */
5891
5892 static struct var_loc_node *
5893 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5894 {
5895 unsigned int decl_id;
5896 var_loc_list *temp;
5897 struct var_loc_node *loc = NULL;
5898 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5899
5900 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5901 {
5902 tree realdecl = DECL_DEBUG_EXPR (decl);
5903 if (handled_component_p (realdecl)
5904 || (TREE_CODE (realdecl) == MEM_REF
5905 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5906 {
5907 HOST_WIDE_INT maxsize;
5908 bool reverse;
5909 tree innerdecl
5910 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5911 &reverse);
5912 if (!DECL_P (innerdecl)
5913 || DECL_IGNORED_P (innerdecl)
5914 || TREE_STATIC (innerdecl)
5915 || bitsize <= 0
5916 || bitpos + bitsize > 256
5917 || bitsize != maxsize)
5918 return NULL;
5919 decl = innerdecl;
5920 }
5921 }
5922
5923 decl_id = DECL_UID (decl);
5924 var_loc_list **slot
5925 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5926 if (*slot == NULL)
5927 {
5928 temp = ggc_cleared_alloc<var_loc_list> ();
5929 temp->decl_id = decl_id;
5930 *slot = temp;
5931 }
5932 else
5933 temp = *slot;
5934
5935 /* For PARM_DECLs try to keep around the original incoming value,
5936 even if that means we'll emit a zero-range .debug_loc entry. */
5937 if (temp->last
5938 && temp->first == temp->last
5939 && TREE_CODE (decl) == PARM_DECL
5940 && NOTE_P (temp->first->loc)
5941 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5942 && DECL_INCOMING_RTL (decl)
5943 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5944 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5945 == GET_CODE (DECL_INCOMING_RTL (decl))
5946 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5947 && (bitsize != -1
5948 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5949 NOTE_VAR_LOCATION_LOC (loc_note))
5950 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5951 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5952 {
5953 loc = ggc_cleared_alloc<var_loc_node> ();
5954 temp->first->next = loc;
5955 temp->last = loc;
5956 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5957 }
5958 else if (temp->last)
5959 {
5960 struct var_loc_node *last = temp->last, *unused = NULL;
5961 rtx *piece_loc = NULL, last_loc_note;
5962 HOST_WIDE_INT piece_bitpos = 0;
5963 if (last->next)
5964 {
5965 last = last->next;
5966 gcc_assert (last->next == NULL);
5967 }
5968 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5969 {
5970 piece_loc = &last->loc;
5971 do
5972 {
5973 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5974 if (piece_bitpos + cur_bitsize > bitpos)
5975 break;
5976 piece_bitpos += cur_bitsize;
5977 piece_loc = &XEXP (*piece_loc, 1);
5978 }
5979 while (*piece_loc);
5980 }
5981       /* TEMP->LAST here is a pointer either to the last-but-one or to
5982	 the last element in the chained list; LAST is a pointer to the
5983	 last element. */
5984 if (label && strcmp (last->label, label) == 0)
5985 {
5986	  /* For SRA-optimized variables, if there weren't any real
5987	     insns since the last note, just modify the last node. */
5988 if (piece_loc != NULL)
5989 {
5990 adjust_piece_list (piece_loc, NULL, NULL,
5991 bitpos, piece_bitpos, bitsize, loc_note);
5992 return NULL;
5993 }
5994 /* If the last note doesn't cover any instructions, remove it. */
5995 if (temp->last != last)
5996 {
5997 temp->last->next = NULL;
5998 unused = last;
5999 last = temp->last;
6000 gcc_assert (strcmp (last->label, label) != 0);
6001 }
6002 else
6003 {
6004 gcc_assert (temp->first == temp->last
6005 || (temp->first->next == temp->last
6006 && TREE_CODE (decl) == PARM_DECL));
6007 memset (temp->last, '\0', sizeof (*temp->last));
6008 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6009 return temp->last;
6010 }
6011 }
6012 if (bitsize == -1 && NOTE_P (last->loc))
6013 last_loc_note = last->loc;
6014 else if (piece_loc != NULL
6015 && *piece_loc != NULL_RTX
6016 && piece_bitpos == bitpos
6017 && decl_piece_bitsize (*piece_loc) == bitsize)
6018 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6019 else
6020 last_loc_note = NULL_RTX;
6021 /* If the current location is the same as the end of the list,
6022 and either both or neither of the locations is uninitialized,
6023 we have nothing to do. */
6024 if (last_loc_note == NULL_RTX
6025 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6026 NOTE_VAR_LOCATION_LOC (loc_note)))
6027 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6028 != NOTE_VAR_LOCATION_STATUS (loc_note))
6029 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6030 == VAR_INIT_STATUS_UNINITIALIZED)
6031 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6032 == VAR_INIT_STATUS_UNINITIALIZED))))
6033 {
6034 /* Add LOC to the end of list and update LAST. If the last
6035 element of the list has been removed above, reuse its
6036 memory for the new node, otherwise allocate a new one. */
6037 if (unused)
6038 {
6039 loc = unused;
6040 memset (loc, '\0', sizeof (*loc));
6041 }
6042 else
6043 loc = ggc_cleared_alloc<var_loc_node> ();
6044 if (bitsize == -1 || piece_loc == NULL)
6045 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6046 else
6047 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6048 bitpos, piece_bitpos, bitsize, loc_note);
6049 last->next = loc;
6050 /* Ensure TEMP->LAST will point either to the new last but one
6051 element of the chain, or to the last element in it. */
6052 if (last != temp->last)
6053 temp->last = last;
6054 }
6055 else if (unused)
6056 ggc_free (unused);
6057 }
6058 else
6059 {
6060 loc = ggc_cleared_alloc<var_loc_node> ();
6061 temp->first = loc;
6062 temp->last = loc;
6063 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6064 }
6065 return loc;
6066 }
6067 \f
6068 /* Keep track of the number of spaces used to indent the
6069 output of the debugging routines that print the structure of
6070 the DIE internal representation. */
6071 static int print_indent;
6072
6073 /* Indent the line by the number of spaces given by print_indent. */
6074
6075 static inline void
6076 print_spaces (FILE *outfile)
6077 {
6078 fprintf (outfile, "%*s", print_indent, "");
6079 }
6080
6081 /* Print a type signature in hex. */
6082
6083 static inline void
6084 print_signature (FILE *outfile, char *sig)
6085 {
6086 int i;
6087
6088 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6089 fprintf (outfile, "%02x", sig[i] & 0xff);
6090 }
6091
6092 static inline void
6093 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6094 {
6095   if (discr_value->pos)
6096     fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6097   else
6098     fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6099 }
6100
6101 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6102
6103 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6104    RECURSE, output location descriptor operations. */
6105
6106 static void
6107 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6108 {
6109 switch (val->val_class)
6110 {
6111 case dw_val_class_addr:
6112 fprintf (outfile, "address");
6113 break;
6114 case dw_val_class_offset:
6115 fprintf (outfile, "offset");
6116 break;
6117 case dw_val_class_loc:
6118 fprintf (outfile, "location descriptor");
6119 if (val->v.val_loc == NULL)
6120 fprintf (outfile, " -> <null>\n");
6121 else if (recurse)
6122 {
6123 fprintf (outfile, ":\n");
6124 print_indent += 4;
6125 print_loc_descr (val->v.val_loc, outfile);
6126 print_indent -= 4;
6127 }
6128 else
6129 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6130 break;
6131 case dw_val_class_loc_list:
6132 fprintf (outfile, "location list -> label:%s",
6133 val->v.val_loc_list->ll_symbol);
6134 break;
6135 case dw_val_class_range_list:
6136 fprintf (outfile, "range list");
6137 break;
6138 case dw_val_class_const:
6139 case dw_val_class_const_implicit:
6140 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6141 break;
6142 case dw_val_class_unsigned_const:
6143 case dw_val_class_unsigned_const_implicit:
6144 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6145 break;
6146 case dw_val_class_const_double:
6147 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6148 HOST_WIDE_INT_PRINT_UNSIGNED")",
6149 val->v.val_double.high,
6150 val->v.val_double.low);
6151 break;
6152 case dw_val_class_wide_int:
6153 {
6154 int i = val->v.val_wide->get_len ();
6155 fprintf (outfile, "constant (");
6156 gcc_assert (i > 0);
6157 if (val->v.val_wide->elt (i - 1) == 0)
6158 fprintf (outfile, "0x");
6159 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6160 val->v.val_wide->elt (--i));
6161 while (--i >= 0)
6162 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6163 val->v.val_wide->elt (i));
6164 fprintf (outfile, ")");
6165 break;
6166 }
6167 case dw_val_class_vec:
6168 fprintf (outfile, "floating-point or vector constant");
6169 break;
6170 case dw_val_class_flag:
6171 fprintf (outfile, "%u", val->v.val_flag);
6172 break;
6173 case dw_val_class_die_ref:
6174 if (val->v.val_die_ref.die != NULL)
6175 {
6176 dw_die_ref die = val->v.val_die_ref.die;
6177
6178 if (die->comdat_type_p)
6179 {
6180 fprintf (outfile, "die -> signature: ");
6181 print_signature (outfile,
6182 die->die_id.die_type_node->signature);
6183 }
6184 else if (die->die_id.die_symbol)
6185 {
6186 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6187 if (die->with_offset)
6188 fprintf (outfile, " + %ld", die->die_offset);
6189 }
6190 else
6191 fprintf (outfile, "die -> %ld", die->die_offset);
6192 fprintf (outfile, " (%p)", (void *) die);
6193 }
6194 else
6195 fprintf (outfile, "die -> <null>");
6196 break;
6197 case dw_val_class_vms_delta:
6198 fprintf (outfile, "delta: @slotcount(%s-%s)",
6199 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6200 break;
6201 case dw_val_class_lbl_id:
6202 case dw_val_class_lineptr:
6203 case dw_val_class_macptr:
6204 case dw_val_class_loclistsptr:
6205 case dw_val_class_high_pc:
6206 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6207 break;
6208 case dw_val_class_str:
6209 if (val->v.val_str->str != NULL)
6210 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6211 else
6212 fprintf (outfile, "<null>");
6213 break;
6214 case dw_val_class_file:
6215 case dw_val_class_file_implicit:
6216 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6217 val->v.val_file->emitted_number);
6218 break;
6219 case dw_val_class_data8:
6220 {
6221 int i;
6222
6223 for (i = 0; i < 8; i++)
6224 fprintf (outfile, "%02x", val->v.val_data8[i]);
6225 break;
6226 }
6227 case dw_val_class_discr_value:
6228 print_discr_value (outfile, &val->v.val_discr_value);
6229 break;
6230 case dw_val_class_discr_list:
6231 for (dw_discr_list_ref node = val->v.val_discr_list;
6232 node != NULL;
6233 node = node->dw_discr_next)
6234 {
6235 if (node->dw_discr_range)
6236 {
6237	      print_discr_value (outfile, &node->dw_discr_lower_bound);
6238	      fprintf (outfile, " .. ");
6239	      print_discr_value (outfile, &node->dw_discr_upper_bound);
6240 }
6241 else
6242 print_discr_value (outfile, &node->dw_discr_lower_bound);
6243
6244 if (node->dw_discr_next != NULL)
6245 fprintf (outfile, " | ");
6246 }
6247 default:
6248 break;
6249 }
6250 }
6251
6252 /* Likewise, for a DIE attribute. */
6253
6254 static void
6255 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6256 {
6257 print_dw_val (&a->dw_attr_val, recurse, outfile);
6258 }
6259
6260
6261 /* Print the list of operations in the LOC location description to OUTFILE.
6262    This routine is a debugging aid only. */
6263
6264 static void
6265 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6266 {
6267 dw_loc_descr_ref l = loc;
6268
6269 if (loc == NULL)
6270 {
6271 print_spaces (outfile);
6272 fprintf (outfile, "<null>\n");
6273 return;
6274 }
6275
6276 for (l = loc; l != NULL; l = l->dw_loc_next)
6277 {
6278 print_spaces (outfile);
6279 fprintf (outfile, "(%p) %s",
6280 (void *) l,
6281 dwarf_stack_op_name (l->dw_loc_opc));
6282 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6283 {
6284 fprintf (outfile, " ");
6285 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6286 }
6287 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6288 {
6289 fprintf (outfile, ", ");
6290 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6291 }
6292 fprintf (outfile, "\n");
6293 }
6294 }
6295
6296 /* Print the information associated with a given DIE, and its children.
6297 This routine is a debugging aid only. */
6298
6299 static void
6300 print_die (dw_die_ref die, FILE *outfile)
6301 {
6302 dw_attr_node *a;
6303 dw_die_ref c;
6304 unsigned ix;
6305
6306 print_spaces (outfile);
6307 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6308 die->die_offset, dwarf_tag_name (die->die_tag),
6309 (void*) die);
6310 print_spaces (outfile);
6311 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6312 fprintf (outfile, " offset: %ld", die->die_offset);
6313 fprintf (outfile, " mark: %d\n", die->die_mark);
6314
6315 if (die->comdat_type_p)
6316 {
6317 print_spaces (outfile);
6318 fprintf (outfile, " signature: ");
6319 print_signature (outfile, die->die_id.die_type_node->signature);
6320 fprintf (outfile, "\n");
6321 }
6322
6323 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6324 {
6325 print_spaces (outfile);
6326 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6327
6328 print_attribute (a, true, outfile);
6329 fprintf (outfile, "\n");
6330 }
6331
6332 if (die->die_child != NULL)
6333 {
6334 print_indent += 4;
6335 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6336 print_indent -= 4;
6337 }
6338 if (print_indent == 0)
6339 fprintf (outfile, "\n");
6340 }
6341
6342 /* Print the list of operations in the LOC location description. */
6343
6344 DEBUG_FUNCTION void
6345 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6346 {
6347 print_loc_descr (loc, stderr);
6348 }
6349
6350 /* Print the information collected for a given DIE. */
6351
6352 DEBUG_FUNCTION void
6353 debug_dwarf_die (dw_die_ref die)
6354 {
6355 print_die (die, stderr);
6356 }
6357
6358 DEBUG_FUNCTION void
6359 debug (die_struct &ref)
6360 {
6361 print_die (&ref, stderr);
6362 }
6363
6364 DEBUG_FUNCTION void
6365 debug (die_struct *ptr)
6366 {
6367 if (ptr)
6368 debug (*ptr);
6369 else
6370 fprintf (stderr, "<nil>\n");
6371 }
6372
6373
6374 /* Print all DWARF information collected for the compilation unit.
6375 This routine is a debugging aid only. */
6376
6377 DEBUG_FUNCTION void
6378 debug_dwarf (void)
6379 {
6380 print_indent = 0;
6381 print_die (comp_unit_die (), stderr);
6382 }
6383
6384 /* Verify the DIE tree structure. */
6385
6386 DEBUG_FUNCTION void
6387 verify_die (dw_die_ref die)
6388 {
6389 gcc_assert (!die->die_mark);
6390 if (die->die_parent == NULL
6391 && die->die_sib == NULL)
6392 return;
6393 /* Verify the die_sib list is cyclic. */
6394 dw_die_ref x = die;
6395 do
6396 {
6397 x->die_mark = 1;
6398 x = x->die_sib;
6399 }
6400 while (x && !x->die_mark);
6401 gcc_assert (x == die);
6402 x = die;
6403 do
6404 {
6405 /* Verify all dies have the same parent. */
6406 gcc_assert (x->die_parent == die->die_parent);
6407 if (x->die_child)
6408 {
6409 /* Verify the child has the proper parent and recurse. */
6410 gcc_assert (x->die_child->die_parent == x);
6411 verify_die (x->die_child);
6412 }
6413 x->die_mark = 0;
6414 x = x->die_sib;
6415 }
6416 while (x && x->die_mark);
6417 }
6418
6419 /* Sanity checks on DIEs. */
6420
6421 static void
6422 check_die (dw_die_ref die)
6423 {
6424 unsigned ix;
6425 dw_attr_node *a;
6426 bool inline_found = false;
6427 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6428 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6429 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6430 {
6431 switch (a->dw_attr)
6432 {
6433 case DW_AT_inline:
6434 if (a->dw_attr_val.v.val_unsigned)
6435 inline_found = true;
6436 break;
6437 case DW_AT_location:
6438 ++n_location;
6439 break;
6440 case DW_AT_low_pc:
6441 ++n_low_pc;
6442 break;
6443 case DW_AT_high_pc:
6444 ++n_high_pc;
6445 break;
6446 case DW_AT_artificial:
6447 ++n_artificial;
6448 break;
6449 case DW_AT_decl_column:
6450 ++n_decl_column;
6451 break;
6452 case DW_AT_decl_line:
6453 ++n_decl_line;
6454 break;
6455 case DW_AT_decl_file:
6456 ++n_decl_file;
6457 break;
6458 default:
6459 break;
6460 }
6461 }
6462 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6463 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6464 {
6465 fprintf (stderr, "Duplicate attributes in DIE:\n");
6466 debug_dwarf_die (die);
6467 gcc_unreachable ();
6468 }
6469 if (inline_found)
6470 {
6471 /* A debugging information entry that is a member of an abstract
6472 instance tree [that has DW_AT_inline] should not contain any
6473 attributes which describe aspects of the subroutine which vary
6474 between distinct inlined expansions or distinct out-of-line
6475 expansions. */
6476 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6477 gcc_assert (a->dw_attr != DW_AT_low_pc
6478 && a->dw_attr != DW_AT_high_pc
6479 && a->dw_attr != DW_AT_location
6480 && a->dw_attr != DW_AT_frame_base
6481 && a->dw_attr != DW_AT_call_all_calls
6482 && a->dw_attr != DW_AT_GNU_all_call_sites);
6483 }
6484 }
6485 \f
6486 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6487 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6488 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6489
6490 /* Calculate the checksum of a location expression. */
6491
6492 static inline void
6493 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6494 {
6495 int tem;
6496 inchash::hash hstate;
6497 hashval_t hash;
6498
6499 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6500 CHECKSUM (tem);
6501 hash_loc_operands (loc, hstate);
6502 hash = hstate.end();
6503 CHECKSUM (hash);
6504 }
6505
6506 /* Calculate the checksum of an attribute. */
6507
6508 static void
6509 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6510 {
6511 dw_loc_descr_ref loc;
6512 rtx r;
6513
6514 CHECKSUM (at->dw_attr);
6515
6516 /* We don't care that this was compiled with a different compiler
6517 snapshot; if the output is the same, that's what matters. */
6518 if (at->dw_attr == DW_AT_producer)
6519 return;
6520
6521 switch (AT_class (at))
6522 {
6523 case dw_val_class_const:
6524 case dw_val_class_const_implicit:
6525 CHECKSUM (at->dw_attr_val.v.val_int);
6526 break;
6527 case dw_val_class_unsigned_const:
6528 case dw_val_class_unsigned_const_implicit:
6529 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6530 break;
6531 case dw_val_class_const_double:
6532 CHECKSUM (at->dw_attr_val.v.val_double);
6533 break;
6534 case dw_val_class_wide_int:
6535 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6536 get_full_len (*at->dw_attr_val.v.val_wide)
6537 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6538 break;
6539 case dw_val_class_vec:
6540 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6541 (at->dw_attr_val.v.val_vec.length
6542 * at->dw_attr_val.v.val_vec.elt_size));
6543 break;
6544 case dw_val_class_flag:
6545 CHECKSUM (at->dw_attr_val.v.val_flag);
6546 break;
6547 case dw_val_class_str:
6548 CHECKSUM_STRING (AT_string (at));
6549 break;
6550
6551 case dw_val_class_addr:
6552 r = AT_addr (at);
6553 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6554 CHECKSUM_STRING (XSTR (r, 0));
6555 break;
6556
6557 case dw_val_class_offset:
6558 CHECKSUM (at->dw_attr_val.v.val_offset);
6559 break;
6560
6561 case dw_val_class_loc:
6562 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6563 loc_checksum (loc, ctx);
6564 break;
6565
6566 case dw_val_class_die_ref:
6567 die_checksum (AT_ref (at), ctx, mark);
6568 break;
6569
6570 case dw_val_class_fde_ref:
6571 case dw_val_class_vms_delta:
6572 case dw_val_class_lbl_id:
6573 case dw_val_class_lineptr:
6574 case dw_val_class_macptr:
6575 case dw_val_class_loclistsptr:
6576 case dw_val_class_high_pc:
6577 break;
6578
6579 case dw_val_class_file:
6580 case dw_val_class_file_implicit:
6581 CHECKSUM_STRING (AT_file (at)->filename);
6582 break;
6583
6584 case dw_val_class_data8:
6585 CHECKSUM (at->dw_attr_val.v.val_data8);
6586 break;
6587
6588 default:
6589 break;
6590 }
6591 }
6592
6593 /* Calculate the checksum of a DIE. */
6594
6595 static void
6596 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6597 {
6598 dw_die_ref c;
6599 dw_attr_node *a;
6600 unsigned ix;
6601
6602 /* To avoid infinite recursion. */
6603 if (die->die_mark)
6604 {
6605 CHECKSUM (die->die_mark);
6606 return;
6607 }
6608 die->die_mark = ++(*mark);
6609
6610 CHECKSUM (die->die_tag);
6611
6612 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6613 attr_checksum (a, ctx, mark);
6614
6615 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6616 }
6617
6618 #undef CHECKSUM
6619 #undef CHECKSUM_BLOCK
6620 #undef CHECKSUM_STRING
6621
6622 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6623 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6624 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6625 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6626 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6627 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6628 #define CHECKSUM_ATTR(FOO) \
6629 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6630
6631 /* Calculate the checksum of a number in signed LEB128 format. */
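/* For example, the value -123456 is checksummed as the three bytes
   0xc0 0xbb 0x78: each byte carries seven bits of the two's-complement
   value, the high bit marks continuation, and emission stops once only
   sign bits remain. */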
6632
6633 static void
6634 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6635 {
6636 unsigned char byte;
6637 bool more;
6638
6639 while (1)
6640 {
6641 byte = (value & 0x7f);
6642 value >>= 7;
6643 more = !((value == 0 && (byte & 0x40) == 0)
6644 || (value == -1 && (byte & 0x40) != 0));
6645 if (more)
6646 byte |= 0x80;
6647 CHECKSUM (byte);
6648 if (!more)
6649 break;
6650 }
6651 }
6652
6653 /* Calculate the checksum of a number in unsigned LEB128 format. */
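/* For example, the value 624485 is checksummed as the three bytes
   0xe5 0x8e 0x26: seven value bits per byte, least significant group
   first, with the high bit set on every byte except the last. */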
6654
6655 static void
6656 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6657 {
6658 while (1)
6659 {
6660 unsigned char byte = (value & 0x7f);
6661 value >>= 7;
6662 if (value != 0)
6663 /* More bytes to follow. */
6664 byte |= 0x80;
6665 CHECKSUM (byte);
6666 if (value == 0)
6667 break;
6668 }
6669 }
6670
6671 /* Checksum the context of the DIE. This adds the names of any
6672 surrounding namespaces or structures to the checksum. */
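/* For a member of struct S inside namespace N this checksums, outermost
   first, 'C' DW_TAG_namespace "N" and then 'C' DW_TAG_structure_type "S";
   a context DIE with any other tag (e.g. the compile unit) stops the walk
   and contributes nothing. */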
6673
6674 static void
6675 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6676 {
6677 const char *name;
6678 dw_die_ref spec;
6679 int tag = die->die_tag;
6680
6681 if (tag != DW_TAG_namespace
6682 && tag != DW_TAG_structure_type
6683 && tag != DW_TAG_class_type)
6684 return;
6685
6686 name = get_AT_string (die, DW_AT_name);
6687
6688 spec = get_AT_ref (die, DW_AT_specification);
6689 if (spec != NULL)
6690 die = spec;
6691
6692 if (die->die_parent != NULL)
6693 checksum_die_context (die->die_parent, ctx);
6694
6695 CHECKSUM_ULEB128 ('C');
6696 CHECKSUM_ULEB128 (tag);
6697 if (name != NULL)
6698 CHECKSUM_STRING (name);
6699 }
6700
6701 /* Calculate the checksum of a location expression. */
6702
6703 static inline void
6704 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6705 {
6706 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6707 were emitted as a DW_FORM_sdata instead of a location expression. */
6708 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6709 {
6710 CHECKSUM_ULEB128 (DW_FORM_sdata);
6711 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6712 return;
6713 }
6714
6715 /* Otherwise, just checksum the raw location expression. */
6716 while (loc != NULL)
6717 {
6718 inchash::hash hstate;
6719 hashval_t hash;
6720
6721 CHECKSUM_ULEB128 (loc->dtprel);
6722 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6723 hash_loc_operands (loc, hstate);
6724 hash = hstate.end ();
6725 CHECKSUM (hash);
6726 loc = loc->dw_loc_next;
6727 }
6728 }
6729
6730 /* Calculate the checksum of an attribute. */
6731
6732 static void
6733 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6734 struct md5_ctx *ctx, int *mark)
6735 {
6736 dw_loc_descr_ref loc;
6737 rtx r;
6738
6739 if (AT_class (at) == dw_val_class_die_ref)
6740 {
6741 dw_die_ref target_die = AT_ref (at);
6742
6743 /* For pointer and reference types, we checksum only the (qualified)
6744 name of the target type (if there is a name). For friend entries,
6745 we checksum only the (qualified) name of the target type or function.
6746 This allows the checksum to remain the same whether the target type
6747 is complete or not. */
6748 if ((at->dw_attr == DW_AT_type
6749 && (tag == DW_TAG_pointer_type
6750 || tag == DW_TAG_reference_type
6751 || tag == DW_TAG_rvalue_reference_type
6752 || tag == DW_TAG_ptr_to_member_type))
6753 || (at->dw_attr == DW_AT_friend
6754 && tag == DW_TAG_friend))
6755 {
6756 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6757
6758 if (name_attr != NULL)
6759 {
6760 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6761
6762 if (decl == NULL)
6763 decl = target_die;
6764 CHECKSUM_ULEB128 ('N');
6765 CHECKSUM_ULEB128 (at->dw_attr);
6766 if (decl->die_parent != NULL)
6767 checksum_die_context (decl->die_parent, ctx);
6768 CHECKSUM_ULEB128 ('E');
6769 CHECKSUM_STRING (AT_string (name_attr));
6770 return;
6771 }
6772 }
6773
6774 /* For all other references to another DIE, we check to see if the
6775 target DIE has already been visited. If it has, we emit a
6776 backward reference; if not, we descend recursively. */
6777 if (target_die->die_mark > 0)
6778 {
6779 CHECKSUM_ULEB128 ('R');
6780 CHECKSUM_ULEB128 (at->dw_attr);
6781 CHECKSUM_ULEB128 (target_die->die_mark);
6782 }
6783 else
6784 {
6785 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6786
6787 if (decl == NULL)
6788 decl = target_die;
6789 target_die->die_mark = ++(*mark);
6790 CHECKSUM_ULEB128 ('T');
6791 CHECKSUM_ULEB128 (at->dw_attr);
6792 if (decl->die_parent != NULL)
6793 checksum_die_context (decl->die_parent, ctx);
6794 die_checksum_ordered (target_die, ctx, mark);
6795 }
6796 return;
6797 }
6798
6799 CHECKSUM_ULEB128 ('A');
6800 CHECKSUM_ULEB128 (at->dw_attr);
6801
6802 switch (AT_class (at))
6803 {
6804 case dw_val_class_const:
6805 case dw_val_class_const_implicit:
6806 CHECKSUM_ULEB128 (DW_FORM_sdata);
6807 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6808 break;
6809
6810 case dw_val_class_unsigned_const:
6811 case dw_val_class_unsigned_const_implicit:
6812 CHECKSUM_ULEB128 (DW_FORM_sdata);
6813 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6814 break;
6815
6816 case dw_val_class_const_double:
6817 CHECKSUM_ULEB128 (DW_FORM_block);
6818 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6819 CHECKSUM (at->dw_attr_val.v.val_double);
6820 break;
6821
6822 case dw_val_class_wide_int:
6823 CHECKSUM_ULEB128 (DW_FORM_block);
6824 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6825 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6826 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6827 get_full_len (*at->dw_attr_val.v.val_wide)
6828 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6829 break;
6830
6831 case dw_val_class_vec:
6832 CHECKSUM_ULEB128 (DW_FORM_block);
6833 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6834 * at->dw_attr_val.v.val_vec.elt_size);
6835 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6836 (at->dw_attr_val.v.val_vec.length
6837 * at->dw_attr_val.v.val_vec.elt_size));
6838 break;
6839
6840 case dw_val_class_flag:
6841 CHECKSUM_ULEB128 (DW_FORM_flag);
6842 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6843 break;
6844
6845 case dw_val_class_str:
6846 CHECKSUM_ULEB128 (DW_FORM_string);
6847 CHECKSUM_STRING (AT_string (at));
6848 break;
6849
6850 case dw_val_class_addr:
6851 r = AT_addr (at);
6852 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6853 CHECKSUM_ULEB128 (DW_FORM_string);
6854 CHECKSUM_STRING (XSTR (r, 0));
6855 break;
6856
6857 case dw_val_class_offset:
6858 CHECKSUM_ULEB128 (DW_FORM_sdata);
6859 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6860 break;
6861
6862 case dw_val_class_loc:
6863 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6864 loc_checksum_ordered (loc, ctx);
6865 break;
6866
6867 case dw_val_class_fde_ref:
6868 case dw_val_class_lbl_id:
6869 case dw_val_class_lineptr:
6870 case dw_val_class_macptr:
6871 case dw_val_class_loclistsptr:
6872 case dw_val_class_high_pc:
6873 break;
6874
6875 case dw_val_class_file:
6876 case dw_val_class_file_implicit:
6877 CHECKSUM_ULEB128 (DW_FORM_string);
6878 CHECKSUM_STRING (AT_file (at)->filename);
6879 break;
6880
6881 case dw_val_class_data8:
6882 CHECKSUM (at->dw_attr_val.v.val_data8);
6883 break;
6884
6885 default:
6886 break;
6887 }
6888 }
6889
6890 struct checksum_attributes
6891 {
6892 dw_attr_node *at_name;
6893 dw_attr_node *at_type;
6894 dw_attr_node *at_friend;
6895 dw_attr_node *at_accessibility;
6896 dw_attr_node *at_address_class;
6897 dw_attr_node *at_alignment;
6898 dw_attr_node *at_allocated;
6899 dw_attr_node *at_artificial;
6900 dw_attr_node *at_associated;
6901 dw_attr_node *at_binary_scale;
6902 dw_attr_node *at_bit_offset;
6903 dw_attr_node *at_bit_size;
6904 dw_attr_node *at_bit_stride;
6905 dw_attr_node *at_byte_size;
6906 dw_attr_node *at_byte_stride;
6907 dw_attr_node *at_const_value;
6908 dw_attr_node *at_containing_type;
6909 dw_attr_node *at_count;
6910 dw_attr_node *at_data_location;
6911 dw_attr_node *at_data_member_location;
6912 dw_attr_node *at_decimal_scale;
6913 dw_attr_node *at_decimal_sign;
6914 dw_attr_node *at_default_value;
6915 dw_attr_node *at_digit_count;
6916 dw_attr_node *at_discr;
6917 dw_attr_node *at_discr_list;
6918 dw_attr_node *at_discr_value;
6919 dw_attr_node *at_encoding;
6920 dw_attr_node *at_endianity;
6921 dw_attr_node *at_explicit;
6922 dw_attr_node *at_is_optional;
6923 dw_attr_node *at_location;
6924 dw_attr_node *at_lower_bound;
6925 dw_attr_node *at_mutable;
6926 dw_attr_node *at_ordering;
6927 dw_attr_node *at_picture_string;
6928 dw_attr_node *at_prototyped;
6929 dw_attr_node *at_small;
6930 dw_attr_node *at_segment;
6931 dw_attr_node *at_string_length;
6932 dw_attr_node *at_string_length_bit_size;
6933 dw_attr_node *at_string_length_byte_size;
6934 dw_attr_node *at_threads_scaled;
6935 dw_attr_node *at_upper_bound;
6936 dw_attr_node *at_use_location;
6937 dw_attr_node *at_use_UTF8;
6938 dw_attr_node *at_variable_parameter;
6939 dw_attr_node *at_virtuality;
6940 dw_attr_node *at_visibility;
6941 dw_attr_node *at_vtable_elem_location;
6942 };
6943
6944 /* Collect the attributes that we will want to use for the checksum. */
6945
6946 static void
6947 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6948 {
6949 dw_attr_node *a;
6950 unsigned ix;
6951
6952 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6953 {
6954 switch (a->dw_attr)
6955 {
6956 case DW_AT_name:
6957 attrs->at_name = a;
6958 break;
6959 case DW_AT_type:
6960 attrs->at_type = a;
6961 break;
6962 case DW_AT_friend:
6963 attrs->at_friend = a;
6964 break;
6965 case DW_AT_accessibility:
6966 attrs->at_accessibility = a;
6967 break;
6968 case DW_AT_address_class:
6969 attrs->at_address_class = a;
6970 break;
6971 case DW_AT_alignment:
6972 attrs->at_alignment = a;
6973 break;
6974 case DW_AT_allocated:
6975 attrs->at_allocated = a;
6976 break;
6977 case DW_AT_artificial:
6978 attrs->at_artificial = a;
6979 break;
6980 case DW_AT_associated:
6981 attrs->at_associated = a;
6982 break;
6983 case DW_AT_binary_scale:
6984 attrs->at_binary_scale = a;
6985 break;
6986 case DW_AT_bit_offset:
6987 attrs->at_bit_offset = a;
6988 break;
6989 case DW_AT_bit_size:
6990 attrs->at_bit_size = a;
6991 break;
6992 case DW_AT_bit_stride:
6993 attrs->at_bit_stride = a;
6994 break;
6995 case DW_AT_byte_size:
6996 attrs->at_byte_size = a;
6997 break;
6998 case DW_AT_byte_stride:
6999 attrs->at_byte_stride = a;
7000 break;
7001 case DW_AT_const_value:
7002 attrs->at_const_value = a;
7003 break;
7004 case DW_AT_containing_type:
7005 attrs->at_containing_type = a;
7006 break;
7007 case DW_AT_count:
7008 attrs->at_count = a;
7009 break;
7010 case DW_AT_data_location:
7011 attrs->at_data_location = a;
7012 break;
7013 case DW_AT_data_member_location:
7014 attrs->at_data_member_location = a;
7015 break;
7016 case DW_AT_decimal_scale:
7017 attrs->at_decimal_scale = a;
7018 break;
7019 case DW_AT_decimal_sign:
7020 attrs->at_decimal_sign = a;
7021 break;
7022 case DW_AT_default_value:
7023 attrs->at_default_value = a;
7024 break;
7025 case DW_AT_digit_count:
7026 attrs->at_digit_count = a;
7027 break;
7028 case DW_AT_discr:
7029 attrs->at_discr = a;
7030 break;
7031 case DW_AT_discr_list:
7032 attrs->at_discr_list = a;
7033 break;
7034 case DW_AT_discr_value:
7035 attrs->at_discr_value = a;
7036 break;
7037 case DW_AT_encoding:
7038 attrs->at_encoding = a;
7039 break;
7040 case DW_AT_endianity:
7041 attrs->at_endianity = a;
7042 break;
7043 case DW_AT_explicit:
7044 attrs->at_explicit = a;
7045 break;
7046 case DW_AT_is_optional:
7047 attrs->at_is_optional = a;
7048 break;
7049 case DW_AT_location:
7050 attrs->at_location = a;
7051 break;
7052 case DW_AT_lower_bound:
7053 attrs->at_lower_bound = a;
7054 break;
7055 case DW_AT_mutable:
7056 attrs->at_mutable = a;
7057 break;
7058 case DW_AT_ordering:
7059 attrs->at_ordering = a;
7060 break;
7061 case DW_AT_picture_string:
7062 attrs->at_picture_string = a;
7063 break;
7064 case DW_AT_prototyped:
7065 attrs->at_prototyped = a;
7066 break;
7067 case DW_AT_small:
7068 attrs->at_small = a;
7069 break;
7070 case DW_AT_segment:
7071 attrs->at_segment = a;
7072 break;
7073 case DW_AT_string_length:
7074 attrs->at_string_length = a;
7075 break;
7076 case DW_AT_string_length_bit_size:
7077 attrs->at_string_length_bit_size = a;
7078 break;
7079 case DW_AT_string_length_byte_size:
7080 attrs->at_string_length_byte_size = a;
7081 break;
7082 case DW_AT_threads_scaled:
7083 attrs->at_threads_scaled = a;
7084 break;
7085 case DW_AT_upper_bound:
7086 attrs->at_upper_bound = a;
7087 break;
7088 case DW_AT_use_location:
7089 attrs->at_use_location = a;
7090 break;
7091 case DW_AT_use_UTF8:
7092 attrs->at_use_UTF8 = a;
7093 break;
7094 case DW_AT_variable_parameter:
7095 attrs->at_variable_parameter = a;
7096 break;
7097 case DW_AT_virtuality:
7098 attrs->at_virtuality = a;
7099 break;
7100 case DW_AT_visibility:
7101 attrs->at_visibility = a;
7102 break;
7103 case DW_AT_vtable_elem_location:
7104 attrs->at_vtable_elem_location = a;
7105 break;
7106 default:
7107 break;
7108 }
7109 }
7110 }
7111
7112 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7113
7114 static void
7115 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7116 {
7117 dw_die_ref c;
7118 dw_die_ref decl;
7119 struct checksum_attributes attrs;
7120
7121 CHECKSUM_ULEB128 ('D');
7122 CHECKSUM_ULEB128 (die->die_tag);
7123
7124 memset (&attrs, 0, sizeof (attrs));
7125
7126 decl = get_AT_ref (die, DW_AT_specification);
7127 if (decl != NULL)
7128 collect_checksum_attributes (&attrs, decl);
7129 collect_checksum_attributes (&attrs, die);
7130
7131 CHECKSUM_ATTR (attrs.at_name);
7132 CHECKSUM_ATTR (attrs.at_accessibility);
7133 CHECKSUM_ATTR (attrs.at_address_class);
7134 CHECKSUM_ATTR (attrs.at_allocated);
7135 CHECKSUM_ATTR (attrs.at_artificial);
7136 CHECKSUM_ATTR (attrs.at_associated);
7137 CHECKSUM_ATTR (attrs.at_binary_scale);
7138 CHECKSUM_ATTR (attrs.at_bit_offset);
7139 CHECKSUM_ATTR (attrs.at_bit_size);
7140 CHECKSUM_ATTR (attrs.at_bit_stride);
7141 CHECKSUM_ATTR (attrs.at_byte_size);
7142 CHECKSUM_ATTR (attrs.at_byte_stride);
7143 CHECKSUM_ATTR (attrs.at_const_value);
7144 CHECKSUM_ATTR (attrs.at_containing_type);
7145 CHECKSUM_ATTR (attrs.at_count);
7146 CHECKSUM_ATTR (attrs.at_data_location);
7147 CHECKSUM_ATTR (attrs.at_data_member_location);
7148 CHECKSUM_ATTR (attrs.at_decimal_scale);
7149 CHECKSUM_ATTR (attrs.at_decimal_sign);
7150 CHECKSUM_ATTR (attrs.at_default_value);
7151 CHECKSUM_ATTR (attrs.at_digit_count);
7152 CHECKSUM_ATTR (attrs.at_discr);
7153 CHECKSUM_ATTR (attrs.at_discr_list);
7154 CHECKSUM_ATTR (attrs.at_discr_value);
7155 CHECKSUM_ATTR (attrs.at_encoding);
7156 CHECKSUM_ATTR (attrs.at_endianity);
7157 CHECKSUM_ATTR (attrs.at_explicit);
7158 CHECKSUM_ATTR (attrs.at_is_optional);
7159 CHECKSUM_ATTR (attrs.at_location);
7160 CHECKSUM_ATTR (attrs.at_lower_bound);
7161 CHECKSUM_ATTR (attrs.at_mutable);
7162 CHECKSUM_ATTR (attrs.at_ordering);
7163 CHECKSUM_ATTR (attrs.at_picture_string);
7164 CHECKSUM_ATTR (attrs.at_prototyped);
7165 CHECKSUM_ATTR (attrs.at_small);
7166 CHECKSUM_ATTR (attrs.at_segment);
7167 CHECKSUM_ATTR (attrs.at_string_length);
7168 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7169 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7170 CHECKSUM_ATTR (attrs.at_threads_scaled);
7171 CHECKSUM_ATTR (attrs.at_upper_bound);
7172 CHECKSUM_ATTR (attrs.at_use_location);
7173 CHECKSUM_ATTR (attrs.at_use_UTF8);
7174 CHECKSUM_ATTR (attrs.at_variable_parameter);
7175 CHECKSUM_ATTR (attrs.at_virtuality);
7176 CHECKSUM_ATTR (attrs.at_visibility);
7177 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7178 CHECKSUM_ATTR (attrs.at_type);
7179 CHECKSUM_ATTR (attrs.at_friend);
7180 CHECKSUM_ATTR (attrs.at_alignment);
7181
7182 /* Checksum the child DIEs. */
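  /* Children of a DIE live on a circular singly-linked list: DIE->die_child
     points to the *last* child and each child's die_sib points to the next
     one, wrapping back around to the first.  The loop below therefore starts
     at die_child->die_sib (the first child) and stops once it has come back
     around to die_child.  */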
7183 c = die->die_child;
7184 if (c) do {
7185 dw_attr_node *name_attr;
7186
7187 c = c->die_sib;
7188 name_attr = get_AT (c, DW_AT_name);
7189 if (is_template_instantiation (c))
7190 {
7191 /* Ignore instantiations of member type and function templates. */
7192 }
7193 else if (name_attr != NULL
7194 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7195 {
7196 /* Use a shallow checksum for named nested types and member
7197 functions. */
7198 CHECKSUM_ULEB128 ('S');
7199 CHECKSUM_ULEB128 (c->die_tag);
7200 CHECKSUM_STRING (AT_string (name_attr));
7201 }
7202 else
7203 {
7204 /* Use a deep checksum for other children. */
7205 /* Mark this DIE so it gets processed when unmarking. */
7206 if (c->die_mark == 0)
7207 c->die_mark = -1;
7208 die_checksum_ordered (c, ctx, mark);
7209 }
7210 } while (c != die->die_child);
7211
7212 CHECKSUM_ULEB128 (0);
7213 }
7214
7215 /* Add a type name and tag to a hash. */
7216 static void
7217 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7218 {
7219 CHECKSUM_ULEB128 (tag);
7220 CHECKSUM_STRING (name);
7221 }
7222
7223 #undef CHECKSUM
7224 #undef CHECKSUM_STRING
7225 #undef CHECKSUM_ATTR
7226 #undef CHECKSUM_LEB128
7227 #undef CHECKSUM_ULEB128
7228
7229 /* Generate the type signature for DIE. This is computed by generating an
7230 MD5 checksum over the DIE's tag, its relevant attributes, and its
7231 children. Attributes that are references to other DIEs are processed
7232 by recursion, using the MARK field to prevent infinite recursion.
7233 If the DIE is nested inside a namespace or another type, we also
7234 need to include that context in the signature. The lower 64 bits
7235 of the resulting MD5 checksum comprise the signature. */
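/* (Note: the 8-byte value computed here is what later shows up as the type
   unit's signature and in DW_FORM_ref_sig8 references to the type; the
   memcpy below keeps the trailing DWARF_TYPE_SIGNATURE_SIZE bytes of the
   16-byte MD5 digest.)  */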
7236
7237 static void
7238 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7239 {
7240 int mark;
7241 const char *name;
7242 unsigned char checksum[16];
7243 struct md5_ctx ctx;
7244 dw_die_ref decl;
7245 dw_die_ref parent;
7246
7247 name = get_AT_string (die, DW_AT_name);
7248 decl = get_AT_ref (die, DW_AT_specification);
7249 parent = get_die_parent (die);
7250
7251 /* First, compute a signature for just the type name (and its surrounding
7252 context, if any). This is stored in the type unit DIE for link-time
7253 ODR (one-definition rule) checking. */
7254
7255 if (is_cxx () && name != NULL)
7256 {
7257 md5_init_ctx (&ctx);
7258
7259 /* Checksum the names of surrounding namespaces and structures. */
7260 if (parent != NULL)
7261 checksum_die_context (parent, &ctx);
7262
7263 /* Checksum the current DIE. */
7264 die_odr_checksum (die->die_tag, name, &ctx);
7265 md5_finish_ctx (&ctx, checksum);
7266
7267 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7268 }
7269
7270 /* Next, compute the complete type signature. */
7271
7272 md5_init_ctx (&ctx);
7273 mark = 1;
7274 die->die_mark = mark;
7275
7276 /* Checksum the names of surrounding namespaces and structures. */
7277 if (parent != NULL)
7278 checksum_die_context (parent, &ctx);
7279
7280 /* Checksum the DIE and its children. */
7281 die_checksum_ordered (die, &ctx, &mark);
7282 unmark_all_dies (die);
7283 md5_finish_ctx (&ctx, checksum);
7284
7285 /* Store the signature in the type node and link the type DIE and the
7286 type node together. */
7287 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7288 DWARF_TYPE_SIGNATURE_SIZE);
7289 die->comdat_type_p = true;
7290 die->die_id.die_type_node = type_node;
7291 type_node->type_die = die;
7292
7293 /* If the DIE is a specification, link its declaration to the type node
7294 as well. */
7295 if (decl != NULL)
7296 {
7297 decl->comdat_type_p = true;
7298 decl->die_id.die_type_node = type_node;
7299 }
7300 }
7301
7302 /* Do the location expressions look the same? */
7303 static inline int
7304 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7305 {
7306 return loc1->dw_loc_opc == loc2->dw_loc_opc
7307 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7308 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7309 }
7310
7311 /* Do the values look the same? */
7312 static int
7313 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7314 {
7315 dw_loc_descr_ref loc1, loc2;
7316 rtx r1, r2;
7317
7318 if (v1->val_class != v2->val_class)
7319 return 0;
7320
7321 switch (v1->val_class)
7322 {
7323 case dw_val_class_const:
7324 case dw_val_class_const_implicit:
7325 return v1->v.val_int == v2->v.val_int;
7326 case dw_val_class_unsigned_const:
7327 case dw_val_class_unsigned_const_implicit:
7328 return v1->v.val_unsigned == v2->v.val_unsigned;
7329 case dw_val_class_const_double:
7330 return v1->v.val_double.high == v2->v.val_double.high
7331 && v1->v.val_double.low == v2->v.val_double.low;
7332 case dw_val_class_wide_int:
7333 return *v1->v.val_wide == *v2->v.val_wide;
7334 case dw_val_class_vec:
7335 if (v1->v.val_vec.length != v2->v.val_vec.length
7336 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7337 return 0;
7338 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7339 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7340 return 0;
7341 return 1;
7342 case dw_val_class_flag:
7343 return v1->v.val_flag == v2->v.val_flag;
7344 case dw_val_class_str:
7345 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7346
7347 case dw_val_class_addr:
7348 r1 = v1->v.val_addr;
7349 r2 = v2->v.val_addr;
7350 if (GET_CODE (r1) != GET_CODE (r2))
7351 return 0;
7352 return rtx_equal_p (r1, r2);
7353
7354 case dw_val_class_offset:
7355 return v1->v.val_offset == v2->v.val_offset;
7356
7357 case dw_val_class_loc:
7358 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7359 loc1 && loc2;
7360 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7361 if (!same_loc_p (loc1, loc2, mark))
7362 return 0;
7363 return !loc1 && !loc2;
7364
7365 case dw_val_class_die_ref:
7366 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7367
7368 case dw_val_class_fde_ref:
7369 case dw_val_class_vms_delta:
7370 case dw_val_class_lbl_id:
7371 case dw_val_class_lineptr:
7372 case dw_val_class_macptr:
7373 case dw_val_class_loclistsptr:
7374 case dw_val_class_high_pc:
7375 return 1;
7376
7377 case dw_val_class_file:
7378 case dw_val_class_file_implicit:
7379 return v1->v.val_file == v2->v.val_file;
7380
7381 case dw_val_class_data8:
7382 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7383
7384 default:
7385 return 1;
7386 }
7387 }
7388
7389 /* Do the attributes look the same? */
7390
7391 static int
7392 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7393 {
7394 if (at1->dw_attr != at2->dw_attr)
7395 return 0;
7396
7397 /* We don't care that this was compiled with a different compiler
7398 snapshot; if the output is the same, that's what matters. */
7399 if (at1->dw_attr == DW_AT_producer)
7400 return 1;
7401
7402 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7403 }
7404
7405 /* Do the DIEs look the same? */
7406
7407 static int
7408 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7409 {
7410 dw_die_ref c1, c2;
7411 dw_attr_node *a1;
7412 unsigned ix;
7413
7414 /* To avoid infinite recursion. */
7415 if (die1->die_mark)
7416 return die1->die_mark == die2->die_mark;
7417 die1->die_mark = die2->die_mark = ++(*mark);
7418
7419 if (die1->die_tag != die2->die_tag)
7420 return 0;
7421
7422 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7423 return 0;
7424
7425 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7426 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7427 return 0;
7428
7429 c1 = die1->die_child;
7430 c2 = die2->die_child;
7431 if (! c1)
7432 {
7433 if (c2)
7434 return 0;
7435 }
7436 else
7437 for (;;)
7438 {
7439 if (!same_die_p (c1, c2, mark))
7440 return 0;
7441 c1 = c1->die_sib;
7442 c2 = c2->die_sib;
7443 if (c1 == die1->die_child)
7444 {
7445 if (c2 == die2->die_child)
7446 break;
7447 else
7448 return 0;
7449 }
7450 }
7451
7452 return 1;
7453 }
7454
7455 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7456 children, and set die_symbol. */
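/* (Illustrative example: for a main CU whose DW_AT_name is "foo.c" the
   resulting symbol looks roughly like "foo_c_" followed by eight hex digits
   taken from the first four checksum bytes, assuming clean_symbol_name maps
   characters that are not valid in assembler names to underscores.)  */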
7457
7458 static void
7459 compute_comp_unit_symbol (dw_die_ref unit_die)
7460 {
7461 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7462 const char *base = die_name ? lbasename (die_name) : "anonymous";
7463 char *name = XALLOCAVEC (char, strlen (base) + 64);
7464 char *p;
7465 int i, mark;
7466 unsigned char checksum[16];
7467 struct md5_ctx ctx;
7468
7469 /* Compute the checksum of the DIE, then append part of it as hex digits to
7470 the base filename of the unit. */
7471
7472 md5_init_ctx (&ctx);
7473 mark = 0;
7474 die_checksum (unit_die, &ctx, &mark);
7475 unmark_all_dies (unit_die);
7476 md5_finish_ctx (&ctx, checksum);
7477
7478 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7479 not start with a letter but with anything valid for filenames, and
7480 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7481 character is not a letter. */
7482 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7483 clean_symbol_name (name);
7484
7485 p = name + strlen (name);
7486 for (i = 0; i < 4; i++)
7487 {
7488 sprintf (p, "%.2x", checksum[i]);
7489 p += 2;
7490 }
7491
7492 unit_die->die_id.die_symbol = xstrdup (name);
7493 }
7494
7495 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7496
7497 static int
7498 is_type_die (dw_die_ref die)
7499 {
7500 switch (die->die_tag)
7501 {
7502 case DW_TAG_array_type:
7503 case DW_TAG_class_type:
7504 case DW_TAG_interface_type:
7505 case DW_TAG_enumeration_type:
7506 case DW_TAG_pointer_type:
7507 case DW_TAG_reference_type:
7508 case DW_TAG_rvalue_reference_type:
7509 case DW_TAG_string_type:
7510 case DW_TAG_structure_type:
7511 case DW_TAG_subroutine_type:
7512 case DW_TAG_union_type:
7513 case DW_TAG_ptr_to_member_type:
7514 case DW_TAG_set_type:
7515 case DW_TAG_subrange_type:
7516 case DW_TAG_base_type:
7517 case DW_TAG_const_type:
7518 case DW_TAG_file_type:
7519 case DW_TAG_packed_type:
7520 case DW_TAG_volatile_type:
7521 case DW_TAG_typedef:
7522 return 1;
7523 default:
7524 return 0;
7525 }
7526 }
7527
7528 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7529 Basically, we want to choose the bits that are likely to be shared between
7530 compilations (types) and leave out the bits that are specific to individual
7531 compilations (functions). */
7532
7533 static int
7534 is_comdat_die (dw_die_ref c)
7535 {
7536 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7537 we do for stabs. The advantage is a greater likelihood of sharing between
7538 objects that don't include headers in the same order (and therefore would
7539 put the base types in a different comdat). jason 8/28/00 */
7540
7541 if (c->die_tag == DW_TAG_base_type)
7542 return 0;
7543
7544 if (c->die_tag == DW_TAG_pointer_type
7545 || c->die_tag == DW_TAG_reference_type
7546 || c->die_tag == DW_TAG_rvalue_reference_type
7547 || c->die_tag == DW_TAG_const_type
7548 || c->die_tag == DW_TAG_volatile_type)
7549 {
7550 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7551
7552 return t ? is_comdat_die (t) : 0;
7553 }
7554
7555 return is_type_die (c);
7556 }
7557
7558 /* Returns true iff C is a compile-unit DIE. */
7559
7560 static inline bool
7561 is_cu_die (dw_die_ref c)
7562 {
7563 return c && (c->die_tag == DW_TAG_compile_unit
7564 || c->die_tag == DW_TAG_skeleton_unit);
7565 }
7566
7567 /* Returns true iff C is a unit DIE of some sort. */
7568
7569 static inline bool
7570 is_unit_die (dw_die_ref c)
7571 {
7572 return c && (c->die_tag == DW_TAG_compile_unit
7573 || c->die_tag == DW_TAG_partial_unit
7574 || c->die_tag == DW_TAG_type_unit
7575 || c->die_tag == DW_TAG_skeleton_unit);
7576 }
7577
7578 /* Returns true iff C is a namespace DIE. */
7579
7580 static inline bool
7581 is_namespace_die (dw_die_ref c)
7582 {
7583 return c && c->die_tag == DW_TAG_namespace;
7584 }
7585
7586 /* Returns true iff C is a class or structure DIE. */
7587
7588 static inline bool
7589 is_class_die (dw_die_ref c)
7590 {
7591 return c && (c->die_tag == DW_TAG_class_type
7592 || c->die_tag == DW_TAG_structure_type);
7593 }
7594
7595 /* Return non-zero if this DIE is a template parameter. */
7596
7597 static inline bool
7598 is_template_parameter (dw_die_ref die)
7599 {
7600 switch (die->die_tag)
7601 {
7602 case DW_TAG_template_type_param:
7603 case DW_TAG_template_value_param:
7604 case DW_TAG_GNU_template_template_param:
7605 case DW_TAG_GNU_template_parameter_pack:
7606 return true;
7607 default:
7608 return false;
7609 }
7610 }
7611
7612 /* Return non-zero if this DIE represents a template instantiation. */
7613
7614 static inline bool
7615 is_template_instantiation (dw_die_ref die)
7616 {
7617 dw_die_ref c;
7618
7619 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7620 return false;
7621 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7622 return false;
7623 }
7624
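/* Generate a fresh internal label name from PREFIX and a running counter,
   and return a heap-allocated copy of it.  */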
7625 static char *
7626 gen_internal_sym (const char *prefix)
7627 {
7628 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7629
7630 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7631 return xstrdup (buf);
7632 }
7633
7634 /* Return non-zero if this DIE is a declaration. */
7635
7636 static int
7637 is_declaration_die (dw_die_ref die)
7638 {
7639 dw_attr_node *a;
7640 unsigned ix;
7641
7642 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7643 if (a->dw_attr == DW_AT_declaration)
7644 return 1;
7645
7646 return 0;
7647 }
7648
7649 /* Return non-zero if this DIE is nested inside a subprogram. */
7650
7651 static int
7652 is_nested_in_subprogram (dw_die_ref die)
7653 {
7654 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7655
7656 if (decl == NULL)
7657 decl = die;
7658 return local_scope_p (decl);
7659 }
7660
7661 /* Return non-zero if this DIE contains a defining declaration of a
7662 subprogram. */
7663
7664 static int
7665 contains_subprogram_definition (dw_die_ref die)
7666 {
7667 dw_die_ref c;
7668
7669 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7670 return 1;
7671 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7672 return 0;
7673 }
7674
7675 /* Return non-zero if this is a type DIE that should be moved to a
7676 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7677 unit type. */
7678
7679 static int
7680 should_move_die_to_comdat (dw_die_ref die)
7681 {
7682 switch (die->die_tag)
7683 {
7684 case DW_TAG_class_type:
7685 case DW_TAG_structure_type:
7686 case DW_TAG_enumeration_type:
7687 case DW_TAG_union_type:
7688 /* Don't move declarations, inlined instances, types nested in a
7689 subprogram, or types that contain subprogram definitions. */
7690 if (is_declaration_die (die)
7691 || get_AT (die, DW_AT_abstract_origin)
7692 || is_nested_in_subprogram (die)
7693 || contains_subprogram_definition (die))
7694 return 0;
7695 return 1;
7696 case DW_TAG_array_type:
7697 case DW_TAG_interface_type:
7698 case DW_TAG_pointer_type:
7699 case DW_TAG_reference_type:
7700 case DW_TAG_rvalue_reference_type:
7701 case DW_TAG_string_type:
7702 case DW_TAG_subroutine_type:
7703 case DW_TAG_ptr_to_member_type:
7704 case DW_TAG_set_type:
7705 case DW_TAG_subrange_type:
7706 case DW_TAG_base_type:
7707 case DW_TAG_const_type:
7708 case DW_TAG_file_type:
7709 case DW_TAG_packed_type:
7710 case DW_TAG_volatile_type:
7711 case DW_TAG_typedef:
7712 default:
7713 return 0;
7714 }
7715 }
7716
7717 /* Make a clone of DIE. */
7718
7719 static dw_die_ref
7720 clone_die (dw_die_ref die)
7721 {
7722 dw_die_ref clone = new_die_raw (die->die_tag);
7723 dw_attr_node *a;
7724 unsigned ix;
7725
7726 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7727 add_dwarf_attr (clone, a);
7728
7729 return clone;
7730 }
7731
7732 /* Make a clone of the tree rooted at DIE. */
7733
7734 static dw_die_ref
7735 clone_tree (dw_die_ref die)
7736 {
7737 dw_die_ref c;
7738 dw_die_ref clone = clone_die (die);
7739
7740 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7741
7742 return clone;
7743 }
7744
7745 /* Make a clone of DIE as a declaration. */
7746
7747 static dw_die_ref
7748 clone_as_declaration (dw_die_ref die)
7749 {
7750 dw_die_ref clone;
7751 dw_die_ref decl;
7752 dw_attr_node *a;
7753 unsigned ix;
7754
7755 /* If the DIE is already a declaration, just clone it. */
7756 if (is_declaration_die (die))
7757 return clone_die (die);
7758
7759 /* If the DIE is a specification, just clone its declaration DIE. */
7760 decl = get_AT_ref (die, DW_AT_specification);
7761 if (decl != NULL)
7762 {
7763 clone = clone_die (decl);
7764 if (die->comdat_type_p)
7765 add_AT_die_ref (clone, DW_AT_signature, die);
7766 return clone;
7767 }
7768
7769 clone = new_die_raw (die->die_tag);
7770
7771 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7772 {
7773 /* We don't want to copy over all attributes.
7774 For example we don't want DW_AT_byte_size because otherwise we will no
7775 longer have a declaration and GDB will treat it as a definition. */
7776
7777 switch (a->dw_attr)
7778 {
7779 case DW_AT_abstract_origin:
7780 case DW_AT_artificial:
7781 case DW_AT_containing_type:
7782 case DW_AT_external:
7783 case DW_AT_name:
7784 case DW_AT_type:
7785 case DW_AT_virtuality:
7786 case DW_AT_linkage_name:
7787 case DW_AT_MIPS_linkage_name:
7788 add_dwarf_attr (clone, a);
7789 break;
7790 case DW_AT_byte_size:
7791 case DW_AT_alignment:
7792 default:
7793 break;
7794 }
7795 }
7796
7797 if (die->comdat_type_p)
7798 add_AT_die_ref (clone, DW_AT_signature, die);
7799
7800 add_AT_flag (clone, DW_AT_declaration, 1);
7801 return clone;
7802 }
7803
7804
7805 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7806
7807 struct decl_table_entry
7808 {
7809 dw_die_ref orig;
7810 dw_die_ref copy;
7811 };
7812
7813 /* Helpers to manipulate hash table of copied declarations. */
7814
7815 /* Hashtable helpers. */
7816
7817 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7818 {
7819 typedef die_struct *compare_type;
7820 static inline hashval_t hash (const decl_table_entry *);
7821 static inline bool equal (const decl_table_entry *, const die_struct *);
7822 };
7823
7824 inline hashval_t
7825 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7826 {
7827 return htab_hash_pointer (entry->orig);
7828 }
7829
7830 inline bool
7831 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7832 const die_struct *entry2)
7833 {
7834 return entry1->orig == entry2;
7835 }
7836
7837 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7838
7839 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7840 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7841 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7842 to check if the ancestor has already been copied into UNIT. */
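/* (Illustrative example: when a nested type such as ns::Outer::Inner is
   broken out into a type unit, this clones the enclosing DW_TAG_namespace
   "ns" and DW_TAG_class_type "Outer" as declaration-only DIEs under UNIT so
   that the moved DIE keeps its surrounding context.)  */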
7843
7844 static dw_die_ref
7845 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7846 decl_hash_type *decl_table)
7847 {
7848 dw_die_ref parent = die->die_parent;
7849 dw_die_ref new_parent = unit;
7850 dw_die_ref copy;
7851 decl_table_entry **slot = NULL;
7852 struct decl_table_entry *entry = NULL;
7853
7854 if (decl_table)
7855 {
7856 /* Check if the entry has already been copied to UNIT. */
7857 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7858 INSERT);
7859 if (*slot != HTAB_EMPTY_ENTRY)
7860 {
7861 entry = *slot;
7862 return entry->copy;
7863 }
7864
7865 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7866 entry = XCNEW (struct decl_table_entry);
7867 entry->orig = die;
7868 entry->copy = NULL;
7869 *slot = entry;
7870 }
7871
7872 if (parent != NULL)
7873 {
7874 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7875 if (spec != NULL)
7876 parent = spec;
7877 if (!is_unit_die (parent))
7878 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7879 }
7880
7881 copy = clone_as_declaration (die);
7882 add_child_die (new_parent, copy);
7883
7884 if (decl_table)
7885 {
7886 /* Record the pointer to the copy. */
7887 entry->copy = copy;
7888 }
7889
7890 return copy;
7891 }
7892 /* Copy the declaration context to the new type unit DIE. This includes
7893 any surrounding namespace or type declarations. If the DIE has a
7894 DW_AT_specification attribute, the attributes and children attached to
7895 the specification are copied as well, and a pointer to the original
7896 parent of the declaration DIE is returned. Returns NULL otherwise. */
7897
7898 static dw_die_ref
7899 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7900 {
7901 dw_die_ref decl;
7902 dw_die_ref new_decl;
7903 dw_die_ref orig_parent = NULL;
7904
7905 decl = get_AT_ref (die, DW_AT_specification);
7906 if (decl == NULL)
7907 decl = die;
7908 else
7909 {
7910 unsigned ix;
7911 dw_die_ref c;
7912 dw_attr_node *a;
7913
7914 /* The original DIE will be changed to a declaration, and must
7915 be moved to be a child of the original declaration DIE. */
7916 orig_parent = decl->die_parent;
7917
7918 /* Copy the type node pointer from the new DIE to the original
7919 declaration DIE so we can forward references later. */
7920 decl->comdat_type_p = true;
7921 decl->die_id.die_type_node = die->die_id.die_type_node;
7922
7923 remove_AT (die, DW_AT_specification);
7924
7925 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7926 {
7927 if (a->dw_attr != DW_AT_name
7928 && a->dw_attr != DW_AT_declaration
7929 && a->dw_attr != DW_AT_external)
7930 add_dwarf_attr (die, a);
7931 }
7932
7933 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7934 }
7935
7936 if (decl->die_parent != NULL
7937 && !is_unit_die (decl->die_parent))
7938 {
7939 new_decl = copy_ancestor_tree (unit, decl, NULL);
7940 if (new_decl != NULL)
7941 {
7942 remove_AT (new_decl, DW_AT_signature);
7943 add_AT_specification (die, new_decl);
7944 }
7945 }
7946
7947 return orig_parent;
7948 }
7949
7950 /* Generate the skeleton ancestor tree for the given NODE, then clone
7951 the DIE and add the clone into the tree. */
7952
7953 static void
7954 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7955 {
7956 if (node->new_die != NULL)
7957 return;
7958
7959 node->new_die = clone_as_declaration (node->old_die);
7960
7961 if (node->parent != NULL)
7962 {
7963 generate_skeleton_ancestor_tree (node->parent);
7964 add_child_die (node->parent->new_die, node->new_die);
7965 }
7966 }
7967
7968 /* Generate a skeleton tree of DIEs containing any declarations that are
7969 found in the original tree. We traverse the tree looking for declaration
7970 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7971
7972 static void
7973 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7974 {
7975 skeleton_chain_node node;
7976 dw_die_ref c;
7977 dw_die_ref first;
7978 dw_die_ref prev = NULL;
7979 dw_die_ref next = NULL;
7980
7981 node.parent = parent;
7982
7983 first = c = parent->old_die->die_child;
7984 if (c)
7985 next = c->die_sib;
7986 if (c) do {
7987 if (prev == NULL || prev->die_sib == c)
7988 prev = c;
7989 c = next;
7990 next = (c == first ? NULL : c->die_sib);
7991 node.old_die = c;
7992 node.new_die = NULL;
7993 if (is_declaration_die (c))
7994 {
7995 if (is_template_instantiation (c))
7996 {
7997 /* Instantiated templates do not need to be cloned into the
7998 type unit. Just move the DIE and its children back to
7999 the skeleton tree (in the main CU). */
8000 remove_child_with_prev (c, prev);
8001 add_child_die (parent->new_die, c);
8002 c = prev;
8003 }
8004 else if (c->comdat_type_p)
8005 {
8006 /* This is the skeleton of a type broken out by an earlier
8007 break_out_comdat_types pass. Clone the existing DIE, but keep
8008 the children under the original (which is in the main CU). */
8009 dw_die_ref clone = clone_die (c);
8010
8011 replace_child (c, clone, prev);
8012 generate_skeleton_ancestor_tree (parent);
8013 add_child_die (parent->new_die, c);
8014 c = clone;
8015 continue;
8016 }
8017 else
8018 {
8019 /* Clone the existing DIE, move the original to the skeleton
8020 tree (which is in the main CU), and put the clone, with
8021 all the original's children, where the original came from
8022 (which is about to be moved to the type unit). */
8023 dw_die_ref clone = clone_die (c);
8024 move_all_children (c, clone);
8025
8026 /* If the original has a DW_AT_object_pointer attribute,
8027 it would now point to a child DIE just moved to the
8028 cloned tree, so we need to remove that attribute from
8029 the original. */
8030 remove_AT (c, DW_AT_object_pointer);
8031
8032 replace_child (c, clone, prev);
8033 generate_skeleton_ancestor_tree (parent);
8034 add_child_die (parent->new_die, c);
8035 node.old_die = clone;
8036 node.new_die = c;
8037 c = clone;
8038 }
8039 }
8040 generate_skeleton_bottom_up (&node);
8041 } while (next != NULL);
8042 }
8043
8044 /* Wrapper function for generate_skeleton_bottom_up. */
8045
8046 static dw_die_ref
8047 generate_skeleton (dw_die_ref die)
8048 {
8049 skeleton_chain_node node;
8050
8051 node.old_die = die;
8052 node.new_die = NULL;
8053 node.parent = NULL;
8054
8055 /* If this type definition is nested inside another type,
8056 and is not an instantiation of a template, always leave
8057 at least a declaration in its place. */
8058 if (die->die_parent != NULL
8059 && is_type_die (die->die_parent)
8060 && !is_template_instantiation (die))
8061 node.new_die = clone_as_declaration (die);
8062
8063 generate_skeleton_bottom_up (&node);
8064 return node.new_die;
8065 }
8066
8067 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8068 declaration. The original DIE is moved to a new compile unit so that
8069 existing references to it follow it to the new location. If any of the
8070 original DIE's descendants is a declaration, we need to replace the
8071 original DIE with a skeleton tree and move the declarations back into the
8072 skeleton tree. */
8073
8074 static dw_die_ref
8075 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8076 dw_die_ref prev)
8077 {
8078 dw_die_ref skeleton, orig_parent;
8079
8080 /* Copy the declaration context to the type unit DIE. If the returned
8081 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8082 that DIE. */
8083 orig_parent = copy_declaration_context (unit, child);
8084
8085 skeleton = generate_skeleton (child);
8086 if (skeleton == NULL)
8087 remove_child_with_prev (child, prev);
8088 else
8089 {
8090 skeleton->comdat_type_p = true;
8091 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8092
8093 /* If the original DIE was a specification, we need to put
8094 the skeleton under the parent DIE of the declaration.
8095 This leaves the original declaration in the tree, but
8096 it will be pruned later since there are no longer any
8097 references to it. */
8098 if (orig_parent != NULL)
8099 {
8100 remove_child_with_prev (child, prev);
8101 add_child_die (orig_parent, skeleton);
8102 }
8103 else
8104 replace_child (child, skeleton, prev);
8105 }
8106
8107 return skeleton;
8108 }
8109
8110 static void
8111 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8112 comdat_type_node *type_node,
8113 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8114
8115 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DWARF
8116 procedure DIE, put it under TYPE_NODE and return the copy. Continue looking
8117 for DWARF procedure references in its DW_AT_location attribute. */
8118
8119 static dw_die_ref
8120 copy_dwarf_procedure (dw_die_ref die,
8121 comdat_type_node *type_node,
8122 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8123 {
8124 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8125
8126 /* DWARF procedures are not supposed to have children... */
8127 gcc_assert (die->die_child == NULL);
8128
8129 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8130 gcc_assert (vec_safe_length (die->die_attr) == 1
8131 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8132
8133 /* Do not copy DWARF procedures more than once. */
8134 bool existed;
8135 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8136 if (existed)
8137 return die_copy;
8138
8139 die_copy = clone_die (die);
8140 add_child_die (type_node->root_die, die_copy);
8141 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8142 return die_copy;
8143 }
8144
8145 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8146 procedures in DIE's attributes. */
8147
8148 static void
8149 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8150 comdat_type_node *type_node,
8151 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8152 {
8153 dw_attr_node *a;
8154 unsigned i;
8155
8156 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8157 {
8158 dw_loc_descr_ref loc;
8159
8160 if (a->dw_attr_val.val_class != dw_val_class_loc)
8161 continue;
8162
8163 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8164 {
8165 switch (loc->dw_loc_opc)
8166 {
8167 case DW_OP_call2:
8168 case DW_OP_call4:
8169 case DW_OP_call_ref:
8170 gcc_assert (loc->dw_loc_oprnd1.val_class
8171 == dw_val_class_die_ref);
8172 loc->dw_loc_oprnd1.v.val_die_ref.die
8173 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8174 type_node,
8175 copied_dwarf_procs);
8176
8177 default:
8178 break;
8179 }
8180 }
8181 }
8182 }
8183
8184 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8185 rewrite references to point to the copies.
8186
8187 References are looked for in DIE's attributes and recursively in all of its
8188 children's attributes that are location descriptions. COPIED_DWARF_PROCS is
8189 a mapping from old DWARF procedures to their copies; it is used to avoid
8190 copying the same DWARF procedure twice under TYPE_NODE. */
8191
8192 static void
8193 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8194 comdat_type_node *type_node,
8195 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8196 {
8197 dw_die_ref c;
8198
8199 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8200 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8201 type_node,
8202 copied_dwarf_procs));
8203 }
8204
8205 /* Traverse the DIE and set up additional .debug_types or .debug_info
8206 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8207 section. */
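/* (Sketch of the transformation for a type T defined in the main CU: the
   full DIE for T, with its children, is re-parented under a fresh
   DW_TAG_type_unit whose signature comes from generate_type_signature,
   while the main CU keeps at most a skeleton DIE that refers back to the
   type via DW_AT_signature; see remove_child_or_replace_with_skeleton.)  */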
8208
8209 static void
8210 break_out_comdat_types (dw_die_ref die)
8211 {
8212 dw_die_ref c;
8213 dw_die_ref first;
8214 dw_die_ref prev = NULL;
8215 dw_die_ref next = NULL;
8216 dw_die_ref unit = NULL;
8217
8218 first = c = die->die_child;
8219 if (c)
8220 next = c->die_sib;
8221 if (c) do {
8222 if (prev == NULL || prev->die_sib == c)
8223 prev = c;
8224 c = next;
8225 next = (c == first ? NULL : c->die_sib);
8226 if (should_move_die_to_comdat (c))
8227 {
8228 dw_die_ref replacement;
8229 comdat_type_node *type_node;
8230
8231 /* Break out nested types into their own type units. */
8232 break_out_comdat_types (c);
8233
8234 /* Create a new type unit DIE as the root for the new tree, and
8235 add it to the list of comdat types. */
8236 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8237 add_AT_unsigned (unit, DW_AT_language,
8238 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8239 type_node = ggc_cleared_alloc<comdat_type_node> ();
8240 type_node->root_die = unit;
8241 type_node->next = comdat_type_list;
8242 comdat_type_list = type_node;
8243
8244 /* Generate the type signature. */
8245 generate_type_signature (c, type_node);
8246
8247 /* Copy the declaration context, attributes, and children of the
8248 declaration into the new type unit DIE, then remove this DIE
8249 from the main CU (or replace it with a skeleton if necessary). */
8250 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8251 type_node->skeleton_die = replacement;
8252
8253 /* Add the DIE to the new compunit. */
8254 add_child_die (unit, c);
8255
8256 /* Types can reference DWARF procedures for type size or data location
8257 expressions. Calls in DWARF expressions cannot target procedures
8258 that are not in the same section. So we must copy DWARF procedures
8259 along with this type and then rewrite references to them. */
8260 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8261 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8262
8263 if (replacement != NULL)
8264 c = replacement;
8265 }
8266 else if (c->die_tag == DW_TAG_namespace
8267 || c->die_tag == DW_TAG_class_type
8268 || c->die_tag == DW_TAG_structure_type
8269 || c->die_tag == DW_TAG_union_type)
8270 {
8271 /* Look for nested types that can be broken out. */
8272 break_out_comdat_types (c);
8273 }
8274 } while (next != NULL);
8275 }
8276
8277 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8278 Enter all the cloned children into the hash table decl_table. */
8279
8280 static dw_die_ref
8281 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8282 {
8283 dw_die_ref c;
8284 dw_die_ref clone;
8285 struct decl_table_entry *entry;
8286 decl_table_entry **slot;
8287
8288 if (die->die_tag == DW_TAG_subprogram)
8289 clone = clone_as_declaration (die);
8290 else
8291 clone = clone_die (die);
8292
8293 slot = decl_table->find_slot_with_hash (die,
8294 htab_hash_pointer (die), INSERT);
8295
8296 /* Assert that DIE isn't in the hash table yet. If it were already there,
8297 its ancestors would necessarily be there as well, and clone_tree_partial
8298 wouldn't have been called for it. */
8299 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8300
8301 entry = XCNEW (struct decl_table_entry);
8302 entry->orig = die;
8303 entry->copy = clone;
8304 *slot = entry;
8305
8306 if (die->die_tag != DW_TAG_subprogram)
8307 FOR_EACH_CHILD (die, c,
8308 add_child_die (clone, clone_tree_partial (c, decl_table)));
8309
8310 return clone;
8311 }
8312
8313 /* Walk the DIE and its children, looking for references to incomplete
8314 or trivial types that are unmarked (i.e., that are not in the current
8315 type_unit). */
8316
8317 static void
8318 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8319 {
8320 dw_die_ref c;
8321 dw_attr_node *a;
8322 unsigned ix;
8323
8324 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8325 {
8326 if (AT_class (a) == dw_val_class_die_ref)
8327 {
8328 dw_die_ref targ = AT_ref (a);
8329 decl_table_entry **slot;
8330 struct decl_table_entry *entry;
8331
8332 if (targ->die_mark != 0 || targ->comdat_type_p)
8333 continue;
8334
8335 slot = decl_table->find_slot_with_hash (targ,
8336 htab_hash_pointer (targ),
8337 INSERT);
8338
8339 if (*slot != HTAB_EMPTY_ENTRY)
8340 {
8341 /* TARG has already been copied, so we just need to
8342 modify the reference to point to the copy. */
8343 entry = *slot;
8344 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8345 }
8346 else
8347 {
8348 dw_die_ref parent = unit;
8349 dw_die_ref copy = clone_die (targ);
8350
8351 /* Record in DECL_TABLE that TARG has been copied.
8352 Need to do this now, before the recursive call,
8353 because DECL_TABLE may be expanded and SLOT
8354 would no longer be a valid pointer. */
8355 entry = XCNEW (struct decl_table_entry);
8356 entry->orig = targ;
8357 entry->copy = copy;
8358 *slot = entry;
8359
8360 /* If TARG is not a declaration DIE, we need to copy its
8361 children. */
8362 if (!is_declaration_die (targ))
8363 {
8364 FOR_EACH_CHILD (
8365 targ, c,
8366 add_child_die (copy,
8367 clone_tree_partial (c, decl_table)));
8368 }
8369
8370 /* Make sure the cloned tree is marked as part of the
8371 type unit. */
8372 mark_dies (copy);
8373
8374 /* If TARG has surrounding context, copy its ancestor tree
8375 into the new type unit. */
8376 if (targ->die_parent != NULL
8377 && !is_unit_die (targ->die_parent))
8378 parent = copy_ancestor_tree (unit, targ->die_parent,
8379 decl_table);
8380
8381 add_child_die (parent, copy);
8382 a->dw_attr_val.v.val_die_ref.die = copy;
8383
8384 /* Make sure the newly-copied DIE is walked. If it was
8385 installed in a previously-added context, it won't
8386 get visited otherwise. */
8387 if (parent != unit)
8388 {
8389 /* Find the highest point of the newly-added tree,
8390 mark each node along the way, and walk from there. */
8391 parent->die_mark = 1;
8392 while (parent->die_parent
8393 && parent->die_parent->die_mark == 0)
8394 {
8395 parent = parent->die_parent;
8396 parent->die_mark = 1;
8397 }
8398 copy_decls_walk (unit, parent, decl_table);
8399 }
8400 }
8401 }
8402 }
8403
8404 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8405 }
8406
8407 /* Copy declarations for "unworthy" types into the new comdat section.
8408 Incomplete types, modified types, and certain other types aren't broken
8409 out into comdat sections of their own, so they don't have a signature,
8410 and we need to copy the declaration into the same section so that we
8411 don't have an external reference. */
8412
8413 static void
8414 copy_decls_for_unworthy_types (dw_die_ref unit)
8415 {
8416 mark_dies (unit);
8417 decl_hash_type decl_table (10);
8418 copy_decls_walk (unit, unit, &decl_table);
8419 unmark_dies (unit);
8420 }
8421
8422 /* Traverse the DIE and add a sibling attribute if it may have the
8423 effect of speeding up access to siblings. To save some space,
8424 avoid generating sibling attributes for DIEs without children. */
8425
8426 static void
8427 add_sibling_attributes (dw_die_ref die)
8428 {
8429 dw_die_ref c;
8430
8431 if (! die->die_child)
8432 return;
8433
8434 if (die->die_parent && die != die->die_parent->die_child)
8435 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8436
8437 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8438 }
8439
8440 /* Output all location lists for the DIE and its children. */
8441
8442 static void
8443 output_location_lists (dw_die_ref die)
8444 {
8445 dw_die_ref c;
8446 dw_attr_node *a;
8447 unsigned ix;
8448
8449 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8450 if (AT_class (a) == dw_val_class_loc_list)
8451 output_loc_list (AT_loc_list (a));
8452
8453 FOR_EACH_CHILD (die, c, output_location_lists (c));
8454 }
8455
8456 /* During assign_location_list_indexes and output_loclists_offsets this is
8457 the current index; afterwards it is the number of assigned indexes (i.e.
8458 how large the .debug_loclists* offset table should be). */
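/* (With split DWARF and DWARF 5, a location list attribute is emitted as a
   uleb128 index into the .debug_loclists offset table rather than as a
   direct section offset; these indexes are assigned below and the offset
   table itself is emitted by output_loclists_offsets.)  */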
8459 static unsigned int loc_list_idx;
8460
8461 /* Output all location list offsets for the DIE and its children. */
8462
8463 static void
8464 output_loclists_offsets (dw_die_ref die)
8465 {
8466 dw_die_ref c;
8467 dw_attr_node *a;
8468 unsigned ix;
8469
8470 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8471 if (AT_class (a) == dw_val_class_loc_list)
8472 {
8473 dw_loc_list_ref l = AT_loc_list (a);
8474 if (l->offset_emitted)
8475 continue;
8476 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8477 loc_section_label, NULL);
8478 gcc_assert (l->hash == loc_list_idx);
8479 loc_list_idx++;
8480 l->offset_emitted = true;
8481 }
8482
8483 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8484 }
8485
8486 /* Recursively set indexes of location lists. */
8487
8488 static void
8489 assign_location_list_indexes (dw_die_ref die)
8490 {
8491 dw_die_ref c;
8492 dw_attr_node *a;
8493 unsigned ix;
8494
8495 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8496 if (AT_class (a) == dw_val_class_loc_list)
8497 {
8498 dw_loc_list_ref list = AT_loc_list (a);
8499 if (!list->num_assigned)
8500 {
8501 list->num_assigned = true;
8502 list->hash = loc_list_idx++;
8503 }
8504 }
8505
8506 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8507 }
8508
8509 /* We want to limit the number of external references, because they are
8510 larger than local references: a relocation takes multiple words, and
8511 even a sig8 reference is always eight bytes, whereas a local reference
8512 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8513 So if we encounter multiple external references to the same type DIE, we
8514 make a local typedef stub for it and redirect all references there.
8515
8516 This is the element of the hash table for keeping track of these
8517 references. */
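/* (Illustrative example: if several DIEs in this CU refer to a "struct foo"
   that lives in a comdat type unit, dwarf2_build_local_stub below creates a
   single local

       DW_TAG_structure_type
           DW_AT_signature   <8-byte type signature>

   and redirects all of those references to it, so each one becomes a small
   local DW_FORM_ref* instead of an external sig8 or relocated offset.)  */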
8518
8519 struct external_ref
8520 {
8521 dw_die_ref type;
8522 dw_die_ref stub;
8523 unsigned n_refs;
8524 };
8525
8526 /* Hashtable helpers. */
8527
8528 struct external_ref_hasher : free_ptr_hash <external_ref>
8529 {
8530 static inline hashval_t hash (const external_ref *);
8531 static inline bool equal (const external_ref *, const external_ref *);
8532 };
8533
8534 inline hashval_t
8535 external_ref_hasher::hash (const external_ref *r)
8536 {
8537 dw_die_ref die = r->type;
8538 hashval_t h = 0;
8539
8540 /* We can't use the address of the DIE for hashing, because
8541 that will make the order of the stub DIEs non-deterministic. */
8542 if (! die->comdat_type_p)
8543 /* We have a symbol; use it to compute a hash. */
8544 h = htab_hash_string (die->die_id.die_symbol);
8545 else
8546 {
8547 /* We have a type signature; use a subset of the bits as the hash.
8548 The 8-byte signature is at least as large as hashval_t. */
8549 comdat_type_node *type_node = die->die_id.die_type_node;
8550 memcpy (&h, type_node->signature, sizeof (h));
8551 }
8552 return h;
8553 }
8554
8555 inline bool
8556 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8557 {
8558 return r1->type == r2->type;
8559 }
8560
8561 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8562
8563 /* Return a pointer to the external_ref for references to DIE. */
8564
8565 static struct external_ref *
8566 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8567 {
8568 struct external_ref ref, *ref_p;
8569 external_ref **slot;
8570
8571 ref.type = die;
8572 slot = map->find_slot (&ref, INSERT);
8573 if (*slot != HTAB_EMPTY_ENTRY)
8574 return *slot;
8575
8576 ref_p = XCNEW (struct external_ref);
8577 ref_p->type = die;
8578 *slot = ref_p;
8579 return ref_p;
8580 }
8581
8582 /* Subroutine of optimize_external_refs, below.
8583
8584 If we see a type skeleton, record it as our stub. If we see external
8585 references, remember how many we've seen. */
8586
8587 static void
8588 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8589 {
8590 dw_die_ref c;
8591 dw_attr_node *a;
8592 unsigned ix;
8593 struct external_ref *ref_p;
8594
8595 if (is_type_die (die)
8596 && (c = get_AT_ref (die, DW_AT_signature)))
8597 {
8598 /* This is a local skeleton; use it for local references. */
8599 ref_p = lookup_external_ref (map, c);
8600 ref_p->stub = die;
8601 }
8602
8603 /* Scan the DIE references, and remember any that refer to DIEs from
8604 other CUs (i.e. those which are not marked). */
8605 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8606 if (AT_class (a) == dw_val_class_die_ref
8607 && (c = AT_ref (a))->die_mark == 0
8608 && is_type_die (c))
8609 {
8610 ref_p = lookup_external_ref (map, c);
8611 ref_p->n_refs++;
8612 }
8613
8614 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8615 }
8616
8617 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8618 points to an external_ref, DATA is the CU we're processing. If we don't
8619 already have a local stub, and we have multiple refs, build a stub. */
8620
8621 int
8622 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8623 {
8624 struct external_ref *ref_p = *slot;
8625
8626 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8627 {
8628 /* We have multiple references to this type, so build a small stub.
8629 Both of these forms are a bit dodgy from the perspective of the
8630 DWARF standard, since technically they should have names. */
8631 dw_die_ref cu = data;
8632 dw_die_ref type = ref_p->type;
8633 dw_die_ref stub = NULL;
8634
8635 if (type->comdat_type_p)
8636 {
8637 /* If we refer to this type via sig8, use AT_signature. */
8638 stub = new_die (type->die_tag, cu, NULL_TREE);
8639 add_AT_die_ref (stub, DW_AT_signature, type);
8640 }
8641 else
8642 {
8643 /* Otherwise, use a typedef with no name. */
8644 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8645 add_AT_die_ref (stub, DW_AT_type, type);
8646 }
8647
8648 stub->die_mark++;
8649 ref_p->stub = stub;
8650 }
8651 return 1;
8652 }
8653
8654 /* DIE is a unit; look through all the DIE references to see if there are
8655 any external references to types, and if so, create local stubs for
8656 them which will be applied in build_abbrev_table. This is useful because
8657 references to local DIEs are smaller. */
8658
8659 static external_ref_hash_type *
8660 optimize_external_refs (dw_die_ref die)
8661 {
8662 external_ref_hash_type *map = new external_ref_hash_type (10);
8663 optimize_external_refs_1 (die, map);
8664 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8665 return map;
8666 }
8667
8668 /* The following 4 variables are temporaries that are computed only during the
8669 build_abbrev_table call and used and released during the following
8670 optimize_abbrev_table call. */
8671
8672 /* First abbrev_id that can be optimized based on usage. */
8673 static unsigned int abbrev_opt_start;
8674
8675 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8676 abbrev_id smaller than this, because they must be already sized
8677 during build_abbrev_table). */
8678 static unsigned int abbrev_opt_base_type_end;
8679
8680 /* Vector of usage counts during build_abbrev_table. Indexed by
8681 abbrev_id - abbrev_opt_start. */
8682 static vec<unsigned int> abbrev_usage_count;
8683
8684 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8685 static vec<dw_die_ref> sorted_abbrev_dies;
8686
8687 /* The format of each DIE (and its attribute value pairs) is encoded in an
8688 abbreviation table. This routine builds the abbreviation table and assigns
8689 a unique abbreviation id for each abbreviation entry. The children of each
8690 die are visited recursively. */
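/* (An abbreviation entry effectively records a DIE "shape": the tag, whether
   the DIE has children, and the ordered list of attribute/form pairs.  DIEs
   with an identical shape share one abbreviation id, which is why the loop
   below compares tag, child presence, and each attribute's value_format
   before allocating a new entry.)  */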
8691
8692 static void
8693 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8694 {
8695 unsigned int abbrev_id = 0;
8696 dw_die_ref c;
8697 dw_attr_node *a;
8698 unsigned ix;
8699 dw_die_ref abbrev;
8700
8701 /* Scan the DIE references, and replace any that refer to
8702 DIEs from other CUs (i.e. those which are not marked) with
8703 the local stubs we built in optimize_external_refs. */
8704 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8705 if (AT_class (a) == dw_val_class_die_ref
8706 && (c = AT_ref (a))->die_mark == 0)
8707 {
8708 struct external_ref *ref_p;
8709 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8710
8711 ref_p = lookup_external_ref (extern_map, c);
8712 if (ref_p->stub && ref_p->stub != die)
8713 change_AT_die_ref (a, ref_p->stub);
8714 else
8715 /* We aren't changing this reference, so mark it external. */
8716 set_AT_ref_external (a, 1);
8717 }
8718
8719 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8720 {
8721 dw_attr_node *die_a, *abbrev_a;
8722 unsigned ix;
8723 bool ok = true;
8724
8725 if (abbrev_id == 0)
8726 continue;
8727 if (abbrev->die_tag != die->die_tag)
8728 continue;
8729 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8730 continue;
8731
8732 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8733 continue;
8734
8735 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8736 {
8737 abbrev_a = &(*abbrev->die_attr)[ix];
8738 if ((abbrev_a->dw_attr != die_a->dw_attr)
8739 || (value_format (abbrev_a) != value_format (die_a)))
8740 {
8741 ok = false;
8742 break;
8743 }
8744 }
8745 if (ok)
8746 break;
8747 }
8748
8749 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8750 {
8751 vec_safe_push (abbrev_die_table, die);
8752 if (abbrev_opt_start)
8753 abbrev_usage_count.safe_push (0);
8754 }
8755 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8756 {
8757 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8758 sorted_abbrev_dies.safe_push (die);
8759 }
8760
8761 die->die_abbrev = abbrev_id;
8762 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8763 }
8764
8765 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8766 by die_abbrev's usage count, from the most commonly used
8767 abbreviation to the least. */
8768
8769 static int
8770 die_abbrev_cmp (const void *p1, const void *p2)
8771 {
8772 dw_die_ref die1 = *(const dw_die_ref *) p1;
8773 dw_die_ref die2 = *(const dw_die_ref *) p2;
8774
8775 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8776 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8777
8778 if (die1->die_abbrev >= abbrev_opt_base_type_end
8779 && die2->die_abbrev >= abbrev_opt_base_type_end)
8780 {
8781 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8782 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8783 return -1;
8784 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8785 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8786 return 1;
8787 }
8788
8789 /* Stabilize the sort. */
8790 if (die1->die_abbrev < die2->die_abbrev)
8791 return -1;
8792 if (die1->die_abbrev > die2->die_abbrev)
8793 return 1;
8794
8795 return 0;
8796 }
8797
8798 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
8799 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
8800 into dw_val_class_const_implicit or
8801 dw_val_class_unsigned_const_implicit. */
8802
8803 static void
8804 optimize_implicit_const (unsigned int first_id, unsigned int end,
8805 vec<bool> &implicit_consts)
8806 {
8807 /* It never makes sense if there is just one DIE using the abbreviation. */
8808 if (end < first_id + 2)
8809 return;
8810
8811 dw_attr_node *a;
8812 unsigned ix, i;
8813 dw_die_ref die = sorted_abbrev_dies[first_id];
8814 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8815 if (implicit_consts[ix])
8816 {
8817 enum dw_val_class new_class = dw_val_class_none;
8818 switch (AT_class (a))
8819 {
8820 case dw_val_class_unsigned_const:
8821 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8822 continue;
8823
8824 /* The .debug_abbrev section will grow by
8825 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
8826 in all the DIEs using that abbreviation. */
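	      /* Worked example (illustrative): a value of 70000 needs 4 bytes
		 as a fixed-size constant but only 3 bytes as an sleb128 in
		 .debug_abbrev, so with two or more DIEs using the
		 abbreviation the implicit form already wins (4 * 2 = 8 > 3).  */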
8827 if (constant_size (AT_unsigned (a)) * (end - first_id)
8828 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8829 continue;
8830
8831 new_class = dw_val_class_unsigned_const_implicit;
8832 break;
8833
8834 case dw_val_class_const:
8835 new_class = dw_val_class_const_implicit;
8836 break;
8837
8838 case dw_val_class_file:
8839 new_class = dw_val_class_file_implicit;
8840 break;
8841
8842 default:
8843 continue;
8844 }
8845 for (i = first_id; i < end; i++)
8846 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8847 = new_class;
8848 }
8849 }
8850
8851 /* Attempt to optimize the abbreviation table, considering only abbreviations
8852 with id abbrev_opt_start and above. */
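/* (Rationale: smaller abbreviation ids need fewer uleb128 bytes per DIE in
   .debug_info, so the most frequently used abbreviations are given the
   lowest remaining ids; for DWARF 5, attribute values that are identical
   across every DIE sharing an abbreviation can additionally be folded into
   the abbreviation itself via optimize_implicit_const.)  */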
8853
8854 static void
8855 optimize_abbrev_table (void)
8856 {
8857 if (abbrev_opt_start
8858 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8859 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8860 {
8861 auto_vec<bool, 32> implicit_consts;
8862 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8863
8864 unsigned int abbrev_id = abbrev_opt_start - 1;
8865 unsigned int first_id = ~0U;
8866 unsigned int last_abbrev_id = 0;
8867 unsigned int i;
8868 dw_die_ref die;
8869 if (abbrev_opt_base_type_end > abbrev_opt_start)
8870 abbrev_id = abbrev_opt_base_type_end - 1;
8871 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8872 most commonly used abbreviations come first. */
8873 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8874 {
8875 dw_attr_node *a;
8876 unsigned ix;
8877
8878 /* If calc_base_type_die_sizes has been called, the CU and
8879 base types after it can't be optimized, because we've already
8880 calculated their DIE offsets. We've sorted them first. */
8881 if (die->die_abbrev < abbrev_opt_base_type_end)
8882 continue;
8883 if (die->die_abbrev != last_abbrev_id)
8884 {
8885 last_abbrev_id = die->die_abbrev;
8886 if (dwarf_version >= 5 && first_id != ~0U)
8887 optimize_implicit_const (first_id, i, implicit_consts);
8888 abbrev_id++;
8889 (*abbrev_die_table)[abbrev_id] = die;
8890 if (dwarf_version >= 5)
8891 {
8892 first_id = i;
8893 implicit_consts.truncate (0);
8894
8895 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8896 switch (AT_class (a))
8897 {
8898 case dw_val_class_const:
8899 case dw_val_class_unsigned_const:
8900 case dw_val_class_file:
8901 implicit_consts.safe_push (true);
8902 break;
8903 default:
8904 implicit_consts.safe_push (false);
8905 break;
8906 }
8907 }
8908 }
8909 else if (dwarf_version >= 5)
8910 {
8911 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8912 if (!implicit_consts[ix])
8913 continue;
8914 else
8915 {
8916 dw_attr_node *other_a
8917 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8918 if (!dw_val_equal_p (&a->dw_attr_val,
8919 &other_a->dw_attr_val))
8920 implicit_consts[ix] = false;
8921 }
8922 }
8923 die->die_abbrev = abbrev_id;
8924 }
8925 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8926 if (dwarf_version >= 5 && first_id != ~0U)
8927 optimize_implicit_const (first_id, i, implicit_consts);
8928 }
8929
8930 abbrev_opt_start = 0;
8931 abbrev_opt_base_type_end = 0;
8932 abbrev_usage_count.release ();
8933 sorted_abbrev_dies.release ();
8934 }
8935 \f
8936 /* Return the power-of-two number of bytes necessary to represent VALUE. */
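/* For example: values 0 through 0xff need 1 byte, 0x100 through 0xffff
   need 2, values up to 0xffffffff need 4, and anything larger needs 8.  */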
8937
8938 static int
8939 constant_size (unsigned HOST_WIDE_INT value)
8940 {
8941 int log;
8942
8943 if (value == 0)
8944 log = 0;
8945 else
8946 log = floor_log2 (value);
8947
8948 log = log / 8;
8949 log = 1 << (floor_log2 (log) + 1);
8950
8951 return log;
8952 }
8953
8954 /* Return the size of a DIE as it is represented in the
8955 .debug_info section. */
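/* A DIE is its uleb128 abbreviation code followed by the encoded value of
   each attribute.  The per-class byte counts below have to stay consistent
   with the forms chosen by value_format further down, since the DIE offsets
   computed from these sizes are what output_die later emits against.  */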
8956
8957 static unsigned long
8958 size_of_die (dw_die_ref die)
8959 {
8960 unsigned long size = 0;
8961 dw_attr_node *a;
8962 unsigned ix;
8963 enum dwarf_form form;
8964
8965 size += size_of_uleb128 (die->die_abbrev);
8966 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8967 {
8968 switch (AT_class (a))
8969 {
8970 case dw_val_class_addr:
8971 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8972 {
8973 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8974 size += size_of_uleb128 (AT_index (a));
8975 }
8976 else
8977 size += DWARF2_ADDR_SIZE;
8978 break;
8979 case dw_val_class_offset:
8980 size += DWARF_OFFSET_SIZE;
8981 break;
8982 case dw_val_class_loc:
8983 {
8984 unsigned long lsize = size_of_locs (AT_loc (a));
8985
8986 /* Block length. */
8987 if (dwarf_version >= 4)
8988 size += size_of_uleb128 (lsize);
8989 else
8990 size += constant_size (lsize);
8991 size += lsize;
8992 }
8993 break;
8994 case dw_val_class_loc_list:
8995 if (dwarf_split_debug_info && dwarf_version >= 5)
8996 {
8997 gcc_assert (AT_loc_list (a)->num_assigned);
8998 size += size_of_uleb128 (AT_loc_list (a)->hash);
8999 }
9000 else
9001 size += DWARF_OFFSET_SIZE;
9002 break;
9003 case dw_val_class_range_list:
9004 if (value_format (a) == DW_FORM_rnglistx)
9005 {
9006 gcc_assert (rnglist_idx);
9007 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9008 size += size_of_uleb128 (r->idx);
9009 }
9010 else
9011 size += DWARF_OFFSET_SIZE;
9012 break;
9013 case dw_val_class_const:
9014 size += size_of_sleb128 (AT_int (a));
9015 break;
9016 case dw_val_class_unsigned_const:
9017 {
9018 int csize = constant_size (AT_unsigned (a));
9019 if (dwarf_version == 3
9020 && a->dw_attr == DW_AT_data_member_location
9021 && csize >= 4)
9022 size += size_of_uleb128 (AT_unsigned (a));
9023 else
9024 size += csize;
9025 }
9026 break;
9027 case dw_val_class_const_implicit:
9028 case dw_val_class_unsigned_const_implicit:
9029 case dw_val_class_file_implicit:
9030 /* These occupy no size in the DIE, just an extra sleb128 in
9031 .debug_abbrev. */
9032 break;
9033 case dw_val_class_const_double:
9034 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9035 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9036 size++; /* block */
9037 break;
9038 case dw_val_class_wide_int:
9039 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9040 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9041 if (get_full_len (*a->dw_attr_val.v.val_wide)
9042 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9043 size++; /* block */
9044 break;
9045 case dw_val_class_vec:
9046 size += constant_size (a->dw_attr_val.v.val_vec.length
9047 * a->dw_attr_val.v.val_vec.elt_size)
9048 + a->dw_attr_val.v.val_vec.length
9049 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9050 break;
9051 case dw_val_class_flag:
9052 if (dwarf_version >= 4)
9053 /* Currently all add_AT_flag calls pass in 1 as last argument,
9054 so DW_FORM_flag_present can be used. If that ever changes,
9055 we'll need to use DW_FORM_flag and have some optimization
9056 in build_abbrev_table that will change those to
9057 DW_FORM_flag_present if it is set to 1 in all DIEs using
9058 the same abbrev entry. */
9059 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9060 else
9061 size += 1;
9062 break;
9063 case dw_val_class_die_ref:
9064 if (AT_ref_external (a))
9065 {
9066 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9067 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9068 is sized by target address length, whereas in DWARF3
9069 it's always sized as an offset. */
9070 if (use_debug_types)
9071 size += DWARF_TYPE_SIGNATURE_SIZE;
9072 else if (dwarf_version == 2)
9073 size += DWARF2_ADDR_SIZE;
9074 else
9075 size += DWARF_OFFSET_SIZE;
9076 }
9077 else
9078 size += DWARF_OFFSET_SIZE;
9079 break;
9080 case dw_val_class_fde_ref:
9081 size += DWARF_OFFSET_SIZE;
9082 break;
9083 case dw_val_class_lbl_id:
9084 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9085 {
9086 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9087 size += size_of_uleb128 (AT_index (a));
9088 }
9089 else
9090 size += DWARF2_ADDR_SIZE;
9091 break;
9092 case dw_val_class_lineptr:
9093 case dw_val_class_macptr:
9094 case dw_val_class_loclistsptr:
9095 size += DWARF_OFFSET_SIZE;
9096 break;
9097 case dw_val_class_str:
9098 form = AT_string_form (a);
9099 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9100 size += DWARF_OFFSET_SIZE;
9101 else if (form == DW_FORM_GNU_str_index)
9102 size += size_of_uleb128 (AT_index (a));
9103 else
9104 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9105 break;
9106 case dw_val_class_file:
9107 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9108 break;
9109 case dw_val_class_data8:
9110 size += 8;
9111 break;
9112 case dw_val_class_vms_delta:
9113 size += DWARF_OFFSET_SIZE;
9114 break;
9115 case dw_val_class_high_pc:
9116 size += DWARF2_ADDR_SIZE;
9117 break;
9118 case dw_val_class_discr_value:
9119 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9120 break;
9121 case dw_val_class_discr_list:
9122 {
9123 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9124
9125 /* This is a block, so we have the block length and then its
9126 data. */
9127 size += constant_size (block_size) + block_size;
9128 }
9129 break;
9130 default:
9131 gcc_unreachable ();
9132 }
9133 }
9134
9135 return size;
9136 }
9137
9138 /* Size the debugging information associated with a given DIE. Visits the
9139 DIE's children recursively. Updates the global variable next_die_offset
9140 each time through. Uses the current value of next_die_offset to update the
9141 die_offset field in each DIE. */
9142
9143 static void
9144 calc_die_sizes (dw_die_ref die)
9145 {
9146 dw_die_ref c;
9147
9148 gcc_assert (die->die_offset == 0
9149 || (unsigned long int) die->die_offset == next_die_offset);
9150 die->die_offset = next_die_offset;
9151 next_die_offset += size_of_die (die);
9152
9153 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9154
9155 if (die->die_child != NULL)
9156 /* Count the null byte used to terminate sibling lists. */
9157 next_die_offset += 1;
9158 }
9159
9160 /* Size just the base type children at the start of the CU.
9161 This is needed because build_abbrev needs to size locs
9162 and sizing of type-based stack ops needs to know die_offset
9163 values for the base types. */
9164
9165 static void
9166 calc_base_type_die_sizes (void)
9167 {
9168 unsigned long die_offset = (dwarf_split_debug_info
9169 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9170 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9171 unsigned int i;
9172 dw_die_ref base_type;
9173 #if ENABLE_ASSERT_CHECKING
9174 dw_die_ref prev = comp_unit_die ()->die_child;
9175 #endif
9176
9177 die_offset += size_of_die (comp_unit_die ());
9178 for (i = 0; base_types.iterate (i, &base_type); i++)
9179 {
9180 #if ENABLE_ASSERT_CHECKING
9181 gcc_assert (base_type->die_offset == 0
9182 && prev->die_sib == base_type
9183 && base_type->die_child == NULL
9184 && base_type->die_abbrev);
9185 prev = base_type;
9186 #endif
9187 if (abbrev_opt_start
9188 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9189 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9190 base_type->die_offset = die_offset;
9191 die_offset += size_of_die (base_type);
9192 }
9193 }
9194
9195 /* Set the marks for a die and its children. We do this so
9196 that we know whether or not a reference needs to use FORM_ref_addr; only
9197 DIEs in the same CU will be marked. We used to clear out the offset
9198 and use that as the flag, but ran into ordering problems. */
9199
9200 static void
9201 mark_dies (dw_die_ref die)
9202 {
9203 dw_die_ref c;
9204
9205 gcc_assert (!die->die_mark);
9206
9207 die->die_mark = 1;
9208 FOR_EACH_CHILD (die, c, mark_dies (c));
9209 }
9210
9211 /* Clear the marks for a die and its children. */
9212
9213 static void
9214 unmark_dies (dw_die_ref die)
9215 {
9216 dw_die_ref c;
9217
9218 if (! use_debug_types)
9219 gcc_assert (die->die_mark);
9220
9221 die->die_mark = 0;
9222 FOR_EACH_CHILD (die, c, unmark_dies (c));
9223 }
9224
9225 /* Clear the marks for a die, its children and referred dies. */
9226
9227 static void
9228 unmark_all_dies (dw_die_ref die)
9229 {
9230 dw_die_ref c;
9231 dw_attr_node *a;
9232 unsigned ix;
9233
9234 if (!die->die_mark)
9235 return;
9236 die->die_mark = 0;
9237
9238 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9239
9240 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9241 if (AT_class (a) == dw_val_class_die_ref)
9242 unmark_all_dies (AT_ref (a));
9243 }
9244
9245 /* Calculate if the entry should appear in the final output file. It may be
9246 from a pruned type. */
9247
9248 static bool
9249 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9250 {
9251 /* By limiting gnu pubnames to definitions only, gold can generate a
9252 gdb index without entries for declarations, which don't include
9253 enough information to be useful. */
9254 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9255 return false;
9256
9257 if (table == pubname_table)
9258 {
9259 /* Enumerator names are part of the pubname table, but the
9260 parent DW_TAG_enumeration_type die may have been pruned.
9261 Don't output them if that is the case. */
9262 if (p->die->die_tag == DW_TAG_enumerator &&
9263 (p->die->die_parent == NULL
9264 || !p->die->die_parent->die_perennial_p))
9265 return false;
9266
9267 /* Everything else in the pubname table is included. */
9268 return true;
9269 }
9270
9271 /* The pubtypes table shouldn't include types that have been
9272 pruned. */
9273 return (p->die->die_offset != 0
9274 || !flag_eliminate_unused_debug_types);
9275 }
9276
9277 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9278 generated for the compilation unit. */
9279
9280 static unsigned long
9281 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9282 {
9283 unsigned long size;
9284 unsigned i;
9285 pubname_entry *p;
9286 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9287
9288 size = DWARF_PUBNAMES_HEADER_SIZE;
9289 FOR_EACH_VEC_ELT (*names, i, p)
9290 if (include_pubname_in_output (names, p))
9291 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9292
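/* Count the zero offset word that terminates the table. */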
9293 size += DWARF_OFFSET_SIZE;
9294 return size;
9295 }
9296
9297 /* Return the size of the information in the .debug_aranges section. */
9298
9299 static unsigned long
9300 size_of_aranges (void)
9301 {
9302 unsigned long size;
9303
9304 size = DWARF_ARANGES_HEADER_SIZE;
9305
9306 /* Count the address/length pair for this compilation unit. */
9307 if (text_section_used)
9308 size += 2 * DWARF2_ADDR_SIZE;
9309 if (cold_text_section_used)
9310 size += 2 * DWARF2_ADDR_SIZE;
9311 if (have_multiple_function_sections)
9312 {
9313 unsigned fde_idx;
9314 dw_fde_ref fde;
9315
9316 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9317 {
9318 if (DECL_IGNORED_P (fde->decl))
9319 continue;
9320 if (!fde->in_std_section)
9321 size += 2 * DWARF2_ADDR_SIZE;
9322 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9323 size += 2 * DWARF2_ADDR_SIZE;
9324 }
9325 }
9326
9327 /* Count the two zero words used to terminate the address range table. */
9328 size += 2 * DWARF2_ADDR_SIZE;
9329 return size;
9330 }
9331 \f
9332 /* Select the encoding of an attribute value. */
9333
9334 static enum dwarf_form
9335 value_format (dw_attr_node *a)
9336 {
9337 switch (AT_class (a))
9338 {
9339 case dw_val_class_addr:
9340 /* Only very few attributes allow DW_FORM_addr. */
9341 switch (a->dw_attr)
9342 {
9343 case DW_AT_low_pc:
9344 case DW_AT_high_pc:
9345 case DW_AT_entry_pc:
9346 case DW_AT_trampoline:
9347 return (AT_index (a) == NOT_INDEXED
9348 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9349 default:
9350 break;
9351 }
9352 switch (DWARF2_ADDR_SIZE)
9353 {
9354 case 1:
9355 return DW_FORM_data1;
9356 case 2:
9357 return DW_FORM_data2;
9358 case 4:
9359 return DW_FORM_data4;
9360 case 8:
9361 return DW_FORM_data8;
9362 default:
9363 gcc_unreachable ();
9364 }
9365 case dw_val_class_loc_list:
9366 if (dwarf_split_debug_info
9367 && dwarf_version >= 5
9368 && AT_loc_list (a)->num_assigned)
9369 return DW_FORM_loclistx;
9370 /* FALLTHRU */
9371 case dw_val_class_range_list:
9372 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo
9373 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9374 care about sizes of .debug* sections in shared libraries and
9375 executables and don't take into account relocations that affect just
9376 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9377 table in the .debug_rnglists section. */
9378 if (dwarf_split_debug_info
9379 && dwarf_version >= 5
9380 && AT_class (a) == dw_val_class_range_list
9381 && rnglist_idx
9382 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9383 return DW_FORM_rnglistx;
9384 if (dwarf_version >= 4)
9385 return DW_FORM_sec_offset;
9386 /* FALLTHRU */
9387 case dw_val_class_vms_delta:
9388 case dw_val_class_offset:
9389 switch (DWARF_OFFSET_SIZE)
9390 {
9391 case 4:
9392 return DW_FORM_data4;
9393 case 8:
9394 return DW_FORM_data8;
9395 default:
9396 gcc_unreachable ();
9397 }
9398 case dw_val_class_loc:
9399 if (dwarf_version >= 4)
9400 return DW_FORM_exprloc;
9401 switch (constant_size (size_of_locs (AT_loc (a))))
9402 {
9403 case 1:
9404 return DW_FORM_block1;
9405 case 2:
9406 return DW_FORM_block2;
9407 case 4:
9408 return DW_FORM_block4;
9409 default:
9410 gcc_unreachable ();
9411 }
9412 case dw_val_class_const:
9413 return DW_FORM_sdata;
9414 case dw_val_class_unsigned_const:
9415 switch (constant_size (AT_unsigned (a)))
9416 {
9417 case 1:
9418 return DW_FORM_data1;
9419 case 2:
9420 return DW_FORM_data2;
9421 case 4:
9422 /* In DWARF3 DW_AT_data_member_location with
9423 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9424 constant, so we need to use DW_FORM_udata if we need
9425 a large constant. */
9426 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9427 return DW_FORM_udata;
9428 return DW_FORM_data4;
9429 case 8:
9430 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9431 return DW_FORM_udata;
9432 return DW_FORM_data8;
9433 default:
9434 gcc_unreachable ();
9435 }
9436 case dw_val_class_const_implicit:
9437 case dw_val_class_unsigned_const_implicit:
9438 case dw_val_class_file_implicit:
9439 return DW_FORM_implicit_const;
9440 case dw_val_class_const_double:
9441 switch (HOST_BITS_PER_WIDE_INT)
9442 {
9443 case 8:
9444 return DW_FORM_data2;
9445 case 16:
9446 return DW_FORM_data4;
9447 case 32:
9448 return DW_FORM_data8;
9449 case 64:
9450 if (dwarf_version >= 5)
9451 return DW_FORM_data16;
9452 /* FALLTHRU */
9453 default:
9454 return DW_FORM_block1;
9455 }
9456 case dw_val_class_wide_int:
9457 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9458 {
9459 case 8:
9460 return DW_FORM_data1;
9461 case 16:
9462 return DW_FORM_data2;
9463 case 32:
9464 return DW_FORM_data4;
9465 case 64:
9466 return DW_FORM_data8;
9467 case 128:
9468 if (dwarf_version >= 5)
9469 return DW_FORM_data16;
9470 /* FALLTHRU */
9471 default:
9472 return DW_FORM_block1;
9473 }
9474 case dw_val_class_vec:
9475 switch (constant_size (a->dw_attr_val.v.val_vec.length
9476 * a->dw_attr_val.v.val_vec.elt_size))
9477 {
9478 case 1:
9479 return DW_FORM_block1;
9480 case 2:
9481 return DW_FORM_block2;
9482 case 4:
9483 return DW_FORM_block4;
9484 default:
9485 gcc_unreachable ();
9486 }
9487 case dw_val_class_flag:
9488 if (dwarf_version >= 4)
9489 {
9490 /* Currently all add_AT_flag calls pass in 1 as last argument,
9491 so DW_FORM_flag_present can be used. If that ever changes,
9492 we'll need to use DW_FORM_flag and have some optimization
9493 in build_abbrev_table that will change those to
9494 DW_FORM_flag_present if it is set to 1 in all DIEs using
9495 the same abbrev entry. */
9496 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9497 return DW_FORM_flag_present;
9498 }
9499 return DW_FORM_flag;
9500 case dw_val_class_die_ref:
9501 if (AT_ref_external (a))
9502 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9503 else
9504 return DW_FORM_ref;
9505 case dw_val_class_fde_ref:
9506 return DW_FORM_data;
9507 case dw_val_class_lbl_id:
9508 return (AT_index (a) == NOT_INDEXED
9509 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9510 case dw_val_class_lineptr:
9511 case dw_val_class_macptr:
9512 case dw_val_class_loclistsptr:
9513 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9514 case dw_val_class_str:
9515 return AT_string_form (a);
9516 case dw_val_class_file:
9517 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9518 {
9519 case 1:
9520 return DW_FORM_data1;
9521 case 2:
9522 return DW_FORM_data2;
9523 case 4:
9524 return DW_FORM_data4;
9525 default:
9526 gcc_unreachable ();
9527 }
9528
9529 case dw_val_class_data8:
9530 return DW_FORM_data8;
9531
9532 case dw_val_class_high_pc:
9533 switch (DWARF2_ADDR_SIZE)
9534 {
9535 case 1:
9536 return DW_FORM_data1;
9537 case 2:
9538 return DW_FORM_data2;
9539 case 4:
9540 return DW_FORM_data4;
9541 case 8:
9542 return DW_FORM_data8;
9543 default:
9544 gcc_unreachable ();
9545 }
9546
9547 case dw_val_class_discr_value:
9548 return (a->dw_attr_val.v.val_discr_value.pos
9549 ? DW_FORM_udata
9550 : DW_FORM_sdata);
9551 case dw_val_class_discr_list:
9552 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9553 {
9554 case 1:
9555 return DW_FORM_block1;
9556 case 2:
9557 return DW_FORM_block2;
9558 case 4:
9559 return DW_FORM_block4;
9560 default:
9561 gcc_unreachable ();
9562 }
9563
9564 default:
9565 gcc_unreachable ();
9566 }
9567 }
9568
9569 /* Output the encoding of an attribute value. */
9570
9571 static void
9572 output_value_format (dw_attr_node *a)
9573 {
9574 enum dwarf_form form = value_format (a);
9575
9576 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9577 }
9578
9579 /* Given a die and id, produce the appropriate abbreviations. */
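/* Each abbreviation entry consists of the uleb128 abbrev code, the uleb128
   DIE tag, a one-byte children flag, a list of uleb128 (attribute, form)
   pairs (plus an extra sleb128 value when the form is
   DW_FORM_implicit_const), and a terminating (0, 0) pair.  */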
9580
9581 static void
9582 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9583 {
9584 unsigned ix;
9585 dw_attr_node *a_attr;
9586
9587 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9588 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9589 dwarf_tag_name (abbrev->die_tag));
9590
9591 if (abbrev->die_child != NULL)
9592 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9593 else
9594 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9595
9596 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9597 {
9598 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9599 dwarf_attr_name (a_attr->dw_attr));
9600 output_value_format (a_attr);
9601 if (value_format (a_attr) == DW_FORM_implicit_const)
9602 {
9603 if (AT_class (a_attr) == dw_val_class_file_implicit)
9604 {
9605 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9606 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9607 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9608 }
9609 else
9610 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9611 }
9612 }
9613
9614 dw2_asm_output_data (1, 0, NULL);
9615 dw2_asm_output_data (1, 0, NULL);
9616 }
9617
9618
9619 /* Output the .debug_abbrev section which defines the DIE abbreviation
9620 table. */
9621
9622 static void
9623 output_abbrev_section (void)
9624 {
9625 unsigned int abbrev_id;
9626 dw_die_ref abbrev;
9627
9628 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9629 if (abbrev_id != 0)
9630 output_die_abbrevs (abbrev_id, abbrev);
9631
9632 /* Terminate the table. */
9633 dw2_asm_output_data (1, 0, NULL);
9634 }
9635
9636 /* Return a new location list, given the begin and end range, and the
9637 expression. */
9638
9639 static inline dw_loc_list_ref
9640 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9641 const char *section)
9642 {
9643 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9644
9645 retlist->begin = begin;
9646 retlist->begin_entry = NULL;
9647 retlist->end = end;
9648 retlist->expr = expr;
9649 retlist->section = section;
9650
9651 return retlist;
9652 }
9653
9654 /* Generate a new internal symbol for this location list node, if it
9655 hasn't got one yet. */
9656
9657 static inline void
9658 gen_llsym (dw_loc_list_ref list)
9659 {
9660 gcc_assert (!list->ll_symbol);
9661 list->ll_symbol = gen_internal_sym ("LLST");
9662 }
9663
9664 /* Output the location list given to us. */
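/* For DWARF 5 each range becomes a DW_LLE_* entry (DW_LLE_startx_length for
   split DWARF; otherwise DW_LLE_offset_pair, DW_LLE_base_address plus offset
   pairs, DW_LLE_start_length or DW_LLE_start_end, depending on the assembler
   and the section layout), followed by a uleb128 expression size.  Before
   DWARF 5 each entry is an address pair (or, for split DWARF, a GNU
   start/length entry) followed by a 2-byte expression size.  */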
9665
9666 static void
9667 output_loc_list (dw_loc_list_ref list_head)
9668 {
9669 if (list_head->emitted)
9670 return;
9671 list_head->emitted = true;
9672
9673 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9674
9675 dw_loc_list_ref curr = list_head;
9676 const char *last_section = NULL;
9677 const char *base_label = NULL;
9678
9679 /* Walk the location list, and output each range + expression. */
9680 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9681 {
9682 unsigned long size;
9683 /* Don't output an entry that starts and ends at the same address. */
9684 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9685 continue;
9686 size = size_of_locs (curr->expr);
9687 /* If the expression is too large, drop it on the floor. We could
9688 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9689 in the expression, but >= 64KB expressions for a single value
9690 in a single range are unlikely to be very useful. */
9691 if (dwarf_version < 5 && size > 0xffff)
9692 continue;
9693 if (dwarf_version >= 5)
9694 {
9695 if (dwarf_split_debug_info)
9696 {
9697 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
9698 uleb128 index into .debug_addr and uleb128 length. */
9699 dw2_asm_output_data (1, DW_LLE_startx_length,
9700 "DW_LLE_startx_length (%s)",
9701 list_head->ll_symbol);
9702 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9703 "Location list range start index "
9704 "(%s)", curr->begin);
9705 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
9706 For that case we probably need to emit DW_LLE_startx_endx,
9707 but we'd need 2 .debug_addr entries rather than just one. */
9708 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9709 "Location list length (%s)",
9710 list_head->ll_symbol);
9711 }
9712 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9713 {
9714 /* If all code is in .text section, the base address is
9715 already provided by the CU attributes. Use
9716 DW_LLE_offset_pair where both addresses are uleb128 encoded
9717 offsets against that base. */
9718 dw2_asm_output_data (1, DW_LLE_offset_pair,
9719 "DW_LLE_offset_pair (%s)",
9720 list_head->ll_symbol);
9721 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9722 "Location list begin address (%s)",
9723 list_head->ll_symbol);
9724 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9725 "Location list end address (%s)",
9726 list_head->ll_symbol);
9727 }
9728 else if (HAVE_AS_LEB128)
9729 {
9730 /* Otherwise, find out how many consecutive entries could share
9731 the same base entry. If just one, emit DW_LLE_start_length,
9732 otherwise emit DW_LLE_base_address for the base address
9733 followed by a series of DW_LLE_offset_pair. */
9734 if (last_section == NULL || curr->section != last_section)
9735 {
9736 dw_loc_list_ref curr2;
9737 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9738 curr2 = curr2->dw_loc_next)
9739 {
9740 if (strcmp (curr2->begin, curr2->end) == 0
9741 && !curr2->force)
9742 continue;
9743 break;
9744 }
9745 if (curr2 == NULL || curr->section != curr2->section)
9746 last_section = NULL;
9747 else
9748 {
9749 last_section = curr->section;
9750 base_label = curr->begin;
9751 dw2_asm_output_data (1, DW_LLE_base_address,
9752 "DW_LLE_base_address (%s)",
9753 list_head->ll_symbol);
9754 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9755 "Base address (%s)",
9756 list_head->ll_symbol);
9757 }
9758 }
9759 /* Only one entry with the same base address. Use
9760 DW_LLE_start_length with absolute address and uleb128
9761 length. */
9762 if (last_section == NULL)
9763 {
9764 dw2_asm_output_data (1, DW_LLE_start_length,
9765 "DW_LLE_start_length (%s)",
9766 list_head->ll_symbol);
9767 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9768 "Location list begin address (%s)",
9769 list_head->ll_symbol);
9770 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9771 "Location list length "
9772 "(%s)", list_head->ll_symbol);
9773 }
9774 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9775 DW_LLE_base_address. */
9776 else
9777 {
9778 dw2_asm_output_data (1, DW_LLE_offset_pair,
9779 "DW_LLE_offset_pair (%s)",
9780 list_head->ll_symbol);
9781 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9782 "Location list begin address "
9783 "(%s)", list_head->ll_symbol);
9784 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9785 "Location list end address "
9786 "(%s)", list_head->ll_symbol);
9787 }
9788 }
9789 /* The assembler does not support the .uleb128 directive. Emit
9790 DW_LLE_start_end with a pair of absolute addresses. */
9791 else
9792 {
9793 dw2_asm_output_data (1, DW_LLE_start_end,
9794 "DW_LLE_start_end (%s)",
9795 list_head->ll_symbol);
9796 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9797 "Location list begin address (%s)",
9798 list_head->ll_symbol);
9799 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9800 "Location list end address (%s)",
9801 list_head->ll_symbol);
9802 }
9803 }
9804 else if (dwarf_split_debug_info)
9805 {
9806 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
9807 and 4 byte length. */
9808 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9809 "Location list start/length entry (%s)",
9810 list_head->ll_symbol);
9811 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9812 "Location list range start index (%s)",
9813 curr->begin);
9814 /* The length field is 4 bytes. If we ever need to support
9815 an 8-byte length, we can add a new DW_LLE code or fall back
9816 to DW_LLE_GNU_start_end_entry. */
9817 dw2_asm_output_delta (4, curr->end, curr->begin,
9818 "Location list range length (%s)",
9819 list_head->ll_symbol);
9820 }
9821 else if (!have_multiple_function_sections)
9822 {
9823 /* Pair of relative addresses against start of text section. */
9824 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9825 "Location list begin address (%s)",
9826 list_head->ll_symbol);
9827 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9828 "Location list end address (%s)",
9829 list_head->ll_symbol);
9830 }
9831 else
9832 {
9833 /* Pair of absolute addresses. */
9834 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9835 "Location list begin address (%s)",
9836 list_head->ll_symbol);
9837 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9838 "Location list end address (%s)",
9839 list_head->ll_symbol);
9840 }
9841
9842 /* Output the block length for this list of location operations. */
9843 if (dwarf_version >= 5)
9844 dw2_asm_output_data_uleb128 (size, "Location expression size");
9845 else
9846 {
9847 gcc_assert (size <= 0xffff);
9848 dw2_asm_output_data (2, size, "Location expression size");
9849 }
9850
9851 output_loc_sequence (curr->expr, -1);
9852 }
9853
9854 /* And finally list termination. */
9855 if (dwarf_version >= 5)
9856 dw2_asm_output_data (1, DW_LLE_end_of_list,
9857 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9858 else if (dwarf_split_debug_info)
9859 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9860 "Location list terminator (%s)",
9861 list_head->ll_symbol);
9862 else
9863 {
9864 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9865 "Location list terminator begin (%s)",
9866 list_head->ll_symbol);
9867 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9868 "Location list terminator end (%s)",
9869 list_head->ll_symbol);
9870 }
9871 }
9872
9873 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9874 section. Emit a relocated reference if val_entry is NULL, otherwise,
9875 emit an indirect reference. */
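/* Before DWARF 5 each .debug_ranges entry is a begin/end address pair, so a
   range list index is turned into a byte offset below by multiplying it by
   2 * DWARF2_ADDR_SIZE.  */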
9876
9877 static void
9878 output_range_list_offset (dw_attr_node *a)
9879 {
9880 const char *name = dwarf_attr_name (a->dw_attr);
9881
9882 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9883 {
9884 if (dwarf_version >= 5)
9885 {
9886 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9887 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9888 debug_ranges_section, "%s", name);
9889 }
9890 else
9891 {
9892 char *p = strchr (ranges_section_label, '\0');
9893 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9894 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9895 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9896 debug_ranges_section, "%s", name);
9897 *p = '\0';
9898 }
9899 }
9900 else if (dwarf_version >= 5)
9901 {
9902 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9903 gcc_assert (rnglist_idx);
9904 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9905 }
9906 else
9907 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9908 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9909 "%s (offset from %s)", name, ranges_section_label);
9910 }
9911
9912 /* Output the offset into the debug_loc section. */
9913
9914 static void
9915 output_loc_list_offset (dw_attr_node *a)
9916 {
9917 char *sym = AT_loc_list (a)->ll_symbol;
9918
9919 gcc_assert (sym);
9920 if (!dwarf_split_debug_info)
9921 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9922 "%s", dwarf_attr_name (a->dw_attr));
9923 else if (dwarf_version >= 5)
9924 {
9925 gcc_assert (AT_loc_list (a)->num_assigned);
9926 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9927 dwarf_attr_name (a->dw_attr),
9928 sym);
9929 }
9930 else
9931 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9932 "%s", dwarf_attr_name (a->dw_attr));
9933 }
9934
9935 /* Output an attribute's index or value appropriately. */
9936
9937 static void
9938 output_attr_index_or_value (dw_attr_node *a)
9939 {
9940 const char *name = dwarf_attr_name (a->dw_attr);
9941
9942 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9943 {
9944 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9945 return;
9946 }
9947 switch (AT_class (a))
9948 {
9949 case dw_val_class_addr:
9950 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9951 break;
9952 case dw_val_class_high_pc:
9953 case dw_val_class_lbl_id:
9954 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9955 break;
9956 default:
9957 gcc_unreachable ();
9958 }
9959 }
9960
9961 /* Output a type signature. */
9962
9963 static inline void
9964 output_signature (const char *sig, const char *name)
9965 {
9966 int i;
9967
9968 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9969 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9970 }
9971
9972 /* Output a discriminant value. */
9973
9974 static inline void
9975 output_discr_value (dw_discr_value *discr_value, const char *name)
9976 {
9977 if (discr_value->pos)
9978 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9979 else
9980 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9981 }
9982
9983 /* Output the DIE and its attributes. Called recursively to generate
9984 the definitions of each child DIE. */
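/* The bytes emitted for each attribute must match what size_of_die counted
   for it, since the die_offset values already handed out (and any references
   to them) were computed from those sizes.  */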
9985
9986 static void
9987 output_die (dw_die_ref die)
9988 {
9989 dw_attr_node *a;
9990 dw_die_ref c;
9991 unsigned long size;
9992 unsigned ix;
9993
9994 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9995 (unsigned long)die->die_offset,
9996 dwarf_tag_name (die->die_tag));
9997
9998 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9999 {
10000 const char *name = dwarf_attr_name (a->dw_attr);
10001
10002 switch (AT_class (a))
10003 {
10004 case dw_val_class_addr:
10005 output_attr_index_or_value (a);
10006 break;
10007
10008 case dw_val_class_offset:
10009 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10010 "%s", name);
10011 break;
10012
10013 case dw_val_class_range_list:
10014 output_range_list_offset (a);
10015 break;
10016
10017 case dw_val_class_loc:
10018 size = size_of_locs (AT_loc (a));
10019
10020 /* Output the block length for this list of location operations. */
10021 if (dwarf_version >= 4)
10022 dw2_asm_output_data_uleb128 (size, "%s", name);
10023 else
10024 dw2_asm_output_data (constant_size (size), size, "%s", name);
10025
10026 output_loc_sequence (AT_loc (a), -1);
10027 break;
10028
10029 case dw_val_class_const:
10030 /* ??? It would be slightly more efficient to use a scheme like the one
10031 used for unsigned constants below, but gdb 4.x does not sign
10032 extend. Gdb 5.x does sign extend. */
10033 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10034 break;
10035
10036 case dw_val_class_unsigned_const:
10037 {
10038 int csize = constant_size (AT_unsigned (a));
10039 if (dwarf_version == 3
10040 && a->dw_attr == DW_AT_data_member_location
10041 && csize >= 4)
10042 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10043 else
10044 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10045 }
10046 break;
10047
10048 case dw_val_class_const_implicit:
10049 if (flag_debug_asm)
10050 fprintf (asm_out_file, "\t\t\t%s %s ("
10051 HOST_WIDE_INT_PRINT_DEC ")\n",
10052 ASM_COMMENT_START, name, AT_int (a));
10053 break;
10054
10055 case dw_val_class_unsigned_const_implicit:
10056 if (flag_debug_asm)
10057 fprintf (asm_out_file, "\t\t\t%s %s ("
10058 HOST_WIDE_INT_PRINT_HEX ")\n",
10059 ASM_COMMENT_START, name, AT_unsigned (a));
10060 break;
10061
10062 case dw_val_class_const_double:
10063 {
10064 unsigned HOST_WIDE_INT first, second;
10065
10066 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10067 dw2_asm_output_data (1,
10068 HOST_BITS_PER_DOUBLE_INT
10069 / HOST_BITS_PER_CHAR,
10070 NULL);
10071
10072 if (WORDS_BIG_ENDIAN)
10073 {
10074 first = a->dw_attr_val.v.val_double.high;
10075 second = a->dw_attr_val.v.val_double.low;
10076 }
10077 else
10078 {
10079 first = a->dw_attr_val.v.val_double.low;
10080 second = a->dw_attr_val.v.val_double.high;
10081 }
10082
10083 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10084 first, "%s", name);
10085 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10086 second, NULL);
10087 }
10088 break;
10089
10090 case dw_val_class_wide_int:
10091 {
10092 int i;
10093 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10094 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10095 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10096 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10097 * l, NULL);
10098
10099 if (WORDS_BIG_ENDIAN)
10100 for (i = len - 1; i >= 0; --i)
10101 {
10102 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10103 "%s", name);
10104 name = "";
10105 }
10106 else
10107 for (i = 0; i < len; ++i)
10108 {
10109 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10110 "%s", name);
10111 name = "";
10112 }
10113 }
10114 break;
10115
10116 case dw_val_class_vec:
10117 {
10118 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10119 unsigned int len = a->dw_attr_val.v.val_vec.length;
10120 unsigned int i;
10121 unsigned char *p;
10122
10123 dw2_asm_output_data (constant_size (len * elt_size),
10124 len * elt_size, "%s", name);
10125 if (elt_size > sizeof (HOST_WIDE_INT))
10126 {
10127 elt_size /= 2;
10128 len *= 2;
10129 }
10130 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10131 i < len;
10132 i++, p += elt_size)
10133 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10134 "fp or vector constant word %u", i);
10135 break;
10136 }
10137
10138 case dw_val_class_flag:
10139 if (dwarf_version >= 4)
10140 {
10141 /* Currently all add_AT_flag calls pass in 1 as last argument,
10142 so DW_FORM_flag_present can be used. If that ever changes,
10143 we'll need to use DW_FORM_flag and have some optimization
10144 in build_abbrev_table that will change those to
10145 DW_FORM_flag_present if it is set to 1 in all DIEs using
10146 the same abbrev entry. */
10147 gcc_assert (AT_flag (a) == 1);
10148 if (flag_debug_asm)
10149 fprintf (asm_out_file, "\t\t\t%s %s\n",
10150 ASM_COMMENT_START, name);
10151 break;
10152 }
10153 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10154 break;
10155
10156 case dw_val_class_loc_list:
10157 output_loc_list_offset (a);
10158 break;
10159
10160 case dw_val_class_die_ref:
10161 if (AT_ref_external (a))
10162 {
10163 if (AT_ref (a)->comdat_type_p)
10164 {
10165 comdat_type_node *type_node
10166 = AT_ref (a)->die_id.die_type_node;
10167
10168 gcc_assert (type_node);
10169 output_signature (type_node->signature, name);
10170 }
10171 else
10172 {
10173 const char *sym = AT_ref (a)->die_id.die_symbol;
10174 int size;
10175
10176 gcc_assert (sym);
10177 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10178 length, whereas in DWARF3 it's always sized as an
10179 offset. */
10180 if (dwarf_version == 2)
10181 size = DWARF2_ADDR_SIZE;
10182 else
10183 size = DWARF_OFFSET_SIZE;
10184 /* ??? We cannot unconditionally output die_offset if
10185 non-zero - others might create references to those
10186 DIEs via symbols.
10187 And we do not clear its DIE offset after outputting it
10188 (and the label refers to the actual DIEs, not the
10189 DWARF CU unit header, which is the case in which using
10190 label + offset would be the correct thing to do).
10191 ??? This is the reason for the with_offset flag. */
10192 if (AT_ref (a)->with_offset)
10193 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10194 debug_info_section, "%s", name);
10195 else
10196 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10197 name);
10198 }
10199 }
10200 else
10201 {
10202 gcc_assert (AT_ref (a)->die_offset);
10203 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10204 "%s", name);
10205 }
10206 break;
10207
10208 case dw_val_class_fde_ref:
10209 {
10210 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10211
10212 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10213 a->dw_attr_val.v.val_fde_index * 2);
10214 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10215 "%s", name);
10216 }
10217 break;
10218
10219 case dw_val_class_vms_delta:
10220 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10221 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10222 AT_vms_delta2 (a), AT_vms_delta1 (a),
10223 "%s", name);
10224 #else
10225 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10226 AT_vms_delta2 (a), AT_vms_delta1 (a),
10227 "%s", name);
10228 #endif
10229 break;
10230
10231 case dw_val_class_lbl_id:
10232 output_attr_index_or_value (a);
10233 break;
10234
10235 case dw_val_class_lineptr:
10236 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10237 debug_line_section, "%s", name);
10238 break;
10239
10240 case dw_val_class_macptr:
10241 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10242 debug_macinfo_section, "%s", name);
10243 break;
10244
10245 case dw_val_class_loclistsptr:
10246 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10247 debug_loc_section, "%s", name);
10248 break;
10249
10250 case dw_val_class_str:
10251 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10252 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10253 a->dw_attr_val.v.val_str->label,
10254 debug_str_section,
10255 "%s: \"%s\"", name, AT_string (a));
10256 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10257 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10258 a->dw_attr_val.v.val_str->label,
10259 debug_line_str_section,
10260 "%s: \"%s\"", name, AT_string (a));
10261 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10262 dw2_asm_output_data_uleb128 (AT_index (a),
10263 "%s: \"%s\"", name, AT_string (a));
10264 else
10265 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10266 break;
10267
10268 case dw_val_class_file:
10269 {
10270 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10271
10272 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10273 a->dw_attr_val.v.val_file->filename);
10274 break;
10275 }
10276
10277 case dw_val_class_file_implicit:
10278 if (flag_debug_asm)
10279 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10280 ASM_COMMENT_START, name,
10281 maybe_emit_file (a->dw_attr_val.v.val_file),
10282 a->dw_attr_val.v.val_file->filename);
10283 break;
10284
10285 case dw_val_class_data8:
10286 {
10287 int i;
10288
10289 for (i = 0; i < 8; i++)
10290 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10291 i == 0 ? "%s" : NULL, name);
10292 break;
10293 }
10294
10295 case dw_val_class_high_pc:
10296 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10297 get_AT_low_pc (die), "DW_AT_high_pc");
10298 break;
10299
10300 case dw_val_class_discr_value:
10301 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10302 break;
10303
10304 case dw_val_class_discr_list:
10305 {
10306 dw_discr_list_ref list = AT_discr_list (a);
10307 const int size = size_of_discr_list (list);
10308
10309 /* This is a block, so output its length first. */
10310 dw2_asm_output_data (constant_size (size), size,
10311 "%s: block size", name);
10312
10313 for (; list != NULL; list = list->dw_discr_next)
10314 {
10315 /* One byte for the discriminant value descriptor, and then as
10316 many LEB128 numbers as required. */
10317 if (list->dw_discr_range)
10318 dw2_asm_output_data (1, DW_DSC_range,
10319 "%s: DW_DSC_range", name);
10320 else
10321 dw2_asm_output_data (1, DW_DSC_label,
10322 "%s: DW_DSC_label", name);
10323
10324 output_discr_value (&list->dw_discr_lower_bound, name);
10325 if (list->dw_discr_range)
10326 output_discr_value (&list->dw_discr_upper_bound, name);
10327 }
10328 break;
10329 }
10330
10331 default:
10332 gcc_unreachable ();
10333 }
10334 }
10335
10336 FOR_EACH_CHILD (die, c, output_die (c));
10337
10338 /* Add null byte to terminate sibling list. */
10339 if (die->die_child != NULL)
10340 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10341 (unsigned long) die->die_offset);
10342 }
10343
10344 /* Output the compilation unit that appears at the beginning of the
10345 .debug_info section, and precedes the DIE descriptions. */
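/* The header layout differs by version: before DWARF 5 it is the length,
   version, abbrev table offset and address size; from DWARF 5 onwards the
   unit type and address size are emitted between the version and the abbrev
   offset.  */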
10346
10347 static void
10348 output_compilation_unit_header (enum dwarf_unit_type ut)
10349 {
10350 if (!XCOFF_DEBUGGING_INFO)
10351 {
10352 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10353 dw2_asm_output_data (4, 0xffffffff,
10354 "Initial length escape value indicating 64-bit DWARF extension");
10355 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10356 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10357 "Length of Compilation Unit Info");
10358 }
10359
10360 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10361 if (dwarf_version >= 5)
10362 {
10363 const char *name;
10364 switch (ut)
10365 {
10366 case DW_UT_compile: name = "DW_UT_compile"; break;
10367 case DW_UT_type: name = "DW_UT_type"; break;
10368 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10369 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10370 default: gcc_unreachable ();
10371 }
10372 dw2_asm_output_data (1, ut, "%s", name);
10373 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10374 }
10375 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10376 debug_abbrev_section,
10377 "Offset Into Abbrev. Section");
10378 if (dwarf_version < 5)
10379 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10380 }
10381
10382 /* Output the compilation unit DIE and its children. */
10383
10384 static void
10385 output_comp_unit (dw_die_ref die, int output_if_empty,
10386 const unsigned char *dwo_id)
10387 {
10388 const char *secname, *oldsym;
10389 char *tmp;
10390
10391 /* Unless we are outputting the main CU, we may throw away empty ones. */
10392 if (!output_if_empty && die->die_child == NULL)
10393 return;
10394
10395 /* Even if there are no children of this DIE, we must output the information
10396 about the compilation unit. Otherwise, on an empty translation unit, we
10397 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10398 will then complain when examining the file. First mark all the DIEs in
10399 this CU so we know which get local refs. */
10400 mark_dies (die);
10401
10402 external_ref_hash_type *extern_map = optimize_external_refs (die);
10403
10404 /* For now, optimize only the main CU, in order to optimize the rest
10405 we'd need to see all of them earlier. Leave the rest for post-linking
10406 tools like DWZ. */
10407 if (die == comp_unit_die ())
10408 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10409
10410 build_abbrev_table (die, extern_map);
10411
10412 optimize_abbrev_table ();
10413
10414 delete extern_map;
10415
10416 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10417 next_die_offset = (dwo_id
10418 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10419 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10420 calc_die_sizes (die);
10421
10422 oldsym = die->die_id.die_symbol;
10423 if (oldsym && die->comdat_type_p)
10424 {
10425 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10426
10427 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10428 secname = tmp;
10429 die->die_id.die_symbol = NULL;
10430 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10431 }
10432 else
10433 {
10434 switch_to_section (debug_info_section);
10435 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10436 info_section_emitted = true;
10437 }
10438
10439 /* For LTO cross unit DIE refs we want a symbol on the start of the
10440 debuginfo section, not on the CU DIE. */
10441 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10442 {
10443 /* ??? No way to get visibility assembled without a decl. */
10444 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10445 get_identifier (oldsym), char_type_node);
10446 TREE_PUBLIC (decl) = true;
10447 TREE_STATIC (decl) = true;
10448 DECL_ARTIFICIAL (decl) = true;
10449 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10450 DECL_VISIBILITY_SPECIFIED (decl) = true;
10451 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10452 #ifdef ASM_WEAKEN_LABEL
10453 /* We prefer a .weak because that handles duplicates from duplicate
10454 archive members in a graceful way. */
10455 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10456 #else
10457 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10458 #endif
10459 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10460 }
10461
10462 /* Output debugging information. */
10463 output_compilation_unit_header (dwo_id
10464 ? DW_UT_split_compile : DW_UT_compile);
10465 if (dwarf_version >= 5)
10466 {
10467 if (dwo_id != NULL)
10468 for (int i = 0; i < 8; i++)
10469 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10470 }
10471 output_die (die);
10472
10473 /* Leave the marks on the main CU, so we can check them in
10474 output_pubnames. */
10475 if (oldsym)
10476 {
10477 unmark_dies (die);
10478 die->die_id.die_symbol = oldsym;
10479 }
10480 }
10481
10482 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10483 and .debug_pubtypes. This is configured per-target, but can be
10484 overridden by the -gpubnames or -gno-pubnames options. */
10485
10486 static inline bool
10487 want_pubnames (void)
10488 {
10489 if (debug_info_level <= DINFO_LEVEL_TERSE)
10490 return false;
10491 if (debug_generate_pub_sections != -1)
10492 return debug_generate_pub_sections;
10493 return targetm.want_debug_pub_sections;
10494 }
10495
10496 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10497
10498 static void
10499 add_AT_pubnames (dw_die_ref die)
10500 {
10501 if (want_pubnames ())
10502 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10503 }
10504
10505 /* Add a string attribute value to a skeleton DIE. */
10506
10507 static inline void
10508 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10509 const char *str)
10510 {
10511 dw_attr_node attr;
10512 struct indirect_string_node *node;
10513
10514 if (! skeleton_debug_str_hash)
10515 skeleton_debug_str_hash
10516 = hash_table<indirect_string_hasher>::create_ggc (10);
10517
10518 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10519 find_string_form (node);
10520 if (node->form == DW_FORM_GNU_str_index)
10521 node->form = DW_FORM_strp;
10522
10523 attr.dw_attr = attr_kind;
10524 attr.dw_attr_val.val_class = dw_val_class_str;
10525 attr.dw_attr_val.val_entry = NULL;
10526 attr.dw_attr_val.v.val_str = node;
10527 add_dwarf_attr (die, &attr);
10528 }
10529
10530 /* Helper function to generate top-level dies for skeleton debug_info and
10531 debug_types. */
10532
10533 static void
10534 add_top_level_skeleton_die_attrs (dw_die_ref die)
10535 {
10536 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10537 const char *comp_dir = comp_dir_string ();
10538
10539 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10540 if (comp_dir != NULL)
10541 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10542 add_AT_pubnames (die);
10543 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10544 }
10545
10546 /* Output skeleton debug sections that point to the dwo file. */
10547
10548 static void
10549 output_skeleton_debug_sections (dw_die_ref comp_unit,
10550 const unsigned char *dwo_id)
10551 {
10552 /* These attributes will be found in the full debug_info section. */
10553 remove_AT (comp_unit, DW_AT_producer);
10554 remove_AT (comp_unit, DW_AT_language);
10555
10556 switch_to_section (debug_skeleton_info_section);
10557 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10558
10559 /* Produce the skeleton compilation-unit header. This one differs enough from
10560 a normal CU header that it's better not to call
10561 output_compilation_unit_header. */
10562 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10563 dw2_asm_output_data (4, 0xffffffff,
10564 "Initial length escape value indicating 64-bit "
10565 "DWARF extension");
10566
10567 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10568 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10569 - DWARF_INITIAL_LENGTH_SIZE
10570 + size_of_die (comp_unit),
10571 "Length of Compilation Unit Info");
10572 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10573 if (dwarf_version >= 5)
10574 {
10575 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10576 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10577 }
10578 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10579 debug_skeleton_abbrev_section,
10580 "Offset Into Abbrev. Section");
10581 if (dwarf_version < 5)
10582 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10583 else
10584 for (int i = 0; i < 8; i++)
10585 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10586
10587 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10588 output_die (comp_unit);
10589
10590 /* Build the skeleton debug_abbrev section. */
10591 switch_to_section (debug_skeleton_abbrev_section);
10592 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10593
10594 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10595
10596 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10597 }
10598
10599 /* Output a comdat type unit DIE and its children. */
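/* The unit goes into a COMDAT section keyed on the 8-byte type signature:
   on ELF a group named "wi." or "wt." followed by the signature in hex, and
   elsewhere a .gnu.linkonce.wi. or .gnu.linkonce.wt. section, so the linker
   keeps only one copy of each type unit.  */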
10600
10601 static void
10602 output_comdat_type_unit (comdat_type_node *node)
10603 {
10604 const char *secname;
10605 char *tmp;
10606 int i;
10607 #if defined (OBJECT_FORMAT_ELF)
10608 tree comdat_key;
10609 #endif
10610
10611 /* First mark all the DIEs in this CU so we know which get local refs. */
10612 mark_dies (node->root_die);
10613
10614 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10615
10616 build_abbrev_table (node->root_die, extern_map);
10617
10618 delete extern_map;
10619 extern_map = NULL;
10620
10621 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10622 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10623 calc_die_sizes (node->root_die);
10624
10625 #if defined (OBJECT_FORMAT_ELF)
10626 if (dwarf_version >= 5)
10627 {
10628 if (!dwarf_split_debug_info)
10629 secname = ".debug_info";
10630 else
10631 secname = ".debug_info.dwo";
10632 }
10633 else if (!dwarf_split_debug_info)
10634 secname = ".debug_types";
10635 else
10636 secname = ".debug_types.dwo";
10637
10638 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10639 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10640 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10641 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10642 comdat_key = get_identifier (tmp);
10643 targetm.asm_out.named_section (secname,
10644 SECTION_DEBUG | SECTION_LINKONCE,
10645 comdat_key);
10646 #else
10647 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10648 sprintf (tmp, (dwarf_version >= 5
10649 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10650 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10651 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10652 secname = tmp;
10653 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10654 #endif
10655
10656 /* Output debugging information. */
10657 output_compilation_unit_header (dwarf_split_debug_info
10658 ? DW_UT_split_type : DW_UT_type);
10659 output_signature (node->signature, "Type Signature");
10660 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10661 "Offset to Type DIE");
10662 output_die (node->root_die);
10663
10664 unmark_dies (node->root_die);
10665 }
10666
10667 /* Return the DWARF2/3 pubname associated with a decl. */
10668
10669 static const char *
10670 dwarf2_name (tree decl, int scope)
10671 {
10672 if (DECL_NAMELESS (decl))
10673 return NULL;
10674 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10675 }
10676
10677 /* Add a new entry to .debug_pubnames if appropriate. */
10678
10679 static void
10680 add_pubname_string (const char *str, dw_die_ref die)
10681 {
10682 pubname_entry e;
10683
10684 e.die = die;
10685 e.name = xstrdup (str);
10686 vec_safe_push (pubname_table, e);
10687 }
10688
10689 static void
10690 add_pubname (tree decl, dw_die_ref die)
10691 {
10692 if (!want_pubnames ())
10693 return;
10694
10695 /* Don't add items to the table when we expect that the consumer will have
10696 just read the enclosing die. For example, if the consumer is looking at a
10697 class_member, it will either be inside the class already, or will have just
10698 looked up the class to find the member. Either way, searching the class is
10699 faster than searching the index. */
10700 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10701 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10702 {
10703 const char *name = dwarf2_name (decl, 1);
10704
10705 if (name)
10706 add_pubname_string (name, die);
10707 }
10708 }
10709
10710 /* Add an enumerator to the pubnames section. */
10711
10712 static void
10713 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10714 {
10715 pubname_entry e;
10716
10717 gcc_assert (scope_name);
10718 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10719 e.die = die;
10720 vec_safe_push (pubname_table, e);
10721 }
10722
10723 /* Add a new entry to .debug_pubtypes if appropriate. */
10724
10725 static void
10726 add_pubtype (tree decl, dw_die_ref die)
10727 {
10728 pubname_entry e;
10729
10730 if (!want_pubnames ())
10731 return;
10732
10733 if ((TREE_PUBLIC (decl)
10734 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10735 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10736 {
10737 tree scope = NULL;
10738 const char *scope_name = "";
10739 const char *sep = is_cxx () ? "::" : ".";
10740 const char *name;
10741
10742 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10743 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10744 {
10745 scope_name = lang_hooks.dwarf_name (scope, 1);
10746 if (scope_name != NULL && scope_name[0] != '\0')
10747 scope_name = concat (scope_name, sep, NULL);
10748 else
10749 scope_name = "";
10750 }
10751
10752 if (TYPE_P (decl))
10753 name = type_tag (decl);
10754 else
10755 name = lang_hooks.dwarf_name (decl, 1);
10756
10757 /* If we don't have a name for the type, there's no point in adding
10758 it to the table. */
10759 if (name != NULL && name[0] != '\0')
10760 {
10761 e.die = die;
10762 e.name = concat (scope_name, name, NULL);
10763 vec_safe_push (pubtype_table, e);
10764 }
10765
10766 /* Although it might be more consistent to add the pubinfo for the
10767 enumerators as their dies are created, they should only be added if the
10768 enum type meets the criteria above. So rather than re-check the parent
10769 enum type whenever an enumerator die is created, just output them all
10770 here. This isn't protected by the name conditional because anonymous
10771 enums don't have names. */
10772 if (die->die_tag == DW_TAG_enumeration_type)
10773 {
10774 dw_die_ref c;
10775
10776 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10777 }
10778 }
10779 }
10780
10781 /* Output a single entry in the pubnames table. */
10782
10783 static void
10784 output_pubname (dw_offset die_offset, pubname_entry *entry)
10785 {
10786 dw_die_ref die = entry->die;
10787 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10788
10789 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10790
10791 if (debug_generate_pub_sections == 2)
10792 {
10793 /* This logic follows gdb's method for determining the value of the flag
10794 byte. */
10795 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10796 switch (die->die_tag)
10797 {
10798 case DW_TAG_typedef:
10799 case DW_TAG_base_type:
10800 case DW_TAG_subrange_type:
10801 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10802 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10803 break;
10804 case DW_TAG_enumerator:
10805 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10806 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10807 if (!is_cxx ())
10808 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10809 break;
10810 case DW_TAG_subprogram:
10811 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10812 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10813 if (!is_ada ())
10814 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10815 break;
10816 case DW_TAG_constant:
10817 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10818 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10819 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10820 break;
10821 case DW_TAG_variable:
10822 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10823 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10824 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10825 break;
10826 case DW_TAG_namespace:
10827 case DW_TAG_imported_declaration:
10828 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10829 break;
10830 case DW_TAG_class_type:
10831 case DW_TAG_interface_type:
10832 case DW_TAG_structure_type:
10833 case DW_TAG_union_type:
10834 case DW_TAG_enumeration_type:
10835 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10836 if (!is_cxx ())
10837 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10838 break;
10839 default:
10840 /* An unusual tag. Leave the flag-byte empty. */
10841 break;
10842 }
10843 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10844 "GDB-index flags");
10845 }
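/* For illustration: when debug_generate_pub_sections == 2 (the GNU
   -ggnu-pubnames flavor), the byte written above is the high-order
   byte of a 32-bit gdb-index symbol value, i.e. whatever the
   GDB_INDEX_*_SET_VALUE macros stored once the low
   GDB_INDEX_CU_BITSIZE CU-index bits are shifted away; a consumer
   such as a linker building a .gdb_index section is then expected
   to combine it with a CU index of its own.  */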
10846
10847 dw2_asm_output_nstring (entry->name, -1, "external name");
10848 }
10849
10850
10851 /* Output the public names table used to speed up access to externally
10852 visible names; or the public types table used to find type definitions. */
10853
10854 static void
10855 output_pubnames (vec<pubname_entry, va_gc> *names)
10856 {
10857 unsigned i;
10858 unsigned long pubnames_length = size_of_pubnames (names);
10859 pubname_entry *pub;
10860
10861 if (!XCOFF_DEBUGGING_INFO)
10862 {
10863 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10864 dw2_asm_output_data (4, 0xffffffff,
10865 "Initial length escape value indicating 64-bit DWARF extension");
10866 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10867 "Pub Info Length");
10868 }
10869
10870 /* Version number for pubnames/pubtypes is independent of dwarf version. */
10871 dw2_asm_output_data (2, 2, "DWARF Version");
10872
10873 if (dwarf_split_debug_info)
10874 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10875 debug_skeleton_info_section,
10876 "Offset of Compilation Unit Info");
10877 else
10878 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10879 debug_info_section,
10880 "Offset of Compilation Unit Info");
10881 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10882 "Compilation Unit Length");
10883
10884 FOR_EACH_VEC_ELT (*names, i, pub)
10885 {
10886 if (include_pubname_in_output (names, pub))
10887 {
10888 dw_offset die_offset = pub->die->die_offset;
10889
10890 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10891 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10892 gcc_assert (pub->die->die_mark);
10893
10894 /* If we're putting types in their own .debug_types sections,
10895 the .debug_pubtypes table will still point to the compile
10896 unit (not the type unit), so we want to use the offset of
10897 the skeleton DIE (if there is one). */
10898 if (pub->die->comdat_type_p && names == pubtype_table)
10899 {
10900 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10901
10902 if (type_node != NULL)
10903 die_offset = (type_node->skeleton_die != NULL
10904 ? type_node->skeleton_die->die_offset
10905 : comp_unit_die ()->die_offset);
10906 }
10907
10908 output_pubname (die_offset, pub);
10909 }
10910 }
10911
10912 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10913 }
10914
10915 /* Output public names and types tables if necessary. */
10916
10917 static void
10918 output_pubtables (void)
10919 {
10920 if (!want_pubnames () || !info_section_emitted)
10921 return;
10922
10923 switch_to_section (debug_pubnames_section);
10924 output_pubnames (pubname_table);
10925 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10926 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10927 simply won't look for the section. */
10928 switch_to_section (debug_pubtypes_section);
10929 output_pubnames (pubtype_table);
10930 }
10931
10932
10933 /* Output the information that goes into the .debug_aranges table.
10934 Namely, define the beginning and ending address range of the
10935 text section generated for this compilation unit. */
10936
10937 static void
10938 output_aranges (void)
10939 {
10940 unsigned i;
10941 unsigned long aranges_length = size_of_aranges ();
10942
10943 if (!XCOFF_DEBUGGING_INFO)
10944 {
10945 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10946 dw2_asm_output_data (4, 0xffffffff,
10947 "Initial length escape value indicating 64-bit DWARF extension");
10948 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10949 "Length of Address Ranges Info");
10950 }
10951
10952 /* Version number for aranges is still 2, even up to DWARF5. */
10953 dw2_asm_output_data (2, 2, "DWARF Version");
10954 if (dwarf_split_debug_info)
10955 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10956 debug_skeleton_info_section,
10957 "Offset of Compilation Unit Info");
10958 else
10959 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10960 debug_info_section,
10961 "Offset of Compilation Unit Info");
10962 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10963 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10964
10965 /* We need to align to twice the pointer size here. */
10966 if (DWARF_ARANGES_PAD_SIZE)
10967 {
10968 /* Pad using 2-byte words so that the padding is correct for any
10969 pointer size. */
10970 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10971 2 * DWARF2_ADDR_SIZE);
10972 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10973 dw2_asm_output_data (2, 0, NULL);
10974 }
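/* Worked example of the padding above, assuming 4-byte DWARF offsets
   and 8-byte target addresses: the header emitted so far occupies
   4 (initial length) + 2 (version) + 4 (CU offset) + 1 + 1 (address
   and segment descriptor sizes) = 12 bytes, so DWARF_ARANGES_PAD_SIZE
   works out to 4 and two 2-byte zero words bring the address/length
   pairs onto a 16-byte (2 * DWARF2_ADDR_SIZE) boundary.  */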
10975
10976 /* Do not output these entries for sections that were not used:
10977 their length would be 0, and their address may also end up as 0
10978 if the section is discarded by ld --gc-sections, leaving an
10979 invalid (0, 0) entry that can be confused with the
10980 terminator. */
10981 if (text_section_used)
10982 {
10983 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10984 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10985 text_section_label, "Length");
10986 }
10987 if (cold_text_section_used)
10988 {
10989 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10990 "Address");
10991 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10992 cold_text_section_label, "Length");
10993 }
10994
10995 if (have_multiple_function_sections)
10996 {
10997 unsigned fde_idx;
10998 dw_fde_ref fde;
10999
11000 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11001 {
11002 if (DECL_IGNORED_P (fde->decl))
11003 continue;
11004 if (!fde->in_std_section)
11005 {
11006 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11007 "Address");
11008 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11009 fde->dw_fde_begin, "Length");
11010 }
11011 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11012 {
11013 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11014 "Address");
11015 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11016 fde->dw_fde_second_begin, "Length");
11017 }
11018 }
11019 }
11020
11021 /* Output the terminator words. */
11022 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11023 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11024 }
11025
11026 /* Add a new entry to .debug_ranges. Return its index into
11027 ranges_table vector. */
11028
11029 static unsigned int
11030 add_ranges_num (int num, bool maybe_new_sec)
11031 {
11032 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11033 vec_safe_push (ranges_table, r);
11034 return vec_safe_length (ranges_table) - 1;
11035 }
11036
11037 /* Add a new entry to .debug_ranges corresponding to a block, or a
11038 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11039 this entry might be in a different section from the previous range. */
11040
11041 static unsigned int
11042 add_ranges (const_tree block, bool maybe_new_sec)
11043 {
11044 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11045 }
11046
11047 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11048 chain, or a middle entry of a chain that will be directly referred to. */
11049
11050 static void
11051 note_rnglist_head (unsigned int offset)
11052 {
11053 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11054 return;
11055 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11056 }
11057
11058 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11059 When using dwarf_split_debug_info, address attributes in dies destined
11060 for the final executable should be direct references--setting the
11061 parameter force_direct ensures this behavior. */
11062
11063 static void
11064 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11065 bool *added, bool force_direct)
11066 {
11067 unsigned int in_use = vec_safe_length (ranges_by_label);
11068 unsigned int offset;
11069 dw_ranges_by_label rbl = { begin, end };
11070 vec_safe_push (ranges_by_label, rbl);
11071 offset = add_ranges_num (-(int)in_use - 1, true);
11072 if (!*added)
11073 {
11074 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11075 *added = true;
11076 note_rnglist_head (offset);
11077 }
11078 }
11079
11080 /* Emit .debug_ranges section. */
11081
11082 static void
11083 output_ranges (void)
11084 {
11085 unsigned i;
11086 static const char *const start_fmt = "Offset %#x";
11087 const char *fmt = start_fmt;
11088 dw_ranges *r;
11089
11090 switch_to_section (debug_ranges_section);
11091 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11092 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11093 {
11094 int block_num = r->num;
11095
11096 if (block_num > 0)
11097 {
11098 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11099 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11100
11101 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11102 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11103
11104 /* If all code is in the text section, then the compilation
11105 unit base address defaults to DW_AT_low_pc, which is the
11106 base of the text section. */
11107 if (!have_multiple_function_sections)
11108 {
11109 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11110 text_section_label,
11111 fmt, i * 2 * DWARF2_ADDR_SIZE);
11112 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11113 text_section_label, NULL);
11114 }
11115
11116 /* Otherwise, the compilation unit base address is zero,
11117 which allows us to use absolute addresses, and not worry
11118 about whether the target supports cross-section
11119 arithmetic. */
11120 else
11121 {
11122 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11123 fmt, i * 2 * DWARF2_ADDR_SIZE);
11124 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11125 }
11126
11127 fmt = NULL;
11128 }
11129
11130 /* Negative block_num stands for an index into ranges_by_label. */
11131 else if (block_num < 0)
11132 {
11133 int lab_idx = - block_num - 1;
11134
11135 if (!have_multiple_function_sections)
11136 {
11137 gcc_unreachable ();
11138 #if 0
11139 /* If we ever use add_ranges_by_labels () for a single
11140 function section, all we have to do is to take out
11141 the #if 0 above. */
11142 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11143 (*ranges_by_label)[lab_idx].begin,
11144 text_section_label,
11145 fmt, i * 2 * DWARF2_ADDR_SIZE);
11146 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11147 (*ranges_by_label)[lab_idx].end,
11148 text_section_label, NULL);
11149 #endif
11150 }
11151 else
11152 {
11153 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11154 (*ranges_by_label)[lab_idx].begin,
11155 fmt, i * 2 * DWARF2_ADDR_SIZE);
11156 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11157 (*ranges_by_label)[lab_idx].end,
11158 NULL);
11159 }
11160 }
11161 else
11162 {
11163 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11164 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11165 fmt = start_fmt;
11166 }
11167 }
11168 }
11169
11170 /* Non-zero if .debug_line_str should be used for .debug_line section
11171 strings or strings that are likely shareable with those. */
11172 #define DWARF5_USE_DEBUG_LINE_STR \
11173 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11174 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11175 /* FIXME: there is no .debug_line_str.dwo section, \
11176 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11177 && !dwarf_split_debug_info)
11178
11179 /* Assign .debug_rnglists indexes. */
11180
11181 static void
11182 index_rnglists (void)
11183 {
11184 unsigned i;
11185 dw_ranges *r;
11186
11187 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11188 if (r->label)
11189 r->idx = rnglist_idx++;
11190 }
11191
11192 /* Emit .debug_rnglists section. */
11193
11194 static void
11195 output_rnglists (void)
11196 {
11197 unsigned i;
11198 dw_ranges *r;
11199 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11200 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11201 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11202
11203 switch_to_section (debug_ranges_section);
11204 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11205 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL, 2);
11206 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL, 3);
11207 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11208 dw2_asm_output_data (4, 0xffffffff,
11209 "Initial length escape value indicating "
11210 "64-bit DWARF extension");
11211 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11212 "Length of Range Lists");
11213 ASM_OUTPUT_LABEL (asm_out_file, l1);
11214 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11215 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11216 dw2_asm_output_data (1, 0, "Segment Size");
11217 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11218 about relocation sizes and primarily care about the size of .debug*
11219 sections in linked shared libraries and executables, then
11220 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11221 into it are usually larger than just DW_FORM_sec_offset offsets
11222 into the .debug_rnglists section. */
11223 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11224 "Offset Entry Count");
11225 if (dwarf_split_debug_info)
11226 {
11227 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11228 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11229 if (r->label)
11230 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11231 ranges_base_label, NULL);
11232 }
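/* A sketch of what the loop below produces for a typical
   multi-section range list, using the DWARF 5 entry kinds emitted
   here: an optional DW_RLE_base_address carrying one full address,
   then DW_RLE_offset_pair entries whose two ULEB128 operands are
   offsets from that base, and a closing DW_RLE_end_of_list; ranges
   that cannot share a base (or targets without LEB128 assembler
   support) fall back to DW_RLE_start_length or DW_RLE_start_end
   with absolute labels.  */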
11233
11234 const char *lab = "";
11235 unsigned int len = vec_safe_length (ranges_table);
11236 const char *base = NULL;
11237 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11238 {
11239 int block_num = r->num;
11240
11241 if (r->label)
11242 {
11243 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11244 lab = r->label;
11245 }
11246 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11247 base = NULL;
11248 if (block_num > 0)
11249 {
11250 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11251 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11252
11253 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11254 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11255
11256 if (HAVE_AS_LEB128)
11257 {
11258 /* If all code is in the text section, then the compilation
11259 unit base address defaults to DW_AT_low_pc, which is the
11260 base of the text section. */
11261 if (!have_multiple_function_sections)
11262 {
11263 dw2_asm_output_data (1, DW_RLE_offset_pair,
11264 "DW_RLE_offset_pair (%s)", lab);
11265 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11266 "Range begin address (%s)", lab);
11267 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11268 "Range end address (%s)", lab);
11269 continue;
11270 }
11271 if (base == NULL)
11272 {
11273 dw_ranges *r2 = NULL;
11274 if (i < len - 1)
11275 r2 = &(*ranges_table)[i + 1];
11276 if (r2
11277 && r2->num != 0
11278 && r2->label == NULL
11279 && !r2->maybe_new_sec)
11280 {
11281 dw2_asm_output_data (1, DW_RLE_base_address,
11282 "DW_RLE_base_address (%s)", lab);
11283 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11284 "Base address (%s)", lab);
11285 strcpy (basebuf, blabel);
11286 base = basebuf;
11287 }
11288 }
11289 if (base)
11290 {
11291 dw2_asm_output_data (1, DW_RLE_offset_pair,
11292 "DW_RLE_offset_pair (%s)", lab);
11293 dw2_asm_output_delta_uleb128 (blabel, base,
11294 "Range begin address (%s)", lab);
11295 dw2_asm_output_delta_uleb128 (elabel, base,
11296 "Range end address (%s)", lab);
11297 continue;
11298 }
11299 dw2_asm_output_data (1, DW_RLE_start_length,
11300 "DW_RLE_start_length (%s)", lab);
11301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11302 "Range begin address (%s)", lab);
11303 dw2_asm_output_delta_uleb128 (elabel, blabel,
11304 "Range length (%s)", lab);
11305 }
11306 else
11307 {
11308 dw2_asm_output_data (1, DW_RLE_start_end,
11309 "DW_RLE_start_end (%s)", lab);
11310 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11311 "Range begin address (%s)", lab);
11312 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11313 "Range end address (%s)", lab);
11314 }
11315 }
11316
11317 /* Negative block_num stands for an index into ranges_by_label. */
11318 else if (block_num < 0)
11319 {
11320 int lab_idx = - block_num - 1;
11321 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11322 const char *elabel = (*ranges_by_label)[lab_idx].end;
11323
11324 if (!have_multiple_function_sections)
11325 gcc_unreachable ();
11326 if (HAVE_AS_LEB128)
11327 {
11328 dw2_asm_output_data (1, DW_RLE_start_length,
11329 "DW_RLE_start_length (%s)", lab);
11330 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11331 "Range begin address (%s)", lab);
11332 dw2_asm_output_delta_uleb128 (elabel, blabel,
11333 "Range length (%s)", lab);
11334 }
11335 else
11336 {
11337 dw2_asm_output_data (1, DW_RLE_start_end,
11338 "DW_RLE_start_end (%s)", lab);
11339 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11340 "Range begin address (%s)", lab);
11341 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11342 "Range end address (%s)", lab);
11343 }
11344 }
11345 else
11346 dw2_asm_output_data (1, DW_RLE_end_of_list,
11347 "DW_RLE_end_of_list (%s)", lab);
11348 }
11349 ASM_OUTPUT_LABEL (asm_out_file, l2);
11350 }
11351
11352 /* Data structure containing information about input files. */
11353 struct file_info
11354 {
11355 const char *path; /* Complete file name. */
11356 const char *fname; /* File name part. */
11357 int length; /* Length of entire string. */
11358 struct dwarf_file_data * file_idx; /* Index in input file table. */
11359 int dir_idx; /* Index in directory table. */
11360 };
11361
11362 /* Data structure containing information about directories with source
11363 files. */
11364 struct dir_info
11365 {
11366 const char *path; /* Path including directory name. */
11367 int length; /* Path length. */
11368 int prefix; /* Index of directory entry which is a prefix. */
11369 int count; /* Number of files in this directory. */
11370 int dir_idx; /* Index of directory used as base. */
11371 };
11372
11373 /* Callback function for file_info comparison. We sort by looking at
11374 the directories in the path. */
11375
11376 static int
11377 file_info_cmp (const void *p1, const void *p2)
11378 {
11379 const struct file_info *const s1 = (const struct file_info *) p1;
11380 const struct file_info *const s2 = (const struct file_info *) p2;
11381 const unsigned char *cp1;
11382 const unsigned char *cp2;
11383
11384 /* Take care of file names without directories. We need to make sure that
11385 we return consistent values to qsort since some qsort implementations will
11386 get confused if we return the same value when identical operands are passed
11387 in opposite orders. So if neither has a directory, return 0 and otherwise
11388 return 1 or -1 depending on which one has the directory. */
11389 if ((s1->path == s1->fname || s2->path == s2->fname))
11390 return (s2->path == s2->fname) - (s1->path == s1->fname);
11391
11392 cp1 = (const unsigned char *) s1->path;
11393 cp2 = (const unsigned char *) s2->path;
11394
11395 while (1)
11396 {
11397 ++cp1;
11398 ++cp2;
11399 /* Reached the end of either path? If so, handle as above. */
11400 if ((cp1 == (const unsigned char *) s1->fname)
11401 || (cp2 == (const unsigned char *) s2->fname))
11402 return ((cp2 == (const unsigned char *) s2->fname)
11403 - (cp1 == (const unsigned char *) s1->fname));
11404
11405 /* Do the characters of the current path component differ? */
11406 else if (*cp1 != *cp2)
11407 return *cp1 - *cp2;
11408 }
11409 }
11410
11411 struct file_name_acquire_data
11412 {
11413 struct file_info *files;
11414 int used_files;
11415 int max_files;
11416 };
11417
11418 /* Traversal function for the hash table. */
11419
11420 int
11421 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11422 {
11423 struct dwarf_file_data *d = *slot;
11424 struct file_info *fi;
11425 const char *f;
11426
11427 gcc_assert (fnad->max_files >= d->emitted_number);
11428
11429 if (! d->emitted_number)
11430 return 1;
11431
11432 gcc_assert (fnad->max_files != fnad->used_files);
11433
11434 fi = fnad->files + fnad->used_files++;
11435
11436 /* Skip all leading "./". */
11437 f = d->filename;
11438 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11439 f += 2;
11440
11441 /* Create a new array entry. */
11442 fi->path = f;
11443 fi->length = strlen (f);
11444 fi->file_idx = d;
11445
11446 /* Search for the file name part. */
11447 f = strrchr (f, DIR_SEPARATOR);
11448 #if defined (DIR_SEPARATOR_2)
11449 {
11450 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11451
11452 if (g != NULL)
11453 {
11454 if (f == NULL || f < g)
11455 f = g;
11456 }
11457 }
11458 #endif
11459
11460 fi->fname = f == NULL ? fi->path : f + 1;
11461 return 1;
11462 }
11463
11464 /* Helper function for output_file_names. Emit a FORM-encoded
11465 string STR, using ENTRY_KIND and index IDX for the
11466 assembly comment. */
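/* As a concrete illustration of the two forms handled here:
   DW_FORM_string places the NUL-terminated bytes of STR inline in
   the .debug_line header, while DW_FORM_line_strp emits only a
   DWARF_OFFSET_SIZE reference into .debug_line_str, so identical
   directory and file name strings can be merged when that section
   supports SECTION_MERGE (see DWARF5_USE_DEBUG_LINE_STR above).  */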
11467
11468 static void
11469 output_line_string (enum dwarf_form form, const char *str,
11470 const char *entry_kind, unsigned int idx)
11471 {
11472 switch (form)
11473 {
11474 case DW_FORM_string:
11475 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11476 break;
11477 case DW_FORM_line_strp:
11478 if (!debug_line_str_hash)
11479 debug_line_str_hash
11480 = hash_table<indirect_string_hasher>::create_ggc (10);
11481
11482 struct indirect_string_node *node;
11483 node = find_AT_string_in_table (str, debug_line_str_hash);
11484 set_indirect_string (node);
11485 node->form = form;
11486 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11487 debug_line_str_section, "%s: %#x: \"%s\"",
11488 entry_kind, 0, node->str);
11489 break;
11490 default:
11491 gcc_unreachable ();
11492 }
11493 }
11494
11495 /* Output the directory table and the file name table. We try to minimize
11496 the total amount of memory needed. A heuristic is used to avoid large
11497 slowdowns with many input files. */
11498
11499 static void
11500 output_file_names (void)
11501 {
11502 struct file_name_acquire_data fnad;
11503 int numfiles;
11504 struct file_info *files;
11505 struct dir_info *dirs;
11506 int *saved;
11507 int *savehere;
11508 int *backmap;
11509 int ndirs;
11510 int idx_offset;
11511 int i;
11512
11513 if (!last_emitted_file)
11514 {
11515 if (dwarf_version >= 5)
11516 {
11517 dw2_asm_output_data (1, 0, "Directory entry format count");
11518 dw2_asm_output_data_uleb128 (0, "Directories count");
11519 dw2_asm_output_data (1, 0, "File name entry format count");
11520 dw2_asm_output_data_uleb128 (0, "File names count");
11521 }
11522 else
11523 {
11524 dw2_asm_output_data (1, 0, "End directory table");
11525 dw2_asm_output_data (1, 0, "End file name table");
11526 }
11527 return;
11528 }
11529
11530 numfiles = last_emitted_file->emitted_number;
11531
11532 /* Allocate the various arrays we need. */
11533 files = XALLOCAVEC (struct file_info, numfiles);
11534 dirs = XALLOCAVEC (struct dir_info, numfiles);
11535
11536 fnad.files = files;
11537 fnad.used_files = 0;
11538 fnad.max_files = numfiles;
11539 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11540 gcc_assert (fnad.used_files == fnad.max_files);
11541
11542 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11543
11544 /* Find all the different directories used. */
11545 dirs[0].path = files[0].path;
11546 dirs[0].length = files[0].fname - files[0].path;
11547 dirs[0].prefix = -1;
11548 dirs[0].count = 1;
11549 dirs[0].dir_idx = 0;
11550 files[0].dir_idx = 0;
11551 ndirs = 1;
11552
11553 for (i = 1; i < numfiles; i++)
11554 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11555 && memcmp (dirs[ndirs - 1].path, files[i].path,
11556 dirs[ndirs - 1].length) == 0)
11557 {
11558 /* Same directory as last entry. */
11559 files[i].dir_idx = ndirs - 1;
11560 ++dirs[ndirs - 1].count;
11561 }
11562 else
11563 {
11564 int j;
11565
11566 /* This is a new directory. */
11567 dirs[ndirs].path = files[i].path;
11568 dirs[ndirs].length = files[i].fname - files[i].path;
11569 dirs[ndirs].count = 1;
11570 dirs[ndirs].dir_idx = ndirs;
11571 files[i].dir_idx = ndirs;
11572
11573 /* Search for a prefix. */
11574 dirs[ndirs].prefix = -1;
11575 for (j = 0; j < ndirs; j++)
11576 if (dirs[j].length < dirs[ndirs].length
11577 && dirs[j].length > 1
11578 && (dirs[ndirs].prefix == -1
11579 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11580 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11581 dirs[ndirs].prefix = j;
11582
11583 ++ndirs;
11584 }
11585
11586 /* Now to the actual work. We have to find a subset of the directories which
11587 allows expressing each file name as a reference into the directory table
11588 using the fewest characters. We do not do an exhaustive search, which
11589 would require checking every combination of every single possible
11590 prefix. Instead we use a heuristic which provides nearly optimal
11591 results in most cases and is never far off. */
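/* Worked example of the heuristic, with hypothetical paths: given
   three files under "/usr/include/" and two under
   "/usr/include/sys/", considering "/usr/include/" (13 characters)
   sets savehere to 13 for both directories, so total is
   13*3 + 13*2 = 65, well above the 13 + 1 cost of emitting the
   directory once; it is therefore kept, and the files below sys/
   are later written as "sys/foo.h" relative to that shared prefix.  */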
11592 saved = XALLOCAVEC (int, ndirs);
11593 savehere = XALLOCAVEC (int, ndirs);
11594
11595 memset (saved, '\0', ndirs * sizeof (saved[0]));
11596 for (i = 0; i < ndirs; i++)
11597 {
11598 int j;
11599 int total;
11600
11601 /* We can always save some space for the current directory. But this
11602 does not mean it will be enough to justify adding the directory. */
11603 savehere[i] = dirs[i].length;
11604 total = (savehere[i] - saved[i]) * dirs[i].count;
11605
11606 for (j = i + 1; j < ndirs; j++)
11607 {
11608 savehere[j] = 0;
11609 if (saved[j] < dirs[i].length)
11610 {
11611 /* Determine whether the dirs[i] path is a prefix of the
11612 dirs[j] path. */
11613 int k;
11614
11615 k = dirs[j].prefix;
11616 while (k != -1 && k != (int) i)
11617 k = dirs[k].prefix;
11618
11619 if (k == (int) i)
11620 {
11621 /* Yes it is. We can possibly save some memory by
11622 writing the filenames in dirs[j] relative to
11623 dirs[i]. */
11624 savehere[j] = dirs[i].length;
11625 total += (savehere[j] - saved[j]) * dirs[j].count;
11626 }
11627 }
11628 }
11629
11630 /* Check whether we can save enough to justify adding the dirs[i]
11631 directory. */
11632 if (total > dirs[i].length + 1)
11633 {
11634 /* It's worthwhile adding. */
11635 for (j = i; j < ndirs; j++)
11636 if (savehere[j] > 0)
11637 {
11638 /* Remember how much we saved for this directory so far. */
11639 saved[j] = savehere[j];
11640
11641 /* Remember the prefix directory. */
11642 dirs[j].dir_idx = i;
11643 }
11644 }
11645 }
11646
11647 /* Emit the directory name table. */
11648 idx_offset = dirs[0].length > 0 ? 1 : 0;
11649 enum dwarf_form str_form = DW_FORM_string;
11650 enum dwarf_form idx_form = DW_FORM_udata;
11651 if (dwarf_version >= 5)
11652 {
11653 const char *comp_dir = comp_dir_string ();
11654 if (comp_dir == NULL)
11655 comp_dir = "";
11656 dw2_asm_output_data (1, 1, "Directory entry format count");
11657 if (DWARF5_USE_DEBUG_LINE_STR)
11658 str_form = DW_FORM_line_strp;
11659 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11660 dw2_asm_output_data_uleb128 (str_form, "%s",
11661 get_DW_FORM_name (str_form));
11662 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11663 if (str_form == DW_FORM_string)
11664 {
11665 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11666 for (i = 1 - idx_offset; i < ndirs; i++)
11667 dw2_asm_output_nstring (dirs[i].path,
11668 dirs[i].length
11669 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11670 "Directory Entry: %#x", i + idx_offset);
11671 }
11672 else
11673 {
11674 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11675 for (i = 1 - idx_offset; i < ndirs; i++)
11676 {
11677 const char *str
11678 = ggc_alloc_string (dirs[i].path,
11679 dirs[i].length
11680 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11681 output_line_string (str_form, str, "Directory Entry",
11682 (unsigned) i + idx_offset);
11683 }
11684 }
11685 }
11686 else
11687 {
11688 for (i = 1 - idx_offset; i < ndirs; i++)
11689 dw2_asm_output_nstring (dirs[i].path,
11690 dirs[i].length
11691 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11692 "Directory Entry: %#x", i + idx_offset);
11693
11694 dw2_asm_output_data (1, 0, "End directory table");
11695 }
11696
11697 /* We have to emit them in the order of emitted_number since that's
11698 used in the debug info generation. To do this efficiently we
11699 generate a back-mapping of the indices first. */
11700 backmap = XALLOCAVEC (int, numfiles);
11701 for (i = 0; i < numfiles; i++)
11702 backmap[files[i].file_idx->emitted_number - 1] = i;
11703
11704 if (dwarf_version >= 5)
11705 {
11706 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11707 if (filename0 == NULL)
11708 filename0 = "";
11709 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11710 DW_FORM_data2. Choose one based on the number of directories
11711 and how much space they would occupy in each encoding.
11712 If we have at most 256 directories, all indexes fit into
11713 a single byte, so DW_FORM_data1 is most compact (if there
11714 are at most 128 directories, DW_FORM_udata would be just as
11715 compact, but no shorter and slower to decode). */
11716 if (ndirs + idx_offset <= 256)
11717 idx_form = DW_FORM_data1;
11718 /* If there are more than 65536 directories, we have to use
11719 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11720 Otherwise, compute how much space all the indexes would occupy
11721 if they used DW_FORM_udata (sum), compare that to the size of
11722 the DW_FORM_data2 encoding, and pick the more efficient one. */
11723 else if (ndirs + idx_offset <= 65536)
11724 {
11725 unsigned HOST_WIDE_INT sum = 1;
11726 for (i = 0; i < numfiles; i++)
11727 {
11728 int file_idx = backmap[i];
11729 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11730 sum += size_of_uleb128 (dir_idx);
11731 }
11732 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11733 idx_form = DW_FORM_data2;
11734 }
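/* For illustration of this trade-off: a directory index below 128
   takes one ULEB128 byte while one at or above 128 takes two (200,
   for instance, encodes as 0xC8 0x01), so with, say, 300 directories
   DW_FORM_data2 only wins once enough file entries refer to
   high-numbered directories for the summed ULEB128 bytes to reach
   roughly two bytes per entry.  */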
11735 #ifdef VMS_DEBUGGING_INFO
11736 dw2_asm_output_data (1, 4, "File name entry format count");
11737 #else
11738 dw2_asm_output_data (1, 2, "File name entry format count");
11739 #endif
11740 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11741 dw2_asm_output_data_uleb128 (str_form, "%s",
11742 get_DW_FORM_name (str_form));
11743 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11744 "DW_LNCT_directory_index");
11745 dw2_asm_output_data_uleb128 (idx_form, "%s",
11746 get_DW_FORM_name (idx_form));
11747 #ifdef VMS_DEBUGGING_INFO
11748 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11749 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11750 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11751 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11752 #endif
11753 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11754
11755 output_line_string (str_form, filename0, "File Entry", 0);
11756
11757 /* Include directory index. */
11758 if (idx_form != DW_FORM_udata)
11759 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11760 0, NULL);
11761 else
11762 dw2_asm_output_data_uleb128 (0, NULL);
11763
11764 #ifdef VMS_DEBUGGING_INFO
11765 dw2_asm_output_data_uleb128 (0, NULL);
11766 dw2_asm_output_data_uleb128 (0, NULL);
11767 #endif
11768 }
11769
11770 /* Now write all the file names. */
11771 for (i = 0; i < numfiles; i++)
11772 {
11773 int file_idx = backmap[i];
11774 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11775
11776 #ifdef VMS_DEBUGGING_INFO
11777 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11778
11779 /* Setting these fields can lead to debugger miscomparisons,
11780 but VMS Debug requires them to be set correctly. */
11781
11782 int ver;
11783 long long cdt;
11784 long siz;
11785 int maxfilelen = (strlen (files[file_idx].path)
11786 + dirs[dir_idx].length
11787 + MAX_VMS_VERSION_LEN + 1);
11788 char *filebuf = XALLOCAVEC (char, maxfilelen);
11789
11790 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11791 snprintf (filebuf, maxfilelen, "%s;%d",
11792 files[file_idx].path + dirs[dir_idx].length, ver);
11793
11794 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11795
11796 /* Include directory index. */
11797 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11798 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11799 dir_idx + idx_offset, NULL);
11800 else
11801 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11802
11803 /* Modification time. */
11804 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11805 &cdt, 0, 0, 0) == 0)
11806 ? cdt : 0, NULL);
11807
11808 /* File length in bytes. */
11809 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11810 0, &siz, 0, 0) == 0)
11811 ? siz : 0, NULL);
11812 #else
11813 output_line_string (str_form,
11814 files[file_idx].path + dirs[dir_idx].length,
11815 "File Entry", (unsigned) i + 1);
11816
11817 /* Include directory index. */
11818 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11819 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11820 dir_idx + idx_offset, NULL);
11821 else
11822 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11823
11824 if (dwarf_version >= 5)
11825 continue;
11826
11827 /* Modification time. */
11828 dw2_asm_output_data_uleb128 (0, NULL);
11829
11830 /* File length in bytes. */
11831 dw2_asm_output_data_uleb128 (0, NULL);
11832 #endif /* VMS_DEBUGGING_INFO */
11833 }
11834
11835 if (dwarf_version < 5)
11836 dw2_asm_output_data (1, 0, "End file name table");
11837 }
11838
11839
11840 /* Output one line number table into the .debug_line section. */
11841
11842 static void
11843 output_one_line_info_table (dw_line_info_table *table)
11844 {
11845 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11846 unsigned int current_line = 1;
11847 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11848 dw_line_info_entry *ent;
11849 size_t i;
11850
11851 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11852 {
11853 switch (ent->opcode)
11854 {
11855 case LI_set_address:
11856 /* ??? Unfortunately, we have little choice here currently, and
11857 must always use the most general form. GCC does not know the
11858 address delta itself, so we can't use DW_LNS_advance_pc. Many
11859 ports do have length attributes which will give an upper bound
11860 on the address range. We could perhaps use length attributes
11861 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11862 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11863
11864 /* This can handle any delta. This takes
11865 4+DWARF2_ADDR_SIZE bytes. */
11866 dw2_asm_output_data (1, 0, "set address %s", line_label);
11867 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11868 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11869 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11870 break;
11871
11872 case LI_set_line:
11873 if (ent->val == current_line)
11874 {
11875 /* We still need to start a new row, so output a copy insn. */
11876 dw2_asm_output_data (1, DW_LNS_copy,
11877 "copy line %u", current_line);
11878 }
11879 else
11880 {
11881 int line_offset = ent->val - current_line;
11882 int line_delta = line_offset - DWARF_LINE_BASE;
11883
11884 current_line = ent->val;
11885 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11886 {
11887 /* This can handle deltas from -10 to 234, using the current
11888 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11889 This takes 1 byte. */
11890 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11891 "line %u", current_line);
11892 }
11893 else
11894 {
11895 /* This can handle any delta. This takes at least 4 bytes,
11896 depending on the value being encoded. */
11897 dw2_asm_output_data (1, DW_LNS_advance_line,
11898 "advance to line %u", current_line);
11899 dw2_asm_output_data_sleb128 (line_offset, NULL);
11900 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11901 }
11902 }
11903 break;
11904
11905 case LI_set_file:
11906 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11907 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11908 break;
11909
11910 case LI_set_column:
11911 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11912 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11913 break;
11914
11915 case LI_negate_stmt:
11916 current_is_stmt = !current_is_stmt;
11917 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11918 "is_stmt %d", current_is_stmt);
11919 break;
11920
11921 case LI_set_prologue_end:
11922 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11923 "set prologue end");
11924 break;
11925
11926 case LI_set_epilogue_begin:
11927 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11928 "set epilogue begin");
11929 break;
11930
11931 case LI_set_discriminator:
11932 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11933 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11934 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11935 dw2_asm_output_data_uleb128 (ent->val, NULL);
11936 break;
11937 }
11938 }
11939
11940 /* Emit debug info for the address of the end of the table. */
11941 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11942 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11943 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11944 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11945
11946 dw2_asm_output_data (1, 0, "end sequence");
11947 dw2_asm_output_data_uleb128 (1, NULL);
11948 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
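/* Concretely, the three bytes just emitted are 0x00 (extended opcode
   escape), 0x01 (one operand byte follows) and DW_LNE_end_sequence,
   which ends the current sequence and resets the line-number state
   machine before any following table.  */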
11949 }
11950
11951 /* Output the source line number correspondence information. This
11952 information goes into the .debug_line section. */
11953
11954 static void
11955 output_line_info (bool prologue_only)
11956 {
11957 static unsigned int generation;
11958 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11959 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11960 bool saw_one = false;
11961 int opc;
11962
11963 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11964 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11965 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11966 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11967
11968 if (!XCOFF_DEBUGGING_INFO)
11969 {
11970 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11971 dw2_asm_output_data (4, 0xffffffff,
11972 "Initial length escape value indicating 64-bit DWARF extension");
11973 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11974 "Length of Source Line Info");
11975 }
11976
11977 ASM_OUTPUT_LABEL (asm_out_file, l1);
11978
11979 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11980 if (dwarf_version >= 5)
11981 {
11982 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11983 dw2_asm_output_data (1, 0, "Segment Size");
11984 }
11985 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
11986 ASM_OUTPUT_LABEL (asm_out_file, p1);
11987
11988 /* Define the architecture-dependent minimum instruction length (in bytes).
11989 In this implementation of DWARF, this field is used for information
11990 purposes only. Since GCC generates assembly language, we have no
11991 a priori knowledge of how many instruction bytes are generated for each
11992 source line, and therefore can use only the DW_LNE_set_address and
11993 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
11994 this as '1', which is "correct enough" for all architectures,
11995 and don't let the target override. */
11996 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
11997
11998 if (dwarf_version >= 4)
11999 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12000 "Maximum Operations Per Instruction");
12001 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12002 "Default is_stmt_start flag");
12003 dw2_asm_output_data (1, DWARF_LINE_BASE,
12004 "Line Base Value (Special Opcodes)");
12005 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12006 "Line Range Value (Special Opcodes)");
12007 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12008 "Special Opcode Base");
12009
12010 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12011 {
12012 int n_op_args;
12013 switch (opc)
12014 {
12015 case DW_LNS_advance_pc:
12016 case DW_LNS_advance_line:
12017 case DW_LNS_set_file:
12018 case DW_LNS_set_column:
12019 case DW_LNS_fixed_advance_pc:
12020 case DW_LNS_set_isa:
12021 n_op_args = 1;
12022 break;
12023 default:
12024 n_op_args = 0;
12025 break;
12026 }
12027
12028 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12029 opc, n_op_args);
12030 }
12031
12032 /* Write out the information about the files we use. */
12033 output_file_names ();
12034 ASM_OUTPUT_LABEL (asm_out_file, p2);
12035 if (prologue_only)
12036 {
12037 /* Output the marker for the end of the line number info. */
12038 ASM_OUTPUT_LABEL (asm_out_file, l2);
12039 return;
12040 }
12041
12042 if (separate_line_info)
12043 {
12044 dw_line_info_table *table;
12045 size_t i;
12046
12047 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12048 if (table->in_use)
12049 {
12050 output_one_line_info_table (table);
12051 saw_one = true;
12052 }
12053 }
12054 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12055 {
12056 output_one_line_info_table (cold_text_section_line_info);
12057 saw_one = true;
12058 }
12059
12060 /* ??? Some Darwin linkers crash on a .debug_line section with no
12061 sequences. Further, merely a DW_LNE_end_sequence entry is not
12062 sufficient -- the address column must also be initialized.
12063 Make sure to output at least one set_address/end_sequence pair,
12064 choosing .text since that section is always present. */
12065 if (text_section_line_info->in_use || !saw_one)
12066 output_one_line_info_table (text_section_line_info);
12067
12068 /* Output the marker for the end of the line number info. */
12069 ASM_OUTPUT_LABEL (asm_out_file, l2);
12070 }
12071 \f
12072 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12073
12074 static inline bool
12075 need_endianity_attribute_p (bool reverse)
12076 {
12077 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12078 }
12079
12080 /* Given a pointer to a tree node for some base type, return a pointer to
12081 a DIE that describes the given type. REVERSE is true if the type is
12082 to be interpreted in the reverse storage order wrt the target order.
12083
12084 This routine must only be called for GCC type nodes that correspond to
12085 Dwarf base (fundamental) types. */
12086
12087 static dw_die_ref
12088 base_type_die (tree type, bool reverse)
12089 {
12090 dw_die_ref base_type_result;
12091 enum dwarf_type encoding;
12092 bool fpt_used = false;
12093 struct fixed_point_type_info fpt_info;
12094 tree type_bias = NULL_TREE;
12095
12096 /* If this is a subtype that should not be emitted as a subrange type,
12097 use the base type. See subrange_type_for_debug_p. */
12098 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12099 type = TREE_TYPE (type);
12100
12101 switch (TREE_CODE (type))
12102 {
12103 case INTEGER_TYPE:
12104 if ((dwarf_version >= 4 || !dwarf_strict)
12105 && TYPE_NAME (type)
12106 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12107 && DECL_IS_BUILTIN (TYPE_NAME (type))
12108 && DECL_NAME (TYPE_NAME (type)))
12109 {
12110 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12111 if (strcmp (name, "char16_t") == 0
12112 || strcmp (name, "char32_t") == 0)
12113 {
12114 encoding = DW_ATE_UTF;
12115 break;
12116 }
12117 }
12118 if ((dwarf_version >= 3 || !dwarf_strict)
12119 && lang_hooks.types.get_fixed_point_type_info)
12120 {
12121 memset (&fpt_info, 0, sizeof (fpt_info));
12122 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12123 {
12124 fpt_used = true;
12125 encoding = ((TYPE_UNSIGNED (type))
12126 ? DW_ATE_unsigned_fixed
12127 : DW_ATE_signed_fixed);
12128 break;
12129 }
12130 }
12131 if (TYPE_STRING_FLAG (type))
12132 {
12133 if (TYPE_UNSIGNED (type))
12134 encoding = DW_ATE_unsigned_char;
12135 else
12136 encoding = DW_ATE_signed_char;
12137 }
12138 else if (TYPE_UNSIGNED (type))
12139 encoding = DW_ATE_unsigned;
12140 else
12141 encoding = DW_ATE_signed;
12142
12143 if (!dwarf_strict
12144 && lang_hooks.types.get_type_bias)
12145 type_bias = lang_hooks.types.get_type_bias (type);
12146 break;
12147
12148 case REAL_TYPE:
12149 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12150 {
12151 if (dwarf_version >= 3 || !dwarf_strict)
12152 encoding = DW_ATE_decimal_float;
12153 else
12154 encoding = DW_ATE_lo_user;
12155 }
12156 else
12157 encoding = DW_ATE_float;
12158 break;
12159
12160 case FIXED_POINT_TYPE:
12161 if (!(dwarf_version >= 3 || !dwarf_strict))
12162 encoding = DW_ATE_lo_user;
12163 else if (TYPE_UNSIGNED (type))
12164 encoding = DW_ATE_unsigned_fixed;
12165 else
12166 encoding = DW_ATE_signed_fixed;
12167 break;
12168
12169 /* Dwarf2 doesn't know anything about complex ints, so use
12170 a user defined type for it. */
12171 case COMPLEX_TYPE:
12172 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12173 encoding = DW_ATE_complex_float;
12174 else
12175 encoding = DW_ATE_lo_user;
12176 break;
12177
12178 case BOOLEAN_TYPE:
12179 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12180 encoding = DW_ATE_boolean;
12181 break;
12182
12183 default:
12184 /* No other TREE_CODEs are Dwarf fundamental types. */
12185 gcc_unreachable ();
12186 }
12187
12188 base_type_result = new_die_raw (DW_TAG_base_type);
12189
12190 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12191 int_size_in_bytes (type));
12192 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12193
12194 if (need_endianity_attribute_p (reverse))
12195 add_AT_unsigned (base_type_result, DW_AT_endianity,
12196 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12197
12198 add_alignment_attribute (base_type_result, type);
12199
12200 if (fpt_used)
12201 {
12202 switch (fpt_info.scale_factor_kind)
12203 {
12204 case fixed_point_scale_factor_binary:
12205 add_AT_int (base_type_result, DW_AT_binary_scale,
12206 fpt_info.scale_factor.binary);
12207 break;
12208
12209 case fixed_point_scale_factor_decimal:
12210 add_AT_int (base_type_result, DW_AT_decimal_scale,
12211 fpt_info.scale_factor.decimal);
12212 break;
12213
12214 case fixed_point_scale_factor_arbitrary:
12215 /* Arbitrary scale factors cannot be described in standard DWARF,
12216 yet. */
12217 if (!dwarf_strict)
12218 {
12219 /* Describe the scale factor as a rational constant. */
12220 const dw_die_ref scale_factor
12221 = new_die (DW_TAG_constant, comp_unit_die (), type);
12222
12223 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12224 fpt_info.scale_factor.arbitrary.numerator);
12225 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12226 fpt_info.scale_factor.arbitrary.denominator);
12227
12228 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12229 }
12230 break;
12231
12232 default:
12233 gcc_unreachable ();
12234 }
12235 }
12236
12237 if (type_bias)
12238 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12239 dw_scalar_form_constant
12240 | dw_scalar_form_exprloc
12241 | dw_scalar_form_reference,
12242 NULL);
12243
12244 return base_type_result;
12245 }
12246
12247 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12248 named 'auto' in its type: return true for it, false otherwise. */
12249
12250 static inline bool
12251 is_cxx_auto (tree type)
12252 {
12253 if (is_cxx ())
12254 {
12255 tree name = TYPE_IDENTIFIER (type);
12256 if (name == get_identifier ("auto")
12257 || name == get_identifier ("decltype(auto)"))
12258 return true;
12259 }
12260 return false;
12261 }
12262
12263 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12264 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12265
12266 static inline int
12267 is_base_type (tree type)
12268 {
12269 switch (TREE_CODE (type))
12270 {
12271 case INTEGER_TYPE:
12272 case REAL_TYPE:
12273 case FIXED_POINT_TYPE:
12274 case COMPLEX_TYPE:
12275 case BOOLEAN_TYPE:
12276 case POINTER_BOUNDS_TYPE:
12277 return 1;
12278
12279 case VOID_TYPE:
12280 case ARRAY_TYPE:
12281 case RECORD_TYPE:
12282 case UNION_TYPE:
12283 case QUAL_UNION_TYPE:
12284 case ENUMERAL_TYPE:
12285 case FUNCTION_TYPE:
12286 case METHOD_TYPE:
12287 case POINTER_TYPE:
12288 case REFERENCE_TYPE:
12289 case NULLPTR_TYPE:
12290 case OFFSET_TYPE:
12291 case LANG_TYPE:
12292 case VECTOR_TYPE:
12293 return 0;
12294
12295 default:
12296 if (is_cxx_auto (type))
12297 return 0;
12298 gcc_unreachable ();
12299 }
12300
12301 return 0;
12302 }
12303
12304 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12305 node, return the size in bits for the type if its size is a
12306 constant; otherwise return the alignment for the type, or
12307 BITS_PER_WORD if the type actually turns out to be an
12308 ERROR_MARK node. */
12309
12310 static inline unsigned HOST_WIDE_INT
12311 simple_type_size_in_bits (const_tree type)
12312 {
12313 if (TREE_CODE (type) == ERROR_MARK)
12314 return BITS_PER_WORD;
12315 else if (TYPE_SIZE (type) == NULL_TREE)
12316 return 0;
12317 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12318 return tree_to_uhwi (TYPE_SIZE (type));
12319 else
12320 return TYPE_ALIGN (type);
12321 }
12322
12323 /* Similarly, but return an offset_int instead of UHWI. */
12324
12325 static inline offset_int
12326 offset_int_type_size_in_bits (const_tree type)
12327 {
12328 if (TREE_CODE (type) == ERROR_MARK)
12329 return BITS_PER_WORD;
12330 else if (TYPE_SIZE (type) == NULL_TREE)
12331 return 0;
12332 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12333 return wi::to_offset (TYPE_SIZE (type));
12334 else
12335 return TYPE_ALIGN (type);
12336 }
12337
12338 /* Given a pointer to a tree node for a subrange type, return a pointer
12339 to a DIE that describes the given type. */
12340
12341 static dw_die_ref
12342 subrange_type_die (tree type, tree low, tree high, tree bias,
12343 dw_die_ref context_die)
12344 {
12345 dw_die_ref subrange_die;
12346 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12347
12348 if (context_die == NULL)
12349 context_die = comp_unit_die ();
12350
12351 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12352
12353 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12354 {
12355 /* The size of the subrange type and its base type do not match,
12356 so we need to generate a size attribute for the subrange type. */
12357 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12358 }
12359
12360 add_alignment_attribute (subrange_die, type);
12361
12362 if (low)
12363 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12364 if (high)
12365 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12366 if (bias && !dwarf_strict)
12367 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12368 dw_scalar_form_constant
12369 | dw_scalar_form_exprloc
12370 | dw_scalar_form_reference,
12371 NULL);
12372
12373 return subrange_die;
12374 }
12375
12376 /* Returns the (const and/or volatile) cv_qualifiers associated with
12377 the decl node. This will normally be augmented with the
12378 cv_qualifiers of the underlying type in add_type_attribute. */
12379
12380 static int
12381 decl_quals (const_tree decl)
12382 {
12383 return ((TREE_READONLY (decl)
12384 /* The C++ front-end correctly marks reference-typed
12385 variables as readonly, but from a language (and debug
12386 info) standpoint they are not const-qualified. */
12387 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12388 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12389 | (TREE_THIS_VOLATILE (decl)
12390 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12391 }
12392
12393 /* Determine the TYPE whose qualifiers match the largest strict subset
12394 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12395 qualifiers outside QUAL_MASK. */
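/* An illustrative case: if TYPE_QUALS requests const+volatile but
   only an unqualified and a const-qualified variant of TYPE exist,
   this returns TYPE_QUAL_CONST, letting the caller reuse the const
   variant's DIE and layer just a DW_TAG_volatile_type on top of
   it.  */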
12396
12397 static int
12398 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12399 {
12400 tree t;
12401 int best_rank = 0, best_qual = 0, max_rank;
12402
12403 type_quals &= qual_mask;
12404 max_rank = popcount_hwi (type_quals) - 1;
12405
12406 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12407 t = TYPE_NEXT_VARIANT (t))
12408 {
12409 int q = TYPE_QUALS (t) & qual_mask;
12410
12411 if ((q & type_quals) == q && q != type_quals
12412 && check_base_type (t, type))
12413 {
12414 int rank = popcount_hwi (q);
12415
12416 if (rank > best_rank)
12417 {
12418 best_rank = rank;
12419 best_qual = q;
12420 }
12421 }
12422 }
12423
12424 return best_qual;
12425 }
12426
12427 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12428 static const dwarf_qual_info_t dwarf_qual_info[] =
12429 {
12430 { TYPE_QUAL_CONST, DW_TAG_const_type },
12431 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12432 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12433 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12434 };
12435 static const unsigned int dwarf_qual_info_size
12436 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12437
12438 /* If DIE is a qualified DIE of some base DIE with the same parent,
12439 return the base DIE, otherwise return NULL. Set MASK to the
12440 qualifiers added compared to the returned DIE. */
12441
12442 static dw_die_ref
12443 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12444 {
12445 unsigned int i;
12446 for (i = 0; i < dwarf_qual_info_size; i++)
12447 if (die->die_tag == dwarf_qual_info[i].t)
12448 break;
12449 if (i == dwarf_qual_info_size)
12450 return NULL;
12451 if (vec_safe_length (die->die_attr) != 1)
12452 return NULL;
12453 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12454 if (type == NULL || type->die_parent != die->die_parent)
12455 return NULL;
12456 *mask |= dwarf_qual_info[i].q;
12457 if (depth)
12458 {
12459 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12460 if (ret)
12461 return ret;
12462 }
12463 return type;
12464 }
12465
12466 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12467 entry that chains the modifiers specified by CV_QUALS in front of the
12468 given type. REVERSE is true if the type is to be interpreted in the
12469 reverse storage order wrt the target order. */
12470
12471 static dw_die_ref
12472 modified_type_die (tree type, int cv_quals, bool reverse,
12473 dw_die_ref context_die)
12474 {
12475 enum tree_code code = TREE_CODE (type);
12476 dw_die_ref mod_type_die;
12477 dw_die_ref sub_die = NULL;
12478 tree item_type = NULL;
12479 tree qualified_type;
12480 tree name, low, high;
12481 dw_die_ref mod_scope;
12482 /* Only these cv-qualifiers are currently handled. */
12483 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12484 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
12485 ENCODE_QUAL_ADDR_SPACE(~0U));
12486 const bool reverse_base_type
12487 = need_endianity_attribute_p (reverse) && is_base_type (type);
12488
12489 if (code == ERROR_MARK)
12490 return NULL;
12491
12492 if (lang_hooks.types.get_debug_type)
12493 {
12494 tree debug_type = lang_hooks.types.get_debug_type (type);
12495
12496 if (debug_type != NULL_TREE && debug_type != type)
12497 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12498 }
12499
12500 cv_quals &= cv_qual_mask;
12501
12502 /* Don't emit DW_TAG_restrict_type for DWARFv2: since it is a type
12503 tag modifier (and not an attribute), old consumers won't be able
12504 to handle it. */
12505 if (dwarf_version < 3)
12506 cv_quals &= ~TYPE_QUAL_RESTRICT;
12507
12508 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
12509 if (dwarf_version < 5)
12510 cv_quals &= ~TYPE_QUAL_ATOMIC;
12511
12512 /* See if we already have the appropriately qualified variant of
12513 this type. */
12514 qualified_type = get_qualified_type (type, cv_quals);
12515
12516 if (qualified_type == sizetype)
12517 {
12518 /* Try not to expose the internal sizetype type's name. */
12519 if (TYPE_NAME (qualified_type)
12520 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12521 {
12522 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12523
12524 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12525 && (TYPE_PRECISION (t)
12526 == TYPE_PRECISION (qualified_type))
12527 && (TYPE_UNSIGNED (t)
12528 == TYPE_UNSIGNED (qualified_type)));
12529 qualified_type = t;
12530 }
12531 else if (qualified_type == sizetype
12532 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
12533 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
12534 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
12535 qualified_type = size_type_node;
12536 }
12537
12538 /* If we do, then we can just use its DIE, if it exists. */
12539 if (qualified_type)
12540 {
12541 mod_type_die = lookup_type_die (qualified_type);
12542
12543 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
12544 dealt with specially: the DIE with the attribute, if it exists, is
12545 placed immediately after the regular DIE for the same base type. */
12546 if (mod_type_die
12547 && (!reverse_base_type
12548 || ((mod_type_die = mod_type_die->die_sib) != NULL
12549 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
12550 return mod_type_die;
12551 }
12552
12553 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12554
12555 /* Handle C typedef types. */
12556 if (name
12557 && TREE_CODE (name) == TYPE_DECL
12558 && DECL_ORIGINAL_TYPE (name)
12559 && !DECL_ARTIFICIAL (name))
12560 {
12561 tree dtype = TREE_TYPE (name);
12562
12563 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
12564 if (qualified_type == dtype && !reverse_base_type)
12565 {
12566 tree origin = decl_ultimate_origin (name);
12567
12568 /* Typedef variants that have an abstract origin don't get their own
12569 type DIE (see gen_typedef_die), so fall back on the ultimate
12570 abstract origin instead. */
12571 if (origin != NULL && origin != name)
12572 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
12573 context_die);
12574
12575 /* For a named type, use the typedef. */
12576 gen_type_die (qualified_type, context_die);
12577 return lookup_type_die (qualified_type);
12578 }
12579 else
12580 {
12581 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12582 dquals &= cv_qual_mask;
12583 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12584 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12585 /* cv-unqualified version of named type. Just use
12586 the unnamed type to which it refers. */
12587 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12588 reverse, context_die);
12589 /* Else cv-qualified version of named type; fall through. */
12590 }
12591 }
12592
12593 mod_scope = scope_die_for (type, context_die);
12594
12595 if (cv_quals)
12596 {
12597 int sub_quals = 0, first_quals = 0;
12598 unsigned i;
12599 dw_die_ref first = NULL, last = NULL;
12600
12601 /* Determine a lesser qualified type that most closely matches
12602 this one. Then generate DW_TAG_* entries for the remaining
12603 qualifiers. */
12604 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12605 cv_qual_mask);
12606 if (sub_quals && use_debug_types)
12607 {
12608 bool needed = false;
12609 /* If emitting type units, make sure the order of qualifiers
12610 is canonical. Thus, start from unqualified type if
12611 an earlier qualifier is missing in sub_quals, but some later
12612 one is present there. */
12613 for (i = 0; i < dwarf_qual_info_size; i++)
12614 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12615 needed = true;
12616 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12617 {
12618 sub_quals = 0;
12619 break;
12620 }
12621 }
12622 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12623 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12624 {
12625 /* As not all intermediate qualified DIEs have corresponding
12626 tree types, ensure that qualified DIEs in the same scope
12627 as their DW_AT_type are emitted after their DW_AT_type,
12628 only with other qualified DIEs for the same type possibly
12629 in between them. Determine the range of such qualified
12630 DIEs now (first being the base type, last being the corresponding
12631 last qualified DIE for it). */
12632 unsigned int count = 0;
12633 first = qualified_die_p (mod_type_die, &first_quals,
12634 dwarf_qual_info_size);
12635 if (first == NULL)
12636 first = mod_type_die;
12637 gcc_assert ((first_quals & ~sub_quals) == 0);
12638 for (count = 0, last = first;
12639 count < (1U << dwarf_qual_info_size);
12640 count++, last = last->die_sib)
12641 {
12642 int quals = 0;
12643 if (last == mod_scope->die_child)
12644 break;
12645 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12646 != first)
12647 break;
12648 }
12649 }
12650
12651 for (i = 0; i < dwarf_qual_info_size; i++)
12652 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12653 {
12654 dw_die_ref d;
12655 if (first && first != last)
12656 {
12657 for (d = first->die_sib; ; d = d->die_sib)
12658 {
12659 int quals = 0;
12660 qualified_die_p (d, &quals, dwarf_qual_info_size);
12661 if (quals == (first_quals | dwarf_qual_info[i].q))
12662 break;
12663 if (d == last)
12664 {
12665 d = NULL;
12666 break;
12667 }
12668 }
12669 if (d)
12670 {
12671 mod_type_die = d;
12672 continue;
12673 }
12674 }
12675 if (first)
12676 {
12677 d = new_die_raw (dwarf_qual_info[i].t);
12678 add_child_die_after (mod_scope, d, last);
12679 last = d;
12680 }
12681 else
12682 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12683 if (mod_type_die)
12684 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12685 mod_type_die = d;
12686 first_quals |= dwarf_qual_info[i].q;
12687 }
12688 }
12689 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12690 {
12691 dwarf_tag tag = DW_TAG_pointer_type;
12692 if (code == REFERENCE_TYPE)
12693 {
12694 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12695 tag = DW_TAG_rvalue_reference_type;
12696 else
12697 tag = DW_TAG_reference_type;
12698 }
12699 mod_type_die = new_die (tag, mod_scope, type);
12700
12701 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12702 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12703 add_alignment_attribute (mod_type_die, type);
12704 item_type = TREE_TYPE (type);
12705
12706 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12707 if (!ADDR_SPACE_GENERIC_P (as))
12708 {
12709 int action = targetm.addr_space.debug (as);
12710 if (action >= 0)
12711 {
12712 /* Positive values indicate an address_class. */
12713 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12714 }
12715 else
12716 {
12717 /* Negative values indicate an (inverted) segment base reg. */
12718 dw_loc_descr_ref d
12719 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12720 add_AT_loc (mod_type_die, DW_AT_segment, d);
12721 }
12722 }
12723 }
12724 else if (code == INTEGER_TYPE
12725 && TREE_TYPE (type) != NULL_TREE
12726 && subrange_type_for_debug_p (type, &low, &high))
12727 {
12728 tree bias = NULL_TREE;
12729 if (lang_hooks.types.get_type_bias)
12730 bias = lang_hooks.types.get_type_bias (type);
12731 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12732 item_type = TREE_TYPE (type);
12733 }
12734 else if (is_base_type (type))
12735 {
12736 mod_type_die = base_type_die (type, reverse);
12737
12738 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
12739 if (reverse_base_type)
12740 {
12741 dw_die_ref after_die
12742 = modified_type_die (type, cv_quals, false, context_die);
12743 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
12744 }
12745 else
12746 add_child_die (comp_unit_die (), mod_type_die);
12747
12748 add_pubtype (type, mod_type_die);
12749 }
12750 else
12751 {
12752 gen_type_die (type, context_die);
12753
12754 /* We have to get the type_main_variant here (and pass that to the
12755 `lookup_type_die' routine) because the ..._TYPE node we have
12756 might simply be a *copy* of some original type node (where the
12757 copy was created to help us keep track of typedef names) and
12758 that copy might have a different TYPE_UID from the original
12759 ..._TYPE node. */
12760 if (TREE_CODE (type) == FUNCTION_TYPE
12761 || TREE_CODE (type) == METHOD_TYPE)
12762 {
12763 /* For function/method types, can't just use type_main_variant here,
12764 because that can have different ref-qualifiers for C++,
12765 but try to canonicalize. */
12766 tree main = TYPE_MAIN_VARIANT (type);
12767 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12768 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
12769 && check_base_type (t, main)
12770 && check_lang_type (t, type))
12771 return lookup_type_die (t);
12772 return lookup_type_die (type);
12773 }
12774 else if (TREE_CODE (type) != VECTOR_TYPE
12775 && TREE_CODE (type) != ARRAY_TYPE)
12776 return lookup_type_die (type_main_variant (type));
12777 else
12778 /* Vectors have the debugging information in the type,
12779 not the main variant. */
12780 return lookup_type_die (type);
12781 }
12782
12783 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12784 don't output a DW_TAG_typedef, since there isn't one in the
12785 user's program; just attach a DW_AT_name to the type.
12786 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12787 if the base type already has the same name. */
12788 if (name
12789 && ((TREE_CODE (name) != TYPE_DECL
12790 && (qualified_type == TYPE_MAIN_VARIANT (type)
12791 || (cv_quals == TYPE_UNQUALIFIED)))
12792 || (TREE_CODE (name) == TYPE_DECL
12793 && TREE_TYPE (name) == qualified_type
12794 && DECL_NAME (name))))
12795 {
12796 if (TREE_CODE (name) == TYPE_DECL)
12797 /* Could just call add_name_and_src_coords_attributes here,
12798 but since this is a builtin type it doesn't have any
12799 useful source coordinates anyway. */
12800 name = DECL_NAME (name);
12801 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12802 }
12803 /* This probably indicates a bug. */
12804 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12805 {
12806 name = TYPE_IDENTIFIER (type);
12807 add_name_attribute (mod_type_die,
12808 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12809 }
12810
12811 if (qualified_type && !reverse_base_type)
12812 equate_type_number_to_die (qualified_type, mod_type_die);
12813
12814 if (item_type)
12815 /* We must do this after the equate_type_number_to_die call, in case
12816 this is a recursive type. This ensures that the modified_type_die
12817 recursion will terminate even if the type is recursive. Recursive
12818 types are possible in Ada. */
12819 sub_die = modified_type_die (item_type,
12820 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12821 reverse,
12822 context_die);
12823
12824 if (sub_die != NULL)
12825 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12826
12827 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12828 if (TYPE_ARTIFICIAL (type))
12829 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12830
12831 return mod_type_die;
12832 }
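/* Illustrative sketch: for a C declaration such as "const volatile int x;",
   the qualifier handling above typically produces a chain of the form

       DW_TAG_volatile_type -> DW_TAG_const_type -> DW_TAG_base_type ("int")

   with each link being a DW_AT_type reference, although the exact nesting
   depends on which qualified variants of the type already have DIEs and,
   with type units, on the canonical ordering enforced above.  */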
12833
12834 /* Generate DIEs for the generic parameters of T.
12835 T must be either a generic type or a generic function.
12836 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12837
12838 static void
12839 gen_generic_params_dies (tree t)
12840 {
12841 tree parms, args;
12842 int parms_num, i;
12843 dw_die_ref die = NULL;
12844 int non_default;
12845
12846 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12847 return;
12848
12849 if (TYPE_P (t))
12850 die = lookup_type_die (t);
12851 else if (DECL_P (t))
12852 die = lookup_decl_die (t);
12853
12854 gcc_assert (die);
12855
12856 parms = lang_hooks.get_innermost_generic_parms (t);
12857 if (!parms)
12858 /* T has no generic parameter. It means T is neither a generic type
12859 nor a generic function. End of story. */
12860 return;
12861
12862 parms_num = TREE_VEC_LENGTH (parms);
12863 args = lang_hooks.get_innermost_generic_args (t);
12864 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12865 non_default = int_cst_value (TREE_CHAIN (args));
12866 else
12867 non_default = TREE_VEC_LENGTH (args);
12868 for (i = 0; i < parms_num; i++)
12869 {
12870 tree parm, arg, arg_pack_elems;
12871 dw_die_ref parm_die;
12872
12873 parm = TREE_VEC_ELT (parms, i);
12874 arg = TREE_VEC_ELT (args, i);
12875 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12876 gcc_assert (parm && TREE_VALUE (parm) && arg);
12877
12878 if (parm && TREE_VALUE (parm) && arg)
12879 {
12880 /* If PARM represents a template parameter pack,
12881 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12882 by DW_TAG_template_*_parameter DIEs for the argument
12883 pack elements of ARG. Note that ARG would then be
12884 an argument pack. */
12885 if (arg_pack_elems)
12886 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12887 arg_pack_elems,
12888 die);
12889 else
12890 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12891 true /* emit name */, die);
12892 if (i >= non_default)
12893 add_AT_flag (parm_die, DW_AT_default_value, 1);
12894 }
12895 }
12896 }
12897
12898 /* Create and return a DIE for PARM which should be
12899 the representation of a generic type parameter.
12900 For instance, in the C++ front end, PARM would be a template parameter.
12901 ARG is the argument to PARM.
12902 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set to
12903 the name of PARM.
12904 PARENT_DIE is the parent DIE to which the newly created DIE should be added
12905 as a child node. */
12906
12907 static dw_die_ref
12908 generic_parameter_die (tree parm, tree arg,
12909 bool emit_name_p,
12910 dw_die_ref parent_die)
12911 {
12912 dw_die_ref tmpl_die = NULL;
12913 const char *name = NULL;
12914
12915 if (!parm || !DECL_NAME (parm) || !arg)
12916 return NULL;
12917
12918 /* We support non-type generic parameters and arguments,
12919 type generic parameters and arguments, as well as
12920 generic generic parameters (a.k.a. template template parameters in C++)
12921 and arguments. */
12922 if (TREE_CODE (parm) == PARM_DECL)
12923 /* PARM is a non-type generic parameter. */
12924 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12925 else if (TREE_CODE (parm) == TYPE_DECL)
12926 /* PARM is a type generic parameter. */
12927 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12928 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12929 /* PARM is a generic generic parameter.
12930 Its DIE is a GNU extension. It shall have a
12931 DW_AT_name attribute to represent the name of the template template
12932 parameter, and a DW_AT_GNU_template_name attribute to represent the
12933 name of the template template argument. */
12934 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12935 parent_die, parm);
12936 else
12937 gcc_unreachable ();
12938
12939 if (tmpl_die)
12940 {
12941 tree tmpl_type;
12942
12943 /* If PARM is a generic parameter pack, it means we are
12944 emitting debug info for a template argument pack element.
12945 In other terms, ARG is a template argument pack element.
12946 In that case, we don't emit any DW_AT_name attribute for
12947 the die. */
12948 if (emit_name_p)
12949 {
12950 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12951 gcc_assert (name);
12952 add_AT_string (tmpl_die, DW_AT_name, name);
12953 }
12954
12955 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12956 {
12957 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12958 TMPL_DIE should have a child DW_AT_type attribute that is set
12959 to the type of the argument to PARM, which is ARG.
12960 If PARM is a type generic parameter, TMPL_DIE should have a
12961 child DW_AT_type that is set to ARG. */
12962 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12963 add_type_attribute (tmpl_die, tmpl_type,
12964 (TREE_THIS_VOLATILE (tmpl_type)
12965 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12966 false, parent_die);
12967 }
12968 else
12969 {
12970 /* So TMPL_DIE is a DIE representing a
12971 generic generic template parameter, a.k.a. a template template
12972 parameter in C++, and ARG is a template. */
12973
12974 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12975 to the name of the argument. */
12976 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12977 if (name)
12978 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12979 }
12980
12981 if (TREE_CODE (parm) == PARM_DECL)
12982 /* So PARM is a non-type generic parameter.
12983 DWARF3 5.6.8 says we must set a DW_AT_const_value child
12984 attribute of TMPL_DIE whose value represents the value
12985 of ARG.
12986 We must be careful here:
12987 the value of ARG might reference some function decls.
12988 We might currently be emitting debug info for a generic
12989 type, and since types are emitted before function decls, we
12990 don't know whether the function decls referenced by ARG will
12991 actually be emitted after the cgraph computations.
12992 So we must defer the generation of DW_AT_const_value until
12993 cgraph is ready. */
12994 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
12995 }
12996
12997 return tmpl_die;
12998 }
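/* Illustrative example: for a hypothetical C++ instantiation S<int, 3> of
   "template <typename T, int N> struct S;", the front end hands us a
   TYPE_DECL for T and a PARM_DECL for N, so the code above creates a
   DW_TAG_template_type_param whose DW_AT_type refers to "int", and a
   DW_TAG_template_value_param whose DW_AT_type refers to "int" and whose
   DW_AT_const_value (3) is filled in later, once cgraph is ready, through
   append_entry_to_tmpl_value_parm_die_table.  */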
12999
13000 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13001 PARM_PACK, which must be a template parameter pack. The returned DIE
13002 will be a child DIE of PARENT_DIE. */
13003
13004 static dw_die_ref
13005 template_parameter_pack_die (tree parm_pack,
13006 tree parm_pack_args,
13007 dw_die_ref parent_die)
13008 {
13009 dw_die_ref die;
13010 int j;
13011
13012 gcc_assert (parent_die && parm_pack);
13013
13014 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13015 add_name_and_src_coords_attributes (die, parm_pack);
13016 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13017 generic_parameter_die (parm_pack,
13018 TREE_VEC_ELT (parm_pack_args, j),
13019 false /* Don't emit DW_AT_name */,
13020 die);
13021 return die;
13022 }
13023
13024 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13025 an enumerated type. */
13026
13027 static inline int
13028 type_is_enum (const_tree type)
13029 {
13030 return TREE_CODE (type) == ENUMERAL_TYPE;
13031 }
13032
13033 /* Return the DBX register number described by a given RTL node. */
13034
13035 static unsigned int
13036 dbx_reg_number (const_rtx rtl)
13037 {
13038 unsigned regno = REGNO (rtl);
13039
13040 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13041
13042 #ifdef LEAF_REG_REMAP
13043 if (crtl->uses_only_leaf_regs)
13044 {
13045 int leaf_reg = LEAF_REG_REMAP (regno);
13046 if (leaf_reg != -1)
13047 regno = (unsigned) leaf_reg;
13048 }
13049 #endif
13050
13051 regno = DBX_REGISTER_NUMBER (regno);
13052 gcc_assert (regno != INVALID_REGNUM);
13053 return regno;
13054 }
13055
13056 /* Optionally add a DW_OP_piece term to a location description expression.
13057 DW_OP_piece is only added if the location description expression doesn't
13058 already end with DW_OP_piece. */
13059
13060 static void
13061 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13062 {
13063 dw_loc_descr_ref loc;
13064
13065 if (*list_head != NULL)
13066 {
13067 /* Find the end of the chain. */
13068 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13069 ;
13070
13071 if (loc->dw_loc_opc != DW_OP_piece)
13072 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13073 }
13074 }
13075
13076 /* Return a location descriptor that designates a machine register or
13077 zero if there is none. */
13078
13079 static dw_loc_descr_ref
13080 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13081 {
13082 rtx regs;
13083
13084 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13085 return 0;
13086
13087 /* We only use "frame base" when we're sure we're talking about the
13088 post-prologue local stack frame. We do this by *not* running
13089 register elimination until this point, and recognizing the special
13090 argument pointer and soft frame pointer rtx's.
13091 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13092 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13093 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13094 {
13095 dw_loc_descr_ref result = NULL;
13096
13097 if (dwarf_version >= 4 || !dwarf_strict)
13098 {
13099 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13100 initialized);
13101 if (result)
13102 add_loc_descr (&result,
13103 new_loc_descr (DW_OP_stack_value, 0, 0));
13104 }
13105 return result;
13106 }
13107
13108 regs = targetm.dwarf_register_span (rtl);
13109
13110 if (REG_NREGS (rtl) > 1 || regs)
13111 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13112 else
13113 {
13114 unsigned int dbx_regnum = dbx_reg_number (rtl);
13115 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13116 return 0;
13117 return one_reg_loc_descriptor (dbx_regnum, initialized);
13118 }
13119 }
13120
13121 /* Return a location descriptor that designates a machine register for
13122 a given hard register number. */
13123
13124 static dw_loc_descr_ref
13125 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13126 {
13127 dw_loc_descr_ref reg_loc_descr;
13128
13129 if (regno <= 31)
13130 reg_loc_descr
13131 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13132 else
13133 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13134
13135 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13136 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13137
13138 return reg_loc_descr;
13139 }
13140
13141 /* Given an RTL of a register, return a location descriptor that
13142 designates a value that spans more than one register. */
13143
13144 static dw_loc_descr_ref
13145 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13146 enum var_init_status initialized)
13147 {
13148 int size, i;
13149 dw_loc_descr_ref loc_result = NULL;
13150
13151 /* Simple, contiguous registers. */
13152 if (regs == NULL_RTX)
13153 {
13154 unsigned reg = REGNO (rtl);
13155 int nregs;
13156
13157 #ifdef LEAF_REG_REMAP
13158 if (crtl->uses_only_leaf_regs)
13159 {
13160 int leaf_reg = LEAF_REG_REMAP (reg);
13161 if (leaf_reg != -1)
13162 reg = (unsigned) leaf_reg;
13163 }
13164 #endif
13165
13166 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13167 nregs = REG_NREGS (rtl);
13168
13169 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
13170
13171 loc_result = NULL;
13172 while (nregs--)
13173 {
13174 dw_loc_descr_ref t;
13175
13176 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13177 VAR_INIT_STATUS_INITIALIZED);
13178 add_loc_descr (&loc_result, t);
13179 add_loc_descr_op_piece (&loc_result, size);
13180 ++reg;
13181 }
13182 return loc_result;
13183 }
13184
13185 /* Now onto stupid register sets in non-contiguous locations. */
13186
13187 gcc_assert (GET_CODE (regs) == PARALLEL);
13188
13189 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
13190 loc_result = NULL;
13191
13192 for (i = 0; i < XVECLEN (regs, 0); ++i)
13193 {
13194 dw_loc_descr_ref t;
13195
13196 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13197 VAR_INIT_STATUS_INITIALIZED);
13198 add_loc_descr (&loc_result, t);
13199 add_loc_descr_op_piece (&loc_result, size);
13200 }
13201
13202 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13203 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13204 return loc_result;
13205 }
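/* Illustrative example: assuming a target where a 16-byte value lives in two
   consecutive 8-byte hard registers mapping to DWARF registers 0 and 1, the
   contiguous case above emits

       DW_OP_reg0 DW_OP_piece 8  DW_OP_reg1 DW_OP_piece 8

   and a PARALLEL returned by targetm.dwarf_register_span yields the same
   kind of reg/piece pairs, one per element it lists.  */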
13206
13207 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13208
13209 /* Return a location descriptor that designates a constant i,
13210 as a compound operation from constant (i >> shift), constant shift
13211 and DW_OP_shl. */
13212
13213 static dw_loc_descr_ref
13214 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13215 {
13216 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13217 add_loc_descr (&ret, int_loc_descriptor (shift));
13218 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13219 return ret;
13220 }
13221
13222 /* Return a location descriptor that designates a constant. */
13223
13224 static dw_loc_descr_ref
13225 int_loc_descriptor (HOST_WIDE_INT i)
13226 {
13227 enum dwarf_location_atom op;
13228
13229 /* Pick the smallest representation of a constant, rather than just
13230 defaulting to the LEB encoding. */
13231 if (i >= 0)
13232 {
13233 int clz = clz_hwi (i);
13234 int ctz = ctz_hwi (i);
13235 if (i <= 31)
13236 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13237 else if (i <= 0xff)
13238 op = DW_OP_const1u;
13239 else if (i <= 0xffff)
13240 op = DW_OP_const2u;
13241 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13242 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13243 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13244 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13245 while DW_OP_const4u is 5 bytes. */
13246 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13247 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13248 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13249 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13250 while DW_OP_const4u is 5 bytes. */
13251 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13252
13253 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13254 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13255 <= 4)
13256 {
13257 /* As i >= 2**31, the double cast above will yield a negative number.
13258 Since wrapping is defined in DWARF expressions we can output big
13259 positive integers as small negative ones, regardless of the size
13260 of host wide ints.
13261
13262 Here, since the evaluator will handle 32-bit values and since i >=
13263 2**31, we know it's going to be interpreted as a negative literal:
13264 store it this way if we can do better than 5 bytes this way. */
13265 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13266 }
13267 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13268 op = DW_OP_const4u;
13269
13270 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13271 least 6 bytes: see if we can do better before falling back to it. */
13272 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13273 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13274 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13275 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13276 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13277 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13278 >= HOST_BITS_PER_WIDE_INT)
13279 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13280 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13281 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13282 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13283 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13284 && size_of_uleb128 (i) > 6)
13285 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13286 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13287 else
13288 op = DW_OP_constu;
13289 }
13290 else
13291 {
13292 if (i >= -0x80)
13293 op = DW_OP_const1s;
13294 else if (i >= -0x8000)
13295 op = DW_OP_const2s;
13296 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13297 {
13298 if (size_of_int_loc_descriptor (i) < 5)
13299 {
13300 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13301 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13302 return ret;
13303 }
13304 op = DW_OP_const4s;
13305 }
13306 else
13307 {
13308 if (size_of_int_loc_descriptor (i)
13309 < (unsigned long) 1 + size_of_sleb128 (i))
13310 {
13311 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13312 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13313 return ret;
13314 }
13315 op = DW_OP_consts;
13316 }
13317 }
13318
13319 return new_loc_descr (op, i, 0);
13320 }
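/* Worked example: assuming a 64-bit HOST_WIDE_INT, the constant
   0x1200000000 (0x12 << 32) has clz == 27 and ctz == 33, so the first
   shift-based case above applies and int_shift_loc_descriptor emits

       DW_OP_lit18  DW_OP_const1u 32  DW_OP_shl        (4 bytes)

   instead of DW_OP_constu, whose ULEB128 operand alone would need
   6 bytes.  */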
13321
13322 /* Likewise, for unsigned constants. */
13323
13324 static dw_loc_descr_ref
13325 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13326 {
13327 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13328 const unsigned HOST_WIDE_INT max_uint
13329 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13330
13331 /* If possible, use the clever signed constants handling. */
13332 if (i <= max_int)
13333 return int_loc_descriptor ((HOST_WIDE_INT) i);
13334
13335 /* Here, we are left with positive numbers that cannot be represented as
13336 HOST_WIDE_INT, i.e.:
13337 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13338
13339 Using DW_OP_const4/8/./u operation to encode them consumes a lot of bytes
13340 whereas it may be better to output a negative integer: thanks to integer
13341 wrapping, we know that:
13342 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13343 = x - 2 * (max (HOST_WIDE_INT) + 1)
13344 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13345 small negative integers. Let's try that in cases it will clearly improve
13346 the encoding: there is no gain turning DW_OP_const4u into
13347 DW_OP_const4s. */
13348 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13349 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13350 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13351 {
13352 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13353
13354 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13355 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13356 const HOST_WIDE_INT second_shift
13357 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13358
13359 /* So we finally have:
13360 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13361 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13362 return int_loc_descriptor (second_shift);
13363 }
13364
13365 /* Last chance: fallback to a simple constant operation. */
13366 return new_loc_descr
13367 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13368 ? DW_OP_const4u
13369 : DW_OP_const8u,
13370 i, 0);
13371 }
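/* Worked example: assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT, the unsigned constant 0xffffffffffffff00 is close enough
   to the maximum that the wrapping trick above rewrites it as the signed
   constant -0x100, which int_loc_descriptor encodes as DW_OP_const2s
   (3 bytes) rather than the 9 bytes DW_OP_const8u would take.  */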
13372
13373 /* Generate and return a location description that computes the unsigned
13374 comparison of the two stack top entries (a OP b where b is the top-most
13375 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13376 LE_EXPR, GT_EXPR or GE_EXPR. */
13377
13378 static dw_loc_descr_ref
13379 uint_comparison_loc_list (enum tree_code kind)
13380 {
13381 enum dwarf_location_atom op, flip_op;
13382 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13383
13384 switch (kind)
13385 {
13386 case LT_EXPR:
13387 op = DW_OP_lt;
13388 break;
13389 case LE_EXPR:
13390 op = DW_OP_le;
13391 break;
13392 case GT_EXPR:
13393 op = DW_OP_gt;
13394 break;
13395 case GE_EXPR:
13396 op = DW_OP_ge;
13397 break;
13398 default:
13399 gcc_unreachable ();
13400 }
13401
13402 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13403 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13404
13405 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13406 possible to perform unsigned comparisons: we just have to distinguish
13407 three cases:
13408
13409 1. when a and b have the same sign (as signed integers); then we should
13410 return: a OP(signed) b;
13411
13412 2. when a is a negative signed integer while b is a positive one, then a
13413 is a greater unsigned integer than b;
13414 3. likewise, when a is positive and b is negative, b is the greater one.
13415
13416 So first, compare the sign of the two operands. */
13417 ret = new_loc_descr (DW_OP_over, 0, 0);
13418 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13419 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13420 /* If they have different signs (i.e. they have different sign bits), then
13421 the stack top value has now the sign bit set and thus it's smaller than
13422 zero. */
13423 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13424 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13425 add_loc_descr (&ret, bra_node);
13426
13427 /* We are in case 1. At this point, we know both operands have the same
13428 sign, so it's safe to use the built-in signed comparison. */
13429 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13430 add_loc_descr (&ret, jmp_node);
13431
13432 /* We are in case 2. Here, we know both operands do not have the same sign,
13433 so we have to flip the signed comparison. */
13434 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13435 tmp = new_loc_descr (flip_op, 0, 0);
13436 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13437 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13438 add_loc_descr (&ret, tmp);
13439
13440 /* This dummy operation is necessary to make the two branches join. */
13441 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13442 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13443 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13444 add_loc_descr (&ret, tmp);
13445
13446 return ret;
13447 }
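/* Worked example: for kind == LT_EXPR on a 32-bit evaluation, comparing
   a == 0xffffffff against b == 1 (so the unsigned result of a < b should
   be 0): the sign bits differ, the DW_OP_bra branch is taken and the
   flipped operator DW_OP_gt is applied to the signed values (-1 > 1),
   correctly yielding 0; had both operands carried the same sign bit, the
   straight signed DW_OP_lt path would have been used instead.  */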
13448
13449 /* Likewise, but takes the location description lists (might be destructive on
13450 them). Return NULL if either is NULL or if concatenation fails. */
13451
13452 static dw_loc_list_ref
13453 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13454 enum tree_code kind)
13455 {
13456 if (left == NULL || right == NULL)
13457 return NULL;
13458
13459 add_loc_list (&left, right);
13460 if (left == NULL)
13461 return NULL;
13462
13463 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13464 return left;
13465 }
13466
13467 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13468 without actually allocating it. */
13469
13470 static unsigned long
13471 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13472 {
13473 return size_of_int_loc_descriptor (i >> shift)
13474 + size_of_int_loc_descriptor (shift)
13475 + 1;
13476 }
13477
13478 /* Return size_of_locs (int_loc_descriptor (i)) without
13479 actually allocating it. */
13480
13481 static unsigned long
13482 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13483 {
13484 unsigned long s;
13485
13486 if (i >= 0)
13487 {
13488 int clz, ctz;
13489 if (i <= 31)
13490 return 1;
13491 else if (i <= 0xff)
13492 return 2;
13493 else if (i <= 0xffff)
13494 return 3;
13495 clz = clz_hwi (i);
13496 ctz = ctz_hwi (i);
13497 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13498 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13499 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13500 - clz - 5);
13501 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13502 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13503 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13504 - clz - 8);
13505 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13506 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13507 <= 4)
13508 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13509 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13510 return 5;
13511 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13512 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13513 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13514 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13515 - clz - 8);
13516 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13517 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13518 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13519 - clz - 16);
13520 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13521 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13522 && s > 6)
13523 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13524 - clz - 32);
13525 else
13526 return 1 + s;
13527 }
13528 else
13529 {
13530 if (i >= -0x80)
13531 return 2;
13532 else if (i >= -0x8000)
13533 return 3;
13534 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13535 {
13536 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13537 {
13538 s = size_of_int_loc_descriptor (-i) + 1;
13539 if (s < 5)
13540 return s;
13541 }
13542 return 5;
13543 }
13544 else
13545 {
13546 unsigned long r = 1 + size_of_sleb128 (i);
13547 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13548 {
13549 s = size_of_int_loc_descriptor (-i) + 1;
13550 if (s < r)
13551 return s;
13552 }
13553 return r;
13554 }
13555 }
13556 }
13557
13558 /* Return loc description representing "address" of integer value.
13559 This can appear only as a top-level expression. */
13560
13561 static dw_loc_descr_ref
13562 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13563 {
13564 int litsize;
13565 dw_loc_descr_ref loc_result = NULL;
13566
13567 if (!(dwarf_version >= 4 || !dwarf_strict))
13568 return NULL;
13569
13570 litsize = size_of_int_loc_descriptor (i);
13571 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13572 is more compact. For DW_OP_stack_value we need:
13573 litsize + 1 (DW_OP_stack_value)
13574 and for DW_OP_implicit_value:
13575 1 (DW_OP_implicit_value) + 1 (length) + size. */
13576 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13577 {
13578 loc_result = int_loc_descriptor (i);
13579 add_loc_descr (&loc_result,
13580 new_loc_descr (DW_OP_stack_value, 0, 0));
13581 return loc_result;
13582 }
13583
13584 loc_result = new_loc_descr (DW_OP_implicit_value,
13585 size, 0);
13586 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13587 loc_result->dw_loc_oprnd2.v.val_int = i;
13588 return loc_result;
13589 }
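/* Worked example: for a 4-byte value 5 on a target with
   DWARF2_ADDR_SIZE >= 4, litsize is 1 (DW_OP_lit5), so
   "DW_OP_lit5 DW_OP_stack_value" (2 bytes) wins over
   "DW_OP_implicit_value 4 05 00 00 00" (6 bytes); a value wider than the
   DWARF address size always gets the DW_OP_implicit_value form.  */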
13590
13591 /* Return a location descriptor that designates a base+offset location. */
13592
13593 static dw_loc_descr_ref
13594 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
13595 enum var_init_status initialized)
13596 {
13597 unsigned int regno;
13598 dw_loc_descr_ref result;
13599 dw_fde_ref fde = cfun->fde;
13600
13601 /* We only use "frame base" when we're sure we're talking about the
13602 post-prologue local stack frame. We do this by *not* running
13603 register elimination until this point, and recognizing the special
13604 argument pointer and soft frame pointer rtx's. */
13605 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13606 {
13607 rtx elim = (ira_use_lra_p
13608 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13609 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13610
13611 if (elim != reg)
13612 {
13613 if (GET_CODE (elim) == PLUS)
13614 {
13615 offset += INTVAL (XEXP (elim, 1));
13616 elim = XEXP (elim, 0);
13617 }
13618 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13619 && (elim == hard_frame_pointer_rtx
13620 || elim == stack_pointer_rtx))
13621 || elim == (frame_pointer_needed
13622 ? hard_frame_pointer_rtx
13623 : stack_pointer_rtx));
13624
13625 /* If the drap register is used to align the stack, use frame
13626 pointer + offset to access stack variables. If the stack
13627 is aligned without drap, use stack pointer + offset to
13628 access stack variables. */
13629 if (crtl->stack_realign_tried
13630 && reg == frame_pointer_rtx)
13631 {
13632 int base_reg
13633 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13634 ? HARD_FRAME_POINTER_REGNUM
13635 : REGNO (elim));
13636 return new_reg_loc_descr (base_reg, offset);
13637 }
13638
13639 gcc_assert (frame_pointer_fb_offset_valid);
13640 offset += frame_pointer_fb_offset;
13641 return new_loc_descr (DW_OP_fbreg, offset, 0);
13642 }
13643 }
13644
13645 regno = REGNO (reg);
13646 #ifdef LEAF_REG_REMAP
13647 if (crtl->uses_only_leaf_regs)
13648 {
13649 int leaf_reg = LEAF_REG_REMAP (regno);
13650 if (leaf_reg != -1)
13651 regno = (unsigned) leaf_reg;
13652 }
13653 #endif
13654 regno = DWARF_FRAME_REGNUM (regno);
13655
13656 if (!optimize && fde
13657 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
13658 {
13659 /* Use cfa+offset to represent the location of arguments passed
13660 on the stack when drap is used to align the stack.
13661 Only do this when not optimizing; for optimized code var-tracking
13662 is supposed to track where the arguments live, and the register
13663 used as vdrap or drap in some spot might be used for something
13664 else in other parts of the routine. */
13665 return new_loc_descr (DW_OP_fbreg, offset, 0);
13666 }
13667
13668 if (regno <= 31)
13669 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
13670 offset, 0);
13671 else
13672 result = new_loc_descr (DW_OP_bregx, regno, offset);
13673
13674 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13675 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13676
13677 return result;
13678 }
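/* Illustrative example: a local variable 8 bytes above the soft frame
   pointer is, after elimination, described relative to the frame base as
   DW_OP_fbreg <8 + frame_pointer_fb_offset>, whereas a location based on an
   ordinary hard register, say DWARF register 6 at offset -16, comes out as
   DW_OP_breg6 -16 (or DW_OP_bregx for register numbers above 31).  */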
13679
13680 /* Return true if this RTL expression describes a base+offset calculation. */
13681
13682 static inline int
13683 is_based_loc (const_rtx rtl)
13684 {
13685 return (GET_CODE (rtl) == PLUS
13686 && ((REG_P (XEXP (rtl, 0))
13687 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13688 && CONST_INT_P (XEXP (rtl, 1)))));
13689 }
13690
13691 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13692 failed. */
13693
13694 static dw_loc_descr_ref
13695 tls_mem_loc_descriptor (rtx mem)
13696 {
13697 tree base;
13698 dw_loc_descr_ref loc_result;
13699
13700 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13701 return NULL;
13702
13703 base = get_base_address (MEM_EXPR (mem));
13704 if (base == NULL
13705 || !VAR_P (base)
13706 || !DECL_THREAD_LOCAL_P (base))
13707 return NULL;
13708
13709 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13710 if (loc_result == NULL)
13711 return NULL;
13712
13713 if (MEM_OFFSET (mem))
13714 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13715
13716 return loc_result;
13717 }
13718
13719 /* Output debug info about the reason why we failed to expand an expression
13720 as a DWARF expression. */
13721
13722 static void
13723 expansion_failed (tree expr, rtx rtl, char const *reason)
13724 {
13725 if (dump_file && (dump_flags & TDF_DETAILS))
13726 {
13727 fprintf (dump_file, "Failed to expand as dwarf: ");
13728 if (expr)
13729 print_generic_expr (dump_file, expr, dump_flags);
13730 if (rtl)
13731 {
13732 fprintf (dump_file, "\n");
13733 print_rtl (dump_file, rtl);
13734 }
13735 fprintf (dump_file, "\nReason: %s\n", reason);
13736 }
13737 }
13738
13739 /* Helper function for const_ok_for_output. */
13740
13741 static bool
13742 const_ok_for_output_1 (rtx rtl)
13743 {
13744 if (targetm.const_not_ok_for_debug_p (rtl))
13745 {
13746 if (GET_CODE (rtl) != UNSPEC)
13747 {
13748 expansion_failed (NULL_TREE, rtl,
13749 "Expression rejected for debug by the backend.\n");
13750 return false;
13751 }
13752
13753 /* If delegitimize_address couldn't do anything with the UNSPEC, and
13754 the target hook doesn't explicitly allow it in debug info, assume
13755 we can't express it in the debug info. */
13756 /* Don't complain about TLS UNSPECs, those are just too hard to
13757 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13758 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13759 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13760 if (flag_checking
13761 && (XVECLEN (rtl, 0) == 0
13762 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13763 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13764 inform (current_function_decl
13765 ? DECL_SOURCE_LOCATION (current_function_decl)
13766 : UNKNOWN_LOCATION,
13767 #if NUM_UNSPEC_VALUES > 0
13768 "non-delegitimized UNSPEC %s (%d) found in variable location",
13769 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13770 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13771 XINT (rtl, 1));
13772 #else
13773 "non-delegitimized UNSPEC %d found in variable location",
13774 XINT (rtl, 1));
13775 #endif
13776 expansion_failed (NULL_TREE, rtl,
13777 "UNSPEC hasn't been delegitimized.\n");
13778 return false;
13779 }
13780
13781 /* FIXME: Refer to PR60655. It is possible for simplification
13782 of rtl expressions in var tracking to produce such expressions.
13783 We should really identify / validate expressions
13784 enclosed in CONST that can be handled by assemblers on various
13785 targets and only handle legitimate cases here. */
13786 if (GET_CODE (rtl) != SYMBOL_REF)
13787 {
13788 if (GET_CODE (rtl) == NOT)
13789 return false;
13790 return true;
13791 }
13792
13793 if (CONSTANT_POOL_ADDRESS_P (rtl))
13794 {
13795 bool marked;
13796 get_pool_constant_mark (rtl, &marked);
13797 /* If all references to this pool constant were optimized away,
13798 it was not output and thus we can't represent it. */
13799 if (!marked)
13800 {
13801 expansion_failed (NULL_TREE, rtl,
13802 "Constant was removed from constant pool.\n");
13803 return false;
13804 }
13805 }
13806
13807 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13808 return false;
13809
13810 /* Avoid references to external symbols in debug info: on several targets
13811 the linker might even refuse to link when linking a shared library,
13812 and in many other cases the relocations for .debug_info/.debug_loc are
13813 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
13814 to be defined within the same shared library or executable, are fine. */
13815 if (SYMBOL_REF_EXTERNAL_P (rtl))
13816 {
13817 tree decl = SYMBOL_REF_DECL (rtl);
13818
13819 if (decl == NULL || !targetm.binds_local_p (decl))
13820 {
13821 expansion_failed (NULL_TREE, rtl,
13822 "Symbol not defined in current TU.\n");
13823 return false;
13824 }
13825 }
13826
13827 return true;
13828 }
13829
13830 /* Return true if constant RTL can be emitted in DW_OP_addr or
13831 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13832 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13833
13834 static bool
13835 const_ok_for_output (rtx rtl)
13836 {
13837 if (GET_CODE (rtl) == SYMBOL_REF)
13838 return const_ok_for_output_1 (rtl);
13839
13840 if (GET_CODE (rtl) == CONST)
13841 {
13842 subrtx_var_iterator::array_type array;
13843 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13844 if (!const_ok_for_output_1 (*iter))
13845 return false;
13846 return true;
13847 }
13848
13849 return true;
13850 }
13851
13852 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13853 if possible, NULL otherwise. */
13854
13855 static dw_die_ref
13856 base_type_for_mode (machine_mode mode, bool unsignedp)
13857 {
13858 dw_die_ref type_die;
13859 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13860
13861 if (type == NULL)
13862 return NULL;
13863 switch (TREE_CODE (type))
13864 {
13865 case INTEGER_TYPE:
13866 case REAL_TYPE:
13867 break;
13868 default:
13869 return NULL;
13870 }
13871 type_die = lookup_type_die (type);
13872 if (!type_die)
13873 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13874 comp_unit_die ());
13875 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13876 return NULL;
13877 return type_die;
13878 }
13879
13880 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
13881 type matching MODE, or, if MODE is narrower than or as wide as
13882 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
13883 possible. */
13884
13885 static dw_loc_descr_ref
13886 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
13887 {
13888 machine_mode outer_mode = mode;
13889 dw_die_ref type_die;
13890 dw_loc_descr_ref cvt;
13891
13892 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13893 {
13894 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13895 return op;
13896 }
13897 type_die = base_type_for_mode (outer_mode, 1);
13898 if (type_die == NULL)
13899 return NULL;
13900 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13901 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13902 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13903 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13904 add_loc_descr (&op, cvt);
13905 return op;
13906 }
13907
13908 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13909
13910 static dw_loc_descr_ref
13911 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13912 dw_loc_descr_ref op1)
13913 {
13914 dw_loc_descr_ref ret = op0;
13915 add_loc_descr (&ret, op1);
13916 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13917 if (STORE_FLAG_VALUE != 1)
13918 {
13919 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
13920 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13921 }
13922 return ret;
13923 }
13924
13925 /* Subroutine of scompare_loc_descriptor for the case in which we're
13926 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
13927 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
13928
13929 static dw_loc_descr_ref
13930 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
13931 scalar_int_mode op_mode,
13932 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
13933 {
13934 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
13935 dw_loc_descr_ref cvt;
13936
13937 if (type_die == NULL)
13938 return NULL;
13939 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13940 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13941 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13942 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13943 add_loc_descr (&op0, cvt);
13944 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13945 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13946 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13947 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13948 add_loc_descr (&op1, cvt);
13949 return compare_loc_descriptor (op, op0, op1);
13950 }
13951
13952 /* Subroutine of scompare_loc_descriptor for the case in which we're
13953 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
13954 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
13955
13956 static dw_loc_descr_ref
13957 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
13958 scalar_int_mode op_mode,
13959 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
13960 {
13961 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
13962 /* For eq/ne, if the operands are known to be zero-extended,
13963 there is no need to do the fancy shifting up. */
13964 if (op == DW_OP_eq || op == DW_OP_ne)
13965 {
13966 dw_loc_descr_ref last0, last1;
13967 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
13968 ;
13969 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
13970 ;
13971 /* deref_size zero extends, and for constants we can check
13972 whether they are zero extended or not. */
13973 if (((last0->dw_loc_opc == DW_OP_deref_size
13974 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13975 || (CONST_INT_P (XEXP (rtl, 0))
13976 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
13977 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
13978 && ((last1->dw_loc_opc == DW_OP_deref_size
13979 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13980 || (CONST_INT_P (XEXP (rtl, 1))
13981 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
13982 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
13983 return compare_loc_descriptor (op, op0, op1);
13984
13985 /* EQ/NE comparison against constant in narrower type than
13986 DWARF2_ADDR_SIZE can be performed either as
13987 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
13988 DW_OP_{eq,ne}
13989 or
13990 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
13991 DW_OP_{eq,ne}. Pick whatever is shorter. */
13992 if (CONST_INT_P (XEXP (rtl, 1))
13993 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
13994 && (size_of_int_loc_descriptor (shift) + 1
13995 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
13996 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
13997 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
13998 & GET_MODE_MASK (op_mode))))
13999 {
14000 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14001 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14002 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14003 & GET_MODE_MASK (op_mode));
14004 return compare_loc_descriptor (op, op0, op1);
14005 }
14006 }
14007 add_loc_descr (&op0, int_loc_descriptor (shift));
14008 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14009 if (CONST_INT_P (XEXP (rtl, 1)))
14010 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14011 else
14012 {
14013 add_loc_descr (&op1, int_loc_descriptor (shift));
14014 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14015 }
14016 return compare_loc_descriptor (op, op0, op1);
14017 }
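/* Worked example: comparing two signed 8-bit operands with
   DWARF2_ADDR_SIZE == 4 gives shift == 24, so each operand is shifted into
   the top byte (DW_OP_lit24 DW_OP_shl) before the signed comparison, which
   makes the 8-bit sign bit coincide with the 32-bit sign bit; for eq/ne
   against a constant, the GET_MODE_MASK form above is used instead whenever
   it is not larger than the shift form.  */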
14018
14019 /* Return location descriptor for signed comparison OP RTL. */
14020
14021 static dw_loc_descr_ref
14022 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14023 machine_mode mem_mode)
14024 {
14025 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14026 dw_loc_descr_ref op0, op1;
14027
14028 if (op_mode == VOIDmode)
14029 op_mode = GET_MODE (XEXP (rtl, 1));
14030 if (op_mode == VOIDmode)
14031 return NULL;
14032
14033 scalar_int_mode int_op_mode;
14034 if (dwarf_strict
14035 && dwarf_version < 5
14036 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14037 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14038 return NULL;
14039
14040 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14041 VAR_INIT_STATUS_INITIALIZED);
14042 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14043 VAR_INIT_STATUS_INITIALIZED);
14044
14045 if (op0 == NULL || op1 == NULL)
14046 return NULL;
14047
14048 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14049 {
14050 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14051 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14052
14053 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14054 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14055 }
14056 return compare_loc_descriptor (op, op0, op1);
14057 }
14058
14059 /* Return location descriptor for unsigned comparison OP RTL. */
14060
14061 static dw_loc_descr_ref
14062 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14063 machine_mode mem_mode)
14064 {
14065 dw_loc_descr_ref op0, op1;
14066
14067 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14068 if (test_op_mode == VOIDmode)
14069 test_op_mode = GET_MODE (XEXP (rtl, 1));
14070
14071 scalar_int_mode op_mode;
14072 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14073 return NULL;
14074
14075 if (dwarf_strict
14076 && dwarf_version < 5
14077 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14078 return NULL;
14079
14080 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14081 VAR_INIT_STATUS_INITIALIZED);
14082 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14083 VAR_INIT_STATUS_INITIALIZED);
14084
14085 if (op0 == NULL || op1 == NULL)
14086 return NULL;
14087
14088 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14089 {
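/* The DWARF comparison operators are signed. In a mode narrower than
   the address size, zero-extend both operands (by masking below); after
   extension both values are non-negative, so the signed comparison
   gives the unsigned result. */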
14090 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14091 dw_loc_descr_ref last0, last1;
14092 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14093 ;
14094 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14095 ;
14096 if (CONST_INT_P (XEXP (rtl, 0)))
14097 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14098 /* deref_size zero extends, so no need to mask it again. */
14099 else if (last0->dw_loc_opc != DW_OP_deref_size
14100 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14101 {
14102 add_loc_descr (&op0, int_loc_descriptor (mask));
14103 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14104 }
14105 if (CONST_INT_P (XEXP (rtl, 1)))
14106 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14107 /* deref_size zero extends, so no need to mask it again. */
14108 else if (last1->dw_loc_opc != DW_OP_deref_size
14109 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14110 {
14111 add_loc_descr (&op1, int_loc_descriptor (mask));
14112 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14113 }
14114 }
14115 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14116 {
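/* At exactly the address size zero-extension is not possible, so add
   the bias 2^(size*8-1) to both operands instead (wrapping modulo the
   address size); this maps the unsigned order onto the signed order,
   and the signed comparison below then yields the unsigned result. */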
14117 HOST_WIDE_INT bias = 1;
14118 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14119 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14120 if (CONST_INT_P (XEXP (rtl, 1)))
14121 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14122 + INTVAL (XEXP (rtl, 1)));
14123 else
14124 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14125 bias, 0));
14126 }
14127 return compare_loc_descriptor (op, op0, op1);
14128 }
14129
14130 /* Return location descriptor for {U,S}{MIN,MAX}. */
14131
14132 static dw_loc_descr_ref
14133 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14134 machine_mode mem_mode)
14135 {
14136 enum dwarf_location_atom op;
14137 dw_loc_descr_ref op0, op1, ret;
14138 dw_loc_descr_ref bra_node, drop_node;
14139
14140 scalar_int_mode int_mode;
14141 if (dwarf_strict
14142 && dwarf_version < 5
14143 && (!is_a <scalar_int_mode> (mode, &int_mode)
14144 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14145 return NULL;
14146
14147 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14148 VAR_INIT_STATUS_INITIALIZED);
14149 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14150 VAR_INIT_STATUS_INITIALIZED);
14151
14152 if (op0 == NULL || op1 == NULL)
14153 return NULL;
14154
14155 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14156 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14157 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
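/* DW_OP_dup/DW_OP_swap/DW_OP_over leave the stack as op0 op1 op0 op1
   (top of stack rightmost). The comparison below consumes the two top
   copies, and the branch/swap/drop sequence at the end keeps whichever
   of the two values underneath is the requested minimum or maximum.
   The mode-specific adjustments that follow are applied only to the
   copies being compared. */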
14158 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14159 {
14160 /* Checked by the caller. */
14161 int_mode = as_a <scalar_int_mode> (mode);
14162 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14163 {
14164 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14165 add_loc_descr (&op0, int_loc_descriptor (mask));
14166 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14167 add_loc_descr (&op1, int_loc_descriptor (mask));
14168 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14169 }
14170 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14171 {
14172 HOST_WIDE_INT bias = 1;
14173 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14174 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14175 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14176 }
14177 }
14178 else if (is_a <scalar_int_mode> (mode, &int_mode)
14179 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14180 {
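/* For signed MIN/MAX in a narrower mode, shift the compared copies left
   so that the narrow sign bit becomes the sign bit of the address-sized
   value; DW_OP_lt/DW_OP_gt below then order them correctly. */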
14181 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14182 add_loc_descr (&op0, int_loc_descriptor (shift));
14183 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14184 add_loc_descr (&op1, int_loc_descriptor (shift));
14185 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14186 }
14187 else if (is_a <scalar_int_mode> (mode, &int_mode)
14188 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14189 {
14190 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14191 dw_loc_descr_ref cvt;
14192 if (type_die == NULL)
14193 return NULL;
14194 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14195 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14196 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14197 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14198 add_loc_descr (&op0, cvt);
14199 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14200 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14201 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14202 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14203 add_loc_descr (&op1, cvt);
14204 }
14205
14206 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14207 op = DW_OP_lt;
14208 else
14209 op = DW_OP_gt;
14210 ret = op0;
14211 add_loc_descr (&ret, op1);
14212 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14213 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14214 add_loc_descr (&ret, bra_node);
14215 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14216 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14217 add_loc_descr (&ret, drop_node);
14218 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14219 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14220 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14221 && is_a <scalar_int_mode> (mode, &int_mode)
14222 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14223 ret = convert_descriptor_to_mode (int_mode, ret);
14224 return ret;
14225 }
14226
14227 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14228 after converting both arguments to TYPE_DIE, then convert the result
14229 back to an unsigned value of MODE. */
14230
14231 static dw_loc_descr_ref
14232 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14233 scalar_int_mode mode, machine_mode mem_mode)
14234 {
14235 dw_loc_descr_ref cvt, op0, op1;
14236
14237 if (type_die == NULL)
14238 return NULL;
14239 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14240 VAR_INIT_STATUS_INITIALIZED);
14241 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14242 VAR_INIT_STATUS_INITIALIZED);
14243 if (op0 == NULL || op1 == NULL)
14244 return NULL;
14245 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14246 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14247 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14248 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14249 add_loc_descr (&op0, cvt);
14250 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14251 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14252 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14253 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14254 add_loc_descr (&op1, cvt);
14255 add_loc_descr (&op0, op1);
14256 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14257 return convert_descriptor_to_mode (mode, op0);
14258 }
14259
14260 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14261 const0 is DW_OP_lit0 or corresponding typed constant,
14262 const1 is DW_OP_lit1 or corresponding typed constant
14263 and constMSB is constant with just the MSB bit set
14264 for the mode):
14265 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14266 L1: const0 DW_OP_swap
14267 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14268 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14269 L3: DW_OP_drop
14270 L4: DW_OP_nop
14271
14272 CTZ is similar:
14273 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14274 L1: const0 DW_OP_swap
14275 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14276 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14277 L3: DW_OP_drop
14278 L4: DW_OP_nop
14279
14280 FFS is similar:
14281 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14282 L1: const1 DW_OP_swap
14283 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14284 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14285 L3: DW_OP_drop
14286 L4: DW_OP_nop */
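/* Worked example, purely for illustration, assuming an 8-bit mode:
   for CLZ of 0x16 (0001 0110) constMSB is 0x80, so the L2 loop shifts
   the value left and increments the counter while the masked MSB is
   zero: 0x16 -> 0x2c -> 0x58 -> 0xb0, three iterations, leaving 3 on
   the stack. A zero input never reaches the loop: the initial
   DW_OP_bra falls through, drops the value and pushes constV instead. */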
14287
14288 static dw_loc_descr_ref
14289 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14290 machine_mode mem_mode)
14291 {
14292 dw_loc_descr_ref op0, ret, tmp;
14293 HOST_WIDE_INT valv;
14294 dw_loc_descr_ref l1jump, l1label;
14295 dw_loc_descr_ref l2jump, l2label;
14296 dw_loc_descr_ref l3jump, l3label;
14297 dw_loc_descr_ref l4jump, l4label;
14298 rtx msb;
14299
14300 if (GET_MODE (XEXP (rtl, 0)) != mode)
14301 return NULL;
14302
14303 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14304 VAR_INIT_STATUS_INITIALIZED);
14305 if (op0 == NULL)
14306 return NULL;
14307 ret = op0;
14308 if (GET_CODE (rtl) == CLZ)
14309 {
14310 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14311 valv = GET_MODE_BITSIZE (mode);
14312 }
14313 else if (GET_CODE (rtl) == FFS)
14314 valv = 0;
14315 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14316 valv = GET_MODE_BITSIZE (mode);
14317 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14318 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14319 add_loc_descr (&ret, l1jump);
14320 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14321 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14322 VAR_INIT_STATUS_INITIALIZED);
14323 if (tmp == NULL)
14324 return NULL;
14325 add_loc_descr (&ret, tmp);
14326 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14327 add_loc_descr (&ret, l4jump);
14328 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14329 ? const1_rtx : const0_rtx,
14330 mode, mem_mode,
14331 VAR_INIT_STATUS_INITIALIZED);
14332 if (l1label == NULL)
14333 return NULL;
14334 add_loc_descr (&ret, l1label);
14335 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14336 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14337 add_loc_descr (&ret, l2label);
14338 if (GET_CODE (rtl) != CLZ)
14339 msb = const1_rtx;
14340 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14341 msb = GEN_INT (HOST_WIDE_INT_1U
14342 << (GET_MODE_BITSIZE (mode) - 1));
14343 else
14344 msb = immed_wide_int_const
14345 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14346 GET_MODE_PRECISION (mode)), mode);
14347 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14348 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14349 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14350 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14351 else
14352 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14353 VAR_INIT_STATUS_INITIALIZED);
14354 if (tmp == NULL)
14355 return NULL;
14356 add_loc_descr (&ret, tmp);
14357 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14358 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14359 add_loc_descr (&ret, l3jump);
14360 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14361 VAR_INIT_STATUS_INITIALIZED);
14362 if (tmp == NULL)
14363 return NULL;
14364 add_loc_descr (&ret, tmp);
14365 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14366 ? DW_OP_shl : DW_OP_shr, 0, 0));
14367 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14368 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14369 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14370 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14371 add_loc_descr (&ret, l2jump);
14372 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14373 add_loc_descr (&ret, l3label);
14374 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14375 add_loc_descr (&ret, l4label);
14376 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14377 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14378 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14379 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14380 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14381 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14382 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14383 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14384 return ret;
14385 }
14386
14387 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14388 const1 is DW_OP_lit1 or corresponding typed constant):
14389 const0 DW_OP_swap
14390 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14391 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14392 L2: DW_OP_drop
14393
14394 PARITY is similar:
14395 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14396 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14397 L2: DW_OP_drop */
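/* Worked example, purely for illustration: the intended computation
   repeatedly adds the low bit of the value to an accumulator and
   shifts the value right. For the input 11 (binary 1011) POPCOUNT
   accumulates 1 + 1 + 0 + 1 and leaves 3 on the stack; PARITY uses
   DW_OP_xor instead of DW_OP_plus and leaves 1. */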
14398
14399 static dw_loc_descr_ref
14400 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14401 machine_mode mem_mode)
14402 {
14403 dw_loc_descr_ref op0, ret, tmp;
14404 dw_loc_descr_ref l1jump, l1label;
14405 dw_loc_descr_ref l2jump, l2label;
14406
14407 if (GET_MODE (XEXP (rtl, 0)) != mode)
14408 return NULL;
14409
14410 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14411 VAR_INIT_STATUS_INITIALIZED);
14412 if (op0 == NULL)
14413 return NULL;
14414 ret = op0;
14415 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14416 VAR_INIT_STATUS_INITIALIZED);
14417 if (tmp == NULL)
14418 return NULL;
14419 add_loc_descr (&ret, tmp);
14420 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14421 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14422 add_loc_descr (&ret, l1label);
14423 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14424 add_loc_descr (&ret, l2jump);
14425 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14426 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14427 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14428 VAR_INIT_STATUS_INITIALIZED);
14429 if (tmp == NULL)
14430 return NULL;
14431 add_loc_descr (&ret, tmp);
14432 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14433 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14434 ? DW_OP_plus : DW_OP_xor, 0, 0));
14435 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14436 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14437 VAR_INIT_STATUS_INITIALIZED);
14438 add_loc_descr (&ret, tmp);
14439 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14440 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14441 add_loc_descr (&ret, l1jump);
14442 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14443 add_loc_descr (&ret, l2label);
14444 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14445 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14446 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14447 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14448 return ret;
14449 }
14450
14451 /* BSWAP (constS is initial shift count, either 56 or 24):
14452 constS const0
14453 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14454 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14455 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14456 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14457 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
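/* Worked example, purely for illustration, for the 32-bit value
   0x11223344 (constS = 24): successive iterations extract the bytes
   0x44, 0x33, 0x22 and 0x11 and OR them into the result at shifts 24,
   16, 8 and 0, leaving 0x44332211 on the stack. */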
14458
14459 static dw_loc_descr_ref
14460 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
14461 machine_mode mem_mode)
14462 {
14463 dw_loc_descr_ref op0, ret, tmp;
14464 dw_loc_descr_ref l1jump, l1label;
14465 dw_loc_descr_ref l2jump, l2label;
14466
14467 if (BITS_PER_UNIT != 8
14468 || (GET_MODE_BITSIZE (mode) != 32
14469 && GET_MODE_BITSIZE (mode) != 64))
14470 return NULL;
14471
14472 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14473 VAR_INIT_STATUS_INITIALIZED);
14474 if (op0 == NULL)
14475 return NULL;
14476
14477 ret = op0;
14478 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14479 mode, mem_mode,
14480 VAR_INIT_STATUS_INITIALIZED);
14481 if (tmp == NULL)
14482 return NULL;
14483 add_loc_descr (&ret, tmp);
14484 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14485 VAR_INIT_STATUS_INITIALIZED);
14486 if (tmp == NULL)
14487 return NULL;
14488 add_loc_descr (&ret, tmp);
14489 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14490 add_loc_descr (&ret, l1label);
14491 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14492 mode, mem_mode,
14493 VAR_INIT_STATUS_INITIALIZED);
14494 add_loc_descr (&ret, tmp);
14495 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14496 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14497 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14498 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14499 VAR_INIT_STATUS_INITIALIZED);
14500 if (tmp == NULL)
14501 return NULL;
14502 add_loc_descr (&ret, tmp);
14503 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14504 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14505 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14506 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14507 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14508 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14509 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14510 VAR_INIT_STATUS_INITIALIZED);
14511 add_loc_descr (&ret, tmp);
14512 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14513 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14514 add_loc_descr (&ret, l2jump);
14515 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14516 VAR_INIT_STATUS_INITIALIZED);
14517 add_loc_descr (&ret, tmp);
14518 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14519 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14520 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14521 add_loc_descr (&ret, l1jump);
14522 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14523 add_loc_descr (&ret, l2label);
14524 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14525 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14526 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14527 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14528 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14529 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14530 return ret;
14531 }
14532
14533 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14534 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14535 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14536 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14537
14538 ROTATERT is similar:
14539 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14540 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14541 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
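/* In other words, a left rotate by K is computed as
   (X << K) | (X >> (BITSIZE - K)), with DW_OP_neg/DW_OP_plus_uconst
   producing BITSIZE - K and constMASK truncating intermediate results
   for modes narrower than the address size. For illustration, in an
   8-bit mode 0x96 rotated left by 4 gives (0x60 | 0x09) = 0x69. */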
14542
14543 static dw_loc_descr_ref
14544 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
14545 machine_mode mem_mode)
14546 {
14547 rtx rtlop1 = XEXP (rtl, 1);
14548 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14549 int i;
14550
14551 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
14552 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14553 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14554 VAR_INIT_STATUS_INITIALIZED);
14555 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14556 VAR_INIT_STATUS_INITIALIZED);
14557 if (op0 == NULL || op1 == NULL)
14558 return NULL;
14559 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14560 for (i = 0; i < 2; i++)
14561 {
14562 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14563 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14564 mode, mem_mode,
14565 VAR_INIT_STATUS_INITIALIZED);
14566 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14567 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14568 ? DW_OP_const4u
14569 : HOST_BITS_PER_WIDE_INT == 64
14570 ? DW_OP_const8u : DW_OP_constu,
14571 GET_MODE_MASK (mode), 0);
14572 else
14573 mask[i] = NULL;
14574 if (mask[i] == NULL)
14575 return NULL;
14576 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14577 }
14578 ret = op0;
14579 add_loc_descr (&ret, op1);
14580 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14581 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14582 if (GET_CODE (rtl) == ROTATERT)
14583 {
14584 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14585 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14586 GET_MODE_BITSIZE (mode), 0));
14587 }
14588 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14589 if (mask[0] != NULL)
14590 add_loc_descr (&ret, mask[0]);
14591 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14592 if (mask[1] != NULL)
14593 {
14594 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14595 add_loc_descr (&ret, mask[1]);
14596 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14597 }
14598 if (GET_CODE (rtl) == ROTATE)
14599 {
14600 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14601 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14602 GET_MODE_BITSIZE (mode), 0));
14603 }
14604 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14605 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14606 return ret;
14607 }
14608
14609 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14610 for DEBUG_PARAMETER_REF RTL. */
14611
14612 static dw_loc_descr_ref
14613 parameter_ref_descriptor (rtx rtl)
14614 {
14615 dw_loc_descr_ref ret;
14616 dw_die_ref ref;
14617
14618 if (dwarf_strict)
14619 return NULL;
14620 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14621 /* With LTO during LTRANS we get the late DIE that refers to the early
14622 DIE, thus we add another indirection here. This seems to confuse
14623 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
14624 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14625 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14626 if (ref)
14627 {
14628 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14629 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14630 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14631 }
14632 else
14633 {
14634 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14635 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14636 }
14637 return ret;
14638 }
14639
14640 /* The following routine converts the RTL for a variable or parameter
14641 (resident in memory) into an equivalent Dwarf representation of a
14642 mechanism for getting the address of that same variable onto the top of a
14643 hypothetical "address evaluation" stack.
14644
14645 When creating memory location descriptors, we are effectively transforming
14646 the RTL for a memory-resident object into its Dwarf postfix expression
14647 equivalent. This routine recursively descends an RTL tree, turning
14648 it into Dwarf postfix code as it goes.
14649
14650 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14651
14652 MEM_MODE is the mode of the memory reference, needed to handle some
14653 autoincrement addressing modes.
14654
14655 Return 0 if we can't represent the location. */
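/* For instance, for an address such as (plus (reg fp) (const_int -8))
   the result is typically a single base register operation with an
   offset, e.g. DW_OP_breg<N> -8 (or DW_OP_fbreg -8 when a frame base
   is in use); the register numbering here is illustrative only. */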
14656
14657 dw_loc_descr_ref
14658 mem_loc_descriptor (rtx rtl, machine_mode mode,
14659 machine_mode mem_mode,
14660 enum var_init_status initialized)
14661 {
14662 dw_loc_descr_ref mem_loc_result = NULL;
14663 enum dwarf_location_atom op;
14664 dw_loc_descr_ref op0, op1;
14665 rtx inner = NULL_RTX;
14666
14667 if (mode == VOIDmode)
14668 mode = GET_MODE (rtl);
14669
14670 /* Note that for a dynamically sized array, the location we will generate a
14671 description of here will be the lowest numbered location which is
14672 actually within the array. That's *not* necessarily the same as the
14673 zeroth element of the array. */
14674
14675 rtl = targetm.delegitimize_address (rtl);
14676
14677 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14678 return NULL;
14679
14680 scalar_int_mode int_mode, inner_mode, op1_mode;
14681 switch (GET_CODE (rtl))
14682 {
14683 case POST_INC:
14684 case POST_DEC:
14685 case POST_MODIFY:
14686 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14687
14688 case SUBREG:
14689 /* The case of a subreg may arise when we have a local (register)
14690 variable or a formal (register) parameter which doesn't quite fill
14691 up an entire register. For now, just assume that it is
14692 legitimate to make the Dwarf info refer to the whole register which
14693 contains the given subreg. */
14694 if (!subreg_lowpart_p (rtl))
14695 break;
14696 inner = SUBREG_REG (rtl);
14697 /* FALLTHRU */
14698 case TRUNCATE:
14699 if (inner == NULL_RTX)
14700 inner = XEXP (rtl, 0);
14701 if (is_a <scalar_int_mode> (mode, &int_mode)
14702 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14703 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14704 #ifdef POINTERS_EXTEND_UNSIGNED
14705 || (int_mode == Pmode && mem_mode != VOIDmode)
14706 #endif
14707 )
14708 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
14709 {
14710 mem_loc_result = mem_loc_descriptor (inner,
14711 inner_mode,
14712 mem_mode, initialized);
14713 break;
14714 }
14715 if (dwarf_strict && dwarf_version < 5)
14716 break;
14717 if (is_a <scalar_int_mode> (mode, &int_mode)
14718 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14719 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
14720 : GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (inner)))
14721 {
14722 dw_die_ref type_die;
14723 dw_loc_descr_ref cvt;
14724
14725 mem_loc_result = mem_loc_descriptor (inner,
14726 GET_MODE (inner),
14727 mem_mode, initialized);
14728 if (mem_loc_result == NULL)
14729 break;
14730 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14731 if (type_die == NULL)
14732 {
14733 mem_loc_result = NULL;
14734 break;
14735 }
14736 if (GET_MODE_SIZE (mode)
14737 != GET_MODE_SIZE (GET_MODE (inner)))
14738 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14739 else
14740 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14741 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14742 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14743 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14744 add_loc_descr (&mem_loc_result, cvt);
14745 if (is_a <scalar_int_mode> (mode, &int_mode)
14746 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14747 {
14748 /* Convert it to untyped afterwards. */
14749 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14750 add_loc_descr (&mem_loc_result, cvt);
14751 }
14752 }
14753 break;
14754
14755 case REG:
14756 if (!is_a <scalar_int_mode> (mode, &int_mode)
14757 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14758 && rtl != arg_pointer_rtx
14759 && rtl != frame_pointer_rtx
14760 #ifdef POINTERS_EXTEND_UNSIGNED
14761 && (int_mode != Pmode || mem_mode == VOIDmode)
14762 #endif
14763 ))
14764 {
14765 dw_die_ref type_die;
14766 unsigned int dbx_regnum;
14767
14768 if (dwarf_strict && dwarf_version < 5)
14769 break;
14770 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14771 break;
14772 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14773 if (type_die == NULL)
14774 break;
14775
14776 dbx_regnum = dbx_reg_number (rtl);
14777 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14778 break;
14779 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14780 dbx_regnum, 0);
14781 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14782 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14783 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14784 break;
14785 }
14786 /* Whenever a register number forms a part of the description of the
14787 method for calculating the (dynamic) address of a memory resident
14788 object, DWARF rules require the register number be referred to as
14789 a "base register". This distinction is not based in any way upon
14790 what category of register the hardware believes the given register
14791 belongs to. This is strictly DWARF terminology we're dealing with
14792 here. Note that in cases where the location of a memory-resident
14793 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14794 OP_CONST (0)) the actual DWARF location descriptor that we generate
14795 may just be OP_BASEREG (basereg). This may look deceptively like
14796 the object in question was allocated to a register (rather than in
14797 memory) so DWARF consumers need to be aware of the subtle
14798 distinction between OP_REG and OP_BASEREG. */
14799 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14800 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14801 else if (stack_realign_drap
14802 && crtl->drap_reg
14803 && crtl->args.internal_arg_pointer == rtl
14804 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14805 {
14806 /* If RTL is internal_arg_pointer, which has been optimized
14807 out, use DRAP instead. */
14808 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14809 VAR_INIT_STATUS_INITIALIZED);
14810 }
14811 break;
14812
14813 case SIGN_EXTEND:
14814 case ZERO_EXTEND:
14815 if (!is_a <scalar_int_mode> (mode, &int_mode)
14816 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
14817 break;
14818 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
14819 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14820 if (op0 == 0)
14821 break;
14822 else if (GET_CODE (rtl) == ZERO_EXTEND
14823 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14824 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
14825 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14826 to expand zero extend as two shifts instead of
14827 masking. */
14828 && GET_MODE_SIZE (inner_mode) <= 4)
14829 {
14830 mem_loc_result = op0;
14831 add_loc_descr (&mem_loc_result,
14832 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
14833 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14834 }
14835 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14836 {
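/* Extend by shifting the value into the most significant bits of the
   address-sized word and back down again: DW_OP_shr zero-fills for
   ZERO_EXTEND, DW_OP_shra sign-fills for SIGN_EXTEND. */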
14837 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
14838 shift *= BITS_PER_UNIT;
14839 if (GET_CODE (rtl) == SIGN_EXTEND)
14840 op = DW_OP_shra;
14841 else
14842 op = DW_OP_shr;
14843 mem_loc_result = op0;
14844 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14845 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14846 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14847 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14848 }
14849 else if (!dwarf_strict || dwarf_version >= 5)
14850 {
14851 dw_die_ref type_die1, type_die2;
14852 dw_loc_descr_ref cvt;
14853
14854 type_die1 = base_type_for_mode (inner_mode,
14855 GET_CODE (rtl) == ZERO_EXTEND);
14856 if (type_die1 == NULL)
14857 break;
14858 type_die2 = base_type_for_mode (int_mode, 1);
14859 if (type_die2 == NULL)
14860 break;
14861 mem_loc_result = op0;
14862 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14863 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14864 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14865 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14866 add_loc_descr (&mem_loc_result, cvt);
14867 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14868 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14869 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14870 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14871 add_loc_descr (&mem_loc_result, cvt);
14872 }
14873 break;
14874
14875 case MEM:
14876 {
14877 rtx new_rtl = avoid_constant_pool_reference (rtl);
14878 if (new_rtl != rtl)
14879 {
14880 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14881 initialized);
14882 if (mem_loc_result != NULL)
14883 return mem_loc_result;
14884 }
14885 }
14886 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14887 get_address_mode (rtl), mode,
14888 VAR_INIT_STATUS_INITIALIZED);
14889 if (mem_loc_result == NULL)
14890 mem_loc_result = tls_mem_loc_descriptor (rtl);
14891 if (mem_loc_result != NULL)
14892 {
14893 if (!is_a <scalar_int_mode> (mode, &int_mode)
14894 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14895 {
14896 dw_die_ref type_die;
14897 dw_loc_descr_ref deref;
14898
14899 if (dwarf_strict && dwarf_version < 5)
14900 return NULL;
14901 type_die
14902 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14903 if (type_die == NULL)
14904 return NULL;
14905 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type),
14906 GET_MODE_SIZE (mode), 0);
14907 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14908 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14909 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14910 add_loc_descr (&mem_loc_result, deref);
14911 }
14912 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14913 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
14914 else
14915 add_loc_descr (&mem_loc_result,
14916 new_loc_descr (DW_OP_deref_size,
14917 GET_MODE_SIZE (int_mode), 0));
14918 }
14919 break;
14920
14921 case LO_SUM:
14922 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
14923
14924 case LABEL_REF:
14925 /* Some ports can transform a symbol ref into a label ref, because
14926 the symbol ref is too far away and has to be dumped into a constant
14927 pool. */
14928 case CONST:
14929 case SYMBOL_REF:
14930 if (!is_a <scalar_int_mode> (mode, &int_mode)
14931 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14932 #ifdef POINTERS_EXTEND_UNSIGNED
14933 && (int_mode != Pmode || mem_mode == VOIDmode)
14934 #endif
14935 ))
14936 break;
14937 if (GET_CODE (rtl) == SYMBOL_REF
14938 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14939 {
14940 dw_loc_descr_ref temp;
14941
14942 /* If this is not defined, we have no way to emit the data. */
14943 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
14944 break;
14945
14946 temp = new_addr_loc_descr (rtl, dtprel_true);
14947
14948 /* We check for DWARF 5 here because gdb did not implement
14949 DW_OP_form_tls_address until after 7.12. */
14950 mem_loc_result = new_loc_descr ((dwarf_version >= 5
14951 ? DW_OP_form_tls_address
14952 : DW_OP_GNU_push_tls_address),
14953 0, 0);
14954 add_loc_descr (&mem_loc_result, temp);
14955
14956 break;
14957 }
14958
14959 if (!const_ok_for_output (rtl))
14960 {
14961 if (GET_CODE (rtl) == CONST)
14962 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
14963 mem_mode, initialized);
14964 break;
14965 }
14966
14967 symref:
14968 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
14969 vec_safe_push (used_rtx_array, rtl);
14970 break;
14971
14972 case CONCAT:
14973 case CONCATN:
14974 case VAR_LOCATION:
14975 case DEBUG_IMPLICIT_PTR:
14976 expansion_failed (NULL_TREE, rtl,
14977 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
14978 return 0;
14979
14980 case ENTRY_VALUE:
14981 if (dwarf_strict && dwarf_version < 5)
14982 return NULL;
14983 if (REG_P (ENTRY_VALUE_EXP (rtl)))
14984 {
14985 if (!is_a <scalar_int_mode> (mode, &int_mode)
14986 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14987 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
14988 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
14989 else
14990 {
14991 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
14992 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14993 return NULL;
14994 op0 = one_reg_loc_descriptor (dbx_regnum,
14995 VAR_INIT_STATUS_INITIALIZED);
14996 }
14997 }
14998 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
14999 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15000 {
15001 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15002 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15003 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15004 return NULL;
15005 }
15006 else
15007 gcc_unreachable ();
15008 if (op0 == NULL)
15009 return NULL;
15010 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15011 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15012 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15013 break;
15014
15015 case DEBUG_PARAMETER_REF:
15016 mem_loc_result = parameter_ref_descriptor (rtl);
15017 break;
15018
15019 case PRE_MODIFY:
15020 /* Extract the PLUS expression nested inside and fall into
15021 PLUS code below. */
15022 rtl = XEXP (rtl, 1);
15023 goto plus;
15024
15025 case PRE_INC:
15026 case PRE_DEC:
15027 /* Turn these into a PLUS expression and fall into the PLUS code
15028 below. */
15029 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15030 gen_int_mode (GET_CODE (rtl) == PRE_INC
15031 ? GET_MODE_UNIT_SIZE (mem_mode)
15032 : -GET_MODE_UNIT_SIZE (mem_mode),
15033 mode));
15034
15035 /* fall through */
15036
15037 case PLUS:
15038 plus:
15039 if (is_based_loc (rtl)
15040 && is_a <scalar_int_mode> (mode, &int_mode)
15041 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15042 || XEXP (rtl, 0) == arg_pointer_rtx
15043 || XEXP (rtl, 0) == frame_pointer_rtx))
15044 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15045 INTVAL (XEXP (rtl, 1)),
15046 VAR_INIT_STATUS_INITIALIZED);
15047 else
15048 {
15049 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15050 VAR_INIT_STATUS_INITIALIZED);
15051 if (mem_loc_result == 0)
15052 break;
15053
15054 if (CONST_INT_P (XEXP (rtl, 1))
15055 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15056 <= DWARF2_ADDR_SIZE))
15057 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15058 else
15059 {
15060 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15061 VAR_INIT_STATUS_INITIALIZED);
15062 if (op1 == 0)
15063 return NULL;
15064 add_loc_descr (&mem_loc_result, op1);
15065 add_loc_descr (&mem_loc_result,
15066 new_loc_descr (DW_OP_plus, 0, 0));
15067 }
15068 }
15069 break;
15070
15071 /* If a pseudo-reg is optimized away, it is possible for it to
15072 be replaced with a MEM containing a multiply or shift. */
15073 case MINUS:
15074 op = DW_OP_minus;
15075 goto do_binop;
15076
15077 case MULT:
15078 op = DW_OP_mul;
15079 goto do_binop;
15080
15081 case DIV:
15082 if ((!dwarf_strict || dwarf_version >= 5)
15083 && is_a <scalar_int_mode> (mode, &int_mode)
15084 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15085 {
15086 mem_loc_result = typed_binop (DW_OP_div, rtl,
15087 base_type_for_mode (mode, 0),
15088 int_mode, mem_mode);
15089 break;
15090 }
15091 op = DW_OP_div;
15092 goto do_binop;
15093
15094 case UMOD:
15095 op = DW_OP_mod;
15096 goto do_binop;
15097
15098 case ASHIFT:
15099 op = DW_OP_shl;
15100 goto do_shift;
15101
15102 case ASHIFTRT:
15103 op = DW_OP_shra;
15104 goto do_shift;
15105
15106 case LSHIFTRT:
15107 op = DW_OP_shr;
15108 goto do_shift;
15109
15110 do_shift:
15111 if (!is_a <scalar_int_mode> (mode, &int_mode))
15112 break;
15113 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15114 VAR_INIT_STATUS_INITIALIZED);
15115 {
15116 rtx rtlop1 = XEXP (rtl, 1);
15117 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15118 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15119 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15120 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15121 VAR_INIT_STATUS_INITIALIZED);
15122 }
15123
15124 if (op0 == 0 || op1 == 0)
15125 break;
15126
15127 mem_loc_result = op0;
15128 add_loc_descr (&mem_loc_result, op1);
15129 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15130 break;
15131
15132 case AND:
15133 op = DW_OP_and;
15134 goto do_binop;
15135
15136 case IOR:
15137 op = DW_OP_or;
15138 goto do_binop;
15139
15140 case XOR:
15141 op = DW_OP_xor;
15142 goto do_binop;
15143
15144 do_binop:
15145 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15148 VAR_INIT_STATUS_INITIALIZED);
15149
15150 if (op0 == 0 || op1 == 0)
15151 break;
15152
15153 mem_loc_result = op0;
15154 add_loc_descr (&mem_loc_result, op1);
15155 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15156 break;
15157
15158 case MOD:
15159 if ((!dwarf_strict || dwarf_version >= 5)
15160 && is_a <scalar_int_mode> (mode, &int_mode)
15161 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15162 {
15163 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15164 base_type_for_mode (mode, 0),
15165 int_mode, mem_mode);
15166 break;
15167 }
15168
15169 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15170 VAR_INIT_STATUS_INITIALIZED);
15171 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15172 VAR_INIT_STATUS_INITIALIZED);
15173
15174 if (op0 == 0 || op1 == 0)
15175 break;
15176
15177 mem_loc_result = op0;
15178 add_loc_descr (&mem_loc_result, op1);
15179 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15180 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15181 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15182 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15183 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15184 break;
15185
15186 case UDIV:
15187 if ((!dwarf_strict || dwarf_version >= 5)
15188 && is_a <scalar_int_mode> (mode, &int_mode))
15189 {
15190 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15191 {
15192 op = DW_OP_div;
15193 goto do_binop;
15194 }
15195 mem_loc_result = typed_binop (DW_OP_div, rtl,
15196 base_type_for_mode (int_mode, 1),
15197 int_mode, mem_mode);
15198 }
15199 break;
15200
15201 case NOT:
15202 op = DW_OP_not;
15203 goto do_unop;
15204
15205 case ABS:
15206 op = DW_OP_abs;
15207 goto do_unop;
15208
15209 case NEG:
15210 op = DW_OP_neg;
15211 goto do_unop;
15212
15213 do_unop:
15214 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15215 VAR_INIT_STATUS_INITIALIZED);
15216
15217 if (op0 == 0)
15218 break;
15219
15220 mem_loc_result = op0;
15221 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15222 break;
15223
15224 case CONST_INT:
15225 if (!is_a <scalar_int_mode> (mode, &int_mode)
15226 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15227 #ifdef POINTERS_EXTEND_UNSIGNED
15228 || (int_mode == Pmode
15229 && mem_mode != VOIDmode
15230 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15231 #endif
15232 )
15233 {
15234 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15235 break;
15236 }
15237 if ((!dwarf_strict || dwarf_version >= 5)
15238 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15239 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15240 {
15241 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15242 scalar_int_mode amode;
15243 if (type_die == NULL)
15244 return NULL;
15245 if (INTVAL (rtl) >= 0
15246 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15247 .exists (&amode))
15248 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15249 /* const DW_OP_convert <XXX> vs.
15250 DW_OP_const_type <XXX, 1, const>. */
15251 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15252 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15253 {
15254 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15255 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15256 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15257 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15258 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15259 add_loc_descr (&mem_loc_result, op0);
15260 return mem_loc_result;
15261 }
15262 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15263 INTVAL (rtl));
15264 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15265 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15266 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15267 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15268 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15269 else
15270 {
15271 mem_loc_result->dw_loc_oprnd2.val_class
15272 = dw_val_class_const_double;
15273 mem_loc_result->dw_loc_oprnd2.v.val_double
15274 = double_int::from_shwi (INTVAL (rtl));
15275 }
15276 }
15277 break;
15278
15279 case CONST_DOUBLE:
15280 if (!dwarf_strict || dwarf_version >= 5)
15281 {
15282 dw_die_ref type_die;
15283
15284 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15285 CONST_DOUBLE rtx could represent either a large integer
15286 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15287 the value is always a floating point constant.
15288
15289 When it is an integer, a CONST_DOUBLE is used whenever
15290 the constant requires 2 HWIs to be adequately represented.
15291 We output CONST_DOUBLEs as blocks. */
15292 if (mode == VOIDmode
15293 || (GET_MODE (rtl) == VOIDmode
15294 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
15295 break;
15296 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15297 if (type_die == NULL)
15298 return NULL;
15299 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15300 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15301 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15302 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15303 #if TARGET_SUPPORTS_WIDE_INT == 0
15304 if (!SCALAR_FLOAT_MODE_P (mode))
15305 {
15306 mem_loc_result->dw_loc_oprnd2.val_class
15307 = dw_val_class_const_double;
15308 mem_loc_result->dw_loc_oprnd2.v.val_double
15309 = rtx_to_double_int (rtl);
15310 }
15311 else
15312 #endif
15313 {
15314 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15315 unsigned int length = GET_MODE_SIZE (float_mode);
15316 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15317
15318 insert_float (rtl, array);
15319 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15320 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15321 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15322 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15323 }
15324 }
15325 break;
15326
15327 case CONST_WIDE_INT:
15328 if (!dwarf_strict || dwarf_version >= 5)
15329 {
15330 dw_die_ref type_die;
15331
15332 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15333 if (type_die == NULL)
15334 return NULL;
15335 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15336 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15337 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15338 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15339 mem_loc_result->dw_loc_oprnd2.val_class
15340 = dw_val_class_wide_int;
15341 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15342 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15343 }
15344 break;
15345
15346 case EQ:
15347 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15348 break;
15349
15350 case GE:
15351 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15352 break;
15353
15354 case GT:
15355 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15356 break;
15357
15358 case LE:
15359 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15360 break;
15361
15362 case LT:
15363 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15364 break;
15365
15366 case NE:
15367 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15368 break;
15369
15370 case GEU:
15371 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15372 break;
15373
15374 case GTU:
15375 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15376 break;
15377
15378 case LEU:
15379 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15380 break;
15381
15382 case LTU:
15383 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15384 break;
15385
15386 case UMIN:
15387 case UMAX:
15388 if (!SCALAR_INT_MODE_P (mode))
15389 break;
15390 /* FALLTHRU */
15391 case SMIN:
15392 case SMAX:
15393 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15394 break;
15395
15396 case ZERO_EXTRACT:
15397 case SIGN_EXTRACT:
15398 if (CONST_INT_P (XEXP (rtl, 1))
15399 && CONST_INT_P (XEXP (rtl, 2))
15400 && is_a <scalar_int_mode> (mode, &int_mode)
15401 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
15402 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15403 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
15404 && ((unsigned) INTVAL (XEXP (rtl, 1))
15405 + (unsigned) INTVAL (XEXP (rtl, 2))
15406 <= GET_MODE_BITSIZE (int_mode)))
15407 {
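/* Shift the selected bit-field to the most significant end of the
   address-sized word and then back down, so DW_OP_shr or DW_OP_shra
   leaves it zero- or sign-extended in the low bits. */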
15408 int shift, size;
15409 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15410 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15411 if (op0 == 0)
15412 break;
15413 if (GET_CODE (rtl) == SIGN_EXTRACT)
15414 op = DW_OP_shra;
15415 else
15416 op = DW_OP_shr;
15417 mem_loc_result = op0;
15418 size = INTVAL (XEXP (rtl, 1));
15419 shift = INTVAL (XEXP (rtl, 2));
15420 if (BITS_BIG_ENDIAN)
15421 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
15422 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
15423 {
15424 add_loc_descr (&mem_loc_result,
15425 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
15426 - shift - size));
15427 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15428 }
15429 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
15430 {
15431 add_loc_descr (&mem_loc_result,
15432 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
15433 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15434 }
15435 }
15436 break;
15437
15438 case IF_THEN_ELSE:
15439 {
15440 dw_loc_descr_ref op2, bra_node, drop_node;
15441 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15442 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15443 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15444 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15445 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15446 VAR_INIT_STATUS_INITIALIZED);
15447 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15448 VAR_INIT_STATUS_INITIALIZED);
15449 if (op0 == NULL || op1 == NULL || op2 == NULL)
15450 break;
15451
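/* Push the "then" value, the "else" value and the condition; DW_OP_bra
   skips the DW_OP_swap when the condition is nonzero, so the following
   DW_OP_drop keeps the "then" value in that case and the "else" value
   otherwise. */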
15452 mem_loc_result = op1;
15453 add_loc_descr (&mem_loc_result, op2);
15454 add_loc_descr (&mem_loc_result, op0);
15455 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15456 add_loc_descr (&mem_loc_result, bra_node);
15457 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15458 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15459 add_loc_descr (&mem_loc_result, drop_node);
15460 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15461 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15462 }
15463 break;
15464
15465 case FLOAT_EXTEND:
15466 case FLOAT_TRUNCATE:
15467 case FLOAT:
15468 case UNSIGNED_FLOAT:
15469 case FIX:
15470 case UNSIGNED_FIX:
15471 if (!dwarf_strict || dwarf_version >= 5)
15472 {
15473 dw_die_ref type_die;
15474 dw_loc_descr_ref cvt;
15475
15476 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15477 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15478 if (op0 == NULL)
15479 break;
15480 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
15481 && (GET_CODE (rtl) == FLOAT
15482 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
15483 {
15484 type_die = base_type_for_mode (int_mode,
15485 GET_CODE (rtl) == UNSIGNED_FLOAT);
15486 if (type_die == NULL)
15487 break;
15488 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15489 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15490 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15491 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15492 add_loc_descr (&op0, cvt);
15493 }
15494 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15495 if (type_die == NULL)
15496 break;
15497 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15498 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15499 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15500 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15501 add_loc_descr (&op0, cvt);
15502 if (is_a <scalar_int_mode> (mode, &int_mode)
15503 && (GET_CODE (rtl) == FIX
15504 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
15505 {
15506 op0 = convert_descriptor_to_mode (int_mode, op0);
15507 if (op0 == NULL)
15508 break;
15509 }
15510 mem_loc_result = op0;
15511 }
15512 break;
15513
15514 case CLZ:
15515 case CTZ:
15516 case FFS:
15517 if (is_a <scalar_int_mode> (mode, &int_mode))
15518 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
15519 break;
15520
15521 case POPCOUNT:
15522 case PARITY:
15523 if (is_a <scalar_int_mode> (mode, &int_mode))
15524 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
15525 break;
15526
15527 case BSWAP:
15528 if (is_a <scalar_int_mode> (mode, &int_mode))
15529 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
15530 break;
15531
15532 case ROTATE:
15533 case ROTATERT:
15534 if (is_a <scalar_int_mode> (mode, &int_mode))
15535 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
15536 break;
15537
15538 case COMPARE:
15539 /* In theory, we could implement the above. */
15540 /* DWARF cannot represent the unsigned compare operations
15541 natively. */
15542 case SS_MULT:
15543 case US_MULT:
15544 case SS_DIV:
15545 case US_DIV:
15546 case SS_PLUS:
15547 case US_PLUS:
15548 case SS_MINUS:
15549 case US_MINUS:
15550 case SS_NEG:
15551 case US_NEG:
15552 case SS_ABS:
15553 case SS_ASHIFT:
15554 case US_ASHIFT:
15555 case SS_TRUNCATE:
15556 case US_TRUNCATE:
15557 case UNORDERED:
15558 case ORDERED:
15559 case UNEQ:
15560 case UNGE:
15561 case UNGT:
15562 case UNLE:
15563 case UNLT:
15564 case LTGT:
15565 case FRACT_CONVERT:
15566 case UNSIGNED_FRACT_CONVERT:
15567 case SAT_FRACT:
15568 case UNSIGNED_SAT_FRACT:
15569 case SQRT:
15570 case ASM_OPERANDS:
15571 case VEC_MERGE:
15572 case VEC_SELECT:
15573 case VEC_CONCAT:
15574 case VEC_DUPLICATE:
15575 case UNSPEC:
15576 case HIGH:
15577 case FMA:
15578 case STRICT_LOW_PART:
15579 case CONST_VECTOR:
15580 case CONST_FIXED:
15581 case CLRSB:
15582 case CLOBBER:
15583 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15584 can't express it in the debug info. This can happen e.g. with some
15585 TLS UNSPECs. */
15586 break;
15587
15588 case CONST_STRING:
15589 resolve_one_addr (&rtl);
15590 goto symref;
15591
15592 /* RTL sequences inside PARALLEL record a series of DWARF operations for
15593 the expression. An UNSPEC rtx represents a raw DWARF operation,
15594 new_loc_descr is called for it to build the operation directly.
15595 Otherwise mem_loc_descriptor is called recursively. */
15596 case PARALLEL:
15597 {
15598 int index = 0;
15599 dw_loc_descr_ref exp_result = NULL;
15600
15601 for (; index < XVECLEN (rtl, 0); index++)
15602 {
15603 rtx elem = XVECEXP (rtl, 0, index);
15604 if (GET_CODE (elem) == UNSPEC)
15605 {
15606 /* Each DWARF operation UNSPEC contains two operands; if
15607 one operand is not used for the operation, const0_rtx is
15608 passed. */
15609 gcc_assert (XVECLEN (elem, 0) == 2);
15610
15611 HOST_WIDE_INT dw_op = XINT (elem, 1);
15612 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15613 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15614 exp_result
15615 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15616 oprnd2);
15617 }
15618 else
15619 exp_result
15620 = mem_loc_descriptor (elem, mode, mem_mode,
15621 VAR_INIT_STATUS_INITIALIZED);
15622
15623 if (!mem_loc_result)
15624 mem_loc_result = exp_result;
15625 else
15626 add_loc_descr (&mem_loc_result, exp_result);
15627 }
15628
15629 break;
15630 }
15631
15632 default:
15633 if (flag_checking)
15634 {
15635 print_rtl (stderr, rtl);
15636 gcc_unreachable ();
15637 }
15638 break;
15639 }
15640
15641 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15642 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15643
15644 return mem_loc_result;
15645 }
15646
15647 /* Return a descriptor that describes the concatenation of two locations.
15648 This is typically a complex variable. */
15649
15650 static dw_loc_descr_ref
15651 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15652 {
15653 dw_loc_descr_ref cc_loc_result = NULL;
15654 dw_loc_descr_ref x0_ref
15655 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15656 dw_loc_descr_ref x1_ref
15657 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15658
15659 if (x0_ref == 0 || x1_ref == 0)
15660 return 0;
15661
15662 cc_loc_result = x0_ref;
15663 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
15664
15665 add_loc_descr (&cc_loc_result, x1_ref);
15666 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
15667
15668 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15669 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15670
15671 return cc_loc_result;
15672 }
15673
15674 /* Return a descriptor that describes the concatenation of N
15675 locations. */
15676
15677 static dw_loc_descr_ref
15678 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15679 {
15680 unsigned int i;
15681 dw_loc_descr_ref cc_loc_result = NULL;
15682 unsigned int n = XVECLEN (concatn, 0);
15683
15684 for (i = 0; i < n; ++i)
15685 {
15686 dw_loc_descr_ref ref;
15687 rtx x = XVECEXP (concatn, 0, i);
15688
15689 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15690 if (ref == NULL)
15691 return NULL;
15692
15693 add_loc_descr (&cc_loc_result, ref);
15694 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
15695 }
15696
15697 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15698 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15699
15700 return cc_loc_result;
15701 }
15702
15703 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15704 for DEBUG_IMPLICIT_PTR RTL. */
15705
15706 static dw_loc_descr_ref
15707 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15708 {
15709 dw_loc_descr_ref ret;
15710 dw_die_ref ref;
15711
15712 if (dwarf_strict && dwarf_version < 5)
15713 return NULL;
15714 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15715 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15716 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15717 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15718 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15719 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15720 if (ref)
15721 {
15722 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15723 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15724 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15725 }
15726 else
15727 {
15728 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15729 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15730 }
15731 return ret;
15732 }
15733
15734 /* Output a proper Dwarf location descriptor for a variable or parameter
15735 which is either allocated in a register or in a memory location. For a
15736 register, we just generate an OP_REG and the register number. For a
15737 memory location we provide a Dwarf postfix expression describing how to
15738 generate the (dynamic) address of the object onto the address stack.
15739
15740 MODE is mode of the decl if this loc_descriptor is going to be used in
15741 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
15742 allowed, VOIDmode otherwise.
15743
15744 If we don't know how to describe it, return 0. */
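/* As a hypothetical illustration of the two cases above: a variable living
   entirely in hardware register 3 is typically described by a single
   DW_OP_reg3 operation, while a variable stored 8 bytes below the frame
   base is described by an address computation such as DW_OP_fbreg -8
   (the exact operations depend on the target and on how the RTL below
   is matched).  */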
15745
15746 static dw_loc_descr_ref
15747 loc_descriptor (rtx rtl, machine_mode mode,
15748 enum var_init_status initialized)
15749 {
15750 dw_loc_descr_ref loc_result = NULL;
15751 scalar_int_mode int_mode;
15752
15753 switch (GET_CODE (rtl))
15754 {
15755 case SUBREG:
15756 /* The case of a subreg may arise when we have a local (register)
15757 variable or a formal (register) parameter which doesn't quite fill
15758 up an entire register. For now, just assume that it is
15759 legitimate to make the Dwarf info refer to the whole register which
15760 contains the given subreg. */
15761 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15762 loc_result = loc_descriptor (SUBREG_REG (rtl),
15763 GET_MODE (SUBREG_REG (rtl)), initialized);
15764 else
15765 goto do_default;
15766 break;
15767
15768 case REG:
15769 loc_result = reg_loc_descriptor (rtl, initialized);
15770 break;
15771
15772 case MEM:
15773 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15774 GET_MODE (rtl), initialized);
15775 if (loc_result == NULL)
15776 loc_result = tls_mem_loc_descriptor (rtl);
15777 if (loc_result == NULL)
15778 {
15779 rtx new_rtl = avoid_constant_pool_reference (rtl);
15780 if (new_rtl != rtl)
15781 loc_result = loc_descriptor (new_rtl, mode, initialized);
15782 }
15783 break;
15784
15785 case CONCAT:
15786 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15787 initialized);
15788 break;
15789
15790 case CONCATN:
15791 loc_result = concatn_loc_descriptor (rtl, initialized);
15792 break;
15793
15794 case VAR_LOCATION:
15795 /* Single part. */
15796 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15797 {
15798 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15799 if (GET_CODE (loc) == EXPR_LIST)
15800 loc = XEXP (loc, 0);
15801 loc_result = loc_descriptor (loc, mode, initialized);
15802 break;
15803 }
15804
15805 rtl = XEXP (rtl, 1);
15806 /* FALLTHRU */
15807
15808 case PARALLEL:
15809 {
15810 rtvec par_elems = XVEC (rtl, 0);
15811 int num_elem = GET_NUM_ELEM (par_elems);
15812 machine_mode mode;
15813 int i;
15814
15815 /* Create the first one, so we have something to add to. */
15816 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15817 VOIDmode, initialized);
15818 if (loc_result == NULL)
15819 return NULL;
15820 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15821 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15822 for (i = 1; i < num_elem; i++)
15823 {
15824 dw_loc_descr_ref temp;
15825
15826 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15827 VOIDmode, initialized);
15828 if (temp == NULL)
15829 return NULL;
15830 add_loc_descr (&loc_result, temp);
15831 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15832 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15833 }
15834 }
15835 break;
15836
15837 case CONST_INT:
15838 if (mode != VOIDmode && mode != BLKmode)
15839 {
15840 int_mode = as_a <scalar_int_mode> (mode);
15841 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
15842 INTVAL (rtl));
15843 }
15844 break;
15845
15846 case CONST_DOUBLE:
15847 if (mode == VOIDmode)
15848 mode = GET_MODE (rtl);
15849
15850 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15851 {
15852 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15853
15854 /* Note that a CONST_DOUBLE rtx could represent either an integer
15855 or a floating-point constant. A CONST_DOUBLE is used whenever
15856 the constant requires more than one word in order to be
15857 adequately represented. We output CONST_DOUBLEs as blocks. */
15858 scalar_mode smode = as_a <scalar_mode> (mode);
15859 loc_result = new_loc_descr (DW_OP_implicit_value,
15860 GET_MODE_SIZE (smode), 0);
15861 #if TARGET_SUPPORTS_WIDE_INT == 0
15862 if (!SCALAR_FLOAT_MODE_P (smode))
15863 {
15864 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15865 loc_result->dw_loc_oprnd2.v.val_double
15866 = rtx_to_double_int (rtl);
15867 }
15868 else
15869 #endif
15870 {
15871 unsigned int length = GET_MODE_SIZE (smode);
15872 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15873
15874 insert_float (rtl, array);
15875 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15876 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15877 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15878 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15879 }
15880 }
15881 break;
15882
15883 case CONST_WIDE_INT:
15884 if (mode == VOIDmode)
15885 mode = GET_MODE (rtl);
15886
15887 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15888 {
15889 int_mode = as_a <scalar_int_mode> (mode);
15890 loc_result = new_loc_descr (DW_OP_implicit_value,
15891 GET_MODE_SIZE (int_mode), 0);
15892 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
15893 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15894 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
15895 }
15896 break;
15897
15898 case CONST_VECTOR:
15899 if (mode == VOIDmode)
15900 mode = GET_MODE (rtl);
15901
15902 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15903 {
15904 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
15905 unsigned int length = CONST_VECTOR_NUNITS (rtl);
15906 unsigned char *array
15907 = ggc_vec_alloc<unsigned char> (length * elt_size);
15908 unsigned int i;
15909 unsigned char *p;
15910 machine_mode imode = GET_MODE_INNER (mode);
15911
15912 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15913 switch (GET_MODE_CLASS (mode))
15914 {
15915 case MODE_VECTOR_INT:
15916 for (i = 0, p = array; i < length; i++, p += elt_size)
15917 {
15918 rtx elt = CONST_VECTOR_ELT (rtl, i);
15919 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
15920 }
15921 break;
15922
15923 case MODE_VECTOR_FLOAT:
15924 for (i = 0, p = array; i < length; i++, p += elt_size)
15925 {
15926 rtx elt = CONST_VECTOR_ELT (rtl, i);
15927 insert_float (elt, p);
15928 }
15929 break;
15930
15931 default:
15932 gcc_unreachable ();
15933 }
15934
15935 loc_result = new_loc_descr (DW_OP_implicit_value,
15936 length * elt_size, 0);
15937 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15938 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
15939 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
15940 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15941 }
15942 break;
15943
15944 case CONST:
15945 if (mode == VOIDmode
15946 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
15947 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
15948 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
15949 {
15950 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
15951 break;
15952 }
15953 /* FALLTHROUGH */
15954 case SYMBOL_REF:
15955 if (!const_ok_for_output (rtl))
15956 break;
15957 /* FALLTHROUGH */
15958 case LABEL_REF:
15959 if (is_a <scalar_int_mode> (mode, &int_mode)
15960 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
15961 && (dwarf_version >= 4 || !dwarf_strict))
15962 {
15963 loc_result = new_addr_loc_descr (rtl, dtprel_false);
15964 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
15965 vec_safe_push (used_rtx_array, rtl);
15966 }
15967 break;
15968
15969 case DEBUG_IMPLICIT_PTR:
15970 loc_result = implicit_ptr_descriptor (rtl, 0);
15971 break;
15972
15973 case PLUS:
15974 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
15975 && CONST_INT_P (XEXP (rtl, 1)))
15976 {
15977 loc_result
15978 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
15979 break;
15980 }
15981 /* FALLTHRU */
15982 do_default:
15983 default:
15984 if ((is_a <scalar_int_mode> (mode, &int_mode)
15985 && GET_MODE (rtl) == int_mode
15986 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15987 && dwarf_version >= 4)
15988 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
15989 {
15990 /* Value expression. */
15991 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
15992 if (loc_result)
15993 add_loc_descr (&loc_result,
15994 new_loc_descr (DW_OP_stack_value, 0, 0));
15995 }
15996 break;
15997 }
15998
15999 return loc_result;
16000 }
16001
16002 /* We need to figure out what section we should use as the base for the
16003 address ranges where a given location is valid.
16004 1. If this particular DECL has a section associated with it, use that.
16005 2. If this function has a section associated with it, use that.
16006 3. Otherwise, use the text section.
16007 XXX: If you split a variable across multiple sections, we won't notice. */
16008
16009 static const char *
16010 secname_for_decl (const_tree decl)
16011 {
16012 const char *secname;
16013
16014 if (VAR_OR_FUNCTION_DECL_P (decl)
16015 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16016 && DECL_SECTION_NAME (decl))
16017 secname = DECL_SECTION_NAME (decl);
16018 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16019 secname = DECL_SECTION_NAME (current_function_decl);
16020 else if (cfun && in_cold_section_p)
16021 secname = crtl->subsections.cold_section_label;
16022 else
16023 secname = text_section_label;
16024
16025 return secname;
16026 }
16027
16028 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16029
16030 static bool
16031 decl_by_reference_p (tree decl)
16032 {
16033 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16034 || VAR_P (decl))
16035 && DECL_BY_REFERENCE (decl));
16036 }
16037
16038 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16039 for VARLOC. */
16040
16041 static dw_loc_descr_ref
16042 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16043 enum var_init_status initialized)
16044 {
16045 int have_address = 0;
16046 dw_loc_descr_ref descr;
16047 machine_mode mode;
16048
16049 if (want_address != 2)
16050 {
16051 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16052 /* Single part. */
16053 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16054 {
16055 varloc = PAT_VAR_LOCATION_LOC (varloc);
16056 if (GET_CODE (varloc) == EXPR_LIST)
16057 varloc = XEXP (varloc, 0);
16058 mode = GET_MODE (varloc);
16059 if (MEM_P (varloc))
16060 {
16061 rtx addr = XEXP (varloc, 0);
16062 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16063 mode, initialized);
16064 if (descr)
16065 have_address = 1;
16066 else
16067 {
16068 rtx x = avoid_constant_pool_reference (varloc);
16069 if (x != varloc)
16070 descr = mem_loc_descriptor (x, mode, VOIDmode,
16071 initialized);
16072 }
16073 }
16074 else
16075 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16076 }
16077 else
16078 return 0;
16079 }
16080 else
16081 {
16082 if (GET_CODE (varloc) == VAR_LOCATION)
16083 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16084 else
16085 mode = DECL_MODE (loc);
16086 descr = loc_descriptor (varloc, mode, initialized);
16087 have_address = 1;
16088 }
16089
16090 if (!descr)
16091 return 0;
16092
16093 if (want_address == 2 && !have_address
16094 && (dwarf_version >= 4 || !dwarf_strict))
16095 {
16096 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16097 {
16098 expansion_failed (loc, NULL_RTX,
16099 "DWARF address size mismatch");
16100 return 0;
16101 }
16102 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16103 have_address = 1;
16104 }
16105 /* Show if we can't fill the request for an address. */
16106 if (want_address && !have_address)
16107 {
16108 expansion_failed (loc, NULL_RTX,
16109 "Want address and only have value");
16110 return 0;
16111 }
16112
16113 /* If we've got an address and don't want one, dereference. */
16114 if (!want_address && have_address)
16115 {
16116 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16117 enum dwarf_location_atom op;
16118
16119 if (size > DWARF2_ADDR_SIZE || size == -1)
16120 {
16121 expansion_failed (loc, NULL_RTX,
16122 "DWARF address size mismatch");
16123 return 0;
16124 }
16125 else if (size == DWARF2_ADDR_SIZE)
16126 op = DW_OP_deref;
16127 else
16128 op = DW_OP_deref_size;
16129
16130 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16131 }
16132
16133 return descr;
16134 }
16135
16136 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16137 if it is not possible. */
16138
16139 static dw_loc_descr_ref
16140 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16141 {
16142 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16143 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16144 else if (dwarf_version >= 3 || !dwarf_strict)
16145 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16146 else
16147 return NULL;
16148 }
16149
16150 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16151 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16152
16153 static dw_loc_descr_ref
16154 dw_sra_loc_expr (tree decl, rtx loc)
16155 {
16156 rtx p;
16157 unsigned HOST_WIDE_INT padsize = 0;
16158 dw_loc_descr_ref descr, *descr_tail;
16159 unsigned HOST_WIDE_INT decl_size;
16160 rtx varloc;
16161 enum var_init_status initialized;
16162
16163 if (DECL_SIZE (decl) == NULL
16164 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16165 return NULL;
16166
16167 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16168 descr = NULL;
16169 descr_tail = &descr;
16170
16171 for (p = loc; p; p = XEXP (p, 1))
16172 {
16173 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16174 rtx loc_note = *decl_piece_varloc_ptr (p);
16175 dw_loc_descr_ref cur_descr;
16176 dw_loc_descr_ref *tail, last = NULL;
16177 unsigned HOST_WIDE_INT opsize = 0;
16178
16179 if (loc_note == NULL_RTX
16180 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16181 {
16182 padsize += bitsize;
16183 continue;
16184 }
16185 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16186 varloc = NOTE_VAR_LOCATION (loc_note);
16187 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16188 if (cur_descr == NULL)
16189 {
16190 padsize += bitsize;
16191 continue;
16192 }
16193
16194 /* Check that cur_descr either doesn't use
16195 DW_OP_*piece operations, or their sum is equal
16196 to bitsize. Otherwise we can't embed it. */
16197 for (tail = &cur_descr; *tail != NULL;
16198 tail = &(*tail)->dw_loc_next)
16199 if ((*tail)->dw_loc_opc == DW_OP_piece)
16200 {
16201 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16202 * BITS_PER_UNIT;
16203 last = *tail;
16204 }
16205 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16206 {
16207 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16208 last = *tail;
16209 }
16210
16211 if (last != NULL && opsize != bitsize)
16212 {
16213 padsize += bitsize;
16214 /* Discard the current piece of the descriptor and release any
16215 addr_table entries it uses. */
16216 remove_loc_list_addr_table_entries (cur_descr);
16217 continue;
16218 }
16219
16220 /* If there is a hole, add DW_OP_*piece after empty DWARF
16221 expression, which means that those bits are optimized out. */
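	  /* For example (illustrative only): if the low 32 bits of a 64-bit
	     variable were optimized out and its high 32 bits live in a
	     register, the expression built here would be an empty piece
	     (just DW_OP_piece 4) followed by the register location and
	     another DW_OP_piece 4.  */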
16222 if (padsize)
16223 {
16224 if (padsize > decl_size)
16225 {
16226 remove_loc_list_addr_table_entries (cur_descr);
16227 goto discard_descr;
16228 }
16229 decl_size -= padsize;
16230 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16231 if (*descr_tail == NULL)
16232 {
16233 remove_loc_list_addr_table_entries (cur_descr);
16234 goto discard_descr;
16235 }
16236 descr_tail = &(*descr_tail)->dw_loc_next;
16237 padsize = 0;
16238 }
16239 *descr_tail = cur_descr;
16240 descr_tail = tail;
16241 if (bitsize > decl_size)
16242 goto discard_descr;
16243 decl_size -= bitsize;
16244 if (last == NULL)
16245 {
16246 HOST_WIDE_INT offset = 0;
16247 if (GET_CODE (varloc) == VAR_LOCATION
16248 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16249 {
16250 varloc = PAT_VAR_LOCATION_LOC (varloc);
16251 if (GET_CODE (varloc) == EXPR_LIST)
16252 varloc = XEXP (varloc, 0);
16253 }
16254 do
16255 {
16256 if (GET_CODE (varloc) == CONST
16257 || GET_CODE (varloc) == SIGN_EXTEND
16258 || GET_CODE (varloc) == ZERO_EXTEND)
16259 varloc = XEXP (varloc, 0);
16260 else if (GET_CODE (varloc) == SUBREG)
16261 varloc = SUBREG_REG (varloc);
16262 else
16263 break;
16264 }
16265 while (1);
16266 /* The DW_OP_bit_piece offset should be zero for register
16267 or implicit location descriptions and empty location
16268 descriptions, but for memory addresses it needs big-endian
16269 adjustment. */
16270 if (MEM_P (varloc))
16271 {
16272 unsigned HOST_WIDE_INT memsize
16273 = MEM_SIZE (varloc) * BITS_PER_UNIT;
16274 if (memsize != bitsize)
16275 {
16276 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16277 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16278 goto discard_descr;
16279 if (memsize < bitsize)
16280 goto discard_descr;
16281 if (BITS_BIG_ENDIAN)
16282 offset = memsize - bitsize;
16283 }
16284 }
16285
16286 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16287 if (*descr_tail == NULL)
16288 goto discard_descr;
16289 descr_tail = &(*descr_tail)->dw_loc_next;
16290 }
16291 }
16292
16293 /* If there were any non-empty expressions, add padding till the end of
16294 the decl. */
16295 if (descr != NULL && decl_size != 0)
16296 {
16297 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16298 if (*descr_tail == NULL)
16299 goto discard_descr;
16300 }
16301 return descr;
16302
16303 discard_descr:
16304 /* Discard the descriptor and release any addr_table entries it uses. */
16305 remove_loc_list_addr_table_entries (descr);
16306 return NULL;
16307 }
16308
16309 /* Return the dwarf representation of the location list LOC_LIST of
16310 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16311 function. */
16312
16313 static dw_loc_list_ref
16314 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16315 {
16316 const char *endname, *secname;
16317 rtx varloc;
16318 enum var_init_status initialized;
16319 struct var_loc_node *node;
16320 dw_loc_descr_ref descr;
16321 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16322 dw_loc_list_ref list = NULL;
16323 dw_loc_list_ref *listp = &list;
16324
16325 /* Now that we know what section we are using for a base,
16326 actually construct the list of locations.
16327 The first location information is what is passed to the
16328 function that creates the location list, and the remaining
16329 locations just get added on to that list.
16330 Note that we only know the start address for a location
16331 (i.e. location changes), so to build the range, we use
16332 the range [current location start, next location start].
16333 This means we have to special case the last node, and generate
16334 a range of [last location start, end of function label]. */
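  /* As an illustration: if a variable's location changes at labels L1, L2
     and L3 within a function (hypothetical label names), the list built
     below covers the ranges [L1, L2), [L2, L3) and [L3, end-of-function
     label).  */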
16335
16336 secname = secname_for_decl (decl);
16337
16338 for (node = loc_list->first; node; node = node->next)
16339 if (GET_CODE (node->loc) == EXPR_LIST
16340 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16341 {
16342 if (GET_CODE (node->loc) == EXPR_LIST)
16343 {
16344 /* This requires DW_OP_{,bit_}piece, which is not usable
16345 inside DWARF expressions. */
16346 if (want_address != 2)
16347 continue;
16348 descr = dw_sra_loc_expr (decl, node->loc);
16349 if (descr == NULL)
16350 continue;
16351 }
16352 else
16353 {
16354 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16355 varloc = NOTE_VAR_LOCATION (node->loc);
16356 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16357 }
16358 if (descr)
16359 {
16360 bool range_across_switch = false;
16361 /* If section switch happens in between node->label
16362 and node->next->label (or end of function) and
16363 we can't emit it as a single entry list,
16364 emit two ranges, first one ending at the end
16365 of first partition and second one starting at the
16366 beginning of second partition. */
16367 if (node == loc_list->last_before_switch
16368 && (node != loc_list->first || loc_list->first->next)
16369 && current_function_decl)
16370 {
16371 endname = cfun->fde->dw_fde_end;
16372 range_across_switch = true;
16373 }
16374 /* The variable has a location between NODE->LABEL and
16375 NODE->NEXT->LABEL. */
16376 else if (node->next)
16377 endname = node->next->label;
16378 /* If the variable has a location at the last label
16379 it keeps its location until the end of function. */
16380 else if (!current_function_decl)
16381 endname = text_end_label;
16382 else
16383 {
16384 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16385 current_function_funcdef_no);
16386 endname = ggc_strdup (label_id);
16387 }
16388
16389 *listp = new_loc_list (descr, node->label, endname, secname);
16390 if (TREE_CODE (decl) == PARM_DECL
16391 && node == loc_list->first
16392 && NOTE_P (node->loc)
16393 && strcmp (node->label, endname) == 0)
16394 (*listp)->force = true;
16395 listp = &(*listp)->dw_loc_next;
16396
16397 if (range_across_switch)
16398 {
16399 if (GET_CODE (node->loc) == EXPR_LIST)
16400 descr = dw_sra_loc_expr (decl, node->loc);
16401 else
16402 {
16403 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16404 varloc = NOTE_VAR_LOCATION (node->loc);
16405 descr = dw_loc_list_1 (decl, varloc, want_address,
16406 initialized);
16407 }
16408 gcc_assert (descr);
16409 /* The variable has a location between NODE->LABEL and
16410 NODE->NEXT->LABEL. */
16411 if (node->next)
16412 endname = node->next->label;
16413 else
16414 endname = cfun->fde->dw_fde_second_end;
16415 *listp = new_loc_list (descr,
16416 cfun->fde->dw_fde_second_begin,
16417 endname, secname);
16418 listp = &(*listp)->dw_loc_next;
16419 }
16420 }
16421 }
16422
16423 /* Try to avoid the overhead of a location list by emitting a location
16424 expression instead, but only if we didn't have more than one
16425 location entry in the first place.  If some entries were not
16426 representable, we don't want to pretend that a single entry that was
16427 representable applies to the entire scope in which the variable is
16428 available. */
16429 if (list && loc_list->first->next)
16430 gen_llsym (list);
16431
16432 return list;
16433 }
16434
16435 /* Return true if the loc_list has only a single element and thus can be
16436 represented as a location description. */
16437
16438 static bool
16439 single_element_loc_list_p (dw_loc_list_ref list)
16440 {
16441 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16442 return !list->ll_symbol;
16443 }
16444
16445 /* Duplicate a single element of location list. */
16446
16447 static inline dw_loc_descr_ref
16448 copy_loc_descr (dw_loc_descr_ref ref)
16449 {
16450 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
16451 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16452 return copy;
16453 }
16454
16455 /* To each location in list LIST append loc descr REF. */
16456
16457 static void
16458 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16459 {
16460 dw_loc_descr_ref copy;
16461 add_loc_descr (&list->expr, ref);
16462 list = list->dw_loc_next;
16463 while (list)
16464 {
16465 copy = copy_loc_descr (ref);
16466 add_loc_descr (&list->expr, copy);
16467 while (copy->dw_loc_next)
16468 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16469 list = list->dw_loc_next;
16470 }
16471 }
16472
16473 /* To each location in list LIST prepend loc descr REF. */
16474
16475 static void
16476 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16477 {
16478 dw_loc_descr_ref copy;
16479 dw_loc_descr_ref ref_end = list->expr;
16480 add_loc_descr (&ref, list->expr);
16481 list->expr = ref;
16482 list = list->dw_loc_next;
16483 while (list)
16484 {
16485 dw_loc_descr_ref end = list->expr;
16486 list->expr = copy = copy_loc_descr (ref);
16487 while (copy->dw_loc_next != ref_end)
16488 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16489 copy->dw_loc_next = end;
16490 list = list->dw_loc_next;
16491 }
16492 }
16493
16494 /* Given two lists RET and LIST
16495 produce the location list that results from adding the expression in LIST
16496 to the expression in RET at each position in the program.
16497 Might be destructive on both RET and LIST.
16498
16499 TODO: We handle only the simple cases of RET or LIST having at most one
16500 element.  The general case would involve sorting the lists in program order
16501 and merging them, which will need some additional work.
16502 Adding that will improve the quality of debug info, especially for SRA-ed
16503 structures. */
16504
16505 static void
16506 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16507 {
16508 if (!list)
16509 return;
16510 if (!*ret)
16511 {
16512 *ret = list;
16513 return;
16514 }
16515 if (!list->dw_loc_next)
16516 {
16517 add_loc_descr_to_each (*ret, list->expr);
16518 return;
16519 }
16520 if (!(*ret)->dw_loc_next)
16521 {
16522 prepend_loc_descr_to_each (list, (*ret)->expr);
16523 *ret = list;
16524 return;
16525 }
16526 expansion_failed (NULL_TREE, NULL_RTX,
16527 "Don't know how to merge two non-trivial"
16528 " location lists.\n");
16529 *ret = NULL;
16530 return;
16531 }
16532
16533 /* LOC is a constant expression.  Try our luck: look it up in the constant
16534 pool and return a loc_descr for its address. */
16535
16536 static dw_loc_descr_ref
16537 cst_pool_loc_descr (tree loc)
16538 {
16539 /* Get an RTL for this, if something has been emitted. */
16540 rtx rtl = lookup_constant_def (loc);
16541
16542 if (!rtl || !MEM_P (rtl))
16543 {
16544 gcc_assert (!rtl);
16545 return 0;
16546 }
16547 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16548
16549 /* TODO: We might get more coverage if we were actually delaying expansion
16550 of all expressions till the end of compilation, when constant pools are fully
16551 populated. */
16552 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16553 {
16554 expansion_failed (loc, NULL_RTX,
16555 "CST value in constant pool but not marked.");
16556 return 0;
16557 }
16558 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16559 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16560 }
16561
16562 /* Return a dw_loc_list representing the address of addr_expr LOC
16563 by looking for an inner INDIRECT_REF expression and turning
16564 it into simple arithmetic.
16565
16566 See loc_list_from_tree for the meaning of CONTEXT. */
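/* For example (a hypothetical case): for &p->f where field f sits at byte
   offset 8, and when this is emitted at the top level, the list built below
   evaluates p, then applies DW_OP_plus_uconst 8 and DW_OP_stack_value, so
   the consumer ends up with the address value p + 8.  */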
16567
16568 static dw_loc_list_ref
16569 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16570 loc_descr_context *context)
16571 {
16572 tree obj, offset;
16573 HOST_WIDE_INT bitsize, bitpos, bytepos;
16574 machine_mode mode;
16575 int unsignedp, reversep, volatilep = 0;
16576 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16577
16578 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16579 &bitsize, &bitpos, &offset, &mode,
16580 &unsignedp, &reversep, &volatilep);
16581 STRIP_NOPS (obj);
16582 if (bitpos % BITS_PER_UNIT)
16583 {
16584 expansion_failed (loc, NULL_RTX, "bitfield access");
16585 return 0;
16586 }
16587 if (!INDIRECT_REF_P (obj))
16588 {
16589 expansion_failed (obj,
16590 NULL_RTX, "no indirect ref in inner reference");
16591 return 0;
16592 }
16593 if (!offset && !bitpos)
16594 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16595 context);
16596 else if (toplev
16597 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16598 && (dwarf_version >= 4 || !dwarf_strict))
16599 {
16600 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16601 if (!list_ret)
16602 return 0;
16603 if (offset)
16604 {
16605 /* Variable offset. */
16606 list_ret1 = loc_list_from_tree (offset, 0, context);
16607 if (list_ret1 == 0)
16608 return 0;
16609 add_loc_list (&list_ret, list_ret1);
16610 if (!list_ret)
16611 return 0;
16612 add_loc_descr_to_each (list_ret,
16613 new_loc_descr (DW_OP_plus, 0, 0));
16614 }
16615 bytepos = bitpos / BITS_PER_UNIT;
16616 if (bytepos > 0)
16617 add_loc_descr_to_each (list_ret,
16618 new_loc_descr (DW_OP_plus_uconst,
16619 bytepos, 0));
16620 else if (bytepos < 0)
16621 loc_list_plus_const (list_ret, bytepos);
16622 add_loc_descr_to_each (list_ret,
16623 new_loc_descr (DW_OP_stack_value, 0, 0));
16624 }
16625 return list_ret;
16626 }
16627
16628 /* Set LOC to the next operation that is not a DW_OP_nop operation. In the case
16629 all operations from LOC are nops, move to the last one. Insert in NOPS all
16630 operations that are skipped. */
16631
16632 static void
16633 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16634 hash_set<dw_loc_descr_ref> &nops)
16635 {
16636 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16637 {
16638 nops.add (loc);
16639 loc = loc->dw_loc_next;
16640 }
16641 }
16642
16643 /* Helper for loc_descr_without_nops: free the location description operation
16644 P. */
16645
16646 bool
16647 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16648 {
16649 ggc_free (loc);
16650 return true;
16651 }
16652
16653 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16654 finishes LOC. */
16655
16656 static void
16657 loc_descr_without_nops (dw_loc_descr_ref &loc)
16658 {
16659 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16660 return;
16661
16662 /* Set of all DW_OP_nop operations we remove. */
16663 hash_set<dw_loc_descr_ref> nops;
16664
16665 /* First, strip all prefix NOP operations in order to keep the head of the
16666 operations list. */
16667 loc_descr_to_next_no_nop (loc, nops);
16668
16669 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16670 {
16671 /* For control flow operations: strip "prefix" nops in destination
16672 labels. */
16673 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16674 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16675 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16676 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16677
16678 /* Do the same for the operations that follow, then move to the next
16679 iteration. */
16680 if (cur->dw_loc_next != NULL)
16681 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16682 cur = cur->dw_loc_next;
16683 }
16684
16685 nops.traverse<void *, free_loc_descr> (NULL);
16686 }
16687
16688
16689 struct dwarf_procedure_info;
16690
16691 /* Helper structure for location descriptions generation. */
16692 struct loc_descr_context
16693 {
16694 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16695 NULL_TREE if DW_OP_push_object_address is invalid for this location
16696 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16697 tree context_type;
16698 /* The ..._DECL node that should be translated as a
16699 DW_OP_push_object_address operation. */
16700 tree base_decl;
16701 /* Information about the DWARF procedure we are currently generating. NULL if
16702 we are not generating a DWARF procedure. */
16703 struct dwarf_procedure_info *dpi;
16704 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
16705 by consumer. Used for DW_TAG_generic_subrange attributes. */
16706 bool placeholder_arg;
16707 /* True if PLACEHOLDER_EXPR has been seen. */
16708 bool placeholder_seen;
16709 };
16710
16711 /* DWARF procedures generation
16712
16713 DWARF expressions (aka. location descriptions) are used to encode varying
16714 quantities such as sizes or offsets.  Such computations can have redundant parts
16715 that can be factorized in order to reduce the size of the output debug
16716 information. This is the whole point of DWARF procedures.
16717
16718 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16719 already factorized into functions ("size functions") in order to handle very
16720 big and complex types. Such functions are quite simple: they have integral
16721 arguments, they return an integral result and their body contains only a
16722 return statement with arithmetic expressions. This is the only kind of
16723 function we are interested in translating into DWARF procedures, here.
16724
16725 DWARF expressions and DWARF procedures are executed using a stack, so we have
16726 to define some calling convention for them to interact. Let's say that:
16727
16728 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16729 all arguments in reverse order (right-to-left) so that when the DWARF
16730 procedure execution starts, the first argument is the top of the stack.
16731
16732 - Then, when returning, the DWARF procedure must have consumed all arguments
16733 on the stack, must have pushed the result and touched nothing else.
16734
16735 - Each integral argument and the result have integral types and can be held
16736 in a single stack slot.
16737
16738 - We call "frame offset" the number of stack slots that are "under DWARF
16739 procedure control": it includes the argument slots, the temporaries and
16740 the result slot. Thus, it is equal to the number of arguments when the
16741 procedure execution starts and must be equal to one (the result) when it
16742 returns. */
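/* As a minimal sketch (illustrative only, reusing helpers defined earlier in
   this file), a caller following the convention above for a two-argument
   DWARF procedure pushes the arguments right-to-left and then emits a
   DW_OP_call4 referencing the procedure's DIE; the stack goes from
   [..., arg1, arg0] at the call to [..., result] on return.  */
#if 0 /* Hypothetical usage sketch, not part of the implementation.  */
static dw_loc_descr_ref
example_call_dwarf_proc (dw_die_ref dwarf_proc,
			 dw_loc_descr_ref arg0, dw_loc_descr_ref arg1)
{
  dw_loc_descr_ref ret = NULL, call;

  /* Arguments are evaluated right-to-left so that the first argument is on
     top of the stack when the procedure starts executing.  */
  add_loc_descr (&ret, arg1);
  add_loc_descr (&ret, arg0);

  /* The call consumes the argument slots and leaves only the result.  */
  call = new_loc_descr (DW_OP_call4, 0, 0);
  call->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
  call->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
  call->dw_loc_oprnd1.v.val_die_ref.external = 0;
  add_loc_descr (&ret, call);
  return ret;
}
#endif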
16743
16744 /* Helper structure used when generating operations for a DWARF procedure. */
16745 struct dwarf_procedure_info
16746 {
16747 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16748 currently translated. */
16749 tree fndecl;
16750 /* The number of arguments FNDECL takes. */
16751 unsigned args_count;
16752 };
16753
16754 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16755 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16756 equate it to this DIE. */
16757
16758 static dw_die_ref
16759 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16760 dw_die_ref parent_die)
16761 {
16762 dw_die_ref dwarf_proc_die;
16763
16764 if ((dwarf_version < 3 && dwarf_strict)
16765 || location == NULL)
16766 return NULL;
16767
16768 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16769 if (fndecl)
16770 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16771 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16772 return dwarf_proc_die;
16773 }
16774
16775 /* Return whether TYPE is a supported type as a DWARF procedure argument
16776 type or return type (we handle only scalar types and pointer types that
16777 aren't wider than the DWARF expression evaluation stack). */
16778
16779 static bool
16780 is_handled_procedure_type (tree type)
16781 {
16782 return ((INTEGRAL_TYPE_P (type)
16783 || TREE_CODE (type) == OFFSET_TYPE
16784 || TREE_CODE (type) == POINTER_TYPE)
16785 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16786 }
16787
16788 /* Helper for resolve_args_picking: do the same but stop when coming across
16789 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16790 offset *before* evaluating the corresponding operation. */
16791
16792 static bool
16793 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16794 struct dwarf_procedure_info *dpi,
16795 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16796 {
16797 /* The "frame_offset" identifier is already used to name a macro... */
16798 unsigned frame_offset_ = initial_frame_offset;
16799 dw_loc_descr_ref l;
16800
16801 for (l = loc; l != NULL;)
16802 {
16803 bool existed;
16804 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16805
16806 /* If we already met this node, there is nothing to compute anymore. */
16807 if (existed)
16808 {
16809 /* Make sure that the stack size is consistent wherever the execution
16810 flow comes from. */
16811 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16812 break;
16813 }
16814 l_frame_offset = frame_offset_;
16815
16816 /* If needed, relocate the picking offset with respect to the frame
16817 offset. */
16818 if (l->frame_offset_rel)
16819 {
16820 unsigned HOST_WIDE_INT off;
16821 switch (l->dw_loc_opc)
16822 {
16823 case DW_OP_pick:
16824 off = l->dw_loc_oprnd1.v.val_unsigned;
16825 break;
16826 case DW_OP_dup:
16827 off = 0;
16828 break;
16829 case DW_OP_over:
16830 off = 1;
16831 break;
16832 default:
16833 gcc_unreachable ();
16834 }
16835 /* frame_offset_ is the size of the current stack frame, including
16836 incoming arguments. Besides, the arguments are pushed
16837 right-to-left. Thus, in order to access the Nth argument from
16838 this operation node, the picking has to skip temporaries *plus*
16839 one stack slot per argument (0 for the first one, 1 for the second
16840 one, etc.).
16841
16842 The targeted argument number (N) is already set as the operand,
16843 and the number of temporaries can be computed with:
16844 frame_offset_ - dpi->args_count.  */
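	  /* For example (hypothetical numbers): with dpi->args_count == 2 and
	     frame_offset_ == 3 (both arguments plus one temporary on the
	     stack), accessing argument 0 yields off = 0 + (3 - 2) = 1, which
	     becomes DW_OP_over, while accessing argument 1 yields off = 2,
	     which stays DW_OP_pick 2.  */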
16845 off += frame_offset_ - dpi->args_count;
16846
16847 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
16848 if (off > 255)
16849 return false;
16850
16851 if (off == 0)
16852 {
16853 l->dw_loc_opc = DW_OP_dup;
16854 l->dw_loc_oprnd1.v.val_unsigned = 0;
16855 }
16856 else if (off == 1)
16857 {
16858 l->dw_loc_opc = DW_OP_over;
16859 l->dw_loc_oprnd1.v.val_unsigned = 0;
16860 }
16861 else
16862 {
16863 l->dw_loc_opc = DW_OP_pick;
16864 l->dw_loc_oprnd1.v.val_unsigned = off;
16865 }
16866 }
16867
16868 /* Update frame_offset according to the effect the current operation has
16869 on the stack. */
16870 switch (l->dw_loc_opc)
16871 {
16872 case DW_OP_deref:
16873 case DW_OP_swap:
16874 case DW_OP_rot:
16875 case DW_OP_abs:
16876 case DW_OP_neg:
16877 case DW_OP_not:
16878 case DW_OP_plus_uconst:
16879 case DW_OP_skip:
16880 case DW_OP_reg0:
16881 case DW_OP_reg1:
16882 case DW_OP_reg2:
16883 case DW_OP_reg3:
16884 case DW_OP_reg4:
16885 case DW_OP_reg5:
16886 case DW_OP_reg6:
16887 case DW_OP_reg7:
16888 case DW_OP_reg8:
16889 case DW_OP_reg9:
16890 case DW_OP_reg10:
16891 case DW_OP_reg11:
16892 case DW_OP_reg12:
16893 case DW_OP_reg13:
16894 case DW_OP_reg14:
16895 case DW_OP_reg15:
16896 case DW_OP_reg16:
16897 case DW_OP_reg17:
16898 case DW_OP_reg18:
16899 case DW_OP_reg19:
16900 case DW_OP_reg20:
16901 case DW_OP_reg21:
16902 case DW_OP_reg22:
16903 case DW_OP_reg23:
16904 case DW_OP_reg24:
16905 case DW_OP_reg25:
16906 case DW_OP_reg26:
16907 case DW_OP_reg27:
16908 case DW_OP_reg28:
16909 case DW_OP_reg29:
16910 case DW_OP_reg30:
16911 case DW_OP_reg31:
16912 case DW_OP_bregx:
16913 case DW_OP_piece:
16914 case DW_OP_deref_size:
16915 case DW_OP_nop:
16916 case DW_OP_bit_piece:
16917 case DW_OP_implicit_value:
16918 case DW_OP_stack_value:
16919 break;
16920
16921 case DW_OP_addr:
16922 case DW_OP_const1u:
16923 case DW_OP_const1s:
16924 case DW_OP_const2u:
16925 case DW_OP_const2s:
16926 case DW_OP_const4u:
16927 case DW_OP_const4s:
16928 case DW_OP_const8u:
16929 case DW_OP_const8s:
16930 case DW_OP_constu:
16931 case DW_OP_consts:
16932 case DW_OP_dup:
16933 case DW_OP_over:
16934 case DW_OP_pick:
16935 case DW_OP_lit0:
16936 case DW_OP_lit1:
16937 case DW_OP_lit2:
16938 case DW_OP_lit3:
16939 case DW_OP_lit4:
16940 case DW_OP_lit5:
16941 case DW_OP_lit6:
16942 case DW_OP_lit7:
16943 case DW_OP_lit8:
16944 case DW_OP_lit9:
16945 case DW_OP_lit10:
16946 case DW_OP_lit11:
16947 case DW_OP_lit12:
16948 case DW_OP_lit13:
16949 case DW_OP_lit14:
16950 case DW_OP_lit15:
16951 case DW_OP_lit16:
16952 case DW_OP_lit17:
16953 case DW_OP_lit18:
16954 case DW_OP_lit19:
16955 case DW_OP_lit20:
16956 case DW_OP_lit21:
16957 case DW_OP_lit22:
16958 case DW_OP_lit23:
16959 case DW_OP_lit24:
16960 case DW_OP_lit25:
16961 case DW_OP_lit26:
16962 case DW_OP_lit27:
16963 case DW_OP_lit28:
16964 case DW_OP_lit29:
16965 case DW_OP_lit30:
16966 case DW_OP_lit31:
16967 case DW_OP_breg0:
16968 case DW_OP_breg1:
16969 case DW_OP_breg2:
16970 case DW_OP_breg3:
16971 case DW_OP_breg4:
16972 case DW_OP_breg5:
16973 case DW_OP_breg6:
16974 case DW_OP_breg7:
16975 case DW_OP_breg8:
16976 case DW_OP_breg9:
16977 case DW_OP_breg10:
16978 case DW_OP_breg11:
16979 case DW_OP_breg12:
16980 case DW_OP_breg13:
16981 case DW_OP_breg14:
16982 case DW_OP_breg15:
16983 case DW_OP_breg16:
16984 case DW_OP_breg17:
16985 case DW_OP_breg18:
16986 case DW_OP_breg19:
16987 case DW_OP_breg20:
16988 case DW_OP_breg21:
16989 case DW_OP_breg22:
16990 case DW_OP_breg23:
16991 case DW_OP_breg24:
16992 case DW_OP_breg25:
16993 case DW_OP_breg26:
16994 case DW_OP_breg27:
16995 case DW_OP_breg28:
16996 case DW_OP_breg29:
16997 case DW_OP_breg30:
16998 case DW_OP_breg31:
16999 case DW_OP_fbreg:
17000 case DW_OP_push_object_address:
17001 case DW_OP_call_frame_cfa:
17002 case DW_OP_GNU_variable_value:
17003 ++frame_offset_;
17004 break;
17005
17006 case DW_OP_drop:
17007 case DW_OP_xderef:
17008 case DW_OP_and:
17009 case DW_OP_div:
17010 case DW_OP_minus:
17011 case DW_OP_mod:
17012 case DW_OP_mul:
17013 case DW_OP_or:
17014 case DW_OP_plus:
17015 case DW_OP_shl:
17016 case DW_OP_shr:
17017 case DW_OP_shra:
17018 case DW_OP_xor:
17019 case DW_OP_bra:
17020 case DW_OP_eq:
17021 case DW_OP_ge:
17022 case DW_OP_gt:
17023 case DW_OP_le:
17024 case DW_OP_lt:
17025 case DW_OP_ne:
17026 case DW_OP_regx:
17027 case DW_OP_xderef_size:
17028 --frame_offset_;
17029 break;
17030
17031 case DW_OP_call2:
17032 case DW_OP_call4:
17033 case DW_OP_call_ref:
17034 {
17035 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17036 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17037
17038 if (stack_usage == NULL)
17039 return false;
17040 frame_offset_ += *stack_usage;
17041 break;
17042 }
17043
17044 case DW_OP_implicit_pointer:
17045 case DW_OP_entry_value:
17046 case DW_OP_const_type:
17047 case DW_OP_regval_type:
17048 case DW_OP_deref_type:
17049 case DW_OP_convert:
17050 case DW_OP_reinterpret:
17051 case DW_OP_form_tls_address:
17052 case DW_OP_GNU_push_tls_address:
17053 case DW_OP_GNU_uninit:
17054 case DW_OP_GNU_encoded_addr:
17055 case DW_OP_GNU_implicit_pointer:
17056 case DW_OP_GNU_entry_value:
17057 case DW_OP_GNU_const_type:
17058 case DW_OP_GNU_regval_type:
17059 case DW_OP_GNU_deref_type:
17060 case DW_OP_GNU_convert:
17061 case DW_OP_GNU_reinterpret:
17062 case DW_OP_GNU_parameter_ref:
17063 /* loc_list_from_tree will probably not output these operations for
17064 size functions, so assume they will not appear here. */
17065 /* Fall through... */
17066
17067 default:
17068 gcc_unreachable ();
17069 }
17070
17071 /* Now, follow the control flow (except subroutine calls). */
17072 switch (l->dw_loc_opc)
17073 {
17074 case DW_OP_bra:
17075 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17076 frame_offsets))
17077 return false;
17078 /* Fall through. */
17079
17080 case DW_OP_skip:
17081 l = l->dw_loc_oprnd1.v.val_loc;
17082 break;
17083
17084 case DW_OP_stack_value:
17085 return true;
17086
17087 default:
17088 l = l->dw_loc_next;
17089 break;
17090 }
17091 }
17092
17093 return true;
17094 }
17095
17096 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17097 operations) in order to resolve the operand of DW_OP_pick operations that
17098 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17099 offset *before* LOC is executed.  Return true if all relocations were
17100 successful. */
17101
17102 static bool
17103 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17104 struct dwarf_procedure_info *dpi)
17105 {
17106 /* Associate to all visited operations the frame offset *before* evaluating
17107 this operation. */
17108 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17109
17110 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17111 frame_offsets);
17112 }
17113
17114 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17115 Return NULL if it is not possible. */
17116
17117 static dw_die_ref
17118 function_to_dwarf_procedure (tree fndecl)
17119 {
17120 struct loc_descr_context ctx;
17121 struct dwarf_procedure_info dpi;
17122 dw_die_ref dwarf_proc_die;
17123 tree tree_body = DECL_SAVED_TREE (fndecl);
17124 dw_loc_descr_ref loc_body, epilogue;
17125
17126 tree cursor;
17127 unsigned i;
17128
17129 /* Do not generate multiple DWARF procedures for the same function
17130 declaration. */
17131 dwarf_proc_die = lookup_decl_die (fndecl);
17132 if (dwarf_proc_die != NULL)
17133 return dwarf_proc_die;
17134
17135 /* DWARF procedures are available starting with the DWARFv3 standard. */
17136 if (dwarf_version < 3 && dwarf_strict)
17137 return NULL;
17138
17139 /* We handle only functions for which we still have a body, that return a
17140 supported type and that take arguments with supported types.  Note that
17141 there is no point translating functions that return nothing. */
17142 if (tree_body == NULL_TREE
17143 || DECL_RESULT (fndecl) == NULL_TREE
17144 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17145 return NULL;
17146
17147 for (cursor = DECL_ARGUMENTS (fndecl);
17148 cursor != NULL_TREE;
17149 cursor = TREE_CHAIN (cursor))
17150 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17151 return NULL;
17152
17153 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17154 if (TREE_CODE (tree_body) != RETURN_EXPR)
17155 return NULL;
17156 tree_body = TREE_OPERAND (tree_body, 0);
17157 if (TREE_CODE (tree_body) != MODIFY_EXPR
17158 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17159 return NULL;
17160 tree_body = TREE_OPERAND (tree_body, 1);
17161
17162 /* Try to translate the body expression itself. Note that this will probably
17163 cause an infinite recursion if its call graph has a cycle. This is very
17164 unlikely for size functions, however, so don't bother with such things at
17165 the moment. */
17166 ctx.context_type = NULL_TREE;
17167 ctx.base_decl = NULL_TREE;
17168 ctx.dpi = &dpi;
17169 ctx.placeholder_arg = false;
17170 ctx.placeholder_seen = false;
17171 dpi.fndecl = fndecl;
17172 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17173 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17174 if (!loc_body)
17175 return NULL;
17176
17177 /* After evaluating all operands in "loc_body", we should still have on the
17178 stack all arguments plus the desired function result (top of the stack).
17179 Generate code in order to keep only the result in our stack frame. */
17180 epilogue = NULL;
17181 for (i = 0; i < dpi.args_count; ++i)
17182 {
17183 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17184 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17185 op_couple->dw_loc_next->dw_loc_next = epilogue;
17186 epilogue = op_couple;
17187 }
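  /* Illustration: with two arguments, the epilogue built above is
     DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop, taking the stack from
     [arg1, arg0, result] to [arg1, result] and finally to [result].  */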
17188 add_loc_descr (&loc_body, epilogue);
17189 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17190 return NULL;
17191
17192 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17193 because they are considered useful.  Now that there is an epilogue, they are
17194 not needed anymore, so give it another try. */
17195 loc_descr_without_nops (loc_body);
17196
17197 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17198 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17199 though, given that size functions do not come from source, so they should
17200 not have a dedicated DW_TAG_subprogram DIE. */
17201 dwarf_proc_die
17202 = new_dwarf_proc_die (loc_body, fndecl,
17203 get_context_die (DECL_CONTEXT (fndecl)));
17204
17205 /* The called DWARF procedure consumes one stack slot per argument and
17206 returns one stack slot. */
17207 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17208
17209 return dwarf_proc_die;
17210 }
17211
17212
17213 /* Generate Dwarf location list representing LOC.
17214 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned;
17215 if WANT_ADDRESS is 1, an expression computing the address of LOC will be returned;
17216 if WANT_ADDRESS is 2, an expression computing an address usable in a location
17217 will be returned (i.e. DW_OP_reg can be used
17218 to refer to register values).
17219
17220 CONTEXT provides information to customize the location descriptions
17221 generation. Its context_type field specifies what type is implicitly
17222 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17223 will not be generated.
17224
17225 Its DPI field determines whether we are generating a DWARF expression for a
17226 DWARF procedure, so PARM_DECL references are processed specifically.
17227
17228 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17229 and dpi fields were null. */
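/* For instance (purely illustrative), for a variable stored in memory at
   some address A: WANT_ADDRESS == 0 asks for an expression whose evaluation
   yields the variable's value (a load from A), WANT_ADDRESS == 1 asks for an
   expression that yields A itself, and WANT_ADDRESS == 2 additionally allows
   register and implicit descriptions that have no address at all.  */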
17230
17231 static dw_loc_list_ref
17232 loc_list_from_tree_1 (tree loc, int want_address,
17233 struct loc_descr_context *context)
17234 {
17235 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17236 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17237 int have_address = 0;
17238 enum dwarf_location_atom op;
17239
17240 /* ??? Most of the time we do not take proper care of sign/zero
17241 extending the values.  Hopefully this won't be a real
17242 problem... */
17243
17244 if (context != NULL
17245 && context->base_decl == loc
17246 && want_address == 0)
17247 {
17248 if (dwarf_version >= 3 || !dwarf_strict)
17249 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17250 NULL, NULL, NULL);
17251 else
17252 return NULL;
17253 }
17254
17255 switch (TREE_CODE (loc))
17256 {
17257 case ERROR_MARK:
17258 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17259 return 0;
17260
17261 case PLACEHOLDER_EXPR:
17262 /* This case involves extracting fields from an object to determine the
17263 position of other fields. It is supposed to appear only as the first
17264 operand of COMPONENT_REF nodes and to reference precisely the type
17265 that the context allows. */
17266 if (context != NULL
17267 && TREE_TYPE (loc) == context->context_type
17268 && want_address >= 1)
17269 {
17270 if (dwarf_version >= 3 || !dwarf_strict)
17271 {
17272 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17273 have_address = 1;
17274 break;
17275 }
17276 else
17277 return NULL;
17278 }
17279 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17280 the single argument passed by consumer. */
17281 else if (context != NULL
17282 && context->placeholder_arg
17283 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17284 && want_address == 0)
17285 {
17286 ret = new_loc_descr (DW_OP_pick, 0, 0);
17287 ret->frame_offset_rel = 1;
17288 context->placeholder_seen = true;
17289 break;
17290 }
17291 else
17292 expansion_failed (loc, NULL_RTX,
17293 "PLACEHOLDER_EXPR for an unexpected type");
17294 break;
17295
17296 case CALL_EXPR:
17297 {
17298 const int nargs = call_expr_nargs (loc);
17299 tree callee = get_callee_fndecl (loc);
17300 int i;
17301 dw_die_ref dwarf_proc;
17302
17303 if (callee == NULL_TREE)
17304 goto call_expansion_failed;
17305
17306 /* We handle only functions that return an integer. */
17307 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17308 goto call_expansion_failed;
17309
17310 dwarf_proc = function_to_dwarf_procedure (callee);
17311 if (dwarf_proc == NULL)
17312 goto call_expansion_failed;
17313
17314 /* Evaluate arguments right-to-left so that the first argument will
17315 be the top-most one on the stack. */
17316 for (i = nargs - 1; i >= 0; --i)
17317 {
17318 dw_loc_descr_ref loc_descr
17319 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17320 context);
17321
17322 if (loc_descr == NULL)
17323 goto call_expansion_failed;
17324
17325 add_loc_descr (&ret, loc_descr);
17326 }
17327
17328 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17329 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17330 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17331 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17332 add_loc_descr (&ret, ret1);
17333 break;
17334
17335 call_expansion_failed:
17336 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17337 /* There are no opcodes for these operations. */
17338 return 0;
17339 }
17340
17341 case PREINCREMENT_EXPR:
17342 case PREDECREMENT_EXPR:
17343 case POSTINCREMENT_EXPR:
17344 case POSTDECREMENT_EXPR:
17345 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17346 /* There are no opcodes for these operations. */
17347 return 0;
17348
17349 case ADDR_EXPR:
17350 /* If we already want an address, see if there is INDIRECT_REF inside
17351 e.g. for &this->field. */
17352 if (want_address)
17353 {
17354 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17355 (loc, want_address == 2, context);
17356 if (list_ret)
17357 have_address = 1;
17358 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17359 && (ret = cst_pool_loc_descr (loc)))
17360 have_address = 1;
17361 }
17362 /* Otherwise, process the argument and look for the address. */
17363 if (!list_ret && !ret)
17364 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17365 else
17366 {
17367 if (want_address)
17368 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17369 return NULL;
17370 }
17371 break;
17372
17373 case VAR_DECL:
17374 if (DECL_THREAD_LOCAL_P (loc))
17375 {
17376 rtx rtl;
17377 enum dwarf_location_atom tls_op;
17378 enum dtprel_bool dtprel = dtprel_false;
17379
17380 if (targetm.have_tls)
17381 {
17382 /* If this is not defined, we have no way to emit the
17383 data. */
17384 if (!targetm.asm_out.output_dwarf_dtprel)
17385 return 0;
17386
17387 /* The way DW_OP_GNU_push_tls_address is specified, we
17388 can only look up addresses of objects in the current
17389 module. We used DW_OP_addr as first op, but that's
17390 wrong, because DW_OP_addr is relocated by the debug
17391 info consumer, while DW_OP_GNU_push_tls_address
17392 operand shouldn't be. */
17393 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17394 return 0;
17395 dtprel = dtprel_true;
17396 /* We check for DWARF 5 here because gdb did not implement
17397 DW_OP_form_tls_address until after 7.12. */
17398 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17399 : DW_OP_GNU_push_tls_address);
17400 }
17401 else
17402 {
17403 if (!targetm.emutls.debug_form_tls_address
17404 || !(dwarf_version >= 3 || !dwarf_strict))
17405 return 0;
17406 /* We stuffed the control variable into the DECL_VALUE_EXPR
17407 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17408 no longer appear in gimple code. We used the control
17409 variable specifically so that we could pick it up here. */
17410 loc = DECL_VALUE_EXPR (loc);
17411 tls_op = DW_OP_form_tls_address;
17412 }
17413
17414 rtl = rtl_for_decl_location (loc);
17415 if (rtl == NULL_RTX)
17416 return 0;
17417
17418 if (!MEM_P (rtl))
17419 return 0;
17420 rtl = XEXP (rtl, 0);
17421 if (! CONSTANT_P (rtl))
17422 return 0;
17423
17424 ret = new_addr_loc_descr (rtl, dtprel);
17425 ret1 = new_loc_descr (tls_op, 0, 0);
17426 add_loc_descr (&ret, ret1);
17427
17428 have_address = 1;
17429 break;
17430 }
17431 /* FALLTHRU */
17432
17433 case PARM_DECL:
17434 if (context != NULL && context->dpi != NULL
17435 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17436 {
17437 /* We are generating code for a DWARF procedure and we want to access
17438 one of its arguments: find the appropriate argument offset and let
17439 the resolve_args_picking pass compute the offset that complies
17440 with the stack frame size. */
17441 unsigned i = 0;
17442 tree cursor;
17443
17444 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17445 cursor != NULL_TREE && cursor != loc;
17446 cursor = TREE_CHAIN (cursor), ++i)
17447 ;
17448 /* If we are translating a DWARF procedure, all referenced parameters
17449 must belong to the current function. */
17450 gcc_assert (cursor != NULL_TREE);
17451
17452 ret = new_loc_descr (DW_OP_pick, i, 0);
17453 ret->frame_offset_rel = 1;
17454 break;
17455 }
17456 /* FALLTHRU */
17457
17458 case RESULT_DECL:
17459 if (DECL_HAS_VALUE_EXPR_P (loc))
17460 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17461 want_address, context);
17462 /* FALLTHRU */
17463
17464 case FUNCTION_DECL:
17465 {
17466 rtx rtl;
17467 var_loc_list *loc_list = lookup_decl_loc (loc);
17468
17469 if (loc_list && loc_list->first)
17470 {
17471 list_ret = dw_loc_list (loc_list, loc, want_address);
17472 have_address = want_address != 0;
17473 break;
17474 }
17475 rtl = rtl_for_decl_location (loc);
17476 if (rtl == NULL_RTX)
17477 {
17478 if (TREE_CODE (loc) != FUNCTION_DECL
17479 && early_dwarf
17480 && current_function_decl
17481 && want_address != 1
17482 && ! DECL_IGNORED_P (loc)
17483 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
17484 || POINTER_TYPE_P (TREE_TYPE (loc)))
17485 && DECL_CONTEXT (loc) == current_function_decl
17486 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
17487 <= DWARF2_ADDR_SIZE))
17488 {
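 /* As a last resort in early DWARF, refer to the variable itself with
    DW_OP_GNU_variable_value so the consumer can obtain its value at
    evaluation time; point the operand at the variable's DIE when one
    already exists, otherwise at the decl so it can be resolved later.  */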
17489 dw_die_ref ref = lookup_decl_die (loc);
17490 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
17491 if (ref)
17492 {
17493 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17494 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
17495 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
17496 }
17497 else
17498 {
17499 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
17500 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
17501 }
17502 break;
17503 }
17504 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17505 return 0;
17506 }
17507 else if (CONST_INT_P (rtl))
17508 {
17509 HOST_WIDE_INT val = INTVAL (rtl);
17510 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17511 val &= GET_MODE_MASK (DECL_MODE (loc));
17512 ret = int_loc_descriptor (val);
17513 }
17514 else if (GET_CODE (rtl) == CONST_STRING)
17515 {
17516 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17517 return 0;
17518 }
17519 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17520 ret = new_addr_loc_descr (rtl, dtprel_false);
17521 else
17522 {
17523 machine_mode mode, mem_mode;
17524
17525 /* Certain constructs can only be represented at top-level. */
17526 if (want_address == 2)
17527 {
17528 ret = loc_descriptor (rtl, VOIDmode,
17529 VAR_INIT_STATUS_INITIALIZED);
17530 have_address = 1;
17531 }
17532 else
17533 {
17534 mode = GET_MODE (rtl);
17535 mem_mode = VOIDmode;
17536 if (MEM_P (rtl))
17537 {
17538 mem_mode = mode;
17539 mode = get_address_mode (rtl);
17540 rtl = XEXP (rtl, 0);
17541 have_address = 1;
17542 }
17543 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17544 VAR_INIT_STATUS_INITIALIZED);
17545 }
17546 if (!ret)
17547 expansion_failed (loc, rtl,
17548 "failed to produce loc descriptor for rtl");
17549 }
17550 }
17551 break;
17552
17553 case MEM_REF:
17554 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17555 {
17556 have_address = 1;
17557 goto do_plus;
17558 }
17559 /* Fallthru. */
17560 case INDIRECT_REF:
17561 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17562 have_address = 1;
17563 break;
17564
17565 case TARGET_MEM_REF:
17566 case SSA_NAME:
17567 case DEBUG_EXPR_DECL:
17568 return NULL;
17569
17570 case COMPOUND_EXPR:
17571 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17572 context);
17573
17574 CASE_CONVERT:
17575 case VIEW_CONVERT_EXPR:
17576 case SAVE_EXPR:
17577 case MODIFY_EXPR:
17578 case NON_LVALUE_EXPR:
17579 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17580 context);
17581
17582 case COMPONENT_REF:
17583 case BIT_FIELD_REF:
17584 case ARRAY_REF:
17585 case ARRAY_RANGE_REF:
17586 case REALPART_EXPR:
17587 case IMAGPART_EXPR:
17588 {
17589 tree obj, offset;
17590 HOST_WIDE_INT bitsize, bitpos, bytepos;
17591 machine_mode mode;
17592 int unsignedp, reversep, volatilep = 0;
17593
17594 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17595 &unsignedp, &reversep, &volatilep);
17596
17597 gcc_assert (obj != loc);
17598
17599 list_ret = loc_list_from_tree_1 (obj,
17600 want_address == 2
17601 && !bitpos && !offset ? 2 : 1,
17602 context);
17603 /* TODO: We can extract the value of a small expression via shifting even
17604 for nonzero bitpos. */
17605 if (list_ret == 0)
17606 return 0;
17607 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
17608 {
17609 expansion_failed (loc, NULL_RTX,
17610 "bitfield access");
17611 return 0;
17612 }
17613
17614 if (offset != NULL_TREE)
17615 {
17616 /* Variable offset. */
17617 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17618 if (list_ret1 == 0)
17619 return 0;
17620 add_loc_list (&list_ret, list_ret1);
17621 if (!list_ret)
17622 return 0;
17623 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17624 }
17625
17626 bytepos = bitpos / BITS_PER_UNIT;
17627 if (bytepos > 0)
17628 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
17629 else if (bytepos < 0)
17630 loc_list_plus_const (list_ret, bytepos);
17631
17632 have_address = 1;
17633 break;
17634 }
17635
17636 case INTEGER_CST:
17637 if ((want_address || !tree_fits_shwi_p (loc))
17638 && (ret = cst_pool_loc_descr (loc)))
17639 have_address = 1;
17640 else if (want_address == 2
17641 && tree_fits_shwi_p (loc)
17642 && (ret = address_of_int_loc_descriptor
17643 (int_size_in_bytes (TREE_TYPE (loc)),
17644 tree_to_shwi (loc))))
17645 have_address = 1;
17646 else if (tree_fits_shwi_p (loc))
17647 ret = int_loc_descriptor (tree_to_shwi (loc));
17648 else if (tree_fits_uhwi_p (loc))
17649 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17650 else
17651 {
17652 expansion_failed (loc, NULL_RTX,
17653 "Integer operand is not host integer");
17654 return 0;
17655 }
17656 break;
17657
17658 case CONSTRUCTOR:
17659 case REAL_CST:
17660 case STRING_CST:
17661 case COMPLEX_CST:
17662 if ((ret = cst_pool_loc_descr (loc)))
17663 have_address = 1;
17664 else if (TREE_CODE (loc) == CONSTRUCTOR)
17665 {
17666 tree type = TREE_TYPE (loc);
17667 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17668 unsigned HOST_WIDE_INT offset = 0;
17669 unsigned HOST_WIDE_INT cnt;
17670 constructor_elt *ce;
17671
17672 if (TREE_CODE (type) == RECORD_TYPE)
17673 {
17674 /* This is very limited, but it's enough to output
17675 pointers to member functions, as long as the
17676 referenced function is defined in the current
17677 translation unit. */
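 /* Walk the fields in order: DW_OP_piece pads any gap before a field
    and terminates each field's value, and we bail out on anything we
    cannot describe (bit-fields, out-of-order fields, or values we
    fail to expand).  */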
17678 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17679 {
17680 tree val = ce->value;
17681
17682 tree field = ce->index;
17683
17684 if (val)
17685 STRIP_NOPS (val);
17686
17687 if (!field || DECL_BIT_FIELD (field))
17688 {
17689 expansion_failed (loc, NULL_RTX,
17690 "bitfield in record type constructor");
17691 size = offset = (unsigned HOST_WIDE_INT)-1;
17692 ret = NULL;
17693 break;
17694 }
17695
17696 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17697 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17698 gcc_assert (pos + fieldsize <= size);
17699 if (pos < offset)
17700 {
17701 expansion_failed (loc, NULL_RTX,
17702 "out-of-order fields in record constructor");
17703 size = offset = (unsigned HOST_WIDE_INT)-1;
17704 ret = NULL;
17705 break;
17706 }
17707 if (pos > offset)
17708 {
17709 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17710 add_loc_descr (&ret, ret1);
17711 offset = pos;
17712 }
17713 if (val && fieldsize != 0)
17714 {
17715 ret1 = loc_descriptor_from_tree (val, want_address, context);
17716 if (!ret1)
17717 {
17718 expansion_failed (loc, NULL_RTX,
17719 "unsupported expression in field");
17720 size = offset = (unsigned HOST_WIDE_INT)-1;
17721 ret = NULL;
17722 break;
17723 }
17724 add_loc_descr (&ret, ret1);
17725 }
17726 if (fieldsize)
17727 {
17728 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17729 add_loc_descr (&ret, ret1);
17730 offset = pos + fieldsize;
17731 }
17732 }
17733
17734 if (offset != size)
17735 {
17736 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17737 add_loc_descr (&ret, ret1);
17738 offset = size;
17739 }
17740
17741 have_address = !!want_address;
17742 }
17743 else
17744 expansion_failed (loc, NULL_RTX,
17745 "constructor of non-record type");
17746 }
17747 else
17748 /* We can construct small constants here using int_loc_descriptor. */
17749 expansion_failed (loc, NULL_RTX,
17750 "constructor or constant not in constant pool");
17751 break;
17752
17753 case TRUTH_AND_EXPR:
17754 case TRUTH_ANDIF_EXPR:
17755 case BIT_AND_EXPR:
17756 op = DW_OP_and;
17757 goto do_binop;
17758
17759 case TRUTH_XOR_EXPR:
17760 case BIT_XOR_EXPR:
17761 op = DW_OP_xor;
17762 goto do_binop;
17763
17764 case TRUTH_OR_EXPR:
17765 case TRUTH_ORIF_EXPR:
17766 case BIT_IOR_EXPR:
17767 op = DW_OP_or;
17768 goto do_binop;
17769
17770 case FLOOR_DIV_EXPR:
17771 case CEIL_DIV_EXPR:
17772 case ROUND_DIV_EXPR:
17773 case TRUNC_DIV_EXPR:
17774 case EXACT_DIV_EXPR:
17775 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17776 return 0;
17777 op = DW_OP_div;
17778 goto do_binop;
17779
17780 case MINUS_EXPR:
17781 op = DW_OP_minus;
17782 goto do_binop;
17783
17784 case FLOOR_MOD_EXPR:
17785 case CEIL_MOD_EXPR:
17786 case ROUND_MOD_EXPR:
17787 case TRUNC_MOD_EXPR:
17788 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17789 {
17790 op = DW_OP_mod;
17791 goto do_binop;
17792 }
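 /* For signed operands, compute the remainder as a - (a / b) * b: with
    "a b" on the stack, two DW_OP_over operations leave "a b a b", then
    DW_OP_div, DW_OP_mul and DW_OP_minus reduce it to a - (a / b) * b.  */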
17793 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17794 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17795 if (list_ret == 0 || list_ret1 == 0)
17796 return 0;
17797
17798 add_loc_list (&list_ret, list_ret1);
17799 if (list_ret == 0)
17800 return 0;
17801 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17802 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17803 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17804 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17805 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17806 break;
17807
17808 case MULT_EXPR:
17809 op = DW_OP_mul;
17810 goto do_binop;
17811
17812 case LSHIFT_EXPR:
17813 op = DW_OP_shl;
17814 goto do_binop;
17815
17816 case RSHIFT_EXPR:
17817 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17818 goto do_binop;
17819
17820 case POINTER_PLUS_EXPR:
17821 case PLUS_EXPR:
17822 do_plus:
17823 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17824 {
17825 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17826 smarter to encode their opposite. The DW_OP_plus_uconst operation
17827 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17828 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17829 bytes, Y being the size of the operation that pushes the opposite
17830 of the addend. So let's choose the smallest representation. */
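 /* For instance, assuming a 4-byte DWARF address size, an addend of -1
    wraps to 0xffffffff: DW_OP_plus_uconst would cost 1 + 5 bytes (the
    ULEB128 encoding of 0xffffffff takes 5 bytes), whereas the
    "DW_OP_lit1; DW_OP_minus" pattern costs only 2 bytes.  */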
17831 const tree tree_addend = TREE_OPERAND (loc, 1);
17832 offset_int wi_addend;
17833 HOST_WIDE_INT shwi_addend;
17834 dw_loc_descr_ref loc_naddend;
17835
17836 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17837 if (list_ret == 0)
17838 return 0;
17839
17840 /* Try to get the literal to push. It is the opposite of the addend,
17841 so as we rely on wrapping during DWARF evaluation, first decode
17842 the literal as a "DWARF-sized" signed number. */
17843 wi_addend = wi::to_offset (tree_addend);
17844 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
17845 shwi_addend = wi_addend.to_shwi ();
17846 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
17847 ? int_loc_descriptor (-shwi_addend)
17848 : NULL;
17849
17850 if (loc_naddend != NULL
17851 && ((unsigned) size_of_uleb128 (shwi_addend)
17852 > size_of_loc_descr (loc_naddend)))
17853 {
17854 add_loc_descr_to_each (list_ret, loc_naddend);
17855 add_loc_descr_to_each (list_ret,
17856 new_loc_descr (DW_OP_minus, 0, 0));
17857 }
17858 else
17859 {
17860 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
17861 {
17862 loc_naddend = loc_cur;
17863 loc_cur = loc_cur->dw_loc_next;
17864 ggc_free (loc_naddend);
17865 }
17866 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
17867 }
17868 break;
17869 }
17870
17871 op = DW_OP_plus;
17872 goto do_binop;
17873
17874 case LE_EXPR:
17875 op = DW_OP_le;
17876 goto do_comp_binop;
17877
17878 case GE_EXPR:
17879 op = DW_OP_ge;
17880 goto do_comp_binop;
17881
17882 case LT_EXPR:
17883 op = DW_OP_lt;
17884 goto do_comp_binop;
17885
17886 case GT_EXPR:
17887 op = DW_OP_gt;
17888 goto do_comp_binop;
17889
17890 do_comp_binop:
17891 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
17892 {
17893 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
17894 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
17895 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
17896 TREE_CODE (loc));
17897 break;
17898 }
17899 else
17900 goto do_binop;
17901
17902 case EQ_EXPR:
17903 op = DW_OP_eq;
17904 goto do_binop;
17905
17906 case NE_EXPR:
17907 op = DW_OP_ne;
17908 goto do_binop;
17909
17910 do_binop:
17911 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17912 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17913 if (list_ret == 0 || list_ret1 == 0)
17914 return 0;
17915
17916 add_loc_list (&list_ret, list_ret1);
17917 if (list_ret == 0)
17918 return 0;
17919 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
17920 break;
17921
17922 case TRUTH_NOT_EXPR:
17923 case BIT_NOT_EXPR:
17924 op = DW_OP_not;
17925 goto do_unop;
17926
17927 case ABS_EXPR:
17928 op = DW_OP_abs;
17929 goto do_unop;
17930
17931 case NEGATE_EXPR:
17932 op = DW_OP_neg;
17933 goto do_unop;
17934
17935 do_unop:
17936 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17937 if (list_ret == 0)
17938 return 0;
17939
17940 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
17941 break;
17942
17943 case MIN_EXPR:
17944 case MAX_EXPR:
17945 {
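 /* Lower MIN_EXPR/MAX_EXPR into a conditional so that the COND_EXPR
    handling below can be reused: MIN (a, b) becomes a > b ? b : a and
    MAX (a, b) becomes a < b ? b : a.  */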
17946 const enum tree_code code =
17947 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
17948
17949 loc = build3 (COND_EXPR, TREE_TYPE (loc),
17950 build2 (code, integer_type_node,
17951 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
17952 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
17953 }
17954
17955 /* fall through */
17956
17957 case COND_EXPR:
17958 {
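 /* The resulting expression has the shape:
      <cond> DW_OP_bra L1 <else-value> DW_OP_skip L2
      L1: <then-value> L2: DW_OP_nop
    where the trailing DW_OP_nop only serves as a landing pad for the
    skip.  */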
17959 dw_loc_descr_ref lhs
17960 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
17961 dw_loc_list_ref rhs
17962 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
17963 dw_loc_descr_ref bra_node, jump_node, tmp;
17964
17965 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17966 if (list_ret == 0 || lhs == 0 || rhs == 0)
17967 return 0;
17968
17969 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
17970 add_loc_descr_to_each (list_ret, bra_node);
17971
17972 add_loc_list (&list_ret, rhs);
17973 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
17974 add_loc_descr_to_each (list_ret, jump_node);
17975
17976 add_loc_descr_to_each (list_ret, lhs);
17977 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
17978 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
17979
17980 /* ??? Need a node to point the skip at. Use a nop. */
17981 tmp = new_loc_descr (DW_OP_nop, 0, 0);
17982 add_loc_descr_to_each (list_ret, tmp);
17983 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
17984 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
17985 }
17986 break;
17987
17988 case FIX_TRUNC_EXPR:
17989 return 0;
17990
17991 default:
17992 /* Leave front-end specific codes as simply unknown. This comes
17993 up, for instance, with the C STMT_EXPR. */
17994 if ((unsigned int) TREE_CODE (loc)
17995 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
17996 {
17997 expansion_failed (loc, NULL_RTX,
17998 "language specific tree node");
17999 return 0;
18000 }
18001
18002 /* Otherwise this is a generic code; we should just list all of
18003 these explicitly. We forgot one. */
18004 if (flag_checking)
18005 gcc_unreachable ();
18006
18007 /* In a release build, we want to degrade gracefully: better to
18008 generate incomplete debugging information than to crash. */
18009 return NULL;
18010 }
18011
18012 if (!ret && !list_ret)
18013 return 0;
18014
18015 if (want_address == 2 && !have_address
18016 && (dwarf_version >= 4 || !dwarf_strict))
18017 {
18018 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18019 {
18020 expansion_failed (loc, NULL_RTX,
18021 "DWARF address size mismatch");
18022 return 0;
18023 }
18024 if (ret)
18025 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18026 else
18027 add_loc_descr_to_each (list_ret,
18028 new_loc_descr (DW_OP_stack_value, 0, 0));
18029 have_address = 1;
18030 }
18031 /* Show if we can't fill the request for an address. */
18032 if (want_address && !have_address)
18033 {
18034 expansion_failed (loc, NULL_RTX,
18035 "Want address and only have value");
18036 return 0;
18037 }
18038
18039 gcc_assert (!ret || !list_ret);
18040
18041 /* If we've got an address and don't want one, dereference. */
18042 if (!want_address && have_address)
18043 {
18044 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18045
18046 if (size > DWARF2_ADDR_SIZE || size == -1)
18047 {
18048 expansion_failed (loc, NULL_RTX,
18049 "DWARF address size mismatch");
18050 return 0;
18051 }
18052 else if (size == DWARF2_ADDR_SIZE)
18053 op = DW_OP_deref;
18054 else
18055 op = DW_OP_deref_size;
18056
18057 if (ret)
18058 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18059 else
18060 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18061 }
18062 if (ret)
18063 list_ret = new_loc_list (ret, NULL, NULL, NULL);
18064
18065 return list_ret;
18066 }
18067
18068 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18069 expressions. */
18070
18071 static dw_loc_list_ref
18072 loc_list_from_tree (tree loc, int want_address,
18073 struct loc_descr_context *context)
18074 {
18075 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18076
18077 for (dw_loc_list_ref loc_cur = result;
18078 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18079 loc_descr_without_nops (loc_cur->expr);
18080 return result;
18081 }
18082
18083 /* Same as above but return only single location expression. */
18084 static dw_loc_descr_ref
18085 loc_descriptor_from_tree (tree loc, int want_address,
18086 struct loc_descr_context *context)
18087 {
18088 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18089 if (!ret)
18090 return NULL;
18091 if (ret->dw_loc_next)
18092 {
18093 expansion_failed (loc, NULL_RTX,
18094 "Location list where only loc descriptor needed");
18095 return NULL;
18096 }
18097 return ret->expr;
18098 }
18099
18100 /* Given a value, round it up to the lowest multiple of `boundary'
18101 which is not less than the value itself. */
18102
18103 static inline HOST_WIDE_INT
18104 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18105 {
18106 return (((value + boundary - 1) / boundary) * boundary);
18107 }
18108
18109 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18110 pointer to the declared type for the relevant field variable, or return
18111 `integer_type_node' if the given node turns out to be an
18112 ERROR_MARK node. */
18113
18114 static inline tree
18115 field_type (const_tree decl)
18116 {
18117 tree type;
18118
18119 if (TREE_CODE (decl) == ERROR_MARK)
18120 return integer_type_node;
18121
18122 type = DECL_BIT_FIELD_TYPE (decl);
18123 if (type == NULL_TREE)
18124 type = TREE_TYPE (decl);
18125
18126 return type;
18127 }
18128
18129 /* Given a pointer to a tree node, return the alignment in bits for
18130 it, or else return BITS_PER_WORD if the node actually turns out to
18131 be an ERROR_MARK node. */
18132
18133 static inline unsigned
18134 simple_type_align_in_bits (const_tree type)
18135 {
18136 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18137 }
18138
18139 static inline unsigned
18140 simple_decl_align_in_bits (const_tree decl)
18141 {
18142 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18143 }
18144
18145 /* Return the result of rounding T up to ALIGN. */
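 /* For instance, round_up_to_align (10, 8) yields 16.  */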
18146
18147 static inline offset_int
18148 round_up_to_align (const offset_int &t, unsigned int align)
18149 {
18150 return wi::udiv_trunc (t + align - 1, align) * align;
18151 }
18152
18153 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18154 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18155 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18156 if we fail to return the size in one of these two forms. */
18157
18158 static dw_loc_descr_ref
18159 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18160 {
18161 tree tree_size;
18162 struct loc_descr_context ctx;
18163
18164 /* Prefer to return a constant integer, if possible. */
18165 *cst_size = int_size_in_bytes (type);
18166 if (*cst_size != -1)
18167 return NULL;
18168
18169 ctx.context_type = const_cast<tree> (type);
18170 ctx.base_decl = NULL_TREE;
18171 ctx.dpi = NULL;
18172 ctx.placeholder_arg = false;
18173 ctx.placeholder_seen = false;
18174
18175 type = TYPE_MAIN_VARIANT (type);
18176 tree_size = TYPE_SIZE_UNIT (type);
18177 return ((tree_size != NULL_TREE)
18178 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18179 : NULL);
18180 }
18181
18182 /* Helper structure for RECORD_TYPE processing. */
18183 struct vlr_context
18184 {
18185 /* Root RECORD_TYPE. It is needed to generate data member location
18186 descriptions in variable-length records (VLR), but also to cope with
18187 variants, which are composed of nested structures multiplexed with
18188 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18189 function processing a FIELD_DECL, it is required to be non-null. */
18190 tree struct_type;
18191 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18192 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18193 this variant part as part of the root record (in storage units). For
18194 regular records, it must be NULL_TREE. */
18195 tree variant_part_offset;
18196 };
18197
18198 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18199 addressed byte of the "containing object" for the given FIELD_DECL. If
18200 possible, return a native constant through CST_OFFSET (in which case NULL is
18201 returned); otherwise return a DWARF expression that computes the offset.
18202
18203 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18204 that offset is, either because the argument turns out to be a pointer to an
18205 ERROR_MARK node, or because the offset expression is too complex for us.
18206
18207 CTX is required: see the comment for VLR_CONTEXT. */
18208
18209 static dw_loc_descr_ref
18210 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18211 HOST_WIDE_INT *cst_offset)
18212 {
18213 tree tree_result;
18214 dw_loc_list_ref loc_result;
18215
18216 *cst_offset = 0;
18217
18218 if (TREE_CODE (decl) == ERROR_MARK)
18219 return NULL;
18220 else
18221 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18222
18223 /* We cannot handle variable bit offsets at the moment, so abort if that's the
18224 case. */
18225 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18226 return NULL;
18227
18228 #ifdef PCC_BITFIELD_TYPE_MATTERS
18229 /* We used to handle only constant offsets in all cases. Now, we
18230 properly handle dynamic byte offsets only when PCC bitfield type doesn't
18231 matter. */
18232 if (PCC_BITFIELD_TYPE_MATTERS
18233 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18234 {
18235 offset_int object_offset_in_bits;
18236 offset_int object_offset_in_bytes;
18237 offset_int bitpos_int;
18238 tree type;
18239 tree field_size_tree;
18240 offset_int deepest_bitpos;
18241 offset_int field_size_in_bits;
18242 unsigned int type_align_in_bits;
18243 unsigned int decl_align_in_bits;
18244 offset_int type_size_in_bits;
18245
18246 bitpos_int = wi::to_offset (bit_position (decl));
18247 type = field_type (decl);
18248 type_size_in_bits = offset_int_type_size_in_bits (type);
18249 type_align_in_bits = simple_type_align_in_bits (type);
18250
18251 field_size_tree = DECL_SIZE (decl);
18252
18253 /* The size could be unspecified if there was an error, or for
18254 a flexible array member. */
18255 if (!field_size_tree)
18256 field_size_tree = bitsize_zero_node;
18257
18258 /* If the size of the field is not constant, use the type size. */
18259 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18260 field_size_in_bits = wi::to_offset (field_size_tree);
18261 else
18262 field_size_in_bits = type_size_in_bits;
18263
18264 decl_align_in_bits = simple_decl_align_in_bits (decl);
18265
18266 /* The GCC front-end doesn't make any attempt to keep track of the
18267 starting bit offset (relative to the start of the containing
18268 structure type) of the hypothetical "containing object" for a
18269 bit-field. Thus, when computing the byte offset value for the
18270 start of the "containing object" of a bit-field, we must deduce
18271 this information on our own. This can be rather tricky to do in
18272 some cases. For example, handling the following structure type
18273 definition when compiling for an i386/i486 target (which only
18274 aligns long long's to 32-bit boundaries) can be very tricky:
18275
18276 struct S { int field1; long long field2:31; };
18277
18278 Fortunately, there is a simple rule-of-thumb which can be used
18279 in such cases. When compiling for an i386/i486, GCC will
18280 allocate 8 bytes for the structure shown above. It decides to
18281 do this based upon one simple rule for bit-field allocation.
18282 GCC allocates each "containing object" for each bit-field at
18283 the first (i.e. lowest addressed) legitimate alignment boundary
18284 (based upon the required minimum alignment for the declared
18285 type of the field) which it can possibly use, subject to the
18286 condition that there is still enough available space remaining
18287 in the containing object (when allocated at the selected point)
18288 to fully accommodate all of the bits of the bit-field itself.
18289
18290 This simple rule makes it obvious why GCC allocates 8 bytes for
18291 each object of the structure type shown above. When looking
18292 for a place to allocate the "containing object" for `field2',
18293 the compiler simply tries to allocate a 64-bit "containing
18294 object" at each successive 32-bit boundary (starting at zero)
18295 until it finds a place to allocate that 64-bit field such that
18296 at least 31 contiguous (and previously unallocated) bits remain
18297 within that selected 64 bit field. (As it turns out, for the
18298 example above, the compiler finds it is OK to allocate the
18299 "containing object" 64-bit field at bit-offset zero within the
18300 structure type.)
18301
18302 Here we attempt to work backwards from the limited set of facts
18303 we're given, and we try to deduce from those facts, where GCC
18304 must have believed that the containing object started (within
18305 the structure type). The value we deduce is then used (by the
18306 callers of this routine) to generate DW_AT_location and
18307 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18308 the case of DW_AT_location, regular fields as well). */
18309
18310 /* Figure out the bit-distance from the start of the structure to
18311 the "deepest" bit of the bit-field. */
18312 deepest_bitpos = bitpos_int + field_size_in_bits;
18313
18314 /* This is the tricky part. Use some fancy footwork to deduce
18315 where the lowest addressed bit of the containing object must
18316 be. */
18317 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18318
18319 /* Round up to type_align by default. This works best for
18320 bitfields. */
18321 object_offset_in_bits
18322 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18323
18324 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18325 {
18326 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18327
18328 /* Round up to decl_align instead. */
18329 object_offset_in_bits
18330 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18331 }
18332
18333 object_offset_in_bytes
18334 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18335 if (ctx->variant_part_offset == NULL_TREE)
18336 {
18337 *cst_offset = object_offset_in_bytes.to_shwi ();
18338 return NULL;
18339 }
18340 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18341 }
18342 else
18343 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18344 tree_result = byte_position (decl);
18345
18346 if (ctx->variant_part_offset != NULL_TREE)
18347 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18348 ctx->variant_part_offset, tree_result);
18349
18350 /* If the byte offset is a constant, it's simpler to handle a native
18351 constant rather than a DWARF expression. */
18352 if (TREE_CODE (tree_result) == INTEGER_CST)
18353 {
18354 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18355 return NULL;
18356 }
18357 struct loc_descr_context loc_ctx = {
18358 ctx->struct_type, /* context_type */
18359 NULL_TREE, /* base_decl */
18360 NULL, /* dpi */
18361 false, /* placeholder_arg */
18362 false /* placeholder_seen */
18363 };
18364 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18365
18366 /* We want a DWARF expression: abort if we only have a location list with
18367 multiple elements. */
18368 if (!loc_result || !single_element_loc_list_p (loc_result))
18369 return NULL;
18370 else
18371 return loc_result->expr;
18372 }
18373 \f
18374 /* The following routines define various Dwarf attributes and any data
18375 associated with them. */
18376
18377 /* Add a location description attribute value to a DIE.
18378
18379 This emits location attributes suitable for whole variables and
18380 whole parameters. Note that the location attributes for struct fields are
18381 generated by the routine `data_member_location_attribute' below. */
18382
18383 static inline void
18384 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18385 dw_loc_list_ref descr)
18386 {
18387 if (descr == 0)
18388 return;
18389 if (single_element_loc_list_p (descr))
18390 add_AT_loc (die, attr_kind, descr->expr);
18391 else
18392 add_AT_loc_list (die, attr_kind, descr);
18393 }
18394
18395 /* Add DW_AT_accessibility attribute to DIE if needed. */
18396
18397 static void
18398 add_accessibility_attribute (dw_die_ref die, tree decl)
18399 {
18400 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18401 children, otherwise the default is DW_ACCESS_public. In DWARF2
18402 the default has always been DW_ACCESS_public. */
18403 if (TREE_PROTECTED (decl))
18404 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18405 else if (TREE_PRIVATE (decl))
18406 {
18407 if (dwarf_version == 2
18408 || die->die_parent == NULL
18409 || die->die_parent->die_tag != DW_TAG_class_type)
18410 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18411 }
18412 else if (dwarf_version > 2
18413 && die->die_parent
18414 && die->die_parent->die_tag == DW_TAG_class_type)
18415 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18416 }
18417
18418 /* Attach the specialized form of location attribute used for data members of
18419 struct and union types. In the special case of a FIELD_DECL node which
18420 represents a bit-field, the "offset" part of this special location
18421 descriptor must indicate the distance in bytes from the lowest-addressed
18422 byte of the containing struct or union type to the lowest-addressed byte of
18423 the "containing object" for the bit-field. (See the `field_byte_offset'
18424 function above).
18425
18426 For any given bit-field, the "containing object" is a hypothetical object
18427 (of some integral or enum type) within which the given bit-field lives. The
18428 type of this hypothetical "containing object" is always the same as the
18429 declared type of the individual bit-field itself (for GCC anyway... the
18430 DWARF spec doesn't actually mandate this). Note that it is the size (in
18431 bytes) of the hypothetical "containing object" which will be given in the
18432 DW_AT_byte_size attribute for this bit-field. (See the
18433 `byte_size_attribute' function below.) It is also used when calculating the
18434 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18435 function below.)
18436
18437 CTX is required: see the comment for VLR_CONTEXT. */
18438
18439 static void
18440 add_data_member_location_attribute (dw_die_ref die,
18441 tree decl,
18442 struct vlr_context *ctx)
18443 {
18444 HOST_WIDE_INT offset;
18445 dw_loc_descr_ref loc_descr = 0;
18446
18447 if (TREE_CODE (decl) == TREE_BINFO)
18448 {
18449 /* We're working on the TAG_inheritance for a base class. */
18450 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18451 {
18452 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18453 aren't at a fixed offset from all (sub)objects of the same
18454 type. We need to extract the appropriate offset from our
18455 vtable. The following dwarf expression means
18456
18457 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18458
18459 This is specific to the V3 ABI, of course. */
18460
18461 dw_loc_descr_ref tmp;
18462
18463 /* Make a copy of the object address. */
18464 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18465 add_loc_descr (&loc_descr, tmp);
18466
18467 /* Extract the vtable address. */
18468 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18469 add_loc_descr (&loc_descr, tmp);
18470
18471 /* Calculate the address of the offset. */
18472 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18473 gcc_assert (offset < 0);
18474
18475 tmp = int_loc_descriptor (-offset);
18476 add_loc_descr (&loc_descr, tmp);
18477 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18478 add_loc_descr (&loc_descr, tmp);
18479
18480 /* Extract the offset. */
18481 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18482 add_loc_descr (&loc_descr, tmp);
18483
18484 /* Add it to the object address. */
18485 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18486 add_loc_descr (&loc_descr, tmp);
18487 }
18488 else
18489 offset = tree_to_shwi (BINFO_OFFSET (decl));
18490 }
18491 else
18492 {
18493 loc_descr = field_byte_offset (decl, ctx, &offset);
18494
18495 /* If loc_descr is available then we know the field offset is dynamic.
18496 However, GDB does not handle dynamic field offsets very well at the
18497 moment. */
18498 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18499 {
18500 loc_descr = NULL;
18501 offset = 0;
18502 }
18503
18504 /* Data member location evaluation starts with the base address on the
18505 stack. Compute the field offset and add it to this base address. */
18506 else if (loc_descr != NULL)
18507 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18508 }
18509
18510 if (! loc_descr)
18511 {
18512 /* While DW_AT_data_bit_offset has been added already in DWARF4,
18513 e.g. GDB only added support for it in November 2016. For DWARF5
18514 we need newer debug info consumers anyway. We might change this
18515 to dwarf_version >= 4 once most consumers have caught up. */
18516 if (dwarf_version >= 5
18517 && TREE_CODE (decl) == FIELD_DECL
18518 && DECL_BIT_FIELD_TYPE (decl))
18519 {
18520 tree off = bit_position (decl);
18521 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18522 {
18523 remove_AT (die, DW_AT_byte_size);
18524 remove_AT (die, DW_AT_bit_offset);
18525 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18526 return;
18527 }
18528 }
18529 if (dwarf_version > 2)
18530 {
18531 /* Don't need to output a location expression, just the constant. */
18532 if (offset < 0)
18533 add_AT_int (die, DW_AT_data_member_location, offset);
18534 else
18535 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18536 return;
18537 }
18538 else
18539 {
18540 enum dwarf_location_atom op;
18541
18542 /* The DWARF2 standard says that we should assume that the structure
18543 address is already on the stack, so we can specify a structure
18544 field address by using DW_OP_plus_uconst. */
18545 op = DW_OP_plus_uconst;
18546 loc_descr = new_loc_descr (op, offset, 0);
18547 }
18548 }
18549
18550 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18551 }
18552
18553 /* Writes integer values to dw_vec_const array. */
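 /* The SIZE least significant bytes of VAL are stored, least significant
    byte first; extract_int below performs the inverse operation.  */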
18554
18555 static void
18556 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18557 {
18558 while (size != 0)
18559 {
18560 *dest++ = val & 0xff;
18561 val >>= 8;
18562 --size;
18563 }
18564 }
18565
18566 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18567
18568 static HOST_WIDE_INT
18569 extract_int (const unsigned char *src, unsigned int size)
18570 {
18571 HOST_WIDE_INT val = 0;
18572
18573 src += size;
18574 while (size != 0)
18575 {
18576 val <<= 8;
18577 val |= *--src & 0xff;
18578 --size;
18579 }
18580 return val;
18581 }
18582
18583 /* Writes wide_int values to dw_vec_const array. */
18584
18585 static void
18586 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18587 {
18588 int i;
18589
18590 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18591 {
18592 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18593 return;
18594 }
18595
18596 /* We'd have to extend this code to support odd sizes. */
18597 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18598
18599 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18600
18601 if (WORDS_BIG_ENDIAN)
18602 for (i = n - 1; i >= 0; i--)
18603 {
18604 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18605 dest += sizeof (HOST_WIDE_INT);
18606 }
18607 else
18608 for (i = 0; i < n; i++)
18609 {
18610 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18611 dest += sizeof (HOST_WIDE_INT);
18612 }
18613 }
18614
18615 /* Writes floating point values to dw_vec_const array. */
18616
18617 static void
18618 insert_float (const_rtx rtl, unsigned char *array)
18619 {
18620 long val[4];
18621 int i;
18622 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18623
18624 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
18625
18626 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18627 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
18628 {
18629 insert_int (val[i], 4, array);
18630 array += 4;
18631 }
18632 }
18633
18634 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18635 does not have a "location" either in memory or in a register. These
18636 things can arise in GNU C when a constant is passed as an actual parameter
18637 to an inlined function. They can also arise in C++ where declared
18638 constants do not necessarily get memory "homes". */
18639
18640 static bool
18641 add_const_value_attribute (dw_die_ref die, rtx rtl)
18642 {
18643 switch (GET_CODE (rtl))
18644 {
18645 case CONST_INT:
18646 {
18647 HOST_WIDE_INT val = INTVAL (rtl);
18648
18649 if (val < 0)
18650 add_AT_int (die, DW_AT_const_value, val);
18651 else
18652 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18653 }
18654 return true;
18655
18656 case CONST_WIDE_INT:
18657 {
18658 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18659 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18660 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18661 wide_int w = wi::zext (w1, prec);
18662 add_AT_wide (die, DW_AT_const_value, w);
18663 }
18664 return true;
18665
18666 case CONST_DOUBLE:
18667 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18668 floating-point constant. A CONST_DOUBLE is used whenever the
18669 constant requires more than one word in order to be adequately
18670 represented. */
18671 if (TARGET_SUPPORTS_WIDE_INT == 0
18672 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
18673 add_AT_double (die, DW_AT_const_value,
18674 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18675 else
18676 {
18677 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18678 unsigned int length = GET_MODE_SIZE (mode);
18679 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18680
18681 insert_float (rtl, array);
18682 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18683 }
18684 return true;
18685
18686 case CONST_VECTOR:
18687 {
18688 machine_mode mode = GET_MODE (rtl);
18689 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18690 unsigned int length = CONST_VECTOR_NUNITS (rtl);
18691 unsigned char *array
18692 = ggc_vec_alloc<unsigned char> (length * elt_size);
18693 unsigned int i;
18694 unsigned char *p;
18695 machine_mode imode = GET_MODE_INNER (mode);
18696
18697 switch (GET_MODE_CLASS (mode))
18698 {
18699 case MODE_VECTOR_INT:
18700 for (i = 0, p = array; i < length; i++, p += elt_size)
18701 {
18702 rtx elt = CONST_VECTOR_ELT (rtl, i);
18703 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18704 }
18705 break;
18706
18707 case MODE_VECTOR_FLOAT:
18708 for (i = 0, p = array; i < length; i++, p += elt_size)
18709 {
18710 rtx elt = CONST_VECTOR_ELT (rtl, i);
18711 insert_float (elt, p);
18712 }
18713 break;
18714
18715 default:
18716 gcc_unreachable ();
18717 }
18718
18719 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18720 }
18721 return true;
18722
18723 case CONST_STRING:
18724 if (dwarf_version >= 4 || !dwarf_strict)
18725 {
18726 dw_loc_descr_ref loc_result;
18727 resolve_one_addr (&rtl);
18728 rtl_addr:
18729 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18730 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18731 add_AT_loc (die, DW_AT_location, loc_result);
18732 vec_safe_push (used_rtx_array, rtl);
18733 return true;
18734 }
18735 return false;
18736
18737 case CONST:
18738 if (CONSTANT_P (XEXP (rtl, 0)))
18739 return add_const_value_attribute (die, XEXP (rtl, 0));
18740 /* FALLTHROUGH */
18741 case SYMBOL_REF:
18742 if (!const_ok_for_output (rtl))
18743 return false;
18744 /* FALLTHROUGH */
18745 case LABEL_REF:
18746 if (dwarf_version >= 4 || !dwarf_strict)
18747 goto rtl_addr;
18748 return false;
18749
18750 case PLUS:
18751 /* In cases where an inlined instance of an inline function is passed
18752 the address of an `auto' variable (which is local to the caller) we
18753 can get a situation where the DECL_RTL of the artificial local
18754 variable (for the inlining) which acts as a stand-in for the
18755 corresponding formal parameter (of the inline function) will look
18756 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18757 exactly a compile-time constant expression, but it isn't the address
18758 of the (artificial) local variable either. Rather, it represents the
18759 *value* which the artificial local variable always has during its
18760 lifetime. We currently have no way to represent such quasi-constant
18761 values in Dwarf, so for now we just punt and generate nothing. */
18762 return false;
18763
18764 case HIGH:
18765 case CONST_FIXED:
18766 return false;
18767
18768 case MEM:
18769 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18770 && MEM_READONLY_P (rtl)
18771 && GET_MODE (rtl) == BLKmode)
18772 {
18773 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18774 return true;
18775 }
18776 return false;
18777
18778 default:
18779 /* No other kinds of rtx should be possible here. */
18780 gcc_unreachable ();
18781 }
18782 return false;
18783 }
18784
18785 /* Determine whether the evaluation of EXPR references any variables
18786 or functions which aren't otherwise used (and therefore may not be
18787 output). */
18788 static tree
18789 reference_to_unused (tree * tp, int * walk_subtrees,
18790 void * data ATTRIBUTE_UNUSED)
18791 {
18792 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18793 *walk_subtrees = 0;
18794
18795 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18796 && ! TREE_ASM_WRITTEN (*tp))
18797 return *tp;
18798 /* ??? The C++ FE emits debug information for using decls, so
18799 putting gcc_unreachable here falls over. See PR31899. For now
18800 be conservative. */
18801 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18802 return *tp;
18803 else if (VAR_P (*tp))
18804 {
18805 varpool_node *node = varpool_node::get (*tp);
18806 if (!node || !node->definition)
18807 return *tp;
18808 }
18809 else if (TREE_CODE (*tp) == FUNCTION_DECL
18810 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18811 {
18812 /* The call graph machinery must have finished analyzing,
18813 optimizing and gimplifying the CU by now.
18814 So if *TP has no call graph node associated
18815 to it, it means *TP will not be emitted. */
18816 if (!cgraph_node::get (*tp))
18817 return *tp;
18818 }
18819 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18820 return *tp;
18821
18822 return NULL_TREE;
18823 }
18824
18825 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18826 for use in a later add_const_value_attribute call. */
18827
18828 static rtx
18829 rtl_for_decl_init (tree init, tree type)
18830 {
18831 rtx rtl = NULL_RTX;
18832
18833 STRIP_NOPS (init);
18834
18835 /* If a variable is initialized with a string constant without embedded
18836 zeros, build CONST_STRING. */
18837 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
18838 {
18839 tree enttype = TREE_TYPE (type);
18840 tree domain = TYPE_DOMAIN (type);
18841 scalar_int_mode mode;
18842
18843 if (is_int_mode (TYPE_MODE (enttype), &mode)
18844 && GET_MODE_SIZE (mode) == 1
18845 && domain
18846 && integer_zerop (TYPE_MIN_VALUE (domain))
18847 && compare_tree_int (TYPE_MAX_VALUE (domain),
18848 TREE_STRING_LENGTH (init) - 1) == 0
18849 && ((size_t) TREE_STRING_LENGTH (init)
18850 == strlen (TREE_STRING_POINTER (init)) + 1))
18851 {
18852 rtl = gen_rtx_CONST_STRING (VOIDmode,
18853 ggc_strdup (TREE_STRING_POINTER (init)));
18854 rtl = gen_rtx_MEM (BLKmode, rtl);
18855 MEM_READONLY_P (rtl) = 1;
18856 }
18857 }
18858 /* Other aggregates, and complex values, could be represented using
18859 CONCAT: FIXME! */
18860 else if (AGGREGATE_TYPE_P (type)
18861 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
18862 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
18863 || TREE_CODE (type) == COMPLEX_TYPE)
18864 ;
18865 /* Vectors only work if their mode is supported by the target.
18866 FIXME: generic vectors ought to work too. */
18867 else if (TREE_CODE (type) == VECTOR_TYPE
18868 && !VECTOR_MODE_P (TYPE_MODE (type)))
18869 ;
18870 /* If the initializer is something that we know will expand into an
18871 immediate RTL constant, expand it now. We must be careful not to
18872 reference variables which won't be output. */
18873 else if (initializer_constant_valid_p (init, type)
18874 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
18875 {
18876 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
18877 possible. */
18878 if (TREE_CODE (type) == VECTOR_TYPE)
18879 switch (TREE_CODE (init))
18880 {
18881 case VECTOR_CST:
18882 break;
18883 case CONSTRUCTOR:
18884 if (TREE_CONSTANT (init))
18885 {
18886 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
18887 bool constant_p = true;
18888 tree value;
18889 unsigned HOST_WIDE_INT ix;
18890
18891 /* Even when ctor is constant, it might contain non-*_CST
18892 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
18893 belong into VECTOR_CST nodes. */
18894 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
18895 if (!CONSTANT_CLASS_P (value))
18896 {
18897 constant_p = false;
18898 break;
18899 }
18900
18901 if (constant_p)
18902 {
18903 init = build_vector_from_ctor (type, elts);
18904 break;
18905 }
18906 }
18907 /* FALLTHRU */
18908
18909 default:
18910 return NULL;
18911 }
18912
18913 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
18914
18915 /* If expand_expr returns a MEM, it wasn't immediate. */
18916 gcc_assert (!rtl || !MEM_P (rtl));
18917 }
18918
18919 return rtl;
18920 }
18921
18922 /* Generate RTL for the variable DECL to represent its location. */
18923
18924 static rtx
18925 rtl_for_decl_location (tree decl)
18926 {
18927 rtx rtl;
18928
18929 /* Here we have to decide where we are going to say the parameter "lives"
18930 (as far as the debugger is concerned). We only have a couple of
18931 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
18932
18933 DECL_RTL normally indicates where the parameter lives during most of the
18934 activation of the function. If optimization is enabled however, this
18935 could be either NULL or else a pseudo-reg. Both of those cases indicate
18936 that the parameter doesn't really live anywhere (as far as the code
18937 generation parts of GCC are concerned) during most of the function's
18938 activation. That will happen (for example) if the parameter is never
18939 referenced within the function.
18940
18941 We could just generate a location descriptor here for all non-NULL
18942 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
18943 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
18944 where DECL_RTL is NULL or is a pseudo-reg.
18945
18946 Note however that we can only get away with using DECL_INCOMING_RTL as
18947 a backup substitute for DECL_RTL in certain limited cases. In cases
18948 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
18949 we can be sure that the parameter was passed using the same type as it is
18950 declared to have within the function, and that its DECL_INCOMING_RTL
18951 points us to a place where a value of that type is passed.
18952
18953 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
18954 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
18955 because in these cases DECL_INCOMING_RTL points us to a value of some
18956 type which is *different* from the type of the parameter itself. Thus,
18957 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
18958 such cases, the debugger would end up (for example) trying to fetch a
18959 `float' from a place which actually contains the first part of a
18960 `double'. That would lead to really incorrect and confusing
18961 output at debug-time.
18962
18963 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
18964 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
18965 are a couple of exceptions however. On little-endian machines we can
18966 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
18967 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
18968 an integral type that is smaller than TREE_TYPE (decl). These cases arise
18969 when (on a little-endian machine) a non-prototyped function has a
18970 parameter declared to be of type `short' or `char'. In such cases,
18971 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
18972 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
18973 passed `int' value. If the debugger then uses that address to fetch
18974 a `short' or a `char' (on a little-endian machine) the result will be
18975 the correct data, so we allow for such exceptional cases below.
18976
18977 Note that our goal here is to describe the place where the given formal
18978 parameter lives during most of the function's activation (i.e. between the
18979 end of the prologue and the start of the epilogue). We'll do that as best
18980 as we can. Note however that if the given formal parameter is modified
18981 sometime during the execution of the function, then a stack backtrace (at
18982 debug-time) will show the function as having been called with the *new*
18983 value rather than the value which was originally passed in. This happens
18984 rarely enough that it is not a major problem, but it *is* a problem, and
18985 I'd like to fix it.
18986
18987 A future version of dwarf2out.c may generate two additional attributes for
18988 any given DW_TAG_formal_parameter DIE which will describe the "passed
18989 type" and the "passed location" for the given formal parameter in addition
18990 to the attributes we now generate to indicate the "declared type" and the
18991 "active location" for each parameter. This additional set of attributes
18992 could be used by debuggers for stack backtraces. Separately, note that
18993 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
18994 This happens (for example) for inlined-instances of inline function formal
18995 parameters which are never referenced. This really shouldn't be
18996 happening. All PARM_DECL nodes should get valid non-NULL
18997 DECL_INCOMING_RTL values. FIXME. */
18998
18999 /* Use DECL_RTL as the "location" unless we find something better. */
19000 rtl = DECL_RTL_IF_SET (decl);
19001
19002 /* When generating abstract instances, ignore everything except
19003 constants, symbols living in memory, and symbols living in
19004 fixed registers. */
19005 if (! reload_completed)
19006 {
19007 if (rtl
19008 && (CONSTANT_P (rtl)
19009 || (MEM_P (rtl)
19010 && CONSTANT_P (XEXP (rtl, 0)))
19011 || (REG_P (rtl)
19012 && VAR_P (decl)
19013 && TREE_STATIC (decl))))
19014 {
19015 rtl = targetm.delegitimize_address (rtl);
19016 return rtl;
19017 }
19018 rtl = NULL_RTX;
19019 }
19020 else if (TREE_CODE (decl) == PARM_DECL)
19021 {
19022 if (rtl == NULL_RTX
19023 || is_pseudo_reg (rtl)
19024 || (MEM_P (rtl)
19025 && is_pseudo_reg (XEXP (rtl, 0))
19026 && DECL_INCOMING_RTL (decl)
19027 && MEM_P (DECL_INCOMING_RTL (decl))
19028 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19029 {
19030 tree declared_type = TREE_TYPE (decl);
19031 tree passed_type = DECL_ARG_TYPE (decl);
19032 machine_mode dmode = TYPE_MODE (declared_type);
19033 machine_mode pmode = TYPE_MODE (passed_type);
19034
19035 /* This decl represents a formal parameter which was optimized out.
19036 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19037 all cases where (rtl == NULL_RTX) just below. */
19038 if (dmode == pmode)
19039 rtl = DECL_INCOMING_RTL (decl);
19040 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19041 && SCALAR_INT_MODE_P (dmode)
19042 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
19043 && DECL_INCOMING_RTL (decl))
19044 {
19045 rtx inc = DECL_INCOMING_RTL (decl);
19046 if (REG_P (inc))
19047 rtl = inc;
19048 else if (MEM_P (inc))
19049 {
19050 if (BYTES_BIG_ENDIAN)
19051 rtl = adjust_address_nv (inc, dmode,
19052 GET_MODE_SIZE (pmode)
19053 - GET_MODE_SIZE (dmode));
19054 else
19055 rtl = inc;
19056 }
19057 }
19058 }
19059
19060 /* If the parm was passed in registers, but lives on the stack, then
19061 make a big endian correction if the mode of the type of the
19062 parameter is not the same as the mode of the rtl. */
19063 /* ??? This is the same series of checks that are made in dbxout.c before
19064 we reach the big endian correction code there. It isn't clear if all
19065 of these checks are necessary here, but keeping them all is the safe
19066 thing to do. */
19067 else if (MEM_P (rtl)
19068 && XEXP (rtl, 0) != const0_rtx
19069 && ! CONSTANT_P (XEXP (rtl, 0))
19070 /* Not passed in memory. */
19071 && !MEM_P (DECL_INCOMING_RTL (decl))
19072 /* Not passed by invisible reference. */
19073 && (!REG_P (XEXP (rtl, 0))
19074 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19075 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19076 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19077 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19078 #endif
19079 )
19080 /* Big endian correction check. */
19081 && BYTES_BIG_ENDIAN
19082 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19083 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
19084 < UNITS_PER_WORD))
19085 {
19086 machine_mode addr_mode = get_address_mode (rtl);
19087 int offset = (UNITS_PER_WORD
19088 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19089
19090 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19091 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19092 }
19093 }
19094 else if (VAR_P (decl)
19095 && rtl
19096 && MEM_P (rtl)
19097 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19098 {
19099 machine_mode addr_mode = get_address_mode (rtl);
19100 HOST_WIDE_INT offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19101 GET_MODE (rtl));
19102
19103 /* If a variable is declared "register" yet is smaller than
19104 a register, then if we store the variable to memory, it
19105 looks like we're storing a register-sized value, when in
19106 fact we are not. We need to adjust the offset of the
19107 storage location to reflect the actual value's bytes,
19108 else gdb will not be able to display it. */
19109 if (offset != 0)
19110 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19111 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19112 }
19113
19114 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19115 and will have been substituted directly into all expressions that use it.
19116 C does not have such a concept, but C++ and other languages do. */
19117 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19118 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19119
19120 if (rtl)
19121 rtl = targetm.delegitimize_address (rtl);
19122
19123 /* If we don't look past the constant pool, we risk emitting a
19124 reference to a constant pool entry that isn't referenced from
19125 code, and thus is not emitted. */
19126 if (rtl)
19127 rtl = avoid_constant_pool_reference (rtl);
19128
19129 /* Try harder to get a rtl. If this symbol ends up not being emitted
19130 in the current CU, resolve_addr will remove the expression referencing
19131 it. */
19132 if (rtl == NULL_RTX
19133 && VAR_P (decl)
19134 && !DECL_EXTERNAL (decl)
19135 && TREE_STATIC (decl)
19136 && DECL_NAME (decl)
19137 && !DECL_HARD_REGISTER (decl)
19138 && DECL_MODE (decl) != VOIDmode)
19139 {
19140 rtl = make_decl_rtl_for_debug (decl);
19141 if (!MEM_P (rtl)
19142 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19143 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19144 rtl = NULL_RTX;
19145 }
19146
19147 return rtl;
19148 }
19149
19150 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19151 returned. If so, the decl for the COMMON block is returned, and the
19152 value is the offset into the common block for the symbol. */
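/* Illustrative sketch (hypothetical Fortran source, not from the GCC
   testsuite): given

       COMMON /BLK/ A, B
       REAL A, B

   the decl for B typically carries a DECL_VALUE_EXPR that is a
   COMPONENT_REF into the decl representing the whole /BLK/ block, so this
   function would return the decl for BLK and set *VALUE to 4, the byte
   offset of B within the block (assuming default 4-byte REALs).  */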
19153
19154 static tree
19155 fortran_common (tree decl, HOST_WIDE_INT *value)
19156 {
19157 tree val_expr, cvar;
19158 machine_mode mode;
19159 HOST_WIDE_INT bitsize, bitpos;
19160 tree offset;
19161 int unsignedp, reversep, volatilep = 0;
19162
19163 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19164 it does not have a value (the offset into the common area), or if it
19165 is thread local (as opposed to global) then it isn't common, and shouldn't
19166 be handled as such. */
19167 if (!VAR_P (decl)
19168 || !TREE_STATIC (decl)
19169 || !DECL_HAS_VALUE_EXPR_P (decl)
19170 || !is_fortran ())
19171 return NULL_TREE;
19172
19173 val_expr = DECL_VALUE_EXPR (decl);
19174 if (TREE_CODE (val_expr) != COMPONENT_REF)
19175 return NULL_TREE;
19176
19177 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19178 &unsignedp, &reversep, &volatilep);
19179
19180 if (cvar == NULL_TREE
19181 || !VAR_P (cvar)
19182 || DECL_ARTIFICIAL (cvar)
19183 || !TREE_PUBLIC (cvar))
19184 return NULL_TREE;
19185
19186 *value = 0;
19187 if (offset != NULL)
19188 {
19189 if (!tree_fits_shwi_p (offset))
19190 return NULL_TREE;
19191 *value = tree_to_shwi (offset);
19192 }
19193 if (bitpos != 0)
19194 *value += bitpos / BITS_PER_UNIT;
19195
19196 return cvar;
19197 }
19198
19199 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19200 data attribute for a variable or a parameter. We generate the
19201 DW_AT_const_value attribute only in those cases where the given variable
19202 or parameter does not have a true "location" either in memory or in a
19203 register. This can happen (for example) when a constant is passed as an
19204 actual argument in a call to an inline function. (It's possible that
19205 these things can crop up in other ways also.) Note that one type of
19206 constant value which can be passed into an inlined function is a constant
19207 pointer. This can happen for example if an actual argument in an inlined
19208 function call evaluates to a compile-time constant address.
19209
19210 CACHE_P is true if it is worth caching the location list for DECL,
19211 so that future calls can reuse it rather than regenerate it from scratch.
19212 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19213 since we will need to refer to them each time the function is inlined. */
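/* Illustrative sketch (hypothetical source, not from the GCC testsuite):
   if a call such as f (42) is inlined and the body of f never gives its
   parameter X a memory or register location, the DIE for X in the
   DW_TAG_inlined_subroutine instance can carry DW_AT_const_value 42
   instead of a DW_AT_location attribute.  */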
19214
19215 static bool
19216 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19217 {
19218 rtx rtl;
19219 dw_loc_list_ref list;
19220 var_loc_list *loc_list;
19221 cached_dw_loc_list *cache;
19222
19223 if (early_dwarf)
19224 return false;
19225
19226 if (TREE_CODE (decl) == ERROR_MARK)
19227 return false;
19228
19229 if (get_AT (die, DW_AT_location)
19230 || get_AT (die, DW_AT_const_value))
19231 return true;
19232
19233 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19234 || TREE_CODE (decl) == RESULT_DECL);
19235
19236 /* Try to get some constant RTL for this decl, and use that as the value of
19237 the location. */
19238
19239 rtl = rtl_for_decl_location (decl);
19240 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19241 && add_const_value_attribute (die, rtl))
19242 return true;
19243
19244 /* See if we have a single-element location list that is equivalent to
19245 a constant value. In that case it is better to use add_const_value_attribute
19246 rather than expanding the equivalent constant value. */
19247 loc_list = lookup_decl_loc (decl);
19248 if (loc_list
19249 && loc_list->first
19250 && loc_list->first->next == NULL
19251 && NOTE_P (loc_list->first->loc)
19252 && NOTE_VAR_LOCATION (loc_list->first->loc)
19253 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19254 {
19255 struct var_loc_node *node;
19256
19257 node = loc_list->first;
19258 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19259 if (GET_CODE (rtl) == EXPR_LIST)
19260 rtl = XEXP (rtl, 0);
19261 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19262 && add_const_value_attribute (die, rtl))
19263 return true;
19264 }
19265 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19266 list several times. See if we've already cached the contents. */
19267 list = NULL;
19268 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19269 cache_p = false;
19270 if (cache_p)
19271 {
19272 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19273 if (cache)
19274 list = cache->loc_list;
19275 }
19276 if (list == NULL)
19277 {
19278 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19279 NULL);
19280 /* It is usually worth caching this result if the decl is from
19281 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19282 if (cache_p && list && list->dw_loc_next)
19283 {
19284 cached_dw_loc_list **slot
19285 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19286 DECL_UID (decl),
19287 INSERT);
19288 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19289 cache->decl_id = DECL_UID (decl);
19290 cache->loc_list = list;
19291 *slot = cache;
19292 }
19293 }
19294 if (list)
19295 {
19296 add_AT_location_description (die, DW_AT_location, list);
19297 return true;
19298 }
19299 /* None of that worked, so it must not really have a location;
19300 try adding a constant value attribute from the DECL_INITIAL. */
19301 return tree_add_const_value_attribute_for_decl (die, decl);
19302 }
19303
19304 /* Helper function for tree_add_const_value_attribute. Natively encode
19305 initializer INIT into an array. Return true if successful. */
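/* Illustrative sketch (hypothetical initializer, not from the GCC
   testsuite): for

       static const int a[2] = { 1, 2 };

   on a little-endian target with 32-bit int, a successful call fills the
   8-byte array with 01 00 00 00 02 00 00 00, which can then become the
   block payload of a DW_AT_const_value attribute.  */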
19306
19307 static bool
19308 native_encode_initializer (tree init, unsigned char *array, int size)
19309 {
19310 tree type;
19311
19312 if (init == NULL_TREE)
19313 return false;
19314
19315 STRIP_NOPS (init);
19316 switch (TREE_CODE (init))
19317 {
19318 case STRING_CST:
19319 type = TREE_TYPE (init);
19320 if (TREE_CODE (type) == ARRAY_TYPE)
19321 {
19322 tree enttype = TREE_TYPE (type);
19323 scalar_int_mode mode;
19324
19325 if (!is_int_mode (TYPE_MODE (enttype), &mode)
19326 || GET_MODE_SIZE (mode) != 1)
19327 return false;
19328 if (int_size_in_bytes (type) != size)
19329 return false;
19330 if (size > TREE_STRING_LENGTH (init))
19331 {
19332 memcpy (array, TREE_STRING_POINTER (init),
19333 TREE_STRING_LENGTH (init));
19334 memset (array + TREE_STRING_LENGTH (init),
19335 '\0', size - TREE_STRING_LENGTH (init));
19336 }
19337 else
19338 memcpy (array, TREE_STRING_POINTER (init), size);
19339 return true;
19340 }
19341 return false;
19342 case CONSTRUCTOR:
19343 type = TREE_TYPE (init);
19344 if (int_size_in_bytes (type) != size)
19345 return false;
19346 if (TREE_CODE (type) == ARRAY_TYPE)
19347 {
19348 HOST_WIDE_INT min_index;
19349 unsigned HOST_WIDE_INT cnt;
19350 int curpos = 0, fieldsize;
19351 constructor_elt *ce;
19352
19353 if (TYPE_DOMAIN (type) == NULL_TREE
19354 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19355 return false;
19356
19357 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19358 if (fieldsize <= 0)
19359 return false;
19360
19361 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19362 memset (array, '\0', size);
19363 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19364 {
19365 tree val = ce->value;
19366 tree index = ce->index;
19367 int pos = curpos;
19368 if (index && TREE_CODE (index) == RANGE_EXPR)
19369 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19370 * fieldsize;
19371 else if (index)
19372 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19373
19374 if (val)
19375 {
19376 STRIP_NOPS (val);
19377 if (!native_encode_initializer (val, array + pos, fieldsize))
19378 return false;
19379 }
19380 curpos = pos + fieldsize;
19381 if (index && TREE_CODE (index) == RANGE_EXPR)
19382 {
19383 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19384 - tree_to_shwi (TREE_OPERAND (index, 0));
19385 while (count-- > 0)
19386 {
19387 if (val)
19388 memcpy (array + curpos, array + pos, fieldsize);
19389 curpos += fieldsize;
19390 }
19391 }
19392 gcc_assert (curpos <= size);
19393 }
19394 return true;
19395 }
19396 else if (TREE_CODE (type) == RECORD_TYPE
19397 || TREE_CODE (type) == UNION_TYPE)
19398 {
19399 tree field = NULL_TREE;
19400 unsigned HOST_WIDE_INT cnt;
19401 constructor_elt *ce;
19402
19403 if (int_size_in_bytes (type) != size)
19404 return false;
19405
19406 if (TREE_CODE (type) == RECORD_TYPE)
19407 field = TYPE_FIELDS (type);
19408
19409 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19410 {
19411 tree val = ce->value;
19412 int pos, fieldsize;
19413
19414 if (ce->index != 0)
19415 field = ce->index;
19416
19417 if (val)
19418 STRIP_NOPS (val);
19419
19420 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19421 return false;
19422
19423 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19424 && TYPE_DOMAIN (TREE_TYPE (field))
19425 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19426 return false;
19427 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19428 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19429 return false;
19430 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19431 pos = int_byte_position (field);
19432 gcc_assert (pos + fieldsize <= size);
19433 if (val && fieldsize != 0
19434 && !native_encode_initializer (val, array + pos, fieldsize))
19435 return false;
19436 }
19437 return true;
19438 }
19439 return false;
19440 case VIEW_CONVERT_EXPR:
19441 case NON_LVALUE_EXPR:
19442 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19443 default:
19444 return native_encode_expr (init, array, size) == size;
19445 }
19446 }
19447
19448 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19449 attribute is the const value T. */
19450
19451 static bool
19452 tree_add_const_value_attribute (dw_die_ref die, tree t)
19453 {
19454 tree init;
19455 tree type = TREE_TYPE (t);
19456 rtx rtl;
19457
19458 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19459 return false;
19460
19461 init = t;
19462 gcc_assert (!DECL_P (init));
19463
19464 if (TREE_CODE (init) == INTEGER_CST)
19465 {
19466 if (tree_fits_uhwi_p (init))
19467 {
19468 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
19469 return true;
19470 }
19471 if (tree_fits_shwi_p (init))
19472 {
19473 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
19474 return true;
19475 }
19476 }
19477 if (! early_dwarf)
19478 {
19479 rtl = rtl_for_decl_init (init, type);
19480 if (rtl)
19481 return add_const_value_attribute (die, rtl);
19482 }
19483 /* If the host and target are sane, try harder. */
19484 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19485 && initializer_constant_valid_p (init, type))
19486 {
19487 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19488 if (size > 0 && (int) size == size)
19489 {
19490 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19491
19492 if (native_encode_initializer (init, array, size))
19493 {
19494 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19495 return true;
19496 }
19497 ggc_free (array);
19498 }
19499 }
19500 return false;
19501 }
19502
19503 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19504 attribute is the const value of T, where T is an integral constant
19505 variable with static storage duration
19506 (so it can't be a PARM_DECL or a RESULT_DECL). */
19507
19508 static bool
19509 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19510 {
19511
19512 if (!decl
19513 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19514 || (VAR_P (decl) && !TREE_STATIC (decl)))
19515 return false;
19516
19517 if (TREE_READONLY (decl)
19518 && ! TREE_THIS_VOLATILE (decl)
19519 && DECL_INITIAL (decl))
19520 /* OK */;
19521 else
19522 return false;
19523
19524 /* Don't add DW_AT_const_value if abstract origin already has one. */
19525 if (get_AT (var_die, DW_AT_const_value))
19526 return false;
19527
19528 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19529 }
19530
19531 /* Convert the CFI instructions for the current function into a
19532 location list. This is used for DW_AT_frame_base when we are targeting
19533 a dwarf2 consumer that does not support the dwarf3
19534 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19535 expressions. */
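/* Illustrative sketch (x86-64-flavoured; the registers and offsets are
   only an assumption for the example): for a function whose prologue
   first pushes %rbp and then copies %rsp into it, the resulting
   frame-base location list, with OFFSET == 0, would look roughly like

       [prologue start, after push)  DW_OP_breg7 (rsp) +8
       [after push, after mov)       DW_OP_breg7 (rsp) +16
       [after mov, function end)     DW_OP_breg6 (rbp) +16

   i.e. one entry per range over which the CFA expression is constant.  */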
19536
19537 static dw_loc_list_ref
19538 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19539 {
19540 int ix;
19541 dw_fde_ref fde;
19542 dw_loc_list_ref list, *list_tail;
19543 dw_cfi_ref cfi;
19544 dw_cfa_location last_cfa, next_cfa;
19545 const char *start_label, *last_label, *section;
19546 dw_cfa_location remember;
19547
19548 fde = cfun->fde;
19549 gcc_assert (fde != NULL);
19550
19551 section = secname_for_decl (current_function_decl);
19552 list_tail = &list;
19553 list = NULL;
19554
19555 memset (&next_cfa, 0, sizeof (next_cfa));
19556 next_cfa.reg = INVALID_REGNUM;
19557 remember = next_cfa;
19558
19559 start_label = fde->dw_fde_begin;
19560
19561 /* ??? Bald assumption that the CIE opcode list does not contain
19562 advance opcodes. */
19563 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19564 lookup_cfa_1 (cfi, &next_cfa, &remember);
19565
19566 last_cfa = next_cfa;
19567 last_label = start_label;
19568
19569 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19570 {
19571 /* If the first partition contained no CFI adjustments, the
19572 CIE opcodes apply to the whole first partition. */
19573 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19574 fde->dw_fde_begin, fde->dw_fde_end, section);
19575 list_tail = &(*list_tail)->dw_loc_next;
19576 start_label = last_label = fde->dw_fde_second_begin;
19577 }
19578
19579 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19580 {
19581 switch (cfi->dw_cfi_opc)
19582 {
19583 case DW_CFA_set_loc:
19584 case DW_CFA_advance_loc1:
19585 case DW_CFA_advance_loc2:
19586 case DW_CFA_advance_loc4:
19587 if (!cfa_equal_p (&last_cfa, &next_cfa))
19588 {
19589 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19590 start_label, last_label, section);
19591
19592 list_tail = &(*list_tail)->dw_loc_next;
19593 last_cfa = next_cfa;
19594 start_label = last_label;
19595 }
19596 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19597 break;
19598
19599 case DW_CFA_advance_loc:
19600 /* The encoding is complex enough that we should never emit this. */
19601 gcc_unreachable ();
19602
19603 default:
19604 lookup_cfa_1 (cfi, &next_cfa, &remember);
19605 break;
19606 }
19607 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19608 {
19609 if (!cfa_equal_p (&last_cfa, &next_cfa))
19610 {
19611 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19612 start_label, last_label, section);
19613
19614 list_tail = &(*list_tail)->dw_loc_next;
19615 last_cfa = next_cfa;
19616 start_label = last_label;
19617 }
19618 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19619 start_label, fde->dw_fde_end, section);
19620 list_tail = &(*list_tail)->dw_loc_next;
19621 start_label = last_label = fde->dw_fde_second_begin;
19622 }
19623 }
19624
19625 if (!cfa_equal_p (&last_cfa, &next_cfa))
19626 {
19627 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19628 start_label, last_label, section);
19629 list_tail = &(*list_tail)->dw_loc_next;
19630 start_label = last_label;
19631 }
19632
19633 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19634 start_label,
19635 fde->dw_fde_second_begin
19636 ? fde->dw_fde_second_end : fde->dw_fde_end,
19637 section);
19638
19639 if (list && list->dw_loc_next)
19640 gen_llsym (list);
19641
19642 return list;
19643 }
19644
19645 /* Compute a displacement from the "steady-state frame pointer" to the
19646 frame base (often the same as the CFA), and store it in
19647 frame_pointer_fb_offset. OFFSET is added to the displacement
19648 before the latter is negated. */
19649
19650 static void
19651 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
19652 {
19653 rtx reg, elim;
19654
19655 #ifdef FRAME_POINTER_CFA_OFFSET
19656 reg = frame_pointer_rtx;
19657 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19658 #else
19659 reg = arg_pointer_rtx;
19660 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19661 #endif
19662
19663 elim = (ira_use_lra_p
19664 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19665 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19666 if (GET_CODE (elim) == PLUS)
19667 {
19668 offset += INTVAL (XEXP (elim, 1));
19669 elim = XEXP (elim, 0);
19670 }
19671
19672 frame_pointer_fb_offset = -offset;
19673
19674 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19675 in which to eliminate. This is because its stack pointer isn't
19676 directly accessible as a register within the ISA. To work around
19677 this, assume that while we cannot provide a proper value for
19678 frame_pointer_fb_offset, we won't need one either. */
19679 frame_pointer_fb_offset_valid
19680 = ((SUPPORTS_STACK_ALIGNMENT
19681 && (elim == hard_frame_pointer_rtx
19682 || elim == stack_pointer_rtx))
19683 || elim == (frame_pointer_needed
19684 ? hard_frame_pointer_rtx
19685 : stack_pointer_rtx));
19686 }
19687
19688 /* Generate a DW_AT_name attribute given some string value to be included as
19689 the value of the attribute. */
19690
19691 static void
19692 add_name_attribute (dw_die_ref die, const char *name_string)
19693 {
19694 if (name_string != NULL && *name_string != 0)
19695 {
19696 if (demangle_name_func)
19697 name_string = (*demangle_name_func) (name_string);
19698
19699 add_AT_string (die, DW_AT_name, name_string);
19700 }
19701 }
19702
19703 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
19704 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19705 of TYPE accordingly.
19706
19707 ??? This is a temporary measure until after we're able to generate
19708 regular DWARF for the complex Ada type system. */
19709
19710 static void
19711 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19712 dw_die_ref context_die)
19713 {
19714 tree dtype;
19715 dw_die_ref dtype_die;
19716
19717 if (!lang_hooks.types.descriptive_type)
19718 return;
19719
19720 dtype = lang_hooks.types.descriptive_type (type);
19721 if (!dtype)
19722 return;
19723
19724 dtype_die = lookup_type_die (dtype);
19725 if (!dtype_die)
19726 {
19727 gen_type_die (dtype, context_die);
19728 dtype_die = lookup_type_die (dtype);
19729 gcc_assert (dtype_die);
19730 }
19731
19732 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19733 }
19734
19735 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19736
19737 static const char *
19738 comp_dir_string (void)
19739 {
19740 const char *wd;
19741 char *wd1;
19742 static const char *cached_wd = NULL;
19743
19744 if (cached_wd != NULL)
19745 return cached_wd;
19746
19747 wd = get_src_pwd ();
19748 if (wd == NULL)
19749 return NULL;
19750
19751 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19752 {
19753 int wdlen;
19754
19755 wdlen = strlen (wd);
19756 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19757 strcpy (wd1, wd);
19758 wd1 [wdlen] = DIR_SEPARATOR;
19759 wd1 [wdlen + 1] = 0;
19760 wd = wd1;
19761 }
19762
19763 cached_wd = remap_debug_filename (wd);
19764 return cached_wd;
19765 }
19766
19767 /* Generate a DW_AT_comp_dir attribute for DIE. */
19768
19769 static void
19770 add_comp_dir_attribute (dw_die_ref die)
19771 {
19772 const char * wd = comp_dir_string ();
19773 if (wd != NULL)
19774 add_AT_string (die, DW_AT_comp_dir, wd);
19775 }
19776
19777 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19778 pointer computation, ...), output a representation for that bound according
19779 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19780 loc_list_from_tree for the meaning of CONTEXT. */
19781
19782 static void
19783 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19784 int forms, struct loc_descr_context *context)
19785 {
19786 dw_die_ref context_die, decl_die;
19787 dw_loc_list_ref list;
19788 bool strip_conversions = true;
19789 bool placeholder_seen = false;
19790
19791 while (strip_conversions)
19792 switch (TREE_CODE (value))
19793 {
19794 case ERROR_MARK:
19795 case SAVE_EXPR:
19796 return;
19797
19798 CASE_CONVERT:
19799 case VIEW_CONVERT_EXPR:
19800 value = TREE_OPERAND (value, 0);
19801 break;
19802
19803 default:
19804 strip_conversions = false;
19805 break;
19806 }
19807
19808 /* If possible and permitted, output the attribute as a constant. */
19809 if ((forms & dw_scalar_form_constant) != 0
19810 && TREE_CODE (value) == INTEGER_CST)
19811 {
19812 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19813
19814 /* If HOST_WIDE_INT is big enough then represent the bound as
19815 a constant value. We need to choose a form based on
19816 whether the type is signed or unsigned. We cannot just
19817 call add_AT_unsigned if the value itself is positive
19818 (add_AT_unsigned might add the unsigned value encoded as
19819 DW_FORM_data[1248]). Some DWARF consumers will look up the
19820 bounds type and then sign extend any unsigned values found
19821 for signed types. This is needed only for
19822 DW_AT_{lower,upper}_bound, since for most other attributes,
19823 consumers will treat DW_FORM_data[1248] as unsigned values,
19824 regardless of the underlying type. */
19825 if (prec <= HOST_BITS_PER_WIDE_INT
19826 || tree_fits_uhwi_p (value))
19827 {
19828 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19829 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19830 else
19831 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19832 }
19833 else
19834 /* Otherwise represent the bound as an unsigned value with
19835 the precision of its type. The precision and signedness
19836 of the type will be necessary to re-interpret it
19837 unambiguously. */
19838 add_AT_wide (die, attr, wi::to_wide (value));
19839 return;
19840 }
19841
19842 /* Otherwise, if it is both possible and permitted, output a reference to
19843 another DIE. */
19844 if ((forms & dw_scalar_form_reference) != 0)
19845 {
19846 tree decl = NULL_TREE;
19847
19848 /* Some type attributes reference an outer type. For instance, the upper
19849 bound of an array may reference an embedding record (this happens in
19850 Ada). */
19851 if (TREE_CODE (value) == COMPONENT_REF
19852 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
19853 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
19854 decl = TREE_OPERAND (value, 1);
19855
19856 else if (VAR_P (value)
19857 || TREE_CODE (value) == PARM_DECL
19858 || TREE_CODE (value) == RESULT_DECL)
19859 decl = value;
19860
19861 if (decl != NULL_TREE)
19862 {
19863 dw_die_ref decl_die = lookup_decl_die (decl);
19864
19865 /* ??? Can this happen, or should the variable have been bound
19866 first? Probably it can, since I imagine that we try to create
19867 the types of parameters in the order in which they exist in
19868 the list, and won't have created a forward reference to a
19869 later parameter. */
19870 if (decl_die != NULL)
19871 {
19872 add_AT_die_ref (die, attr, decl_die);
19873 return;
19874 }
19875 }
19876 }
19877
19878 /* Last chance: try to create a stack operation procedure to evaluate the
19879 value. Do nothing if even that is not possible or permitted. */
19880 if ((forms & dw_scalar_form_exprloc) == 0)
19881 return;
19882
19883 list = loc_list_from_tree (value, 2, context);
19884 if (context && context->placeholder_arg)
19885 {
19886 placeholder_seen = context->placeholder_seen;
19887 context->placeholder_seen = false;
19888 }
19889 if (list == NULL || single_element_loc_list_p (list))
19890 {
19891 /* If this attribute is neither a reference nor a constant, it is
19892 a DWARF expression rather than a location description. For that,
19893 loc_list_from_tree (value, 0, &context) is needed. */
19894 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
19895 if (list2 && single_element_loc_list_p (list2))
19896 {
19897 if (placeholder_seen)
19898 {
19899 struct dwarf_procedure_info dpi;
19900 dpi.fndecl = NULL_TREE;
19901 dpi.args_count = 1;
19902 if (!resolve_args_picking (list2->expr, 1, &dpi))
19903 return;
19904 }
19905 add_AT_loc (die, attr, list2->expr);
19906 return;
19907 }
19908 }
19909
19910 /* If that failed to give a single-element location list, fall back to
19911 outputting this as a reference, if that form is permitted. */
19912 if (list == NULL
19913 || (forms & dw_scalar_form_reference) == 0
19914 || placeholder_seen)
19915 return;
19916
19917 if (current_function_decl == 0)
19918 context_die = comp_unit_die ();
19919 else
19920 context_die = lookup_decl_die (current_function_decl);
19921
19922 decl_die = new_die (DW_TAG_variable, context_die, value);
19923 add_AT_flag (decl_die, DW_AT_artificial, 1);
19924 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
19925 context_die);
19926 add_AT_location_description (decl_die, DW_AT_location, list);
19927 add_AT_die_ref (die, attr, decl_die);
19928 }
19929
19930 /* Return the default for DW_AT_lower_bound, or -1 if there is no
19931 default. */
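/* For example, for a CU whose DW_AT_language is DW_LANG_C99 this returns
   0, while for DW_LANG_Fortran95 it returns 1; add_bound_info uses this
   so that the DW_AT_lower_bound of a C array such as int a[10] can simply
   be omitted, the consumer assuming the language default of 0.  */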
19932
19933 static int
19934 lower_bound_default (void)
19935 {
19936 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
19937 {
19938 case DW_LANG_C:
19939 case DW_LANG_C89:
19940 case DW_LANG_C99:
19941 case DW_LANG_C11:
19942 case DW_LANG_C_plus_plus:
19943 case DW_LANG_C_plus_plus_11:
19944 case DW_LANG_C_plus_plus_14:
19945 case DW_LANG_ObjC:
19946 case DW_LANG_ObjC_plus_plus:
19947 return 0;
19948 case DW_LANG_Fortran77:
19949 case DW_LANG_Fortran90:
19950 case DW_LANG_Fortran95:
19951 case DW_LANG_Fortran03:
19952 case DW_LANG_Fortran08:
19953 return 1;
19954 case DW_LANG_UPC:
19955 case DW_LANG_D:
19956 case DW_LANG_Python:
19957 return dwarf_version >= 4 ? 0 : -1;
19958 case DW_LANG_Ada95:
19959 case DW_LANG_Ada83:
19960 case DW_LANG_Cobol74:
19961 case DW_LANG_Cobol85:
19962 case DW_LANG_Modula2:
19963 case DW_LANG_PLI:
19964 return dwarf_version >= 4 ? 1 : -1;
19965 default:
19966 return -1;
19967 }
19968 }
19969
19970 /* Given a tree node describing an array bound (either lower or upper) output
19971 a representation for that bound. */
19972
19973 static void
19974 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
19975 tree bound, struct loc_descr_context *context)
19976 {
19977 int dflt;
19978
19979 while (1)
19980 switch (TREE_CODE (bound))
19981 {
19982 /* Strip all conversions. */
19983 CASE_CONVERT:
19984 case VIEW_CONVERT_EXPR:
19985 bound = TREE_OPERAND (bound, 0);
19986 break;
19987
19988 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
19989 are even omitted when they are the default. */
19990 case INTEGER_CST:
19991 /* If the value for this bound is the default one, we can even omit the
19992 attribute. */
19993 if (bound_attr == DW_AT_lower_bound
19994 && tree_fits_shwi_p (bound)
19995 && (dflt = lower_bound_default ()) != -1
19996 && tree_to_shwi (bound) == dflt)
19997 return;
19998
19999 /* FALLTHRU */
20000
20001 default:
20002 /* Because of the complex interactions that can arise with other GNAT
20003 encodings, GDB is not yet ready to handle a proper DWARF description
20004 of self-referential subrange bounds: let the GNAT encodings do the
20005 magic in such a case. */
20006 if (is_ada ()
20007 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20008 && contains_placeholder_p (bound))
20009 return;
20010
20011 add_scalar_info (subrange_die, bound_attr, bound,
20012 dw_scalar_form_constant
20013 | dw_scalar_form_exprloc
20014 | dw_scalar_form_reference,
20015 context);
20016 return;
20017 }
20018 }
20019
20020 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20021 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20022 Note that the block of subscript information for an array type also
20023 includes information about the element type of the given array type.
20024
20025 This function reuses previously set type and bound information if
20026 available. */
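/* Illustrative sketch (hypothetical C declaration): for

       int a[3][5];

   with COLLAPSE_P true this emits, under the single DW_TAG_array_type
   DIE, two DW_TAG_subrange_type children with DW_AT_upper_bound 2 and 4
   (lower bounds of 0 being the C default and therefore omitted).  With
   COLLAPSE_P false, as for Ada, only the outermost dimension is handled
   here and the element type remains a nested array type.  */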
20027
20028 static void
20029 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20030 {
20031 unsigned dimension_number;
20032 tree lower, upper;
20033 dw_die_ref child = type_die->die_child;
20034
20035 for (dimension_number = 0;
20036 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20037 type = TREE_TYPE (type), dimension_number++)
20038 {
20039 tree domain = TYPE_DOMAIN (type);
20040
20041 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20042 break;
20043
20044 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20045 and (in GNU C only) variable bounds. Handle all three forms
20046 here. */
20047
20048 /* Find and reuse a previously generated DW_TAG_subrange_type if
20049 available.
20050
20051 For multi-dimensional arrays, as we iterate through the
20052 various dimensions in the enclosing for loop above, we also
20053 iterate through the DIE children and pick at each
20054 DW_TAG_subrange_type previously generated (if available).
20055 Each child DW_TAG_subrange_type DIE describes the range of
20056 the current dimension. At this point we should have as many
20057 DW_TAG_subrange_type's as we have dimensions in the
20058 array. */
20059 dw_die_ref subrange_die = NULL;
20060 if (child)
20061 while (1)
20062 {
20063 child = child->die_sib;
20064 if (child->die_tag == DW_TAG_subrange_type)
20065 subrange_die = child;
20066 if (child == type_die->die_child)
20067 {
20068 /* If we wrapped around, stop looking next time. */
20069 child = NULL;
20070 break;
20071 }
20072 if (child->die_tag == DW_TAG_subrange_type)
20073 break;
20074 }
20075 if (!subrange_die)
20076 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20077
20078 if (domain)
20079 {
20080 /* We have an array type with specified bounds. */
20081 lower = TYPE_MIN_VALUE (domain);
20082 upper = TYPE_MAX_VALUE (domain);
20083
20084 /* Define the index type. */
20085 if (TREE_TYPE (domain)
20086 && !get_AT (subrange_die, DW_AT_type))
20087 {
20088 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20089 TREE_TYPE field. We can't emit debug info for this
20090 because it is an unnamed integral type. */
20091 if (TREE_CODE (domain) == INTEGER_TYPE
20092 && TYPE_NAME (domain) == NULL_TREE
20093 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20094 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20095 ;
20096 else
20097 add_type_attribute (subrange_die, TREE_TYPE (domain),
20098 TYPE_UNQUALIFIED, false, type_die);
20099 }
20100
20101 /* ??? If upper is NULL, the array has unspecified length,
20102 but it does have a lower bound. This happens with Fortran
20103 dimension arr(N:*)
20104 Since the debugger is definitely going to need to know N
20105 to produce useful results, go ahead and output the lower
20106 bound solo, and hope the debugger can cope. */
20107
20108 if (!get_AT (subrange_die, DW_AT_lower_bound))
20109 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20110 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20111 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20112 }
20113
20114 /* Otherwise we have an array type with an unspecified length. The
20115 DWARF-2 spec does not say how to handle this; let's just leave out the
20116 bounds. */
20117 }
20118 }
20119
20120 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20121
20122 static void
20123 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20124 {
20125 dw_die_ref decl_die;
20126 HOST_WIDE_INT size;
20127 dw_loc_descr_ref size_expr = NULL;
20128
20129 switch (TREE_CODE (tree_node))
20130 {
20131 case ERROR_MARK:
20132 size = 0;
20133 break;
20134 case ENUMERAL_TYPE:
20135 case RECORD_TYPE:
20136 case UNION_TYPE:
20137 case QUAL_UNION_TYPE:
20138 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20139 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20140 {
20141 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20142 return;
20143 }
20144 size_expr = type_byte_size (tree_node, &size);
20145 break;
20146 case FIELD_DECL:
20147 /* For a data member of a struct or union, the DW_AT_byte_size is
20148 generally given as the number of bytes normally allocated for an
20149 object of the *declared* type of the member itself. This is true
20150 even for bit-fields. */
20151 size = int_size_in_bytes (field_type (tree_node));
20152 break;
20153 default:
20154 gcc_unreachable ();
20155 }
20156
20157 /* Support for dynamically-sized objects was introduced by DWARFv3.
20158 At the moment, GDB does not handle variable byte sizes very well,
20159 though. */
20160 if ((dwarf_version >= 3 || !dwarf_strict)
20161 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20162 && size_expr != NULL)
20163 add_AT_loc (die, DW_AT_byte_size, size_expr);
20164
20165 /* Note that `size' might be -1 when we get to this point. If it is, that
20166 indicates that the byte size of the entity in question is variable and
20167 that we could not generate a DWARF expression that computes it. */
20168 if (size >= 0)
20169 add_AT_unsigned (die, DW_AT_byte_size, size);
20170 }
20171
20172 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20173 alignment. */
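/* Only alignment that the user requested explicitly is described; e.g. a
   hypothetical declaration such as

       int buf[32] __attribute__ ((aligned (64)));

   would get DW_AT_alignment 64 (a byte count), whereas a default-aligned
   declaration produces no attribute at all.  */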
20174
20175 static void
20176 add_alignment_attribute (dw_die_ref die, tree tree_node)
20177 {
20178 if (dwarf_version < 5 && dwarf_strict)
20179 return;
20180
20181 unsigned align;
20182
20183 if (DECL_P (tree_node))
20184 {
20185 if (!DECL_USER_ALIGN (tree_node))
20186 return;
20187
20188 align = DECL_ALIGN_UNIT (tree_node);
20189 }
20190 else if (TYPE_P (tree_node))
20191 {
20192 if (!TYPE_USER_ALIGN (tree_node))
20193 return;
20194
20195 align = TYPE_ALIGN_UNIT (tree_node);
20196 }
20197 else
20198 gcc_unreachable ();
20199
20200 add_AT_unsigned (die, DW_AT_alignment, align);
20201 }
20202
20203 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20204 which specifies the distance in bits from the highest order bit of the
20205 "containing object" for the bit-field to the highest order bit of the
20206 bit-field itself.
20207
20208 For any given bit-field, the "containing object" is a hypothetical object
20209 (of some integral or enum type) within which the given bit-field lives. The
20210 type of this hypothetical "containing object" is always the same as the
20211 declared type of the individual bit-field itself. The determination of the
20212 exact location of the "containing object" for a bit-field is rather
20213 complicated. It's handled by the `field_byte_offset' function (above).
20214
20215 CTX is required: see the comment for VLR_CONTEXT.
20216
20217 Note that it is the size (in bytes) of the hypothetical "containing object"
20218 which will be given in the DW_AT_byte_size attribute for this bit-field.
20219 (See `byte_size_attribute' above). */
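/* Worked example (hypothetical struct; a little-endian target with
   32-bit int is assumed): for

       struct s { unsigned int f : 3; unsigned int g : 5; };

   the containing object of G is the 32-bit int allocated at byte 0, so
   highest_order_object_bit_offset is 0*8 + 32 = 32, while G itself starts
   at bit 3 and is 5 bits wide, giving highest_order_field_bit_offset
   3 + 5 = 8; the emitted DW_AT_bit_offset is therefore 32 - 8 = 24.  */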
20220
20221 static inline void
20222 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20223 {
20224 HOST_WIDE_INT object_offset_in_bytes;
20225 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20226 HOST_WIDE_INT bitpos_int;
20227 HOST_WIDE_INT highest_order_object_bit_offset;
20228 HOST_WIDE_INT highest_order_field_bit_offset;
20229 HOST_WIDE_INT bit_offset;
20230
20231 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20232
20233 /* Must be a field and a bit field. */
20234 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20235
20236 /* We can't yet handle bit-fields whose offsets are variable, so if we
20237 encounter such things, just return without generating any attribute
20238 whatsoever. Likewise for variable or too large size. */
20239 if (! tree_fits_shwi_p (bit_position (decl))
20240 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20241 return;
20242
20243 bitpos_int = int_bit_position (decl);
20244
20245 /* Note that the bit offset is always the distance (in bits) from the
20246 highest-order bit of the "containing object" to the highest-order bit of
20247 the bit-field itself. Since the "high-order end" of any object or field
20248 is different on big-endian and little-endian machines, the computation
20249 below must take account of these differences. */
20250 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20251 highest_order_field_bit_offset = bitpos_int;
20252
20253 if (! BYTES_BIG_ENDIAN)
20254 {
20255 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20256 highest_order_object_bit_offset +=
20257 simple_type_size_in_bits (original_type);
20258 }
20259
20260 bit_offset
20261 = (! BYTES_BIG_ENDIAN
20262 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20263 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20264
20265 if (bit_offset < 0)
20266 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20267 else
20268 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20269 }
20270
20271 /* For a FIELD_DECL node which represents a bit field, output an attribute
20272 which specifies the length in bits of the given field. */
20273
20274 static inline void
20275 add_bit_size_attribute (dw_die_ref die, tree decl)
20276 {
20277 /* Must be a field and a bit field. */
20278 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20279 && DECL_BIT_FIELD_TYPE (decl));
20280
20281 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20282 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20283 }
20284
20285 /* If the compiled language is ANSI C, then add a 'prototyped'
20286 attribute if argument types are given for the parameters of a function. */
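/* For example, a C declaration int f (void) is a prototype and gets
   DW_AT_prototyped 1, whereas the old-style declaration int f () is not
   and gets no attribute; for languages outside the switch below (C++,
   Fortran, ...) nothing is ever added.  */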
20287
20288 static inline void
20289 add_prototyped_attribute (dw_die_ref die, tree func_type)
20290 {
20291 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20292 {
20293 case DW_LANG_C:
20294 case DW_LANG_C89:
20295 case DW_LANG_C99:
20296 case DW_LANG_C11:
20297 case DW_LANG_ObjC:
20298 if (prototype_p (func_type))
20299 add_AT_flag (die, DW_AT_prototyped, 1);
20300 break;
20301 default:
20302 break;
20303 }
20304 }
20305
20306 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20307 by looking in the type declaration, the object declaration equate table or
20308 the block mapping. */
20309
20310 static inline dw_die_ref
20311 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20312 {
20313 dw_die_ref origin_die = NULL;
20314
20315 if (DECL_P (origin))
20316 {
20317 dw_die_ref c;
20318 origin_die = lookup_decl_die (origin);
20319 /* "Unwrap" the decls DIE which we put in the imported unit context.
20320 We are looking for the abstract copy here. */
20321 if (in_lto_p
20322 && origin_die
20323 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20324 /* ??? Identify this better. */
20325 && c->with_offset)
20326 origin_die = c;
20327 }
20328 else if (TYPE_P (origin))
20329 origin_die = lookup_type_die (origin);
20330 else if (TREE_CODE (origin) == BLOCK)
20331 origin_die = BLOCK_DIE (origin);
20332
20333 /* XXX: Functions that are never lowered don't always have correct block
20334 trees (in the case of Java, they simply have no block tree; the same is true
20335 in some other languages). For these functions, there is nothing we can really do to
20336 output correct debug info for inlined functions in all cases. Rather
20337 than die, we'll just produce deficient debug info now, in that we will
20338 have variables without a proper abstract origin. In the future, when all
20339 functions are lowered, we should re-add a gcc_assert (origin_die)
20340 here. */
20341
20342 if (origin_die)
20343 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20344 return origin_die;
20345 }
20346
20347 /* We do not currently support the pure_virtual attribute. */
20348
20349 static inline void
20350 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20351 {
20352 if (DECL_VINDEX (func_decl))
20353 {
20354 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20355
20356 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20357 add_AT_loc (die, DW_AT_vtable_elem_location,
20358 new_loc_descr (DW_OP_constu,
20359 tree_to_shwi (DECL_VINDEX (func_decl)),
20360 0));
20361
20362 /* GNU extension: Record what type this method came from originally. */
20363 if (debug_info_level > DINFO_LEVEL_TERSE
20364 && DECL_CONTEXT (func_decl))
20365 add_AT_die_ref (die, DW_AT_containing_type,
20366 lookup_type_die (DECL_CONTEXT (func_decl)));
20367 }
20368 }
20369 \f
20370 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20371 given decl. This was a vendor extension until DWARF 4
20372 standardized it. */
20373
20374 static void
20375 add_linkage_attr (dw_die_ref die, tree decl)
20376 {
20377 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20378
20379 /* Mimic what assemble_name_raw does with a leading '*'. */
20380 if (name[0] == '*')
20381 name = &name[1];
20382
20383 if (dwarf_version >= 4)
20384 add_AT_string (die, DW_AT_linkage_name, name);
20385 else
20386 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20387 }
20388
20389 /* Add source coordinate attributes for the given decl. */
20390
20391 static void
20392 add_src_coords_attributes (dw_die_ref die, tree decl)
20393 {
20394 expanded_location s;
20395
20396 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20397 return;
20398 s = expand_location (DECL_SOURCE_LOCATION (decl));
20399 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20400 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20401 if (debug_column_info && s.column)
20402 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20403 }
20404
20405 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20406
20407 static void
20408 add_linkage_name_raw (dw_die_ref die, tree decl)
20409 {
20410 /* Defer until we have an assembler name set. */
20411 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20412 {
20413 limbo_die_node *asm_name;
20414
20415 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20416 asm_name->die = die;
20417 asm_name->created_for = decl;
20418 asm_name->next = deferred_asm_name;
20419 deferred_asm_name = asm_name;
20420 }
20421 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20422 add_linkage_attr (die, decl);
20423 }
20424
20425 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20426
20427 static void
20428 add_linkage_name (dw_die_ref die, tree decl)
20429 {
20430 if (debug_info_level > DINFO_LEVEL_NONE
20431 && VAR_OR_FUNCTION_DECL_P (decl)
20432 && TREE_PUBLIC (decl)
20433 && !(VAR_P (decl) && DECL_REGISTER (decl))
20434 && die->die_tag != DW_TAG_member)
20435 add_linkage_name_raw (die, decl);
20436 }
20437
20438 /* Add a DW_AT_name attribute and source coordinate attribute for the
20439 given decl, but only if it actually has a name. */
20440
20441 static void
20442 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20443 bool no_linkage_name)
20444 {
20445 tree decl_name;
20446
20447 decl_name = DECL_NAME (decl);
20448 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20449 {
20450 const char *name = dwarf2_name (decl, 0);
20451 if (name)
20452 add_name_attribute (die, name);
20453 if (! DECL_ARTIFICIAL (decl))
20454 add_src_coords_attributes (die, decl);
20455
20456 if (!no_linkage_name)
20457 add_linkage_name (die, decl);
20458 }
20459
20460 #ifdef VMS_DEBUGGING_INFO
20461 /* Get the function's name, as described by its RTL. This may be different
20462 from the DECL_NAME name used in the source file. */
20463 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20464 {
20465 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20466 XEXP (DECL_RTL (decl), 0), false);
20467 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20468 }
20469 #endif /* VMS_DEBUGGING_INFO */
20470 }
20471
20472 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20473
20474 static void
20475 add_discr_value (dw_die_ref die, dw_discr_value *value)
20476 {
20477 dw_attr_node attr;
20478
20479 attr.dw_attr = DW_AT_discr_value;
20480 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20481 attr.dw_attr_val.val_entry = NULL;
20482 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20483 if (value->pos)
20484 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20485 else
20486 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20487 add_dwarf_attr (die, &attr);
20488 }
20489
20490 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20491
20492 static void
20493 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20494 {
20495 dw_attr_node attr;
20496
20497 attr.dw_attr = DW_AT_discr_list;
20498 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20499 attr.dw_attr_val.val_entry = NULL;
20500 attr.dw_attr_val.v.val_discr_list = discr_list;
20501 add_dwarf_attr (die, &attr);
20502 }
20503
20504 static inline dw_discr_list_ref
20505 AT_discr_list (dw_attr_node *attr)
20506 {
20507 return attr->dw_attr_val.v.val_discr_list;
20508 }
20509
20510 #ifdef VMS_DEBUGGING_INFO
20511 /* Output the debug main pointer die for VMS. */
20512
20513 void
20514 dwarf2out_vms_debug_main_pointer (void)
20515 {
20516 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20517 dw_die_ref die;
20518
20519 /* Allocate the VMS debug main subprogram die. */
20520 die = new_die_raw (DW_TAG_subprogram);
20521 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20522 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20523 current_function_funcdef_no);
20524 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20525
20526 /* Make it the first child of comp_unit_die (). */
20527 die->die_parent = comp_unit_die ();
20528 if (comp_unit_die ()->die_child)
20529 {
20530 die->die_sib = comp_unit_die ()->die_child->die_sib;
20531 comp_unit_die ()->die_child->die_sib = die;
20532 }
20533 else
20534 {
20535 die->die_sib = die;
20536 comp_unit_die ()->die_child = die;
20537 }
20538 }
20539 #endif /* VMS_DEBUGGING_INFO */
20540
20541 /* Push a new declaration scope. */
20542
20543 static void
20544 push_decl_scope (tree scope)
20545 {
20546 vec_safe_push (decl_scope_table, scope);
20547 }
20548
20549 /* Pop a declaration scope. */
20550
20551 static inline void
20552 pop_decl_scope (void)
20553 {
20554 decl_scope_table->pop ();
20555 }
20556
20557 /* walk_tree helper function for uses_local_type, below. */
20558
20559 static tree
20560 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20561 {
20562 if (!TYPE_P (*tp))
20563 *walk_subtrees = 0;
20564 else
20565 {
20566 tree name = TYPE_NAME (*tp);
20567 if (name && DECL_P (name) && decl_function_context (name))
20568 return *tp;
20569 }
20570 return NULL_TREE;
20571 }
20572
20573 /* If TYPE involves a function-local type (including a local typedef to a
20574 non-local type), returns that type; otherwise returns NULL_TREE. */
20575
20576 static tree
20577 uses_local_type (tree type)
20578 {
20579 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20580 return used;
20581 }
20582
20583 /* Return the DIE for the scope that immediately contains this type.
20584 Non-named types that do not involve a function-local type get global
20585 scope. Named types nested in namespaces or other types get their
20586 containing scope. All other types (i.e. function-local named types) get
20587 the current active scope. */
20588
20589 static dw_die_ref
20590 scope_die_for (tree t, dw_die_ref context_die)
20591 {
20592 dw_die_ref scope_die = NULL;
20593 tree containing_scope;
20594
20595 /* Non-types always go in the current scope. */
20596 gcc_assert (TYPE_P (t));
20597
20598 /* Use the scope of the typedef, rather than the scope of the type
20599 it refers to. */
20600 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20601 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20602 else
20603 containing_scope = TYPE_CONTEXT (t);
20604
20605 /* Use the containing namespace if there is one. */
20606 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20607 {
20608 if (context_die == lookup_decl_die (containing_scope))
20609 /* OK */;
20610 else if (debug_info_level > DINFO_LEVEL_TERSE)
20611 context_die = get_context_die (containing_scope);
20612 else
20613 containing_scope = NULL_TREE;
20614 }
20615
20616 /* Ignore function type "scopes" from the C frontend. They mean that
20617 a tagged type is local to a parmlist of a function declarator, but
20618 that isn't useful to DWARF. */
20619 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20620 containing_scope = NULL_TREE;
20621
20622 if (SCOPE_FILE_SCOPE_P (containing_scope))
20623 {
20624 /* If T uses a local type keep it local as well, to avoid references
20625 to function-local DIEs from outside the function. */
20626 if (current_function_decl && uses_local_type (t))
20627 scope_die = context_die;
20628 else
20629 scope_die = comp_unit_die ();
20630 }
20631 else if (TYPE_P (containing_scope))
20632 {
20633 /* For types, we can just look up the appropriate DIE. */
20634 if (debug_info_level > DINFO_LEVEL_TERSE)
20635 scope_die = get_context_die (containing_scope);
20636 else
20637 {
20638 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20639 if (scope_die == NULL)
20640 scope_die = comp_unit_die ();
20641 }
20642 }
20643 else
20644 scope_die = context_die;
20645
20646 return scope_die;
20647 }
20648
20649 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20650
20651 static inline int
20652 local_scope_p (dw_die_ref context_die)
20653 {
20654 for (; context_die; context_die = context_die->die_parent)
20655 if (context_die->die_tag == DW_TAG_inlined_subroutine
20656 || context_die->die_tag == DW_TAG_subprogram)
20657 return 1;
20658
20659 return 0;
20660 }
20661
20662 /* Returns nonzero if CONTEXT_DIE is a class. */
20663
20664 static inline int
20665 class_scope_p (dw_die_ref context_die)
20666 {
20667 return (context_die
20668 && (context_die->die_tag == DW_TAG_structure_type
20669 || context_die->die_tag == DW_TAG_class_type
20670 || context_die->die_tag == DW_TAG_interface_type
20671 || context_die->die_tag == DW_TAG_union_type));
20672 }
20673
20674 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20675 whether or not to treat a DIE in this context as a declaration. */
20676
20677 static inline int
20678 class_or_namespace_scope_p (dw_die_ref context_die)
20679 {
20680 return (class_scope_p (context_die)
20681 || (context_die && context_die->die_tag == DW_TAG_namespace));
20682 }
20683
20684 /* Many forms of DIEs require a "type description" attribute. This
20685 routine locates the proper "type descriptor" die for the type given
20686 by 'type' plus any additional qualifiers given by 'cv_quals', and
20687 adds a DW_AT_type attribute below the given die. */
20688
20689 static void
20690 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20691 bool reverse, dw_die_ref context_die)
20692 {
20693 enum tree_code code = TREE_CODE (type);
20694 dw_die_ref type_die = NULL;
20695
20696 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20697 or fixed-point type, use the inner type. This is because we have no
20698 support for unnamed types in base_type_die. This can happen if this is
20699 an Ada subrange type. The correct solution is to emit a subrange type die. */
20700 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20701 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20702 type = TREE_TYPE (type), code = TREE_CODE (type);
20703
20704 if (code == ERROR_MARK
20705 /* Handle a special case. For functions whose return type is void, we
20706 generate *no* type attribute. (Note that no object may have type
20707 `void', so this only applies to function return types). */
20708 || code == VOID_TYPE)
20709 return;
20710
20711 type_die = modified_type_die (type,
20712 cv_quals | TYPE_QUALS (type),
20713 reverse,
20714 context_die);
20715
20716 if (type_die != NULL)
20717 add_AT_die_ref (object_die, DW_AT_type, type_die);
20718 }
20719
20720 /* Given an object die, add the calling convention attribute for the
20721 function call type. */
20722 static void
20723 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20724 {
20725 enum dwarf_calling_convention value = DW_CC_normal;
20726
20727 value = ((enum dwarf_calling_convention)
20728 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20729
20730 if (is_fortran ()
20731 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
20732 {
20733 /* DWARF 2 doesn't provide a way to identify a program's source-level
20734 entry point. DW_AT_calling_convention attributes are only meant
20735 to describe functions' calling conventions. However, lacking a
20736 better way to signal the Fortran main program, we used this for
20737 a long time, following existing custom. Now, DWARF 4 has
20738 DW_AT_main_subprogram, which we add below, but some tools still
20739 rely on the old way, which we thus keep. */
20740 value = DW_CC_program;
20741
20742 if (dwarf_version >= 4 || !dwarf_strict)
20743 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20744 }
20745
20746 /* Only add the attribute if the backend requests it and the value
20747 is not DW_CC_normal. */
20748 if (value && (value != DW_CC_normal))
20749 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20750 }
20751
20752 /* Given a tree pointer to a struct, class, union, or enum type node, return
20753 a pointer to the (string) tag name for the given type, or zero if the type
20754 was declared without a tag. */
20755
20756 static const char *
20757 type_tag (const_tree type)
20758 {
20759 const char *name = 0;
20760
20761 if (TYPE_NAME (type) != 0)
20762 {
20763 tree t = 0;
20764
20765 /* Find the IDENTIFIER_NODE for the type name. */
20766 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20767 && !TYPE_NAMELESS (type))
20768 t = TYPE_NAME (type);
20769
20770 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20771 a TYPE_DECL node, regardless of whether or not a `typedef' was
20772 involved. */
20773 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20774 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20775 {
20776 /* We want to be extra verbose. Don't call dwarf_name if
20777 DECL_NAME isn't set. The default hook for decl_printable_name
20778 doesn't like that, and in this context it's correct to return
20779 0, instead of "<anonymous>" or the like. */
20780 if (DECL_NAME (TYPE_NAME (type))
20781 && !DECL_NAMELESS (TYPE_NAME (type)))
20782 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20783 }
20784
20785 /* Now get the name as a string, or invent one. */
20786 if (!name && t != 0)
20787 name = IDENTIFIER_POINTER (t);
20788 }
20789
20790 return (name == 0 || *name == '\0') ? 0 : name;
20791 }
20792
20793 /* Return the type associated with a data member, make a special check
20794 for bit field types. */
20795
20796 static inline tree
20797 member_declared_type (const_tree member)
20798 {
20799 return (DECL_BIT_FIELD_TYPE (member)
20800 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20801 }
20802
20803 /* Get the decl's label, as described by its RTL. This may be different
20804 from the DECL_NAME name used in the source file. */
20805
20806 #if 0
20807 static const char *
20808 decl_start_label (tree decl)
20809 {
20810 rtx x;
20811 const char *fnname;
20812
20813 x = DECL_RTL (decl);
20814 gcc_assert (MEM_P (x));
20815
20816 x = XEXP (x, 0);
20817 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20818
20819 fnname = XSTR (x, 0);
20820 return fnname;
20821 }
20822 #endif
20823 \f
20824 /* For variable-length arrays that have been previously generated, but
20825 may be incomplete due to missing subscript info, fill the subscript
20826 info. Return TRUE if this is one of those cases. */
20827 static bool
20828 fill_variable_array_bounds (tree type)
20829 {
20830 if (TREE_ASM_WRITTEN (type)
20831 && TREE_CODE (type) == ARRAY_TYPE
20832 && variably_modified_type_p (type, NULL))
20833 {
20834 dw_die_ref array_die = lookup_type_die (type);
20835 if (!array_die)
20836 return false;
20837 add_subscript_info (array_die, type, !is_ada ());
20838 return true;
20839 }
20840 return false;
20841 }
20842
20843 /* These routines generate the internal representation of the DIE's for
20844 the compilation unit. Debugging information is collected by walking
20845 the declaration trees passed in from dwarf2out_decl(). */
20846
20847 static void
20848 gen_array_type_die (tree type, dw_die_ref context_die)
20849 {
20850 dw_die_ref array_die;
20851
20852 /* GNU compilers represent multidimensional array types as sequences of one
20853 dimensional array types whose element types are themselves array types.
20854 We sometimes squish that down to a single array_type DIE with multiple
20855 subscripts in the Dwarf debugging info. The draft Dwarf specification
20856 says that we are allowed to do this kind of compression in C, because
20857 there is no difference between an array of arrays and a multidimensional
20858 array. We don't do this for Ada, to remain as close as possible to the
20859 actual representation, which is especially important given the language's
20860 flexibility with respect to arrays of variable size. */
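/* A hedged illustration of the compression described above (a sketch, not
   a guarantee of the exact output): for the hypothetical C declaration

       int a[2][3];

   the collapsed form is a single DW_TAG_array_type DIE with element type
   `int' and two DW_TAG_subrange_type children, one with DW_AT_upper_bound 1
   and one with DW_AT_upper_bound 2.  In Ada mode the nested one-dimensional
   array types are emitted separately instead.  */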
20861
20862 bool collapse_nested_arrays = !is_ada ();
20863
20864 if (fill_variable_array_bounds (type))
20865 return;
20866
20867 dw_die_ref scope_die = scope_die_for (type, context_die);
20868 tree element_type;
20869
20870 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
20871 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
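/* Hedged example of the case handled just below, assuming a default
   (kind-1) character type: a Fortran declaration such as

       CHARACTER(LEN=10) :: S

   would be described by a DW_TAG_string_type DIE carrying
   DW_AT_byte_size 10, rather than by an array-of-character DIE.  */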
20872 if (TYPE_STRING_FLAG (type)
20873 && TREE_CODE (type) == ARRAY_TYPE
20874 && is_fortran ()
20875 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
20876 {
20877 HOST_WIDE_INT size;
20878
20879 array_die = new_die (DW_TAG_string_type, scope_die, type);
20880 add_name_attribute (array_die, type_tag (type));
20881 equate_type_number_to_die (type, array_die);
20882 size = int_size_in_bytes (type);
20883 if (size >= 0)
20884 add_AT_unsigned (array_die, DW_AT_byte_size, size);
20885 /* ??? We can't annotate types late, but for LTO we may not
20886 generate a location early either (gfortran.dg/save_6.f90). */
20887 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
20888 && TYPE_DOMAIN (type) != NULL_TREE
20889 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
20890 {
20891 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
20892 tree rszdecl = szdecl;
20893
20894 size = int_size_in_bytes (TREE_TYPE (szdecl));
20895 if (!DECL_P (szdecl))
20896 {
20897 if (TREE_CODE (szdecl) == INDIRECT_REF
20898 && DECL_P (TREE_OPERAND (szdecl, 0)))
20899 {
20900 rszdecl = TREE_OPERAND (szdecl, 0);
20901 if (int_size_in_bytes (TREE_TYPE (rszdecl))
20902 != DWARF2_ADDR_SIZE)
20903 size = 0;
20904 }
20905 else
20906 size = 0;
20907 }
20908 if (size > 0)
20909 {
20910 dw_loc_list_ref loc
20911 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
20912 NULL);
20913 if (loc)
20914 {
20915 add_AT_location_description (array_die, DW_AT_string_length,
20916 loc);
20917 if (size != DWARF2_ADDR_SIZE)
20918 add_AT_unsigned (array_die, dwarf_version >= 5
20919 ? DW_AT_string_length_byte_size
20920 : DW_AT_byte_size, size);
20921 }
20922 }
20923 }
20924 return;
20925 }
20926
20927 array_die = new_die (DW_TAG_array_type, scope_die, type);
20928 add_name_attribute (array_die, type_tag (type));
20929 equate_type_number_to_die (type, array_die);
20930
20931 if (TREE_CODE (type) == VECTOR_TYPE)
20932 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
20933
20934 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
20935 if (is_fortran ()
20936 && TREE_CODE (type) == ARRAY_TYPE
20937 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
20938 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
20939 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
20940
20941 #if 0
20942 /* We default the array ordering. Debuggers will probably do the right
20943 things even if DW_AT_ordering is not present. It's not even an issue
20944 until we start to get into multidimensional arrays anyway. If a debugger
20945 is ever caught doing the Wrong Thing for multi-dimensional arrays,
20946 then we'll have to put the DW_AT_ordering attribute back in. (But if
20947 and when we find out that we need to put these in, we will only do so
20948 for multidimensional arrays.) */
20949 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
20950 #endif
20951
20952 if (TREE_CODE (type) == VECTOR_TYPE)
20953 {
20954 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
20955 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
20956 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
20957 add_bound_info (subrange_die, DW_AT_upper_bound,
20958 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
20959 }
20960 else
20961 add_subscript_info (array_die, type, collapse_nested_arrays);
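/* Sketch of the vector case above, for illustration only: a hypothetical
   GNU vector type

       typedef int v4si __attribute__ ((vector_size (16)));

   becomes a DW_TAG_array_type flagged with DW_AT_GNU_vector whose single
   DW_TAG_subrange_type child spans 0 .. TYPE_VECTOR_SUBPARTS - 1
   (0 .. 3 here).  */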
20962
20963 /* Add representation of the type of the elements of this array type and
20964 emit the corresponding DIE if we haven't done it already. */
20965 element_type = TREE_TYPE (type);
20966 if (collapse_nested_arrays)
20967 while (TREE_CODE (element_type) == ARRAY_TYPE)
20968 {
20969 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
20970 break;
20971 element_type = TREE_TYPE (element_type);
20972 }
20973
20974 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
20975 TREE_CODE (type) == ARRAY_TYPE
20976 && TYPE_REVERSE_STORAGE_ORDER (type),
20977 context_die);
20978
20979 add_gnat_descriptive_type_attribute (array_die, type, context_die);
20980 if (TYPE_ARTIFICIAL (type))
20981 add_AT_flag (array_die, DW_AT_artificial, 1);
20982
20983 if (get_AT (array_die, DW_AT_name))
20984 add_pubtype (type, array_die);
20985
20986 add_alignment_attribute (array_die, type);
20987 }
20988
20989 /* This routine generates a DIE for an array with a hidden descriptor;
20990 the details are filled into *info by a langhook. */
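/* Hedged example of when this path is taken: a Fortran allocatable array
   such as

       REAL, ALLOCATABLE :: A(:)

   is accessed through a hidden descriptor, so its DW_TAG_array_type DIE
   typically carries DW_AT_data_location and DW_AT_allocated expressions,
   plus per-dimension bounds computed from the descriptor fields supplied
   by the langhook.  */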
20991
20992 static void
20993 gen_descr_array_type_die (tree type, struct array_descr_info *info,
20994 dw_die_ref context_die)
20995 {
20996 const dw_die_ref scope_die = scope_die_for (type, context_die);
20997 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
20998 struct loc_descr_context context = { type, info->base_decl, NULL,
20999 false, false };
21000 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21001 int dim;
21002
21003 add_name_attribute (array_die, type_tag (type));
21004 equate_type_number_to_die (type, array_die);
21005
21006 if (info->ndimensions > 1)
21007 switch (info->ordering)
21008 {
21009 case array_descr_ordering_row_major:
21010 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21011 break;
21012 case array_descr_ordering_column_major:
21013 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21014 break;
21015 default:
21016 break;
21017 }
21018
21019 if (dwarf_version >= 3 || !dwarf_strict)
21020 {
21021 if (info->data_location)
21022 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21023 dw_scalar_form_exprloc, &context);
21024 if (info->associated)
21025 add_scalar_info (array_die, DW_AT_associated, info->associated,
21026 dw_scalar_form_constant
21027 | dw_scalar_form_exprloc
21028 | dw_scalar_form_reference, &context);
21029 if (info->allocated)
21030 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21031 dw_scalar_form_constant
21032 | dw_scalar_form_exprloc
21033 | dw_scalar_form_reference, &context);
21034 if (info->stride)
21035 {
21036 const enum dwarf_attribute attr
21037 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21038 const int forms
21039 = (info->stride_in_bits)
21040 ? dw_scalar_form_constant
21041 : (dw_scalar_form_constant
21042 | dw_scalar_form_exprloc
21043 | dw_scalar_form_reference);
21044
21045 add_scalar_info (array_die, attr, info->stride, forms, &context);
21046 }
21047 }
21048 if (dwarf_version >= 5)
21049 {
21050 if (info->rank)
21051 {
21052 add_scalar_info (array_die, DW_AT_rank, info->rank,
21053 dw_scalar_form_constant
21054 | dw_scalar_form_exprloc, &context);
21055 subrange_tag = DW_TAG_generic_subrange;
21056 context.placeholder_arg = true;
21057 }
21058 }
21059
21060 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21061
21062 for (dim = 0; dim < info->ndimensions; dim++)
21063 {
21064 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21065
21066 if (info->dimen[dim].bounds_type)
21067 add_type_attribute (subrange_die,
21068 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21069 false, context_die);
21070 if (info->dimen[dim].lower_bound)
21071 add_bound_info (subrange_die, DW_AT_lower_bound,
21072 info->dimen[dim].lower_bound, &context);
21073 if (info->dimen[dim].upper_bound)
21074 add_bound_info (subrange_die, DW_AT_upper_bound,
21075 info->dimen[dim].upper_bound, &context);
21076 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21077 add_scalar_info (subrange_die, DW_AT_byte_stride,
21078 info->dimen[dim].stride,
21079 dw_scalar_form_constant
21080 | dw_scalar_form_exprloc
21081 | dw_scalar_form_reference,
21082 &context);
21083 }
21084
21085 gen_type_die (info->element_type, context_die);
21086 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21087 TREE_CODE (type) == ARRAY_TYPE
21088 && TYPE_REVERSE_STORAGE_ORDER (type),
21089 context_die);
21090
21091 if (get_AT (array_die, DW_AT_name))
21092 add_pubtype (type, array_die);
21093
21094 add_alignment_attribute (array_die, type);
21095 }
21096
21097 #if 0
21098 static void
21099 gen_entry_point_die (tree decl, dw_die_ref context_die)
21100 {
21101 tree origin = decl_ultimate_origin (decl);
21102 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21103
21104 if (origin != NULL)
21105 add_abstract_origin_attribute (decl_die, origin);
21106 else
21107 {
21108 add_name_and_src_coords_attributes (decl_die, decl);
21109 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21110 TYPE_UNQUALIFIED, false, context_die);
21111 }
21112
21113 if (DECL_ABSTRACT_P (decl))
21114 equate_decl_number_to_die (decl, decl_die);
21115 else
21116 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21117 }
21118 #endif
21119
21120 /* Walk through the list of incomplete types again, trying once more to
21121 emit full debugging info for them. */
21122
21123 static void
21124 retry_incomplete_types (void)
21125 {
21126 set_early_dwarf s;
21127 int i;
21128
21129 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21130 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21131 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21132 vec_safe_truncate (incomplete_types, 0);
21133 }
21134
21135 /* Determine what tag to use for a record type. */
21136
21137 static enum dwarf_tag
21138 record_type_tag (tree type)
21139 {
21140 if (! lang_hooks.types.classify_record)
21141 return DW_TAG_structure_type;
21142
21143 switch (lang_hooks.types.classify_record (type))
21144 {
21145 case RECORD_IS_STRUCT:
21146 return DW_TAG_structure_type;
21147
21148 case RECORD_IS_CLASS:
21149 return DW_TAG_class_type;
21150
21151 case RECORD_IS_INTERFACE:
21152 if (dwarf_version >= 3 || !dwarf_strict)
21153 return DW_TAG_interface_type;
21154 return DW_TAG_structure_type;
21155
21156 default:
21157 gcc_unreachable ();
21158 }
21159 }
21160
21161 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21162 include all of the information about the enumeration values also. Each
21163 enumerated type name/value is listed as a child of the enumerated type
21164 DIE. */
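/* For illustration (a sketch, not tied to any particular front end): a
   hypothetical C declaration

       enum color { RED, GREEN = 5, BLUE };

   yields a DW_TAG_enumeration_type DIE with three DW_TAG_enumerator
   children whose DW_AT_const_value attributes are 0, 5 and 6.  */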
21165
21166 static dw_die_ref
21167 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21168 {
21169 dw_die_ref type_die = lookup_type_die (type);
21170
21171 if (type_die == NULL)
21172 {
21173 type_die = new_die (DW_TAG_enumeration_type,
21174 scope_die_for (type, context_die), type);
21175 equate_type_number_to_die (type, type_die);
21176 add_name_attribute (type_die, type_tag (type));
21177 if (dwarf_version >= 4 || !dwarf_strict)
21178 {
21179 if (ENUM_IS_SCOPED (type))
21180 add_AT_flag (type_die, DW_AT_enum_class, 1);
21181 if (ENUM_IS_OPAQUE (type))
21182 add_AT_flag (type_die, DW_AT_declaration, 1);
21183 }
21184 if (!dwarf_strict)
21185 add_AT_unsigned (type_die, DW_AT_encoding,
21186 TYPE_UNSIGNED (type)
21187 ? DW_ATE_unsigned
21188 : DW_ATE_signed);
21189 }
21190 else if (! TYPE_SIZE (type))
21191 return type_die;
21192 else
21193 remove_AT (type_die, DW_AT_declaration);
21194
21195 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21196 given enum type is incomplete, do not generate the DW_AT_byte_size
21197 attribute or the DW_AT_element_list attribute. */
21198 if (TYPE_SIZE (type))
21199 {
21200 tree link;
21201
21202 TREE_ASM_WRITTEN (type) = 1;
21203 add_byte_size_attribute (type_die, type);
21204 add_alignment_attribute (type_die, type);
21205 if (dwarf_version >= 3 || !dwarf_strict)
21206 {
21207 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21208 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21209 context_die);
21210 }
21211 if (TYPE_STUB_DECL (type) != NULL_TREE)
21212 {
21213 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21214 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21215 }
21216
21217 /* If the first reference to this type was as the return type of an
21218 inline function, then it may not have a parent. Fix this now. */
21219 if (type_die->die_parent == NULL)
21220 add_child_die (scope_die_for (type, context_die), type_die);
21221
21222 for (link = TYPE_VALUES (type);
21223 link != NULL; link = TREE_CHAIN (link))
21224 {
21225 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21226 tree value = TREE_VALUE (link);
21227
21228 add_name_attribute (enum_die,
21229 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21230
21231 if (TREE_CODE (value) == CONST_DECL)
21232 value = DECL_INITIAL (value);
21233
21234 if (simple_type_size_in_bits (TREE_TYPE (value))
21235 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21236 {
21237 /* For constant forms created by add_AT_unsigned, DWARF
21238 consumers (GDB, elfutils, etc.) always zero extend
21239 the value. Only when the actual value is negative
21240 do we need to use add_AT_int to generate a constant
21241 form that can represent negative values. */
21242 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21243 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21244 add_AT_unsigned (enum_die, DW_AT_const_value,
21245 (unsigned HOST_WIDE_INT) val);
21246 else
21247 add_AT_int (enum_die, DW_AT_const_value, val);
21248 }
21249 else
21250 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21251 that here. TODO: This should be re-worked to use correct
21252 signed/unsigned double tags for all cases. */
21253 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21254 }
21255
21256 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21257 if (TYPE_ARTIFICIAL (type))
21258 add_AT_flag (type_die, DW_AT_artificial, 1);
21259 }
21260 else
21261 add_AT_flag (type_die, DW_AT_declaration, 1);
21262
21263 add_pubtype (type, type_die);
21264
21265 return type_die;
21266 }
21267
21268 /* Generate a DIE to represent either a real live formal parameter decl or to
21269 represent just the type of some formal parameter position in some function
21270 type.
21271
21272 Note that this routine is a bit unusual because its argument may be a
21273 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21274 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21275 node. If it's the former then this function is being called to output a
21276 DIE to represent a formal parameter object (or some inlining thereof). If
21277 it's the latter, then this function is only being called to output a
21278 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21279 argument type of some subprogram type.
21280 If EMIT_NAME_P is true, name and source coordinate attributes
21281 are emitted. */
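/* Illustrative note, as a sketch only: when describing a function *type*
   such as `void (*fp) (int)', this routine is reached with the bare `int'
   TYPE node and emits a nameless placeholder DW_TAG_formal_parameter;
   when describing an actual function definition it is reached with each
   PARM_DECL, and the resulting DIE also gets name, source coordinate and
   location information.  */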
21282
21283 static dw_die_ref
21284 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21285 dw_die_ref context_die)
21286 {
21287 tree node_or_origin = node ? node : origin;
21288 tree ultimate_origin;
21289 dw_die_ref parm_die = NULL;
21290
21291 if (DECL_P (node_or_origin))
21292 {
21293 parm_die = lookup_decl_die (node);
21294
21295 /* If the contexts differ, we may not be talking about the same
21296 thing.
21297 ??? When in LTO the DIE parent is the "abstract" copy and the
21298 context_die is the specification "copy". But this whole block
21299 should eventually be no longer needed. */
21300 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21301 {
21302 if (!DECL_ABSTRACT_P (node))
21303 {
21304 /* This can happen when creating an inlined instance, in
21305 which case we need to create a new DIE that will get
21306 annotated with DW_AT_abstract_origin. */
21307 parm_die = NULL;
21308 }
21309 else
21310 gcc_unreachable ();
21311 }
21312
21313 if (parm_die && parm_die->die_parent == NULL)
21314 {
21315 /* Check that parm_die already has the right attributes that
21316 we would have added below. If any attributes are
21317 missing, fall through to add them. */
21318 if (! DECL_ABSTRACT_P (node_or_origin)
21319 && !get_AT (parm_die, DW_AT_location)
21320 && !get_AT (parm_die, DW_AT_const_value))
21321 /* We are missing location info, and are about to add it. */
21322 ;
21323 else
21324 {
21325 add_child_die (context_die, parm_die);
21326 return parm_die;
21327 }
21328 }
21329 }
21330
21331 /* If we have a previously generated DIE, use it, unless this is a
21332 concrete instance (origin != NULL), in which case we need a new
21333 DIE with a corresponding DW_AT_abstract_origin. */
21334 bool reusing_die;
21335 if (parm_die && origin == NULL)
21336 reusing_die = true;
21337 else
21338 {
21339 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21340 reusing_die = false;
21341 }
21342
21343 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21344 {
21345 case tcc_declaration:
21346 ultimate_origin = decl_ultimate_origin (node_or_origin);
21347 if (node || ultimate_origin)
21348 origin = ultimate_origin;
21349
21350 if (reusing_die)
21351 goto add_location;
21352
21353 if (origin != NULL)
21354 add_abstract_origin_attribute (parm_die, origin);
21355 else if (emit_name_p)
21356 add_name_and_src_coords_attributes (parm_die, node);
21357 if (origin == NULL
21358 || (! DECL_ABSTRACT_P (node_or_origin)
21359 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21360 decl_function_context
21361 (node_or_origin))))
21362 {
21363 tree type = TREE_TYPE (node_or_origin);
21364 if (decl_by_reference_p (node_or_origin))
21365 add_type_attribute (parm_die, TREE_TYPE (type),
21366 TYPE_UNQUALIFIED,
21367 false, context_die);
21368 else
21369 add_type_attribute (parm_die, type,
21370 decl_quals (node_or_origin),
21371 false, context_die);
21372 }
21373 if (origin == NULL && DECL_ARTIFICIAL (node))
21374 add_AT_flag (parm_die, DW_AT_artificial, 1);
21375 add_location:
21376 if (node && node != origin)
21377 equate_decl_number_to_die (node, parm_die);
21378 if (! DECL_ABSTRACT_P (node_or_origin))
21379 add_location_or_const_value_attribute (parm_die, node_or_origin,
21380 node == NULL);
21381
21382 break;
21383
21384 case tcc_type:
21385 /* We were called with some kind of a ..._TYPE node. */
21386 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21387 context_die);
21388 break;
21389
21390 default:
21391 gcc_unreachable ();
21392 }
21393
21394 return parm_die;
21395 }
21396
21397 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21398 child DW_TAG_formal_parameter DIEs representing the arguments of the
21399 parameter pack.
21400
21401 PARM_PACK must be a function parameter pack.
21402 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21403 must point to the subsequent arguments of the function PACK_ARG belongs to.
21404 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21405 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
21406 following the last one for which a DIE was generated. */
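/* Hedged illustration of the shape produced here: for an instantiation of
   a hypothetical variadic template

       template <typename... T> void f (T... args);

   called as f (1, 2.0), the pack `args' yields one
   DW_TAG_GNU_formal_parameter_pack DIE whose children are the
   DW_TAG_formal_parameter DIEs for the int and double arguments; for
   f () the pack DIE simply has no children.  */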
21407
21408 static dw_die_ref
21409 gen_formal_parameter_pack_die (tree parm_pack,
21410 tree pack_arg,
21411 dw_die_ref subr_die,
21412 tree *next_arg)
21413 {
21414 tree arg;
21415 dw_die_ref parm_pack_die;
21416
21417 gcc_assert (parm_pack
21418 && lang_hooks.function_parameter_pack_p (parm_pack)
21419 && subr_die);
21420
21421 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21422 add_src_coords_attributes (parm_pack_die, parm_pack);
21423
21424 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21425 {
21426 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21427 parm_pack))
21428 break;
21429 gen_formal_parameter_die (arg, NULL,
21430 false /* Don't emit name attribute. */,
21431 parm_pack_die);
21432 }
21433 if (next_arg)
21434 *next_arg = arg;
21435 return parm_pack_die;
21436 }
21437
21438 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21439 at the end of an (ANSI prototyped) formal parameter list. */
21440
21441 static void
21442 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21443 {
21444 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21445 }
21446
21447 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21448 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21449 parameters as specified in some function type specification (except for
21450 those which appear as part of a function *definition*). */
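/* Sketch of the output, for illustration only: for the function type in a
   hypothetical declaration

       int (*handler) (int, ...);

   the children added under the function type's DIE are one nameless
   DW_TAG_formal_parameter of type int followed by a
   DW_TAG_unspecified_parameters DIE standing for the ellipsis.  */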
21451
21452 static void
21453 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21454 {
21455 tree link;
21456 tree formal_type = NULL;
21457 tree first_parm_type;
21458 tree arg;
21459
21460 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21461 {
21462 arg = DECL_ARGUMENTS (function_or_method_type);
21463 function_or_method_type = TREE_TYPE (function_or_method_type);
21464 }
21465 else
21466 arg = NULL_TREE;
21467
21468 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21469
21470 /* Make our first pass over the list of formal parameter types and output a
21471 DW_TAG_formal_parameter DIE for each one. */
21472 for (link = first_parm_type; link; )
21473 {
21474 dw_die_ref parm_die;
21475
21476 formal_type = TREE_VALUE (link);
21477 if (formal_type == void_type_node)
21478 break;
21479
21480 /* Output a (nameless) DIE to represent the formal parameter itself. */
21481 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21482 {
21483 parm_die = gen_formal_parameter_die (formal_type, NULL,
21484 true /* Emit name attribute. */,
21485 context_die);
21486 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21487 && link == first_parm_type)
21488 {
21489 add_AT_flag (parm_die, DW_AT_artificial, 1);
21490 if (dwarf_version >= 3 || !dwarf_strict)
21491 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21492 }
21493 else if (arg && DECL_ARTIFICIAL (arg))
21494 add_AT_flag (parm_die, DW_AT_artificial, 1);
21495 }
21496
21497 link = TREE_CHAIN (link);
21498 if (arg)
21499 arg = DECL_CHAIN (arg);
21500 }
21501
21502 /* If this function type has an ellipsis, add a
21503 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21504 if (formal_type != void_type_node)
21505 gen_unspecified_parameters_die (function_or_method_type, context_die);
21506
21507 /* Make our second (and final) pass over the list of formal parameter types
21508 and output DIEs to represent those types (as necessary). */
21509 for (link = TYPE_ARG_TYPES (function_or_method_type);
21510 link && TREE_VALUE (link);
21511 link = TREE_CHAIN (link))
21512 gen_type_die (TREE_VALUE (link), context_die);
21513 }
21514
21515 /* We want to generate the DIE for TYPE so that we can generate the
21516 die for MEMBER, which has been defined; we will need to refer back
21517 to the member declaration nested within TYPE. If we're trying to
21518 generate minimal debug info for TYPE, processing TYPE won't do the
21519 trick; we need to attach the member declaration by hand. */
21520
21521 static void
21522 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21523 {
21524 gen_type_die (type, context_die);
21525
21526 /* If we're trying to avoid duplicate debug info, we may not have
21527 emitted the member decl for this function. Emit it now. */
21528 if (TYPE_STUB_DECL (type)
21529 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21530 && ! lookup_decl_die (member))
21531 {
21532 dw_die_ref type_die;
21533 gcc_assert (!decl_ultimate_origin (member));
21534
21535 push_decl_scope (type);
21536 type_die = lookup_type_die_strip_naming_typedef (type);
21537 if (TREE_CODE (member) == FUNCTION_DECL)
21538 gen_subprogram_die (member, type_die);
21539 else if (TREE_CODE (member) == FIELD_DECL)
21540 {
21541 /* Ignore the nameless fields that are used to skip bits but handle
21542 C++ anonymous unions and structs. */
21543 if (DECL_NAME (member) != NULL_TREE
21544 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21545 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21546 {
21547 struct vlr_context vlr_ctx = {
21548 DECL_CONTEXT (member), /* struct_type */
21549 NULL_TREE /* variant_part_offset */
21550 };
21551 gen_type_die (member_declared_type (member), type_die);
21552 gen_field_die (member, &vlr_ctx, type_die);
21553 }
21554 }
21555 else
21556 gen_variable_die (member, NULL_TREE, type_die);
21557
21558 pop_decl_scope ();
21559 }
21560 }
21561 \f
21562 /* Forward declare this function, because it is mutually recursive
21563 with its set_block_* pairing function. */
21564 static void set_decl_origin_self (tree);
21565
21566 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21567 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21568 that it points to the node itself, thus indicating that the node is its
21569 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21570 the given node is NULL, recursively descend the decl/block tree which
21571 it is the root of, and for each other ..._DECL or BLOCK node contained
21572 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21573 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21574 values to point to themselves. */
21575
21576 static void
21577 set_block_origin_self (tree stmt)
21578 {
21579 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21580 {
21581 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21582
21583 {
21584 tree local_decl;
21585
21586 for (local_decl = BLOCK_VARS (stmt);
21587 local_decl != NULL_TREE;
21588 local_decl = DECL_CHAIN (local_decl))
21589 /* Do not recurse on nested functions since the inlining status
21590 of parent and child can be different as per the DWARF spec. */
21591 if (TREE_CODE (local_decl) != FUNCTION_DECL
21592 && !DECL_EXTERNAL (local_decl))
21593 set_decl_origin_self (local_decl);
21594 }
21595
21596 {
21597 tree subblock;
21598
21599 for (subblock = BLOCK_SUBBLOCKS (stmt);
21600 subblock != NULL_TREE;
21601 subblock = BLOCK_CHAIN (subblock))
21602 set_block_origin_self (subblock); /* Recurse. */
21603 }
21604 }
21605 }
21606
21607 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21608 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21609 node so that it points to the node itself, thus indicating that the
21610 node represents its own (abstract) origin. Additionally, if the
21611 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21612 the decl/block tree of which the given node is the root, and for
21613 each other ..._DECL or BLOCK node contained therein whose
21614 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21615 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21616 point to themselves. */
21617
21618 static void
21619 set_decl_origin_self (tree decl)
21620 {
21621 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21622 {
21623 DECL_ABSTRACT_ORIGIN (decl) = decl;
21624 if (TREE_CODE (decl) == FUNCTION_DECL)
21625 {
21626 tree arg;
21627
21628 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21629 DECL_ABSTRACT_ORIGIN (arg) = arg;
21630 if (DECL_INITIAL (decl) != NULL_TREE
21631 && DECL_INITIAL (decl) != error_mark_node)
21632 set_block_origin_self (DECL_INITIAL (decl));
21633 }
21634 }
21635 }
21636 \f
21637 /* Mark the early DIE for DECL as the abstract instance. */
21638
21639 static void
21640 dwarf2out_abstract_function (tree decl)
21641 {
21642 dw_die_ref old_die;
21643
21644 /* Make sure we have the actual abstract inline, not a clone. */
21645 decl = DECL_ORIGIN (decl);
21646
21647 if (DECL_IGNORED_P (decl))
21648 return;
21649
21650 old_die = lookup_decl_die (decl);
21651 /* With early debug we always have an old DIE unless we are in LTO
21652 and the user did not compile with debug info but only linked with it. */
21653 if (in_lto_p && ! old_die)
21654 return;
21655 gcc_assert (old_die != NULL);
21656 if (get_AT (old_die, DW_AT_inline)
21657 || get_AT (old_die, DW_AT_abstract_origin))
21658 /* We've already generated the abstract instance. */
21659 return;
21660
21661 /* Go ahead and put DW_AT_inline on the DIE. */
21662 if (DECL_DECLARED_INLINE_P (decl))
21663 {
21664 if (cgraph_function_possibly_inlined_p (decl))
21665 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
21666 else
21667 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
21668 }
21669 else
21670 {
21671 if (cgraph_function_possibly_inlined_p (decl))
21672 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
21673 else
21674 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
21675 }
21676
21677 if (DECL_DECLARED_INLINE_P (decl)
21678 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
21679 add_AT_flag (old_die, DW_AT_artificial, 1);
21680
21681 set_decl_origin_self (decl);
21682 }
21683
21684 /* Helper function of premark_used_types() which gets called through
21685 htab_traverse.
21686
21687 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21688 marked as unused by prune_unused_types. */
21689
21690 bool
21691 premark_used_types_helper (tree const &type, void *)
21692 {
21693 dw_die_ref die;
21694
21695 die = lookup_type_die (type);
21696 if (die != NULL)
21697 die->die_perennial_p = 1;
21698 return true;
21699 }
21700
21701 /* Helper function of premark_types_used_by_global_vars which gets called
21702 through htab_traverse.
21703
21704 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21705 marked as unused by prune_unused_types. The DIE of the type is marked
21706 only if the global variable using the type will actually be emitted. */
21707
21708 int
21709 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21710 void *)
21711 {
21712 struct types_used_by_vars_entry *entry;
21713 dw_die_ref die;
21714
21715 entry = (struct types_used_by_vars_entry *) *slot;
21716 gcc_assert (entry->type != NULL
21717 && entry->var_decl != NULL);
21718 die = lookup_type_die (entry->type);
21719 if (die)
21720 {
21721 /* Ask cgraph if the global variable really is to be emitted.
21722 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21723 varpool_node *node = varpool_node::get (entry->var_decl);
21724 if (node && node->definition)
21725 {
21726 die->die_perennial_p = 1;
21727 /* Keep the parent DIEs as well. */
21728 while ((die = die->die_parent) && die->die_perennial_p == 0)
21729 die->die_perennial_p = 1;
21730 }
21731 }
21732 return 1;
21733 }
21734
21735 /* Mark all members of used_types_hash as perennial. */
21736
21737 static void
21738 premark_used_types (struct function *fun)
21739 {
21740 if (fun && fun->used_types_hash)
21741 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21742 }
21743
21744 /* Mark all members of types_used_by_vars_entry as perennial. */
21745
21746 static void
21747 premark_types_used_by_global_vars (void)
21748 {
21749 if (types_used_by_vars_hash)
21750 types_used_by_vars_hash
21751 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21752 }
21753
21754 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21755 for the call argument location node CA_LOC. */
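/* Hedged sketch of the result: each noted call in DECL becomes a
   DW_TAG_call_site DIE (or its pre-DWARF-5 GNU equivalent) under the
   innermost enclosing lexical block DIE, falling back to SUBR_DIE, with
   DW_AT_call_return_pc, an optional DW_AT_call_tail_call flag, and
   DW_AT_call_origin referring to the callee when it is known.  */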
21756
21757 static dw_die_ref
21758 gen_call_site_die (tree decl, dw_die_ref subr_die,
21759 struct call_arg_loc_node *ca_loc)
21760 {
21761 dw_die_ref stmt_die = NULL, die;
21762 tree block = ca_loc->block;
21763
21764 while (block
21765 && block != DECL_INITIAL (decl)
21766 && TREE_CODE (block) == BLOCK)
21767 {
21768 stmt_die = BLOCK_DIE (block);
21769 if (stmt_die)
21770 break;
21771 block = BLOCK_SUPERCONTEXT (block);
21772 }
21773 if (stmt_die == NULL)
21774 stmt_die = subr_die;
21775 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21776 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21777 if (ca_loc->tail_call_p)
21778 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21779 if (ca_loc->symbol_ref)
21780 {
21781 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21782 if (tdie)
21783 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21784 else
21785 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21786 false);
21787 }
21788 return die;
21789 }
21790
21791 /* Generate a DIE to represent a declared function (either file-scope or
21792 block-local). */
21793
21794 static void
21795 gen_subprogram_die (tree decl, dw_die_ref context_die)
21796 {
21797 tree origin = decl_ultimate_origin (decl);
21798 dw_die_ref subr_die;
21799 dw_die_ref old_die = lookup_decl_die (decl);
21800
21801 /* This function gets called multiple times for different stages of
21802 the debug process. For example, for func() in this code:
21803
21804 namespace S
21805 {
21806 void func() { ... }
21807 }
21808
21809 ...we get called 4 times. Twice in early debug and twice in
21810 late debug:
21811
21812 Early debug
21813 -----------
21814
21815 1. Once while generating func() within the namespace. This is
21816 the declaration. The declaration bit below is set, as the
21817 context is the namespace.
21818
21819 A new DIE will be generated with DW_AT_declaration set.
21820
21821 2. Once for func() itself. This is the specification. The
21822 declaration bit below is clear as the context is the CU.
21823
21824 We will use the cached DIE from (1) to create a new DIE with
21825 DW_AT_specification pointing to the declaration in (1).
21826
21827 Late debug via rest_of_handle_final()
21828 -------------------------------------
21829
21830 3. Once generating func() within the namespace. This is also the
21831 declaration, as in (1), but this time we will early exit below
21832 as we have a cached DIE and a declaration needs no additional
21833 annotations (no locations), as the source declaration line
21834 info is enough.
21835
21836 4. Once for func() itself. As in (2), this is the specification,
21837 but this time we will re-use the cached DIE, and just annotate
21838 it with the location information that should now be available.
21839
21840 For something without namespaces, but with abstract instances, we
21841 are also called multiple times:
21842
21843 class Base
21844 {
21845 public:
21846 Base (); // constructor declaration (1)
21847 };
21848
21849 Base::Base () { } // constructor specification (2)
21850
21851 Early debug
21852 -----------
21853
21854 1. Once for the Base() constructor by virtue of it being a
21855 member of the Base class. This is done via
21856 rest_of_type_compilation.
21857
21858 This is a declaration, so a new DIE will be created with
21859 DW_AT_declaration.
21860
21861 2. Once for the Base() constructor definition, but this time
21862 while generating the abstract instance of the base
21863 constructor (__base_ctor) which is being generated via early
21864 debug of reachable functions.
21865
21866 Even though we have a cached version of the declaration (1),
21867 we will create a DW_AT_specification of the declaration DIE
21868 in (1).
21869
21870 3. Once for the __base_ctor itself, but this time, we generate
21871 a DW_AT_abstract_origin version of the DW_AT_specification in
21872 (2).
21873
21874 Late debug via rest_of_handle_final
21875 -----------------------------------
21876
21877 4. One final time for the __base_ctor (which will have a cached
21878 DIE with DW_AT_abstract_origin created in (3)). This time,
21879 we will just annotate the location information now
21880 available.
21881 */
21882 int declaration = (current_function_decl != decl
21883 || class_or_namespace_scope_p (context_die));
21884
21885 /* Now that the C++ front end lazily declares artificial member fns, we
21886 might need to retrofit the declaration into its class. */
21887 if (!declaration && !origin && !old_die
21888 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
21889 && !class_or_namespace_scope_p (context_die)
21890 && debug_info_level > DINFO_LEVEL_TERSE)
21891 old_die = force_decl_die (decl);
21892
21893 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
21894 if (origin != NULL)
21895 {
21896 gcc_assert (!declaration || local_scope_p (context_die));
21897
21898 /* Fixup die_parent for the abstract instance of a nested
21899 inline function. */
21900 if (old_die && old_die->die_parent == NULL)
21901 add_child_die (context_die, old_die);
21902
21903 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
21904 {
21905 /* If we have a DW_AT_abstract_origin we have a working
21906 cached version. */
21907 subr_die = old_die;
21908 }
21909 else
21910 {
21911 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
21912 add_abstract_origin_attribute (subr_die, origin);
21913 /* This is where the actual code for a cloned function is.
21914 Let's emit linkage name attribute for it. This helps
21915 debuggers to e.g, set breakpoints into
21916 constructors/destructors when the user asks "break
21917 K::K". */
21918 add_linkage_name (subr_die, decl);
21919 }
21920 }
21921 /* A cached copy, possibly from early dwarf generation. Reuse as
21922 much as possible. */
21923 else if (old_die)
21924 {
21925 /* A declaration that has been previously dumped needs no
21926 additional information. */
21927 if (declaration)
21928 return;
21929
21930 if (!get_AT_flag (old_die, DW_AT_declaration)
21931 /* We can have a normal definition following an inline one in the
21932 case of redefinition of GNU C extern inlines.
21933 It seems reasonable to use AT_specification in this case. */
21934 && !get_AT (old_die, DW_AT_inline))
21935 {
21936 /* Detect and ignore this case, where we are trying to output
21937 something we have already output. */
21938 if (get_AT (old_die, DW_AT_low_pc)
21939 || get_AT (old_die, DW_AT_ranges))
21940 return;
21941
21942 /* If we have no location information, this must be a
21943 partially generated DIE from early dwarf generation.
21944 Fall through and generate it. */
21945 }
21946
21947 /* If the definition comes from the same place as the declaration,
21948 maybe use the old DIE. We always want the DIE for this function
21949 that has the *_pc attributes to be under comp_unit_die so the
21950 debugger can find it. We also need to do this for abstract
21951 instances of inlines, since the spec requires the out-of-line copy
21952 to have the same parent. For local class methods, this doesn't
21953 apply; we just use the old DIE. */
21954 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
21955 struct dwarf_file_data * file_index = lookup_filename (s.file);
21956 if ((is_cu_die (old_die->die_parent)
21957 /* This condition fixes the inconsistency/ICE with the
21958 following Fortran test (or some derivative thereof) while
21959 building libgfortran:
21960
21961 module some_m
21962 contains
21963 logical function funky (FLAG)
21964 funky = .true.
21965 end function
21966 end module
21967 */
21968 || (old_die->die_parent
21969 && old_die->die_parent->die_tag == DW_TAG_module)
21970 || context_die == NULL)
21971 && (DECL_ARTIFICIAL (decl)
21972 /* The location attributes may be in the abstract origin
21973 which in the case of LTO might be not available to
21974 look at. */
21975 || get_AT (old_die, DW_AT_abstract_origin)
21976 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
21977 && (get_AT_unsigned (old_die, DW_AT_decl_line)
21978 == (unsigned) s.line)
21979 && (!debug_column_info
21980 || s.column == 0
21981 || (get_AT_unsigned (old_die, DW_AT_decl_column)
21982 == (unsigned) s.column)))))
21983 {
21984 subr_die = old_die;
21985
21986 /* Clear out the declaration attribute, but leave the
21987 parameters so they can be augmented with location
21988 information later. Unless this was a declaration, in
21989 which case, wipe out the nameless parameters and recreate
21990 them further down. */
21991 if (remove_AT (subr_die, DW_AT_declaration))
21992 {
21993
21994 remove_AT (subr_die, DW_AT_object_pointer);
21995 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
21996 }
21997 }
21998 /* Make a specification pointing to the previously built
21999 declaration. */
22000 else
22001 {
22002 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22003 add_AT_specification (subr_die, old_die);
22004 add_pubname (decl, subr_die);
22005 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22006 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22007 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22008 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22009 if (debug_column_info
22010 && s.column
22011 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22012 != (unsigned) s.column))
22013 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22014
22015 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22016 emit the real type on the definition die. */
22017 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22018 {
22019 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22020 if (die == auto_die || die == decltype_auto_die)
22021 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22022 TYPE_UNQUALIFIED, false, context_die);
22023 }
22024
22025 /* When we process the method declaration, we haven't seen
22026 the out-of-class defaulted definition yet, so we have to
22027 recheck now. */
22028 if ((dwarf_version >= 5 || ! dwarf_strict)
22029 && !get_AT (subr_die, DW_AT_defaulted))
22030 {
22031 int defaulted
22032 = lang_hooks.decls.decl_dwarf_attribute (decl,
22033 DW_AT_defaulted);
22034 if (defaulted != -1)
22035 {
22036 /* Other values must have been handled before. */
22037 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22038 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22039 }
22040 }
22041 }
22042 }
22043 /* Create a fresh DIE for anything else. */
22044 else
22045 {
22046 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22047
22048 if (TREE_PUBLIC (decl))
22049 add_AT_flag (subr_die, DW_AT_external, 1);
22050
22051 add_name_and_src_coords_attributes (subr_die, decl);
22052 add_pubname (decl, subr_die);
22053 if (debug_info_level > DINFO_LEVEL_TERSE)
22054 {
22055 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22056 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22057 TYPE_UNQUALIFIED, false, context_die);
22058 }
22059
22060 add_pure_or_virtual_attribute (subr_die, decl);
22061 if (DECL_ARTIFICIAL (decl))
22062 add_AT_flag (subr_die, DW_AT_artificial, 1);
22063
22064 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22065 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22066
22067 add_alignment_attribute (subr_die, decl);
22068
22069 add_accessibility_attribute (subr_die, decl);
22070 }
22071
22072 /* Unless we have an existing non-declaration DIE, equate the new
22073 DIE. */
22074 if (!old_die || is_declaration_die (old_die))
22075 equate_decl_number_to_die (decl, subr_die);
22076
22077 if (declaration)
22078 {
22079 if (!old_die || !get_AT (old_die, DW_AT_inline))
22080 {
22081 add_AT_flag (subr_die, DW_AT_declaration, 1);
22082
22083 /* If this is an explicit function declaration then generate
22084 a DW_AT_explicit attribute. */
22085 if ((dwarf_version >= 3 || !dwarf_strict)
22086 && lang_hooks.decls.decl_dwarf_attribute (decl,
22087 DW_AT_explicit) == 1)
22088 add_AT_flag (subr_die, DW_AT_explicit, 1);
22089
22090 /* If this is a C++11 deleted special function member then generate
22091 a DW_AT_deleted attribute. */
22092 if ((dwarf_version >= 5 || !dwarf_strict)
22093 && lang_hooks.decls.decl_dwarf_attribute (decl,
22094 DW_AT_deleted) == 1)
22095 add_AT_flag (subr_die, DW_AT_deleted, 1);
22096
22097 /* If this is a C++11 defaulted special function member then
22098 generate a DW_AT_defaulted attribute. */
22099 if (dwarf_version >= 5 || !dwarf_strict)
22100 {
22101 int defaulted
22102 = lang_hooks.decls.decl_dwarf_attribute (decl,
22103 DW_AT_defaulted);
22104 if (defaulted != -1)
22105 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22106 }
22107
22108 /* If this is a C++11 non-static member function with & ref-qualifier
22109 then generate a DW_AT_reference attribute. */
22110 if ((dwarf_version >= 5 || !dwarf_strict)
22111 && lang_hooks.decls.decl_dwarf_attribute (decl,
22112 DW_AT_reference) == 1)
22113 add_AT_flag (subr_die, DW_AT_reference, 1);
22114
22115 /* If this is a C++11 non-static member function with &&
22116 ref-qualifier then generate a DW_AT_rvalue_reference attribute. */
22117 if ((dwarf_version >= 5 || !dwarf_strict)
22118 && lang_hooks.decls.decl_dwarf_attribute (decl,
22119 DW_AT_rvalue_reference)
22120 == 1)
22121 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22122 }
22123 }
22124 /* For non DECL_EXTERNALs, if range information is available, fill
22125 the DIE with it. */
22126 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22127 {
22128 HOST_WIDE_INT cfa_fb_offset;
22129
22130 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22131
22132 if (!crtl->has_bb_partition)
22133 {
22134 dw_fde_ref fde = fun->fde;
22135 if (fde->dw_fde_begin)
22136 {
22137 /* We have already generated the labels. */
22138 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22139 fde->dw_fde_end, false);
22140 }
22141 else
22142 {
22143 /* Create start/end labels and add the range. */
22144 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22145 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22146 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22147 current_function_funcdef_no);
22148 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22149 current_function_funcdef_no);
22150 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22151 false);
22152 }
22153
22154 #if VMS_DEBUGGING_INFO
22155 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22156 Section 2.3 Prologue and Epilogue Attributes:
22157 When a breakpoint is set on entry to a function, it is generally
22158 desirable for execution to be suspended, not on the very first
22159 instruction of the function, but rather at a point after the
22160 function's frame has been set up, after any language defined local
22161 declaration processing has been completed, and before execution of
22162 the first statement of the function begins. Debuggers generally
22163 cannot properly determine where this point is. Similarly for a
22164 breakpoint set on exit from a function. The prologue and epilogue
22165 attributes allow a compiler to communicate the location(s) to use. */
22166
22167 {
22168 if (fde->dw_fde_vms_end_prologue)
22169 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22170 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22171
22172 if (fde->dw_fde_vms_begin_epilogue)
22173 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22174 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22175 }
22176 #endif
22177
22178 }
22179 else
22180 {
22181 /* Generate pubnames entries for the split function code ranges. */
22182 dw_fde_ref fde = fun->fde;
22183
22184 if (fde->dw_fde_second_begin)
22185 {
22186 if (dwarf_version >= 3 || !dwarf_strict)
22187 {
22188 /* We should use ranges for non-contiguous code section
22189 addresses. Use the actual code range for the initial
22190 section, since the HOT/COLD labels might precede an
22191 alignment offset. */
22192 bool range_list_added = false;
22193 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22194 fde->dw_fde_end, &range_list_added,
22195 false);
22196 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22197 fde->dw_fde_second_end,
22198 &range_list_added, false);
22199 if (range_list_added)
22200 add_ranges (NULL);
22201 }
22202 else
22203 {
22204 /* There is no real support in DWARF 2 for this, so we make
22205 a workaround. First, emit the pub name for the segment
22206 containing the function label. Then make and emit a
22207 simplified subprogram DIE for the second segment with the
22208 name prefixed by __second_sect_of_. We use the same
22209 linkage name for the second DIE so that gdb will find both
22210 sections when given "b foo". */
22211 const char *name = NULL;
22212 tree decl_name = DECL_NAME (decl);
22213 dw_die_ref seg_die;
22214
22215 /* Do the 'primary' section. */
22216 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22217 fde->dw_fde_end, false);
22218
22219 /* Build a minimal DIE for the secondary section. */
22220 seg_die = new_die (DW_TAG_subprogram,
22221 subr_die->die_parent, decl);
22222
22223 if (TREE_PUBLIC (decl))
22224 add_AT_flag (seg_die, DW_AT_external, 1);
22225
22226 if (decl_name != NULL
22227 && IDENTIFIER_POINTER (decl_name) != NULL)
22228 {
22229 name = dwarf2_name (decl, 1);
22230 if (! DECL_ARTIFICIAL (decl))
22231 add_src_coords_attributes (seg_die, decl);
22232
22233 add_linkage_name (seg_die, decl);
22234 }
22235 gcc_assert (name != NULL);
22236 add_pure_or_virtual_attribute (seg_die, decl);
22237 if (DECL_ARTIFICIAL (decl))
22238 add_AT_flag (seg_die, DW_AT_artificial, 1);
22239
22240 name = concat ("__second_sect_of_", name, NULL);
22241 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22242 fde->dw_fde_second_end, false);
22243 add_name_attribute (seg_die, name);
22244 if (want_pubnames ())
22245 add_pubname_string (name, seg_die);
22246 }
22247 }
22248 else
22249 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22250 false);
22251 }
22252
22253 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22254
22255 /* We define the "frame base" as the function's CFA. This is more
22256 convenient for several reasons: (1) It's stable across the prologue
22257 and epilogue, which makes it better than just a frame pointer,
22258 (2) With dwarf3, there exists a one-byte encoding that allows us
22259 to reference the .debug_frame data by proxy, but failing that,
22260 (3) We can at least reuse the code inspection and interpretation
22261 code that determines the CFA position at various points in the
22262 function. */
22263 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22264 {
22265 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22266 add_AT_loc (subr_die, DW_AT_frame_base, op);
22267 }
22268 else
22269 {
22270 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22271 if (list->dw_loc_next)
22272 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22273 else
22274 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22275 }
22276
22277 /* Compute a displacement from the "steady-state frame pointer" to
22278 the CFA. The former is what all stack slots and argument slots
22279 will reference in the rtl; the latter is what we've told the
22280 debugger about. We'll need to adjust all frame_base references
22281 by this displacement. */
22282 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22283
22284 if (fun->static_chain_decl)
22285 {
22286 /* DWARF requires here a location expression that computes the
22287 address of the enclosing subprogram's frame base. The machinery
22288 in tree-nested.c is supposed to store this specific address in the
22289 last field of the FRAME record. */
22290 const tree frame_type
22291 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22292 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22293
22294 tree fb_expr
22295 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22296 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22297 fb_expr, fb_decl, NULL_TREE);
22298
22299 add_AT_location_description (subr_die, DW_AT_static_link,
22300 loc_list_from_tree (fb_expr, 0, NULL));
22301 }
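/* Hedged illustration of the static link above: for a hypothetical
   GNU C nested function

       void outer (void) { int x; void inner (void) { x++; } }

   the DIE for `inner' gets a DW_AT_static_link expression that loads
   the frame base address of `outer' from the last field of the FRAME
   record reached through inner's static chain.  */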
22302
22303 resolve_variable_values ();
22304 }
22305
22306 /* Generate child DIEs for template parameters. */
22307 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22308 gen_generic_params_dies (decl);
22309
22310 /* Now output descriptions of the arguments for this function. This gets
22311 (unnecessarily?) complex because of the fact that the DECL_ARGUMENTS list
22312 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22313 `...' at the end of the formal parameter list. In order to find out if
22314 there was a trailing ellipsis or not, we must instead look at the type
22315 associated with the FUNCTION_DECL. This will be a node of type
22316 FUNCTION_TYPE. If the chain of type nodes hanging off of this
22317 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22318 an ellipsis at the end. */
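/* Hedged example of the check described above: for a hypothetical
   `int f (int, ...)' the TYPE_ARG_TYPES chain ends with the int node,
   while for `int g (int)' it ends with void_type_node; only f therefore
   gets a DW_TAG_unspecified_parameters child.  */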
22319
22320 /* In the case where we are describing a mere function declaration, all we
22321 need to do here (and all we *can* do here) is to describe the *types* of
22322 its formal parameters. */
22323 if (debug_info_level <= DINFO_LEVEL_TERSE)
22324 ;
22325 else if (declaration)
22326 gen_formal_types_die (decl, subr_die);
22327 else
22328 {
22329 /* Generate DIEs to represent all known formal parameters. */
22330 tree parm = DECL_ARGUMENTS (decl);
22331 tree generic_decl = early_dwarf
22332 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22333 tree generic_decl_parm = generic_decl
22334 ? DECL_ARGUMENTS (generic_decl)
22335 : NULL;
22336
22337 /* Now we want to walk the list of parameters of the function and
22338 emit their relevant DIEs.
22339
22340 We consider the case of DECL being an instance of a generic function
22341 as well as it being a normal function.
22342
22343 If DECL is an instance of a generic function we walk the
22344 parameters of the generic function declaration _and_ the parameters of
22345 DECL itself. This is useful because we want to emit specific DIEs for
22346 function parameter packs and those are declared as part of the
22347 generic function declaration. In that particular case,
22348 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22349 That DIE has child DIEs representing the set of arguments
22350 of the pack. Note that the set of pack arguments can be empty.
22351 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22352 child DIEs.
22353
22354 Otherwise, we just consider the parameters of DECL. */
22355 while (generic_decl_parm || parm)
22356 {
22357 if (generic_decl_parm
22358 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22359 gen_formal_parameter_pack_die (generic_decl_parm,
22360 parm, subr_die,
22361 &parm);
22362 else if (parm && !POINTER_BOUNDS_P (parm))
22363 {
22364 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22365
22366 if (early_dwarf
22367 && parm == DECL_ARGUMENTS (decl)
22368 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22369 && parm_die
22370 && (dwarf_version >= 3 || !dwarf_strict))
22371 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22372
22373 parm = DECL_CHAIN (parm);
22374 }
22375 else if (parm)
22376 parm = DECL_CHAIN (parm);
22377
22378 if (generic_decl_parm)
22379 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22380 }
22381
22382 /* Decide whether we need an unspecified_parameters DIE at the end.
22383 There are two cases to do this for: 1) the ANSI `...' declaration -
22384 this is detectable when the end of the arg list is not a
22385 void_type_node; 2) an unprototyped function declaration (not a
22386 definition). This just means that we have no info about the
22387 parameters at all. */
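      /* For instance (illustrative): `int f (int a, ...);' gets a
	 DW_TAG_unspecified_parameters child after the DIE for `a', and an
	 unprototyped declaration `int g ();' gets one as well, since we know
	 nothing about its parameters.  */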
22388 if (early_dwarf)
22389 {
22390 if (prototype_p (TREE_TYPE (decl)))
22391 {
22392 /* This is the prototyped case; check for a trailing `...'. */
22393 if (stdarg_p (TREE_TYPE (decl)))
22394 gen_unspecified_parameters_die (decl, subr_die);
22395 }
22396 else if (DECL_INITIAL (decl) == NULL_TREE)
22397 gen_unspecified_parameters_die (decl, subr_die);
22398 }
22399 }
22400
22401 if (subr_die != old_die)
22402 /* Add the calling convention attribute if requested. */
22403 add_calling_convention_attribute (subr_die, decl);
22404
22405 /* Output Dwarf info for all of the stuff within the body of the function
22406 (if it has one - it may be just a declaration).
22407
22408 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22409 a function. This BLOCK actually represents the outermost binding contour
22410 for the function, i.e. the contour in which the function's formal
22411 parameters and labels get declared. Curiously, it appears that the front
22412 end doesn't actually put the PARM_DECL nodes for the current function onto
22413 the BLOCK_VARS list for this outer scope; they are strung off the
22414 DECL_ARGUMENTS list for the function instead.
22415
22416 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
22417 the LABEL_DECL nodes for the function however, and we output DWARF info
22418 for those in decls_for_scope. Just within the `outer_scope' there will be
22419 a BLOCK node representing the function's outermost pair of curly braces,
22420 and any blocks used for the base and member initializers of a C++
22421 constructor function. */
22422 tree outer_scope = DECL_INITIAL (decl);
22423 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22424 {
22425 int call_site_note_count = 0;
22426 int tail_call_site_note_count = 0;
22427
22428 /* Emit a DW_TAG_variable DIE for a named return value. */
22429 if (DECL_NAME (DECL_RESULT (decl)))
22430 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22431
22432 /* The first time through decls_for_scope we will generate the
22433 DIEs for the locals. The second time, we fill in the
22434 location info. */
22435 decls_for_scope (outer_scope, subr_die);
22436
22437 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22438 {
22439 struct call_arg_loc_node *ca_loc;
22440 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22441 {
22442 dw_die_ref die = NULL;
22443 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22444 rtx arg, next_arg;
22445
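		  /* As used below, each ARG element of the note's location
		     list pairs a location with a value:
		     XEXP (XEXP (arg, 0), 0) is where the argument lives at
		     the call site (a register, a MEM, a DEBUG_PARAMETER_REF,
		     or pc_rtx / a CLOBBER of pc_rtx for the call target
		     itself), and XEXP (XEXP (arg, 0), 1) is the expression
		     for its value.  */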
22446 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22447 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22448 : NULL_RTX);
22449 arg; arg = next_arg)
22450 {
22451 dw_loc_descr_ref reg, val;
22452 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22453 dw_die_ref cdie, tdie = NULL;
22454
22455 next_arg = XEXP (arg, 1);
22456 if (REG_P (XEXP (XEXP (arg, 0), 0))
22457 && next_arg
22458 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22459 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22460 && REGNO (XEXP (XEXP (arg, 0), 0))
22461 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22462 next_arg = XEXP (next_arg, 1);
22463 if (mode == VOIDmode)
22464 {
22465 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22466 if (mode == VOIDmode)
22467 mode = GET_MODE (XEXP (arg, 0));
22468 }
22469 if (mode == VOIDmode || mode == BLKmode)
22470 continue;
22471 /* Get dynamic information about the call target only if we
22472 have no static information: we cannot generate both
22473 DW_AT_call_origin and DW_AT_call_target
22474 attributes. */
22475 if (ca_loc->symbol_ref == NULL_RTX)
22476 {
22477 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22478 {
22479 tloc = XEXP (XEXP (arg, 0), 1);
22480 continue;
22481 }
22482 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22483 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22484 {
22485 tlocc = XEXP (XEXP (arg, 0), 1);
22486 continue;
22487 }
22488 }
22489 reg = NULL;
22490 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22491 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22492 VAR_INIT_STATUS_INITIALIZED);
22493 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22494 {
22495 rtx mem = XEXP (XEXP (arg, 0), 0);
22496 reg = mem_loc_descriptor (XEXP (mem, 0),
22497 get_address_mode (mem),
22498 GET_MODE (mem),
22499 VAR_INIT_STATUS_INITIALIZED);
22500 }
22501 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22502 == DEBUG_PARAMETER_REF)
22503 {
22504 tree tdecl
22505 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22506 tdie = lookup_decl_die (tdecl);
22507 if (tdie == NULL)
22508 continue;
22509 }
22510 else
22511 continue;
22512 if (reg == NULL
22513 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22514 != DEBUG_PARAMETER_REF)
22515 continue;
22516 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22517 VOIDmode,
22518 VAR_INIT_STATUS_INITIALIZED);
22519 if (val == NULL)
22520 continue;
22521 if (die == NULL)
22522 die = gen_call_site_die (decl, subr_die, ca_loc);
22523 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22524 NULL_TREE);
22525 if (reg != NULL)
22526 add_AT_loc (cdie, DW_AT_location, reg);
22527 else if (tdie != NULL)
22528 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22529 tdie);
22530 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22531 if (next_arg != XEXP (arg, 1))
22532 {
22533 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22534 if (mode == VOIDmode)
22535 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22536 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22537 0), 1),
22538 mode, VOIDmode,
22539 VAR_INIT_STATUS_INITIALIZED);
22540 if (val != NULL)
22541 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22542 val);
22543 }
22544 }
22545 if (die == NULL
22546 && (ca_loc->symbol_ref || tloc))
22547 die = gen_call_site_die (decl, subr_die, ca_loc);
22548 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22549 {
22550 dw_loc_descr_ref tval = NULL;
22551
22552 if (tloc != NULL_RTX)
22553 tval = mem_loc_descriptor (tloc,
22554 GET_MODE (tloc) == VOIDmode
22555 ? Pmode : GET_MODE (tloc),
22556 VOIDmode,
22557 VAR_INIT_STATUS_INITIALIZED);
22558 if (tval)
22559 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22560 else if (tlocc != NULL_RTX)
22561 {
22562 tval = mem_loc_descriptor (tlocc,
22563 GET_MODE (tlocc) == VOIDmode
22564 ? Pmode : GET_MODE (tlocc),
22565 VOIDmode,
22566 VAR_INIT_STATUS_INITIALIZED);
22567 if (tval)
22568 add_AT_loc (die,
22569 dwarf_AT (DW_AT_call_target_clobbered),
22570 tval);
22571 }
22572 }
22573 if (die != NULL)
22574 {
22575 call_site_note_count++;
22576 if (ca_loc->tail_call_p)
22577 tail_call_site_note_count++;
22578 }
22579 }
22580 }
22581 call_arg_locations = NULL;
22582 call_arg_loc_last = NULL;
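	  /* If every call site in this function got a DIE above, advertise
	     that with DW_AT_call_all_calls; if only every tail call did,
	     advertise DW_AT_call_all_tail_calls instead.  */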
22583 if (tail_call_site_count >= 0
22584 && tail_call_site_count == tail_call_site_note_count
22585 && (!dwarf_strict || dwarf_version >= 5))
22586 {
22587 if (call_site_count >= 0
22588 && call_site_count == call_site_note_count)
22589 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22590 else
22591 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22592 }
22593 call_site_count = -1;
22594 tail_call_site_count = -1;
22595 }
22596
22597 /* Mark used types after we have created DIEs for the function's scopes. */
22598 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22599 }
22600
22601 /* Returns a hash value for X (which really is a die_struct). */
22602
22603 hashval_t
22604 block_die_hasher::hash (die_struct *d)
22605 {
22606 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22607 }
22608
22609 /* Return true if the decl_id and die_parent of die_struct X are the same
22610 as the decl_id and die_parent of die_struct Y. */
22611
22612 bool
22613 block_die_hasher::equal (die_struct *x, die_struct *y)
22614 {
22615 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22616 }
22617
22618 /* Return TRUE if DECL, which may have been previously generated as
22619 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22620 true if decl (or its origin) is either an extern declaration or a
22621 class/namespace scoped declaration.
22622
22623 The declare_in_namespace support causes us to get two DIEs for one
22624 variable, both of which are declarations. We want to avoid
22625 considering one to be a specification, so we must test for
22626 DECLARATION and DW_AT_declaration. */
22627 static inline bool
22628 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22629 {
22630 return (old_die && TREE_STATIC (decl) && !declaration
22631 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22632 }
22633
22634 /* Return true if DECL is a local static. */
22635
22636 static inline bool
22637 local_function_static (tree decl)
22638 {
22639 gcc_assert (VAR_P (decl));
22640 return TREE_STATIC (decl)
22641 && DECL_CONTEXT (decl)
22642 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22643 }
22644
22645 /* Generate a DIE to represent a declared data object.
22646 Either DECL or ORIGIN must be non-null. */
22647
22648 static void
22649 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22650 {
22651 HOST_WIDE_INT off = 0;
22652 tree com_decl;
22653 tree decl_or_origin = decl ? decl : origin;
22654 tree ultimate_origin;
22655 dw_die_ref var_die;
22656 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22657 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22658 || class_or_namespace_scope_p (context_die));
22659 bool specialization_p = false;
22660 bool no_linkage_name = false;
22661
22662 /* While C++ inline static data members have definitions inside of the
22663 class, force the first DIE to be a declaration, then let gen_member_die
22664 reparent it to the class context and call gen_variable_die again
22665 to create the outside of the class DIE for the definition. */
22666 if (!declaration
22667 && old_die == NULL
22668 && decl
22669 && DECL_CONTEXT (decl)
22670 && TYPE_P (DECL_CONTEXT (decl))
22671 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22672 {
22673 declaration = true;
22674 if (dwarf_version < 5)
22675 no_linkage_name = true;
22676 }
22677
22678 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22679 if (decl || ultimate_origin)
22680 origin = ultimate_origin;
22681 com_decl = fortran_common (decl_or_origin, &off);
22682
22683 /* A symbol in a Fortran common block gets emitted as a child of the common
22684 block's DIE, in the form of a data member. */
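  /* Illustrative example: for Fortran

       COMMON /blk/ x, y

     we emit one DW_TAG_common_block DIE for `blk' and DW_TAG_variable
     children for `x' and `y', each located relative to the common block's
     address (plus the symbol's offset within the block, if any).  */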
22685 if (com_decl)
22686 {
22687 dw_die_ref com_die;
22688 dw_loc_list_ref loc = NULL;
22689 die_node com_die_arg;
22690
22691 var_die = lookup_decl_die (decl_or_origin);
22692 if (var_die)
22693 {
22694 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22695 {
22696 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22697 if (loc)
22698 {
22699 if (off)
22700 {
22701 /* Optimize the common case. */
22702 if (single_element_loc_list_p (loc)
22703 && loc->expr->dw_loc_opc == DW_OP_addr
22704 && loc->expr->dw_loc_next == NULL
22705 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22706 == SYMBOL_REF)
22707 {
22708 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22709 loc->expr->dw_loc_oprnd1.v.val_addr
22710 = plus_constant (GET_MODE (x), x , off);
22711 }
22712 else
22713 loc_list_plus_const (loc, off);
22714 }
22715 add_AT_location_description (var_die, DW_AT_location, loc);
22716 remove_AT (var_die, DW_AT_declaration);
22717 }
22718 }
22719 return;
22720 }
22721
22722 if (common_block_die_table == NULL)
22723 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22724
22725 com_die_arg.decl_id = DECL_UID (com_decl);
22726 com_die_arg.die_parent = context_die;
22727 com_die = common_block_die_table->find (&com_die_arg);
22728 if (! early_dwarf)
22729 loc = loc_list_from_tree (com_decl, 2, NULL);
22730 if (com_die == NULL)
22731 {
22732 const char *cnam
22733 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22734 die_node **slot;
22735
22736 com_die = new_die (DW_TAG_common_block, context_die, decl);
22737 add_name_and_src_coords_attributes (com_die, com_decl);
22738 if (loc)
22739 {
22740 add_AT_location_description (com_die, DW_AT_location, loc);
22741 /* Avoid sharing the same loc descriptor between
22742 DW_TAG_common_block and DW_TAG_variable. */
22743 loc = loc_list_from_tree (com_decl, 2, NULL);
22744 }
22745 else if (DECL_EXTERNAL (decl_or_origin))
22746 add_AT_flag (com_die, DW_AT_declaration, 1);
22747 if (want_pubnames ())
22748 add_pubname_string (cnam, com_die); /* ??? needed? */
22749 com_die->decl_id = DECL_UID (com_decl);
22750 slot = common_block_die_table->find_slot (com_die, INSERT);
22751 *slot = com_die;
22752 }
22753 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22754 {
22755 add_AT_location_description (com_die, DW_AT_location, loc);
22756 loc = loc_list_from_tree (com_decl, 2, NULL);
22757 remove_AT (com_die, DW_AT_declaration);
22758 }
22759 var_die = new_die (DW_TAG_variable, com_die, decl);
22760 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22761 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22762 decl_quals (decl_or_origin), false,
22763 context_die);
22764 add_alignment_attribute (var_die, decl);
22765 add_AT_flag (var_die, DW_AT_external, 1);
22766 if (loc)
22767 {
22768 if (off)
22769 {
22770 /* Optimize the common case. */
22771 if (single_element_loc_list_p (loc)
22772 && loc->expr->dw_loc_opc == DW_OP_addr
22773 && loc->expr->dw_loc_next == NULL
22774 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22775 {
22776 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22777 loc->expr->dw_loc_oprnd1.v.val_addr
22778 = plus_constant (GET_MODE (x), x, off);
22779 }
22780 else
22781 loc_list_plus_const (loc, off);
22782 }
22783 add_AT_location_description (var_die, DW_AT_location, loc);
22784 }
22785 else if (DECL_EXTERNAL (decl_or_origin))
22786 add_AT_flag (var_die, DW_AT_declaration, 1);
22787 if (decl)
22788 equate_decl_number_to_die (decl, var_die);
22789 return;
22790 }
22791
22792 if (old_die)
22793 {
22794 if (declaration)
22795 {
22796 /* A declaration that has been previously dumped needs no
22797 further annotations, since it doesn't need location info on
22798 the second pass. */
22799 return;
22800 }
22801 else if (decl_will_get_specification_p (old_die, decl, declaration)
22802 && !get_AT (old_die, DW_AT_specification))
22803 {
22804 /* Fall-thru so we can make a new variable die along with a
22805 DW_AT_specification. */
22806 }
22807 else if (origin && old_die->die_parent != context_die)
22808 {
22809 /* If we will be creating an inlined instance, we need a
22810 new DIE that will get annotated with
22811 DW_AT_abstract_origin. Clear things so we can get a
22812 new DIE. */
22813 gcc_assert (!DECL_ABSTRACT_P (decl));
22814 old_die = NULL;
22815 }
22816 else
22817 {
22818 /* If a DIE was dumped early, it still needs location info.
22819 Skip to where we fill the location bits. */
22820 var_die = old_die;
22821
22822 /* ??? In LTRANS we cannot annotate early created variably
22823 modified type DIEs without copying them and adjusting all
22824 references to them. Thus we dumped them again; also add a
22825 reference to them. */
22826 tree type = TREE_TYPE (decl_or_origin);
22827 if (in_lto_p
22828 && variably_modified_type_p
22829 (type, decl_function_context (decl_or_origin)))
22830 {
22831 if (decl_by_reference_p (decl_or_origin))
22832 add_type_attribute (var_die, TREE_TYPE (type),
22833 TYPE_UNQUALIFIED, false, context_die);
22834 else
22835 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
22836 false, context_die);
22837 }
22838
22839 goto gen_variable_die_location;
22840 }
22841 }
22842
22843 /* For static data members, the declaration in the class is supposed
22844 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
22845 also in DWARF2; the specification should still be DW_TAG_variable
22846 referencing the DW_TAG_member DIE. */
22847 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
22848 var_die = new_die (DW_TAG_member, context_die, decl);
22849 else
22850 var_die = new_die (DW_TAG_variable, context_die, decl);
22851
22852 if (origin != NULL)
22853 add_abstract_origin_attribute (var_die, origin);
22854
22855 /* Loop unrolling can create multiple blocks that refer to the same
22856 static variable, so we must test for the DW_AT_declaration flag.
22857
22858 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
22859 copy decls and set the DECL_ABSTRACT_P flag on them instead of
22860 sharing them.
22861
22862 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
22863 else if (decl_will_get_specification_p (old_die, decl, declaration))
22864 {
22865 /* This is a definition of a C++ class level static. */
22866 add_AT_specification (var_die, old_die);
22867 specialization_p = true;
22868 if (DECL_NAME (decl))
22869 {
22870 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22871 struct dwarf_file_data * file_index = lookup_filename (s.file);
22872
22873 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22874 add_AT_file (var_die, DW_AT_decl_file, file_index);
22875
22876 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22877 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
22878
22879 if (debug_column_info
22880 && s.column
22881 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22882 != (unsigned) s.column))
22883 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
22884
22885 if (old_die->die_tag == DW_TAG_member)
22886 add_linkage_name (var_die, decl);
22887 }
22888 }
22889 else
22890 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
22891
22892 if ((origin == NULL && !specialization_p)
22893 || (origin != NULL
22894 && !DECL_ABSTRACT_P (decl_or_origin)
22895 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
22896 decl_function_context
22897 (decl_or_origin))))
22898 {
22899 tree type = TREE_TYPE (decl_or_origin);
22900
22901 if (decl_by_reference_p (decl_or_origin))
22902 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
22903 context_die);
22904 else
22905 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
22906 context_die);
22907 }
22908
22909 if (origin == NULL && !specialization_p)
22910 {
22911 if (TREE_PUBLIC (decl))
22912 add_AT_flag (var_die, DW_AT_external, 1);
22913
22914 if (DECL_ARTIFICIAL (decl))
22915 add_AT_flag (var_die, DW_AT_artificial, 1);
22916
22917 add_alignment_attribute (var_die, decl);
22918
22919 add_accessibility_attribute (var_die, decl);
22920 }
22921
22922 if (declaration)
22923 add_AT_flag (var_die, DW_AT_declaration, 1);
22924
22925 if (decl && (DECL_ABSTRACT_P (decl)
22926 || !old_die || is_declaration_die (old_die)))
22927 equate_decl_number_to_die (decl, var_die);
22928
22929 gen_variable_die_location:
22930 if (! declaration
22931 && (! DECL_ABSTRACT_P (decl_or_origin)
22932 /* Local static vars are shared between all clones/inlines,
22933 so emit DW_AT_location on the abstract DIE if DECL_RTL is
22934 already set. */
22935 || (VAR_P (decl_or_origin)
22936 && TREE_STATIC (decl_or_origin)
22937 && DECL_RTL_SET_P (decl_or_origin))))
22938 {
22939 if (early_dwarf)
22940 add_pubname (decl_or_origin, var_die);
22941 else
22942 add_location_or_const_value_attribute (var_die, decl_or_origin,
22943 decl == NULL);
22944 }
22945 else
22946 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
22947
22948 if ((dwarf_version >= 4 || !dwarf_strict)
22949 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
22950 DW_AT_const_expr) == 1
22951 && !get_AT (var_die, DW_AT_const_expr)
22952 && !specialization_p)
22953 add_AT_flag (var_die, DW_AT_const_expr, 1);
22954
22955 if (!dwarf_strict)
22956 {
22957 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
22958 DW_AT_inline);
22959 if (inl != -1
22960 && !get_AT (var_die, DW_AT_inline)
22961 && !specialization_p)
22962 add_AT_unsigned (var_die, DW_AT_inline, inl);
22963 }
22964 }
22965
22966 /* Generate a DIE to represent a named constant. */
22967
22968 static void
22969 gen_const_die (tree decl, dw_die_ref context_die)
22970 {
22971 dw_die_ref const_die;
22972 tree type = TREE_TYPE (decl);
22973
22974 const_die = lookup_decl_die (decl);
22975 if (const_die)
22976 return;
22977
22978 const_die = new_die (DW_TAG_constant, context_die, decl);
22979 equate_decl_number_to_die (decl, const_die);
22980 add_name_and_src_coords_attributes (const_die, decl);
22981 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
22982 if (TREE_PUBLIC (decl))
22983 add_AT_flag (const_die, DW_AT_external, 1);
22984 if (DECL_ARTIFICIAL (decl))
22985 add_AT_flag (const_die, DW_AT_artificial, 1);
22986 tree_add_const_value_attribute_for_decl (const_die, decl);
22987 }
22988
22989 /* Generate a DIE to represent a label identifier. */
22990
22991 static void
22992 gen_label_die (tree decl, dw_die_ref context_die)
22993 {
22994 tree origin = decl_ultimate_origin (decl);
22995 dw_die_ref lbl_die = lookup_decl_die (decl);
22996 rtx insn;
22997 char label[MAX_ARTIFICIAL_LABEL_BYTES];
22998
22999 if (!lbl_die)
23000 {
23001 lbl_die = new_die (DW_TAG_label, context_die, decl);
23002 equate_decl_number_to_die (decl, lbl_die);
23003
23004 if (origin != NULL)
23005 add_abstract_origin_attribute (lbl_die, origin);
23006 else
23007 add_name_and_src_coords_attributes (lbl_die, decl);
23008 }
23009
23010 if (DECL_ABSTRACT_P (decl))
23011 equate_decl_number_to_die (decl, lbl_die);
23012 else if (! early_dwarf)
23013 {
23014 insn = DECL_RTL_IF_SET (decl);
23015
23016 /* Deleted labels are programmer-specified labels which have been
23017 eliminated because of various optimizations. We still emit them
23018 here so that it is possible to put breakpoints on them. */
23019 if (insn
23020 && (LABEL_P (insn)
23021 || ((NOTE_P (insn)
23022 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23023 {
23024 /* When optimization is enabled (via -O) some parts of the compiler
23025 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23026 represent source-level labels which were explicitly declared by
23027 the user. This really shouldn't be happening though, so catch
23028 it if it ever does happen. */
23029 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23030
23031 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23032 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23033 }
23034 else if (insn
23035 && NOTE_P (insn)
23036 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23037 && CODE_LABEL_NUMBER (insn) != -1)
23038 {
23039 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23040 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23041 }
23042 }
23043 }
23044
23045 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23046 attributes to the DIE for a block STMT, to describe where the inlined
23047 function was called from. This is similar to add_src_coords_attributes. */
23048
23049 static inline void
23050 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23051 {
23052 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23053
23054 if (dwarf_version >= 3 || !dwarf_strict)
23055 {
23056 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23057 add_AT_unsigned (die, DW_AT_call_line, s.line);
23058 if (debug_column_info && s.column)
23059 add_AT_unsigned (die, DW_AT_call_column, s.column);
23060 }
23061 }
23062
23063
23064 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23065 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23066
23067 static inline void
23068 add_high_low_attributes (tree stmt, dw_die_ref die)
23069 {
23070 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23071
23072 if (BLOCK_FRAGMENT_CHAIN (stmt)
23073 && (dwarf_version >= 3 || !dwarf_strict))
23074 {
23075 tree chain, superblock = NULL_TREE;
23076 dw_die_ref pdie;
23077 dw_attr_node *attr = NULL;
23078
23079 if (inlined_function_outer_scope_p (stmt))
23080 {
23081 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23082 BLOCK_NUMBER (stmt));
23083 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23084 }
23085
23086 /* Optimize duplicate .debug_ranges lists or even tails of
23087 lists. If this BLOCK has the same ranges as its supercontext,
23088 look up the DW_AT_ranges attribute in the supercontext (and
23089 recursively so), verify that the ranges_table contains the
23090 right values, and use it instead of adding a new .debug_ranges entry. */
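      /* For example (illustrative), if this BLOCK and its superblock were
	 split into exactly the same hot/cold fragments, the superblock's
	 existing .debug_ranges list already describes this BLOCK's ranges,
	 so DW_AT_ranges can point at (a tail of) that list instead of a
	 freshly emitted duplicate.  */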
23091 for (chain = stmt, pdie = die;
23092 BLOCK_SAME_RANGE (chain);
23093 chain = BLOCK_SUPERCONTEXT (chain))
23094 {
23095 dw_attr_node *new_attr;
23096
23097 pdie = pdie->die_parent;
23098 if (pdie == NULL)
23099 break;
23100 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23101 break;
23102 new_attr = get_AT (pdie, DW_AT_ranges);
23103 if (new_attr == NULL
23104 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23105 break;
23106 attr = new_attr;
23107 superblock = BLOCK_SUPERCONTEXT (chain);
23108 }
23109 if (attr != NULL
23110 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23111 == BLOCK_NUMBER (superblock))
23112 && BLOCK_FRAGMENT_CHAIN (superblock))
23113 {
23114 unsigned long off = attr->dw_attr_val.v.val_offset;
23115 unsigned long supercnt = 0, thiscnt = 0;
23116 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23117 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23118 {
23119 ++supercnt;
23120 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23121 == BLOCK_NUMBER (chain));
23122 }
23123 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23124 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23125 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23126 ++thiscnt;
23127 gcc_assert (supercnt >= thiscnt);
23128 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23129 false);
23130 note_rnglist_head (off + supercnt - thiscnt);
23131 return;
23132 }
23133
23134 unsigned int offset = add_ranges (stmt, true);
23135 add_AT_range_list (die, DW_AT_ranges, offset, false);
23136 note_rnglist_head (offset);
23137
23138 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23139 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23140 do
23141 {
23142 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23143 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23144 chain = BLOCK_FRAGMENT_CHAIN (chain);
23145 }
23146 while (chain);
23147 add_ranges (NULL);
23148 }
23149 else
23150 {
23151 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23152 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23153 BLOCK_NUMBER (stmt));
23154 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23155 BLOCK_NUMBER (stmt));
23156 add_AT_low_high_pc (die, label, label_high, false);
23157 }
23158 }
23159
23160 /* Generate a DIE for a lexical block. */
23161
23162 static void
23163 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23164 {
23165 dw_die_ref old_die = BLOCK_DIE (stmt);
23166 dw_die_ref stmt_die = NULL;
23167 if (!old_die)
23168 {
23169 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23170 BLOCK_DIE (stmt) = stmt_die;
23171 }
23172
23173 if (BLOCK_ABSTRACT (stmt))
23174 {
23175 if (old_die)
23176 {
23177 /* This must have been generated early and it won't even
23178 need location information since it's a DW_AT_inline
23179 function. */
23180 if (flag_checking)
23181 for (dw_die_ref c = context_die; c; c = c->die_parent)
23182 if (c->die_tag == DW_TAG_inlined_subroutine
23183 || c->die_tag == DW_TAG_subprogram)
23184 {
23185 gcc_assert (get_AT (c, DW_AT_inline));
23186 break;
23187 }
23188 return;
23189 }
23190 }
23191 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23192 {
23193 /* If this is an inlined instance, create a new lexical die for
23194 anything below to attach DW_AT_abstract_origin to. */
23195 if (old_die)
23196 {
23197 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23198 BLOCK_DIE (stmt) = stmt_die;
23199 old_die = NULL;
23200 }
23201
23202 tree origin = block_ultimate_origin (stmt);
23203 if (origin != NULL_TREE && origin != stmt)
23204 add_abstract_origin_attribute (stmt_die, origin);
23205 }
23206
23207 if (old_die)
23208 stmt_die = old_die;
23209
23210 /* A non-abstract block whose blocks have already been reordered
23211 should have the instruction range for this block. If so, set the
23212 high/low attributes. */
23213 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23214 {
23215 gcc_assert (stmt_die);
23216 add_high_low_attributes (stmt, stmt_die);
23217 }
23218
23219 decls_for_scope (stmt, stmt_die);
23220 }
23221
23222 /* Generate a DIE for an inlined subprogram. */
23223
23224 static void
23225 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23226 {
23227 tree decl;
23228
23229 /* The instance of the function that is effectively being inlined must not
23230 be abstract. */
23231 gcc_assert (! BLOCK_ABSTRACT (stmt));
23232
23233 decl = block_ultimate_origin (stmt);
23234
23235 /* Make sure any inlined functions are known to be inlineable. */
23236 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23237 || cgraph_function_possibly_inlined_p (decl));
23238
23239 if (! BLOCK_ABSTRACT (stmt))
23240 {
23241 dw_die_ref subr_die
23242 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23243
23244 if (call_arg_locations)
23245 BLOCK_DIE (stmt) = subr_die;
23246 add_abstract_origin_attribute (subr_die, decl);
23247 if (TREE_ASM_WRITTEN (stmt))
23248 add_high_low_attributes (stmt, subr_die);
23249 add_call_src_coords_attributes (stmt, subr_die);
23250
23251 decls_for_scope (stmt, subr_die);
23252 }
23253 }
23254
23255 /* Generate a DIE for a field in a record, or structure. CTX is required: see
23256 the comment for VLR_CONTEXT. */
23257
23258 static void
23259 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23260 {
23261 dw_die_ref decl_die;
23262
23263 if (TREE_TYPE (decl) == error_mark_node)
23264 return;
23265
23266 decl_die = new_die (DW_TAG_member, context_die, decl);
23267 add_name_and_src_coords_attributes (decl_die, decl);
23268 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23269 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23270 context_die);
23271
23272 if (DECL_BIT_FIELD_TYPE (decl))
23273 {
23274 add_byte_size_attribute (decl_die, decl);
23275 add_bit_size_attribute (decl_die, decl);
23276 add_bit_offset_attribute (decl_die, decl, ctx);
23277 }
23278
23279 add_alignment_attribute (decl_die, decl);
23280
23281 /* If we have a variant part offset, then we are supposed to process a member
23282 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23283 trees. */
23284 gcc_assert (ctx->variant_part_offset == NULL_TREE
23285 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23286 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23287 add_data_member_location_attribute (decl_die, decl, ctx);
23288
23289 if (DECL_ARTIFICIAL (decl))
23290 add_AT_flag (decl_die, DW_AT_artificial, 1);
23291
23292 add_accessibility_attribute (decl_die, decl);
23293
23294 /* Equate decl number to die, so that we can look up this decl later on. */
23295 equate_decl_number_to_die (decl, decl_die);
23296 }
23297
23298 /* Generate a DIE for a pointer to a member type. TYPE can be an
23299 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23300 pointer to member function. */
23301
23302 static void
23303 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23304 {
23305 if (lookup_type_die (type))
23306 return;
23307
23308 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23309 scope_die_for (type, context_die), type);
23310
23311 equate_type_number_to_die (type, ptr_die);
23312 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23313 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23314 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23315 context_die);
23316 add_alignment_attribute (ptr_die, type);
23317
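  /* For a pointer to data member, DW_AT_use_location tells the consumer how
     to compute the member's address: the pointer-to-member value and the
     object's base address are pushed on the DWARF expression stack and the
     description is evaluated, so a single DW_OP_plus suffices here.  */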
23318 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23319 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23320 {
23321 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23322 add_AT_loc (ptr_die, DW_AT_use_location, op);
23323 }
23324 }
23325
23326 static char *producer_string;
23327
23328 /* Return a heap allocated producer string including command line options
23329 if -grecord-gcc-switches. */
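/* For example (illustrative values), with switch recording enabled the
   result might read "GNU C11 7.3.0 -mtune=generic -O2 -g"; otherwise only
   the language and version strings are recorded.  */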
23330
23331 static char *
23332 gen_producer_string (void)
23333 {
23334 size_t j;
23335 auto_vec<const char *> switches;
23336 const char *language_string = lang_hooks.name;
23337 char *producer, *tail;
23338 const char *p;
23339 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23340 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23341
23342 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23343 switch (save_decoded_options[j].opt_index)
23344 {
23345 case OPT_o:
23346 case OPT_d:
23347 case OPT_dumpbase:
23348 case OPT_dumpdir:
23349 case OPT_auxbase:
23350 case OPT_auxbase_strip:
23351 case OPT_quiet:
23352 case OPT_version:
23353 case OPT_v:
23354 case OPT_w:
23355 case OPT_L:
23356 case OPT_D:
23357 case OPT_I:
23358 case OPT_U:
23359 case OPT_SPECIAL_unknown:
23360 case OPT_SPECIAL_ignore:
23361 case OPT_SPECIAL_program_name:
23362 case OPT_SPECIAL_input_file:
23363 case OPT_grecord_gcc_switches:
23364 case OPT__output_pch_:
23365 case OPT_fdiagnostics_show_location_:
23366 case OPT_fdiagnostics_show_option:
23367 case OPT_fdiagnostics_show_caret:
23368 case OPT_fdiagnostics_color_:
23369 case OPT_fverbose_asm:
23370 case OPT____:
23371 case OPT__sysroot_:
23372 case OPT_nostdinc:
23373 case OPT_nostdinc__:
23374 case OPT_fpreprocessed:
23375 case OPT_fltrans_output_list_:
23376 case OPT_fresolution_:
23377 case OPT_fdebug_prefix_map_:
23378 /* Ignore these. */
23379 continue;
23380 default:
23381 if (cl_options[save_decoded_options[j].opt_index].flags
23382 & CL_NO_DWARF_RECORD)
23383 continue;
23384 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23385 == '-');
23386 switch (save_decoded_options[j].canonical_option[0][1])
23387 {
23388 case 'M':
23389 case 'i':
23390 case 'W':
23391 continue;
23392 case 'f':
23393 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23394 "dump", 4) == 0)
23395 continue;
23396 break;
23397 default:
23398 break;
23399 }
23400 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23401 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23402 break;
23403 }
23404
23405 producer = XNEWVEC (char, plen + 1 + len + 1);
23406 tail = producer;
23407 sprintf (tail, "%s %s", language_string, version_string);
23408 tail += plen;
23409
23410 FOR_EACH_VEC_ELT (switches, j, p)
23411 {
23412 len = strlen (p);
23413 *tail = ' ';
23414 memcpy (tail + 1, p, len);
23415 tail += len + 1;
23416 }
23417
23418 *tail = '\0';
23419 return producer;
23420 }
23421
23422 /* Given a C and/or C++ language/version string return the "highest".
23423 C++ is assumed to be "higher" than C in this case. Used for merging
23424 LTO translation unit languages. */
23425 static const char *
23426 highest_c_language (const char *lang1, const char *lang2)
23427 {
23428 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
23429 return "GNU C++17";
23430 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23431 return "GNU C++14";
23432 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23433 return "GNU C++11";
23434 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23435 return "GNU C++98";
23436
23437 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
23438 return "GNU C17";
23439 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23440 return "GNU C11";
23441 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23442 return "GNU C99";
23443 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23444 return "GNU C89";
23445
23446 gcc_unreachable ();
23447 }
23448
23449
23450 /* Generate the DIE for the compilation unit. */
23451
23452 static dw_die_ref
23453 gen_compile_unit_die (const char *filename)
23454 {
23455 dw_die_ref die;
23456 const char *language_string = lang_hooks.name;
23457 int language;
23458
23459 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23460
23461 if (filename)
23462 {
23463 add_name_attribute (die, filename);
23464 /* Don't add cwd for <built-in>. */
23465 if (filename[0] != '<')
23466 add_comp_dir_attribute (die);
23467 }
23468
23469 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23470
23471 /* If our producer is LTO, try to figure out a common language to use
23472 from the global list of translation units. */
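  /* For example (illustrative), if the LTO link mixes "GNU C11" and
     "GNU C++14" translation units, highest_c_language picks "GNU C++14";
     if any unit's language is outside the C family, we fall back to C.  */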
23473 if (strcmp (language_string, "GNU GIMPLE") == 0)
23474 {
23475 unsigned i;
23476 tree t;
23477 const char *common_lang = NULL;
23478
23479 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23480 {
23481 if (!TRANSLATION_UNIT_LANGUAGE (t))
23482 continue;
23483 if (!common_lang)
23484 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23485 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23486 ;
23487 else if (strncmp (common_lang, "GNU C", 5) == 0
23488 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23489 /* Mixing C and C++ is ok, use C++ in that case. */
23490 common_lang = highest_c_language (common_lang,
23491 TRANSLATION_UNIT_LANGUAGE (t));
23492 else
23493 {
23494 /* Fall back to C. */
23495 common_lang = NULL;
23496 break;
23497 }
23498 }
23499
23500 if (common_lang)
23501 language_string = common_lang;
23502 }
23503
23504 language = DW_LANG_C;
23505 if (strncmp (language_string, "GNU C", 5) == 0
23506 && ISDIGIT (language_string[5]))
23507 {
23508 language = DW_LANG_C89;
23509 if (dwarf_version >= 3 || !dwarf_strict)
23510 {
23511 if (strcmp (language_string, "GNU C89") != 0)
23512 language = DW_LANG_C99;
23513
23514 if (dwarf_version >= 5 /* || !dwarf_strict */)
23515 if (strcmp (language_string, "GNU C11") == 0
23516 || strcmp (language_string, "GNU C17") == 0)
23517 language = DW_LANG_C11;
23518 }
23519 }
23520 else if (strncmp (language_string, "GNU C++", 7) == 0)
23521 {
23522 language = DW_LANG_C_plus_plus;
23523 if (dwarf_version >= 5 /* || !dwarf_strict */)
23524 {
23525 if (strcmp (language_string, "GNU C++11") == 0)
23526 language = DW_LANG_C_plus_plus_11;
23527 else if (strcmp (language_string, "GNU C++14") == 0)
23528 language = DW_LANG_C_plus_plus_14;
23529 else if (strcmp (language_string, "GNU C++17") == 0)
23530 /* For now. */
23531 language = DW_LANG_C_plus_plus_14;
23532 }
23533 }
23534 else if (strcmp (language_string, "GNU F77") == 0)
23535 language = DW_LANG_Fortran77;
23536 else if (dwarf_version >= 3 || !dwarf_strict)
23537 {
23538 if (strcmp (language_string, "GNU Ada") == 0)
23539 language = DW_LANG_Ada95;
23540 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23541 {
23542 language = DW_LANG_Fortran95;
23543 if (dwarf_version >= 5 /* || !dwarf_strict */)
23544 {
23545 if (strcmp (language_string, "GNU Fortran2003") == 0)
23546 language = DW_LANG_Fortran03;
23547 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23548 language = DW_LANG_Fortran08;
23549 }
23550 }
23551 else if (strcmp (language_string, "GNU Objective-C") == 0)
23552 language = DW_LANG_ObjC;
23553 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23554 language = DW_LANG_ObjC_plus_plus;
23555 else if (dwarf_version >= 5 || !dwarf_strict)
23556 {
23557 if (strcmp (language_string, "GNU Go") == 0)
23558 language = DW_LANG_Go;
23559 }
23560 }
23561 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23562 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23563 language = DW_LANG_Fortran90;
23564
23565 add_AT_unsigned (die, DW_AT_language, language);
23566
23567 switch (language)
23568 {
23569 case DW_LANG_Fortran77:
23570 case DW_LANG_Fortran90:
23571 case DW_LANG_Fortran95:
23572 case DW_LANG_Fortran03:
23573 case DW_LANG_Fortran08:
23574 /* Fortran has case insensitive identifiers and the front-end
23575 lowercases everything. */
23576 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23577 break;
23578 default:
23579 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23580 break;
23581 }
23582 return die;
23583 }
23584
23585 /* Generate the DIE for a base class. */
23586
23587 static void
23588 gen_inheritance_die (tree binfo, tree access, tree type,
23589 dw_die_ref context_die)
23590 {
23591 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23592 struct vlr_context ctx = { type, NULL };
23593
23594 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23595 context_die);
23596 add_data_member_location_attribute (die, binfo, &ctx);
23597
23598 if (BINFO_VIRTUAL_P (binfo))
23599 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23600
23601 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23602 children, otherwise the default is DW_ACCESS_public. In DWARF2
23603 the default has always been DW_ACCESS_private. */
23604 if (access == access_public_node)
23605 {
23606 if (dwarf_version == 2
23607 || context_die->die_tag == DW_TAG_class_type)
23608 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23609 }
23610 else if (access == access_protected_node)
23611 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23612 else if (dwarf_version > 2
23613 && context_die->die_tag != DW_TAG_class_type)
23614 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23615 }
23616
23617 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23618 structure. */
23619 static bool
23620 is_variant_part (tree decl)
23621 {
23622 return (TREE_CODE (decl) == FIELD_DECL
23623 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23624 }
23625
23626 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23627 return the FIELD_DECL. Return NULL_TREE otherwise. */
23628
23629 static tree
23630 analyze_discr_in_predicate (tree operand, tree struct_type)
23631 {
23632 bool continue_stripping = true;
23633 while (continue_stripping)
23634 switch (TREE_CODE (operand))
23635 {
23636 CASE_CONVERT:
23637 operand = TREE_OPERAND (operand, 0);
23638 break;
23639 default:
23640 continue_stripping = false;
23641 break;
23642 }
23643
23644 /* Match field access to members of struct_type only. */
23645 if (TREE_CODE (operand) == COMPONENT_REF
23646 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23647 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23648 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23649 return TREE_OPERAND (operand, 1);
23650 else
23651 return NULL_TREE;
23652 }
23653
23654 /* Check that SRC is a constant integer that can be represented as a native
23655 integer constant (either signed or unsigned). If so, store it into DEST and
23656 return true. Return false otherwise. */
23657
23658 static bool
23659 get_discr_value (tree src, dw_discr_value *dest)
23660 {
23661 tree discr_type = TREE_TYPE (src);
23662
23663 if (lang_hooks.types.get_debug_type)
23664 {
23665 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
23666 if (debug_type != NULL)
23667 discr_type = debug_type;
23668 }
23669
23670 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
23671 return false;
23672
23673 /* Signedness can vary between the original type and the debug type. This
23674 can happen for character types in Ada for instance: the character type
23675 used for code generation can be signed, to be compatible with the C one,
23676 but from a debugger point of view, it must be unsigned. */
23677 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23678 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
23679
23680 if (is_orig_unsigned != is_debug_unsigned)
23681 src = fold_convert (discr_type, src);
23682
23683 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23684 return false;
23685
23686 dest->pos = is_debug_unsigned;
23687 if (is_debug_unsigned)
23688 dest->v.uval = tree_to_uhwi (src);
23689 else
23690 dest->v.sval = tree_to_shwi (src);
23691
23692 return true;
23693 }
23694
23695 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23696 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23697 store NULL_TREE in DISCR_DECL. Otherwise:
23698
23699 - store the discriminant field in STRUCT_TYPE that controls the variant
23700 part to *DISCR_DECL
23701
23702 - put in *DISCR_LISTS_P an array where for each variant, the item
23703 represents the corresponding matching list of discriminant values.
23704
23705 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23706 the above array.
23707
23708 Note that when the array is allocated (i.e. when the analysis is
23709 successful), it is up to the caller to free the array. */
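/* Illustrative example (pseudo-notation): for a variant whose DECL_QUALIFIER
   is equivalent to

     (<placeholder>.d == 1) || (<placeholder>.d >= 5 && <placeholder>.d <= 9)

   the analysis below identifies `d' as the discriminant and records the
   single value 1 plus the range [5, 9] in that variant's discriminant
   list.  */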
23710
23711 static void
23712 analyze_variants_discr (tree variant_part_decl,
23713 tree struct_type,
23714 tree *discr_decl,
23715 dw_discr_list_ref **discr_lists_p,
23716 unsigned *discr_lists_length)
23717 {
23718 tree variant_part_type = TREE_TYPE (variant_part_decl);
23719 tree variant;
23720 dw_discr_list_ref *discr_lists;
23721 unsigned i;
23722
23723 /* Compute how many variants there are in this variant part. */
23724 *discr_lists_length = 0;
23725 for (variant = TYPE_FIELDS (variant_part_type);
23726 variant != NULL_TREE;
23727 variant = DECL_CHAIN (variant))
23728 ++*discr_lists_length;
23729
23730 *discr_decl = NULL_TREE;
23731 *discr_lists_p
23732 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23733 sizeof (**discr_lists_p));
23734 discr_lists = *discr_lists_p;
23735
23736 /* And then analyze all variants to extract discriminant information for all
23737 of them. This analysis is conservative: as soon as we detect something we
23738 do not support, abort everything and pretend we found nothing. */
23739 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23740 variant != NULL_TREE;
23741 variant = DECL_CHAIN (variant), ++i)
23742 {
23743 tree match_expr = DECL_QUALIFIER (variant);
23744
23745 /* Now, try to analyze the predicate and deduce a discriminant for
23746 it. */
23747 if (match_expr == boolean_true_node)
23748 /* Typically happens for the default variant: it matches all cases that
23749 previous variants rejected. Don't output any matching value for
23750 this one. */
23751 continue;
23752
23753 /* The following loop tries to iterate over each discriminant
23754 possibility: single values or ranges. */
23755 while (match_expr != NULL_TREE)
23756 {
23757 tree next_round_match_expr;
23758 tree candidate_discr = NULL_TREE;
23759 dw_discr_list_ref new_node = NULL;
23760
23761 /* Possibilities are matched one after the other by nested
23762 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23763 continue with the rest at next iteration. */
23764 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23765 {
23766 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23767 match_expr = TREE_OPERAND (match_expr, 1);
23768 }
23769 else
23770 next_round_match_expr = NULL_TREE;
23771
23772 if (match_expr == boolean_false_node)
23773 /* This sub-expression matches nothing: just wait for the next
23774 one. */
23775 ;
23776
23777 else if (TREE_CODE (match_expr) == EQ_EXPR)
23778 {
23779 /* We are matching: <discr_field> == <integer_cst>
23780 This sub-expression matches a single value. */
23781 tree integer_cst = TREE_OPERAND (match_expr, 1);
23782
23783 candidate_discr
23784 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23785 struct_type);
23786
23787 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23788 if (!get_discr_value (integer_cst,
23789 &new_node->dw_discr_lower_bound))
23790 goto abort;
23791 new_node->dw_discr_range = false;
23792 }
23793
23794 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23795 {
23796 /* We are matching:
23797 <discr_field> > <integer_cst>
23798 && <discr_field> < <integer_cst>.
23799 This sub-expression matches the range of values between the
23800 two matched integer constants. Note that comparisons can be
23801 inclusive or exclusive. */
23802 tree candidate_discr_1, candidate_discr_2;
23803 tree lower_cst, upper_cst;
23804 bool lower_cst_included, upper_cst_included;
23805 tree lower_op = TREE_OPERAND (match_expr, 0);
23806 tree upper_op = TREE_OPERAND (match_expr, 1);
23807
23808 /* When the comparison is exclusive, the integer constant is not
23809 the discriminant range bound we are looking for: we will have
23810 to increment or decrement it. */
23811 if (TREE_CODE (lower_op) == GE_EXPR)
23812 lower_cst_included = true;
23813 else if (TREE_CODE (lower_op) == GT_EXPR)
23814 lower_cst_included = false;
23815 else
23816 goto abort;
23817
23818 if (TREE_CODE (upper_op) == LE_EXPR)
23819 upper_cst_included = true;
23820 else if (TREE_CODE (upper_op) == LT_EXPR)
23821 upper_cst_included = false;
23822 else
23823 goto abort;
23824
23825 /* Extract the discriminant from the first operand and check it
23826 is consistent with the same analysis in the second
23827 operand. */
23828 candidate_discr_1
23829 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23830 struct_type);
23831 candidate_discr_2
23832 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23833 struct_type);
23834 if (candidate_discr_1 == candidate_discr_2)
23835 candidate_discr = candidate_discr_1;
23836 else
23837 goto abort;
23838
23839 /* Extract bounds from both. */
23840 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23841 lower_cst = TREE_OPERAND (lower_op, 1);
23842 upper_cst = TREE_OPERAND (upper_op, 1);
23843
23844 if (!lower_cst_included)
23845 lower_cst
23846 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
23847 build_int_cst (TREE_TYPE (lower_cst), 1));
23848 if (!upper_cst_included)
23849 upper_cst
23850 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
23851 build_int_cst (TREE_TYPE (upper_cst), 1));
23852
23853 if (!get_discr_value (lower_cst,
23854 &new_node->dw_discr_lower_bound)
23855 || !get_discr_value (upper_cst,
23856 &new_node->dw_discr_upper_bound))
23857 goto abort;
23858
23859 new_node->dw_discr_range = true;
23860 }
23861
23862 else
23863 /* Unsupported sub-expression: we cannot determine the set of
23864 matching discriminant values. Abort everything. */
23865 goto abort;
23866
23867 /* If the discriminant info is not consistent with what we saw so
23868 far, consider the analysis failed and abort everything. */
23869 if (candidate_discr == NULL_TREE
23870 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
23871 goto abort;
23872 else
23873 *discr_decl = candidate_discr;
23874
23875 if (new_node != NULL)
23876 {
23877 new_node->dw_discr_next = discr_lists[i];
23878 discr_lists[i] = new_node;
23879 }
23880 match_expr = next_round_match_expr;
23881 }
23882 }
23883
23884 /* If we reach this point, we could match everything we were interested
23885 in. */
23886 return;
23887
23888 abort:
23889 /* Clean up all data structures and return no result. */
23890 free (*discr_lists_p);
23891 *discr_lists_p = NULL;
23892 *discr_decl = NULL_TREE;
23893 }
23894
23895 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
23896 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
23897 under CONTEXT_DIE.
23898
23899 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
23900 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
23901 this type, which are record types, represent the available variants and each
23902 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
23903 values are inferred from these attributes.
23904
23905 In trees, the offsets for the fields inside these sub-records are relative
23906 to the variant part itself, whereas the corresponding DIEs should have
23907 offset attributes that are relative to the embedding record base address.
23908 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
23909 must be an expression that computes the offset of the variant part to
23910 describe in DWARF. */
23911
23912 static void
23913 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
23914 dw_die_ref context_die)
23915 {
23916 const tree variant_part_type = TREE_TYPE (variant_part_decl);
23917 tree variant_part_offset = vlr_ctx->variant_part_offset;
23918 struct loc_descr_context ctx = {
23919 vlr_ctx->struct_type, /* context_type */
23920 NULL_TREE, /* base_decl */
23921 NULL, /* dpi */
23922 false, /* placeholder_arg */
23923 false /* placeholder_seen */
23924 };
23925
23926 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
23927 NULL_TREE if there is no such field. */
23928 tree discr_decl = NULL_TREE;
23929 dw_discr_list_ref *discr_lists;
23930 unsigned discr_lists_length = 0;
23931 unsigned i;
23932
23933 dw_die_ref dwarf_proc_die = NULL;
23934 dw_die_ref variant_part_die
23935 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
23936
23937 equate_decl_number_to_die (variant_part_decl, variant_part_die);
23938
23939 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
23940 &discr_decl, &discr_lists, &discr_lists_length);
23941
23942 if (discr_decl != NULL_TREE)
23943 {
23944 dw_die_ref discr_die = lookup_decl_die (discr_decl);
23945
23946 if (discr_die)
23947 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
23948 else
23949 /* We have no DIE for the discriminant, so just discard all
23950 discriminant information in the output. */
23951 discr_decl = NULL_TREE;
23952 }
23953
23954 /* If the offset for this variant part is more complex than a constant,
23955 create a DWARF procedure for it so that we will not have to generate DWARF
23956 expressions for it for each member. */
23957 if (TREE_CODE (variant_part_offset) != INTEGER_CST
23958 && (dwarf_version >= 3 || !dwarf_strict))
23959 {
23960 const tree dwarf_proc_fndecl
23961 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
23962 build_function_type (TREE_TYPE (variant_part_offset),
23963 NULL_TREE));
23964 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
23965 const dw_loc_descr_ref dwarf_proc_body
23966 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
23967
23968 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
23969 dwarf_proc_fndecl, context_die);
23970 if (dwarf_proc_die != NULL)
23971 variant_part_offset = dwarf_proc_call;
23972 }
23973
23974 /* Output DIEs for all variants. */
23975 i = 0;
23976 for (tree variant = TYPE_FIELDS (variant_part_type);
23977 variant != NULL_TREE;
23978 variant = DECL_CHAIN (variant), ++i)
23979 {
23980 tree variant_type = TREE_TYPE (variant);
23981 dw_die_ref variant_die;
23982
23983 /* All variants (i.e. members of a variant part) are supposed to be
23984 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
23985 under these records. */
23986 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
23987
23988 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
23989 equate_decl_number_to_die (variant, variant_die);
23990
23991 /* Output discriminant values this variant matches, if any. */
23992 if (discr_decl == NULL || discr_lists[i] == NULL)
23993 /* If we have no discriminant information at all, or no matching
23994 values for this variant (probably the default variant), don't
23995 output any discriminant value/list attribute, as the standard says. */
23996 ;
23997 else if (discr_lists[i]->dw_discr_next == NULL
23998 && !discr_lists[i]->dw_discr_range)
23999 /* If there is only one accepted value, don't bother outputting a
24000 list. */
24001 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24002 else
24003 add_discr_list (variant_die, discr_lists[i]);
24004
24005 for (tree member = TYPE_FIELDS (variant_type);
24006 member != NULL_TREE;
24007 member = DECL_CHAIN (member))
24008 {
24009 struct vlr_context vlr_sub_ctx = {
24010 vlr_ctx->struct_type, /* struct_type */
24011 NULL /* variant_part_offset */
24012 };
24013 if (is_variant_part (member))
24014 {
24015 /* All offsets for fields inside variant parts are relative to
24016 the top-level embedding RECORD_TYPE's base address. On the
24017 other hand, offsets in GCC's types are relative to the
24018 nested-most variant part. So we have to sum offsets each time
24019 we recurse. */
24020
24021 vlr_sub_ctx.variant_part_offset
24022 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24023 variant_part_offset, byte_position (member));
24024 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24025 }
24026 else
24027 {
24028 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24029 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24030 }
24031 }
24032 }
24033
24034 free (discr_lists);
24035 }
24036
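/* Illustrative sketch (an editor's summary, not output copied from GCC): for
   an Ada variant record whose discriminant is a hypothetical component KIND,
   the DIEs produced by gen_variant_part above nest roughly as:

       DW_TAG_structure_type
         DW_TAG_member          <-- KIND, the discriminant
         DW_TAG_variant_part    DW_AT_discr -> KIND's DIE
           DW_TAG_variant       DW_AT_discr_value or DW_AT_discr_list
             DW_TAG_member ...  <-- components of this alternative
           DW_TAG_variant       <-- no discriminant attribute: default variant
             DW_TAG_member ...

   Component offsets inside the variants are relative to the embedding
   RECORD_TYPE, hence the summing of offsets while recursing above.  */
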
24037 /* Generate a DIE for a class member. */
24038
24039 static void
24040 gen_member_die (tree type, dw_die_ref context_die)
24041 {
24042 tree member;
24043 tree binfo = TYPE_BINFO (type);
24044
24045 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24046
24047 /* If this is not an incomplete type, output descriptions of each of its
24048 members. Note that as we output the DIEs necessary to represent the
24049 members of this record or union type, we will also be trying to output
24050 DIEs to represent the *types* of those members. However the `type'
24051 function (above) will specifically avoid generating type DIEs for member
24052 types *within* the list of member DIEs for this (containing) type except
24053 for those types (of members) which are explicitly marked as also being
24054 members of this (containing) type themselves. The g++ front end can
24055 force any given type to be treated as a member of some other (containing)
24056 type by setting the TYPE_CONTEXT of the given (member) type to point to
24057 the TREE node representing the appropriate (containing) type. */
24058
24059 /* First output info about the base classes. */
24060 if (binfo)
24061 {
24062 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24063 int i;
24064 tree base;
24065
24066 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24067 gen_inheritance_die (base,
24068 (accesses ? (*accesses)[i] : access_public_node),
24069 type,
24070 context_die);
24071 }
24072
24073 /* Now output info about the data members and type members. */
24074 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24075 {
24076 struct vlr_context vlr_ctx = { type, NULL_TREE };
24077 bool static_inline_p
24078 = (TREE_STATIC (member)
24079 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24080 != -1));
24081
24082 /* Ignore clones. */
24083 if (DECL_ABSTRACT_ORIGIN (member))
24084 continue;
24085
24086 /* If we thought we were generating minimal debug info for TYPE
24087 and then changed our minds, some of the member declarations
24088 may have already been defined. Don't define them again, but
24089 do put them in the right order. */
24090
24091 if (dw_die_ref child = lookup_decl_die (member))
24092 {
24093 /* Handle inline static data members, which only have in-class
24094 declarations. */
24095 dw_die_ref ref = NULL;
24096 if (child->die_tag == DW_TAG_variable
24097 && child->die_parent == comp_unit_die ())
24098 {
24099 ref = get_AT_ref (child, DW_AT_specification);
24100 /* For C++17 inline static data members followed by redundant
24101 out of class redeclaration, we might get here with
24102 child being the DIE created for the out of class
24103 redeclaration and with its DW_AT_specification being
24104 the DIE created for in-class definition. We want to
24105 reparent the latter, and don't want to create another
24106 DIE with DW_AT_specification in that case, because
24107 we already have one. */
24108 if (ref
24109 && static_inline_p
24110 && ref->die_tag == DW_TAG_variable
24111 && ref->die_parent == comp_unit_die ()
24112 && get_AT (ref, DW_AT_specification) == NULL)
24113 {
24114 child = ref;
24115 ref = NULL;
24116 static_inline_p = false;
24117 }
24118 }
24119
24120 if (child->die_tag == DW_TAG_variable
24121 && child->die_parent == comp_unit_die ()
24122 && ref == NULL)
24123 {
24124 reparent_child (child, context_die);
24125 if (dwarf_version < 5)
24126 child->die_tag = DW_TAG_member;
24127 }
24128 else
24129 splice_child_die (context_die, child);
24130 }
24131
24132 /* Do not generate standard DWARF for variant parts if we are generating
24133 the corresponding GNAT encodings: DIEs generated for both would
24134 conflict in our mappings. */
24135 else if (is_variant_part (member)
24136 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24137 {
24138 vlr_ctx.variant_part_offset = byte_position (member);
24139 gen_variant_part (member, &vlr_ctx, context_die);
24140 }
24141 else
24142 {
24143 vlr_ctx.variant_part_offset = NULL_TREE;
24144 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24145 }
24146
24147 /* For C++ inline static data members, immediately emit a DW_TAG_variable
24148 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24149 DW_AT_specification. */
24150 if (static_inline_p)
24151 {
24152 int old_extern = DECL_EXTERNAL (member);
24153 DECL_EXTERNAL (member) = 0;
24154 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24155 DECL_EXTERNAL (member) = old_extern;
24156 }
24157 }
24158 }
24159
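/* Illustrative sketch (an editor's example, not taken from the GCC testsuite):
   for a C++17 inline static data member such as

       struct S { static inline int i = 0; };

   gen_member_die above emits the in-class declaration under S's DIE (retagged
   DW_TAG_member before DWARF 5, kept as DW_TAG_variable for DWARF 5 and
   later) and, because static_inline_p is set, also emits a DW_TAG_variable at
   the compilation unit level that refers back to it via
   DW_AT_specification.  */
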
24160 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24161 is set, we pretend that the type was never defined, so we only get the
24162 member DIEs needed by later specification DIEs. */
24163
24164 static void
24165 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24166 enum debug_info_usage usage)
24167 {
24168 if (TREE_ASM_WRITTEN (type))
24169 {
24170 /* Fill in the bounds of variable-length fields in late DWARF if
24171 still incomplete. */
24172 if (!early_dwarf && variably_modified_type_p (type, NULL))
24173 for (tree member = TYPE_FIELDS (type);
24174 member;
24175 member = DECL_CHAIN (member))
24176 fill_variable_array_bounds (TREE_TYPE (member));
24177 return;
24178 }
24179
24180 dw_die_ref type_die = lookup_type_die (type);
24181 dw_die_ref scope_die = 0;
24182 int nested = 0;
24183 int complete = (TYPE_SIZE (type)
24184 && (! TYPE_STUB_DECL (type)
24185 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24186 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24187 complete = complete && should_emit_struct_debug (type, usage);
24188
24189 if (type_die && ! complete)
24190 return;
24191
24192 if (TYPE_CONTEXT (type) != NULL_TREE
24193 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24194 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24195 nested = 1;
24196
24197 scope_die = scope_die_for (type, context_die);
24198
24199 /* Generate child DIEs for template parameters. */
24200 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24201 schedule_generic_params_dies_gen (type);
24202
24203 if (! type_die || (nested && is_cu_die (scope_die)))
24204 /* First occurrence of type or toplevel definition of nested class. */
24205 {
24206 dw_die_ref old_die = type_die;
24207
24208 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24209 ? record_type_tag (type) : DW_TAG_union_type,
24210 scope_die, type);
24211 equate_type_number_to_die (type, type_die);
24212 if (old_die)
24213 add_AT_specification (type_die, old_die);
24214 else
24215 add_name_attribute (type_die, type_tag (type));
24216 }
24217 else
24218 remove_AT (type_die, DW_AT_declaration);
24219
24220 /* If this type has been completed, then give it a byte_size attribute and
24221 then give a list of members. */
24222 if (complete && !ns_decl)
24223 {
24224 /* Prevent infinite recursion in cases where the type of some member of
24225 this type is expressed in terms of this type itself. */
24226 TREE_ASM_WRITTEN (type) = 1;
24227 add_byte_size_attribute (type_die, type);
24228 add_alignment_attribute (type_die, type);
24229 if (TYPE_STUB_DECL (type) != NULL_TREE)
24230 {
24231 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24232 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24233 }
24234
24235 /* If the first reference to this type was as the return type of an
24236 inline function, then it may not have a parent. Fix this now. */
24237 if (type_die->die_parent == NULL)
24238 add_child_die (scope_die, type_die);
24239
24240 push_decl_scope (type);
24241 gen_member_die (type, type_die);
24242 pop_decl_scope ();
24243
24244 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24245 if (TYPE_ARTIFICIAL (type))
24246 add_AT_flag (type_die, DW_AT_artificial, 1);
24247
24248 /* GNU extension: Record what type our vtable lives in. */
24249 if (TYPE_VFIELD (type))
24250 {
24251 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24252
24253 gen_type_die (vtype, context_die);
24254 add_AT_die_ref (type_die, DW_AT_containing_type,
24255 lookup_type_die (vtype));
24256 }
24257 }
24258 else
24259 {
24260 add_AT_flag (type_die, DW_AT_declaration, 1);
24261
24262 /* We don't need to do this for function-local types. */
24263 if (TYPE_STUB_DECL (type)
24264 && ! decl_function_context (TYPE_STUB_DECL (type)))
24265 vec_safe_push (incomplete_types, type);
24266 }
24267
24268 if (get_AT (type_die, DW_AT_name))
24269 add_pubtype (type, type_die);
24270 }
24271
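/* Illustrative sketch (an editor's example): a type only ever referenced
   through an incomplete declaration, e.g.

       struct opaque;
       struct opaque *cookie;

   takes the "else" branch above and gets a DW_TAG_structure_type DIE carrying
   just a name and DW_AT_declaration, whereas a complete definition gets
   DW_AT_byte_size, alignment, source coordinates and member DIEs.  */
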
24272 /* Generate a DIE for a subroutine _type_. */
24273
24274 static void
24275 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24276 {
24277 tree return_type = TREE_TYPE (type);
24278 dw_die_ref subr_die
24279 = new_die (DW_TAG_subroutine_type,
24280 scope_die_for (type, context_die), type);
24281
24282 equate_type_number_to_die (type, subr_die);
24283 add_prototyped_attribute (subr_die, type);
24284 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24285 context_die);
24286 add_alignment_attribute (subr_die, type);
24287 gen_formal_types_die (type, subr_die);
24288
24289 if (get_AT (subr_die, DW_AT_name))
24290 add_pubtype (type, subr_die);
24291 if ((dwarf_version >= 5 || !dwarf_strict)
24292 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24293 add_AT_flag (subr_die, DW_AT_reference, 1);
24294 if ((dwarf_version >= 5 || !dwarf_strict)
24295 && lang_hooks.types.type_dwarf_attribute (type,
24296 DW_AT_rvalue_reference) != -1)
24297 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24298 }
24299
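/* Illustrative sketch (an editor's example): for a pointer to function such
   as

       int (*callback) (float, char *);

   the pointed-to FUNCTION_TYPE goes through gen_subroutine_type_die above and
   becomes a DW_TAG_subroutine_type whose DW_AT_type describes the int return
   value, with one child DIE per formal parameter type coming from
   gen_formal_types_die.  */
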
24300 /* Generate a DIE for a type definition. */
24301
24302 static void
24303 gen_typedef_die (tree decl, dw_die_ref context_die)
24304 {
24305 dw_die_ref type_die;
24306 tree type;
24307
24308 if (TREE_ASM_WRITTEN (decl))
24309 {
24310 if (DECL_ORIGINAL_TYPE (decl))
24311 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24312 return;
24313 }
24314
24315 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
24316 checks in process_scope_var and modified_type_die), this should be called
24317 only for original types. */
24318 gcc_assert (decl_ultimate_origin (decl) == NULL
24319 || decl_ultimate_origin (decl) == decl);
24320
24321 TREE_ASM_WRITTEN (decl) = 1;
24322 type_die = new_die (DW_TAG_typedef, context_die, decl);
24323
24324 add_name_and_src_coords_attributes (type_die, decl);
24325 if (DECL_ORIGINAL_TYPE (decl))
24326 {
24327 type = DECL_ORIGINAL_TYPE (decl);
24328 if (type == error_mark_node)
24329 return;
24330
24331 gcc_assert (type != TREE_TYPE (decl));
24332 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24333 }
24334 else
24335 {
24336 type = TREE_TYPE (decl);
24337 if (type == error_mark_node)
24338 return;
24339
24340 if (is_naming_typedef_decl (TYPE_NAME (type)))
24341 {
24342 /* Here, we are in the case of decl being a typedef naming
24343 an anonymous type, e.g.:
24344 typedef struct {...} foo;
24345 In that case TREE_TYPE (decl) is not a typedef variant
24346 type and TYPE_NAME of the anonymous type is set to the
24347 TYPE_DECL of the typedef. This construct is emitted by
24348 the C++ FE.
24349
24350 TYPE is the anonymous struct named by the typedef
24351 DECL. As we need the DW_AT_type attribute of the
24352 DW_TAG_typedef to point to the DIE of TYPE, let's
24353 generate that DIE right away. add_type_attribute
24354 called below will then pick (via lookup_type_die) that
24355 anonymous struct DIE. */
24356 if (!TREE_ASM_WRITTEN (type))
24357 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24358
24359 /* This is a GNU Extension. We are adding a
24360 DW_AT_linkage_name attribute to the DIE of the
24361 anonymous struct TYPE. The value of that attribute
24362 is the name of the typedef decl naming the anonymous
24363 struct. This greatly eases the work of consumers of
24364 this debug info. */
24365 add_linkage_name_raw (lookup_type_die (type), decl);
24366 }
24367 }
24368
24369 add_type_attribute (type_die, type, decl_quals (decl), false,
24370 context_die);
24371
24372 if (is_naming_typedef_decl (decl))
24373 /* We want that all subsequent calls to lookup_type_die with
24374 TYPE in argument yield the DW_TAG_typedef we have just
24375 created. */
24376 equate_type_number_to_die (type, type_die);
24377
24378 add_alignment_attribute (type_die, TREE_TYPE (decl));
24379
24380 add_accessibility_attribute (type_die, decl);
24381
24382 if (DECL_ABSTRACT_P (decl))
24383 equate_decl_number_to_die (decl, type_die);
24384
24385 if (get_AT (type_die, DW_AT_name))
24386 add_pubtype (decl, type_die);
24387 }
24388
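/* Illustrative sketch (an editor's summary): for a naming typedef such as

       typedef struct { int i; } foo;

   gen_typedef_die above first makes sure the anonymous struct has a DIE,
   attaches the typedef's name to that DIE through the DW_AT_linkage_name GNU
   extension, points the DW_TAG_typedef's DW_AT_type at it, and finally maps
   the struct type to the DW_TAG_typedef so that later lookup_type_die calls
   find the typedef DIE.  */
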
24389 /* Generate a DIE for a struct, class, enum or union type. */
24390
24391 static void
24392 gen_tagged_type_die (tree type,
24393 dw_die_ref context_die,
24394 enum debug_info_usage usage)
24395 {
24396 int need_pop;
24397
24398 if (type == NULL_TREE
24399 || !is_tagged_type (type))
24400 return;
24401
24402 if (TREE_ASM_WRITTEN (type))
24403 need_pop = 0;
24404 /* If this is a nested type whose containing class hasn't been written
24405 out yet, writing it out will cover this one, too. This does not apply
24406 to instantiations of member class templates; they need to be added to
24407 the containing class as they are generated. FIXME: This hurts the
24408 idea of combining type decls from multiple TUs, since we can't predict
24409 what set of template instantiations we'll get. */
24410 else if (TYPE_CONTEXT (type)
24411 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24412 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24413 {
24414 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24415
24416 if (TREE_ASM_WRITTEN (type))
24417 return;
24418
24419 /* If that failed, attach ourselves to the stub. */
24420 push_decl_scope (TYPE_CONTEXT (type));
24421 context_die = lookup_type_die (TYPE_CONTEXT (type));
24422 need_pop = 1;
24423 }
24424 else if (TYPE_CONTEXT (type) != NULL_TREE
24425 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24426 {
24427 /* If this type is local to a function that hasn't been written
24428 out yet, use a NULL context for now; it will be fixed up in
24429 decls_for_scope. */
24430 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24431 /* A declaration DIE doesn't count; nested types need to go in the
24432 specification. */
24433 if (context_die && is_declaration_die (context_die))
24434 context_die = NULL;
24435 need_pop = 0;
24436 }
24437 else
24438 {
24439 context_die = declare_in_namespace (type, context_die);
24440 need_pop = 0;
24441 }
24442
24443 if (TREE_CODE (type) == ENUMERAL_TYPE)
24444 {
24445 /* This might have been written out by the call to
24446 declare_in_namespace. */
24447 if (!TREE_ASM_WRITTEN (type))
24448 gen_enumeration_type_die (type, context_die);
24449 }
24450 else
24451 gen_struct_or_union_type_die (type, context_die, usage);
24452
24453 if (need_pop)
24454 pop_decl_scope ();
24455
24456 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24457 it up if it is ever completed. gen_*_type_die will set it for us
24458 when appropriate. */
24459 }
24460
24461 /* Generate a type description DIE. */
24462
24463 static void
24464 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24465 enum debug_info_usage usage)
24466 {
24467 struct array_descr_info info;
24468
24469 if (type == NULL_TREE || type == error_mark_node)
24470 return;
24471
24472 if (flag_checking && type)
24473 verify_type (type);
24474
24475 if (TYPE_NAME (type) != NULL_TREE
24476 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24477 && is_redundant_typedef (TYPE_NAME (type))
24478 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24479 /* The DECL of this type is a typedef we don't want to emit debug
24480 info for, but we want debug info for its underlying type.
24481 This can happen, e.g., for the injected-class-name of a C++
24482 type. */
24483 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
24484
24485 /* If TYPE is a typedef type variant, let's generate debug info
24486 for the parent typedef which TYPE is a type of. */
24487 if (typedef_variant_p (type))
24488 {
24489 if (TREE_ASM_WRITTEN (type))
24490 return;
24491
24492 tree name = TYPE_NAME (type);
24493 tree origin = decl_ultimate_origin (name);
24494 if (origin != NULL && origin != name)
24495 {
24496 gen_decl_die (origin, NULL, NULL, context_die);
24497 return;
24498 }
24499
24500 /* Prevent broken recursion; we can't hand off to the same type. */
24501 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
24502
24503 /* Give typedefs the right scope. */
24504 context_die = scope_die_for (type, context_die);
24505
24506 TREE_ASM_WRITTEN (type) = 1;
24507
24508 gen_decl_die (name, NULL, NULL, context_die);
24509 return;
24510 }
24511
24512 /* If type is an anonymous tagged type named by a typedef, let's
24513 generate debug info for the typedef. */
24514 if (is_naming_typedef_decl (TYPE_NAME (type)))
24515 {
24516 /* Use the DIE of the containing namespace as the parent DIE of
24517 the type description DIE we want to generate. */
24518 if (DECL_CONTEXT (TYPE_NAME (type))
24519 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24520 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24521
24522 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24523 return;
24524 }
24525
24526 if (lang_hooks.types.get_debug_type)
24527 {
24528 tree debug_type = lang_hooks.types.get_debug_type (type);
24529
24530 if (debug_type != NULL_TREE && debug_type != type)
24531 {
24532 gen_type_die_with_usage (debug_type, context_die, usage);
24533 return;
24534 }
24535 }
24536
24537 /* We are going to output a DIE to represent the unqualified version
24538 of this type (i.e. without any const or volatile qualifiers) so
24539 get the main variant (i.e. the unqualified version) of this type
24540 now. (Vectors and arrays are special because the debugging info is in the
24541 cloned type itself. Similarly function/method types can contain extra
24542 ref-qualification). */
24543 if (TREE_CODE (type) == FUNCTION_TYPE
24544 || TREE_CODE (type) == METHOD_TYPE)
24545 {
24546 /* For function/method types, we can't use type_main_variant here,
24547 because the main variant can have different ref-qualifiers in C++,
24548 but we try to canonicalize anyway. */
24549 tree main = TYPE_MAIN_VARIANT (type);
24550 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24551 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
24552 && check_base_type (t, main)
24553 && check_lang_type (t, type))
24554 {
24555 type = t;
24556 break;
24557 }
24558 }
24559 else if (TREE_CODE (type) != VECTOR_TYPE
24560 && TREE_CODE (type) != ARRAY_TYPE)
24561 type = type_main_variant (type);
24562
24563 /* If this is an array type with hidden descriptor, handle it first. */
24564 if (!TREE_ASM_WRITTEN (type)
24565 && lang_hooks.types.get_array_descr_info)
24566 {
24567 memset (&info, 0, sizeof (info));
24568 if (lang_hooks.types.get_array_descr_info (type, &info))
24569 {
24570 /* Fortran sometimes emits array types with no dimension. */
24571 gcc_assert (info.ndimensions >= 0
24572 && (info.ndimensions
24573 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24574 gen_descr_array_type_die (type, &info, context_die);
24575 TREE_ASM_WRITTEN (type) = 1;
24576 return;
24577 }
24578 }
24579
24580 if (TREE_ASM_WRITTEN (type))
24581 {
24582 /* Variable-length types may be incomplete even if
24583 TREE_ASM_WRITTEN. For such types, fall through to
24584 gen_array_type_die() and possibly fill in
24585 DW_AT_{upper,lower}_bound attributes. */
24586 if ((TREE_CODE (type) != ARRAY_TYPE
24587 && TREE_CODE (type) != RECORD_TYPE
24588 && TREE_CODE (type) != UNION_TYPE
24589 && TREE_CODE (type) != QUAL_UNION_TYPE)
24590 || !variably_modified_type_p (type, NULL))
24591 return;
24592 }
24593
24594 switch (TREE_CODE (type))
24595 {
24596 case ERROR_MARK:
24597 break;
24598
24599 case POINTER_TYPE:
24600 case REFERENCE_TYPE:
24601 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24602 ensures that the gen_type_die recursion will terminate even if the
24603 type is recursive. Recursive types are possible in Ada. */
24604 /* ??? We could perhaps do this for all types before the switch
24605 statement. */
24606 TREE_ASM_WRITTEN (type) = 1;
24607
24608 /* For these types, all that is required is that we output a DIE (or a
24609 set of DIEs) to represent the "basis" type. */
24610 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24611 DINFO_USAGE_IND_USE);
24612 break;
24613
24614 case OFFSET_TYPE:
24615 /* This code is used for C++ pointer-to-data-member types.
24616 Output a description of the relevant class type. */
24617 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24618 DINFO_USAGE_IND_USE);
24619
24620 /* Output a description of the type of the object pointed to. */
24621 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24622 DINFO_USAGE_IND_USE);
24623
24624 /* Now output a DIE to represent this pointer-to-data-member type
24625 itself. */
24626 gen_ptr_to_mbr_type_die (type, context_die);
24627 break;
24628
24629 case FUNCTION_TYPE:
24630 /* Force out return type (in case it wasn't forced out already). */
24631 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24632 DINFO_USAGE_DIR_USE);
24633 gen_subroutine_type_die (type, context_die);
24634 break;
24635
24636 case METHOD_TYPE:
24637 /* Force out return type (in case it wasn't forced out already). */
24638 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24639 DINFO_USAGE_DIR_USE);
24640 gen_subroutine_type_die (type, context_die);
24641 break;
24642
24643 case ARRAY_TYPE:
24644 case VECTOR_TYPE:
24645 gen_array_type_die (type, context_die);
24646 break;
24647
24648 case ENUMERAL_TYPE:
24649 case RECORD_TYPE:
24650 case UNION_TYPE:
24651 case QUAL_UNION_TYPE:
24652 gen_tagged_type_die (type, context_die, usage);
24653 return;
24654
24655 case VOID_TYPE:
24656 case INTEGER_TYPE:
24657 case REAL_TYPE:
24658 case FIXED_POINT_TYPE:
24659 case COMPLEX_TYPE:
24660 case BOOLEAN_TYPE:
24661 case POINTER_BOUNDS_TYPE:
24662 /* No DIEs needed for fundamental types. */
24663 break;
24664
24665 case NULLPTR_TYPE:
24666 case LANG_TYPE:
24667 /* Just use DW_TAG_unspecified_type. */
24668 {
24669 dw_die_ref type_die = lookup_type_die (type);
24670 if (type_die == NULL)
24671 {
24672 tree name = TYPE_IDENTIFIER (type);
24673 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24674 type);
24675 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24676 equate_type_number_to_die (type, type_die);
24677 }
24678 }
24679 break;
24680
24681 default:
24682 if (is_cxx_auto (type))
24683 {
24684 tree name = TYPE_IDENTIFIER (type);
24685 dw_die_ref *die = (name == get_identifier ("auto")
24686 ? &auto_die : &decltype_auto_die);
24687 if (!*die)
24688 {
24689 *die = new_die (DW_TAG_unspecified_type,
24690 comp_unit_die (), NULL_TREE);
24691 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24692 }
24693 equate_type_number_to_die (type, *die);
24694 break;
24695 }
24696 gcc_unreachable ();
24697 }
24698
24699 TREE_ASM_WRITTEN (type) = 1;
24700 }
24701
24702 static void
24703 gen_type_die (tree type, dw_die_ref context_die)
24704 {
24705 if (type != error_mark_node)
24706 {
24707 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24708 if (flag_checking)
24709 {
24710 dw_die_ref die = lookup_type_die (type);
24711 if (die)
24712 check_die (die);
24713 }
24714 }
24715 }
24716
24717 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24718 things which are local to the given block. */
24719
24720 static void
24721 gen_block_die (tree stmt, dw_die_ref context_die)
24722 {
24723 int must_output_die = 0;
24724 bool inlined_func;
24725
24726 /* Ignore blocks that are NULL. */
24727 if (stmt == NULL_TREE)
24728 return;
24729
24730 inlined_func = inlined_function_outer_scope_p (stmt);
24731
24732 /* If the block is one fragment of a non-contiguous block, do not
24733 process the variables, since they will have been done by the
24734 origin block. Do process subblocks. */
24735 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24736 {
24737 tree sub;
24738
24739 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24740 gen_block_die (sub, context_die);
24741
24742 return;
24743 }
24744
24745 /* Determine if we need to output any Dwarf DIEs at all to represent this
24746 block. */
24747 if (inlined_func)
24748 /* The outer scopes for inlinings *must* always be represented. We
24749 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24750 must_output_die = 1;
24751 else
24752 {
24753 /* Determine if this block directly contains any "significant"
24754 local declarations which we will need to output DIEs for. */
24755 if (debug_info_level > DINFO_LEVEL_TERSE)
24756 /* We are not in terse mode so *any* local declaration counts
24757 as being a "significant" one. */
24758 must_output_die = ((BLOCK_VARS (stmt) != NULL
24759 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24760 && (TREE_USED (stmt)
24761 || TREE_ASM_WRITTEN (stmt)
24762 || BLOCK_ABSTRACT (stmt)));
24763 else if ((TREE_USED (stmt)
24764 || TREE_ASM_WRITTEN (stmt)
24765 || BLOCK_ABSTRACT (stmt))
24766 && !dwarf2out_ignore_block (stmt))
24767 must_output_die = 1;
24768 }
24769
24770 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24771 DIE for any block which contains no significant local declarations at
24772 all. Rather, in such cases we just call `decls_for_scope' so that any
24773 needed Dwarf info for any sub-blocks will get properly generated. Note
24774 that in terse mode, our definition of what constitutes a "significant"
24775 local declaration gets restricted to include only inlined function
24776 instances and local (nested) function definitions. */
24777 if (must_output_die)
24778 {
24779 if (inlined_func)
24780 {
24781 /* If STMT block is abstract, that means we have been called
24782 indirectly from dwarf2out_abstract_function.
24783 That function rightfully marks the descendant blocks (of
24784 the abstract function it is dealing with) as being abstract,
24785 precisely to prevent us from emitting any
24786 DW_TAG_inlined_subroutine DIE as a descendant
24787 of an abstract function instance. So in that case, we should
24788 not call gen_inlined_subroutine_die.
24789
24790 Later though, when cgraph asks dwarf2out to emit info
24791 for the concrete instance of the function decl into which
24792 the concrete instance of STMT got inlined, the latter will lead
24793 to the generation of a DW_TAG_inlined_subroutine DIE. */
24794 if (! BLOCK_ABSTRACT (stmt))
24795 gen_inlined_subroutine_die (stmt, context_die);
24796 }
24797 else
24798 gen_lexical_block_die (stmt, context_die);
24799 }
24800 else
24801 decls_for_scope (stmt, context_die);
24802 }
24803
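/* Illustrative sketch (an editor's example; the exact DIEs depend on
   optimization): given

       static inline int sq (int x) { int y = x * x; return y; }
       int f (int a) { int t = sq (a); return t; }

   the block of f into which sq's body was inlined is an inlined-function
   outer scope and is routed above to gen_inlined_subroutine_die, a block that
   directly declares a used local such as t may be routed to
   gen_lexical_block_die, and blocks with nothing significant of their own
   only get decls_for_scope called for their sub-blocks.  */
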
24804 /* Process variable DECL (or variable with origin ORIGIN) within
24805 block STMT and add it to CONTEXT_DIE. */
24806 static void
24807 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24808 {
24809 dw_die_ref die;
24810 tree decl_or_origin = decl ? decl : origin;
24811
24812 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24813 die = lookup_decl_die (decl_or_origin);
24814 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24815 {
24816 if (TYPE_DECL_IS_STUB (decl_or_origin))
24817 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24818 else
24819 die = lookup_decl_die (decl_or_origin);
24820 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24821 if (! die && ! early_dwarf)
24822 return;
24823 }
24824 else
24825 die = NULL;
24826
24827 /* Avoid creating DIEs for local typedefs and concrete static variables that
24828 will only be pruned later. */
24829 if ((origin || decl_ultimate_origin (decl))
24830 && (TREE_CODE (decl_or_origin) == TYPE_DECL
24831 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
24832 {
24833 origin = decl_ultimate_origin (decl_or_origin);
24834 if (decl && VAR_P (decl) && die != NULL)
24835 {
24836 die = lookup_decl_die (origin);
24837 if (die != NULL)
24838 equate_decl_number_to_die (decl, die);
24839 }
24840 return;
24841 }
24842
24843 if (die != NULL && die->die_parent == NULL)
24844 add_child_die (context_die, die);
24845 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
24846 {
24847 if (early_dwarf)
24848 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
24849 stmt, context_die);
24850 }
24851 else
24852 {
24853 if (decl && DECL_P (decl))
24854 {
24855 die = lookup_decl_die (decl);
24856
24857 /* Early created DIEs do not have a parent as the decls refer
24858 to the function as DECL_CONTEXT rather than the BLOCK. */
24859 if (die && die->die_parent == NULL)
24860 {
24861 gcc_assert (in_lto_p);
24862 add_child_die (context_die, die);
24863 }
24864 }
24865
24866 gen_decl_die (decl, origin, NULL, context_die);
24867 }
24868 }
24869
24870 /* Generate all of the decls declared within a given scope and (recursively)
24871 all of its sub-blocks. */
24872
24873 static void
24874 decls_for_scope (tree stmt, dw_die_ref context_die)
24875 {
24876 tree decl;
24877 unsigned int i;
24878 tree subblocks;
24879
24880 /* Ignore NULL blocks. */
24881 if (stmt == NULL_TREE)
24882 return;
24883
24884 /* Output the DIEs to represent all of the data objects and typedefs
24885 declared directly within this block but not within any nested
24886 sub-blocks. Also, nested function and tag DIEs have been
24887 generated with a parent of NULL; fix that up now. We don't
24888 have to do this if we're at -g1. */
24889 if (debug_info_level > DINFO_LEVEL_TERSE)
24890 {
24891 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
24892 process_scope_var (stmt, decl, NULL_TREE, context_die);
24893 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
24894 origin - avoid doing this twice as we have no good way to see
24895 if we've done it once already. */
24896 if (! early_dwarf)
24897 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
24898 {
24899 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
24900 if (decl == current_function_decl)
24901 /* Ignore declarations of the current function: although they
24902 are declarations, gen_subprogram_die would treat them
24903 as definitions again because they are equal to
24904 current_function_decl, and endlessly recurse. */;
24905 else if (TREE_CODE (decl) == FUNCTION_DECL)
24906 process_scope_var (stmt, decl, NULL_TREE, context_die);
24907 else
24908 process_scope_var (stmt, NULL_TREE, decl, context_die);
24909 }
24910 }
24911
24912 /* Even if we're at -g1, we need to process the subblocks in order to get
24913 inlined call information. */
24914
24915 /* Output the DIEs to represent all sub-blocks (and the items declared
24916 therein) of this block. */
24917 for (subblocks = BLOCK_SUBBLOCKS (stmt);
24918 subblocks != NULL;
24919 subblocks = BLOCK_CHAIN (subblocks))
24920 gen_block_die (subblocks, context_die);
24921 }
24922
24923 /* Is this a typedef we can avoid emitting? */
24924
24925 bool
24926 is_redundant_typedef (const_tree decl)
24927 {
24928 if (TYPE_DECL_IS_STUB (decl))
24929 return true;
24930
24931 if (DECL_ARTIFICIAL (decl)
24932 && DECL_CONTEXT (decl)
24933 && is_tagged_type (DECL_CONTEXT (decl))
24934 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
24935 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
24936 /* Also ignore the artificial member typedef for the class name. */
24937 return true;
24938
24939 return false;
24940 }
24941
24942 /* Return TRUE if TYPE is a typedef that names a type for linkage
24943 purposes. This kind of typedef is produced by the C++ FE for
24944 constructs like:
24945
24946 typedef struct {...} foo;
24947
24948 In that case, there is no typedef variant type produced for foo.
24949 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
24950 struct type. */
24951
24952 static bool
24953 is_naming_typedef_decl (const_tree decl)
24954 {
24955 if (decl == NULL_TREE
24956 || TREE_CODE (decl) != TYPE_DECL
24957 || DECL_NAMELESS (decl)
24958 || !is_tagged_type (TREE_TYPE (decl))
24959 || DECL_IS_BUILTIN (decl)
24960 || is_redundant_typedef (decl)
24961 /* It looks like Ada produces TYPE_DECLs that are very similar
24962 to C++ naming typedefs but that have different
24963 semantics. Let's be specific to C++ for now. */
24964 || !is_cxx (decl))
24965 return FALSE;
24966
24967 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
24968 && TYPE_NAME (TREE_TYPE (decl)) == decl
24969 && (TYPE_STUB_DECL (TREE_TYPE (decl))
24970 != TYPE_NAME (TREE_TYPE (decl))));
24971 }
24972
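/* Illustrative sketch (an editor's example of the distinction drawn above,
   for the C++ front end):

       typedef struct { int i; } foo;       <-- naming typedef: the anonymous
                                                struct's TYPE_NAME is foo's
                                                TYPE_DECL, so we return true
       typedef struct bar { int j; } baz;   <-- ordinary typedef: baz has
                                                DECL_ORIGINAL_TYPE set, so we
                                                return false  */
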
24973 /* Looks up the DIE for a context. */
24974
24975 static inline dw_die_ref
24976 lookup_context_die (tree context)
24977 {
24978 if (context)
24979 {
24980 /* Find die that represents this context. */
24981 if (TYPE_P (context))
24982 {
24983 context = TYPE_MAIN_VARIANT (context);
24984 dw_die_ref ctx = lookup_type_die (context);
24985 if (!ctx)
24986 return NULL;
24987 return strip_naming_typedef (context, ctx);
24988 }
24989 else
24990 return lookup_decl_die (context);
24991 }
24992 return comp_unit_die ();
24993 }
24994
24995 /* Returns the DIE for a context. */
24996
24997 static inline dw_die_ref
24998 get_context_die (tree context)
24999 {
25000 if (context)
25001 {
25002 /* Find die that represents this context. */
25003 if (TYPE_P (context))
25004 {
25005 context = TYPE_MAIN_VARIANT (context);
25006 return strip_naming_typedef (context, force_type_die (context));
25007 }
25008 else
25009 return force_decl_die (context);
25010 }
25011 return comp_unit_die ();
25012 }
25013
25014 /* Returns the DIE for decl. A DIE will always be returned. */
25015
25016 static dw_die_ref
25017 force_decl_die (tree decl)
25018 {
25019 dw_die_ref decl_die;
25020 unsigned saved_external_flag;
25021 tree save_fn = NULL_TREE;
25022 decl_die = lookup_decl_die (decl);
25023 if (!decl_die)
25024 {
25025 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25026
25027 decl_die = lookup_decl_die (decl);
25028 if (decl_die)
25029 return decl_die;
25030
25031 switch (TREE_CODE (decl))
25032 {
25033 case FUNCTION_DECL:
25034 /* Clear current_function_decl, so that gen_subprogram_die thinks
25035 that this is a declaration. At this point, we just want to force
25036 a declaration DIE. */
25037 save_fn = current_function_decl;
25038 current_function_decl = NULL_TREE;
25039 gen_subprogram_die (decl, context_die);
25040 current_function_decl = save_fn;
25041 break;
25042
25043 case VAR_DECL:
25044 /* Set the external flag to force a declaration DIE. Restore it after
25045 the gen_decl_die() call. */
25046 saved_external_flag = DECL_EXTERNAL (decl);
25047 DECL_EXTERNAL (decl) = 1;
25048 gen_decl_die (decl, NULL, NULL, context_die);
25049 DECL_EXTERNAL (decl) = saved_external_flag;
25050 break;
25051
25052 case NAMESPACE_DECL:
25053 if (dwarf_version >= 3 || !dwarf_strict)
25054 dwarf2out_decl (decl);
25055 else
25056 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25057 decl_die = comp_unit_die ();
25058 break;
25059
25060 case TRANSLATION_UNIT_DECL:
25061 decl_die = comp_unit_die ();
25062 break;
25063
25064 default:
25065 gcc_unreachable ();
25066 }
25067
25068 /* We should be able to find the DIE now. */
25069 if (!decl_die)
25070 decl_die = lookup_decl_die (decl);
25071 gcc_assert (decl_die);
25072 }
25073
25074 return decl_die;
25075 }
25076
25077 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25078 always returned. */
25079
25080 static dw_die_ref
25081 force_type_die (tree type)
25082 {
25083 dw_die_ref type_die;
25084
25085 type_die = lookup_type_die (type);
25086 if (!type_die)
25087 {
25088 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25089
25090 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25091 false, context_die);
25092 gcc_assert (type_die);
25093 }
25094 return type_die;
25095 }
25096
25097 /* Force out any required namespaces to be able to output DECL,
25098 and return the new context_die for it, if it's changed. */
25099
25100 static dw_die_ref
25101 setup_namespace_context (tree thing, dw_die_ref context_die)
25102 {
25103 tree context = (DECL_P (thing)
25104 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25105 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25106 /* Force out the namespace. */
25107 context_die = force_decl_die (context);
25108
25109 return context_die;
25110 }
25111
25112 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25113 type) within its namespace, if appropriate.
25114
25115 For compatibility with older debuggers, namespace DIEs only contain
25116 declarations; all definitions are emitted at CU scope, with
25117 DW_AT_specification pointing to the declaration (like with class
25118 members). */
25119
25120 static dw_die_ref
25121 declare_in_namespace (tree thing, dw_die_ref context_die)
25122 {
25123 dw_die_ref ns_context;
25124
25125 if (debug_info_level <= DINFO_LEVEL_TERSE)
25126 return context_die;
25127
25128 /* External declarations in the local scope only need to be emitted
25129 once, not once in the namespace and once in the scope.
25130
25131 This avoids declaring the `extern' below in the
25132 namespace DIE as well as in the innermost scope:
25133
25134 namespace S
25135 {
25136 int i=5;
25137 int foo()
25138 {
25139 int i=8;
25140 extern int i;
25141 return i;
25142 }
25143 }
25144 */
25145 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25146 return context_die;
25147
25148 /* If this decl is from an inlined function, then don't try to emit it in its
25149 namespace, as we will get confused. It would have already been emitted
25150 when the abstract instance of the inline function was emitted anyway. */
25151 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25152 return context_die;
25153
25154 ns_context = setup_namespace_context (thing, context_die);
25155
25156 if (ns_context != context_die)
25157 {
25158 if (is_fortran ())
25159 return ns_context;
25160 if (DECL_P (thing))
25161 gen_decl_die (thing, NULL, NULL, ns_context);
25162 else
25163 gen_type_die (thing, ns_context);
25164 }
25165 return context_die;
25166 }
25167
25168 /* Generate a DIE for a namespace or namespace alias. */
25169
25170 static void
25171 gen_namespace_die (tree decl, dw_die_ref context_die)
25172 {
25173 dw_die_ref namespace_die;
25174
25175 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25176 they are an alias of. */
25177 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25178 {
25179 /* Output a real namespace or module. */
25180 context_die = setup_namespace_context (decl, comp_unit_die ());
25181 namespace_die = new_die (is_fortran ()
25182 ? DW_TAG_module : DW_TAG_namespace,
25183 context_die, decl);
25184 /* For Fortran modules defined in a different CU, don't add src coords. */
25185 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25186 {
25187 const char *name = dwarf2_name (decl, 0);
25188 if (name)
25189 add_name_attribute (namespace_die, name);
25190 }
25191 else
25192 add_name_and_src_coords_attributes (namespace_die, decl);
25193 if (DECL_EXTERNAL (decl))
25194 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25195 equate_decl_number_to_die (decl, namespace_die);
25196 }
25197 else
25198 {
25199 /* Output a namespace alias. */
25200
25201 /* Force out the namespace we are an alias of, if necessary. */
25202 dw_die_ref origin_die
25203 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25204
25205 if (DECL_FILE_SCOPE_P (decl)
25206 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25207 context_die = setup_namespace_context (decl, comp_unit_die ());
25208 /* Now create the namespace alias DIE. */
25209 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25210 add_name_and_src_coords_attributes (namespace_die, decl);
25211 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25212 equate_decl_number_to_die (decl, namespace_die);
25213 }
25214 if ((dwarf_version >= 5 || !dwarf_strict)
25215 && lang_hooks.decls.decl_dwarf_attribute (decl,
25216 DW_AT_export_symbols) == 1)
25217 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25218
25219 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25220 if (want_pubnames ())
25221 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25222 }
25223
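/* Illustrative sketch (an editor's example): for a namespace alias such as

       namespace A { int i; }
       namespace B = A;

   gen_namespace_die above forces out A's DW_TAG_namespace DIE if needed and
   then emits B as a DW_TAG_imported_declaration whose DW_AT_import points at
   A's DIE, instead of creating a second namespace DIE.  */
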
25224 /* Generate Dwarf debug information for a decl described by DECL.
25225 The return value is currently only meaningful for PARM_DECLs;
25226 for all other decls it returns NULL.
25227
25228 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25229 It can be NULL otherwise. */
25230
25231 static dw_die_ref
25232 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25233 dw_die_ref context_die)
25234 {
25235 tree decl_or_origin = decl ? decl : origin;
25236 tree class_origin = NULL, ultimate_origin;
25237
25238 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25239 return NULL;
25240
25241 /* Ignore pointer bounds decls. */
25242 if (DECL_P (decl_or_origin)
25243 && TREE_TYPE (decl_or_origin)
25244 && POINTER_BOUNDS_P (decl_or_origin))
25245 return NULL;
25246
25247 switch (TREE_CODE (decl_or_origin))
25248 {
25249 case ERROR_MARK:
25250 break;
25251
25252 case CONST_DECL:
25253 if (!is_fortran () && !is_ada ())
25254 {
25255 /* The individual enumerators of an enum type get output when we output
25256 the Dwarf representation of the relevant enum type itself. */
25257 break;
25258 }
25259
25260 /* Emit its type. */
25261 gen_type_die (TREE_TYPE (decl), context_die);
25262
25263 /* And its containing namespace. */
25264 context_die = declare_in_namespace (decl, context_die);
25265
25266 gen_const_die (decl, context_die);
25267 break;
25268
25269 case FUNCTION_DECL:
25270 #if 0
25271 /* FIXME */
25272 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25273 on local redeclarations of global functions. That seems broken. */
25274 if (current_function_decl != decl)
25275 /* This is only a declaration. */;
25276 #endif
25277
25278 /* We should have abstract copies already and should not generate
25279 stray type DIEs in late LTO dumping. */
25280 if (! early_dwarf)
25281 ;
25282
25283 /* If we're emitting a clone, emit info for the abstract instance. */
25284 else if (origin || DECL_ORIGIN (decl) != decl)
25285 dwarf2out_abstract_function (origin
25286 ? DECL_ORIGIN (origin)
25287 : DECL_ABSTRACT_ORIGIN (decl));
25288
25289 /* If we're emitting a possibly inlined function emit it as
25290 abstract instance. */
25291 else if (cgraph_function_possibly_inlined_p (decl)
25292 && ! DECL_ABSTRACT_P (decl)
25293 && ! class_or_namespace_scope_p (context_die)
25294 /* dwarf2out_abstract_function won't emit a die if this is just
25295 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25296 that case, because that works only if we have a die. */
25297 && DECL_INITIAL (decl) != NULL_TREE)
25298 dwarf2out_abstract_function (decl);
25299
25300 /* Otherwise we're emitting the primary DIE for this decl. */
25301 else if (debug_info_level > DINFO_LEVEL_TERSE)
25302 {
25303 /* Before we describe the FUNCTION_DECL itself, make sure that we
25304 have its containing type. */
25305 if (!origin)
25306 origin = decl_class_context (decl);
25307 if (origin != NULL_TREE)
25308 gen_type_die (origin, context_die);
25309
25310 /* And its return type. */
25311 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25312
25313 /* And its virtual context. */
25314 if (DECL_VINDEX (decl) != NULL_TREE)
25315 gen_type_die (DECL_CONTEXT (decl), context_die);
25316
25317 /* Make sure we have a member DIE for decl. */
25318 if (origin != NULL_TREE)
25319 gen_type_die_for_member (origin, decl, context_die);
25320
25321 /* And its containing namespace. */
25322 context_die = declare_in_namespace (decl, context_die);
25323 }
25324
25325 /* Now output a DIE to represent the function itself. */
25326 if (decl)
25327 gen_subprogram_die (decl, context_die);
25328 break;
25329
25330 case TYPE_DECL:
25331 /* If we are in terse mode, don't generate any DIEs to represent any
25332 actual typedefs. */
25333 if (debug_info_level <= DINFO_LEVEL_TERSE)
25334 break;
25335
25336 /* In the special case of a TYPE_DECL node representing the declaration
25337 of some type tag, if the given TYPE_DECL is marked as having been
25338 instantiated from some other (original) TYPE_DECL node (e.g. one which
25339 was generated within the original definition of an inline function) we
25340 used to generate a special (abbreviated) DW_TAG_structure_type,
25341 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25342 should be actually referencing those DIEs, as variable DIEs with that
25343 type would be emitted already in the abstract origin, so it was always
25344 removed during unused type pruning. Don't add anything in this
25345 case. */
25346 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25347 break;
25348
25349 if (is_redundant_typedef (decl))
25350 gen_type_die (TREE_TYPE (decl), context_die);
25351 else
25352 /* Output a DIE to represent the typedef itself. */
25353 gen_typedef_die (decl, context_die);
25354 break;
25355
25356 case LABEL_DECL:
25357 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25358 gen_label_die (decl, context_die);
25359 break;
25360
25361 case VAR_DECL:
25362 case RESULT_DECL:
25363 /* If we are in terse mode, don't generate any DIEs to represent any
25364 variable declarations or definitions. */
25365 if (debug_info_level <= DINFO_LEVEL_TERSE)
25366 break;
25367
25368 /* Avoid generating stray type DIEs during late dwarf dumping.
25369 All types have been dumped early. */
25370 if (early_dwarf
25371 /* ??? But in LTRANS we cannot annotate early created variably
25372 modified type DIEs without copying them and adjusting all
25373 references to them. Dump them again as happens for inlining
25374 which copies both the decl and the types. */
25375 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25376 in VLA bound information for example. */
25377 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25378 current_function_decl)))
25379 {
25380 /* Output any DIEs that are needed to specify the type of this data
25381 object. */
25382 if (decl_by_reference_p (decl_or_origin))
25383 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25384 else
25385 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25386 }
25387
25388 if (early_dwarf)
25389 {
25390 /* And its containing type. */
25391 class_origin = decl_class_context (decl_or_origin);
25392 if (class_origin != NULL_TREE)
25393 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25394
25395 /* And its containing namespace. */
25396 context_die = declare_in_namespace (decl_or_origin, context_die);
25397 }
25398
25399 /* Now output the DIE to represent the data object itself. This gets
25400 complicated because of the possibility that the VAR_DECL really
25401 represents an inlined instance of a formal parameter for an inline
25402 function. */
25403 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25404 if (ultimate_origin != NULL_TREE
25405 && TREE_CODE (ultimate_origin) == PARM_DECL)
25406 gen_formal_parameter_die (decl, origin,
25407 true /* Emit name attribute. */,
25408 context_die);
25409 else
25410 gen_variable_die (decl, origin, context_die);
25411 break;
25412
25413 case FIELD_DECL:
25414 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
25415 /* Ignore the nameless fields that are used to skip bits but handle C++
25416 anonymous unions and structs. */
25417 if (DECL_NAME (decl) != NULL_TREE
25418 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25419 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25420 {
25421 gen_type_die (member_declared_type (decl), context_die);
25422 gen_field_die (decl, ctx, context_die);
25423 }
25424 break;
25425
25426 case PARM_DECL:
25427 /* Avoid generating stray type DIEs during late dwarf dumping.
25428 All types have been dumped early. */
25429 if (early_dwarf
25430 /* ??? But in LTRANS we cannot annotate early created variably
25431 modified type DIEs without copying them and adjusting all
25432 references to them. Dump them again as happens for inlining
25433 which copies both the decl and the types. */
25434 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25435 in VLA bound information for example. */
25436 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25437 current_function_decl)))
25438 {
25439 if (DECL_BY_REFERENCE (decl_or_origin))
25440 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25441 else
25442 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25443 }
25444 return gen_formal_parameter_die (decl, origin,
25445 true /* Emit name attribute. */,
25446 context_die);
25447
25448 case NAMESPACE_DECL:
25449 if (dwarf_version >= 3 || !dwarf_strict)
25450 gen_namespace_die (decl, context_die);
25451 break;
25452
25453 case IMPORTED_DECL:
25454 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25455 DECL_CONTEXT (decl), context_die);
25456 break;
25457
25458 case NAMELIST_DECL:
25459 gen_namelist_decl (DECL_NAME (decl), context_die,
25460 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25461 break;
25462
25463 default:
25464 /* Probably some frontend-internal decl. Assume we don't care. */
25465 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25466 break;
25467 }
25468
25469 return NULL;
25470 }
25471 \f
25472 /* Output initial debug information for global DECL. Called at the
25473 end of the parsing process.
25474
25475 This is the initial debug generation process. As such, the DIEs
25476 generated may be incomplete. A later debug generation pass
25477 (dwarf2out_late_global_decl) will augment the information generated
25478 in this pass (e.g., with complete location info). */
25479
25480 static void
25481 dwarf2out_early_global_decl (tree decl)
25482 {
25483 set_early_dwarf s;
25484
25485 /* gen_decl_die() will set DECL_ABSTRACT because
25486 cgraph_function_possibly_inlined_p() returns true. This in
25487 turn will cause DW_AT_inline attributes to be set.
25488
25489 This happens because at early dwarf generation, there is no
25490 cgraph information, causing cgraph_function_possibly_inlined_p()
25491 to return true. Trick cgraph_function_possibly_inlined_p()
25492 while we generate dwarf early. */
25493 bool save = symtab->global_info_ready;
25494 symtab->global_info_ready = true;
25495
25496 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25497 other DECLs and they can point to template types or other things
25498 that dwarf2out can't handle when done via dwarf2out_decl. */
25499 if (TREE_CODE (decl) != TYPE_DECL
25500 && TREE_CODE (decl) != PARM_DECL)
25501 {
25502 if (TREE_CODE (decl) == FUNCTION_DECL)
25503 {
25504 tree save_fndecl = current_function_decl;
25505
25506 /* For nested functions, make sure we have DIEs for the parents first
25507 so that all nested DIEs are generated at the proper scope in the
25508 first shot. */
25509 tree context = decl_function_context (decl);
25510 if (context != NULL)
25511 {
25512 dw_die_ref context_die = lookup_decl_die (context);
25513 current_function_decl = context;
25514
25515 /* Avoid emitting DIEs multiple times, but still process CONTEXT
25516 enough so that it lands in its own context. This avoids type
25517 pruning issues later on. */
25518 if (context_die == NULL || is_declaration_die (context_die))
25519 dwarf2out_decl (context);
25520 }
25521
25522 /* Emit the abstract origin of a function first. This happens
25523 with C++ constructor clones, for example, and keeps
25524 dwarf2out_abstract_function happy, as it requires the early
25525 DIE of the abstract instance to be present. */
25526 tree origin = DECL_ABSTRACT_ORIGIN (decl);
25527 dw_die_ref origin_die;
25528 if (origin != NULL
25529 /* Do not emit the DIE multiple times but make sure to
25530 process it fully here in case we just saw a declaration. */
25531 && ((origin_die = lookup_decl_die (origin)) == NULL
25532 || is_declaration_die (origin_die)))
25533 {
25534 current_function_decl = origin;
25535 dwarf2out_decl (origin);
25536 }
25537
25538 /* Emit the DIE for decl but avoid doing that multiple times. */
25539 dw_die_ref old_die;
25540 if ((old_die = lookup_decl_die (decl)) == NULL
25541 || is_declaration_die (old_die))
25542 {
25543 current_function_decl = decl;
25544 dwarf2out_decl (decl);
25545 }
25546
25547 current_function_decl = save_fndecl;
25548 }
25549 else
25550 dwarf2out_decl (decl);
25551 }
25552 symtab->global_info_ready = save;
25553 }
25554
25555 /* Output debug information for global decl DECL. Called from
25556 toplev.c after compilation proper has finished. */
25557
25558 static void
25559 dwarf2out_late_global_decl (tree decl)
25560 {
25561 /* Fill in any location information we were unable to determine
25562 on the first pass. */
25563 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25564 {
25565 dw_die_ref die = lookup_decl_die (decl);
25566
25567 /* We may have to generate early debug late for LTO in case debug
25568 was not enabled at compile-time or the target doesn't support
25569 the LTO early debug scheme. */
25570 if (! die && in_lto_p)
25571 {
25572 dwarf2out_decl (decl);
25573 die = lookup_decl_die (decl);
25574 }
25575
25576 if (die)
25577 {
25578 /* We get called via the symtab code invoking late_global_decl
25579 for symbols that are optimized out. Do not add locations
25580 for those, except if they have a DECL_VALUE_EXPR, in which case
25581 they are relevant for debuggers. */
25582 varpool_node *node = varpool_node::get (decl);
25583 if ((! node || ! node->definition) && ! DECL_HAS_VALUE_EXPR_P (decl))
25584 tree_add_const_value_attribute_for_decl (die, decl);
25585 else
25586 add_location_or_const_value_attribute (die, decl, false);
25587 }
25588 }
25589 }
25590
25591 /* Output debug information for type decl DECL. Called from toplev.c
25592 and from language front ends (to record built-in types). */
25593 static void
25594 dwarf2out_type_decl (tree decl, int local)
25595 {
25596 if (!local)
25597 {
25598 set_early_dwarf s;
25599 dwarf2out_decl (decl);
25600 }
25601 }
25602
25603 /* Output debug information for imported module or decl DECL.
25604 NAME is non-NULL name in the lexical block if the decl has been renamed.
25605 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
25606 that DECL belongs to.
25607 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
25608 static void
25609 dwarf2out_imported_module_or_decl_1 (tree decl,
25610 tree name,
25611 tree lexical_block,
25612 dw_die_ref lexical_block_die)
25613 {
25614 expanded_location xloc;
25615 dw_die_ref imported_die = NULL;
25616 dw_die_ref at_import_die;
25617
25618 if (TREE_CODE (decl) == IMPORTED_DECL)
25619 {
25620 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25621 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25622 gcc_assert (decl);
25623 }
25624 else
25625 xloc = expand_location (input_location);
25626
25627 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25628 {
25629 at_import_die = force_type_die (TREE_TYPE (decl));
25630 /* For namespace N { typedef void T; } using N::T; base_type_die
25631 returns NULL, but DW_TAG_imported_declaration requires
25632 the DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
25633 if (!at_import_die)
25634 {
25635 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25636 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25637 at_import_die = lookup_type_die (TREE_TYPE (decl));
25638 gcc_assert (at_import_die);
25639 }
25640 }
25641 else
25642 {
25643 at_import_die = lookup_decl_die (decl);
25644 if (!at_import_die)
25645 {
25646 /* If we're trying to avoid duplicate debug info, we may not have
25647 emitted the member decl for this field. Emit it now. */
25648 if (TREE_CODE (decl) == FIELD_DECL)
25649 {
25650 tree type = DECL_CONTEXT (decl);
25651
25652 if (TYPE_CONTEXT (type)
25653 && TYPE_P (TYPE_CONTEXT (type))
25654 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25655 DINFO_USAGE_DIR_USE))
25656 return;
25657 gen_type_die_for_member (type, decl,
25658 get_context_die (TYPE_CONTEXT (type)));
25659 }
25660 if (TREE_CODE (decl) == NAMELIST_DECL)
25661 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25662 get_context_die (DECL_CONTEXT (decl)),
25663 NULL_TREE);
25664 else
25665 at_import_die = force_decl_die (decl);
25666 }
25667 }
25668
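/* A namespace import becomes DW_TAG_imported_module (DWARF 3 and later, or
non-strict DWARF); everything else becomes DW_TAG_imported_declaration. */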
25669 if (TREE_CODE (decl) == NAMESPACE_DECL)
25670 {
25671 if (dwarf_version >= 3 || !dwarf_strict)
25672 imported_die = new_die (DW_TAG_imported_module,
25673 lexical_block_die,
25674 lexical_block);
25675 else
25676 return;
25677 }
25678 else
25679 imported_die = new_die (DW_TAG_imported_declaration,
25680 lexical_block_die,
25681 lexical_block);
25682
25683 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25684 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25685 if (debug_column_info && xloc.column)
25686 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25687 if (name)
25688 add_AT_string (imported_die, DW_AT_name,
25689 IDENTIFIER_POINTER (name));
25690 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25691 }
25692
25693 /* Output debug information for imported module or decl DECL.
25694 NAME is non-NULL name in context if the decl has been renamed.
25695 CHILD is true if decl is one of the renamed decls as part of
25696 importing whole module.
25697 IMPLICIT is set if this hook is called for an implicit import
25698 such as inline namespace. */
25699
25700 static void
25701 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25702 bool child, bool implicit)
25703 {
25704 /* dw_die_ref at_import_die; */
25705 dw_die_ref scope_die;
25706
25707 if (debug_info_level <= DINFO_LEVEL_TERSE)
25708 return;
25709
25710 gcc_assert (decl);
25711
25712 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
25713 should be enough; for DWARF4 and older, even if we emit
25714 DW_AT_export_symbols as an extension, add the implicit
25715 DW_TAG_imported_module anyway for consumers unaware of it. */
25716 if (implicit
25717 && dwarf_version >= 5
25718 && lang_hooks.decls.decl_dwarf_attribute (decl,
25719 DW_AT_export_symbols) == 1)
25720 return;
25721
25722 set_early_dwarf s;
25723
25724 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
25725 two DIEs: the DIE for the decl being referenced and the DIE for the
25726 scope it is imported into. */
25727
25728 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
25729 module or decl. If no DIE is found for a non-global, force a new one. */
25730 if (context
25731 && TYPE_P (context)
25732 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25733 return;
25734
25735 scope_die = get_context_die (context);
25736
25737 if (child)
25738 {
25739 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25740 there is nothing we can do here. */
25741 if (dwarf_version < 3 && dwarf_strict)
25742 return;
25743
25744 gcc_assert (scope_die->die_child);
25745 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25746 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25747 scope_die = scope_die->die_child;
25748 }
25749
25750 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
25751 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25752 }
25753
25754 /* Output debug information for namelists. */
25755
25756 static dw_die_ref
25757 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25758 {
25759 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25760 tree value;
25761 unsigned i;
25762
25763 if (debug_info_level <= DINFO_LEVEL_TERSE)
25764 return NULL;
25765
25766 gcc_assert (scope_die != NULL);
25767 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25768 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25769
25770 /* If there are no item_decls, we have a nondefining namelist, e.g.
25771 with USE association; hence, set DW_AT_declaration. */
25772 if (item_decls == NULL_TREE)
25773 {
25774 add_AT_flag (nml_die, DW_AT_declaration, 1);
25775 return nml_die;
25776 }
25777
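/* Emit a DW_TAG_namelist_item for each member, referencing the member's
DIE and forcing its creation if it does not exist yet. */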
25778 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25779 {
25780 nml_item_ref_die = lookup_decl_die (value);
25781 if (!nml_item_ref_die)
25782 nml_item_ref_die = force_decl_die (value);
25783
25784 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25785 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25786 }
25787 return nml_die;
25788 }
25789
25790
25791 /* Write the debugging output for DECL. */
25792
25793 static void
25794 dwarf2out_decl (tree decl)
25795 {
25796 dw_die_ref context_die = comp_unit_die ();
25797
25798 switch (TREE_CODE (decl))
25799 {
25800 case ERROR_MARK:
25801 return;
25802
25803 case FUNCTION_DECL:
25804 /* If we're a nested function, initially use a parent of NULL; if we're
25805 a plain function, this will be fixed up in decls_for_scope. If
25806 we're a method, it will be ignored, since we already have a DIE. */
25807 if (decl_function_context (decl)
25808 /* But if we're in terse mode, we don't care about scope. */
25809 && debug_info_level > DINFO_LEVEL_TERSE)
25810 context_die = NULL;
25811 break;
25812
25813 case VAR_DECL:
25814 /* For local statics lookup proper context die. */
25815 if (local_function_static (decl))
25816 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25817
25818 /* If we are in terse mode, don't generate any DIEs to represent any
25819 variable declarations or definitions. */
25820 if (debug_info_level <= DINFO_LEVEL_TERSE)
25821 return;
25822 break;
25823
25824 case CONST_DECL:
25825 if (debug_info_level <= DINFO_LEVEL_TERSE)
25826 return;
25827 if (!is_fortran () && !is_ada ())
25828 return;
25829 if (TREE_STATIC (decl) && decl_function_context (decl))
25830 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25831 break;
25832
25833 case NAMESPACE_DECL:
25834 case IMPORTED_DECL:
25835 if (debug_info_level <= DINFO_LEVEL_TERSE)
25836 return;
25837 if (lookup_decl_die (decl) != NULL)
25838 return;
25839 break;
25840
25841 case TYPE_DECL:
25842 /* Don't emit stubs for types unless they are needed by other DIEs. */
25843 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
25844 return;
25845
25846 /* Don't bother trying to generate any DIEs to represent any of the
25847 normal built-in types for the language we are compiling. */
25848 if (DECL_IS_BUILTIN (decl))
25849 return;
25850
25851 /* If we are in terse mode, don't generate any DIEs for types. */
25852 if (debug_info_level <= DINFO_LEVEL_TERSE)
25853 return;
25854
25855 /* If we're a function-scope tag, initially use a parent of NULL;
25856 this will be fixed up in decls_for_scope. */
25857 if (decl_function_context (decl))
25858 context_die = NULL;
25859
25860 break;
25861
25862 case NAMELIST_DECL:
25863 break;
25864
25865 default:
25866 return;
25867 }
25868
25869 gen_decl_die (decl, NULL, NULL, context_die);
25870
25871 if (flag_checking)
25872 {
25873 dw_die_ref die = lookup_decl_die (decl);
25874 if (die)
25875 check_die (die);
25876 }
25877 }
25878
25879 /* Write the debugging output for DECL. */
25880
25881 static void
25882 dwarf2out_function_decl (tree decl)
25883 {
25884 dwarf2out_decl (decl);
25885 call_arg_locations = NULL;
25886 call_arg_loc_last = NULL;
25887 call_site_count = -1;
25888 tail_call_site_count = -1;
25889 decl_loc_table->empty ();
25890 cached_dw_loc_list_table->empty ();
25891 }
25892
25893 /* Output a marker (i.e. a label) for the beginning of the generated code for
25894 a lexical block. */
25895
25896 static void
25897 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
25898 unsigned int blocknum)
25899 {
25900 switch_to_section (current_function_section ());
25901 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
25902 }
25903
25904 /* Output a marker (i.e. a label) for the end of the generated code for a
25905 lexical block. */
25906
25907 static void
25908 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
25909 {
25910 switch_to_section (current_function_section ());
25911 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
25912 }
25913
25914 /* Returns nonzero if it is appropriate not to emit any debugging
25915 information for BLOCK, because it doesn't contain any instructions.
25916
25917 Don't allow this for blocks with nested functions or local classes
25918 as we would end up with orphans, and in the presence of scheduling
25919 we may end up calling them anyway. */
25920
25921 static bool
25922 dwarf2out_ignore_block (const_tree block)
25923 {
25924 tree decl;
25925 unsigned int i;
25926
25927 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
25928 if (TREE_CODE (decl) == FUNCTION_DECL
25929 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
25930 return 0;
25931 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
25932 {
25933 decl = BLOCK_NONLOCALIZED_VAR (block, i);
25934 if (TREE_CODE (decl) == FUNCTION_DECL
25935 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
25936 return 0;
25937 }
25938
25939 return 1;
25940 }
25941
25942 /* Hash table routines for file_hash. */
25943
25944 bool
25945 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
25946 {
25947 return filename_cmp (p1->filename, p2) == 0;
25948 }
25949
25950 hashval_t
25951 dwarf_file_hasher::hash (dwarf_file_data *p)
25952 {
25953 return htab_hash_string (p->filename);
25954 }
25955
25956 /* Look up FILE_NAME (in the list of filenames that we know about here in
25957 dwarf2out.c) and return its "index". The index of each (known) filename is
25958 just a unique number which is associated with only that one filename. We
25959 need such numbers for the sake of generating labels (in the .debug_sfnames
25960 section) and references to those file numbers (in the .debug_srcinfo
25961 and .debug_macinfo sections). If the filename given as an argument is not
25962 found in our current list, add it to the list and assign it the next
25963 available unique index number. */
25964
25965 static struct dwarf_file_data *
25966 lookup_filename (const char *file_name)
25967 {
25968 struct dwarf_file_data * created;
25969
25970 if (!file_name)
25971 return NULL;
25972
25973 dwarf_file_data **slot
25974 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
25975 INSERT);
25976 if (*slot)
25977 return *slot;
25978
25979 created = ggc_alloc<dwarf_file_data> ();
25980 created->filename = file_name;
25981 created->emitted_number = 0;
25982 *slot = created;
25983 return created;
25984 }
25985
25986 /* If the assembler will construct the file table, then translate the compiler
25987 internal file table number into the assembler file table number, and emit
25988 a .file directive if we haven't already emitted one yet. The file table
25989 numbers are different because we prune debug info for unused variables and
25990 types, which may include filenames. */
25991
25992 static int
25993 maybe_emit_file (struct dwarf_file_data * fd)
25994 {
25995 if (! fd->emitted_number)
25996 {
25997 if (last_emitted_file)
25998 fd->emitted_number = last_emitted_file->emitted_number + 1;
25999 else
26000 fd->emitted_number = 1;
26001 last_emitted_file = fd;
26002
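/* When the assembler builds the line table, announce the file to it
with a .file directive. */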
26003 if (DWARF2_ASM_LINE_DEBUG_INFO)
26004 {
26005 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26006 output_quoted_string (asm_out_file,
26007 remap_debug_filename (fd->filename));
26008 fputc ('\n', asm_out_file);
26009 }
26010 }
26011
26012 return fd->emitted_number;
26013 }
26014
26015 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26016 That generation should happen after function debug info has been
26017 generated. The value of the attribute is the constant value of ARG. */
26018
26019 static void
26020 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26021 {
26022 die_arg_entry entry;
26023
26024 if (!die || !arg)
26025 return;
26026
26027 gcc_assert (early_dwarf);
26028
26029 if (!tmpl_value_parm_die_table)
26030 vec_alloc (tmpl_value_parm_die_table, 32);
26031
26032 entry.die = die;
26033 entry.arg = arg;
26034 vec_safe_push (tmpl_value_parm_die_table, entry);
26035 }
26036
26037 /* Return TRUE if T is an instance of generic type, FALSE
26038 otherwise. */
26039
26040 static bool
26041 generic_type_p (tree t)
26042 {
26043 if (t == NULL_TREE || !TYPE_P (t))
26044 return false;
26045 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26046 }
26047
26048 /* Schedule the generation of the generic parameter dies for the
26049 instance of generic type T. The proper generation itself is later
26050 done by gen_scheduled_generic_parms_dies. */
26051
26052 static void
26053 schedule_generic_params_dies_gen (tree t)
26054 {
26055 if (!generic_type_p (t))
26056 return;
26057
26058 gcc_assert (early_dwarf);
26059
26060 if (!generic_type_instances)
26061 vec_alloc (generic_type_instances, 256);
26062
26063 vec_safe_push (generic_type_instances, t);
26064 }
26065
26066 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26067 by append_entry_to_tmpl_value_parm_die_table. This function must
26068 be called after function DIEs have been generated. */
26069
26070 static void
26071 gen_remaining_tmpl_value_param_die_attribute (void)
26072 {
26073 if (tmpl_value_parm_die_table)
26074 {
26075 unsigned i, j;
26076 die_arg_entry *e;
26077
26078 /* We do this in two phases - first get the cases we can
26079 handle during early-finish, preserving those we cannot
26080 (containing symbolic constants where we don't yet know
26081 whether we are going to output the referenced symbols).
26082 For those we try again at late-finish. */
26083 j = 0;
26084 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26085 {
26086 if (!e->die->removed
26087 && !tree_add_const_value_attribute (e->die, e->arg))
26088 {
26089 dw_loc_descr_ref loc = NULL;
26090 if (! early_dwarf
26091 && (dwarf_version >= 5 || !dwarf_strict))
26092 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26093 if (loc)
26094 add_AT_loc (e->die, DW_AT_location, loc);
26095 else
26096 (*tmpl_value_parm_die_table)[j++] = *e;
26097 }
26098 }
26099 tmpl_value_parm_die_table->truncate (j);
26100 }
26101 }
26102
26103 /* Generate generic parameters DIEs for instances of generic types
26104 that have been previously scheduled by
26105 schedule_generic_params_dies_gen. This function must be called
26106 after all the types of the CU have been laid out. */
26107
26108 static void
26109 gen_scheduled_generic_parms_dies (void)
26110 {
26111 unsigned i;
26112 tree t;
26113
26114 if (!generic_type_instances)
26115 return;
26116
26117 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26118 if (COMPLETE_TYPE_P (t))
26119 gen_generic_params_dies (t);
26120
26121 generic_type_instances = NULL;
26122 }
26123
26124
26125 /* Replace DW_AT_name for the decl with name. */
26126
26127 static void
26128 dwarf2out_set_name (tree decl, tree name)
26129 {
26130 dw_die_ref die;
26131 dw_attr_node *attr;
26132 const char *dname;
26133
26134 die = TYPE_SYMTAB_DIE (decl);
26135 if (!die)
26136 return;
26137
26138 dname = dwarf2_name (name, 0);
26139 if (!dname)
26140 return;
26141
26142 attr = get_AT (die, DW_AT_name);
26143 if (attr)
26144 {
26145 struct indirect_string_node *node;
26146
26147 node = find_AT_string (dname);
26148 /* Replace the string. */
26149 attr->dw_attr_val.v.val_str = node;
26150 }
26151
26152 else
26153 add_name_attribute (die, dname);
26154 }
26155
26156 /* True if before or during processing of the first function being emitted. */
26157 static bool in_first_function_p = true;
26158 /* True if the loc_note seen during a dwarf2out_var_location call might
26159 still be before the first real instruction, at an address equal to .Ltext0. */
26160 static bool maybe_at_text_label_p = true;
26161 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26162 static unsigned int first_loclabel_num_not_at_text_label;
26163
26164 /* Called by the final INSN scan whenever we see a var location. We
26165 use it to drop labels in the right places, and throw the location in
26166 our lookup table. */
26167
26168 static void
26169 dwarf2out_var_location (rtx_insn *loc_note)
26170 {
26171 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26172 struct var_loc_node *newloc;
26173 rtx_insn *next_real, *next_note;
26174 rtx_insn *call_insn = NULL;
26175 static const char *last_label;
26176 static const char *last_postcall_label;
26177 static bool last_in_cold_section_p;
26178 static rtx_insn *expected_next_loc_note;
26179 tree decl;
26180 bool var_loc_p;
26181
26182 if (!NOTE_P (loc_note))
26183 {
26184 if (CALL_P (loc_note))
26185 {
26186 call_site_count++;
26187 if (SIBLING_CALL_P (loc_note))
26188 tail_call_site_count++;
26189 if (optimize == 0 && !flag_var_tracking)
26190 {
26191 /* When the var-tracking pass is not running, there is no note
26192 for indirect calls whose target is compile-time known. In this
26193 case, process such calls specifically so that we generate call
26194 sites for them anyway. */
26195 rtx x = PATTERN (loc_note);
26196 if (GET_CODE (x) == PARALLEL)
26197 x = XVECEXP (x, 0, 0);
26198 if (GET_CODE (x) == SET)
26199 x = SET_SRC (x);
26200 if (GET_CODE (x) == CALL)
26201 x = XEXP (x, 0);
26202 if (!MEM_P (x)
26203 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26204 || !SYMBOL_REF_DECL (XEXP (x, 0))
26205 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26206 != FUNCTION_DECL))
26207 {
26208 call_insn = loc_note;
26209 loc_note = NULL;
26210 var_loc_p = false;
26211
26212 next_real = next_real_insn (call_insn);
26213 next_note = NULL;
26214 cached_next_real_insn = NULL;
26215 goto create_label;
26216 }
26217 }
26218 }
26219 return;
26220 }
26221
26222 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26223 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26224 return;
26225
26226 /* Optimize processing a large consecutive sequence of location
26227 notes so we don't spend too much time in next_real_insn. If the
26228 next insn is another location note, remember the next_real_insn
26229 calculation for next time. */
26230 next_real = cached_next_real_insn;
26231 if (next_real)
26232 {
26233 if (expected_next_loc_note != loc_note)
26234 next_real = NULL;
26235 }
26236
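/* Peek at the following insn: if it is another location note, the
next_real_insn computation below is cached so that the next call can
reuse it. */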
26237 next_note = NEXT_INSN (loc_note);
26238 if (! next_note
26239 || next_note->deleted ()
26240 || ! NOTE_P (next_note)
26241 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26242 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26243 next_note = NULL;
26244
26245 if (! next_real)
26246 next_real = next_real_insn (loc_note);
26247
26248 if (next_note)
26249 {
26250 expected_next_loc_note = next_note;
26251 cached_next_real_insn = next_real;
26252 }
26253 else
26254 cached_next_real_insn = NULL;
26255
26256 /* If there are no instructions which would be affected by this note,
26257 don't do anything. */
26258 if (var_loc_p
26259 && next_real == NULL_RTX
26260 && !NOTE_DURING_CALL_P (loc_note))
26261 return;
26262
26263 create_label:
26264
26265 if (next_real == NULL_RTX)
26266 next_real = get_last_insn ();
26267
26268 /* If there were any real insns between the note we processed last
26269 time and this note (or if it is the first note), clear
26270 last_{,postcall_}label so that they are not reused this time. */
26271 if (last_var_location_insn == NULL_RTX
26272 || last_var_location_insn != next_real
26273 || last_in_cold_section_p != in_cold_section_p)
26274 {
26275 last_label = NULL;
26276 last_postcall_label = NULL;
26277 }
26278
26279 if (var_loc_p)
26280 {
26281 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26282 newloc = add_var_loc_to_decl (decl, loc_note,
26283 NOTE_DURING_CALL_P (loc_note)
26284 ? last_postcall_label : last_label);
26285 if (newloc == NULL)
26286 return;
26287 }
26288 else
26289 {
26290 decl = NULL_TREE;
26291 newloc = NULL;
26292 }
26293
26294 /* If there were no real insns between the note we processed last
26295 time and this note, use the label we emitted last time. Otherwise
26296 create a new label and emit it. */
26297 if (last_label == NULL)
26298 {
26299 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26300 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26301 loclabel_num++;
26302 last_label = ggc_strdup (loclabel);
26303 /* See if loclabel might be equal to .Ltext0. If yes,
26304 bump first_loclabel_num_not_at_text_label. */
26305 if (!have_multiple_function_sections
26306 && in_first_function_p
26307 && maybe_at_text_label_p)
26308 {
26309 static rtx_insn *last_start;
26310 rtx_insn *insn;
26311 for (insn = loc_note; insn; insn = previous_insn (insn))
26312 if (insn == last_start)
26313 break;
26314 else if (!NONDEBUG_INSN_P (insn))
26315 continue;
26316 else
26317 {
26318 rtx body = PATTERN (insn);
26319 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26320 continue;
26321 /* Inline asm could occupy zero bytes. */
26322 else if (GET_CODE (body) == ASM_INPUT
26323 || asm_noperands (body) >= 0)
26324 continue;
26325 #ifdef HAVE_attr_length
26326 else if (get_attr_min_length (insn) == 0)
26327 continue;
26328 #endif
26329 else
26330 {
26331 /* Assume insn has non-zero length. */
26332 maybe_at_text_label_p = false;
26333 break;
26334 }
26335 }
26336 if (maybe_at_text_label_p)
26337 {
26338 last_start = loc_note;
26339 first_loclabel_num_not_at_text_label = loclabel_num;
26340 }
26341 }
26342 }
26343
26344 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26345 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26346
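/* Not a variable location: this is a call site (either a call-arg-location
note or a call insn handled above); record it on the call_arg_locations
list. */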
26347 if (!var_loc_p)
26348 {
26349 struct call_arg_loc_node *ca_loc
26350 = ggc_cleared_alloc<call_arg_loc_node> ();
26351 rtx_insn *prev
26352 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26353
26354 ca_loc->call_arg_loc_note = loc_note;
26355 ca_loc->next = NULL;
26356 ca_loc->label = last_label;
26357 gcc_assert (prev
26358 && (CALL_P (prev)
26359 || (NONJUMP_INSN_P (prev)
26360 && GET_CODE (PATTERN (prev)) == SEQUENCE
26361 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26362 if (!CALL_P (prev))
26363 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26364 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26365
26366 /* Look for a SYMBOL_REF in the "prev" instruction. */
26367 rtx x = get_call_rtx_from (PATTERN (prev));
26368 if (x)
26369 {
26370 /* Try to get the call symbol, if any. */
26371 if (MEM_P (XEXP (x, 0)))
26372 x = XEXP (x, 0);
26373 /* First, look for a memory access to a symbol_ref. */
26374 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26375 && SYMBOL_REF_DECL (XEXP (x, 0))
26376 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26377 ca_loc->symbol_ref = XEXP (x, 0);
26378 /* Otherwise, look at a compile-time known user-level function
26379 declaration. */
26380 else if (MEM_P (x)
26381 && MEM_EXPR (x)
26382 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26383 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26384 }
26385
26386 ca_loc->block = insn_scope (prev);
26387 if (call_arg_locations)
26388 call_arg_loc_last->next = ca_loc;
26389 else
26390 call_arg_locations = ca_loc;
26391 call_arg_loc_last = ca_loc;
26392 }
26393 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26394 newloc->label = last_label;
26395 else
26396 {
26397 if (!last_postcall_label)
26398 {
26399 sprintf (loclabel, "%s-1", last_label);
26400 last_postcall_label = ggc_strdup (loclabel);
26401 }
26402 newloc->label = last_postcall_label;
26403 }
26404
26405 last_var_location_insn = next_real;
26406 last_in_cold_section_p = in_cold_section_p;
26407 }
26408
26409 /* Called from finalize_size_functions for size functions so that their body
26410 can be encoded in the debug info to describe the layout of variable-length
26411 structures. */
26412
26413 static void
26414 dwarf2out_size_function (tree decl)
26415 {
26416 function_to_dwarf_procedure (decl);
26417 }
26418
26419 /* Note in one location list that text section has changed. */
26420
26421 int
26422 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26423 {
26424 var_loc_list *list = *slot;
26425 if (list->first)
26426 list->last_before_switch
26427 = list->last->next ? list->last->next : list->last;
26428 return 1;
26429 }
26430
26431 /* Note in all location lists that text section has changed. */
26432
26433 static void
26434 var_location_switch_text_section (void)
26435 {
26436 if (decl_loc_table == NULL)
26437 return;
26438
26439 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26440 }
26441
26442 /* Create a new line number table. */
26443
26444 static dw_line_info_table *
26445 new_line_info_table (void)
26446 {
26447 dw_line_info_table *table;
26448
26449 table = ggc_cleared_alloc<dw_line_info_table> ();
26450 table->file_num = 1;
26451 table->line_num = 1;
26452 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26453
26454 return table;
26455 }
26456
26457 /* Look up the "current" table into which we emit line info, so
26458 that we don't have to do it for every source line. */
26459
26460 static void
26461 set_cur_line_info_table (section *sec)
26462 {
26463 dw_line_info_table *table;
26464
26465 if (sec == text_section)
26466 table = text_section_line_info;
26467 else if (sec == cold_text_section)
26468 {
26469 table = cold_text_section_line_info;
26470 if (!table)
26471 {
26472 cold_text_section_line_info = table = new_line_info_table ();
26473 table->end_label = cold_end_label;
26474 }
26475 }
26476 else
26477 {
26478 const char *end_label;
26479
26480 if (crtl->has_bb_partition)
26481 {
26482 if (in_cold_section_p)
26483 end_label = crtl->subsections.cold_section_end_label;
26484 else
26485 end_label = crtl->subsections.hot_section_end_label;
26486 }
26487 else
26488 {
26489 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26490 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26491 current_function_funcdef_no);
26492 end_label = ggc_strdup (label);
26493 }
26494
26495 table = new_line_info_table ();
26496 table->end_label = end_label;
26497
26498 vec_safe_push (separate_line_info, table);
26499 }
26500
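/* With assembler-generated line info, carry over the is_stmt state from
the previous table so that only changes need to be emitted. */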
26501 if (DWARF2_ASM_LINE_DEBUG_INFO)
26502 table->is_stmt = (cur_line_info_table
26503 ? cur_line_info_table->is_stmt
26504 : DWARF_LINE_DEFAULT_IS_STMT_START);
26505 cur_line_info_table = table;
26506 }
26507
26508
26509 /* We need to reset the locations at the beginning of each
26510 function. We can't do this in the end_function hook, because the
26511 declarations that use the locations won't have been output when
26512 that hook is called. Also compute have_multiple_function_sections here. */
26513
26514 static void
26515 dwarf2out_begin_function (tree fun)
26516 {
26517 section *sec = function_section (fun);
26518
26519 if (sec != text_section)
26520 have_multiple_function_sections = true;
26521
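/* The first time we see a function with a hot/cold partition, set up the
cold text section and emit its label. */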
26522 if (crtl->has_bb_partition && !cold_text_section)
26523 {
26524 gcc_assert (current_function_decl == fun);
26525 cold_text_section = unlikely_text_section ();
26526 switch_to_section (cold_text_section);
26527 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26528 switch_to_section (sec);
26529 }
26530
26531 dwarf2out_note_section_used ();
26532 call_site_count = 0;
26533 tail_call_site_count = 0;
26534
26535 set_cur_line_info_table (sec);
26536 }
26537
26538 /* Helper function of dwarf2out_end_function, called only after emitting
26539 the very first function into assembly. Check if some .debug_loc range
26540 might end with a .LVL* label that could be equal to .Ltext0.
26541 In that case we must force using absolute addresses in .debug_loc ranges,
26542 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26543 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26544 list terminator.
26545 Set have_multiple_function_sections to true in that case and
26546 terminate htab traversal. */
26547
26548 int
26549 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26550 {
26551 var_loc_list *entry = *slot;
26552 struct var_loc_node *node;
26553
26554 node = entry->first;
26555 if (node && node->next && node->next->label)
26556 {
26557 unsigned int i;
26558 const char *label = node->next->label;
26559 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26560
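/* See whether the label at which the second location starts (and thus the
first range ends) is one of the .LVL labels that may coincide with
.Ltext0. */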
26561 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26562 {
26563 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26564 if (strcmp (label, loclabel) == 0)
26565 {
26566 have_multiple_function_sections = true;
26567 return 0;
26568 }
26569 }
26570 }
26571 return 1;
26572 }
26573
26574 /* Hook called after emitting a function into assembly.
26575 This does something only for the very first function emitted. */
26576
26577 static void
26578 dwarf2out_end_function (unsigned int)
26579 {
26580 if (in_first_function_p
26581 && !have_multiple_function_sections
26582 && first_loclabel_num_not_at_text_label
26583 && decl_loc_table)
26584 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26585 in_first_function_p = false;
26586 maybe_at_text_label_p = false;
26587 }
26588
26589 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26590 front-ends register a translation unit even before dwarf2out_init is
26591 called. */
26592 static tree main_translation_unit = NULL_TREE;
26593
26594 /* Hook called by front-ends after they built their main translation unit.
26595 Associate comp_unit_die to UNIT. */
26596
26597 static void
26598 dwarf2out_register_main_translation_unit (tree unit)
26599 {
26600 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26601 && main_translation_unit == NULL_TREE);
26602 main_translation_unit = unit;
26603 /* If dwarf2out_init has not been called yet, it will perform the association
26604 itself looking at main_translation_unit. */
26605 if (decl_die_table != NULL)
26606 equate_decl_number_to_die (unit, comp_unit_die ());
26607 }
26608
26609 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26610
26611 static void
26612 push_dw_line_info_entry (dw_line_info_table *table,
26613 enum dw_line_info_opcode opcode, unsigned int val)
26614 {
26615 dw_line_info_entry e;
26616 e.opcode = opcode;
26617 e.val = val;
26618 vec_safe_push (table->entries, e);
26619 }
26620
26621 /* Output a label to mark the beginning of a source code line entry
26622 and record information relating to this source line, in
26623 'line_info_table' for later output of the .debug_line section. */
26624 /* ??? The discriminator parameter ought to be unsigned. */
26625
26626 static void
26627 dwarf2out_source_line (unsigned int line, unsigned int column,
26628 const char *filename,
26629 int discriminator, bool is_stmt)
26630 {
26631 unsigned int file_num;
26632 dw_line_info_table *table;
26633
26634 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26635 return;
26636
26637 /* The discriminator column was added in DWARF 4. Simplify the code
26638 below by removing the discriminator if we're not supposed to output it. */
26639 if (dwarf_version < 4 && dwarf_strict)
26640 discriminator = 0;
26641
26642 if (!debug_column_info)
26643 column = 0;
26644
26645 table = cur_line_info_table;
26646 file_num = maybe_emit_file (lookup_filename (filename));
26647
26648 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26649 the debugger has used the second (possibly duplicate) line number
26650 at the beginning of the function to mark the end of the prologue.
26651 We could eliminate any other duplicates within the function. For
26652 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26653 that second line number entry. */
26654 /* Recall that this end-of-prologue indication is *not* the same thing
26655 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26656 to which the hook corresponds, follows the last insn that was
26657 emitted by gen_prologue. What we need is to precede the first insn
26658 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26659 insn that corresponds to something the user wrote. These may be
26660 very different locations once scheduling is enabled. */
26661
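/* Duplicate line entry elision is currently disabled, hence the `0 &&'
below; see the ??? comments above. */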
26662 if (0 && file_num == table->file_num
26663 && line == table->line_num
26664 && column == table->column_num
26665 && discriminator == table->discrim_num
26666 && is_stmt == table->is_stmt)
26667 return;
26668
26669 switch_to_section (current_function_section ());
26670
26671 /* If requested, emit something human-readable. */
26672 if (flag_debug_asm)
26673 {
26674 if (debug_column_info)
26675 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
26676 filename, line, column);
26677 else
26678 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
26679 filename, line);
26680 }
26681
26682 if (DWARF2_ASM_LINE_DEBUG_INFO)
26683 {
26684 /* Emit the .loc directive understood by GNU as. */
26685 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
26686 file_num, line, is_stmt, discriminator */
26687 fputs ("\t.loc ", asm_out_file);
26688 fprint_ul (asm_out_file, file_num);
26689 putc (' ', asm_out_file);
26690 fprint_ul (asm_out_file, line);
26691 putc (' ', asm_out_file);
26692 fprint_ul (asm_out_file, column);
26693
26694 if (is_stmt != table->is_stmt)
26695 {
26696 fputs (" is_stmt ", asm_out_file);
26697 putc (is_stmt ? '1' : '0', asm_out_file);
26698 }
26699 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26700 {
26701 gcc_assert (discriminator > 0);
26702 fputs (" discriminator ", asm_out_file);
26703 fprint_ul (asm_out_file, (unsigned long) discriminator);
26704 }
26705 putc ('\n', asm_out_file);
26706 }
26707 else
26708 {
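/* Without assembler .loc support, emit an internal label here and record
the opcodes in TABLE; the .debug_line section is output later from
these entries. */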
26709 unsigned int label_num = ++line_info_label_num;
26710
26711 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26712
26713 push_dw_line_info_entry (table, LI_set_address, label_num);
26714 if (file_num != table->file_num)
26715 push_dw_line_info_entry (table, LI_set_file, file_num);
26716 if (discriminator != table->discrim_num)
26717 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26718 if (is_stmt != table->is_stmt)
26719 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26720 push_dw_line_info_entry (table, LI_set_line, line);
26721 if (debug_column_info)
26722 push_dw_line_info_entry (table, LI_set_column, column);
26723 }
26724
26725 table->file_num = file_num;
26726 table->line_num = line;
26727 table->column_num = column;
26728 table->discrim_num = discriminator;
26729 table->is_stmt = is_stmt;
26730 table->in_use = true;
26731 }
26732
26733 /* Record the beginning of a new source file. */
26734
26735 static void
26736 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26737 {
26738 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26739 {
26740 macinfo_entry e;
26741 e.code = DW_MACINFO_start_file;
26742 e.lineno = lineno;
26743 e.info = ggc_strdup (filename);
26744 vec_safe_push (macinfo_table, e);
26745 }
26746 }
26747
26748 /* Record the end of a source file. */
26749
26750 static void
26751 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26752 {
26753 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26754 {
26755 macinfo_entry e;
26756 e.code = DW_MACINFO_end_file;
26757 e.lineno = lineno;
26758 e.info = NULL;
26759 vec_safe_push (macinfo_table, e);
26760 }
26761 }
26762
26763 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26764 the tail part of the directive line, i.e. the part which is past the
26765 initial whitespace, #, whitespace, directive-name, whitespace part. */
26766
26767 static void
26768 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26769 const char *buffer ATTRIBUTE_UNUSED)
26770 {
26771 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26772 {
26773 macinfo_entry e;
26774 /* Insert a dummy first entry to be able to optimize the whole
26775 predefined macro block using DW_MACRO_import. */
26776 if (macinfo_table->is_empty () && lineno <= 1)
26777 {
26778 e.code = 0;
26779 e.lineno = 0;
26780 e.info = NULL;
26781 vec_safe_push (macinfo_table, e);
26782 }
26783 e.code = DW_MACINFO_define;
26784 e.lineno = lineno;
26785 e.info = ggc_strdup (buffer);
26786 vec_safe_push (macinfo_table, e);
26787 }
26788 }
26789
26790 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26791 the tail part of the directive line, i.e. the part which is past the
26792 initial whitespace, #, whitespace, directive-name, whitespace part. */
26793
26794 static void
26795 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26796 const char *buffer ATTRIBUTE_UNUSED)
26797 {
26798 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26799 {
26800 macinfo_entry e;
26801 /* Insert a dummy first entry to be able to optimize the whole
26802 predefined macro block using DW_MACRO_import. */
26803 if (macinfo_table->is_empty () && lineno <= 1)
26804 {
26805 e.code = 0;
26806 e.lineno = 0;
26807 e.info = NULL;
26808 vec_safe_push (macinfo_table, e);
26809 }
26810 e.code = DW_MACINFO_undef;
26811 e.lineno = lineno;
26812 e.info = ggc_strdup (buffer);
26813 vec_safe_push (macinfo_table, e);
26814 }
26815 }
26816
26817 /* Helpers to manipulate the hash table of macinfo entries. */
26818
26819 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
26820 {
26821 static inline hashval_t hash (const macinfo_entry *);
26822 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
26823 };
26824
26825 inline hashval_t
26826 macinfo_entry_hasher::hash (const macinfo_entry *entry)
26827 {
26828 return htab_hash_string (entry->info);
26829 }
26830
26831 inline bool
26832 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
26833 const macinfo_entry *entry2)
26834 {
26835 return !strcmp (entry1->info, entry2->info);
26836 }
26837
26838 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
26839
26840 /* Output a single .debug_macinfo entry. */
26841
26842 static void
26843 output_macinfo_op (macinfo_entry *ref)
26844 {
26845 int file_num;
26846 size_t len;
26847 struct indirect_string_node *node;
26848 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26849 struct dwarf_file_data *fd;
26850
26851 switch (ref->code)
26852 {
26853 case DW_MACINFO_start_file:
26854 fd = lookup_filename (ref->info);
26855 file_num = maybe_emit_file (fd);
26856 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
26857 dw2_asm_output_data_uleb128 (ref->lineno,
26858 "Included from line number %lu",
26859 (unsigned long) ref->lineno);
26860 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
26861 break;
26862 case DW_MACINFO_end_file:
26863 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
26864 break;
26865 case DW_MACINFO_define:
26866 case DW_MACINFO_undef:
26867 len = strlen (ref->info) + 1;
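/* For strings longer than an offset, and when .debug_str supports
merging, switch to the indirect DW_MACRO_*_strp forms instead. */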
26868 if (!dwarf_strict
26869 && len > DWARF_OFFSET_SIZE
26870 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
26871 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
26872 {
26873 ref->code = ref->code == DW_MACINFO_define
26874 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
26875 output_macinfo_op (ref);
26876 return;
26877 }
26878 dw2_asm_output_data (1, ref->code,
26879 ref->code == DW_MACINFO_define
26880 ? "Define macro" : "Undefine macro");
26881 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26882 (unsigned long) ref->lineno);
26883 dw2_asm_output_nstring (ref->info, -1, "The macro");
26884 break;
26885 case DW_MACRO_define_strp:
26886 case DW_MACRO_undef_strp:
26887 node = find_AT_string (ref->info);
26888 gcc_assert (node
26889 && (node->form == DW_FORM_strp
26890 || node->form == DW_FORM_GNU_str_index));
26891 dw2_asm_output_data (1, ref->code,
26892 ref->code == DW_MACRO_define_strp
26893 ? "Define macro strp"
26894 : "Undefine macro strp");
26895 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26896 (unsigned long) ref->lineno);
26897 if (node->form == DW_FORM_strp)
26898 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
26899 debug_str_section, "The macro: \"%s\"",
26900 ref->info);
26901 else
26902 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
26903 ref->info);
26904 break;
26905 case DW_MACRO_import:
26906 dw2_asm_output_data (1, ref->code, "Import");
26907 ASM_GENERATE_INTERNAL_LABEL (label,
26908 DEBUG_MACRO_SECTION_LABEL,
26909 ref->lineno + macinfo_label_base);
26910 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
26911 break;
26912 default:
26913 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
26914 ASM_COMMENT_START, (unsigned long) ref->code);
26915 break;
26916 }
26917 }
26918
26919 /* Attempt to make a sequence of define/undef macinfo ops shareable with
26920 other compilation unit .debug_macinfo sections. IDX is the first
26921 index of a define/undef op; return the number of ops that should be
26922 emitted in a comdat .debug_macinfo section and emit
26923 a DW_MACRO_import entry referencing it.
26924 If the define/undef entry should be emitted normally, return 0. */
26925
26926 static unsigned
26927 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
26928 macinfo_hash_type **macinfo_htab)
26929 {
26930 macinfo_entry *first, *second, *cur, *inc;
26931 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
26932 unsigned char checksum[16];
26933 struct md5_ctx ctx;
26934 char *grp_name, *tail;
26935 const char *base;
26936 unsigned int i, count, encoded_filename_len, linebuf_len;
26937 macinfo_entry **slot;
26938
26939 first = &(*macinfo_table)[idx];
26940 second = &(*macinfo_table)[idx + 1];
26941
26942 /* Optimize only if there are at least two consecutive define/undef ops,
26943 and either all of them are before first DW_MACINFO_start_file
26944 with lineno {0,1} (i.e. predefined macro block), or all of them are
26945 in some included header file. */
26946 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
26947 return 0;
26948 if (vec_safe_is_empty (files))
26949 {
26950 if (first->lineno > 1 || second->lineno > 1)
26951 return 0;
26952 }
26953 else if (first->lineno == 0)
26954 return 0;
26955
26956 /* Find the last define/undef entry that can be grouped together
26957 with first and at the same time compute md5 checksum of their
26958 codes, linenumbers and strings. */
26959 md5_init_ctx (&ctx);
26960 for (i = idx; macinfo_table->iterate (i, &cur); i++)
26961 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
26962 break;
26963 else if (vec_safe_is_empty (files) && cur->lineno > 1)
26964 break;
26965 else
26966 {
26967 unsigned char code = cur->code;
26968 md5_process_bytes (&code, 1, &ctx);
26969 checksum_uleb128 (cur->lineno, &ctx);
26970 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
26971 }
26972 md5_finish_ctx (&ctx, checksum);
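/* Number of consecutive define/undef ops grouped starting at IDX. */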
26973 count = i - idx;
26974
26975 /* From the containing include filename (if any) pick up just
26976 usable characters from its basename. */
26977 if (vec_safe_is_empty (files))
26978 base = "";
26979 else
26980 base = lbasename (files->last ().info);
26981 for (encoded_filename_len = 0, i = 0; base[i]; i++)
26982 if (ISIDNUM (base[i]) || base[i] == '.')
26983 encoded_filename_len++;
26984 /* Count . at the end. */
26985 if (encoded_filename_len)
26986 encoded_filename_len++;
26987
26988 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
26989 linebuf_len = strlen (linebuf);
26990
26991 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
26992 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
26993 + 16 * 2 + 1);
26994 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
26995 tail = grp_name + 4;
26996 if (encoded_filename_len)
26997 {
26998 for (i = 0; base[i]; i++)
26999 if (ISIDNUM (base[i]) || base[i] == '.')
27000 *tail++ = base[i];
27001 *tail++ = '.';
27002 }
27003 memcpy (tail, linebuf, linebuf_len);
27004 tail += linebuf_len;
27005 *tail++ = '.';
27006 for (i = 0; i < 16; i++)
27007 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
27008
27009 /* Construct a macinfo_entry for DW_MACRO_import
27010 in the empty vector entry before the first define/undef. */
27011 inc = &(*macinfo_table)[idx - 1];
27012 inc->code = DW_MACRO_import;
27013 inc->lineno = 0;
27014 inc->info = ggc_strdup (grp_name);
27015 if (!*macinfo_htab)
27016 *macinfo_htab = new macinfo_hash_type (10);
27017 /* Avoid emitting duplicates. */
27018 slot = (*macinfo_htab)->find_slot (inc, INSERT);
27019 if (*slot != NULL)
27020 {
27021 inc->code = 0;
27022 inc->info = NULL;
27023 /* If such an entry has been used before, just emit
27024 a DW_MACRO_import op. */
27025 inc = *slot;
27026 output_macinfo_op (inc);
27027 /* And clear all macinfo_entry in the range to avoid emitting them
27028 in the second pass. */
27029 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
27030 {
27031 cur->code = 0;
27032 cur->info = NULL;
27033 }
27034 }
27035 else
27036 {
27037 *slot = inc;
27038 inc->lineno = (*macinfo_htab)->elements ();
27039 output_macinfo_op (inc);
27040 }
27041 return count;
27042 }
27043
27044 /* Save any strings needed by the macinfo table in the debug str
27045 table. All strings must be collected into the table by the time
27046 index_string is called. */
27047
27048 static void
27049 save_macinfo_strings (void)
27050 {
27051 unsigned len;
27052 unsigned i;
27053 macinfo_entry *ref;
27054
27055 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
27056 {
27057 switch (ref->code)
27058 {
27059 /* Match the logic in output_macinfo_op to decide on
27060 indirect strings. */
27061 case DW_MACINFO_define:
27062 case DW_MACINFO_undef:
27063 len = strlen (ref->info) + 1;
27064 if (!dwarf_strict
27065 && len > DWARF_OFFSET_SIZE
27066 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27067 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27068 set_indirect_string (find_AT_string (ref->info));
27069 break;
27070 case DW_MACRO_define_strp:
27071 case DW_MACRO_undef_strp:
27072 set_indirect_string (find_AT_string (ref->info));
27073 break;
27074 default:
27075 break;
27076 }
27077 }
27078 }
27079
27080 /* Output macinfo section(s). */
27081
27082 static void
27083 output_macinfo (const char *debug_line_label, bool early_lto_debug)
27084 {
27085 unsigned i;
27086 unsigned long length = vec_safe_length (macinfo_table);
27087 macinfo_entry *ref;
27088 vec<macinfo_entry, va_gc> *files = NULL;
27089 macinfo_hash_type *macinfo_htab = NULL;
27090 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
27091
27092 if (! length)
27093 return;
27094
27095 /* output_macinfo* uses these interchangeably. */
27096 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27097 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27098 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27099 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27100
27101 /* AIX Assembler inserts the length, so adjust the reference to match the
27102 offset expected by debuggers. */
27103 strcpy (dl_section_ref, debug_line_label);
27104 if (XCOFF_DEBUGGING_INFO)
27105 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
27106
27107 /* For .debug_macro emit the section header. */
27108 if (!dwarf_strict || dwarf_version >= 5)
27109 {
27110 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27111 "DWARF macro version number");
27112 if (DWARF_OFFSET_SIZE == 8)
27113 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27114 else
27115 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27116 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
27117 debug_line_section, NULL);
27118 }
27119
27120 /* The first loop emits the primary .debug_macinfo section and clears
27121 each macinfo_entry after it has been emitted.
27122 If a longer range of define/undef ops can be optimized using
27123 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
27124 vector entry before the first define/undef of the range, and the
27125 whole range of define/undef ops is kept but not emitted. */
27126 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27127 {
27128 switch (ref->code)
27129 {
27130 case DW_MACINFO_start_file:
27131 vec_safe_push (files, *ref);
27132 break;
27133 case DW_MACINFO_end_file:
27134 if (!vec_safe_is_empty (files))
27135 files->pop ();
27136 break;
27137 case DW_MACINFO_define:
27138 case DW_MACINFO_undef:
27139 if ((!dwarf_strict || dwarf_version >= 5)
27140 && HAVE_COMDAT_GROUP
27141 && vec_safe_length (files) != 1
27142 && i > 0
27143 && i + 1 < length
27144 && (*macinfo_table)[i - 1].code == 0)
27145 {
27146 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27147 if (count)
27148 {
27149 i += count - 1;
27150 continue;
27151 }
27152 }
27153 break;
27154 case 0:
27155 /* A dummy entry may be inserted at the beginning to be able
27156 to optimize the whole block of predefined macros. */
27157 if (i == 0)
27158 continue;
27159 default:
27160 break;
27161 }
27162 output_macinfo_op (ref);
27163 ref->info = NULL;
27164 ref->code = 0;
27165 }
27166
27167 if (!macinfo_htab)
27168 return;
27169
27170 /* Save the number of transparent includes so we can adjust the
27171 label number for the fat LTO object DWARF. */
27172 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
27173
27174 delete macinfo_htab;
27175 macinfo_htab = NULL;
27176
27177 /* If any DW_MACRO_import entries were used, then at each such entry
27178 terminate the current chain, switch to a new comdat .debug_macinfo
27179 section and emit the define/undef entries within it. */
27180 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27181 switch (ref->code)
27182 {
27183 case 0:
27184 continue;
27185 case DW_MACRO_import:
27186 {
27187 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27188 tree comdat_key = get_identifier (ref->info);
27189 /* Terminate the previous .debug_macinfo section. */
27190 dw2_asm_output_data (1, 0, "End compilation unit");
27191 targetm.asm_out.named_section (debug_macinfo_section_name,
27192 SECTION_DEBUG
27193 | SECTION_LINKONCE
27194 | (early_lto_debug
27195 ? SECTION_EXCLUDE : 0),
27196 comdat_key);
27197 ASM_GENERATE_INTERNAL_LABEL (label,
27198 DEBUG_MACRO_SECTION_LABEL,
27199 ref->lineno + macinfo_label_base);
27200 ASM_OUTPUT_LABEL (asm_out_file, label);
27201 ref->code = 0;
27202 ref->info = NULL;
27203 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27204 "DWARF macro version number");
27205 if (DWARF_OFFSET_SIZE == 8)
27206 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27207 else
27208 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27209 }
27210 break;
27211 case DW_MACINFO_define:
27212 case DW_MACINFO_undef:
27213 output_macinfo_op (ref);
27214 ref->code = 0;
27215 ref->info = NULL;
27216 break;
27217 default:
27218 gcc_unreachable ();
27219 }
27220
27221 macinfo_label_base += macinfo_label_base_adj;
27222 }
27223
27224 /* Initialize the various sections and labels for dwarf output. If
27225 EARLY_LTO_DEBUG is set, use the early LTO debug sections and labels. */
27226
27227 static void
27228 init_sections_and_labels (bool early_lto_debug)
27229 {
27230 /* As we may get called multiple times have a generation count for
27231 labels. */
27232 static unsigned generation = 0;
27233
27234 if (early_lto_debug)
27235 {
27236 if (!dwarf_split_debug_info)
27237 {
27238 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27239 SECTION_DEBUG | SECTION_EXCLUDE,
27240 NULL);
27241 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
27242 SECTION_DEBUG | SECTION_EXCLUDE,
27243 NULL);
27244 debug_macinfo_section_name
27245 = ((dwarf_strict && dwarf_version < 5)
27246 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
27247 debug_macinfo_section = get_section (debug_macinfo_section_name,
27248 SECTION_DEBUG
27249 | SECTION_EXCLUDE, NULL);
27250 /* For macro info we have to refer to a debug_line section, so
27251 similar to split-dwarf emit a skeleton one for early debug. */
27252 debug_skeleton_line_section
27253 = get_section (DEBUG_LTO_LINE_SECTION,
27254 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27255 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27256 DEBUG_SKELETON_LINE_SECTION_LABEL,
27257 generation);
27258 }
27259 else
27260 {
27261 /* ??? Which of the following do we need early? */
27262 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
27263 SECTION_DEBUG | SECTION_EXCLUDE,
27264 NULL);
27265 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
27266 SECTION_DEBUG | SECTION_EXCLUDE,
27267 NULL);
27268 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27269 SECTION_DEBUG
27270 | SECTION_EXCLUDE, NULL);
27271 debug_skeleton_abbrev_section
27272 = get_section (DEBUG_LTO_ABBREV_SECTION,
27273 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27274 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27275 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27276 generation);
27277
27278 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27279 stay in the main .o, but the skeleton_line goes into the split
27280 off dwo. */
27281 debug_skeleton_line_section
27282 = get_section (DEBUG_LTO_LINE_SECTION,
27283 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27284 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27285 DEBUG_SKELETON_LINE_SECTION_LABEL,
27286 generation);
27287 debug_str_offsets_section
27288 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
27289 SECTION_DEBUG | SECTION_EXCLUDE,
27290 NULL);
27291 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27292 DEBUG_SKELETON_INFO_SECTION_LABEL,
27293 generation);
27294 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
27295 DEBUG_STR_DWO_SECTION_FLAGS,
27296 NULL);
27297 debug_macinfo_section_name
27298 = ((dwarf_strict && dwarf_version < 5)
27299 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
27300 debug_macinfo_section = get_section (debug_macinfo_section_name,
27301 SECTION_DEBUG | SECTION_EXCLUDE,
27302 NULL);
27303 }
27304 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
27305 DEBUG_STR_SECTION_FLAGS
27306 | SECTION_EXCLUDE, NULL);
27307 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27308 debug_line_str_section
27309 = get_section (DEBUG_LTO_LINE_STR_SECTION,
27310 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
27311 }
27312 else
27313 {
27314 if (!dwarf_split_debug_info)
27315 {
27316 debug_info_section = get_section (DEBUG_INFO_SECTION,
27317 SECTION_DEBUG, NULL);
27318 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27319 SECTION_DEBUG, NULL);
27320 debug_loc_section = get_section (dwarf_version >= 5
27321 ? DEBUG_LOCLISTS_SECTION
27322 : DEBUG_LOC_SECTION,
27323 SECTION_DEBUG, NULL);
27324 debug_macinfo_section_name
27325 = ((dwarf_strict && dwarf_version < 5)
27326 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
27327 debug_macinfo_section = get_section (debug_macinfo_section_name,
27328 SECTION_DEBUG, NULL);
27329 }
27330 else
27331 {
27332 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27333 SECTION_DEBUG | SECTION_EXCLUDE,
27334 NULL);
27335 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27336 SECTION_DEBUG | SECTION_EXCLUDE,
27337 NULL);
27338 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27339 SECTION_DEBUG, NULL);
27340 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27341 SECTION_DEBUG, NULL);
27342 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27343 SECTION_DEBUG, NULL);
27344 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27345 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27346 generation);
27347
27348 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27349 stay in the main .o, but the skeleton_line goes into the
27350 split off dwo. */
27351 debug_skeleton_line_section
27352 = get_section (DEBUG_DWO_LINE_SECTION,
27353 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27354 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27355 DEBUG_SKELETON_LINE_SECTION_LABEL,
27356 generation);
27357 debug_str_offsets_section
27358 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27359 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27360 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27361 DEBUG_SKELETON_INFO_SECTION_LABEL,
27362 generation);
27363 debug_loc_section = get_section (dwarf_version >= 5
27364 ? DEBUG_DWO_LOCLISTS_SECTION
27365 : DEBUG_DWO_LOC_SECTION,
27366 SECTION_DEBUG | SECTION_EXCLUDE,
27367 NULL);
27368 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27369 DEBUG_STR_DWO_SECTION_FLAGS,
27370 NULL);
27371 debug_macinfo_section_name
27372 = ((dwarf_strict && dwarf_version < 5)
27373 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
27374 debug_macinfo_section = get_section (debug_macinfo_section_name,
27375 SECTION_DEBUG | SECTION_EXCLUDE,
27376 NULL);
27377 }
27378 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27379 SECTION_DEBUG, NULL);
27380 debug_line_section = get_section (DEBUG_LINE_SECTION,
27381 SECTION_DEBUG, NULL);
27382 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27383 SECTION_DEBUG, NULL);
27384 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27385 SECTION_DEBUG, NULL);
27386 debug_str_section = get_section (DEBUG_STR_SECTION,
27387 DEBUG_STR_SECTION_FLAGS, NULL);
27388 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27389 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27390 DEBUG_STR_SECTION_FLAGS, NULL);
27391 debug_ranges_section = get_section (dwarf_version >= 5
27392 ? DEBUG_RNGLISTS_SECTION
27393 : DEBUG_RANGES_SECTION,
27394 SECTION_DEBUG, NULL);
27395 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27396 SECTION_DEBUG, NULL);
27397 }
27398
27399 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27400 DEBUG_ABBREV_SECTION_LABEL, generation);
27401 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27402 DEBUG_INFO_SECTION_LABEL, generation);
27403 info_section_emitted = false;
27404 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27405 DEBUG_LINE_SECTION_LABEL, generation);
27406 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27407 DEBUG_RANGES_SECTION_LABEL, generation);
27408 if (dwarf_version >= 5 && dwarf_split_debug_info)
27409 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27410 DEBUG_RANGES_SECTION_LABEL, 2 + generation);
27411 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27412 DEBUG_ADDR_SECTION_LABEL, generation);
27413 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27414 (dwarf_strict && dwarf_version < 5)
27415 ? DEBUG_MACINFO_SECTION_LABEL
27416 : DEBUG_MACRO_SECTION_LABEL, generation);
27417 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
27418 generation);
27419
27420 ++generation;
27421 }
27422
27423 /* Set up for Dwarf output at the start of compilation. */
27424
27425 static void
27426 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27427 {
27428 /* Allocate the file_table. */
27429 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27430
27431 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27432 /* Allocate the decl_die_table. */
27433 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27434
27435 /* Allocate the decl_loc_table. */
27436 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27437
27438 /* Allocate the cached_dw_loc_list_table. */
27439 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27440
27441 /* Allocate the initial hunk of the decl_scope_table. */
27442 vec_alloc (decl_scope_table, 256);
27443
27444 /* Allocate the initial hunk of the abbrev_die_table. */
27445 vec_alloc (abbrev_die_table, 256);
27446 /* Zero-th entry is allocated, but unused. */
27447 abbrev_die_table->quick_push (NULL);
27448
27449 /* Allocate the dwarf_proc_stack_usage_map. */
27450 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27451
27452 /* Allocate the pubtypes and pubnames vectors. */
27453 vec_alloc (pubname_table, 32);
27454 vec_alloc (pubtype_table, 32);
27455
27456 vec_alloc (incomplete_types, 64);
27457
27458 vec_alloc (used_rtx_array, 32);
27459
27460 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27461 vec_alloc (macinfo_table, 64);
27462 #endif
27463
27464 /* If front-ends already registered a main translation unit but we were not
27465 ready to perform the association, do this now. */
27466 if (main_translation_unit != NULL_TREE)
27467 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27468 }
27469
27470 /* Called before compile () starts outputting functions, variables
27471 and toplevel asms into assembly. */
27472
27473 static void
27474 dwarf2out_assembly_start (void)
27475 {
27476 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27477 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27478 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27479 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27480 COLD_TEXT_SECTION_LABEL, 0);
27481 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27482
27483 switch_to_section (text_section);
27484 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27485 #endif
27486
27487 /* Make sure the line number table for .text always exists. */
27488 text_section_line_info = new_line_info_table ();
27489 text_section_line_info->end_label = text_end_label;
27490
27491 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27492 cur_line_info_table = text_section_line_info;
27493 #endif
27494
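  /* A note on the directive below (inferred from the condition): when CFI
     directives are emitted only for debugging, i.e. the target's unwind
     info is not DWARF2-based EH, ask the assembler to produce .debug_frame
     instead of the default .eh_frame.  */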
27495 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27496 && dwarf2out_do_cfi_asm ()
27497 && (!(flag_unwind_tables || flag_exceptions)
27498 || targetm_common.except_unwind_info (&global_options) != UI_DWARF2))
27499 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27500 }
27501
27502 /* A helper function for dwarf2out_finish called through
27503 htab_traverse. Assign a string its index. All strings must be
27504 collected into the table by the time index_string is called,
27505 because the indexing code relies on htab_traverse to traverse nodes
27506 in the same order for each run. */
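/* Illustrative sketch of the split-DWARF string scheme (hypothetical
   strings, not emitted verbatim): if "foo" and "barbaz" are assigned
   indices 0 and 1 here, output_index_string_offset later writes offsets
   0 and 4 into the string-offsets section, and output_index_string then
   emits "foo\0barbaz\0" into the .dwo string section, all relying on the
   same traversal order.  */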
27507
27508 int
27509 index_string (indirect_string_node **h, unsigned int *index)
27510 {
27511 indirect_string_node *node = *h;
27512
27513 find_string_form (node);
27514 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27515 {
27516 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27517 node->index = *index;
27518 *index += 1;
27519 }
27520 return 1;
27521 }
27522
27523 /* A helper function for output_indirect_strings called through
27524 htab_traverse. Output the offset to a string and update the
27525 current offset. */
27526
27527 int
27528 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27529 {
27530 indirect_string_node *node = *h;
27531
27532 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27533 {
27534 /* Assert that this node has been assigned an index. */
27535 gcc_assert (node->index != NO_INDEX_ASSIGNED
27536 && node->index != NOT_INDEXED);
27537 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27538 "indexed string 0x%x: %s", node->index, node->str);
27539 *offset += strlen (node->str) + 1;
27540 }
27541 return 1;
27542 }
27543
27544 /* A helper function for dwarf2out_finish called through
27545 htab_traverse. Output the indexed string. */
27546
27547 int
27548 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27549 {
27550 struct indirect_string_node *node = *h;
27551
27552 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27553 {
27554 /* Assert that the strings are output in the same order as their
27555 indexes were assigned. */
27556 gcc_assert (*cur_idx == node->index);
27557 assemble_string (node->str, strlen (node->str) + 1);
27558 *cur_idx += 1;
27559 }
27560 return 1;
27561 }
27562
27563 /* A helper function for dwarf2out_finish called through
27564 htab_traverse. Emit one queued .debug_str string. */
27565
27566 int
27567 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27568 {
27569 struct indirect_string_node *node = *h;
27570
27571 node->form = find_string_form (node);
27572 if (node->form == form && node->refcount > 0)
27573 {
27574 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27575 assemble_string (node->str, strlen (node->str) + 1);
27576 }
27577
27578 return 1;
27579 }
27580
27581 /* Output the indexed string table. */
27582
27583 static void
27584 output_indirect_strings (void)
27585 {
27586 switch_to_section (debug_str_section);
27587 if (!dwarf_split_debug_info)
27588 debug_str_hash->traverse<enum dwarf_form,
27589 output_indirect_string> (DW_FORM_strp);
27590 else
27591 {
27592 unsigned int offset = 0;
27593 unsigned int cur_idx = 0;
27594
27595 skeleton_debug_str_hash->traverse<enum dwarf_form,
27596 output_indirect_string> (DW_FORM_strp);
27597
27598 switch_to_section (debug_str_offsets_section);
27599 debug_str_hash->traverse_noresize
27600 <unsigned int *, output_index_string_offset> (&offset);
27601 switch_to_section (debug_str_dwo_section);
27602 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27603 (&cur_idx);
27604 }
27605 }
27606
27607 /* Callback for htab_traverse to assign an index to an entry in the
27608 table, and to write that entry to the .debug_addr section. */
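/* As an illustration (hypothetical index): an ate_kind_label entry that
   was assigned index 3 is emitted below as one DWARF2_ADDR_SIZE-wide
   address, and a DW_OP_GNU_addr_index 3 appearing in some location
   expression then refers to that slot of .debug_addr.  */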
27609
27610 int
27611 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27612 {
27613 addr_table_entry *entry = *slot;
27614
27615 if (entry->refcount == 0)
27616 {
27617 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27618 || entry->index == NOT_INDEXED);
27619 return 1;
27620 }
27621
27622 gcc_assert (entry->index == *cur_index);
27623 (*cur_index)++;
27624
27625 switch (entry->kind)
27626 {
27627 case ate_kind_rtx:
27628 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27629 "0x%x", entry->index);
27630 break;
27631 case ate_kind_rtx_dtprel:
27632 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27633 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27634 DWARF2_ADDR_SIZE,
27635 entry->addr.rtl);
27636 fputc ('\n', asm_out_file);
27637 break;
27638 case ate_kind_label:
27639 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27640 "0x%x", entry->index);
27641 break;
27642 default:
27643 gcc_unreachable ();
27644 }
27645 return 1;
27646 }
27647
27648 /* Produce the .debug_addr section. */
27649
27650 static void
27651 output_addr_table (void)
27652 {
27653 unsigned int index = 0;
27654 if (addr_index_table == NULL || addr_index_table->size () == 0)
27655 return;
27656
27657 switch_to_section (debug_addr_section);
27658 addr_index_table
27659 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27660 }
27661
27662 #if ENABLE_ASSERT_CHECKING
27663 /* Verify that all marks are clear. */
27664
27665 static void
27666 verify_marks_clear (dw_die_ref die)
27667 {
27668 dw_die_ref c;
27669
27670 gcc_assert (! die->die_mark);
27671 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27672 }
27673 #endif /* ENABLE_ASSERT_CHECKING */
27674
27675 /* Clear the marks for a die and its children.
27676    Do nothing if the mark isn't set.  */
27677
27678 static void
27679 prune_unmark_dies (dw_die_ref die)
27680 {
27681 dw_die_ref c;
27682
27683 if (die->die_mark)
27684 die->die_mark = 0;
27685 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27686 }
27687
27688 /* Given LOC that is referenced by a DIE we're marking as used, find all
27689    DWARF procedures it references and mark them as used as well.  */
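/* For example (illustrative only), an expression containing
   DW_OP_call4 <DWARF procedure DIE> or DW_OP_convert <base type DIE>
   carries a die_ref operand; the referenced DIE is marked here so that
   it is not pruned while the expression still uses it.  */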
27690
27691 static void
27692 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27693 {
27694 for (; loc != NULL; loc = loc->dw_loc_next)
27695 switch (loc->dw_loc_opc)
27696 {
27697 case DW_OP_implicit_pointer:
27698 case DW_OP_convert:
27699 case DW_OP_reinterpret:
27700 case DW_OP_GNU_implicit_pointer:
27701 case DW_OP_GNU_convert:
27702 case DW_OP_GNU_reinterpret:
27703 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27704 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27705 break;
27706 case DW_OP_GNU_variable_value:
27707 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
27708 {
27709 dw_die_ref ref
27710 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
27711 if (ref == NULL)
27712 break;
27713 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
27714 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
27715 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
27716 }
27717 /* FALLTHRU */
27718 case DW_OP_call2:
27719 case DW_OP_call4:
27720 case DW_OP_call_ref:
27721 case DW_OP_const_type:
27722 case DW_OP_GNU_const_type:
27723 case DW_OP_GNU_parameter_ref:
27724 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27725 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27726 break;
27727 case DW_OP_regval_type:
27728 case DW_OP_deref_type:
27729 case DW_OP_GNU_regval_type:
27730 case DW_OP_GNU_deref_type:
27731 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27732 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27733 break;
27734 case DW_OP_entry_value:
27735 case DW_OP_GNU_entry_value:
27736 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27737 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27738 break;
27739 default:
27740 break;
27741 }
27742 }
27743
27744 /* Given DIE that we're marking as used, find any other dies
27745 it references as attributes and mark them as used. */
27746
27747 static void
27748 prune_unused_types_walk_attribs (dw_die_ref die)
27749 {
27750 dw_attr_node *a;
27751 unsigned ix;
27752
27753 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27754 {
27755 switch (AT_class (a))
27756 {
27757 /* Make sure DWARF procedures referenced by location descriptions will
27758 get emitted. */
27759 case dw_val_class_loc:
27760 prune_unused_types_walk_loc_descr (AT_loc (a));
27761 break;
27762 case dw_val_class_loc_list:
27763 for (dw_loc_list_ref list = AT_loc_list (a);
27764 list != NULL;
27765 list = list->dw_loc_next)
27766 prune_unused_types_walk_loc_descr (list->expr);
27767 break;
27768
27769 case dw_val_class_die_ref:
27770 /* A reference to another DIE.
27771 Make sure that it will get emitted.
27772 If it was broken out into a comdat group, don't follow it. */
27773 if (! AT_ref (a)->comdat_type_p
27774 || a->dw_attr == DW_AT_specification)
27775 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27776 break;
27777
27778 case dw_val_class_str:
27779 	  /* Reset the string's refcount to 0 so that
27780 	     prune_unused_types_update_strings accounts properly for it.  */
27781 a->dw_attr_val.v.val_str->refcount = 0;
27782 break;
27783
27784 default:
27785 break;
27786 }
27787 }
27788 }
27789
27790 /* Mark the children DIEs of DIE that describe its generic parameters and arguments.  */
27791
27792 static void
27793 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
27794 {
27795 dw_die_ref c;
27796
27797 if (die == NULL || die->die_child == NULL)
27798 return;
27799 c = die->die_child;
27800 do
27801 {
27802 if (is_template_parameter (c))
27803 prune_unused_types_mark (c, 1);
27804 c = c->die_sib;
27805 } while (c && c != die->die_child);
27806 }
27807
27808 /* Mark DIE as being used. If DOKIDS is true, then walk down
27809 to DIE's children. */
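/* A note on the die_mark values used by this pass (derived from the code
   below): 0 means not yet visited, 1 means the DIE itself has been marked
   as used, and 2 means the DIE is marked and its children have been
   walked as well.  */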
27810
27811 static void
27812 prune_unused_types_mark (dw_die_ref die, int dokids)
27813 {
27814 dw_die_ref c;
27815
27816 if (die->die_mark == 0)
27817 {
27818 /* We haven't done this node yet. Mark it as used. */
27819 die->die_mark = 1;
27820 /* If this is the DIE of a generic type instantiation,
27821 mark the children DIEs that describe its generic parms and
27822 args. */
27823 prune_unused_types_mark_generic_parms_dies (die);
27824
27825 /* We also have to mark its parents as used.
27826 (But we don't want to mark our parent's kids due to this,
27827 unless it is a class.) */
27828 if (die->die_parent)
27829 prune_unused_types_mark (die->die_parent,
27830 class_scope_p (die->die_parent));
27831
27832 /* Mark any referenced nodes. */
27833 prune_unused_types_walk_attribs (die);
27834
27835 /* If this node is a specification,
27836 also mark the definition, if it exists. */
27837 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
27838 prune_unused_types_mark (die->die_definition, 1);
27839 }
27840
27841 if (dokids && die->die_mark != 2)
27842 {
27843 /* We need to walk the children, but haven't done so yet.
27844 Remember that we've walked the kids. */
27845 die->die_mark = 2;
27846
27847 /* If this is an array type, we need to make sure our
27848 kids get marked, even if they're types. If we're
27849 breaking out types into comdat sections, do this
27850 for all type definitions. */
27851 if (die->die_tag == DW_TAG_array_type
27852 || (use_debug_types
27853 && is_type_die (die) && ! is_declaration_die (die)))
27854 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
27855 else
27856 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
27857 }
27858 }
27859
27860 /* For local classes, see whether any static member functions were
27861    emitted and, if so, mark them.  */
27862
27863 static void
27864 prune_unused_types_walk_local_classes (dw_die_ref die)
27865 {
27866 dw_die_ref c;
27867
27868 if (die->die_mark == 2)
27869 return;
27870
27871 switch (die->die_tag)
27872 {
27873 case DW_TAG_structure_type:
27874 case DW_TAG_union_type:
27875 case DW_TAG_class_type:
27876 break;
27877
27878 case DW_TAG_subprogram:
27879 if (!get_AT_flag (die, DW_AT_declaration)
27880 || die->die_definition != NULL)
27881 prune_unused_types_mark (die, 1);
27882 return;
27883
27884 default:
27885 return;
27886 }
27887
27888 /* Mark children. */
27889 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
27890 }
27891
27892 /* Walk the tree DIE and mark types that we actually use. */
27893
27894 static void
27895 prune_unused_types_walk (dw_die_ref die)
27896 {
27897 dw_die_ref c;
27898
27899 /* Don't do anything if this node is already marked and
27900 children have been marked as well. */
27901 if (die->die_mark == 2)
27902 return;
27903
27904 switch (die->die_tag)
27905 {
27906 case DW_TAG_structure_type:
27907 case DW_TAG_union_type:
27908 case DW_TAG_class_type:
27909 if (die->die_perennial_p)
27910 break;
27911
27912 for (c = die->die_parent; c; c = c->die_parent)
27913 if (c->die_tag == DW_TAG_subprogram)
27914 break;
27915
27916 /* Finding used static member functions inside of classes
27917 is needed just for local classes, because for other classes
27918 static member function DIEs with DW_AT_specification
27919 are emitted outside of the DW_TAG_*_type. If we ever change
27920 it, we'd need to call this even for non-local classes. */
27921 if (c)
27922 prune_unused_types_walk_local_classes (die);
27923
27924 /* It's a type node --- don't mark it. */
27925 return;
27926
27927 case DW_TAG_const_type:
27928 case DW_TAG_packed_type:
27929 case DW_TAG_pointer_type:
27930 case DW_TAG_reference_type:
27931 case DW_TAG_rvalue_reference_type:
27932 case DW_TAG_volatile_type:
27933 case DW_TAG_typedef:
27934 case DW_TAG_array_type:
27935 case DW_TAG_interface_type:
27936 case DW_TAG_friend:
27937 case DW_TAG_enumeration_type:
27938 case DW_TAG_subroutine_type:
27939 case DW_TAG_string_type:
27940 case DW_TAG_set_type:
27941 case DW_TAG_subrange_type:
27942 case DW_TAG_ptr_to_member_type:
27943 case DW_TAG_file_type:
27944 /* Type nodes are useful only when other DIEs reference them --- don't
27945 mark them. */
27946 /* FALLTHROUGH */
27947
27948 case DW_TAG_dwarf_procedure:
27949 /* Likewise for DWARF procedures. */
27950
27951 if (die->die_perennial_p)
27952 break;
27953
27954 return;
27955
27956 default:
27957 /* Mark everything else. */
27958 break;
27959 }
27960
27961 if (die->die_mark == 0)
27962 {
27963 die->die_mark = 1;
27964
27965 /* Now, mark any dies referenced from here. */
27966 prune_unused_types_walk_attribs (die);
27967 }
27968
27969 die->die_mark = 2;
27970
27971 /* Mark children. */
27972 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
27973 }
27974
27975 /* Increment the string counts on strings referred to from DIE's
27976 attributes. */
27977
27978 static void
27979 prune_unused_types_update_strings (dw_die_ref die)
27980 {
27981 dw_attr_node *a;
27982 unsigned ix;
27983
27984 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27985 if (AT_class (a) == dw_val_class_str)
27986 {
27987 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
27988 s->refcount++;
27989 /* Avoid unnecessarily putting strings that are used less than
27990 twice in the hash table. */
27991 if (s->refcount
27992 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
27993 {
27994 indirect_string_node **slot
27995 = debug_str_hash->find_slot_with_hash (s->str,
27996 htab_hash_string (s->str),
27997 INSERT);
27998 gcc_assert (*slot == NULL);
27999 *slot = s;
28000 }
28001 }
28002 }
28003
28004 /* Mark DIE and its children as removed. */
28005
28006 static void
28007 mark_removed (dw_die_ref die)
28008 {
28009 dw_die_ref c;
28010 die->removed = true;
28011 FOR_EACH_CHILD (die, c, mark_removed (c));
28012 }
28013
28014 /* Remove from the tree DIE any dies that aren't marked. */
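/* Layout note (as assumed throughout this file): DIE->die_child points
   at the last child, and the children form a circular singly-linked list
   through die_sib, so die->die_child->die_sib is the first child.  The
   pruning loop below relies on that representation.  */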
28015
28016 static void
28017 prune_unused_types_prune (dw_die_ref die)
28018 {
28019 dw_die_ref c;
28020
28021 gcc_assert (die->die_mark);
28022 prune_unused_types_update_strings (die);
28023
28024 if (! die->die_child)
28025 return;
28026
28027 c = die->die_child;
28028 do {
28029 dw_die_ref prev = c, next;
28030 for (c = c->die_sib; ! c->die_mark; c = next)
28031 if (c == die->die_child)
28032 {
28033 /* No marked children between 'prev' and the end of the list. */
28034 if (prev == c)
28035 /* No marked children at all. */
28036 die->die_child = NULL;
28037 else
28038 {
28039 prev->die_sib = c->die_sib;
28040 die->die_child = prev;
28041 }
28042 c->die_sib = NULL;
28043 mark_removed (c);
28044 return;
28045 }
28046 else
28047 {
28048 next = c->die_sib;
28049 c->die_sib = NULL;
28050 mark_removed (c);
28051 }
28052
28053 if (c != prev->die_sib)
28054 prev->die_sib = c;
28055 prune_unused_types_prune (c);
28056 } while (c != die->die_child);
28057 }
28058
28059 /* Remove dies representing declarations that we never use. */
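/* Roughly, this runs in three phases: mark every reachable DIE starting
   from the compilation unit, the limbo list and the comdat type roots
   (prune_unused_types_walk / prune_unused_types_mark), then prune the
   unmarked children (prune_unused_types_prune), and finally clear the
   marks again (prune_unmark_dies).  */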
28060
28061 static void
28062 prune_unused_types (void)
28063 {
28064 unsigned int i;
28065 limbo_die_node *node;
28066 comdat_type_node *ctnode;
28067 pubname_entry *pub;
28068 dw_die_ref base_type;
28069
28070 #if ENABLE_ASSERT_CHECKING
28071 /* All the marks should already be clear. */
28072 verify_marks_clear (comp_unit_die ());
28073 for (node = limbo_die_list; node; node = node->next)
28074 verify_marks_clear (node->die);
28075 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28076 verify_marks_clear (ctnode->root_die);
28077 #endif /* ENABLE_ASSERT_CHECKING */
28078
28079 /* Mark types that are used in global variables. */
28080 premark_types_used_by_global_vars ();
28081
28082 /* Set the mark on nodes that are actually used. */
28083 prune_unused_types_walk (comp_unit_die ());
28084 for (node = limbo_die_list; node; node = node->next)
28085 prune_unused_types_walk (node->die);
28086 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28087 {
28088 prune_unused_types_walk (ctnode->root_die);
28089 prune_unused_types_mark (ctnode->type_die, 1);
28090 }
28091
28092 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
28093 are unusual in that they are pubnames that are the children of pubtypes.
28094 They should only be marked via their parent DW_TAG_enumeration_type die,
28095 not as roots in themselves. */
28096 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
28097 if (pub->die->die_tag != DW_TAG_enumerator)
28098 prune_unused_types_mark (pub->die, 1);
28099 for (i = 0; base_types.iterate (i, &base_type); i++)
28100 prune_unused_types_mark (base_type, 1);
28101
28102 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
28103 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
28104 callees). */
28105 cgraph_node *cnode;
28106 FOR_EACH_FUNCTION (cnode)
28107 if (cnode->referred_to_p (false))
28108 {
28109 dw_die_ref die = lookup_decl_die (cnode->decl);
28110 if (die == NULL || die->die_mark)
28111 continue;
28112 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
28113 if (e->caller != cnode
28114 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
28115 {
28116 prune_unused_types_mark (die, 1);
28117 break;
28118 }
28119 }
28120
28121 if (debug_str_hash)
28122 debug_str_hash->empty ();
28123 if (skeleton_debug_str_hash)
28124 skeleton_debug_str_hash->empty ();
28125 prune_unused_types_prune (comp_unit_die ());
28126 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
28127 {
28128 node = *pnode;
28129 if (!node->die->die_mark)
28130 *pnode = node->next;
28131 else
28132 {
28133 prune_unused_types_prune (node->die);
28134 pnode = &node->next;
28135 }
28136 }
28137 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28138 prune_unused_types_prune (ctnode->root_die);
28139
28140 /* Leave the marks clear. */
28141 prune_unmark_dies (comp_unit_die ());
28142 for (node = limbo_die_list; node; node = node->next)
28143 prune_unmark_dies (node->die);
28144 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28145 prune_unmark_dies (ctnode->root_die);
28146 }
28147
28148 /* Helpers to manipulate hash table of comdat type units. */
28149
28150 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
28151 {
28152 static inline hashval_t hash (const comdat_type_node *);
28153 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
28154 };
28155
28156 inline hashval_t
28157 comdat_type_hasher::hash (const comdat_type_node *type_node)
28158 {
28159 hashval_t h;
28160 memcpy (&h, type_node->signature, sizeof (h));
28161 return h;
28162 }
28163
28164 inline bool
28165 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
28166 const comdat_type_node *type_node_2)
28167 {
28168 return (! memcmp (type_node_1->signature, type_node_2->signature,
28169 DWARF_TYPE_SIGNATURE_SIZE));
28170 }
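/* Note (inferred from the definitions above): the hash is simply the
   leading sizeof (hashval_t) bytes of the DWARF_TYPE_SIGNATURE_SIZE-byte
   signature, which is adequate because the signature is itself derived
   from a checksum of the type and so is already well distributed.  */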
28171
28172 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
28173    to the location it would have been added at, had we known its
28174    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
28175 probably improve compactness of debug info, removing equivalent
28176 abbrevs, and hide any differences caused by deferring the
28177 computation of the assembler name, triggered by e.g. PCH. */
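/* A sketch of the effect (hypothetical attribute order): if the
   attributes were added as [DW_AT_name, DW_AT_decl_file, DW_AT_decl_line,
   DW_AT_type, DW_AT_linkage_name], the linkage name is moved up to sit
   right after DW_AT_decl_line, where it would have landed had the
   assembler name been known from the start.  */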
28178
28179 static inline void
28180 move_linkage_attr (dw_die_ref die)
28181 {
28182 unsigned ix = vec_safe_length (die->die_attr);
28183 dw_attr_node linkage = (*die->die_attr)[ix - 1];
28184
28185 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
28186 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
28187
28188 while (--ix > 0)
28189 {
28190 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
28191
28192 if (prev->dw_attr == DW_AT_decl_line
28193 || prev->dw_attr == DW_AT_decl_column
28194 || prev->dw_attr == DW_AT_name)
28195 break;
28196 }
28197
28198 if (ix != vec_safe_length (die->die_attr) - 1)
28199 {
28200 die->die_attr->pop ();
28201 die->die_attr->quick_insert (ix, linkage);
28202 }
28203 }
28204
28205 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28206 referenced from typed stack ops and count how often they are used. */
28207
28208 static void
28209 mark_base_types (dw_loc_descr_ref loc)
28210 {
28211 dw_die_ref base_type = NULL;
28212
28213 for (; loc; loc = loc->dw_loc_next)
28214 {
28215 switch (loc->dw_loc_opc)
28216 {
28217 case DW_OP_regval_type:
28218 case DW_OP_deref_type:
28219 case DW_OP_GNU_regval_type:
28220 case DW_OP_GNU_deref_type:
28221 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28222 break;
28223 case DW_OP_convert:
28224 case DW_OP_reinterpret:
28225 case DW_OP_GNU_convert:
28226 case DW_OP_GNU_reinterpret:
28227 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28228 continue;
28229 /* FALLTHRU */
28230 case DW_OP_const_type:
28231 case DW_OP_GNU_const_type:
28232 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28233 break;
28234 case DW_OP_entry_value:
28235 case DW_OP_GNU_entry_value:
28236 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28237 continue;
28238 default:
28239 continue;
28240 }
28241 gcc_assert (base_type->die_parent == comp_unit_die ());
28242 if (base_type->die_mark)
28243 base_type->die_mark++;
28244 else
28245 {
28246 base_types.safe_push (base_type);
28247 base_type->die_mark = 1;
28248 }
28249 }
28250 }
28251
28252 /* Comparison function for sorting marked base types. */
28253
28254 static int
28255 base_type_cmp (const void *x, const void *y)
28256 {
28257 dw_die_ref dx = *(const dw_die_ref *) x;
28258 dw_die_ref dy = *(const dw_die_ref *) y;
28259 unsigned int byte_size1, byte_size2;
28260 unsigned int encoding1, encoding2;
28261 unsigned int align1, align2;
28262 if (dx->die_mark > dy->die_mark)
28263 return -1;
28264 if (dx->die_mark < dy->die_mark)
28265 return 1;
28266 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28267 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28268 if (byte_size1 < byte_size2)
28269 return 1;
28270 if (byte_size1 > byte_size2)
28271 return -1;
28272 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28273 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28274 if (encoding1 < encoding2)
28275 return 1;
28276 if (encoding1 > encoding2)
28277 return -1;
28278 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28279 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28280 if (align1 < align2)
28281 return 1;
28282 if (align1 > align2)
28283 return -1;
28284 return 0;
28285 }
28286
28287 /* Move base types marked by mark_base_types as early as possible
28288 in the CU, sorted by decreasing usage count both to make the
28289 uleb128 references as small as possible and to make sure they
28290 will have die_offset already computed by calc_die_sizes when
28291    sizes of typed stack loc ops are computed.  */
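/* Illustrative example (assumed counts): typed stack ops such as
   DW_OP_convert reference their base type DIE by a uleb128-encoded
   offset from the start of the CU, so a base type used 50 times gains
   more from an early, small offset than one used twice; sorting by
   decreasing die_mark keeps the most-referenced types cheapest to
   encode.  */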
28292
28293 static void
28294 move_marked_base_types (void)
28295 {
28296 unsigned int i;
28297 dw_die_ref base_type, die, c;
28298
28299 if (base_types.is_empty ())
28300 return;
28301
28302 /* Sort by decreasing usage count, they will be added again in that
28303 order later on. */
28304 base_types.qsort (base_type_cmp);
28305 die = comp_unit_die ();
28306 c = die->die_child;
28307 do
28308 {
28309 dw_die_ref prev = c;
28310 c = c->die_sib;
28311 while (c->die_mark)
28312 {
28313 remove_child_with_prev (c, prev);
28314 /* As base types got marked, there must be at least
28315 one node other than DW_TAG_base_type. */
28316 gcc_assert (die->die_child != NULL);
28317 c = prev->die_sib;
28318 }
28319 }
28320 while (c != die->die_child);
28321 gcc_assert (die->die_child);
28322 c = die->die_child;
28323 for (i = 0; base_types.iterate (i, &base_type); i++)
28324 {
28325 base_type->die_mark = 0;
28326 base_type->die_sib = c->die_sib;
28327 c->die_sib = base_type;
28328 c = base_type;
28329 }
28330 }
28331
28332 /* Helper function for resolve_addr, attempt to resolve
28333 one CONST_STRING, return true if successful. Similarly verify that
28334 SYMBOL_REFs refer to variables emitted in the current CU. */
28335
28336 static bool
28337 resolve_one_addr (rtx *addr)
28338 {
28339 rtx rtl = *addr;
28340
28341 if (GET_CODE (rtl) == CONST_STRING)
28342 {
28343 size_t len = strlen (XSTR (rtl, 0)) + 1;
28344 tree t = build_string (len, XSTR (rtl, 0));
28345 tree tlen = size_int (len - 1);
28346 TREE_TYPE (t)
28347 = build_array_type (char_type_node, build_index_type (tlen));
28348 rtl = lookup_constant_def (t);
28349 if (!rtl || !MEM_P (rtl))
28350 return false;
28351 rtl = XEXP (rtl, 0);
28352 if (GET_CODE (rtl) == SYMBOL_REF
28353 && SYMBOL_REF_DECL (rtl)
28354 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28355 return false;
28356 vec_safe_push (used_rtx_array, rtl);
28357 *addr = rtl;
28358 return true;
28359 }
28360
28361 if (GET_CODE (rtl) == SYMBOL_REF
28362 && SYMBOL_REF_DECL (rtl))
28363 {
28364 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28365 {
28366 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28367 return false;
28368 }
28369 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28370 return false;
28371 }
28372
28373 if (GET_CODE (rtl) == CONST)
28374 {
28375 subrtx_ptr_iterator::array_type array;
28376 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28377 if (!resolve_one_addr (*iter))
28378 return false;
28379 }
28380
28381 return true;
28382 }
28383
28384 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
28385 if possible, and create DW_TAG_dwarf_procedure that can be referenced
28386 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
28387
28388 static rtx
28389 string_cst_pool_decl (tree t)
28390 {
28391 rtx rtl = output_constant_def (t, 1);
28392 unsigned char *array;
28393 dw_loc_descr_ref l;
28394 tree decl;
28395 size_t len;
28396 dw_die_ref ref;
28397
28398 if (!rtl || !MEM_P (rtl))
28399 return NULL_RTX;
28400 rtl = XEXP (rtl, 0);
28401 if (GET_CODE (rtl) != SYMBOL_REF
28402 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28403 return NULL_RTX;
28404
28405 decl = SYMBOL_REF_DECL (rtl);
28406 if (!lookup_decl_die (decl))
28407 {
28408 len = TREE_STRING_LENGTH (t);
28409 vec_safe_push (used_rtx_array, rtl);
28410 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28411 array = ggc_vec_alloc<unsigned char> (len);
28412 memcpy (array, TREE_STRING_POINTER (t), len);
28413 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28414 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28415 l->dw_loc_oprnd2.v.val_vec.length = len;
28416 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28417 l->dw_loc_oprnd2.v.val_vec.array = array;
28418 add_AT_loc (ref, DW_AT_location, l);
28419 equate_decl_number_to_die (decl, ref);
28420 }
28421 return rtl;
28422 }
28423
28424 /* Helper function of resolve_addr_in_expr. LOC is
28425 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28426 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28427 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
28428 with DW_OP_implicit_pointer if possible
28429    and return true; if unsuccessful, return false.  */
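/* Illustrative example (hypothetical operands): an expression of the
   form DW_OP_addr <sym> DW_OP_stack_value, where <sym> was never emitted
   but the variable it names has a DIE with DW_AT_location or
   DW_AT_const_value, becomes DW_OP_implicit_pointer <that DIE> <offset>.  */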
28430
28431 static bool
28432 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28433 {
28434 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28435 HOST_WIDE_INT offset = 0;
28436 dw_die_ref ref = NULL;
28437 tree decl;
28438
28439 if (GET_CODE (rtl) == CONST
28440 && GET_CODE (XEXP (rtl, 0)) == PLUS
28441 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28442 {
28443 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28444 rtl = XEXP (XEXP (rtl, 0), 0);
28445 }
28446 if (GET_CODE (rtl) == CONST_STRING)
28447 {
28448 size_t len = strlen (XSTR (rtl, 0)) + 1;
28449 tree t = build_string (len, XSTR (rtl, 0));
28450 tree tlen = size_int (len - 1);
28451
28452 TREE_TYPE (t)
28453 = build_array_type (char_type_node, build_index_type (tlen));
28454 rtl = string_cst_pool_decl (t);
28455 if (!rtl)
28456 return false;
28457 }
28458 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28459 {
28460 decl = SYMBOL_REF_DECL (rtl);
28461 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28462 {
28463 ref = lookup_decl_die (decl);
28464 if (ref && (get_AT (ref, DW_AT_location)
28465 || get_AT (ref, DW_AT_const_value)))
28466 {
28467 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28468 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28469 loc->dw_loc_oprnd1.val_entry = NULL;
28470 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28471 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28472 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28473 loc->dw_loc_oprnd2.v.val_int = offset;
28474 return true;
28475 }
28476 }
28477 }
28478 return false;
28479 }
28480
28481 /* Helper function for resolve_addr, handle one location
28482 expression, return false if at least one CONST_STRING or SYMBOL_REF in
28483 the location list couldn't be resolved. */
28484
28485 static bool
28486 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
28487 {
28488 dw_loc_descr_ref keep = NULL;
28489 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28490 switch (loc->dw_loc_opc)
28491 {
28492 case DW_OP_addr:
28493 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28494 {
28495 if ((prev == NULL
28496 || prev->dw_loc_opc == DW_OP_piece
28497 || prev->dw_loc_opc == DW_OP_bit_piece)
28498 && loc->dw_loc_next
28499 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28500 && (!dwarf_strict || dwarf_version >= 5)
28501 && optimize_one_addr_into_implicit_ptr (loc))
28502 break;
28503 return false;
28504 }
28505 break;
28506 case DW_OP_GNU_addr_index:
28507 case DW_OP_GNU_const_index:
28508 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28509 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28510 {
28511 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28512 if (!resolve_one_addr (&rtl))
28513 return false;
28514 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28515 loc->dw_loc_oprnd1.val_entry
28516 = add_addr_table_entry (rtl, ate_kind_rtx);
28517 }
28518 break;
28519 case DW_OP_const4u:
28520 case DW_OP_const8u:
28521 if (loc->dtprel
28522 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28523 return false;
28524 break;
28525 case DW_OP_plus_uconst:
28526 if (size_of_loc_descr (loc)
28527 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28528 + 1
28529 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28530 {
28531 dw_loc_descr_ref repl
28532 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28533 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28534 add_loc_descr (&repl, loc->dw_loc_next);
28535 *loc = *repl;
28536 }
28537 break;
28538 case DW_OP_implicit_value:
28539 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28540 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28541 return false;
28542 break;
28543 case DW_OP_implicit_pointer:
28544 case DW_OP_GNU_implicit_pointer:
28545 case DW_OP_GNU_parameter_ref:
28546 case DW_OP_GNU_variable_value:
28547 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28548 {
28549 dw_die_ref ref
28550 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28551 if (ref == NULL)
28552 return false;
28553 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28554 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28555 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28556 }
28557 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
28558 {
28559 if (prev == NULL
28560 && loc->dw_loc_next == NULL
28561 && AT_class (a) == dw_val_class_loc)
28562 switch (a->dw_attr)
28563 {
28564 /* Following attributes allow both exprloc and reference,
28565 so if the whole expression is DW_OP_GNU_variable_value
28566 alone we could transform it into reference. */
28567 case DW_AT_byte_size:
28568 case DW_AT_bit_size:
28569 case DW_AT_lower_bound:
28570 case DW_AT_upper_bound:
28571 case DW_AT_bit_stride:
28572 case DW_AT_count:
28573 case DW_AT_allocated:
28574 case DW_AT_associated:
28575 case DW_AT_byte_stride:
28576 a->dw_attr_val.val_class = dw_val_class_die_ref;
28577 a->dw_attr_val.val_entry = NULL;
28578 a->dw_attr_val.v.val_die_ref.die
28579 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28580 a->dw_attr_val.v.val_die_ref.external = 0;
28581 return true;
28582 default:
28583 break;
28584 }
28585 if (dwarf_strict)
28586 return false;
28587 }
28588 break;
28589 case DW_OP_const_type:
28590 case DW_OP_regval_type:
28591 case DW_OP_deref_type:
28592 case DW_OP_convert:
28593 case DW_OP_reinterpret:
28594 case DW_OP_GNU_const_type:
28595 case DW_OP_GNU_regval_type:
28596 case DW_OP_GNU_deref_type:
28597 case DW_OP_GNU_convert:
28598 case DW_OP_GNU_reinterpret:
28599 while (loc->dw_loc_next
28600 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28601 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28602 {
28603 dw_die_ref base1, base2;
28604 unsigned enc1, enc2, size1, size2;
28605 if (loc->dw_loc_opc == DW_OP_regval_type
28606 || loc->dw_loc_opc == DW_OP_deref_type
28607 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28608 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28609 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28610 else if (loc->dw_loc_oprnd1.val_class
28611 == dw_val_class_unsigned_const)
28612 break;
28613 else
28614 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28615 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28616 == dw_val_class_unsigned_const)
28617 break;
28618 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28619 gcc_assert (base1->die_tag == DW_TAG_base_type
28620 && base2->die_tag == DW_TAG_base_type);
28621 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28622 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28623 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28624 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28625 if (size1 == size2
28626 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28627 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28628 && loc != keep)
28629 || enc1 == enc2))
28630 {
28631 /* Optimize away next DW_OP_convert after
28632 adjusting LOC's base type die reference. */
28633 if (loc->dw_loc_opc == DW_OP_regval_type
28634 || loc->dw_loc_opc == DW_OP_deref_type
28635 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28636 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28637 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28638 else
28639 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28640 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28641 continue;
28642 }
28643 /* Don't change integer DW_OP_convert after e.g. floating
28644 point typed stack entry. */
28645 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28646 keep = loc->dw_loc_next;
28647 break;
28648 }
28649 break;
28650 default:
28651 break;
28652 }
28653 return true;
28654 }
28655
28656 /* Helper function of resolve_addr. DIE had DW_AT_location of
28657 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
28658 and DW_OP_addr couldn't be resolved. resolve_addr has already
28659 removed the DW_AT_location attribute. This function attempts to
28660 add a new DW_AT_location attribute with DW_OP_implicit_pointer
28661 to it or DW_AT_const_value attribute, if possible. */
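/* An example of the case handled here (illustrative declarations):

     static int a;
     static int *p = &a;

   where p itself ends up without a memory location (e.g. it was optimized
   away), p's DIE can still receive DW_AT_location:
   DW_OP_implicit_pointer <a's DIE> 0, provided a's DIE carries
   DW_AT_location or DW_AT_const_value.  */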
28662
28663 static void
28664 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28665 {
28666 if (!VAR_P (decl)
28667 || lookup_decl_die (decl) != die
28668 || DECL_EXTERNAL (decl)
28669 || !TREE_STATIC (decl)
28670 || DECL_INITIAL (decl) == NULL_TREE
28671 || DECL_P (DECL_INITIAL (decl))
28672 || get_AT (die, DW_AT_const_value))
28673 return;
28674
28675 tree init = DECL_INITIAL (decl);
28676 HOST_WIDE_INT offset = 0;
28677 /* For variables that have been optimized away and thus
28678 don't have a memory location, see if we can emit
28679 DW_AT_const_value instead. */
28680 if (tree_add_const_value_attribute (die, init))
28681 return;
28682 if (dwarf_strict && dwarf_version < 5)
28683 return;
28684 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28685 and ADDR_EXPR refers to a decl that has DW_AT_location or
28686 DW_AT_const_value (but isn't addressable, otherwise
28687 resolving the original DW_OP_addr wouldn't fail), see if
28688 we can add DW_OP_implicit_pointer. */
28689 STRIP_NOPS (init);
28690 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28691 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28692 {
28693 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28694 init = TREE_OPERAND (init, 0);
28695 STRIP_NOPS (init);
28696 }
28697 if (TREE_CODE (init) != ADDR_EXPR)
28698 return;
28699 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28700 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28701 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28702 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28703 && TREE_OPERAND (init, 0) != decl))
28704 {
28705 dw_die_ref ref;
28706 dw_loc_descr_ref l;
28707
28708 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28709 {
28710 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28711 if (!rtl)
28712 return;
28713 decl = SYMBOL_REF_DECL (rtl);
28714 }
28715 else
28716 decl = TREE_OPERAND (init, 0);
28717 ref = lookup_decl_die (decl);
28718 if (ref == NULL
28719 || (!get_AT (ref, DW_AT_location)
28720 && !get_AT (ref, DW_AT_const_value)))
28721 return;
28722 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28723 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28724 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28725 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28726 add_AT_loc (die, DW_AT_location, l);
28727 }
28728 }
28729
28730 /* Return NULL if L is a valid DWARF expression, or the first op that is
28731    not a valid DWARF expression operation.  */
28732
28733 static dw_loc_descr_ref
28734 non_dwarf_expression (dw_loc_descr_ref l)
28735 {
28736 while (l)
28737 {
28738 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28739 return l;
28740 switch (l->dw_loc_opc)
28741 {
28742 case DW_OP_regx:
28743 case DW_OP_implicit_value:
28744 case DW_OP_stack_value:
28745 case DW_OP_implicit_pointer:
28746 case DW_OP_GNU_implicit_pointer:
28747 case DW_OP_GNU_parameter_ref:
28748 case DW_OP_piece:
28749 case DW_OP_bit_piece:
28750 return l;
28751 default:
28752 break;
28753 }
28754 l = l->dw_loc_next;
28755 }
28756 return NULL;
28757 }
28758
28759 /* Return adjusted copy of EXPR:
28760 If it is empty DWARF expression, return it.
28761 If it is valid non-empty DWARF expression,
28762 return copy of EXPR with DW_OP_deref appended to it.
28763 If it is DWARF expression followed by DW_OP_reg{N,x}, return
28764 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
28765 If it is DWARF expression followed by DW_OP_stack_value, return
28766 copy of the DWARF expression without anything appended.
28767 Otherwise, return NULL. */
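/* A few illustrative cases (sketches, assuming well-formed input
   expressions):
     DW_OP_fbreg -16                      -> DW_OP_fbreg -16; DW_OP_deref
     DW_OP_reg3                           -> DW_OP_breg3 0
     DW_OP_fbreg -16; DW_OP_stack_value   -> DW_OP_fbreg -16
     DW_OP_fbreg -16; DW_OP_piece 4       -> NULL (not handled).  */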
28768
28769 static dw_loc_descr_ref
28770 copy_deref_exprloc (dw_loc_descr_ref expr)
28771 {
28772 dw_loc_descr_ref tail = NULL;
28773
28774 if (expr == NULL)
28775 return NULL;
28776
28777 dw_loc_descr_ref l = non_dwarf_expression (expr);
28778 if (l && l->dw_loc_next)
28779 return NULL;
28780
28781 if (l)
28782 {
28783 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28784 tail = new_loc_descr ((enum dwarf_location_atom)
28785 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28786 0, 0);
28787 else
28788 switch (l->dw_loc_opc)
28789 {
28790 case DW_OP_regx:
28791 tail = new_loc_descr (DW_OP_bregx,
28792 l->dw_loc_oprnd1.v.val_unsigned, 0);
28793 break;
28794 case DW_OP_stack_value:
28795 break;
28796 default:
28797 return NULL;
28798 }
28799 }
28800 else
28801 tail = new_loc_descr (DW_OP_deref, 0, 0);
28802
28803 dw_loc_descr_ref ret = NULL, *p = &ret;
28804 while (expr != l)
28805 {
28806 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
28807 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
28808 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
28809 p = &(*p)->dw_loc_next;
28810 expr = expr->dw_loc_next;
28811 }
28812 *p = tail;
28813 return ret;
28814 }
28815
28816 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
28817 reference to a variable or argument, adjust it if needed and return:
28818    -1 if the DW_AT_string_length attribute and any present
28819    DW_AT_{string_length_,}byte_size attribute should be removed,
28820    0 if the attribute should be kept, perhaps with minor modifications (no need to rescan), or
28821    1 if the attribute has been successfully adjusted.  */
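/* Sketch of the common rewrites performed below (hypothetical operands):
   DW_OP_GNU_variable_value <len DIE> DW_OP_stack_value can become
   DW_OP_call4 <len DIE>, and DW_OP_GNU_variable_value <len DIE> alone
   can become DW_OP_call4 <len DIE> DW_OP_deref, provided the referenced
   DIE's location is itself a plain DWARF expression.  */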
28822
28823 static int
28824 optimize_string_length (dw_attr_node *a)
28825 {
28826 dw_loc_descr_ref l = AT_loc (a), lv;
28827 dw_die_ref die;
28828 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28829 {
28830 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
28831 die = lookup_decl_die (decl);
28832 if (die)
28833 {
28834 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28835 l->dw_loc_oprnd1.v.val_die_ref.die = die;
28836 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28837 }
28838 else
28839 return -1;
28840 }
28841 else
28842 die = l->dw_loc_oprnd1.v.val_die_ref.die;
28843
28844 /* DWARF5 allows reference class, so we can then reference the DIE.
28845 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
28846 if (l->dw_loc_next != NULL && dwarf_version >= 5)
28847 {
28848 a->dw_attr_val.val_class = dw_val_class_die_ref;
28849 a->dw_attr_val.val_entry = NULL;
28850 a->dw_attr_val.v.val_die_ref.die = die;
28851 a->dw_attr_val.v.val_die_ref.external = 0;
28852 return 0;
28853 }
28854
28855 dw_attr_node *av = get_AT (die, DW_AT_location);
28856 dw_loc_list_ref d;
28857 bool non_dwarf_expr = false;
28858
28859 if (av == NULL)
28860 return dwarf_strict ? -1 : 0;
28861 switch (AT_class (av))
28862 {
28863 case dw_val_class_loc_list:
28864 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
28865 if (d->expr && non_dwarf_expression (d->expr))
28866 non_dwarf_expr = true;
28867 break;
28868 case dw_val_class_loc:
28869 lv = AT_loc (av);
28870 if (lv == NULL)
28871 return dwarf_strict ? -1 : 0;
28872 if (non_dwarf_expression (lv))
28873 non_dwarf_expr = true;
28874 break;
28875 default:
28876 return dwarf_strict ? -1 : 0;
28877 }
28878
28879 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
28880 into DW_OP_call4 or DW_OP_GNU_variable_value into
28881 DW_OP_call4 DW_OP_deref, do so. */
28882 if (!non_dwarf_expr
28883 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
28884 {
28885 l->dw_loc_opc = DW_OP_call4;
28886 if (l->dw_loc_next)
28887 l->dw_loc_next = NULL;
28888 else
28889 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
28890 return 0;
28891 }
28892
28893 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
28894 copy over the DW_AT_location attribute from die to a. */
28895 if (l->dw_loc_next != NULL)
28896 {
28897 a->dw_attr_val = av->dw_attr_val;
28898 return 1;
28899 }
28900
28901 dw_loc_list_ref list, *p;
28902 switch (AT_class (av))
28903 {
28904 case dw_val_class_loc_list:
28905 p = &list;
28906 list = NULL;
28907 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
28908 {
28909 lv = copy_deref_exprloc (d->expr);
28910 if (lv)
28911 {
28912 *p = new_loc_list (lv, d->begin, d->end, d->section);
28913 p = &(*p)->dw_loc_next;
28914 }
28915 else if (!dwarf_strict && d->expr)
28916 return 0;
28917 }
28918 if (list == NULL)
28919 return dwarf_strict ? -1 : 0;
28920 a->dw_attr_val.val_class = dw_val_class_loc_list;
28921 gen_llsym (list);
28922 *AT_loc_list_ptr (a) = list;
28923 return 1;
28924 case dw_val_class_loc:
28925 lv = copy_deref_exprloc (AT_loc (av));
28926 if (lv == NULL)
28927 return dwarf_strict ? -1 : 0;
28928 a->dw_attr_val.v.val_loc = lv;
28929 return 1;
28930 default:
28931 gcc_unreachable ();
28932 }
28933 }
28934
28935 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
28936 an address in .rodata section if the string literal is emitted there,
28937 or remove the containing location list or replace DW_AT_const_value
28938 with DW_AT_location and empty location expression, if it isn't found
28939 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
28940 to something that has been emitted in the current CU. */
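/* For instance (an illustrative case): a DW_AT_location consisting of
   DW_OP_addr <symbol that was optimized away> is either dropped here or,
   when the symbol's decl still has a usable DIE, rewritten into
   DW_OP_implicit_pointer by the helpers above.  */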
28941
28942 static void
28943 resolve_addr (dw_die_ref die)
28944 {
28945 dw_die_ref c;
28946 dw_attr_node *a;
28947 dw_loc_list_ref *curr, *start, loc;
28948 unsigned ix;
28949 bool remove_AT_byte_size = false;
28950
28951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28952 switch (AT_class (a))
28953 {
28954 case dw_val_class_loc_list:
28955 start = curr = AT_loc_list_ptr (a);
28956 loc = *curr;
28957 gcc_assert (loc);
28958 /* The same list can be referenced more than once. See if we have
28959 already recorded the result from a previous pass. */
28960 if (loc->replaced)
28961 *curr = loc->dw_loc_next;
28962 else if (!loc->resolved_addr)
28963 {
28964 /* As things stand, we do not expect or allow one die to
28965 reference a suffix of another die's location list chain.
28966 References must be identical or completely separate.
28967 There is therefore no need to cache the result of this
28968 pass on any list other than the first; doing so
28969 would lead to unnecessary writes. */
28970 while (*curr)
28971 {
28972 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
28973 if (!resolve_addr_in_expr (a, (*curr)->expr))
28974 {
28975 dw_loc_list_ref next = (*curr)->dw_loc_next;
28976 dw_loc_descr_ref l = (*curr)->expr;
28977
28978 if (next && (*curr)->ll_symbol)
28979 {
28980 gcc_assert (!next->ll_symbol);
28981 next->ll_symbol = (*curr)->ll_symbol;
28982 }
28983 if (dwarf_split_debug_info)
28984 remove_loc_list_addr_table_entries (l);
28985 *curr = next;
28986 }
28987 else
28988 {
28989 mark_base_types ((*curr)->expr);
28990 curr = &(*curr)->dw_loc_next;
28991 }
28992 }
28993 if (loc == *start)
28994 loc->resolved_addr = 1;
28995 else
28996 {
28997 loc->replaced = 1;
28998 loc->dw_loc_next = *start;
28999 }
29000 }
29001 if (!*start)
29002 {
29003 remove_AT (die, a->dw_attr);
29004 ix--;
29005 }
29006 break;
29007 case dw_val_class_loc:
29008 {
29009 dw_loc_descr_ref l = AT_loc (a);
29010 /* DW_OP_GNU_variable_value DW_OP_stack_value or
29011 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
29012 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
29013 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
29014 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
29015 with DW_FORM_ref referencing the same DIE as
29016 DW_OP_GNU_variable_value used to reference. */
29017 if (a->dw_attr == DW_AT_string_length
29018 && l
29019 && l->dw_loc_opc == DW_OP_GNU_variable_value
29020 && (l->dw_loc_next == NULL
29021 || (l->dw_loc_next->dw_loc_next == NULL
29022 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
29023 {
29024 switch (optimize_string_length (a))
29025 {
29026 case -1:
29027 remove_AT (die, a->dw_attr);
29028 ix--;
29029 /* If we drop DW_AT_string_length, we need to drop also
29030 DW_AT_{string_length_,}byte_size. */
29031 remove_AT_byte_size = true;
29032 continue;
29033 default:
29034 break;
29035 case 1:
29036 /* Even if we keep the optimized DW_AT_string_length,
29037 it might have changed AT_class, so process it again. */
29038 ix--;
29039 continue;
29040 }
29041 }
29042 /* For -gdwarf-2 don't attempt to optimize
29043 DW_AT_data_member_location containing
29044 DW_OP_plus_uconst - older consumers might
29045 rely on it being that op instead of a more complex,
29046 but shorter, location description. */
29047 if ((dwarf_version > 2
29048 || a->dw_attr != DW_AT_data_member_location
29049 || l == NULL
29050 || l->dw_loc_opc != DW_OP_plus_uconst
29051 || l->dw_loc_next != NULL)
29052 && !resolve_addr_in_expr (a, l))
29053 {
29054 if (dwarf_split_debug_info)
29055 remove_loc_list_addr_table_entries (l);
29056 if (l != NULL
29057 && l->dw_loc_next == NULL
29058 && l->dw_loc_opc == DW_OP_addr
29059 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
29060 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
29061 && a->dw_attr == DW_AT_location)
29062 {
29063 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
29064 remove_AT (die, a->dw_attr);
29065 ix--;
29066 optimize_location_into_implicit_ptr (die, decl);
29067 break;
29068 }
29069 if (a->dw_attr == DW_AT_string_length)
29070 /* If we drop DW_AT_string_length, we need to drop also
29071 DW_AT_{string_length_,}byte_size. */
29072 remove_AT_byte_size = true;
29073 remove_AT (die, a->dw_attr);
29074 ix--;
29075 }
29076 else
29077 mark_base_types (l);
29078 }
29079 break;
29080 case dw_val_class_addr:
29081 if (a->dw_attr == DW_AT_const_value
29082 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
29083 {
29084 if (AT_index (a) != NOT_INDEXED)
29085 remove_addr_table_entry (a->dw_attr_val.val_entry);
29086 remove_AT (die, a->dw_attr);
29087 ix--;
29088 }
29089 if ((die->die_tag == DW_TAG_call_site
29090 && a->dw_attr == DW_AT_call_origin)
29091 || (die->die_tag == DW_TAG_GNU_call_site
29092 && a->dw_attr == DW_AT_abstract_origin))
29093 {
29094 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
29095 dw_die_ref tdie = lookup_decl_die (tdecl);
29096 dw_die_ref cdie;
29097 if (tdie == NULL
29098 && DECL_EXTERNAL (tdecl)
29099 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
29100 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
29101 {
29102 dw_die_ref pdie = cdie;
29103 /* Make sure we don't add these DIEs into type units.
29104 We could emit skeleton DIEs for context (namespaces,
29105 outer structs/classes) and a skeleton DIE for the
29106 innermost context with DW_AT_signature pointing to the
29107 type unit. See PR78835. */
29108 while (pdie && pdie->die_tag != DW_TAG_type_unit)
29109 pdie = pdie->die_parent;
29110 if (pdie == NULL)
29111 {
29112 		  /* Creating a full DIE for tdecl is overly expensive and,
29113 		     at this point (the LTO phase), even wrong: it can end
29114 		     up generating new type DIEs we didn't output, and
29115 		     optimize_external_refs would then crash.  */
29116 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
29117 add_AT_flag (tdie, DW_AT_external, 1);
29118 add_AT_flag (tdie, DW_AT_declaration, 1);
29119 add_linkage_attr (tdie, tdecl);
29120 add_name_and_src_coords_attributes (tdie, tdecl, true);
29121 equate_decl_number_to_die (tdecl, tdie);
29122 }
29123 }
29124 if (tdie)
29125 {
29126 a->dw_attr_val.val_class = dw_val_class_die_ref;
29127 a->dw_attr_val.v.val_die_ref.die = tdie;
29128 a->dw_attr_val.v.val_die_ref.external = 0;
29129 }
29130 else
29131 {
29132 if (AT_index (a) != NOT_INDEXED)
29133 remove_addr_table_entry (a->dw_attr_val.val_entry);
29134 remove_AT (die, a->dw_attr);
29135 ix--;
29136 }
29137 }
29138 break;
29139 default:
29140 break;
29141 }
29142
29143 if (remove_AT_byte_size)
29144 remove_AT (die, dwarf_version >= 5
29145 ? DW_AT_string_length_byte_size
29146 : DW_AT_byte_size);
29147
29148 FOR_EACH_CHILD (die, c, resolve_addr (c));
29149 }
29150 \f
29151 /* Helper routines for optimize_location_lists.
29152    This pass tries to share identical location lists in the .debug_loc
29153    section.  */
29154
29155 /* Iteratively hash operands of LOC opcode into HSTATE. */
29156
29157 static void
29158 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
29159 {
29160 dw_val_ref val1 = &loc->dw_loc_oprnd1;
29161 dw_val_ref val2 = &loc->dw_loc_oprnd2;
29162
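  /* Hash only the operand fields that are significant for each opcode;
     opcodes without operands are handled by the default case below.  */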
29163 switch (loc->dw_loc_opc)
29164 {
29165 case DW_OP_const4u:
29166 case DW_OP_const8u:
29167 if (loc->dtprel)
29168 goto hash_addr;
29169 /* FALLTHRU */
29170 case DW_OP_const1u:
29171 case DW_OP_const1s:
29172 case DW_OP_const2u:
29173 case DW_OP_const2s:
29174 case DW_OP_const4s:
29175 case DW_OP_const8s:
29176 case DW_OP_constu:
29177 case DW_OP_consts:
29178 case DW_OP_pick:
29179 case DW_OP_plus_uconst:
29180 case DW_OP_breg0:
29181 case DW_OP_breg1:
29182 case DW_OP_breg2:
29183 case DW_OP_breg3:
29184 case DW_OP_breg4:
29185 case DW_OP_breg5:
29186 case DW_OP_breg6:
29187 case DW_OP_breg7:
29188 case DW_OP_breg8:
29189 case DW_OP_breg9:
29190 case DW_OP_breg10:
29191 case DW_OP_breg11:
29192 case DW_OP_breg12:
29193 case DW_OP_breg13:
29194 case DW_OP_breg14:
29195 case DW_OP_breg15:
29196 case DW_OP_breg16:
29197 case DW_OP_breg17:
29198 case DW_OP_breg18:
29199 case DW_OP_breg19:
29200 case DW_OP_breg20:
29201 case DW_OP_breg21:
29202 case DW_OP_breg22:
29203 case DW_OP_breg23:
29204 case DW_OP_breg24:
29205 case DW_OP_breg25:
29206 case DW_OP_breg26:
29207 case DW_OP_breg27:
29208 case DW_OP_breg28:
29209 case DW_OP_breg29:
29210 case DW_OP_breg30:
29211 case DW_OP_breg31:
29212 case DW_OP_regx:
29213 case DW_OP_fbreg:
29214 case DW_OP_piece:
29215 case DW_OP_deref_size:
29216 case DW_OP_xderef_size:
29217 hstate.add_object (val1->v.val_int);
29218 break;
29219 case DW_OP_skip:
29220 case DW_OP_bra:
29221 {
29222 int offset;
29223
29224 gcc_assert (val1->val_class == dw_val_class_loc);
29225 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
29226 hstate.add_object (offset);
29227 }
29228 break;
29229 case DW_OP_implicit_value:
29230 hstate.add_object (val1->v.val_unsigned);
29231 switch (val2->val_class)
29232 {
29233 case dw_val_class_const:
29234 hstate.add_object (val2->v.val_int);
29235 break;
29236 case dw_val_class_vec:
29237 {
29238 unsigned int elt_size = val2->v.val_vec.elt_size;
29239 unsigned int len = val2->v.val_vec.length;
29240
29241 hstate.add_int (elt_size);
29242 hstate.add_int (len);
29243 hstate.add (val2->v.val_vec.array, len * elt_size);
29244 }
29245 break;
29246 case dw_val_class_const_double:
29247 hstate.add_object (val2->v.val_double.low);
29248 hstate.add_object (val2->v.val_double.high);
29249 break;
29250 case dw_val_class_wide_int:
29251 hstate.add (val2->v.val_wide->get_val (),
29252 get_full_len (*val2->v.val_wide)
29253 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29254 break;
29255 case dw_val_class_addr:
29256 inchash::add_rtx (val2->v.val_addr, hstate);
29257 break;
29258 default:
29259 gcc_unreachable ();
29260 }
29261 break;
29262 case DW_OP_bregx:
29263 case DW_OP_bit_piece:
29264 hstate.add_object (val1->v.val_int);
29265 hstate.add_object (val2->v.val_int);
29266 break;
29267 case DW_OP_addr:
29268 hash_addr:
29269 if (loc->dtprel)
29270 {
29271 unsigned char dtprel = 0xd1;
29272 hstate.add_object (dtprel);
29273 }
29274 inchash::add_rtx (val1->v.val_addr, hstate);
29275 break;
29276 case DW_OP_GNU_addr_index:
29277 case DW_OP_GNU_const_index:
29278 {
29279 if (loc->dtprel)
29280 {
29281 unsigned char dtprel = 0xd1;
29282 hstate.add_object (dtprel);
29283 }
29284 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29285 }
29286 break;
29287 case DW_OP_implicit_pointer:
29288 case DW_OP_GNU_implicit_pointer:
29289 hstate.add_int (val2->v.val_int);
29290 break;
29291 case DW_OP_entry_value:
29292 case DW_OP_GNU_entry_value:
29293 hstate.add_object (val1->v.val_loc);
29294 break;
29295 case DW_OP_regval_type:
29296 case DW_OP_deref_type:
29297 case DW_OP_GNU_regval_type:
29298 case DW_OP_GNU_deref_type:
29299 {
29300 unsigned int byte_size
29301 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29302 unsigned int encoding
29303 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29304 hstate.add_object (val1->v.val_int);
29305 hstate.add_object (byte_size);
29306 hstate.add_object (encoding);
29307 }
29308 break;
29309 case DW_OP_convert:
29310 case DW_OP_reinterpret:
29311 case DW_OP_GNU_convert:
29312 case DW_OP_GNU_reinterpret:
29313 if (val1->val_class == dw_val_class_unsigned_const)
29314 {
29315 hstate.add_object (val1->v.val_unsigned);
29316 break;
29317 }
29318 /* FALLTHRU */
29319 case DW_OP_const_type:
29320 case DW_OP_GNU_const_type:
29321 {
29322 unsigned int byte_size
29323 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29324 unsigned int encoding
29325 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29326 hstate.add_object (byte_size);
29327 hstate.add_object (encoding);
29328 if (loc->dw_loc_opc != DW_OP_const_type
29329 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29330 break;
29331 hstate.add_object (val2->val_class);
29332 switch (val2->val_class)
29333 {
29334 case dw_val_class_const:
29335 hstate.add_object (val2->v.val_int);
29336 break;
29337 case dw_val_class_vec:
29338 {
29339 unsigned int elt_size = val2->v.val_vec.elt_size;
29340 unsigned int len = val2->v.val_vec.length;
29341
29342 hstate.add_object (elt_size);
29343 hstate.add_object (len);
29344 hstate.add (val2->v.val_vec.array, len * elt_size);
29345 }
29346 break;
29347 case dw_val_class_const_double:
29348 hstate.add_object (val2->v.val_double.low);
29349 hstate.add_object (val2->v.val_double.high);
29350 break;
29351 case dw_val_class_wide_int:
29352 hstate.add (val2->v.val_wide->get_val (),
29353 get_full_len (*val2->v.val_wide)
29354 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29355 break;
29356 default:
29357 gcc_unreachable ();
29358 }
29359 }
29360 break;
29361
29362 default:
29363 /* Other codes have no operands. */
29364 break;
29365 }
29366 }
29367
29368 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29369
29370 static inline void
29371 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29372 {
29373 dw_loc_descr_ref l;
29374 bool sizes_computed = false;
29375 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
29376 size_of_locs (loc);
29377
29378 for (l = loc; l != NULL; l = l->dw_loc_next)
29379 {
29380 enum dwarf_location_atom opc = l->dw_loc_opc;
29381 hstate.add_object (opc);
29382 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29383 {
29384 size_of_locs (loc);
29385 sizes_computed = true;
29386 }
29387 hash_loc_operands (l, hstate);
29388 }
29389 }
29390
29391 /* Compute hash of the whole location list LIST_HEAD. */
29392
29393 static inline void
29394 hash_loc_list (dw_loc_list_ref list_head)
29395 {
29396 dw_loc_list_ref curr = list_head;
29397 inchash::hash hstate;
29398
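  /* Hash the begin/end labels, the section (if any) and the location
     expression of every entry in the list.  */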
29399 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29400 {
29401 hstate.add (curr->begin, strlen (curr->begin) + 1);
29402 hstate.add (curr->end, strlen (curr->end) + 1);
29403 if (curr->section)
29404 hstate.add (curr->section, strlen (curr->section) + 1);
29405 hash_locs (curr->expr, hstate);
29406 }
29407 list_head->hash = hstate.end ();
29408 }
29409
29410 /* Return true if X and Y opcodes have the same operands. */
29411
29412 static inline bool
29413 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29414 {
29415 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29416 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29417 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29418 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29419
29420 switch (x->dw_loc_opc)
29421 {
29422 case DW_OP_const4u:
29423 case DW_OP_const8u:
29424 if (x->dtprel)
29425 goto hash_addr;
29426 /* FALLTHRU */
29427 case DW_OP_const1u:
29428 case DW_OP_const1s:
29429 case DW_OP_const2u:
29430 case DW_OP_const2s:
29431 case DW_OP_const4s:
29432 case DW_OP_const8s:
29433 case DW_OP_constu:
29434 case DW_OP_consts:
29435 case DW_OP_pick:
29436 case DW_OP_plus_uconst:
29437 case DW_OP_breg0:
29438 case DW_OP_breg1:
29439 case DW_OP_breg2:
29440 case DW_OP_breg3:
29441 case DW_OP_breg4:
29442 case DW_OP_breg5:
29443 case DW_OP_breg6:
29444 case DW_OP_breg7:
29445 case DW_OP_breg8:
29446 case DW_OP_breg9:
29447 case DW_OP_breg10:
29448 case DW_OP_breg11:
29449 case DW_OP_breg12:
29450 case DW_OP_breg13:
29451 case DW_OP_breg14:
29452 case DW_OP_breg15:
29453 case DW_OP_breg16:
29454 case DW_OP_breg17:
29455 case DW_OP_breg18:
29456 case DW_OP_breg19:
29457 case DW_OP_breg20:
29458 case DW_OP_breg21:
29459 case DW_OP_breg22:
29460 case DW_OP_breg23:
29461 case DW_OP_breg24:
29462 case DW_OP_breg25:
29463 case DW_OP_breg26:
29464 case DW_OP_breg27:
29465 case DW_OP_breg28:
29466 case DW_OP_breg29:
29467 case DW_OP_breg30:
29468 case DW_OP_breg31:
29469 case DW_OP_regx:
29470 case DW_OP_fbreg:
29471 case DW_OP_piece:
29472 case DW_OP_deref_size:
29473 case DW_OP_xderef_size:
29474 return valx1->v.val_int == valy1->v.val_int;
29475 case DW_OP_skip:
29476 case DW_OP_bra:
29477 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29478 can cause irrelevant differences in dw_loc_addr. */
29479 gcc_assert (valx1->val_class == dw_val_class_loc
29480 && valy1->val_class == dw_val_class_loc
29481 && (dwarf_split_debug_info
29482 || x->dw_loc_addr == y->dw_loc_addr));
29483 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29484 case DW_OP_implicit_value:
29485 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29486 || valx2->val_class != valy2->val_class)
29487 return false;
29488 switch (valx2->val_class)
29489 {
29490 case dw_val_class_const:
29491 return valx2->v.val_int == valy2->v.val_int;
29492 case dw_val_class_vec:
29493 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29494 && valx2->v.val_vec.length == valy2->v.val_vec.length
29495 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29496 valx2->v.val_vec.elt_size
29497 * valx2->v.val_vec.length) == 0;
29498 case dw_val_class_const_double:
29499 return valx2->v.val_double.low == valy2->v.val_double.low
29500 && valx2->v.val_double.high == valy2->v.val_double.high;
29501 case dw_val_class_wide_int:
29502 return *valx2->v.val_wide == *valy2->v.val_wide;
29503 case dw_val_class_addr:
29504 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29505 default:
29506 gcc_unreachable ();
29507 }
29508 case DW_OP_bregx:
29509 case DW_OP_bit_piece:
29510 return valx1->v.val_int == valy1->v.val_int
29511 && valx2->v.val_int == valy2->v.val_int;
29512 case DW_OP_addr:
29513 hash_addr:
29514 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29515 case DW_OP_GNU_addr_index:
29516 case DW_OP_GNU_const_index:
29517 {
29518 rtx ax1 = valx1->val_entry->addr.rtl;
29519 rtx ay1 = valy1->val_entry->addr.rtl;
29520 return rtx_equal_p (ax1, ay1);
29521 }
29522 case DW_OP_implicit_pointer:
29523 case DW_OP_GNU_implicit_pointer:
29524 return valx1->val_class == dw_val_class_die_ref
29525 && valx1->val_class == valy1->val_class
29526 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29527 && valx2->v.val_int == valy2->v.val_int;
29528 case DW_OP_entry_value:
29529 case DW_OP_GNU_entry_value:
29530 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29531 case DW_OP_const_type:
29532 case DW_OP_GNU_const_type:
29533 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29534 || valx2->val_class != valy2->val_class)
29535 return false;
29536 switch (valx2->val_class)
29537 {
29538 case dw_val_class_const:
29539 return valx2->v.val_int == valy2->v.val_int;
29540 case dw_val_class_vec:
29541 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29542 && valx2->v.val_vec.length == valy2->v.val_vec.length
29543 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29544 valx2->v.val_vec.elt_size
29545 * valx2->v.val_vec.length) == 0;
29546 case dw_val_class_const_double:
29547 return valx2->v.val_double.low == valy2->v.val_double.low
29548 && valx2->v.val_double.high == valy2->v.val_double.high;
29549 case dw_val_class_wide_int:
29550 return *valx2->v.val_wide == *valy2->v.val_wide;
29551 default:
29552 gcc_unreachable ();
29553 }
29554 case DW_OP_regval_type:
29555 case DW_OP_deref_type:
29556 case DW_OP_GNU_regval_type:
29557 case DW_OP_GNU_deref_type:
29558 return valx1->v.val_int == valy1->v.val_int
29559 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29560 case DW_OP_convert:
29561 case DW_OP_reinterpret:
29562 case DW_OP_GNU_convert:
29563 case DW_OP_GNU_reinterpret:
29564 if (valx1->val_class != valy1->val_class)
29565 return false;
29566 if (valx1->val_class == dw_val_class_unsigned_const)
29567 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29568 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29569 case DW_OP_GNU_parameter_ref:
29570 return valx1->val_class == dw_val_class_die_ref
29571 && valx1->val_class == valy1->val_class
29572 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29573 default:
29574 /* Other codes have no operands. */
29575 return true;
29576 }
29577 }
29578
29579 /* Return true if DWARF location expressions X and Y are the same. */
29580
29581 static inline bool
29582 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29583 {
29584 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29585 if (x->dw_loc_opc != y->dw_loc_opc
29586 || x->dtprel != y->dtprel
29587 || !compare_loc_operands (x, y))
29588 break;
29589 return x == NULL && y == NULL;
29590 }
29591
29592 /* Hashtable helpers. */
29593
29594 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29595 {
29596 static inline hashval_t hash (const dw_loc_list_struct *);
29597 static inline bool equal (const dw_loc_list_struct *,
29598 const dw_loc_list_struct *);
29599 };
29600
29601 /* Return precomputed hash of location list X. */
29602
29603 inline hashval_t
29604 loc_list_hasher::hash (const dw_loc_list_struct *x)
29605 {
29606 return x->hash;
29607 }
29608
29609 /* Return true if location lists A and B are the same. */
29610
29611 inline bool
29612 loc_list_hasher::equal (const dw_loc_list_struct *a,
29613 const dw_loc_list_struct *b)
29614 {
29615 if (a == b)
29616 return 1;
29617 if (a->hash != b->hash)
29618 return 0;
29619 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29620 if (strcmp (a->begin, b->begin) != 0
29621 || strcmp (a->end, b->end) != 0
29622 || (a->section == NULL) != (b->section == NULL)
29623 || (a->section && strcmp (a->section, b->section) != 0)
29624 || !compare_locs (a->expr, b->expr))
29625 break;
29626 return a == NULL && b == NULL;
29627 }
29628
29629 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29630
29631
29632 /* Recursively optimize location lists referenced from DIE and its
29633 children, and share them whenever possible. */
29634
29635 static void
29636 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29637 {
29638 dw_die_ref c;
29639 dw_attr_node *a;
29640 unsigned ix;
29641 dw_loc_list_struct **slot;
29642
29643 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29644 if (AT_class (a) == dw_val_class_loc_list)
29645 {
29646 dw_loc_list_ref list = AT_loc_list (a);
29647 /* TODO: perform some optimizations here, before hashing
29648 it and storing into the hash table. */
29649 hash_loc_list (list);
29650 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
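      /* The first list with a given hash and contents becomes the
	 canonical copy; later identical lists are redirected to it.  */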
29651 if (*slot == NULL)
29652 *slot = list;
29653 else
29654 a->dw_attr_val.v.val_loc_list = *slot;
29655 }
29656
29657 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29658 }
29659
29660
29661 /* Recursively assign each location list a unique index into the debug_addr
29662 section. */
29663
29664 static void
29665 index_location_lists (dw_die_ref die)
29666 {
29667 dw_die_ref c;
29668 dw_attr_node *a;
29669 unsigned ix;
29670
29671 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29672 if (AT_class (a) == dw_val_class_loc_list)
29673 {
29674 dw_loc_list_ref list = AT_loc_list (a);
29675 dw_loc_list_ref curr;
29676 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29677 {
29678 /* Don't index an entry that has already been indexed
29679 or won't be output. */
29680 if (curr->begin_entry != NULL
29681 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29682 continue;
29683
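	  /* Give the begin label an entry in the address table; split-DWARF
	     location lists then refer to the address by its index.  */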
29684 curr->begin_entry
29685 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29686 }
29687 }
29688
29689 FOR_EACH_CHILD (die, c, index_location_lists (c));
29690 }
29691
29692 /* Optimize location lists referenced from DIE and its
29693 children, and share them whenever possible. */
29694
29695 static void
29696 optimize_location_lists (dw_die_ref die)
29697 {
29698 loc_list_hash_type htab (500);
29699 optimize_location_lists_1 (die, &htab);
29700 }
29701 \f
29702 /* Traverse the limbo die list, and add parent/child links. The only
29703 dies without parents that should be here are concrete instances of
29704 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29705 For concrete instances, we can get the parent die from the abstract
29706 instance. */
29707
29708 static void
29709 flush_limbo_die_list (void)
29710 {
29711 limbo_die_node *node;
29712
29713 /* get_context_die calls force_decl_die, which can put new DIEs on the
29714 limbo list in LTO mode when nested functions are put in a different
29715 partition than that of their parent function. */
29716 while ((node = limbo_die_list))
29717 {
29718 dw_die_ref die = node->die;
29719 limbo_die_list = node->next;
29720
29721 if (die->die_parent == NULL)
29722 {
29723 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29724
29725 if (origin && origin->die_parent)
29726 add_child_die (origin->die_parent, die);
29727 else if (is_cu_die (die))
29728 ;
29729 else if (seen_error ())
29730 /* It's OK to be confused by errors in the input. */
29731 add_child_die (comp_unit_die (), die);
29732 else
29733 {
29734 /* In certain situations, the lexical block containing a
29735 nested function can be optimized away, which results
29736 in the nested function die being orphaned. Likewise
29737 with the return type of that nested function. Force
29738 this to be a child of the containing function.
29739
29740 It may happen that even the containing function got fully
29741 inlined and optimized out. In that case we are lost and
29742 assign the empty child. This should not be a big issue, as
29743 the function is likely unreachable too. */
29744 gcc_assert (node->created_for);
29745
29746 if (DECL_P (node->created_for))
29747 origin = get_context_die (DECL_CONTEXT (node->created_for));
29748 else if (TYPE_P (node->created_for))
29749 origin = scope_die_for (node->created_for, comp_unit_die ());
29750 else
29751 origin = comp_unit_die ();
29752
29753 add_child_die (origin, die);
29754 }
29755 }
29756 }
29757 }
29758
29759 /* Reset DIEs so we can output them again. */
29760
29761 static void
29762 reset_dies (dw_die_ref die)
29763 {
29764 dw_die_ref c;
29765
29766 /* Remove stuff we re-generate. */
29767 die->die_mark = 0;
29768 die->die_offset = 0;
29769 die->die_abbrev = 0;
29770 remove_AT (die, DW_AT_sibling);
29771
29772 FOR_EACH_CHILD (die, c, reset_dies (c));
29773 }
29774
29775 /* Output stuff that dwarf requires at the end of every file,
29776 and generate the DWARF-2 debugging info. */
29777
29778 static void
29779 dwarf2out_finish (const char *)
29780 {
29781 comdat_type_node *ctnode;
29782 dw_die_ref main_comp_unit_die;
29783 unsigned char checksum[16];
29784 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
29785
29786 /* Flush out any latecomers to the limbo party. */
29787 flush_limbo_die_list ();
29788
29789 if (flag_checking)
29790 {
29791 verify_die (comp_unit_die ());
29792 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29793 verify_die (node->die);
29794 }
29795
29796 /* We shouldn't have any symbols with delayed asm names for
29797 DIEs generated after early finish. */
29798 gcc_assert (deferred_asm_name == NULL);
29799
29800 gen_remaining_tmpl_value_param_die_attribute ();
29801
29802 if (flag_generate_lto || flag_generate_offload)
29803 {
29804 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
29805
29806 /* Prune stuff so that dwarf2out_finish runs successfully
29807 for the fat part of the object. */
29808 reset_dies (comp_unit_die ());
29809 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29810 reset_dies (node->die);
29811
29812 hash_table<comdat_type_hasher> comdat_type_table (100);
29813 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29814 {
29815 comdat_type_node **slot
29816 = comdat_type_table.find_slot (ctnode, INSERT);
29817
29818 /* Don't reset types twice. */
29819 if (*slot != HTAB_EMPTY_ENTRY)
29820 continue;
29821
29822 /* Reset the root DIEs of the comdat type units as well, so
29823 that they too can be output again for the fat part of the
29824 object. */
29825 if (debug_info_level >= DINFO_LEVEL_TERSE)
29826 reset_dies (ctnode->root_die);
29827
29828 *slot = ctnode;
29829 }
29830
29831 /* Reset die CU symbol so we don't output it twice. */
29832 comp_unit_die ()->die_id.die_symbol = NULL;
29833
29834 /* Remove DW_AT_macro from the early output. */
29835 if (have_macinfo)
29836 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
29837
29838 /* Remove indirect string decisions. */
29839 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
29840 }
29841
29842 #if ENABLE_ASSERT_CHECKING
29843 {
29844 dw_die_ref die = comp_unit_die (), c;
29845 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
29846 }
29847 #endif
29848 resolve_addr (comp_unit_die ());
29849 move_marked_base_types ();
29850
29851 /* Initialize sections and labels used for actual assembler output. */
29852 init_sections_and_labels (false);
29853
29854 /* Traverse the DIE's and add sibling attributes to those DIE's that
29855 have children. */
29856 add_sibling_attributes (comp_unit_die ());
29857 limbo_die_node *node;
29858 for (node = cu_die_list; node; node = node->next)
29859 add_sibling_attributes (node->die);
29860 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29861 add_sibling_attributes (ctnode->root_die);
29862
29863 /* When splitting DWARF info, we put some attributes in the
29864 skeleton compile_unit DIE that remains in the .o, while
29865 most attributes go in the DWO compile_unit_die. */
29866 if (dwarf_split_debug_info)
29867 {
29868 limbo_die_node *cu;
29869 main_comp_unit_die = gen_compile_unit_die (NULL);
29870 if (dwarf_version >= 5)
29871 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
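      /* gen_compile_unit_die put the new skeleton CU on the limbo list;
	 move it onto the CU list instead.  */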
29872 cu = limbo_die_list;
29873 gcc_assert (cu->die == main_comp_unit_die);
29874 limbo_die_list = limbo_die_list->next;
29875 cu->next = cu_die_list;
29876 cu_die_list = cu;
29877 }
29878 else
29879 main_comp_unit_die = comp_unit_die ();
29880
29881 /* Output a terminator label for the .text section. */
29882 switch_to_section (text_section);
29883 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
29884 if (cold_text_section)
29885 {
29886 switch_to_section (cold_text_section);
29887 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
29888 }
29889
29890 /* We can only use the low/high_pc attributes if all of the code was
29891 in .text. */
29892 if (!have_multiple_function_sections
29893 || (dwarf_version < 3 && dwarf_strict))
29894 {
29895 /* Don't add if the CU has no associated code. */
29896 if (text_section_used)
29897 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
29898 text_end_label, true);
29899 }
29900 else
29901 {
29902 unsigned fde_idx;
29903 dw_fde_ref fde;
29904 bool range_list_added = false;
29905
29906 if (text_section_used)
29907 add_ranges_by_labels (main_comp_unit_die, text_section_label,
29908 text_end_label, &range_list_added, true);
29909 if (cold_text_section_used)
29910 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
29911 cold_end_label, &range_list_added, true);
29912
29913 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
29914 {
29915 if (DECL_IGNORED_P (fde->decl))
29916 continue;
29917 if (!fde->in_std_section)
29918 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
29919 fde->dw_fde_end, &range_list_added,
29920 true);
29921 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
29922 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
29923 fde->dw_fde_second_end, &range_list_added,
29924 true);
29925 }
29926
29927 if (range_list_added)
29928 {
29929 /* We need to give .debug_loc and .debug_ranges an appropriate
29930 "base address". Use zero so that these addresses become
29931 absolute. Historically, we've emitted the unexpected
29932 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
29933 Emit both to give time for other tools to adapt. */
29934 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
29935 if (! dwarf_strict && dwarf_version < 4)
29936 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
29937
29938 add_ranges (NULL);
29939 }
29940 }
29941
29942 /* AIX Assembler inserts the length, so adjust the reference to match the
29943 offset expected by debuggers. */
29944 strcpy (dl_section_ref, debug_line_section_label);
29945 if (XCOFF_DEBUGGING_INFO)
29946 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
29947
29948 if (debug_info_level >= DINFO_LEVEL_TERSE)
29949 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
29950 dl_section_ref);
29951
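  /* Point the compile unit at the macro information emitted below.  */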
29952 if (have_macinfo)
29953 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
29954 macinfo_section_label);
29955
29956 if (dwarf_split_debug_info)
29957 {
29958 if (have_location_lists)
29959 {
29960 if (dwarf_version >= 5)
29961 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
29962 loc_section_label);
29963 /* optimize_location_lists calculates the size of the lists,
29964 so index them first, and assign indices to the entries.
29965 Although optimize_location_lists will remove entries from
29966 the table, it only does so for duplicates, and therefore
29967 only reduces ref_counts to 1. */
29968 index_location_lists (comp_unit_die ());
29969 }
29970
29971 if (addr_index_table != NULL)
29972 {
29973 unsigned int index = 0;
29974 addr_index_table
29975 ->traverse_noresize<unsigned int *, index_addr_table_entry>
29976 (&index);
29977 }
29978 }
29979
29980 loc_list_idx = 0;
29981 if (have_location_lists)
29982 {
29983 optimize_location_lists (comp_unit_die ());
29984 /* And finally assign indexes to the entries for -gsplit-dwarf. */
29985 if (dwarf_version >= 5 && dwarf_split_debug_info)
29986 assign_location_list_indexes (comp_unit_die ());
29987 }
29988
29989 save_macinfo_strings ();
29990
29991 if (dwarf_split_debug_info)
29992 {
29993 unsigned int index = 0;
29994
29995 /* Add attributes common to skeleton compile_units and
29996 type_units. Because these attributes include strings, this
29997 must be done before freezing the string table. Top-level
29998 skeleton die attrs are added when the skeleton type unit is
29999 created, so ensure it is created by this point. */
30000 add_top_level_skeleton_die_attrs (main_comp_unit_die);
30001 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30002 }
30003
30004 /* Output all of the compilation units. We put the main one last so that
30005 the offsets are available to output_pubnames. */
30006 for (node = cu_die_list; node; node = node->next)
30007 output_comp_unit (node->die, 0, NULL);
30008
30009 hash_table<comdat_type_hasher> comdat_type_table (100);
30010 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30011 {
30012 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30013
30014 /* Don't output duplicate types. */
30015 if (*slot != HTAB_EMPTY_ENTRY)
30016 continue;
30017
30018 /* Add a pointer to the line table for the main compilation unit
30019 so that the debugger can make sense of DW_AT_decl_file
30020 attributes. */
30021 if (debug_info_level >= DINFO_LEVEL_TERSE)
30022 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30023 (!dwarf_split_debug_info
30024 ? dl_section_ref
30025 : debug_skeleton_line_section_label));
30026
30027 output_comdat_type_unit (ctnode);
30028 *slot = ctnode;
30029 }
30030
30031 if (dwarf_split_debug_info)
30032 {
30033 int mark;
30034 struct md5_ctx ctx;
30035
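      /* For DWARF 5, assign indexes to the range list entries first so
	 that range attributes can refer to them by index.  */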
30036 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
30037 index_rnglists ();
30038
30039 /* Compute a checksum of the comp_unit to use as the dwo_id. */
30040 md5_init_ctx (&ctx);
30041 mark = 0;
30042 die_checksum (comp_unit_die (), &ctx, &mark);
30043 unmark_all_dies (comp_unit_die ());
30044 md5_finish_ctx (&ctx, checksum);
30045
30046 if (dwarf_version < 5)
30047 {
30048 /* Use the first 8 bytes of the checksum as the dwo_id,
30049 and add it to both comp-unit DIEs. */
30050 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
30051 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
30052 }
30053
30054 /* Add the base offset of the ranges table to the skeleton
30055 comp-unit DIE. */
30056 if (!vec_safe_is_empty (ranges_table))
30057 {
30058 if (dwarf_version >= 5)
30059 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
30060 ranges_base_label);
30061 else
30062 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
30063 ranges_section_label);
30064 }
30065
30066 switch_to_section (debug_addr_section);
30067 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
30068 output_addr_table ();
30069 }
30070
30071 /* Output the main compilation unit if non-empty or if .debug_macinfo
30072 or .debug_macro will be emitted. */
30073 output_comp_unit (comp_unit_die (), have_macinfo,
30074 dwarf_split_debug_info ? checksum : NULL);
30075
30076 if (dwarf_split_debug_info && info_section_emitted)
30077 output_skeleton_debug_sections (main_comp_unit_die, checksum);
30078
30079 /* Output the abbreviation table. */
30080 if (vec_safe_length (abbrev_die_table) != 1)
30081 {
30082 switch_to_section (debug_abbrev_section);
30083 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30084 output_abbrev_section ();
30085 }
30086
30087 /* Output location list section if necessary. */
30088 if (have_location_lists)
30089 {
30090 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
30091 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
30092 /* Output the location lists info. */
30093 switch_to_section (debug_loc_section);
30094 if (dwarf_version >= 5)
30095 {
30096 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
30097 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
30098 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
30099 dw2_asm_output_data (4, 0xffffffff,
30100 "Initial length escape value indicating "
30101 "64-bit DWARF extension");
30102 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
30103 "Length of Location Lists");
30104 ASM_OUTPUT_LABEL (asm_out_file, l1);
30105 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
30106 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
30107 dw2_asm_output_data (1, 0, "Segment Size");
30108 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
30109 "Offset Entry Count");
30110 }
30111 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
30112 if (dwarf_version >= 5 && dwarf_split_debug_info)
30113 {
30114 unsigned int save_loc_list_idx = loc_list_idx;
30115 loc_list_idx = 0;
30116 output_loclists_offsets (comp_unit_die ());
30117 gcc_assert (save_loc_list_idx == loc_list_idx);
30118 }
30119 output_location_lists (comp_unit_die ());
30120 if (dwarf_version >= 5)
30121 ASM_OUTPUT_LABEL (asm_out_file, l2);
30122 }
30123
30124 output_pubtables ();
30125
30126 /* Output the address range information if a CU (.debug_info section)
30127 was emitted. We output an empty table even if we had no functions
30128 to put in it. This is because the consumer has no way to tell the
30129 difference between an empty table that we omitted and failure to
30130 generate a table that would have contained data. */
30131 if (info_section_emitted)
30132 {
30133 switch_to_section (debug_aranges_section);
30134 output_aranges ();
30135 }
30136
30137 /* Output ranges section if necessary. */
30138 if (!vec_safe_is_empty (ranges_table))
30139 {
30140 if (dwarf_version >= 5)
30141 output_rnglists ();
30142 else
30143 output_ranges ();
30144 }
30145
30146 /* Have to end the macro section. */
30147 if (have_macinfo)
30148 {
30149 switch_to_section (debug_macinfo_section);
30150 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30151 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
30152 : debug_skeleton_line_section_label, false);
30153 dw2_asm_output_data (1, 0, "End compilation unit");
30154 }
30155
30156 /* Output the source line correspondence table. We must do this
30157 even if there is no line information. Otherwise, on an empty
30158 translation unit, we will generate a present, but empty,
30159 .debug_info section. IRIX 6.5 `nm' will then complain when
30160 examining the file. This is done late so that any filenames
30161 used by the debug_info section are marked as 'used'. */
30162 switch_to_section (debug_line_section);
30163 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
30164 if (! DWARF2_ASM_LINE_DEBUG_INFO)
30165 output_line_info (false);
30166
30167 if (dwarf_split_debug_info && info_section_emitted)
30168 {
30169 switch_to_section (debug_skeleton_line_section);
30170 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30171 output_line_info (true);
30172 }
30173
30174 /* If we emitted any indirect strings, output the string table too. */
30175 if (debug_str_hash || skeleton_debug_str_hash)
30176 output_indirect_strings ();
30177 if (debug_line_str_hash)
30178 {
30179 switch_to_section (debug_line_str_section);
30180 const enum dwarf_form form = DW_FORM_line_strp;
30181 debug_line_str_hash->traverse<enum dwarf_form,
30182 output_indirect_string> (form);
30183 }
30184 }
30185
30186 /* Returns a hash value for X (which really is a variable_value_struct). */
30187
30188 inline hashval_t
30189 variable_value_hasher::hash (variable_value_struct *x)
30190 {
30191 return (hashval_t) x->decl_id;
30192 }
30193
30194 /* Return nonzero if decl_id of variable_value_struct X is the same as
30195 UID of decl Y. */
30196
30197 inline bool
30198 variable_value_hasher::equal (variable_value_struct *x, tree y)
30199 {
30200 return x->decl_id == DECL_UID (y);
30201 }
30202
30203 /* Helper function for resolve_variable_value: handle
30204 DW_OP_GNU_variable_value in one location expression.
30205 Return true if the exprloc has been changed into a loclist. */
30206
30207 static bool
30208 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30209 {
30210 dw_loc_descr_ref next;
30211 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
30212 {
30213 next = loc->dw_loc_next;
30214 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
30215 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
30216 continue;
30217
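      /* Only references to variables belonging to the current function
	 are resolved here.  */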
30218 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30219 if (DECL_CONTEXT (decl) != current_function_decl)
30220 continue;
30221
30222 dw_die_ref ref = lookup_decl_die (decl);
30223 if (ref)
30224 {
30225 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30226 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30227 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30228 continue;
30229 }
30230 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
30231 if (l == NULL)
30232 continue;
30233 if (l->dw_loc_next)
30234 {
30235 if (AT_class (a) != dw_val_class_loc)
30236 continue;
30237 switch (a->dw_attr)
30238 {
30239 /* The following attributes allow both exprloc and loclist
30240 classes, so we can change them into a loclist. */
30241 case DW_AT_location:
30242 case DW_AT_string_length:
30243 case DW_AT_return_addr:
30244 case DW_AT_data_member_location:
30245 case DW_AT_frame_base:
30246 case DW_AT_segment:
30247 case DW_AT_static_link:
30248 case DW_AT_use_location:
30249 case DW_AT_vtable_elem_location:
30250 if (prev)
30251 {
30252 prev->dw_loc_next = NULL;
30253 prepend_loc_descr_to_each (l, AT_loc (a));
30254 }
30255 if (next)
30256 add_loc_descr_to_each (l, next);
30257 a->dw_attr_val.val_class = dw_val_class_loc_list;
30258 a->dw_attr_val.val_entry = NULL;
30259 a->dw_attr_val.v.val_loc_list = l;
30260 have_location_lists = true;
30261 return true;
30262 /* The following attributes allow both exprloc and reference,
30263 so if the whole expression is DW_OP_GNU_variable_value alone
30264 we could transform it into a reference. */
30265 case DW_AT_byte_size:
30266 case DW_AT_bit_size:
30267 case DW_AT_lower_bound:
30268 case DW_AT_upper_bound:
30269 case DW_AT_bit_stride:
30270 case DW_AT_count:
30271 case DW_AT_allocated:
30272 case DW_AT_associated:
30273 case DW_AT_byte_stride:
30274 if (prev == NULL && next == NULL)
30275 break;
30276 /* FALLTHRU */
30277 default:
30278 if (dwarf_strict)
30279 continue;
30280 break;
30281 }
30282 /* Create a DW_TAG_variable DIE that we can refer to. */
30283 gen_decl_die (decl, NULL_TREE, NULL,
30284 lookup_decl_die (current_function_decl));
30285 ref = lookup_decl_die (decl);
30286 if (ref)
30287 {
30288 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30289 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30290 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30291 }
30292 continue;
30293 }
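      /* The location list has a single entry: splice its expression into
	 the containing expression in place of the DW_OP_GNU_variable_value
	 operation.  */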
30294 if (prev)
30295 {
30296 prev->dw_loc_next = l->expr;
30297 add_loc_descr (&prev->dw_loc_next, next);
30298 free_loc_descr (loc, NULL);
30299 next = prev->dw_loc_next;
30300 }
30301 else
30302 {
30303 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
30304 add_loc_descr (&loc, next);
30305 next = loc;
30306 }
30307 loc = prev;
30308 }
30309 return false;
30310 }
30311
30312 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
30313
30314 static void
30315 resolve_variable_value (dw_die_ref die)
30316 {
30317 dw_attr_node *a;
30318 dw_loc_list_ref loc;
30319 unsigned ix;
30320
30321 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30322 switch (AT_class (a))
30323 {
30324 case dw_val_class_loc:
30325 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
30326 break;
30327 /* FALLTHRU */
30328 case dw_val_class_loc_list:
30329 loc = AT_loc_list (a);
30330 gcc_assert (loc);
30331 for (; loc; loc = loc->dw_loc_next)
30332 resolve_variable_value_in_expr (a, loc->expr);
30333 break;
30334 default:
30335 break;
30336 }
30337 }
30338
30339 /* Attempt to optimize DW_OP_GNU_variable_value referring to
30340 temporaries in the current function. */
30341
30342 static void
30343 resolve_variable_values (void)
30344 {
30345 if (!variable_value_hash || !current_function_decl)
30346 return;
30347
30348 struct variable_value_struct *node
30349 = variable_value_hash->find_with_hash (current_function_decl,
30350 DECL_UID (current_function_decl));
30351
30352 if (node == NULL)
30353 return;
30354
30355 unsigned int i;
30356 dw_die_ref die;
30357 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
30358 resolve_variable_value (die);
30359 }
30360
30361 /* Helper function for note_variable_value: handle one location
30362 expression. */
30363
30364 static void
30365 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
30366 {
30367 for (; loc; loc = loc->dw_loc_next)
30368 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
30369 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30370 {
30371 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30372 dw_die_ref ref = lookup_decl_die (decl);
30373 if (! ref && (flag_generate_lto || flag_generate_offload))
30374 {
30375 /* ??? This is somewhat of a hack, because we do not create DIEs
30376 for variables not in BLOCK trees early, but when generating
30377 early LTO output we need the dw_val_class_decl_ref to be
30378 fully resolved. For fat LTO objects we'd also like to
30379 undo this after LTO dwarf output. */
30380 gcc_assert (DECL_CONTEXT (decl));
30381 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
30382 gcc_assert (ctx != NULL);
30383 gen_decl_die (decl, NULL_TREE, NULL, ctx);
30384 ref = lookup_decl_die (decl);
30385 gcc_assert (ref != NULL);
30386 }
30387 if (ref)
30388 {
30389 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30390 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30391 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30392 continue;
30393 }
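      /* No DIE for the referenced variable yet.  If it belongs to another
	 function that already has a DIE, record this DIE so that
	 resolve_variable_values can retry the lookup later.  */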
30394 if (VAR_P (decl)
30395 && DECL_CONTEXT (decl)
30396 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
30397 && lookup_decl_die (DECL_CONTEXT (decl)))
30398 {
30399 if (!variable_value_hash)
30400 variable_value_hash
30401 = hash_table<variable_value_hasher>::create_ggc (10);
30402
30403 tree fndecl = DECL_CONTEXT (decl);
30404 struct variable_value_struct *node;
30405 struct variable_value_struct **slot
30406 = variable_value_hash->find_slot_with_hash (fndecl,
30407 DECL_UID (fndecl),
30408 INSERT);
30409 if (*slot == NULL)
30410 {
30411 node = ggc_cleared_alloc<variable_value_struct> ();
30412 node->decl_id = DECL_UID (fndecl);
30413 *slot = node;
30414 }
30415 else
30416 node = *slot;
30417
30418 vec_safe_push (node->dies, die);
30419 }
30420 }
30421 }
30422
30423 /* Walk the tree rooted at DIE and note DIEs that still contain a
30424 DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand. */
30425
30426 static void
30427 note_variable_value (dw_die_ref die)
30428 {
30429 dw_die_ref c;
30430 dw_attr_node *a;
30431 dw_loc_list_ref loc;
30432 unsigned ix;
30433
30434 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30435 switch (AT_class (a))
30436 {
30437 case dw_val_class_loc_list:
30438 loc = AT_loc_list (a);
30439 gcc_assert (loc);
30440 if (!loc->noted_variable_value)
30441 {
30442 loc->noted_variable_value = 1;
30443 for (; loc; loc = loc->dw_loc_next)
30444 note_variable_value_in_expr (die, loc->expr);
30445 }
30446 break;
30447 case dw_val_class_loc:
30448 note_variable_value_in_expr (die, AT_loc (a));
30449 break;
30450 default:
30451 break;
30452 }
30453
30454 /* Mark children. */
30455 FOR_EACH_CHILD (die, c, note_variable_value (c));
30456 }
30457
30458 /* Perform any cleanups needed after the early debug generation pass
30459 has run. */
30460
30461 static void
30462 dwarf2out_early_finish (const char *filename)
30463 {
30464 set_early_dwarf s;
30465
30466 /* PCH might result in the DW_AT_producer string being restored from the
30467 header compilation, so always fill it with an empty string initially
30468 and overwrite it only here. */
30469 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
30470 producer_string = gen_producer_string ();
30471 producer->dw_attr_val.v.val_str->refcount--;
30472 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
30473
30474 /* Add the name for the main input file now. We delayed this from
30475 dwarf2out_init to avoid complications with PCH. */
30476 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
30477 add_comp_dir_attribute (comp_unit_die ());
30478
30479 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
30480 DW_AT_comp_dir into .debug_line_str section. */
30481 if (!DWARF2_ASM_LINE_DEBUG_INFO
30482 && dwarf_version >= 5
30483 && DWARF5_USE_DEBUG_LINE_STR)
30484 {
30485 for (int i = 0; i < 2; i++)
30486 {
30487 dw_attr_node *a = get_AT (comp_unit_die (),
30488 i ? DW_AT_comp_dir : DW_AT_name);
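	  /* Skip missing or non-string attributes, and strings short enough
	     that keeping them inline is no larger than a section offset.  */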
30489 if (a == NULL
30490 || AT_class (a) != dw_val_class_str
30491 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
30492 continue;
30493
30494 if (! debug_line_str_hash)
30495 debug_line_str_hash
30496 = hash_table<indirect_string_hasher>::create_ggc (10);
30497
30498 struct indirect_string_node *node
30499 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
30500 set_indirect_string (node);
30501 node->form = DW_FORM_line_strp;
30502 a->dw_attr_val.v.val_str->refcount--;
30503 a->dw_attr_val.v.val_str = node;
30504 }
30505 }
30506
30507 /* With LTO, early dwarf was really finished at compile time, so make
30508 sure to adjust the phase after annotating the LTRANS CU DIE. */
30509 if (in_lto_p)
30510 {
30511 early_dwarf_finished = true;
30512 return;
30513 }
30514
30515 /* Walk through the list of incomplete types again, trying once more to
30516 emit full debugging info for them. */
30517 retry_incomplete_types ();
30518
30519 /* The point here is to flush out the limbo list so that it is empty
30520 and we don't need to stream it for LTO. */
30521 flush_limbo_die_list ();
30522
30523 gen_scheduled_generic_parms_dies ();
30524 gen_remaining_tmpl_value_param_die_attribute ();
30525
30526 /* Add DW_AT_linkage_name for all deferred DIEs. */
30527 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
30528 {
30529 tree decl = node->created_for;
30530 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
30531 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
30532 ended up in deferred_asm_name before we knew it was
30533 constant and never written to disk. */
30534 && DECL_ASSEMBLER_NAME (decl))
30535 {
30536 add_linkage_attr (node->die, decl);
30537 move_linkage_attr (node->die);
30538 }
30539 }
30540 deferred_asm_name = NULL;
30541
30542 if (flag_eliminate_unused_debug_types)
30543 prune_unused_types ();
30544
30545 /* Generate separate COMDAT sections for type DIEs. */
30546 if (use_debug_types)
30547 {
30548 break_out_comdat_types (comp_unit_die ());
30549
30550 /* Each new type_unit DIE was added to the limbo die list when created.
30551 Since these have all been added to comdat_type_list, clear the
30552 limbo die list. */
30553 limbo_die_list = NULL;
30554
30555 /* For each new comdat type unit, copy declarations for incomplete
30556 types to make the new unit self-contained (i.e., no direct
30557 references to the main compile unit). */
30558 for (comdat_type_node *ctnode = comdat_type_list;
30559 ctnode != NULL; ctnode = ctnode->next)
30560 copy_decls_for_unworthy_types (ctnode->root_die);
30561 copy_decls_for_unworthy_types (comp_unit_die ());
30562
30563 /* In the process of copying declarations from one unit to another,
30564 we may have left some declarations behind that are no longer
30565 referenced. Prune them. */
30566 prune_unused_types ();
30567 }
30568
30569 /* Traverse the DIE's and note those that still contain a
30570 DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand. */
30571 note_variable_value (comp_unit_die ());
30572 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30573 note_variable_value (node->die);
30574 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
30575 ctnode = ctnode->next)
30576 note_variable_value (ctnode->root_die);
30577 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30578 note_variable_value (node->die);
30579
30580 /* The AT_pubnames attribute needs to go in all skeleton dies, including
30581 both the main_cu and all skeleton TUs. Making this call unconditional
30582 would end up either adding a second copy of the AT_pubnames attribute, or
30583 requiring a special case in add_top_level_skeleton_die_attrs. */
30584 if (!dwarf_split_debug_info)
30585 add_AT_pubnames (comp_unit_die ());
30586
30587 /* The early debug phase is now finished. */
30588 early_dwarf_finished = true;
30589
30590 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
30591 if (!flag_generate_lto && !flag_generate_offload)
30592 return;
30593
30594 /* Now, as we are going to output for LTO, initialize sections and labels
30595 to the LTO variants. We don't need a random-seed postfix like other
30596 LTO sections, as linking the LTO debug sections into one in a partial
30597 link is fine. */
30598 init_sections_and_labels (true);
30599
30600 /* The output below is modeled after dwarf2out_finish with all
30601 location-related output removed and some LTO-specific changes.
30602 Some refactoring might make both smaller and easier to match up. */
30603
30604 /* Traverse the DIE's and add sibling attributes to those DIE's
30605 that have children. */
30606 add_sibling_attributes (comp_unit_die ());
30607 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30608 add_sibling_attributes (node->die);
30609 for (comdat_type_node *ctnode = comdat_type_list;
30610 ctnode != NULL; ctnode = ctnode->next)
30611 add_sibling_attributes (ctnode->root_die);
30612
30613 if (have_macinfo)
30614 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30615 macinfo_section_label);
30616
30617 save_macinfo_strings ();
30618
30619 /* Output all of the compilation units. We put the main one last so that
30620 the offsets are available to output_pubnames. */
30621 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30622 output_comp_unit (node->die, 0, NULL);
30623
30624 hash_table<comdat_type_hasher> comdat_type_table (100);
30625 for (comdat_type_node *ctnode = comdat_type_list;
30626 ctnode != NULL; ctnode = ctnode->next)
30627 {
30628 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30629
30630 /* Don't output duplicate types. */
30631 if (*slot != HTAB_EMPTY_ENTRY)
30632 continue;
30633
30634 /* Add a pointer to the line table for the main compilation unit
30635 so that the debugger can make sense of DW_AT_decl_file
30636 attributes. */
30637 if (debug_info_level >= DINFO_LEVEL_TERSE)
30638 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30639 (!dwarf_split_debug_info
30640 ? debug_line_section_label
30641 : debug_skeleton_line_section_label));
30642
30643 output_comdat_type_unit (ctnode);
30644 *slot = ctnode;
30645 }
30646
30647 /* Stick a unique symbol to the main debuginfo section. */
30648 compute_comp_unit_symbol (comp_unit_die ());
30649
30650 /* Output the main compilation unit. We always need it if only for
30651 the CU symbol. */
30652 output_comp_unit (comp_unit_die (), true, NULL);
30653
30654 /* Output the abbreviation table. */
30655 if (vec_safe_length (abbrev_die_table) != 1)
30656 {
30657 switch_to_section (debug_abbrev_section);
30658 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30659 output_abbrev_section ();
30660 }
30661
30662 /* Have to end the macro section. */
30663 if (have_macinfo)
30664 {
30665 /* We have to save macinfo state if we need to output it again
30666 for the FAT part of the object. */
30667 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
30668 if (flag_fat_lto_objects)
30669 macinfo_table = macinfo_table->copy ();
30670
30671 switch_to_section (debug_macinfo_section);
30672 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30673 output_macinfo (debug_skeleton_line_section_label, true);
30674 dw2_asm_output_data (1, 0, "End compilation unit");
30675
30676 /* Emit a skeleton debug_line section. */
30677 switch_to_section (debug_skeleton_line_section);
30678 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30679 output_line_info (true);
30680
30681 if (flag_fat_lto_objects)
30682 {
30683 vec_free (macinfo_table);
30684 macinfo_table = saved_macinfo_table;
30685 }
30686 }
30687
30688
30689 /* If we emitted any indirect strings, output the string table too. */
30690 if (debug_str_hash || skeleton_debug_str_hash)
30691 output_indirect_strings ();
30692
30693 /* Switch back to the text section. */
30694 switch_to_section (text_section);
30695 }
30696
30697 /* Reset all state within dwarf2out.c so that we can rerun the compiler
30698 within the same process. For use by toplev::finalize. */
30699
30700 void
30701 dwarf2out_c_finalize (void)
30702 {
30703 last_var_location_insn = NULL;
30704 cached_next_real_insn = NULL;
30705 used_rtx_array = NULL;
30706 incomplete_types = NULL;
30707 decl_scope_table = NULL;
30708 debug_info_section = NULL;
30709 debug_skeleton_info_section = NULL;
30710 debug_abbrev_section = NULL;
30711 debug_skeleton_abbrev_section = NULL;
30712 debug_aranges_section = NULL;
30713 debug_addr_section = NULL;
30714 debug_macinfo_section = NULL;
30715 debug_line_section = NULL;
30716 debug_skeleton_line_section = NULL;
30717 debug_loc_section = NULL;
30718 debug_pubnames_section = NULL;
30719 debug_pubtypes_section = NULL;
30720 debug_str_section = NULL;
30721 debug_line_str_section = NULL;
30722 debug_str_dwo_section = NULL;
30723 debug_str_offsets_section = NULL;
30724 debug_ranges_section = NULL;
30725 debug_frame_section = NULL;
30726 fde_vec = NULL;
30727 debug_str_hash = NULL;
30728 debug_line_str_hash = NULL;
30729 skeleton_debug_str_hash = NULL;
30730 dw2_string_counter = 0;
30731 have_multiple_function_sections = false;
30732 text_section_used = false;
30733 cold_text_section_used = false;
30734 cold_text_section = NULL;
30735 current_unit_personality = NULL;
30736
30737 early_dwarf = false;
30738 early_dwarf_finished = false;
30739
30740 next_die_offset = 0;
30741 single_comp_unit_die = NULL;
30742 comdat_type_list = NULL;
30743 limbo_die_list = NULL;
30744 file_table = NULL;
30745 decl_die_table = NULL;
30746 common_block_die_table = NULL;
30747 decl_loc_table = NULL;
30748 call_arg_locations = NULL;
30749 call_arg_loc_last = NULL;
30750 call_site_count = -1;
30751 tail_call_site_count = -1;
30752 cached_dw_loc_list_table = NULL;
30753 abbrev_die_table = NULL;
30754 delete dwarf_proc_stack_usage_map;
30755 dwarf_proc_stack_usage_map = NULL;
30756 line_info_label_num = 0;
30757 cur_line_info_table = NULL;
30758 text_section_line_info = NULL;
30759 cold_text_section_line_info = NULL;
30760 separate_line_info = NULL;
30761 info_section_emitted = false;
30762 pubname_table = NULL;
30763 pubtype_table = NULL;
30764 macinfo_table = NULL;
30765 ranges_table = NULL;
30766 ranges_by_label = NULL;
30767 rnglist_idx = 0;
30768 have_location_lists = false;
30769 loclabel_num = 0;
30770 poc_label_num = 0;
30771 last_emitted_file = NULL;
30772 label_num = 0;
30773 tmpl_value_parm_die_table = NULL;
30774 generic_type_instances = NULL;
30775 frame_pointer_fb_offset = 0;
30776 frame_pointer_fb_offset_valid = false;
30777 base_types.release ();
30778 XDELETEVEC (producer_string);
30779 producer_string = NULL;
30780 }
30781
30782 #include "gt-dwarf2out.h"