1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
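/* Editorial illustration (not part of the original source): on x86_64,
   for instance, the CFA is the value of %rsp just before the "call" insn.
   The call pushes an 8-byte return address, so a typical prologue is
   described by CFI along these lines:

	pushq  %rbp		->  .cfi_def_cfa_offset 16
				    .cfi_offset %rbp, -16
	movq   %rsp, %rbp	->  .cfi_def_cfa_register %rbp

   i.e. CFA = %rsp + 8 on entry, %rsp + 16 after the push, and %rbp + 16
   once the frame pointer is established.  */
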
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98
99 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
100 int, bool);
101 static rtx_insn *last_var_location_insn;
102 static rtx_insn *cached_next_real_insn;
103 static void dwarf2out_decl (tree);
104
105 #ifndef XCOFF_DEBUGGING_INFO
106 #define XCOFF_DEBUGGING_INFO 0
107 #endif
108
109 #ifndef HAVE_XCOFF_DWARF_EXTRAS
110 #define HAVE_XCOFF_DWARF_EXTRAS 0
111 #endif
112
113 #ifdef VMS_DEBUGGING_INFO
114 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
115
116 /* Define this macro to be a nonzero value if the directory specifications
117 which are output in the debug info should end with a separator. */
118 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
119 /* Define this macro to evaluate to a nonzero value if GCC should refrain
120 from generating indirect strings in DWARF2 debug information, for instance
121 if your target is stuck with an old version of GDB that is unable to
122 process them properly or uses VMS Debug. */
123 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
124 #else
125 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
126 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
127 #endif
128
129 /* ??? Poison these here until it can be done generically. They've been
130 totally replaced in this file; make sure it stays that way. */
131 #undef DWARF2_UNWIND_INFO
132 #undef DWARF2_FRAME_INFO
133 #if (GCC_VERSION >= 3000)
134 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
135 #endif
136
137 /* The size of the target's pointer type. */
138 #ifndef PTR_SIZE
139 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
140 #endif
141
142 /* Array of RTXes referenced by the debugging information, which therefore
143 must be kept around forever. */
144 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
145
146 /* A pointer to the base of a list of incomplete types which might be
147 completed at some later time. incomplete_types_list needs to be a
148 vec<tree, va_gc> *because we want to tell the garbage collector about
149 it. */
150 static GTY(()) vec<tree, va_gc> *incomplete_types;
151
152 /* A pointer to the base of a table of references to declaration
153 scopes. This table is a display which tracks the nesting
154 of declaration scopes at the current scope and containing
155 scopes. This table is used to find the proper place to
156 define type declaration DIE's. */
157 static GTY(()) vec<tree, va_gc> *decl_scope_table;
158
159 /* Pointers to various DWARF2 sections. */
160 static GTY(()) section *debug_info_section;
161 static GTY(()) section *debug_skeleton_info_section;
162 static GTY(()) section *debug_abbrev_section;
163 static GTY(()) section *debug_skeleton_abbrev_section;
164 static GTY(()) section *debug_aranges_section;
165 static GTY(()) section *debug_addr_section;
166 static GTY(()) section *debug_macinfo_section;
167 static const char *debug_macinfo_section_name;
168 static unsigned macinfo_label_base = 1;
169 static GTY(()) section *debug_line_section;
170 static GTY(()) section *debug_skeleton_line_section;
171 static GTY(()) section *debug_loc_section;
172 static GTY(()) section *debug_pubnames_section;
173 static GTY(()) section *debug_pubtypes_section;
174 static GTY(()) section *debug_str_section;
175 static GTY(()) section *debug_line_str_section;
176 static GTY(()) section *debug_str_dwo_section;
177 static GTY(()) section *debug_str_offsets_section;
178 static GTY(()) section *debug_ranges_section;
179 static GTY(()) section *debug_frame_section;
180
181 /* Maximum size (in bytes) of an artificially generated label. */
182 #define MAX_ARTIFICIAL_LABEL_BYTES 40
183
184 /* According to the (draft) DWARF 3 specification, the initial length
185 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
186 bytes are 0xffffffff, followed by the length stored in the next 8
187 bytes.
188
189 However, the SGI/MIPS ABI uses an initial length which is equal to
190 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
191
192 #ifndef DWARF_INITIAL_LENGTH_SIZE
193 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
194 #endif
195
196 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
197 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
198 #endif
199
200 /* Round SIZE up to the nearest BOUNDARY. */
201 #define DWARF_ROUND(SIZE,BOUNDARY) \
202 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
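
/* Editorial example (not part of the original source):
   DWARF_ROUND (9, 4) == 12 and DWARF_ROUND (8, 4) == 8, i.e. SIZE is
   padded up to the next multiple of BOUNDARY.  */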
203
204 /* CIE identifier. */
205 #if HOST_BITS_PER_WIDE_INT >= 64
206 #define DWARF_CIE_ID \
207 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
208 #else
209 #define DWARF_CIE_ID DW_CIE_ID
210 #endif
211
212
213 /* A vector for a table that contains frame description
214 information for each routine. */
215 #define NOT_INDEXED (-1U)
216 #define NO_INDEX_ASSIGNED (-2U)
217
218 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
219
220 struct GTY((for_user)) indirect_string_node {
221 const char *str;
222 unsigned int refcount;
223 enum dwarf_form form;
224 char *label;
225 unsigned int index;
226 };
227
228 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
229 {
230 typedef const char *compare_type;
231
232 static hashval_t hash (indirect_string_node *);
233 static bool equal (indirect_string_node *, const char *);
234 };
235
236 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
237
238 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
239
240 /* With split_debug_info, both the comp_dir and dwo_name go in the
241 main object file, rather than the dwo, similar to the force_direct
242 parameter elsewhere but with additional complications:
243
244 1) The string is needed in both the main object file and the dwo.
245 That is, the comp_dir and dwo_name will appear in both places.
246
247 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
248 DW_FORM_line_strp or DW_FORM_GNU_str_index.
249
250 3) GCC chooses the form to use late, depending on the size and
251 reference count.
252
253 Rather than forcing all the debug string handling functions and
254 callers to deal with these complications, simply use a separate,
255 special-cased string table for any attribute that should go in the
256 main object file. This limits the complexity to just the places
257 that need it. */
258
259 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
260
261 static GTY(()) int dw2_string_counter;
262
263 /* True if the compilation unit places functions in more than one section. */
264 static GTY(()) bool have_multiple_function_sections = false;
265
266 /* Whether the default text and cold text sections have been used at all. */
267 static GTY(()) bool text_section_used = false;
268 static GTY(()) bool cold_text_section_used = false;
269
270 /* The default cold text section. */
271 static GTY(()) section *cold_text_section;
272
273 /* The DIE for C++14 'auto' in a function return type. */
274 static GTY(()) dw_die_ref auto_die;
275
276 /* The DIE for C++14 'decltype(auto)' in a function return type. */
277 static GTY(()) dw_die_ref decltype_auto_die;
278
279 /* Forward declarations for functions defined in this file. */
280
281 static void output_call_frame_info (int);
282 static void dwarf2out_note_section_used (void);
283
284 /* Personality decl of current unit. Used only when assembler does not support
285 personality CFI. */
286 static GTY(()) rtx current_unit_personality;
287
288 /* Whether an eh_frame section is required. */
289 static GTY(()) bool do_eh_frame = false;
290
291 /* .debug_rnglists next index. */
292 static unsigned int rnglist_idx;
293
294 /* Data and reference forms for relocatable data. */
295 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
296 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
297
298 #ifndef DEBUG_FRAME_SECTION
299 #define DEBUG_FRAME_SECTION ".debug_frame"
300 #endif
301
302 #ifndef FUNC_BEGIN_LABEL
303 #define FUNC_BEGIN_LABEL "LFB"
304 #endif
305
306 #ifndef FUNC_END_LABEL
307 #define FUNC_END_LABEL "LFE"
308 #endif
309
310 #ifndef PROLOGUE_END_LABEL
311 #define PROLOGUE_END_LABEL "LPE"
312 #endif
313
314 #ifndef EPILOGUE_BEGIN_LABEL
315 #define EPILOGUE_BEGIN_LABEL "LEB"
316 #endif
317
318 #ifndef FRAME_BEGIN_LABEL
319 #define FRAME_BEGIN_LABEL "Lframe"
320 #endif
321 #define CIE_AFTER_SIZE_LABEL "LSCIE"
322 #define CIE_END_LABEL "LECIE"
323 #define FDE_LABEL "LSFDE"
324 #define FDE_AFTER_SIZE_LABEL "LASFDE"
325 #define FDE_END_LABEL "LEFDE"
326 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
327 #define LINE_NUMBER_END_LABEL "LELT"
328 #define LN_PROLOG_AS_LABEL "LASLTP"
329 #define LN_PROLOG_END_LABEL "LELTP"
330 #define DIE_LABEL_PREFIX "DW"
331 \f
332 /* Match the base name of a file to the base name of a compilation unit. */
333
334 static int
335 matches_main_base (const char *path)
336 {
337 /* Cache the last query. */
338 static const char *last_path = NULL;
339 static int last_match = 0;
340 if (path != last_path)
341 {
342 const char *base;
343 int length = base_of_path (path, &base);
344 last_path = path;
345 last_match = (length == main_input_baselength
346 && memcmp (base, main_input_basename, length) == 0);
347 }
348 return last_match;
349 }
350
351 #ifdef DEBUG_DEBUG_STRUCT
352
353 static int
354 dump_struct_debug (tree type, enum debug_info_usage usage,
355 enum debug_struct_file criterion, int generic,
356 int matches, int result)
357 {
358 /* Find the type name. */
359 tree type_decl = TYPE_STUB_DECL (type);
360 tree t = type_decl;
361 const char *name = 0;
362 if (TREE_CODE (t) == TYPE_DECL)
363 t = DECL_NAME (t);
364 if (t)
365 name = IDENTIFIER_POINTER (t);
366
367 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
368 criterion,
369 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
370 matches ? "bas" : "hdr",
371 generic ? "gen" : "ord",
372 usage == DINFO_USAGE_DFN ? ";" :
373 usage == DINFO_USAGE_DIR_USE ? "." : "*",
374 result,
375 (void*) type_decl, name);
376 return result;
377 }
378 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
379 dump_struct_debug (type, usage, criterion, generic, matches, result)
380
381 #else
382
383 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
384 (result)
385
386 #endif
387
388 /* Get the number of HOST_WIDE_INTs needed to represent the precision
389 of the number. Some constants have a large uniform precision, so
390 we get the precision needed for the actual value of the number. */
391
392 static unsigned int
393 get_full_len (const wide_int &op)
394 {
395 int prec = wi::min_precision (op, UNSIGNED);
396 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
397 / HOST_BITS_PER_WIDE_INT);
398 }
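
/* Editorial example (not part of the original source): on a host with
   64-bit HOST_WIDE_INT, a value that needs 70 bits of precision gives
   wi::min_precision == 70, so get_full_len returns (70 + 63) / 64 == 2
   even if the wide_int was created with a much larger uniform
   precision.  */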
399
400 static bool
401 should_emit_struct_debug (tree type, enum debug_info_usage usage)
402 {
403 enum debug_struct_file criterion;
404 tree type_decl;
405 bool generic = lang_hooks.types.generic_p (type);
406
407 if (generic)
408 criterion = debug_struct_generic[usage];
409 else
410 criterion = debug_struct_ordinary[usage];
411
412 if (criterion == DINFO_STRUCT_FILE_NONE)
413 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
414 if (criterion == DINFO_STRUCT_FILE_ANY)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
416
417 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
418
419 if (type_decl != NULL)
420 {
421 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
423
424 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
425 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
426 }
427
428 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
429 }
430 \f
431 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
432 switch to the data section instead, and write out a synthetic start label
433 for collect2 the first time around. */
434
435 static void
436 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
437 {
438 if (eh_frame_section == 0)
439 {
440 int flags;
441
442 if (EH_TABLES_CAN_BE_READ_ONLY)
443 {
444 int fde_encoding;
445 int per_encoding;
446 int lsda_encoding;
447
448 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
449 /*global=*/0);
450 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
451 /*global=*/1);
452 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
453 /*global=*/0);
454 flags = ((! flag_pic
455 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
456 && (fde_encoding & 0x70) != DW_EH_PE_aligned
457 && (per_encoding & 0x70) != DW_EH_PE_absptr
458 && (per_encoding & 0x70) != DW_EH_PE_aligned
459 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
460 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
461 ? 0 : SECTION_WRITE);
462 }
463 else
464 flags = SECTION_WRITE;
465
466 #ifdef EH_FRAME_SECTION_NAME
467 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
468 #else
469 eh_frame_section = ((flags == SECTION_WRITE)
470 ? data_section : readonly_data_section);
471 #endif /* EH_FRAME_SECTION_NAME */
472 }
473
474 switch_to_section (eh_frame_section);
475
476 #ifdef EH_FRAME_THROUGH_COLLECT2
477 /* We have no special eh_frame section. Emit special labels to guide
478 collect2. */
479 if (!back)
480 {
481 tree label = get_file_function_name ("F");
482 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
483 targetm.asm_out.globalize_label (asm_out_file,
484 IDENTIFIER_POINTER (label));
485 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
486 }
487 #endif
488 }
489
490 /* Switch [BACK] to the eh or debug frame table section, depending on
491 FOR_EH. */
492
493 static void
494 switch_to_frame_table_section (int for_eh, bool back)
495 {
496 if (for_eh)
497 switch_to_eh_frame_section (back);
498 else
499 {
500 if (!debug_frame_section)
501 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
502 SECTION_DEBUG, NULL);
503 switch_to_section (debug_frame_section);
504 }
505 }
506
507 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
508
509 enum dw_cfi_oprnd_type
510 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
511 {
512 switch (cfi)
513 {
514 case DW_CFA_nop:
515 case DW_CFA_GNU_window_save:
516 case DW_CFA_remember_state:
517 case DW_CFA_restore_state:
518 return dw_cfi_oprnd_unused;
519
520 case DW_CFA_set_loc:
521 case DW_CFA_advance_loc1:
522 case DW_CFA_advance_loc2:
523 case DW_CFA_advance_loc4:
524 case DW_CFA_MIPS_advance_loc8:
525 return dw_cfi_oprnd_addr;
526
527 case DW_CFA_offset:
528 case DW_CFA_offset_extended:
529 case DW_CFA_def_cfa:
530 case DW_CFA_offset_extended_sf:
531 case DW_CFA_def_cfa_sf:
532 case DW_CFA_restore:
533 case DW_CFA_restore_extended:
534 case DW_CFA_undefined:
535 case DW_CFA_same_value:
536 case DW_CFA_def_cfa_register:
537 case DW_CFA_register:
538 case DW_CFA_expression:
539 case DW_CFA_val_expression:
540 return dw_cfi_oprnd_reg_num;
541
542 case DW_CFA_def_cfa_offset:
543 case DW_CFA_GNU_args_size:
544 case DW_CFA_def_cfa_offset_sf:
545 return dw_cfi_oprnd_offset;
546
547 case DW_CFA_def_cfa_expression:
548 return dw_cfi_oprnd_loc;
549
550 default:
551 gcc_unreachable ();
552 }
553 }
554
555 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
556
557 enum dw_cfi_oprnd_type
558 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
559 {
560 switch (cfi)
561 {
562 case DW_CFA_def_cfa:
563 case DW_CFA_def_cfa_sf:
564 case DW_CFA_offset:
565 case DW_CFA_offset_extended_sf:
566 case DW_CFA_offset_extended:
567 return dw_cfi_oprnd_offset;
568
569 case DW_CFA_register:
570 return dw_cfi_oprnd_reg_num;
571
572 case DW_CFA_expression:
573 case DW_CFA_val_expression:
574 return dw_cfi_oprnd_loc;
575
576 default:
577 return dw_cfi_oprnd_unused;
578 }
579 }
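
/* Editorial example (not part of the original source): for
   DW_CFA_offset (reg, offset), dw_cfi_oprnd1_desc returns
   dw_cfi_oprnd_reg_num and dw_cfi_oprnd2_desc returns
   dw_cfi_oprnd_offset, so the GTY machinery walks operand 1 as a
   register number and operand 2 as an offset; for DW_CFA_nop neither
   operand is used.  */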
580
581 /* Output one FDE. */
582
583 static void
584 output_fde (dw_fde_ref fde, bool for_eh, bool second,
585 char *section_start_label, int fde_encoding, char *augmentation,
586 bool any_lsda_needed, int lsda_encoding)
587 {
588 const char *begin, *end;
589 static unsigned int j;
590 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
591
592 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
593 /* empty */ 0);
594 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
595 for_eh + j);
596 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
597 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
598 if (!XCOFF_DEBUGGING_INFO || for_eh)
599 {
600 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
601 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
602 " indicating 64-bit DWARF extension");
603 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
604 "FDE Length");
605 }
606 ASM_OUTPUT_LABEL (asm_out_file, l1);
607
608 if (for_eh)
609 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
610 else
611 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
612 debug_frame_section, "FDE CIE offset");
613
614 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
615 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
616
617 if (for_eh)
618 {
619 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
620 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
621 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
622 "FDE initial location");
623 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
624 end, begin, "FDE address range");
625 }
626 else
627 {
628 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
629 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
630 }
631
632 if (augmentation[0])
633 {
634 if (any_lsda_needed)
635 {
636 int size = size_of_encoded_value (lsda_encoding);
637
638 if (lsda_encoding == DW_EH_PE_aligned)
639 {
640 int offset = ( 4 /* Length */
641 + 4 /* CIE offset */
642 + 2 * size_of_encoded_value (fde_encoding)
643 + 1 /* Augmentation size */ );
644 int pad = -offset & (PTR_SIZE - 1);
645
646 size += pad;
647 gcc_assert (size_of_uleb128 (size) == 1);
648 }
649
650 dw2_asm_output_data_uleb128 (size, "Augmentation size");
651
652 if (fde->uses_eh_lsda)
653 {
654 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
655 fde->funcdef_number);
656 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
657 gen_rtx_SYMBOL_REF (Pmode, l1),
658 false,
659 "Language Specific Data Area");
660 }
661 else
662 {
663 if (lsda_encoding == DW_EH_PE_aligned)
664 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
665 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
666 "Language Specific Data Area (none)");
667 }
668 }
669 else
670 dw2_asm_output_data_uleb128 (0, "Augmentation size");
671 }
672
673 /* Loop through the Call Frame Instructions associated with this FDE. */
674 fde->dw_fde_current_label = begin;
675 {
676 size_t from, until, i;
677
678 from = 0;
679 until = vec_safe_length (fde->dw_fde_cfi);
680
681 if (fde->dw_fde_second_begin == NULL)
682 ;
683 else if (!second)
684 until = fde->dw_fde_switch_cfi_index;
685 else
686 from = fde->dw_fde_switch_cfi_index;
687
688 for (i = from; i < until; i++)
689 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
690 }
691
692 /* If we are to emit a ref/link from function bodies to their frame tables,
693 do it now. This is typically performed to make sure that tables
694 associated with functions are dragged with them and not discarded in
695 garbage collecting links. We need to do this on a per function basis to
696 cope with -ffunction-sections. */
697
698 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
699 /* Switch to the function section, emit the ref to the tables, and
700 switch *back* into the table section. */
701 switch_to_section (function_section (fde->decl));
702 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
703 switch_to_frame_table_section (for_eh, true);
704 #endif
705
706 /* Pad the FDE out to an address sized boundary. */
707 ASM_OUTPUT_ALIGN (asm_out_file,
708 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
709 ASM_OUTPUT_LABEL (asm_out_file, l2);
710
711 j += 2;
712 }
713
714 /* Return true if frame description entry FDE is needed for EH. */
715
716 static bool
717 fde_needed_for_eh_p (dw_fde_ref fde)
718 {
719 if (flag_asynchronous_unwind_tables)
720 return true;
721
722 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
723 return true;
724
725 if (fde->uses_eh_lsda)
726 return true;
727
728 /* If exceptions are enabled, we have collected nothrow info. */
729 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
730 return false;
731
732 return true;
733 }
734
735 /* Output the call frame information used to record information
736 that relates to calculating the frame pointer, and records the
737 location of saved registers. */
738
739 static void
740 output_call_frame_info (int for_eh)
741 {
742 unsigned int i;
743 dw_fde_ref fde;
744 dw_cfi_ref cfi;
745 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
746 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
747 bool any_lsda_needed = false;
748 char augmentation[6];
749 int augmentation_size;
750 int fde_encoding = DW_EH_PE_absptr;
751 int per_encoding = DW_EH_PE_absptr;
752 int lsda_encoding = DW_EH_PE_absptr;
753 int return_reg;
754 rtx personality = NULL;
755 int dw_cie_version;
756
757 /* Don't emit a CIE if there won't be any FDEs. */
758 if (!fde_vec)
759 return;
760
761 /* Nothing to do if the assembler's doing it all. */
762 if (dwarf2out_do_cfi_asm ())
763 return;
764
765 /* If we don't have any functions we'll want to unwind out of, don't emit
766 any EH unwind information. If we make FDEs linkonce, we may have to
767 emit an empty label for an FDE that wouldn't otherwise be emitted. We
768 want to avoid having an FDE kept around when the function it refers to
769 is discarded. Example where this matters: a primary function template
770 in C++ requires EH information, an explicit specialization doesn't. */
771 if (for_eh)
772 {
773 bool any_eh_needed = false;
774
775 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
776 {
777 if (fde->uses_eh_lsda)
778 any_eh_needed = any_lsda_needed = true;
779 else if (fde_needed_for_eh_p (fde))
780 any_eh_needed = true;
781 else if (TARGET_USES_WEAK_UNWIND_INFO)
782 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
783 }
784
785 if (!any_eh_needed)
786 return;
787 }
788
789 /* We're going to be generating comments, so turn on app. */
790 if (flag_debug_asm)
791 app_enable ();
792
793 /* Switch to the proper frame section, first time. */
794 switch_to_frame_table_section (for_eh, false);
795
796 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
797 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
798
799 /* Output the CIE. */
800 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
801 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
802 if (!XCOFF_DEBUGGING_INFO || for_eh)
803 {
804 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
805 dw2_asm_output_data (4, 0xffffffff,
806 "Initial length escape value indicating 64-bit DWARF extension");
807 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
808 "Length of Common Information Entry");
809 }
810 ASM_OUTPUT_LABEL (asm_out_file, l1);
811
812 /* Now that the CIE pointer is PC-relative for EH,
813 use 0 to identify the CIE. */
814 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
815 (for_eh ? 0 : DWARF_CIE_ID),
816 "CIE Identifier Tag");
817
818 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
819 use CIE version 1, unless that would produce incorrect results
820 due to overflowing the return register column. */
821 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
822 dw_cie_version = 1;
823 if (return_reg >= 256 || dwarf_version > 2)
824 dw_cie_version = 3;
825 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
826
827 augmentation[0] = 0;
828 augmentation_size = 0;
829
830 personality = current_unit_personality;
831 if (for_eh)
832 {
833 char *p;
834
835 /* Augmentation:
836 z Indicates that a uleb128 is present to size the
837 augmentation section.
838 L Indicates the encoding (and thus presence) of
839 an LSDA pointer in the FDE augmentation.
840 R Indicates a non-default pointer encoding for
841 FDE code pointers.
842 P Indicates the presence of an encoding + language
843 personality routine in the CIE augmentation. */
844
845 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
846 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
847 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
848
849 p = augmentation + 1;
850 if (personality)
851 {
852 *p++ = 'P';
853 augmentation_size += 1 + size_of_encoded_value (per_encoding);
854 assemble_external_libcall (personality);
855 }
856 if (any_lsda_needed)
857 {
858 *p++ = 'L';
859 augmentation_size += 1;
860 }
861 if (fde_encoding != DW_EH_PE_absptr)
862 {
863 *p++ = 'R';
864 augmentation_size += 1;
865 }
866 if (p > augmentation + 1)
867 {
868 augmentation[0] = 'z';
869 *p = '\0';
870 }
871
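/* Editorial example (not part of the original source): with a personality
   routine, at least one LSDA and a non-absptr FDE encoding, the code above
   builds the augmentation string "zPLR".  The CIE augmentation data below
   then carries the personality encoding and pointer, the LSDA encoding
   byte and the FDE encoding byte, while each FDE's own augmentation data
   carries its LSDA pointer (see output_fde).  */
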
872 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
873 if (personality && per_encoding == DW_EH_PE_aligned)
874 {
875 int offset = ( 4 /* Length */
876 + 4 /* CIE Id */
877 + 1 /* CIE version */
878 + strlen (augmentation) + 1 /* Augmentation */
879 + size_of_uleb128 (1) /* Code alignment */
880 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
881 + 1 /* RA column */
882 + 1 /* Augmentation size */
883 + 1 /* Personality encoding */ );
884 int pad = -offset & (PTR_SIZE - 1);
885
886 augmentation_size += pad;
887
888 /* Augmentations should be small, so there's scarce need to
889 iterate for a solution. Die if we exceed one uleb128 byte. */
890 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
891 }
892 }
893
894 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
895 if (dw_cie_version >= 4)
896 {
897 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
898 dw2_asm_output_data (1, 0, "CIE Segment Size");
899 }
900 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
901 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
902 "CIE Data Alignment Factor");
903
904 if (dw_cie_version == 1)
905 dw2_asm_output_data (1, return_reg, "CIE RA Column");
906 else
907 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
908
909 if (augmentation[0])
910 {
911 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
912 if (personality)
913 {
914 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
915 eh_data_format_name (per_encoding));
916 dw2_asm_output_encoded_addr_rtx (per_encoding,
917 personality,
918 true, NULL);
919 }
920
921 if (any_lsda_needed)
922 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
923 eh_data_format_name (lsda_encoding));
924
925 if (fde_encoding != DW_EH_PE_absptr)
926 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
927 eh_data_format_name (fde_encoding));
928 }
929
930 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
931 output_cfi (cfi, NULL, for_eh);
932
933 /* Pad the CIE out to an address sized boundary. */
934 ASM_OUTPUT_ALIGN (asm_out_file,
935 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
936 ASM_OUTPUT_LABEL (asm_out_file, l2);
937
938 /* Loop through all of the FDE's. */
939 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
940 {
941 unsigned int k;
942
943 /* Don't emit EH unwind info for leaf functions that don't need it. */
944 if (for_eh && !fde_needed_for_eh_p (fde))
945 continue;
946
947 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
948 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
949 augmentation, any_lsda_needed, lsda_encoding);
950 }
951
952 if (for_eh && targetm.terminate_dw2_eh_frame_info)
953 dw2_asm_output_data (4, 0, "End of Table");
954
955 /* Turn off app to make assembly quicker. */
956 if (flag_debug_asm)
957 app_disable ();
958 }
959
960 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
961
962 static void
963 dwarf2out_do_cfi_startproc (bool second)
964 {
965 int enc;
966 rtx ref;
967
968 fprintf (asm_out_file, "\t.cfi_startproc\n");
969
970 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
971 eh unwinders. */
972 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
973 return;
974
975 rtx personality = get_personality_function (current_function_decl);
976
977 if (personality)
978 {
979 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
980 ref = personality;
981
982 /* ??? The GAS support isn't entirely consistent. We have to
983 handle indirect support ourselves, but PC-relative is done
984 in the assembler. Further, the assembler can't handle any
985 of the weirder relocation types. */
986 if (enc & DW_EH_PE_indirect)
987 ref = dw2_force_const_mem (ref, true);
988
989 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
990 output_addr_const (asm_out_file, ref);
991 fputc ('\n', asm_out_file);
992 }
993
994 if (crtl->uses_eh_lsda)
995 {
996 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
997
998 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
999 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1000 current_function_funcdef_no);
1001 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1002 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1003
1004 if (enc & DW_EH_PE_indirect)
1005 ref = dw2_force_const_mem (ref, true);
1006
1007 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1008 output_addr_const (asm_out_file, ref);
1009 fputc ('\n', asm_out_file);
1010 }
1011 }
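
/* Editorial example (not part of the original source): on an x86_64
   GNU/Linux target compiled with -fPIC and C++ exceptions, the above
   typically emits

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA0

   where 0x9b is DW_EH_PE_indirect|DW_EH_PE_pcrel|DW_EH_PE_sdata4, 0x1b is
   DW_EH_PE_pcrel|DW_EH_PE_sdata4, and the DW.ref.* symbol is the
   indirection slot created by dw2_force_const_mem.  */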
1012
1013 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1014 this allocation may be done before pass_final. */
1015
1016 dw_fde_ref
1017 dwarf2out_alloc_current_fde (void)
1018 {
1019 dw_fde_ref fde;
1020
1021 fde = ggc_cleared_alloc<dw_fde_node> ();
1022 fde->decl = current_function_decl;
1023 fde->funcdef_number = current_function_funcdef_no;
1024 fde->fde_index = vec_safe_length (fde_vec);
1025 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1026 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1027 fde->nothrow = crtl->nothrow;
1028 fde->drap_reg = INVALID_REGNUM;
1029 fde->vdrap_reg = INVALID_REGNUM;
1030
1031 /* Record the FDE associated with this function. */
1032 cfun->fde = fde;
1033 vec_safe_push (fde_vec, fde);
1034
1035 return fde;
1036 }
1037
1038 /* Output a marker (i.e. a label) for the beginning of a function, before
1039 the prologue. */
1040
1041 void
1042 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1043 unsigned int column ATTRIBUTE_UNUSED,
1044 const char *file ATTRIBUTE_UNUSED)
1045 {
1046 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1047 char * dup_label;
1048 dw_fde_ref fde;
1049 section *fnsec;
1050 bool do_frame;
1051
1052 current_function_func_begin_label = NULL;
1053
1054 do_frame = dwarf2out_do_frame ();
1055
1056 /* ??? current_function_func_begin_label is also used by except.c for
1057 call-site information. We must emit this label if it might be used. */
1058 if (!do_frame
1059 && (!flag_exceptions
1060 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1061 return;
1062
1063 fnsec = function_section (current_function_decl);
1064 switch_to_section (fnsec);
1065 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1066 current_function_funcdef_no);
1067 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1068 current_function_funcdef_no);
1069 dup_label = xstrdup (label);
1070 current_function_func_begin_label = dup_label;
1071
1072 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1073 if (!do_frame)
1074 return;
1075
1076 /* Unlike the debug version, the EH version of frame unwind info is a per-
1077 function setting so we need to record whether we need it for the unit. */
1078 do_eh_frame |= dwarf2out_do_eh_frame ();
1079
1080 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1081 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1082 would include pass_dwarf2_frame. If we've not created the FDE yet,
1083 do so now. */
1084 fde = cfun->fde;
1085 if (fde == NULL)
1086 fde = dwarf2out_alloc_current_fde ();
1087
1088 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1089 fde->dw_fde_begin = dup_label;
1090 fde->dw_fde_current_label = dup_label;
1091 fde->in_std_section = (fnsec == text_section
1092 || (cold_text_section && fnsec == cold_text_section));
1093
1094 /* We only want to output line number information for the genuine dwarf2
1095 prologue case, not the eh frame case. */
1096 #ifdef DWARF2_DEBUGGING_INFO
1097 if (file)
1098 dwarf2out_source_line (line, column, file, 0, true);
1099 #endif
1100
1101 if (dwarf2out_do_cfi_asm ())
1102 dwarf2out_do_cfi_startproc (false);
1103 else
1104 {
1105 rtx personality = get_personality_function (current_function_decl);
1106 if (!current_unit_personality)
1107 current_unit_personality = personality;
1108
1109 /* We cannot keep a current personality per function as without CFI
1110 asm, at the point where we emit the CFI data, there is no current
1111 function anymore. */
1112 if (personality && current_unit_personality != personality)
1113 sorry ("multiple EH personalities are supported only with assemblers "
1114 "supporting .cfi_personality directive");
1115 }
1116 }
1117
1118 /* Output a marker (i.e. a label) for the end of the generated code
1119 for a function prologue. This gets called *after* the prologue code has
1120 been generated. */
1121
1122 void
1123 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1124 const char *file ATTRIBUTE_UNUSED)
1125 {
1126 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1127
1128 /* Output a label to mark the endpoint of the code generated for this
1129 function. */
1130 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1131 current_function_funcdef_no);
1132 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1133 current_function_funcdef_no);
1134 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1135 }
1136
1137 /* Output a marker (i.e. a label) for the beginning of the generated code
1138 for a function epilogue. This gets called *before* the epilogue code has
1139 been generated. */
1140
1141 void
1142 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1143 const char *file ATTRIBUTE_UNUSED)
1144 {
1145 dw_fde_ref fde = cfun->fde;
1146 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1147
1148 if (fde->dw_fde_vms_begin_epilogue)
1149 return;
1150
1151 /* Output a label to mark the endpoint of the code generated for this
1152 function. */
1153 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1154 current_function_funcdef_no);
1155 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1156 current_function_funcdef_no);
1157 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1158 }
1159
1160 /* Output a marker (i.e. a label) for the absolute end of the generated code
1161 for a function definition. This gets called *after* the epilogue code has
1162 been generated. */
1163
1164 void
1165 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1166 const char *file ATTRIBUTE_UNUSED)
1167 {
1168 dw_fde_ref fde;
1169 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1170
1171 last_var_location_insn = NULL;
1172 cached_next_real_insn = NULL;
1173
1174 if (dwarf2out_do_cfi_asm ())
1175 fprintf (asm_out_file, "\t.cfi_endproc\n");
1176
1177 /* Output a label to mark the endpoint of the code generated for this
1178 function. */
1179 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1180 current_function_funcdef_no);
1181 ASM_OUTPUT_LABEL (asm_out_file, label);
1182 fde = cfun->fde;
1183 gcc_assert (fde != NULL);
1184 if (fde->dw_fde_second_begin == NULL)
1185 fde->dw_fde_end = xstrdup (label);
1186 }
1187
1188 void
1189 dwarf2out_frame_finish (void)
1190 {
1191 /* Output call frame information. */
1192 if (targetm.debug_unwind_info () == UI_DWARF2)
1193 output_call_frame_info (0);
1194
1195 /* Output another copy for the unwinder. */
1196 if (do_eh_frame)
1197 output_call_frame_info (1);
1198 }
1199
1200 /* Note that the current function section is being used for code. */
1201
1202 static void
1203 dwarf2out_note_section_used (void)
1204 {
1205 section *sec = current_function_section ();
1206 if (sec == text_section)
1207 text_section_used = true;
1208 else if (sec == cold_text_section)
1209 cold_text_section_used = true;
1210 }
1211
1212 static void var_location_switch_text_section (void);
1213 static void set_cur_line_info_table (section *);
1214
1215 void
1216 dwarf2out_switch_text_section (void)
1217 {
1218 section *sect;
1219 dw_fde_ref fde = cfun->fde;
1220
1221 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1222
1223 if (!in_cold_section_p)
1224 {
1225 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1226 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1227 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1228 }
1229 else
1230 {
1231 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1232 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1233 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1234 }
1235 have_multiple_function_sections = true;
1236
1237 /* There is no need to mark used sections when not debugging. */
1238 if (cold_text_section != NULL)
1239 dwarf2out_note_section_used ();
1240
1241 if (dwarf2out_do_cfi_asm ())
1242 fprintf (asm_out_file, "\t.cfi_endproc\n");
1243
1244 /* Now do the real section switch. */
1245 sect = current_function_section ();
1246 switch_to_section (sect);
1247
1248 fde->second_in_std_section
1249 = (sect == text_section
1250 || (cold_text_section && sect == cold_text_section));
1251
1252 if (dwarf2out_do_cfi_asm ())
1253 dwarf2out_do_cfi_startproc (true);
1254
1255 var_location_switch_text_section ();
1256
1257 if (cold_text_section != NULL)
1258 set_cur_line_info_table (sect);
1259 }
1260 \f
1261 /* And now, the subset of the debugging information support code necessary
1262 for emitting location expressions. */
1263
1264 /* Data about a single source file. */
1265 struct GTY((for_user)) dwarf_file_data {
1266 const char * filename;
1267 int emitted_number;
1268 };
1269
1270 /* Describe an entry into the .debug_addr section. */
1271
1272 enum ate_kind {
1273 ate_kind_rtx,
1274 ate_kind_rtx_dtprel,
1275 ate_kind_label
1276 };
1277
1278 struct GTY((for_user)) addr_table_entry {
1279 enum ate_kind kind;
1280 unsigned int refcount;
1281 unsigned int index;
1282 union addr_table_entry_struct_union
1283 {
1284 rtx GTY ((tag ("0"))) rtl;
1285 char * GTY ((tag ("1"))) label;
1286 }
1287 GTY ((desc ("%1.kind"))) addr;
1288 };
1289
1290 /* Location lists are ranges + location descriptions for that range,
1291 so you can track variables that are in different places over
1292 their entire life. */
1293 typedef struct GTY(()) dw_loc_list_struct {
1294 dw_loc_list_ref dw_loc_next;
1295 const char *begin; /* Label and addr_entry for start of range */
1296 addr_table_entry *begin_entry;
1297 const char *end; /* Label for end of range */
1298 char *ll_symbol; /* Label for beginning of location list.
1299 Only on head of list */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 hashval_t hash;
1303 /* True if all addresses in this and subsequent lists are known to be
1304 resolved. */
1305 bool resolved_addr;
1306 /* True if this list has been replaced by dw_loc_next. */
1307 bool replaced;
1308 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1309 section. */
1310 unsigned char emitted : 1;
1311 /* True if hash field is index rather than hash value. */
1312 unsigned char num_assigned : 1;
1313 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1314 unsigned char offset_emitted : 1;
1315 /* True if note_variable_value_in_expr has been called on it. */
1316 unsigned char noted_variable_value : 1;
1317 /* True if the range should be emitted even if begin and end
1318 are the same. */
1319 bool force;
1320 } dw_loc_list_node;
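
/* Editorial example (not part of the original source): a variable that
   starts out in a register and is later spilled to the stack gets a list
   of two nodes, conceptually

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL2)  DW_OP_fbreg -24

   each node holding its begin/end labels and the expression that is valid
   over that range; ll_symbol on the head names the whole list in
   .debug_loc*.  */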
1321
1322 static dw_loc_descr_ref int_loc_descriptor (HOST_WIDE_INT);
1323 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1324
1325 /* Convert a DWARF stack opcode into its string name. */
1326
1327 static const char *
1328 dwarf_stack_op_name (unsigned int op)
1329 {
1330 const char *name = get_DW_OP_name (op);
1331
1332 if (name != NULL)
1333 return name;
1334
1335 return "OP_<unknown>";
1336 }
1337
1338 /* Return a pointer to a newly allocated location description. Location
1339 descriptions are simple expression terms that can be strung
1340 together to form more complicated location (address) descriptions. */
1341
1342 static inline dw_loc_descr_ref
1343 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1344 unsigned HOST_WIDE_INT oprnd2)
1345 {
1346 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1347
1348 descr->dw_loc_opc = op;
1349 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1350 descr->dw_loc_oprnd1.val_entry = NULL;
1351 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1352 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1353 descr->dw_loc_oprnd2.val_entry = NULL;
1354 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1355
1356 return descr;
1357 }
1358
1359 /* Return a pointer to a newly allocated location description for
1360 REG and OFFSET. */
1361
1362 static inline dw_loc_descr_ref
1363 new_reg_loc_descr (unsigned int reg, unsigned HOST_WIDE_INT offset)
1364 {
1365 if (reg <= 31)
1366 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1367 offset, 0);
1368 else
1369 return new_loc_descr (DW_OP_bregx, reg, offset);
1370 }
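
/* Editorial example (not part of the original source):
   new_reg_loc_descr (6, 8) produces the single operation DW_OP_breg6
   with operand 8, while a register outside the 0..31 shorthand range,
   e.g. new_reg_loc_descr (33, 8), produces DW_OP_bregx with operands
   33 and 8.  */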
1371
1372 /* Add a location description term to a location description expression. */
1373
1374 static inline void
1375 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1376 {
1377 dw_loc_descr_ref *d;
1378
1379 /* Find the end of the chain. */
1380 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1381 ;
1382
1383 *d = descr;
1384 }
1385
1386 /* Compare two location operands for exact equality. */
1387
1388 static bool
1389 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1390 {
1391 if (a->val_class != b->val_class)
1392 return false;
1393 switch (a->val_class)
1394 {
1395 case dw_val_class_none:
1396 return true;
1397 case dw_val_class_addr:
1398 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1399
1400 case dw_val_class_offset:
1401 case dw_val_class_unsigned_const:
1402 case dw_val_class_const:
1403 case dw_val_class_unsigned_const_implicit:
1404 case dw_val_class_const_implicit:
1405 case dw_val_class_range_list:
1406 /* These are all HOST_WIDE_INT, signed or unsigned. */
1407 return a->v.val_unsigned == b->v.val_unsigned;
1408
1409 case dw_val_class_loc:
1410 return a->v.val_loc == b->v.val_loc;
1411 case dw_val_class_loc_list:
1412 return a->v.val_loc_list == b->v.val_loc_list;
1413 case dw_val_class_die_ref:
1414 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1415 case dw_val_class_fde_ref:
1416 return a->v.val_fde_index == b->v.val_fde_index;
1417 case dw_val_class_lbl_id:
1418 case dw_val_class_lineptr:
1419 case dw_val_class_macptr:
1420 case dw_val_class_loclistsptr:
1421 case dw_val_class_high_pc:
1422 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1423 case dw_val_class_str:
1424 return a->v.val_str == b->v.val_str;
1425 case dw_val_class_flag:
1426 return a->v.val_flag == b->v.val_flag;
1427 case dw_val_class_file:
1428 case dw_val_class_file_implicit:
1429 return a->v.val_file == b->v.val_file;
1430 case dw_val_class_decl_ref:
1431 return a->v.val_decl_ref == b->v.val_decl_ref;
1432
1433 case dw_val_class_const_double:
1434 return (a->v.val_double.high == b->v.val_double.high
1435 && a->v.val_double.low == b->v.val_double.low);
1436
1437 case dw_val_class_wide_int:
1438 return *a->v.val_wide == *b->v.val_wide;
1439
1440 case dw_val_class_vec:
1441 {
1442 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1443 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1444
1445 return (a_len == b_len
1446 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1447 }
1448
1449 case dw_val_class_data8:
1450 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1451
1452 case dw_val_class_vms_delta:
1453 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1454 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1455
1456 case dw_val_class_discr_value:
1457 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1458 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1459 case dw_val_class_discr_list:
1460 /* It makes no sense comparing two discriminant value lists. */
1461 return false;
1462 }
1463 gcc_unreachable ();
1464 }
1465
1466 /* Compare two location atoms for exact equality. */
1467
1468 static bool
1469 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1470 {
1471 if (a->dw_loc_opc != b->dw_loc_opc)
1472 return false;
1473
1474 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1475 address size, but since we always allocate cleared storage it
1476 should be zero for other types of locations. */
1477 if (a->dtprel != b->dtprel)
1478 return false;
1479
1480 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1481 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1482 }
1483
1484 /* Compare two complete location expressions for exact equality. */
1485
1486 bool
1487 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1488 {
1489 while (1)
1490 {
1491 if (a == b)
1492 return true;
1493 if (a == NULL || b == NULL)
1494 return false;
1495 if (!loc_descr_equal_p_1 (a, b))
1496 return false;
1497
1498 a = a->dw_loc_next;
1499 b = b->dw_loc_next;
1500 }
1501 }
1502
1503
1504 /* Add a constant OFFSET to a location expression. */
1505
1506 static void
1507 loc_descr_plus_const (dw_loc_descr_ref *list_head, HOST_WIDE_INT offset)
1508 {
1509 dw_loc_descr_ref loc;
1510 HOST_WIDE_INT *p;
1511
1512 gcc_assert (*list_head != NULL);
1513
1514 if (!offset)
1515 return;
1516
1517 /* Find the end of the chain. */
1518 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1519 ;
1520
1521 p = NULL;
1522 if (loc->dw_loc_opc == DW_OP_fbreg
1523 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1524 p = &loc->dw_loc_oprnd1.v.val_int;
1525 else if (loc->dw_loc_opc == DW_OP_bregx)
1526 p = &loc->dw_loc_oprnd2.v.val_int;
1527
1528 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1529 offset. Don't optimize if a signed integer overflow would happen. */
1530 if (p != NULL
1531 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1532 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1533 *p += offset;
1534
1535 else if (offset > 0)
1536 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1537
1538 else
1539 {
1540 loc->dw_loc_next
1541 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1542 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1543 }
1544 }
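
/* Editorial example (not part of the original source): adding 8 to an
   expression ending in DW_OP_fbreg -24 just rewrites that operand to -16;
   if the last operation has no foldable operand (or folding would
   overflow), a positive OFFSET instead appends DW_OP_plus_uconst 8, and a
   negative OFFSET appends a constant push of -OFFSET followed by
   DW_OP_minus.  */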
1545
1546 /* Add a constant OFFSET to a location list. */
1547
1548 static void
1549 loc_list_plus_const (dw_loc_list_ref list_head, HOST_WIDE_INT offset)
1550 {
1551 dw_loc_list_ref d;
1552 for (d = list_head; d != NULL; d = d->dw_loc_next)
1553 loc_descr_plus_const (&d->expr, offset);
1554 }
1555
1556 #define DWARF_REF_SIZE \
1557 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1558
1559 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1560 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1561 DW_FORM_data16 with 128 bits. */
1562 #define DWARF_LARGEST_DATA_FORM_BITS \
1563 (dwarf_version >= 5 ? 128 : 64)
1564
1565 /* Utility inline function for construction of ops that were GNU extension
1566 before DWARF 5. */
1567 static inline enum dwarf_location_atom
1568 dwarf_OP (enum dwarf_location_atom op)
1569 {
1570 switch (op)
1571 {
1572 case DW_OP_implicit_pointer:
1573 if (dwarf_version < 5)
1574 return DW_OP_GNU_implicit_pointer;
1575 break;
1576
1577 case DW_OP_entry_value:
1578 if (dwarf_version < 5)
1579 return DW_OP_GNU_entry_value;
1580 break;
1581
1582 case DW_OP_const_type:
1583 if (dwarf_version < 5)
1584 return DW_OP_GNU_const_type;
1585 break;
1586
1587 case DW_OP_regval_type:
1588 if (dwarf_version < 5)
1589 return DW_OP_GNU_regval_type;
1590 break;
1591
1592 case DW_OP_deref_type:
1593 if (dwarf_version < 5)
1594 return DW_OP_GNU_deref_type;
1595 break;
1596
1597 case DW_OP_convert:
1598 if (dwarf_version < 5)
1599 return DW_OP_GNU_convert;
1600 break;
1601
1602 case DW_OP_reinterpret:
1603 if (dwarf_version < 5)
1604 return DW_OP_GNU_reinterpret;
1605 break;
1606
1607 default:
1608 break;
1609 }
1610 return op;
1611 }
1612
1613 /* Similarly for attributes. */
1614 static inline enum dwarf_attribute
1615 dwarf_AT (enum dwarf_attribute at)
1616 {
1617 switch (at)
1618 {
1619 case DW_AT_call_return_pc:
1620 if (dwarf_version < 5)
1621 return DW_AT_low_pc;
1622 break;
1623
1624 case DW_AT_call_tail_call:
1625 if (dwarf_version < 5)
1626 return DW_AT_GNU_tail_call;
1627 break;
1628
1629 case DW_AT_call_origin:
1630 if (dwarf_version < 5)
1631 return DW_AT_abstract_origin;
1632 break;
1633
1634 case DW_AT_call_target:
1635 if (dwarf_version < 5)
1636 return DW_AT_GNU_call_site_target;
1637 break;
1638
1639 case DW_AT_call_target_clobbered:
1640 if (dwarf_version < 5)
1641 return DW_AT_GNU_call_site_target_clobbered;
1642 break;
1643
1644 case DW_AT_call_parameter:
1645 if (dwarf_version < 5)
1646 return DW_AT_abstract_origin;
1647 break;
1648
1649 case DW_AT_call_value:
1650 if (dwarf_version < 5)
1651 return DW_AT_GNU_call_site_value;
1652 break;
1653
1654 case DW_AT_call_data_value:
1655 if (dwarf_version < 5)
1656 return DW_AT_GNU_call_site_data_value;
1657 break;
1658
1659 case DW_AT_call_all_calls:
1660 if (dwarf_version < 5)
1661 return DW_AT_GNU_all_call_sites;
1662 break;
1663
1664 case DW_AT_call_all_tail_calls:
1665 if (dwarf_version < 5)
1666 return DW_AT_GNU_all_tail_call_sites;
1667 break;
1668
1669 case DW_AT_dwo_name:
1670 if (dwarf_version < 5)
1671 return DW_AT_GNU_dwo_name;
1672 break;
1673
1674 default:
1675 break;
1676 }
1677 return at;
1678 }
1679
1680 /* And similarly for tags. */
1681 static inline enum dwarf_tag
1682 dwarf_TAG (enum dwarf_tag tag)
1683 {
1684 switch (tag)
1685 {
1686 case DW_TAG_call_site:
1687 if (dwarf_version < 5)
1688 return DW_TAG_GNU_call_site;
1689 break;
1690
1691 case DW_TAG_call_site_parameter:
1692 if (dwarf_version < 5)
1693 return DW_TAG_GNU_call_site_parameter;
1694 break;
1695
1696 default:
1697 break;
1698 }
1699 return tag;
1700 }
1701
1702 static unsigned long int get_base_type_offset (dw_die_ref);
1703
1704 /* Return the size of a location descriptor. */
1705
1706 static unsigned long
1707 size_of_loc_descr (dw_loc_descr_ref loc)
1708 {
1709 unsigned long size = 1;
1710
1711 switch (loc->dw_loc_opc)
1712 {
1713 case DW_OP_addr:
1714 size += DWARF2_ADDR_SIZE;
1715 break;
1716 case DW_OP_GNU_addr_index:
1717 case DW_OP_GNU_const_index:
1718 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1719 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1720 break;
1721 case DW_OP_const1u:
1722 case DW_OP_const1s:
1723 size += 1;
1724 break;
1725 case DW_OP_const2u:
1726 case DW_OP_const2s:
1727 size += 2;
1728 break;
1729 case DW_OP_const4u:
1730 case DW_OP_const4s:
1731 size += 4;
1732 break;
1733 case DW_OP_const8u:
1734 case DW_OP_const8s:
1735 size += 8;
1736 break;
1737 case DW_OP_constu:
1738 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1739 break;
1740 case DW_OP_consts:
1741 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1742 break;
1743 case DW_OP_pick:
1744 size += 1;
1745 break;
1746 case DW_OP_plus_uconst:
1747 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1748 break;
1749 case DW_OP_skip:
1750 case DW_OP_bra:
1751 size += 2;
1752 break;
1753 case DW_OP_breg0:
1754 case DW_OP_breg1:
1755 case DW_OP_breg2:
1756 case DW_OP_breg3:
1757 case DW_OP_breg4:
1758 case DW_OP_breg5:
1759 case DW_OP_breg6:
1760 case DW_OP_breg7:
1761 case DW_OP_breg8:
1762 case DW_OP_breg9:
1763 case DW_OP_breg10:
1764 case DW_OP_breg11:
1765 case DW_OP_breg12:
1766 case DW_OP_breg13:
1767 case DW_OP_breg14:
1768 case DW_OP_breg15:
1769 case DW_OP_breg16:
1770 case DW_OP_breg17:
1771 case DW_OP_breg18:
1772 case DW_OP_breg19:
1773 case DW_OP_breg20:
1774 case DW_OP_breg21:
1775 case DW_OP_breg22:
1776 case DW_OP_breg23:
1777 case DW_OP_breg24:
1778 case DW_OP_breg25:
1779 case DW_OP_breg26:
1780 case DW_OP_breg27:
1781 case DW_OP_breg28:
1782 case DW_OP_breg29:
1783 case DW_OP_breg30:
1784 case DW_OP_breg31:
1785 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1786 break;
1787 case DW_OP_regx:
1788 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1789 break;
1790 case DW_OP_fbreg:
1791 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1792 break;
1793 case DW_OP_bregx:
1794 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1795 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1796 break;
1797 case DW_OP_piece:
1798 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1799 break;
1800 case DW_OP_bit_piece:
1801 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1802 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1803 break;
1804 case DW_OP_deref_size:
1805 case DW_OP_xderef_size:
1806 size += 1;
1807 break;
1808 case DW_OP_call2:
1809 size += 2;
1810 break;
1811 case DW_OP_call4:
1812 size += 4;
1813 break;
1814 case DW_OP_call_ref:
1815 case DW_OP_GNU_variable_value:
1816 size += DWARF_REF_SIZE;
1817 break;
1818 case DW_OP_implicit_value:
1819 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1820 + loc->dw_loc_oprnd1.v.val_unsigned;
1821 break;
1822 case DW_OP_implicit_pointer:
1823 case DW_OP_GNU_implicit_pointer:
1824 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1825 break;
1826 case DW_OP_entry_value:
1827 case DW_OP_GNU_entry_value:
1828 {
1829 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1830 size += size_of_uleb128 (op_size) + op_size;
1831 break;
1832 }
1833 case DW_OP_const_type:
1834 case DW_OP_GNU_const_type:
1835 {
1836 unsigned long o
1837 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1838 size += size_of_uleb128 (o) + 1;
1839 switch (loc->dw_loc_oprnd2.val_class)
1840 {
1841 case dw_val_class_vec:
1842 size += loc->dw_loc_oprnd2.v.val_vec.length
1843 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1844 break;
1845 case dw_val_class_const:
1846 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1847 break;
1848 case dw_val_class_const_double:
1849 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1850 break;
1851 case dw_val_class_wide_int:
1852 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1853 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1854 break;
1855 default:
1856 gcc_unreachable ();
1857 }
1858 break;
1859 }
1860 case DW_OP_regval_type:
1861 case DW_OP_GNU_regval_type:
1862 {
1863 unsigned long o
1864 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1865 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1866 + size_of_uleb128 (o);
1867 }
1868 break;
1869 case DW_OP_deref_type:
1870 case DW_OP_GNU_deref_type:
1871 {
1872 unsigned long o
1873 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1874 size += 1 + size_of_uleb128 (o);
1875 }
1876 break;
1877 case DW_OP_convert:
1878 case DW_OP_reinterpret:
1879 case DW_OP_GNU_convert:
1880 case DW_OP_GNU_reinterpret:
1881 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1882 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1883 else
1884 {
1885 unsigned long o
1886 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1887 size += size_of_uleb128 (o);
1888 }
1889 break;
1890 case DW_OP_GNU_parameter_ref:
1891 size += 4;
1892 break;
1893 default:
1894 break;
1895 }
1896
1897 return size;
1898 }
1899
1900 /* Return the size of a series of location descriptors. */
1901
1902 unsigned long
1903 size_of_locs (dw_loc_descr_ref loc)
1904 {
1905 dw_loc_descr_ref l;
1906 unsigned long size;
1907
1908 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1909 field, to avoid writing to a PCH file. */
1910 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1911 {
1912 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1913 break;
1914 size += size_of_loc_descr (l);
1915 }
1916 if (! l)
1917 return size;
1918
1919 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1920 {
1921 l->dw_loc_addr = size;
1922 size += size_of_loc_descr (l);
1923 }
1924
1925 return size;
1926 }
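/* A small worked example of the sizes computed above: a lone
DW_OP_fbreg -16 descriptor is 1 opcode byte plus
size_of_sleb128 (-16) == 1 operand byte, so size_of_locs returns 2
for it, while DW_OP_plus_uconst 300 is 1 + size_of_uleb128 (300)
== 1 + 2 == 3 bytes because 300 needs two ULEB128 bytes. */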
1927
1928 /* Return the size of the value in a DW_AT_discr_value attribute. */
1929
1930 static int
1931 size_of_discr_value (dw_discr_value *discr_value)
1932 {
1933 if (discr_value->pos)
1934 return size_of_uleb128 (discr_value->v.uval);
1935 else
1936 return size_of_sleb128 (discr_value->v.sval);
1937 }
1938
1939 /* Return the size of the value in a DW_AT_discr_list attribute. */
1940
1941 static int
1942 size_of_discr_list (dw_discr_list_ref discr_list)
1943 {
1944 int size = 0;
1945
1946 for (dw_discr_list_ref list = discr_list;
1947 list != NULL;
1948 list = list->dw_discr_next)
1949 {
1950 /* One byte for the discriminant value descriptor, and then one or two
1951 LEB128 numbers, depending on whether it's a single case label or a
1952 range label. */
1953 size += 1;
1954 size += size_of_discr_value (&list->dw_discr_lower_bound);
1955 if (list->dw_discr_range != 0)
1956 size += size_of_discr_value (&list->dw_discr_upper_bound);
1957 }
1958 return size;
1959 }
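/* For instance, a discriminant list holding the single case value 5
followed by the range 10..12 (unsigned discriminants) costs
1 + size_of_uleb128 (5) == 2 bytes for the first label plus
1 + size_of_uleb128 (10) + size_of_uleb128 (12) == 3 bytes for the
second, i.e. 5 bytes in total. */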
1960
1961 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1962 static void get_ref_die_offset_label (char *, dw_die_ref);
1963 static unsigned long int get_ref_die_offset (dw_die_ref);
1964
1965 /* Output location description stack opcode's operands (if any).
1966 The for_eh_or_skip parameter controls whether register numbers are
1967 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1968 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1969 info). This should be suppressed for the cases that have not been converted
1970 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1971
1972 static void
1973 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1974 {
1975 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1976 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1977
1978 switch (loc->dw_loc_opc)
1979 {
1980 #ifdef DWARF2_DEBUGGING_INFO
1981 case DW_OP_const2u:
1982 case DW_OP_const2s:
1983 dw2_asm_output_data (2, val1->v.val_int, NULL);
1984 break;
1985 case DW_OP_const4u:
1986 if (loc->dtprel)
1987 {
1988 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
1989 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
1990 val1->v.val_addr);
1991 fputc ('\n', asm_out_file);
1992 break;
1993 }
1994 /* FALLTHRU */
1995 case DW_OP_const4s:
1996 dw2_asm_output_data (4, val1->v.val_int, NULL);
1997 break;
1998 case DW_OP_const8u:
1999 if (loc->dtprel)
2000 {
2001 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2002 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2003 val1->v.val_addr);
2004 fputc ('\n', asm_out_file);
2005 break;
2006 }
2007 /* FALLTHRU */
2008 case DW_OP_const8s:
2009 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2010 dw2_asm_output_data (8, val1->v.val_int, NULL);
2011 break;
2012 case DW_OP_skip:
2013 case DW_OP_bra:
2014 {
2015 int offset;
2016
2017 gcc_assert (val1->val_class == dw_val_class_loc);
2018 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2019
2020 dw2_asm_output_data (2, offset, NULL);
2021 }
2022 break;
2023 case DW_OP_implicit_value:
2024 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2025 switch (val2->val_class)
2026 {
2027 case dw_val_class_const:
2028 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2029 break;
2030 case dw_val_class_vec:
2031 {
2032 unsigned int elt_size = val2->v.val_vec.elt_size;
2033 unsigned int len = val2->v.val_vec.length;
2034 unsigned int i;
2035 unsigned char *p;
2036
2037 if (elt_size > sizeof (HOST_WIDE_INT))
2038 {
2039 elt_size /= 2;
2040 len *= 2;
2041 }
2042 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2043 i < len;
2044 i++, p += elt_size)
2045 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2046 "fp or vector constant word %u", i);
2047 }
2048 break;
2049 case dw_val_class_const_double:
2050 {
2051 unsigned HOST_WIDE_INT first, second;
2052
2053 if (WORDS_BIG_ENDIAN)
2054 {
2055 first = val2->v.val_double.high;
2056 second = val2->v.val_double.low;
2057 }
2058 else
2059 {
2060 first = val2->v.val_double.low;
2061 second = val2->v.val_double.high;
2062 }
2063 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2064 first, NULL);
2065 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2066 second, NULL);
2067 }
2068 break;
2069 case dw_val_class_wide_int:
2070 {
2071 int i;
2072 int len = get_full_len (*val2->v.val_wide);
2073 if (WORDS_BIG_ENDIAN)
2074 for (i = len - 1; i >= 0; --i)
2075 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2076 val2->v.val_wide->elt (i), NULL);
2077 else
2078 for (i = 0; i < len; ++i)
2079 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2080 val2->v.val_wide->elt (i), NULL);
2081 }
2082 break;
2083 case dw_val_class_addr:
2084 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2085 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2086 break;
2087 default:
2088 gcc_unreachable ();
2089 }
2090 break;
2091 #else
2092 case DW_OP_const2u:
2093 case DW_OP_const2s:
2094 case DW_OP_const4u:
2095 case DW_OP_const4s:
2096 case DW_OP_const8u:
2097 case DW_OP_const8s:
2098 case DW_OP_skip:
2099 case DW_OP_bra:
2100 case DW_OP_implicit_value:
2101 /* We currently don't make any attempt to make sure these are
2102 aligned properly like we do for the main unwind info, so
2103 we don't support emitting anything larger than a byte if
2104 we're only doing unwinding. */
2105 gcc_unreachable ();
2106 #endif
2107 case DW_OP_const1u:
2108 case DW_OP_const1s:
2109 dw2_asm_output_data (1, val1->v.val_int, NULL);
2110 break;
2111 case DW_OP_constu:
2112 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2113 break;
2114 case DW_OP_consts:
2115 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2116 break;
2117 case DW_OP_pick:
2118 dw2_asm_output_data (1, val1->v.val_int, NULL);
2119 break;
2120 case DW_OP_plus_uconst:
2121 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2122 break;
2123 case DW_OP_breg0:
2124 case DW_OP_breg1:
2125 case DW_OP_breg2:
2126 case DW_OP_breg3:
2127 case DW_OP_breg4:
2128 case DW_OP_breg5:
2129 case DW_OP_breg6:
2130 case DW_OP_breg7:
2131 case DW_OP_breg8:
2132 case DW_OP_breg9:
2133 case DW_OP_breg10:
2134 case DW_OP_breg11:
2135 case DW_OP_breg12:
2136 case DW_OP_breg13:
2137 case DW_OP_breg14:
2138 case DW_OP_breg15:
2139 case DW_OP_breg16:
2140 case DW_OP_breg17:
2141 case DW_OP_breg18:
2142 case DW_OP_breg19:
2143 case DW_OP_breg20:
2144 case DW_OP_breg21:
2145 case DW_OP_breg22:
2146 case DW_OP_breg23:
2147 case DW_OP_breg24:
2148 case DW_OP_breg25:
2149 case DW_OP_breg26:
2150 case DW_OP_breg27:
2151 case DW_OP_breg28:
2152 case DW_OP_breg29:
2153 case DW_OP_breg30:
2154 case DW_OP_breg31:
2155 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2156 break;
2157 case DW_OP_regx:
2158 {
2159 unsigned r = val1->v.val_unsigned;
2160 if (for_eh_or_skip >= 0)
2161 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2162 gcc_assert (size_of_uleb128 (r)
2163 == size_of_uleb128 (val1->v.val_unsigned));
2164 dw2_asm_output_data_uleb128 (r, NULL);
2165 }
2166 break;
2167 case DW_OP_fbreg:
2168 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2169 break;
2170 case DW_OP_bregx:
2171 {
2172 unsigned r = val1->v.val_unsigned;
2173 if (for_eh_or_skip >= 0)
2174 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2175 gcc_assert (size_of_uleb128 (r)
2176 == size_of_uleb128 (val1->v.val_unsigned));
2177 dw2_asm_output_data_uleb128 (r, NULL);
2178 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2179 }
2180 break;
2181 case DW_OP_piece:
2182 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2183 break;
2184 case DW_OP_bit_piece:
2185 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2186 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2187 break;
2188 case DW_OP_deref_size:
2189 case DW_OP_xderef_size:
2190 dw2_asm_output_data (1, val1->v.val_int, NULL);
2191 break;
2192
2193 case DW_OP_addr:
2194 if (loc->dtprel)
2195 {
2196 if (targetm.asm_out.output_dwarf_dtprel)
2197 {
2198 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2199 DWARF2_ADDR_SIZE,
2200 val1->v.val_addr);
2201 fputc ('\n', asm_out_file);
2202 }
2203 else
2204 gcc_unreachable ();
2205 }
2206 else
2207 {
2208 #ifdef DWARF2_DEBUGGING_INFO
2209 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2210 #else
2211 gcc_unreachable ();
2212 #endif
2213 }
2214 break;
2215
2216 case DW_OP_GNU_addr_index:
2217 case DW_OP_GNU_const_index:
2218 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2219 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2220 "(index into .debug_addr)");
2221 break;
2222
2223 case DW_OP_call2:
2224 case DW_OP_call4:
2225 {
2226 unsigned long die_offset
2227 = get_ref_die_offset (val1->v.val_die_ref.die);
2228 /* Make sure the offset has been computed and that we can encode it as
2229 an operand. */
2230 gcc_assert (die_offset > 0
2231 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2232 ? 0xffff
2233 : 0xffffffff));
2234 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2235 die_offset, NULL);
2236 }
2237 break;
2238
2239 case DW_OP_call_ref:
2240 case DW_OP_GNU_variable_value:
2241 {
2242 char label[MAX_ARTIFICIAL_LABEL_BYTES
2243 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2244 gcc_assert (val1->val_class == dw_val_class_die_ref);
2245 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2246 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2247 }
2248 break;
2249
2250 case DW_OP_implicit_pointer:
2251 case DW_OP_GNU_implicit_pointer:
2252 {
2253 char label[MAX_ARTIFICIAL_LABEL_BYTES
2254 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2255 gcc_assert (val1->val_class == dw_val_class_die_ref);
2256 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2257 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2258 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2259 }
2260 break;
2261
2262 case DW_OP_entry_value:
2263 case DW_OP_GNU_entry_value:
2264 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2265 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2266 break;
2267
2268 case DW_OP_const_type:
2269 case DW_OP_GNU_const_type:
2270 {
2271 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2272 gcc_assert (o);
2273 dw2_asm_output_data_uleb128 (o, NULL);
2274 switch (val2->val_class)
2275 {
2276 case dw_val_class_const:
2277 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2278 dw2_asm_output_data (1, l, NULL);
2279 dw2_asm_output_data (l, val2->v.val_int, NULL);
2280 break;
2281 case dw_val_class_vec:
2282 {
2283 unsigned int elt_size = val2->v.val_vec.elt_size;
2284 unsigned int len = val2->v.val_vec.length;
2285 unsigned int i;
2286 unsigned char *p;
2287
2288 l = len * elt_size;
2289 dw2_asm_output_data (1, l, NULL);
2290 if (elt_size > sizeof (HOST_WIDE_INT))
2291 {
2292 elt_size /= 2;
2293 len *= 2;
2294 }
2295 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2296 i < len;
2297 i++, p += elt_size)
2298 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2299 "fp or vector constant word %u", i);
2300 }
2301 break;
2302 case dw_val_class_const_double:
2303 {
2304 unsigned HOST_WIDE_INT first, second;
2305 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2306
2307 dw2_asm_output_data (1, 2 * l, NULL);
2308 if (WORDS_BIG_ENDIAN)
2309 {
2310 first = val2->v.val_double.high;
2311 second = val2->v.val_double.low;
2312 }
2313 else
2314 {
2315 first = val2->v.val_double.low;
2316 second = val2->v.val_double.high;
2317 }
2318 dw2_asm_output_data (l, first, NULL);
2319 dw2_asm_output_data (l, second, NULL);
2320 }
2321 break;
2322 case dw_val_class_wide_int:
2323 {
2324 int i;
2325 int len = get_full_len (*val2->v.val_wide);
2326 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2327
2328 dw2_asm_output_data (1, len * l, NULL);
2329 if (WORDS_BIG_ENDIAN)
2330 for (i = len - 1; i >= 0; --i)
2331 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2332 else
2333 for (i = 0; i < len; ++i)
2334 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2335 }
2336 break;
2337 default:
2338 gcc_unreachable ();
2339 }
2340 }
2341 break;
2342 case DW_OP_regval_type:
2343 case DW_OP_GNU_regval_type:
2344 {
2345 unsigned r = val1->v.val_unsigned;
2346 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2347 gcc_assert (o);
2348 if (for_eh_or_skip >= 0)
2349 {
2350 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2351 gcc_assert (size_of_uleb128 (r)
2352 == size_of_uleb128 (val1->v.val_unsigned));
2353 }
2354 dw2_asm_output_data_uleb128 (r, NULL);
2355 dw2_asm_output_data_uleb128 (o, NULL);
2356 }
2357 break;
2358 case DW_OP_deref_type:
2359 case DW_OP_GNU_deref_type:
2360 {
2361 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2362 gcc_assert (o);
2363 dw2_asm_output_data (1, val1->v.val_int, NULL);
2364 dw2_asm_output_data_uleb128 (o, NULL);
2365 }
2366 break;
2367 case DW_OP_convert:
2368 case DW_OP_reinterpret:
2369 case DW_OP_GNU_convert:
2370 case DW_OP_GNU_reinterpret:
2371 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2372 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2373 else
2374 {
2375 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2376 gcc_assert (o);
2377 dw2_asm_output_data_uleb128 (o, NULL);
2378 }
2379 break;
2380
2381 case DW_OP_GNU_parameter_ref:
2382 {
2383 unsigned long o;
2384 gcc_assert (val1->val_class == dw_val_class_die_ref);
2385 o = get_ref_die_offset (val1->v.val_die_ref.die);
2386 dw2_asm_output_data (4, o, NULL);
2387 }
2388 break;
2389
2390 default:
2391 /* Other codes have no operands. */
2392 break;
2393 }
2394 }
2395
2396 /* Output a sequence of location operations.
2397 The for_eh_or_skip parameter controls whether register numbers are
2398 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2399 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2400 info). This should be suppressed for the cases that have not been converted
2401 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2402
2403 void
2404 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2405 {
2406 for (; loc != NULL; loc = loc->dw_loc_next)
2407 {
2408 enum dwarf_location_atom opc = loc->dw_loc_opc;
2409 /* Output the opcode. */
2410 if (for_eh_or_skip >= 0
2411 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2412 {
2413 unsigned r = (opc - DW_OP_breg0);
2414 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2415 gcc_assert (r <= 31);
2416 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2417 }
2418 else if (for_eh_or_skip >= 0
2419 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2420 {
2421 unsigned r = (opc - DW_OP_reg0);
2422 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2423 gcc_assert (r <= 31);
2424 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2425 }
2426
2427 dw2_asm_output_data (1, opc,
2428 "%s", dwarf_stack_op_name (opc));
2429
2430 /* Output the operand(s) (if any). */
2431 output_loc_operands (loc, for_eh_or_skip);
2432 }
2433 }
2434
2435 /* Output location description stack opcode's operands (if any).
2436 The output is single bytes on a line, suitable for .cfi_escape. */
2437
2438 static void
2439 output_loc_operands_raw (dw_loc_descr_ref loc)
2440 {
2441 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2442 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2443
2444 switch (loc->dw_loc_opc)
2445 {
2446 case DW_OP_addr:
2447 case DW_OP_GNU_addr_index:
2448 case DW_OP_GNU_const_index:
2449 case DW_OP_implicit_value:
2450 /* We cannot output addresses in .cfi_escape, only bytes. */
2451 gcc_unreachable ();
2452
2453 case DW_OP_const1u:
2454 case DW_OP_const1s:
2455 case DW_OP_pick:
2456 case DW_OP_deref_size:
2457 case DW_OP_xderef_size:
2458 fputc (',', asm_out_file);
2459 dw2_asm_output_data_raw (1, val1->v.val_int);
2460 break;
2461
2462 case DW_OP_const2u:
2463 case DW_OP_const2s:
2464 fputc (',', asm_out_file);
2465 dw2_asm_output_data_raw (2, val1->v.val_int);
2466 break;
2467
2468 case DW_OP_const4u:
2469 case DW_OP_const4s:
2470 fputc (',', asm_out_file);
2471 dw2_asm_output_data_raw (4, val1->v.val_int);
2472 break;
2473
2474 case DW_OP_const8u:
2475 case DW_OP_const8s:
2476 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2477 fputc (',', asm_out_file);
2478 dw2_asm_output_data_raw (8, val1->v.val_int);
2479 break;
2480
2481 case DW_OP_skip:
2482 case DW_OP_bra:
2483 {
2484 int offset;
2485
2486 gcc_assert (val1->val_class == dw_val_class_loc);
2487 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2488
2489 fputc (',', asm_out_file);
2490 dw2_asm_output_data_raw (2, offset);
2491 }
2492 break;
2493
2494 case DW_OP_regx:
2495 {
2496 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2497 gcc_assert (size_of_uleb128 (r)
2498 == size_of_uleb128 (val1->v.val_unsigned));
2499 fputc (',', asm_out_file);
2500 dw2_asm_output_data_uleb128_raw (r);
2501 }
2502 break;
2503
2504 case DW_OP_constu:
2505 case DW_OP_plus_uconst:
2506 case DW_OP_piece:
2507 fputc (',', asm_out_file);
2508 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2509 break;
2510
2511 case DW_OP_bit_piece:
2512 fputc (',', asm_out_file);
2513 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2514 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2515 break;
2516
2517 case DW_OP_consts:
2518 case DW_OP_breg0:
2519 case DW_OP_breg1:
2520 case DW_OP_breg2:
2521 case DW_OP_breg3:
2522 case DW_OP_breg4:
2523 case DW_OP_breg5:
2524 case DW_OP_breg6:
2525 case DW_OP_breg7:
2526 case DW_OP_breg8:
2527 case DW_OP_breg9:
2528 case DW_OP_breg10:
2529 case DW_OP_breg11:
2530 case DW_OP_breg12:
2531 case DW_OP_breg13:
2532 case DW_OP_breg14:
2533 case DW_OP_breg15:
2534 case DW_OP_breg16:
2535 case DW_OP_breg17:
2536 case DW_OP_breg18:
2537 case DW_OP_breg19:
2538 case DW_OP_breg20:
2539 case DW_OP_breg21:
2540 case DW_OP_breg22:
2541 case DW_OP_breg23:
2542 case DW_OP_breg24:
2543 case DW_OP_breg25:
2544 case DW_OP_breg26:
2545 case DW_OP_breg27:
2546 case DW_OP_breg28:
2547 case DW_OP_breg29:
2548 case DW_OP_breg30:
2549 case DW_OP_breg31:
2550 case DW_OP_fbreg:
2551 fputc (',', asm_out_file);
2552 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2553 break;
2554
2555 case DW_OP_bregx:
2556 {
2557 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2558 gcc_assert (size_of_uleb128 (r)
2559 == size_of_uleb128 (val1->v.val_unsigned));
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_uleb128_raw (r);
2562 fputc (',', asm_out_file);
2563 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2564 }
2565 break;
2566
2567 case DW_OP_implicit_pointer:
2568 case DW_OP_entry_value:
2569 case DW_OP_const_type:
2570 case DW_OP_regval_type:
2571 case DW_OP_deref_type:
2572 case DW_OP_convert:
2573 case DW_OP_reinterpret:
2574 case DW_OP_GNU_implicit_pointer:
2575 case DW_OP_GNU_entry_value:
2576 case DW_OP_GNU_const_type:
2577 case DW_OP_GNU_regval_type:
2578 case DW_OP_GNU_deref_type:
2579 case DW_OP_GNU_convert:
2580 case DW_OP_GNU_reinterpret:
2581 case DW_OP_GNU_parameter_ref:
2582 gcc_unreachable ();
2583 break;
2584
2585 default:
2586 /* Other codes have no operands. */
2587 break;
2588 }
2589 }
2590
2591 void
2592 output_loc_sequence_raw (dw_loc_descr_ref loc)
2593 {
2594 while (1)
2595 {
2596 enum dwarf_location_atom opc = loc->dw_loc_opc;
2597 /* Output the opcode. */
2598 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2599 {
2600 unsigned r = (opc - DW_OP_breg0);
2601 r = DWARF2_FRAME_REG_OUT (r, 1);
2602 gcc_assert (r <= 31);
2603 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2604 }
2605 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2606 {
2607 unsigned r = (opc - DW_OP_reg0);
2608 r = DWARF2_FRAME_REG_OUT (r, 1);
2609 gcc_assert (r <= 31);
2610 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2611 }
2612 /* Output the opcode. */
2613 fprintf (asm_out_file, "%#x", opc);
2614 output_loc_operands_raw (loc);
2615
2616 if (!loc->dw_loc_next)
2617 break;
2618 loc = loc->dw_loc_next;
2619
2620 fputc (',', asm_out_file);
2621 }
2622 }
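/* As a rough illustration of the raw form: the two-operation sequence
DW_OP_breg6 8, DW_OP_deref comes out as a comma-separated byte list
along the lines of 0x76,0x8,0x6, which the caller can splice into the
operand list of a .cfi_escape directive. The exact register number
depends on the target's DWARF2_FRAME_REG_OUT mapping. */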
2623
2624 /* This function builds a dwarf location descriptor sequence from a
2625 dw_cfa_location, adding the given OFFSET to the result of the
2626 expression. */
2627
2628 struct dw_loc_descr_node *
2629 build_cfa_loc (dw_cfa_location *cfa, HOST_WIDE_INT offset)
2630 {
2631 struct dw_loc_descr_node *head, *tmp;
2632
2633 offset += cfa->offset;
2634
2635 if (cfa->indirect)
2636 {
2637 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2638 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2639 head->dw_loc_oprnd1.val_entry = NULL;
2640 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2641 add_loc_descr (&head, tmp);
2642 if (offset != 0)
2643 {
2644 tmp = new_loc_descr (DW_OP_plus_uconst, offset, 0);
2645 add_loc_descr (&head, tmp);
2646 }
2647 }
2648 else
2649 head = new_reg_loc_descr (cfa->reg, offset);
2650
2651 return head;
2652 }
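/* Roughly, for a non-indirect CFA of register 6 with offset 16 and an
extra OFFSET of 8 this yields the single descriptor DW_OP_breg6 24,
while for an indirect CFA it yields DW_OP_breg6 <base_offset>,
DW_OP_deref, DW_OP_plus_uconst 24 (the final operation being omitted
when the combined offset is zero). */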
2653
2654 /* This function builds a dwarf location descriptor sequence for
2655 the address at OFFSET from the CFA when the stack is aligned to
2656 ALIGNMENT bytes. */
2657
2658 struct dw_loc_descr_node *
2659 build_cfa_aligned_loc (dw_cfa_location *cfa,
2660 HOST_WIDE_INT offset, HOST_WIDE_INT alignment)
2661 {
2662 struct dw_loc_descr_node *head;
2663 unsigned int dwarf_fp
2664 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2665
2666 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2667 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2668 {
2669 head = new_reg_loc_descr (dwarf_fp, 0);
2670 add_loc_descr (&head, int_loc_descriptor (alignment));
2671 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2672 loc_descr_plus_const (&head, offset);
2673 }
2674 else
2675 head = new_reg_loc_descr (dwarf_fp, offset);
2676 return head;
2677 }
2678 \f
2679 /* And now, the support for symbolic debugging information. */
2680
2681 /* .debug_str support. */
2682
2683 static void dwarf2out_init (const char *);
2684 static void dwarf2out_finish (const char *);
2685 static void dwarf2out_early_finish (const char *);
2686 static void dwarf2out_assembly_start (void);
2687 static void dwarf2out_define (unsigned int, const char *);
2688 static void dwarf2out_undef (unsigned int, const char *);
2689 static void dwarf2out_start_source_file (unsigned, const char *);
2690 static void dwarf2out_end_source_file (unsigned);
2691 static void dwarf2out_function_decl (tree);
2692 static void dwarf2out_begin_block (unsigned, unsigned);
2693 static void dwarf2out_end_block (unsigned, unsigned);
2694 static bool dwarf2out_ignore_block (const_tree);
2695 static void dwarf2out_early_global_decl (tree);
2696 static void dwarf2out_late_global_decl (tree);
2697 static void dwarf2out_type_decl (tree, int);
2698 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2699 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2700 dw_die_ref);
2701 static void dwarf2out_abstract_function (tree);
2702 static void dwarf2out_var_location (rtx_insn *);
2703 static void dwarf2out_size_function (tree);
2704 static void dwarf2out_begin_function (tree);
2705 static void dwarf2out_end_function (unsigned int);
2706 static void dwarf2out_register_main_translation_unit (tree unit);
2707 static void dwarf2out_set_name (tree, tree);
2708 static void dwarf2out_register_external_die (tree decl, const char *sym,
2709 unsigned HOST_WIDE_INT off);
2710 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2711 unsigned HOST_WIDE_INT *off);
2712
2713 /* The debug hooks structure. */
2714
2715 const struct gcc_debug_hooks dwarf2_debug_hooks =
2716 {
2717 dwarf2out_init,
2718 dwarf2out_finish,
2719 dwarf2out_early_finish,
2720 dwarf2out_assembly_start,
2721 dwarf2out_define,
2722 dwarf2out_undef,
2723 dwarf2out_start_source_file,
2724 dwarf2out_end_source_file,
2725 dwarf2out_begin_block,
2726 dwarf2out_end_block,
2727 dwarf2out_ignore_block,
2728 dwarf2out_source_line,
2729 dwarf2out_begin_prologue,
2730 #if VMS_DEBUGGING_INFO
2731 dwarf2out_vms_end_prologue,
2732 dwarf2out_vms_begin_epilogue,
2733 #else
2734 debug_nothing_int_charstar,
2735 debug_nothing_int_charstar,
2736 #endif
2737 dwarf2out_end_epilogue,
2738 dwarf2out_begin_function,
2739 dwarf2out_end_function, /* end_function */
2740 dwarf2out_register_main_translation_unit,
2741 dwarf2out_function_decl, /* function_decl */
2742 dwarf2out_early_global_decl,
2743 dwarf2out_late_global_decl,
2744 dwarf2out_type_decl, /* type_decl */
2745 dwarf2out_imported_module_or_decl,
2746 dwarf2out_die_ref_for_decl,
2747 dwarf2out_register_external_die,
2748 debug_nothing_tree, /* deferred_inline_function */
2749 /* The DWARF 2 backend tries to reduce debugging bloat by not
2750 emitting the abstract description of inline functions until
2751 something tries to reference them. */
2752 dwarf2out_abstract_function, /* outlining_inline_function */
2753 debug_nothing_rtx_code_label, /* label */
2754 debug_nothing_int, /* handle_pch */
2755 dwarf2out_var_location,
2756 debug_nothing_tree, /* inline_entry */
2757 dwarf2out_size_function, /* size_function */
2758 dwarf2out_switch_text_section,
2759 dwarf2out_set_name,
2760 1, /* start_end_main_source_file */
2761 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2762 };
2763
2764 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2765 {
2766 dwarf2out_init,
2767 debug_nothing_charstar,
2768 debug_nothing_charstar,
2769 dwarf2out_assembly_start,
2770 debug_nothing_int_charstar,
2771 debug_nothing_int_charstar,
2772 debug_nothing_int_charstar,
2773 debug_nothing_int,
2774 debug_nothing_int_int, /* begin_block */
2775 debug_nothing_int_int, /* end_block */
2776 debug_true_const_tree, /* ignore_block */
2777 dwarf2out_source_line, /* source_line */
2778 debug_nothing_int_int_charstar, /* begin_prologue */
2779 debug_nothing_int_charstar, /* end_prologue */
2780 debug_nothing_int_charstar, /* begin_epilogue */
2781 debug_nothing_int_charstar, /* end_epilogue */
2782 debug_nothing_tree, /* begin_function */
2783 debug_nothing_int, /* end_function */
2784 debug_nothing_tree, /* register_main_translation_unit */
2785 debug_nothing_tree, /* function_decl */
2786 debug_nothing_tree, /* early_global_decl */
2787 debug_nothing_tree, /* late_global_decl */
2788 debug_nothing_tree_int, /* type_decl */
2789 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2790 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2791 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2792 debug_nothing_tree, /* deferred_inline_function */
2793 debug_nothing_tree, /* outlining_inline_function */
2794 debug_nothing_rtx_code_label, /* label */
2795 debug_nothing_int, /* handle_pch */
2796 debug_nothing_rtx_insn, /* var_location */
2797 debug_nothing_tree, /* inline_entry */
2798 debug_nothing_tree, /* size_function */
2799 debug_nothing_void, /* switch_text_section */
2800 debug_nothing_tree_tree, /* set_name */
2801 0, /* start_end_main_source_file */
2802 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2803 };
2804 \f
2805 /* NOTE: In the comments in this file, many references are made to
2806 "Debugging Information Entries". This term is abbreviated as `DIE'
2807 throughout the remainder of this file. */
2808
2809 /* An internal representation of the DWARF output is built, and then
2810 walked to generate the DWARF debugging info. The walk of the internal
2811 representation is done after the entire program has been compiled.
2812 The types below are used to describe the internal representation. */
2813
2814 /* Whether to put type DIEs into their own section .debug_types instead
2815 of making them part of the .debug_info section. Only supported for
2816 Dwarf V4 or higher, and only if the user didn't disable it through
2817 -fno-debug-types-section. It is more efficient to put them in
2818 separate comdat sections since the linker will then be able to
2819 remove duplicates. But not all tools support .debug_types sections
2820 yet. For Dwarf V5 or higher .debug_types no longer exists; type
2821 units are instead emitted with the DW_UT_type unit type in .debug_info. */
2822
2823 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2824
2825 /* Various DIEs use offsets relative to the beginning of the
2826 .debug_info section to refer to each other. */
2827
2828 typedef long int dw_offset;
2829
2830 struct comdat_type_node;
2831
2832 /* The entries in the line_info table more-or-less mirror the opcodes
2833 that are used in the real dwarf line table. Arrays of these entries
2834 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2835 supported. */
2836
2837 enum dw_line_info_opcode {
2838 /* Emit DW_LNE_set_address; the operand is the label index. */
2839 LI_set_address,
2840
2841 /* Emit a row to the matrix with the given line. This may be done
2842 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2843 special opcodes. */
2844 LI_set_line,
2845
2846 /* Emit a DW_LNS_set_file. */
2847 LI_set_file,
2848
2849 /* Emit a DW_LNS_set_column. */
2850 LI_set_column,
2851
2852 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2853 LI_negate_stmt,
2854
2855 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2856 LI_set_prologue_end,
2857 LI_set_epilogue_begin,
2858
2859 /* Emit a DW_LNE_set_discriminator. */
2860 LI_set_discriminator
2861 };
2862
2863 typedef struct GTY(()) dw_line_info_struct {
2864 enum dw_line_info_opcode opcode;
2865 unsigned int val;
2866 } dw_line_info_entry;
2867
2868
2869 struct GTY(()) dw_line_info_table {
2870 /* The label that marks the end of this section. */
2871 const char *end_label;
2872
2873 /* The values for the last row of the matrix, as collected in the table.
2874 These are used to minimize the changes to the next row. */
2875 unsigned int file_num;
2876 unsigned int line_num;
2877 unsigned int column_num;
2878 int discrim_num;
2879 bool is_stmt;
2880 bool in_use;
2881
2882 vec<dw_line_info_entry, va_gc> *entries;
2883 };
2884
2885
2886 /* Each DIE attribute has a field specifying the attribute kind,
2887 a link to the next attribute in the chain, and an attribute value.
2888 Attributes are typically linked below the DIE they modify. */
2889
2890 typedef struct GTY(()) dw_attr_struct {
2891 enum dwarf_attribute dw_attr;
2892 dw_val_node dw_attr_val;
2893 }
2894 dw_attr_node;
2895
2896
2897 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2898 The children of each node form a circular list linked by
2899 die_sib. die_child points to the node *before* the "first" child node. */
2900
2901 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2902 union die_symbol_or_type_node
2903 {
2904 const char * GTY ((tag ("0"))) die_symbol;
2905 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2906 }
2907 GTY ((desc ("%0.comdat_type_p"))) die_id;
2908 vec<dw_attr_node, va_gc> *die_attr;
2909 dw_die_ref die_parent;
2910 dw_die_ref die_child;
2911 dw_die_ref die_sib;
2912 dw_die_ref die_definition; /* ref from a specification to its definition */
2913 dw_offset die_offset;
2914 unsigned long die_abbrev;
2915 int die_mark;
2916 unsigned int decl_id;
2917 enum dwarf_tag die_tag;
2918 /* DIE is used and must not be pruned as unused. */
2919 BOOL_BITFIELD die_perennial_p : 1;
2920 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2921 /* Set for an external reference to die_symbol when die_offset contains
2922 an extra offset relative to that symbol. */
2923 BOOL_BITFIELD with_offset : 1;
2924 /* Whether this DIE was removed from the DIE tree, for example via
2925 prune_unused_types. We don't consider those present from the
2926 DIE lookup routines. */
2927 BOOL_BITFIELD removed : 1;
2928 /* Lots of spare bits. */
2929 }
2930 die_node;
2931
2932 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2933 static bool early_dwarf;
2934 static bool early_dwarf_finished;
2935 struct set_early_dwarf {
2936 bool saved;
2937 set_early_dwarf () : saved(early_dwarf)
2938 {
2939 gcc_assert (! early_dwarf_finished);
2940 early_dwarf = true;
2941 }
2942 ~set_early_dwarf () { early_dwarf = saved; }
2943 };
2944
2945 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2946 #define FOR_EACH_CHILD(die, c, expr) do { \
2947 c = die->die_child; \
2948 if (c) do { \
2949 c = c->die_sib; \
2950 expr; \
2951 } while (c != die->die_child); \
2952 } while (0)
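/* For example, the recursive DIE walks later in this file typically
use it as FOR_EACH_CHILD (die, c, add_sibling_attributes (c)); note
that the traversal starts at die->die_child->die_sib, i.e. at the
"first" child of the circular sibling list. */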
2953
2954 /* The pubname structure. */
2955
2956 typedef struct GTY(()) pubname_struct {
2957 dw_die_ref die;
2958 const char *name;
2959 }
2960 pubname_entry;
2961
2962
2963 struct GTY(()) dw_ranges {
2964 const char *label;
2965 /* If this is positive, it's a block number, otherwise it's a
2966 bitwise-negated index into dw_ranges_by_label. */
2967 int num;
2968 /* Index for the range list for DW_FORM_rnglistx. */
2969 unsigned int idx : 31;
2970 /* True if this range might be in a different section
2971 from the previous entry. */
2972 unsigned int maybe_new_sec : 1;
2973 };
2974
2975 /* A structure to hold a macinfo entry. */
2976
2977 typedef struct GTY(()) macinfo_struct {
2978 unsigned char code;
2979 unsigned HOST_WIDE_INT lineno;
2980 const char *info;
2981 }
2982 macinfo_entry;
2983
2984
2985 struct GTY(()) dw_ranges_by_label {
2986 const char *begin;
2987 const char *end;
2988 };
2989
2990 /* The comdat type node structure. */
2991 struct GTY(()) comdat_type_node
2992 {
2993 dw_die_ref root_die;
2994 dw_die_ref type_die;
2995 dw_die_ref skeleton_die;
2996 char signature[DWARF_TYPE_SIGNATURE_SIZE];
2997 comdat_type_node *next;
2998 };
2999
3000 /* A list of DIEs for which we can't determine ancestry (parent_die
3001 field) just yet. Later in dwarf2out_finish we will fill in the
3002 missing bits. */
3003 typedef struct GTY(()) limbo_die_struct {
3004 dw_die_ref die;
3005 /* The tree for which this DIE was created. We use this to
3006 determine ancestry later. */
3007 tree created_for;
3008 struct limbo_die_struct *next;
3009 }
3010 limbo_die_node;
3011
3012 typedef struct skeleton_chain_struct
3013 {
3014 dw_die_ref old_die;
3015 dw_die_ref new_die;
3016 struct skeleton_chain_struct *parent;
3017 }
3018 skeleton_chain_node;
3019
3020 /* Define a macro which returns nonzero for a TYPE_DECL which was
3021 implicitly generated for a type.
3022
3023 Note that, unlike the C front-end (which generates a TYPE_DECL node
3024 with a NULL DECL_NAME for each complete tagged type, each array type,
3025 and each function type node created) the C++ front-end generates
3026 a _named_ TYPE_DECL node for each tagged type node created.
3027 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3028 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3029 front-end, but for each type, tagged or not. */
3030
3031 #define TYPE_DECL_IS_STUB(decl) \
3032 (DECL_NAME (decl) == NULL_TREE \
3033 || (DECL_ARTIFICIAL (decl) \
3034 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3035 /* This is necessary for stub decls that \
3036 appear in nested inline functions. */ \
3037 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3038 && (decl_ultimate_origin (decl) \
3039 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3040
3041 /* Information concerning the compilation unit's programming
3042 language, and compiler version. */
3043
3044 /* Fixed size portion of the DWARF compilation unit header. */
3045 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3046 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3047 + (dwarf_version >= 5 ? 4 : 3))
3048
3049 /* Fixed size portion of the DWARF comdat type unit header. */
3050 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3051 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3052 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3053
3054 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3055 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3056 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3057
3058 /* Fixed size portion of public names info. */
3059 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3060
3061 /* Fixed size portion of the address range info. */
3062 #define DWARF_ARANGES_HEADER_SIZE \
3063 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3064 DWARF2_ADDR_SIZE * 2) \
3065 - DWARF_INITIAL_LENGTH_SIZE)
3066
3067 /* Size of padding portion in the address range info. It must be
3068 aligned to twice the pointer size. */
3069 #define DWARF_ARANGES_PAD_SIZE \
3070 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3071 DWARF2_ADDR_SIZE * 2) \
3072 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
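/* For instance, with 32-bit DWARF (4-byte initial length and offsets)
and 8-byte addresses, the unpadded header is 4 + 4 + 4 == 12 bytes;
DWARF_ROUND brings that up to 16, so DWARF_ARANGES_HEADER_SIZE is
16 - 4 == 12 and DWARF_ARANGES_PAD_SIZE is 16 - 12 == 4. */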
3073
3074 /* Use assembler line directives if available. */
3075 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3076 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3077 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3078 #else
3079 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3080 #endif
3081 #endif
3082
3083 /* Minimum line offset in a special line info opcode.
3084 This value was chosen to give a reasonable range of values. */
3085 #define DWARF_LINE_BASE -10
3086
3087 /* First special line opcode - leave room for the standard opcodes. */
3088 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3089
3090 /* Range of line offsets in a special line info opcode. */
3091 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
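/* With the values above (DW_LNS_set_isa is 12, giving an opcode base
of 13 and a line range of 242), a special opcode encodes
(line_delta - DWARF_LINE_BASE) + DWARF_LINE_RANGE * addr_advance
+ DWARF_LINE_OPCODE_BASE; e.g. advancing the line by 1 with no
address advance uses opcode (1 + 10) + 0 + 13 == 24. Only
combinations that stay <= 255 can be encoded this way. */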
3092
3093 /* Flag that indicates the initial value of the is_stmt_start flag.
3094 In the present implementation, we do not mark any lines as
3095 the beginning of a source statement, because that information
3096 is not made available by the GCC front-end. */
3097 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3098
3099 /* Maximum number of operations per instruction bundle. */
3100 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3101 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3102 #endif
3103
3104 /* This location is used by calc_die_sizes() to keep track of
3105 the offset of each DIE within the .debug_info section. */
3106 static unsigned long next_die_offset;
3107
3108 /* Record the root of the DIEs built for the current compilation unit. */
3109 static GTY(()) dw_die_ref single_comp_unit_die;
3110
3111 /* A list of type DIEs that have been separated into comdat sections. */
3112 static GTY(()) comdat_type_node *comdat_type_list;
3113
3114 /* A list of CU DIEs that have been separated. */
3115 static GTY(()) limbo_die_node *cu_die_list;
3116
3117 /* A list of DIEs with a NULL parent waiting to be relocated. */
3118 static GTY(()) limbo_die_node *limbo_die_list;
3119
3120 /* A list of DIEs for which we may have to generate
3121 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3122 static GTY(()) limbo_die_node *deferred_asm_name;
3123
3124 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3125 {
3126 typedef const char *compare_type;
3127
3128 static hashval_t hash (dwarf_file_data *);
3129 static bool equal (dwarf_file_data *, const char *);
3130 };
3131
3132 /* Filenames referenced by this compilation unit. */
3133 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3134
3135 struct decl_die_hasher : ggc_ptr_hash<die_node>
3136 {
3137 typedef tree compare_type;
3138
3139 static hashval_t hash (die_node *);
3140 static bool equal (die_node *, tree);
3141 };
3142 /* A hash table of references to DIEs that describe declarations.
3143 The key is a DECL_UID(), a unique number identifying each decl. */
3144 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3145
3146 struct GTY ((for_user)) variable_value_struct {
3147 unsigned int decl_id;
3148 vec<dw_die_ref, va_gc> *dies;
3149 };
3150
3151 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3152 {
3153 typedef tree compare_type;
3154
3155 static hashval_t hash (variable_value_struct *);
3156 static bool equal (variable_value_struct *, tree);
3157 };
3158 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3159 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3160 the DECL_CONTEXT of the referenced VAR_DECLs. */
3161 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3162
3163 struct block_die_hasher : ggc_ptr_hash<die_struct>
3164 {
3165 static hashval_t hash (die_struct *);
3166 static bool equal (die_struct *, die_struct *);
3167 };
3168
3169 /* A hash table of references to DIEs that describe COMMON blocks.
3170 The key is DECL_UID() ^ die_parent. */
3171 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3172
3173 typedef struct GTY(()) die_arg_entry_struct {
3174 dw_die_ref die;
3175 tree arg;
3176 } die_arg_entry;
3177
3178
3179 /* Node of the variable location list. */
3180 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3181 /* Either a NOTE_INSN_VAR_LOCATION, or, for SRA-optimized variables,
3182 an EXPR_LIST chain. For small bitsizes, the bitsize is encoded
3183 in the mode of the EXPR_LIST node and the first EXPR_LIST operand
3184 is either a NOTE_INSN_VAR_LOCATION for a piece with a known
3185 location or NULL for padding. For larger bitsizes, the
3186 mode is 0 and the first operand is a CONCAT with the bitsize
3187 as the first CONCAT operand and either a NOTE_INSN_VAR_LOCATION
3188 or NULL as the second operand. */
3189 rtx GTY (()) loc;
3190 const char * GTY (()) label;
3191 struct var_loc_node * GTY (()) next;
3192 };
3193
3194 /* Variable location list. */
3195 struct GTY ((for_user)) var_loc_list_def {
3196 struct var_loc_node * GTY (()) first;
3197
3198 /* Pointer to the last or second-to-last element of the
3199 chained list. If the list is empty, both first and
3200 last are NULL. If the list contains just one node,
3201 or the last node is certainly not redundant, this points
3202 to the last node; otherwise it points to the second-to-last one.
3203 Do not mark it for GC because it is marked through the chain. */
3204 struct var_loc_node * GTY ((skip ("%h"))) last;
3205
3206 /* Pointer to the last element before a section switch;
3207 if NULL, either sections weren't switched or first
3208 is after the section switch. */
3209 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3210
3211 /* DECL_UID of the variable decl. */
3212 unsigned int decl_id;
3213 };
3214 typedef struct var_loc_list_def var_loc_list;
3215
3216 /* Call argument location list. */
3217 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3218 rtx GTY (()) call_arg_loc_note;
3219 const char * GTY (()) label;
3220 tree GTY (()) block;
3221 bool tail_call_p;
3222 rtx GTY (()) symbol_ref;
3223 struct call_arg_loc_node * GTY (()) next;
3224 };
3225
3226
3227 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3228 {
3229 typedef const_tree compare_type;
3230
3231 static hashval_t hash (var_loc_list *);
3232 static bool equal (var_loc_list *, const_tree);
3233 };
3234
3235 /* Table of decl location linked lists. */
3236 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3237
3238 /* Head and tail of call_arg_loc chain. */
3239 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3240 static struct call_arg_loc_node *call_arg_loc_last;
3241
3242 /* Number of call sites in the current function. */
3243 static int call_site_count = -1;
3244 /* Number of tail call sites in the current function. */
3245 static int tail_call_site_count = -1;
3246
3247 /* A cached location list. */
3248 struct GTY ((for_user)) cached_dw_loc_list_def {
3249 /* The DECL_UID of the decl that this entry describes. */
3250 unsigned int decl_id;
3251
3252 /* The cached location list. */
3253 dw_loc_list_ref loc_list;
3254 };
3255 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3256
3257 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3258 {
3259
3260 typedef const_tree compare_type;
3261
3262 static hashval_t hash (cached_dw_loc_list *);
3263 static bool equal (cached_dw_loc_list *, const_tree);
3264 };
3265
3266 /* Table of cached location lists. */
3267 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3268
3269 /* A vector of references to DIEs that are uniquely identified by their tag,
3270 presence/absence of child DIEs, and list of attribute/value pairs. */
3271 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3272
3273 /* A hash map to remember the stack usage for DWARF procedures. The value
3274 stored is the difference in stack size from before the DWARF procedure
3275 invocation to after it returns. In other words, for a DWARF procedure
3276 that consumes N stack slots and pushes M, this stores M - N. */
3277 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3278
3279 /* A global counter for generating labels for line number data. */
3280 static unsigned int line_info_label_num;
3281
3282 /* The current table to which we should emit line number information
3283 for the current function. This will be set up at the beginning of
3284 assembly for the function. */
3285 static GTY(()) dw_line_info_table *cur_line_info_table;
3286
3287 /* The two default tables of line number info. */
3288 static GTY(()) dw_line_info_table *text_section_line_info;
3289 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3290
3291 /* The set of all non-default tables of line number info. */
3292 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3293
3294 /* A flag to tell pubnames/types export if there is an info section to
3295 refer to. */
3296 static bool info_section_emitted;
3297
3298 /* A pointer to the base of a table that contains a list of publicly
3299 accessible names. */
3300 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3301
3302 /* A pointer to the base of a table that contains a list of publicly
3303 accessible types. */
3304 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3305
3306 /* A pointer to the base of a table that contains a list of macro
3307 defines/undefines (and file start/end markers). */
3308 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3309
3310 /* True if .debug_macinfo or .debug_macros section is going to be
3311 emitted. */
3312 #define have_macinfo \
3313 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3314 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3315 && !macinfo_table->is_empty ())
3316
3317 /* Vector of dies for which we should generate .debug_ranges info. */
3318 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3319
3320 /* Vector of pairs of labels referenced in ranges_table. */
3321 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3322
3323 /* Whether we have location lists that need outputting. */
3324 static GTY(()) bool have_location_lists;
3325
3326 /* Unique label counter. */
3327 static GTY(()) unsigned int loclabel_num;
3328
3329 /* Unique label counter for point-of-call tables. */
3330 static GTY(()) unsigned int poc_label_num;
3331
3332 /* The last file entry emitted by maybe_emit_file(). */
3333 static GTY(()) struct dwarf_file_data * last_emitted_file;
3334
3335 /* Number of internal labels generated by gen_internal_sym(). */
3336 static GTY(()) int label_num;
3337
3338 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3339
3340 /* Instances of generic types for which we need to generate debug
3341 info that describes their generic parameters and arguments. That
3342 generation needs to happen once all types are properly laid out, so
3343 we do it at the end of compilation. */
3344 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3345
3346 /* Offset from the "steady-state frame pointer" to the frame base,
3347 within the current function. */
3348 static HOST_WIDE_INT frame_pointer_fb_offset;
3349 static bool frame_pointer_fb_offset_valid;
3350
3351 static vec<dw_die_ref> base_types;
3352
3353 /* Flags to represent a set of attribute classes for attributes that represent
3354 a scalar value (bounds, pointers, ...). */
3355 enum dw_scalar_form
3356 {
3357 dw_scalar_form_constant = 0x01,
3358 dw_scalar_form_exprloc = 0x02,
3359 dw_scalar_form_reference = 0x04
3360 };
3361
3362 /* Forward declarations for functions defined in this file. */
3363
3364 static int is_pseudo_reg (const_rtx);
3365 static tree type_main_variant (tree);
3366 static int is_tagged_type (const_tree);
3367 static const char *dwarf_tag_name (unsigned);
3368 static const char *dwarf_attr_name (unsigned);
3369 static const char *dwarf_form_name (unsigned);
3370 static tree decl_ultimate_origin (const_tree);
3371 static tree decl_class_context (tree);
3372 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3373 static inline enum dw_val_class AT_class (dw_attr_node *);
3374 static inline unsigned int AT_index (dw_attr_node *);
3375 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3376 static inline unsigned AT_flag (dw_attr_node *);
3377 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3378 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3379 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3380 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3381 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3382 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3383 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3384 unsigned int, unsigned char *);
3385 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3386 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3387 static inline const char *AT_string (dw_attr_node *);
3388 static enum dwarf_form AT_string_form (dw_attr_node *);
3389 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3390 static void add_AT_specification (dw_die_ref, dw_die_ref);
3391 static inline dw_die_ref AT_ref (dw_attr_node *);
3392 static inline int AT_ref_external (dw_attr_node *);
3393 static inline void set_AT_ref_external (dw_attr_node *, int);
3394 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3395 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3396 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3397 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3398 dw_loc_list_ref);
3399 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3400 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3401 static void remove_addr_table_entry (addr_table_entry *);
3402 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3403 static inline rtx AT_addr (dw_attr_node *);
3404 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3405 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3406 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3407 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3408 const char *);
3409 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3410 unsigned HOST_WIDE_INT);
3411 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3412 unsigned long, bool);
3413 static inline const char *AT_lbl (dw_attr_node *);
3414 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3415 static const char *get_AT_low_pc (dw_die_ref);
3416 static const char *get_AT_hi_pc (dw_die_ref);
3417 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3418 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3419 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3420 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3421 static bool is_cxx (void);
3422 static bool is_cxx (const_tree);
3423 static bool is_fortran (void);
3424 static bool is_ada (void);
3425 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3426 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3427 static void add_child_die (dw_die_ref, dw_die_ref);
3428 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3429 static dw_die_ref lookup_type_die (tree);
3430 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3431 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3432 static void equate_type_number_to_die (tree, dw_die_ref);
3433 static dw_die_ref lookup_decl_die (tree);
3434 static var_loc_list *lookup_decl_loc (const_tree);
3435 static void equate_decl_number_to_die (tree, dw_die_ref);
3436 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3437 static void print_spaces (FILE *);
3438 static void print_die (dw_die_ref, FILE *);
3439 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3440 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3441 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3442 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3443 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3444 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3445 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3446 struct md5_ctx *, int *);
3447 struct checksum_attributes;
3448 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3449 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3450 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3451 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3452 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3453 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3454 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3455 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3456 static int is_type_die (dw_die_ref);
3457 static int is_comdat_die (dw_die_ref);
3458 static inline bool is_template_instantiation (dw_die_ref);
3459 static int is_declaration_die (dw_die_ref);
3460 static int should_move_die_to_comdat (dw_die_ref);
3461 static dw_die_ref clone_as_declaration (dw_die_ref);
3462 static dw_die_ref clone_die (dw_die_ref);
3463 static dw_die_ref clone_tree (dw_die_ref);
3464 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3465 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3466 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3467 static dw_die_ref generate_skeleton (dw_die_ref);
3468 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3469 dw_die_ref,
3470 dw_die_ref);
3471 static void break_out_comdat_types (dw_die_ref);
3472 static void copy_decls_for_unworthy_types (dw_die_ref);
3473
3474 static void add_sibling_attributes (dw_die_ref);
3475 static void output_location_lists (dw_die_ref);
3476 static int constant_size (unsigned HOST_WIDE_INT);
3477 static unsigned long size_of_die (dw_die_ref);
3478 static void calc_die_sizes (dw_die_ref);
3479 static void calc_base_type_die_sizes (void);
3480 static void mark_dies (dw_die_ref);
3481 static void unmark_dies (dw_die_ref);
3482 static void unmark_all_dies (dw_die_ref);
3483 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3484 static unsigned long size_of_aranges (void);
3485 static enum dwarf_form value_format (dw_attr_node *);
3486 static void output_value_format (dw_attr_node *);
3487 static void output_abbrev_section (void);
3488 static void output_die_abbrevs (unsigned long, dw_die_ref);
3489 static void output_die (dw_die_ref);
3490 static void output_compilation_unit_header (enum dwarf_unit_type);
3491 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3492 static void output_comdat_type_unit (comdat_type_node *);
3493 static const char *dwarf2_name (tree, int);
3494 static void add_pubname (tree, dw_die_ref);
3495 static void add_enumerator_pubname (const char *, dw_die_ref);
3496 static void add_pubname_string (const char *, dw_die_ref);
3497 static void add_pubtype (tree, dw_die_ref);
3498 static void output_pubnames (vec<pubname_entry, va_gc> *);
3499 static void output_aranges (void);
3500 static unsigned int add_ranges (const_tree, bool = false);
3501 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3502 bool *, bool);
3503 static void output_ranges (void);
3504 static dw_line_info_table *new_line_info_table (void);
3505 static void output_line_info (bool);
3506 static void output_file_names (void);
3507 static dw_die_ref base_type_die (tree, bool);
3508 static int is_base_type (tree);
3509 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3510 static int decl_quals (const_tree);
3511 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3512 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3513 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3514 static int type_is_enum (const_tree);
3515 static unsigned int dbx_reg_number (const_rtx);
3516 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3517 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3518 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3519 enum var_init_status);
3520 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3521 enum var_init_status);
3522 static dw_loc_descr_ref based_loc_descr (rtx, HOST_WIDE_INT,
3523 enum var_init_status);
3524 static int is_based_loc (const_rtx);
3525 static bool resolve_one_addr (rtx *);
3526 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3527 enum var_init_status);
3528 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3529 enum var_init_status);
3530 struct loc_descr_context;
3531 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3532 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3533 static dw_loc_list_ref loc_list_from_tree (tree, int,
3534 struct loc_descr_context *);
3535 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3536 struct loc_descr_context *);
3537 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3538 static tree field_type (const_tree);
3539 static unsigned int simple_type_align_in_bits (const_tree);
3540 static unsigned int simple_decl_align_in_bits (const_tree);
3541 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3542 struct vlr_context;
3543 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3544 HOST_WIDE_INT *);
3545 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3546 dw_loc_list_ref);
3547 static void add_data_member_location_attribute (dw_die_ref, tree,
3548 struct vlr_context *);
3549 static bool add_const_value_attribute (dw_die_ref, rtx);
3550 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3551 static void insert_wide_int (const wide_int &, unsigned char *, int);
3552 static void insert_float (const_rtx, unsigned char *);
3553 static rtx rtl_for_decl_location (tree);
3554 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3555 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3556 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3557 static void add_name_attribute (dw_die_ref, const char *);
3558 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3559 static void add_comp_dir_attribute (dw_die_ref);
3560 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3561 struct loc_descr_context *);
3562 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3563 struct loc_descr_context *);
3564 static void add_subscript_info (dw_die_ref, tree, bool);
3565 static void add_byte_size_attribute (dw_die_ref, tree);
3566 static void add_alignment_attribute (dw_die_ref, tree);
3567 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3568 struct vlr_context *);
3569 static void add_bit_size_attribute (dw_die_ref, tree);
3570 static void add_prototyped_attribute (dw_die_ref, tree);
3571 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3572 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3573 static void add_src_coords_attributes (dw_die_ref, tree);
3574 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3575 static void add_discr_value (dw_die_ref, dw_discr_value *);
3576 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3577 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3578 static void push_decl_scope (tree);
3579 static void pop_decl_scope (void);
3580 static dw_die_ref scope_die_for (tree, dw_die_ref);
3581 static inline int local_scope_p (dw_die_ref);
3582 static inline int class_scope_p (dw_die_ref);
3583 static inline int class_or_namespace_scope_p (dw_die_ref);
3584 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3585 static void add_calling_convention_attribute (dw_die_ref, tree);
3586 static const char *type_tag (const_tree);
3587 static tree member_declared_type (const_tree);
3588 #if 0
3589 static const char *decl_start_label (tree);
3590 #endif
3591 static void gen_array_type_die (tree, dw_die_ref);
3592 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3593 #if 0
3594 static void gen_entry_point_die (tree, dw_die_ref);
3595 #endif
3596 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3597 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3598 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3599 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3600 static void gen_formal_types_die (tree, dw_die_ref);
3601 static void gen_subprogram_die (tree, dw_die_ref);
3602 static void gen_variable_die (tree, tree, dw_die_ref);
3603 static void gen_const_die (tree, dw_die_ref);
3604 static void gen_label_die (tree, dw_die_ref);
3605 static void gen_lexical_block_die (tree, dw_die_ref);
3606 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3607 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3608 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3609 static dw_die_ref gen_compile_unit_die (const char *);
3610 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3611 static void gen_member_die (tree, dw_die_ref);
3612 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3613 enum debug_info_usage);
3614 static void gen_subroutine_type_die (tree, dw_die_ref);
3615 static void gen_typedef_die (tree, dw_die_ref);
3616 static void gen_type_die (tree, dw_die_ref);
3617 static void gen_block_die (tree, dw_die_ref);
3618 static void decls_for_scope (tree, dw_die_ref);
3619 static bool is_naming_typedef_decl (const_tree);
3620 static inline dw_die_ref get_context_die (tree);
3621 static void gen_namespace_die (tree, dw_die_ref);
3622 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3623 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3624 static dw_die_ref force_decl_die (tree);
3625 static dw_die_ref force_type_die (tree);
3626 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3627 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3628 static struct dwarf_file_data * lookup_filename (const char *);
3629 static void retry_incomplete_types (void);
3630 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3631 static void gen_generic_params_dies (tree);
3632 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3633 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3634 static void splice_child_die (dw_die_ref, dw_die_ref);
3635 static int file_info_cmp (const void *, const void *);
3636 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3637 const char *, const char *);
3638 static void output_loc_list (dw_loc_list_ref);
3639 static char *gen_internal_sym (const char *);
3640 static bool want_pubnames (void);
3641
3642 static void prune_unmark_dies (dw_die_ref);
3643 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3644 static void prune_unused_types_mark (dw_die_ref, int);
3645 static void prune_unused_types_walk (dw_die_ref);
3646 static void prune_unused_types_walk_attribs (dw_die_ref);
3647 static void prune_unused_types_prune (dw_die_ref);
3648 static void prune_unused_types (void);
3649 static int maybe_emit_file (struct dwarf_file_data *fd);
3650 static inline const char *AT_vms_delta1 (dw_attr_node *);
3651 static inline const char *AT_vms_delta2 (dw_attr_node *);
3652 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3653 const char *, const char *);
3654 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3655 static void gen_remaining_tmpl_value_param_die_attribute (void);
3656 static bool generic_type_p (tree);
3657 static void schedule_generic_params_dies_gen (tree t);
3658 static void gen_scheduled_generic_parms_dies (void);
3659 static void resolve_variable_values (void);
3660
3661 static const char *comp_dir_string (void);
3662
3663 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3664
3665 /* enum for tracking thread-local variables whose address is really an offset
3666 relative to the TLS pointer, which will need link-time relocation, but will
3667 not need relocation by the DWARF consumer. */
3668
3669 enum dtprel_bool
3670 {
3671 dtprel_false = 0,
3672 dtprel_true = 1
3673 };
3674
3675 /* Return the operator to use for an address of a variable. For dtprel_true, we
3676 use DW_OP_const*. For regular variables, which need both link-time
3677 relocation and consumer-level relocation (e.g., to account for shared objects
3678 loaded at a random address), we use DW_OP_addr*. */
3679
3680 static inline enum dwarf_location_atom
3681 dw_addr_op (enum dtprel_bool dtprel)
3682 {
3683 if (dtprel == dtprel_true)
3684 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3685 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3686 else
3687 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3688 }
3689
3690 /* Return a pointer to a newly allocated address location description. If
3691 dwarf_split_debug_info is true, then record the address with the appropriate
3692 relocation. */
3693 static inline dw_loc_descr_ref
3694 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3695 {
3696 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3697
3698 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3699 ref->dw_loc_oprnd1.v.val_addr = addr;
3700 ref->dtprel = dtprel;
3701 if (dwarf_split_debug_info)
3702 ref->dw_loc_oprnd1.val_entry
3703 = add_addr_table_entry (addr,
3704 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3705 else
3706 ref->dw_loc_oprnd1.val_entry = NULL;
3707
3708 return ref;
3709 }
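
/* Illustrative sketch, kept out of the build with #if 0: building an
   address location description for a variable that lives at a link-time
   symbol.  The symbol name "my_var" is hypothetical; real callers pass
   the rtl recorded for the decl.  */
#if 0
static dw_loc_descr_ref
example_symbol_location (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "my_var");

  /* dtprel_false: a regular (non-TLS) variable, so dw_addr_op selects
     DW_OP_addr, or DW_OP_GNU_addr_index under dwarf_split_debug_info,
     in which case the address is also entered into .debug_addr.  */
  return new_addr_loc_descr (sym, dtprel_false);
}
#endif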
3710
3711 /* Section names used to hold DWARF debugging information. */
3712
3713 #ifndef DEBUG_INFO_SECTION
3714 #define DEBUG_INFO_SECTION ".debug_info"
3715 #endif
3716 #ifndef DEBUG_DWO_INFO_SECTION
3717 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3718 #endif
3719 #ifndef DEBUG_LTO_INFO_SECTION
3720 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3721 #endif
3722 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3723 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3724 #endif
3725 #ifndef DEBUG_ABBREV_SECTION
3726 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3727 #endif
3728 #ifndef DEBUG_LTO_ABBREV_SECTION
3729 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3730 #endif
3731 #ifndef DEBUG_DWO_ABBREV_SECTION
3732 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3733 #endif
3734 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3735 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3736 #endif
3737 #ifndef DEBUG_ARANGES_SECTION
3738 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3739 #endif
3740 #ifndef DEBUG_ADDR_SECTION
3741 #define DEBUG_ADDR_SECTION ".debug_addr"
3742 #endif
3743 #ifndef DEBUG_MACINFO_SECTION
3744 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3745 #endif
3746 #ifndef DEBUG_LTO_MACINFO_SECTION
3747 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3748 #endif
3749 #ifndef DEBUG_DWO_MACINFO_SECTION
3750 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3751 #endif
3752 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3753 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3754 #endif
3755 #ifndef DEBUG_MACRO_SECTION
3756 #define DEBUG_MACRO_SECTION ".debug_macro"
3757 #endif
3758 #ifndef DEBUG_LTO_MACRO_SECTION
3759 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3760 #endif
3761 #ifndef DEBUG_DWO_MACRO_SECTION
3762 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3763 #endif
3764 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3765 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3766 #endif
3767 #ifndef DEBUG_LINE_SECTION
3768 #define DEBUG_LINE_SECTION ".debug_line"
3769 #endif
3770 #ifndef DEBUG_LTO_LINE_SECTION
3771 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3772 #endif
3773 #ifndef DEBUG_DWO_LINE_SECTION
3774 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3775 #endif
3776 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3777 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3778 #endif
3779 #ifndef DEBUG_LOC_SECTION
3780 #define DEBUG_LOC_SECTION ".debug_loc"
3781 #endif
3782 #ifndef DEBUG_DWO_LOC_SECTION
3783 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3784 #endif
3785 #ifndef DEBUG_LOCLISTS_SECTION
3786 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3787 #endif
3788 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3789 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3790 #endif
3791 #ifndef DEBUG_PUBNAMES_SECTION
3792 #define DEBUG_PUBNAMES_SECTION \
3793 ((debug_generate_pub_sections == 2) \
3794 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3795 #endif
3796 #ifndef DEBUG_PUBTYPES_SECTION
3797 #define DEBUG_PUBTYPES_SECTION \
3798 ((debug_generate_pub_sections == 2) \
3799 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3800 #endif
3801 #ifndef DEBUG_STR_OFFSETS_SECTION
3802 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3803 #endif
3804 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3805 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3806 #endif
3807 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
3808 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
3809 #endif
3810 #ifndef DEBUG_STR_SECTION
3811 #define DEBUG_STR_SECTION ".debug_str"
3812 #endif
3813 #ifndef DEBUG_LTO_STR_SECTION
3814 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
3815 #endif
3816 #ifndef DEBUG_STR_DWO_SECTION
3817 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3818 #endif
3819 #ifndef DEBUG_LTO_STR_DWO_SECTION
3820 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
3821 #endif
3822 #ifndef DEBUG_RANGES_SECTION
3823 #define DEBUG_RANGES_SECTION ".debug_ranges"
3824 #endif
3825 #ifndef DEBUG_RNGLISTS_SECTION
3826 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3827 #endif
3828 #ifndef DEBUG_LINE_STR_SECTION
3829 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3830 #endif
3831 #ifndef DEBUG_LTO_LINE_STR_SECTION
3832 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
3833 #endif
3834
3835 /* Standard ELF section names for compiled code and data. */
3836 #ifndef TEXT_SECTION_NAME
3837 #define TEXT_SECTION_NAME ".text"
3838 #endif
3839
3840 /* Section flags for .debug_str section. */
3841 #define DEBUG_STR_SECTION_FLAGS \
3842 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3843 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3844 : SECTION_DEBUG)
3845
3846 /* Section flags for .debug_str.dwo section. */
3847 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3848
3849 /* Attribute used to refer to the macro section. */
3850 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
3851 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
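
/* That is: DWARF 5 and later use the standard DW_AT_macros attribute;
   with -gstrict-dwarf on earlier versions we fall back to
   DW_AT_macro_info; otherwise the GNU extension DW_AT_GNU_macros is
   used. */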
3852
3853 /* Labels we insert at the beginning of sections, so that we can reference
3854 them instead of the section names themselves. */
3855
3856 #ifndef TEXT_SECTION_LABEL
3857 #define TEXT_SECTION_LABEL "Ltext"
3858 #endif
3859 #ifndef COLD_TEXT_SECTION_LABEL
3860 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3861 #endif
3862 #ifndef DEBUG_LINE_SECTION_LABEL
3863 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3864 #endif
3865 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3866 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3867 #endif
3868 #ifndef DEBUG_INFO_SECTION_LABEL
3869 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3870 #endif
3871 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3872 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3873 #endif
3874 #ifndef DEBUG_ABBREV_SECTION_LABEL
3875 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3876 #endif
3877 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3878 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3879 #endif
3880 #ifndef DEBUG_ADDR_SECTION_LABEL
3881 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3882 #endif
3883 #ifndef DEBUG_LOC_SECTION_LABEL
3884 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3885 #endif
3886 #ifndef DEBUG_RANGES_SECTION_LABEL
3887 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3888 #endif
3889 #ifndef DEBUG_MACINFO_SECTION_LABEL
3890 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3891 #endif
3892 #ifndef DEBUG_MACRO_SECTION_LABEL
3893 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3894 #endif
3895 #define SKELETON_COMP_DIE_ABBREV 1
3896 #define SKELETON_TYPE_DIE_ABBREV 2
3897
3898 /* Definitions of defaults for formats and names of various special
3899 (artificial) labels which may be generated within this file (when the -g
3900 option is used and DWARF2_DEBUGGING_INFO is in effect).
3901 If necessary, these may be overridden from within the tm.h file, but
3902 typically, overriding these defaults is unnecessary. */
3903
3904 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3905 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3906 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3907 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3908 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3909 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3910 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3911 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3912 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3913 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3914 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3915 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3916 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3917 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3918 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3919
3920 #ifndef TEXT_END_LABEL
3921 #define TEXT_END_LABEL "Letext"
3922 #endif
3923 #ifndef COLD_END_LABEL
3924 #define COLD_END_LABEL "Letext_cold"
3925 #endif
3926 #ifndef BLOCK_BEGIN_LABEL
3927 #define BLOCK_BEGIN_LABEL "LBB"
3928 #endif
3929 #ifndef BLOCK_END_LABEL
3930 #define BLOCK_END_LABEL "LBE"
3931 #endif
3932 #ifndef LINE_CODE_LABEL
3933 #define LINE_CODE_LABEL "LM"
3934 #endif
3935
3936 \f
3937 /* Return the root of the DIEs built for the current compilation unit. */
3938 static dw_die_ref
3939 comp_unit_die (void)
3940 {
3941 if (!single_comp_unit_die)
3942 single_comp_unit_die = gen_compile_unit_die (NULL);
3943 return single_comp_unit_die;
3944 }
3945
3946 /* We allow a language front-end to designate a function that is to be
3947 called to "demangle" any name before it is put into a DIE. */
3948
3949 static const char *(*demangle_name_func) (const char *);
3950
3951 void
3952 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3953 {
3954 demangle_name_func = func;
3955 }
3956
3957 /* Test if rtl node points to a pseudo register. */
3958
3959 static inline int
3960 is_pseudo_reg (const_rtx rtl)
3961 {
3962 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3963 || (GET_CODE (rtl) == SUBREG
3964 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3965 }
3966
3967 /* Return a reference to a type, with its const and volatile qualifiers
3968 removed. */
3969
3970 static inline tree
3971 type_main_variant (tree type)
3972 {
3973 type = TYPE_MAIN_VARIANT (type);
3974
3975 /* ??? There really should be only one main variant among any group of
3976 variants of a given type (and all of the MAIN_VARIANT values for all
3977 members of the group should point to that one type) but sometimes the C
3978 front-end messes this up for array types, so we work around that bug
3979 here. */
3980 if (TREE_CODE (type) == ARRAY_TYPE)
3981 while (type != TYPE_MAIN_VARIANT (type))
3982 type = TYPE_MAIN_VARIANT (type);
3983
3984 return type;
3985 }
3986
3987 /* Return nonzero if the given type node represents a tagged type. */
3988
3989 static inline int
3990 is_tagged_type (const_tree type)
3991 {
3992 enum tree_code code = TREE_CODE (type);
3993
3994 return (code == RECORD_TYPE || code == UNION_TYPE
3995 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
3996 }
3997
3998 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
3999
4000 static void
4001 get_ref_die_offset_label (char *label, dw_die_ref ref)
4002 {
4003 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4004 }
4005
4006 /* Return die_offset of a DIE reference to a base type. */
4007
4008 static unsigned long int
4009 get_base_type_offset (dw_die_ref ref)
4010 {
4011 if (ref->die_offset)
4012 return ref->die_offset;
4013 if (comp_unit_die ()->die_abbrev)
4014 {
4015 calc_base_type_die_sizes ();
4016 gcc_assert (ref->die_offset);
4017 }
4018 return ref->die_offset;
4019 }
4020
4021 /* Return die_offset of a DIE reference other than base type. */
4022
4023 static unsigned long int
4024 get_ref_die_offset (dw_die_ref ref)
4025 {
4026 gcc_assert (ref->die_offset);
4027 return ref->die_offset;
4028 }
4029
4030 /* Convert a DIE tag into its string name. */
4031
4032 static const char *
4033 dwarf_tag_name (unsigned int tag)
4034 {
4035 const char *name = get_DW_TAG_name (tag);
4036
4037 if (name != NULL)
4038 return name;
4039
4040 return "DW_TAG_<unknown>";
4041 }
4042
4043 /* Convert a DWARF attribute code into its string name. */
4044
4045 static const char *
4046 dwarf_attr_name (unsigned int attr)
4047 {
4048 const char *name;
4049
4050 switch (attr)
4051 {
4052 #if VMS_DEBUGGING_INFO
4053 case DW_AT_HP_prologue:
4054 return "DW_AT_HP_prologue";
4055 #else
4056 case DW_AT_MIPS_loop_unroll_factor:
4057 return "DW_AT_MIPS_loop_unroll_factor";
4058 #endif
4059
4060 #if VMS_DEBUGGING_INFO
4061 case DW_AT_HP_epilogue:
4062 return "DW_AT_HP_epilogue";
4063 #else
4064 case DW_AT_MIPS_stride:
4065 return "DW_AT_MIPS_stride";
4066 #endif
4067 }
4068
4069 name = get_DW_AT_name (attr);
4070
4071 if (name != NULL)
4072 return name;
4073
4074 return "DW_AT_<unknown>";
4075 }
4076
4077 /* Convert a DWARF value form code into its string name. */
4078
4079 static const char *
4080 dwarf_form_name (unsigned int form)
4081 {
4082 const char *name = get_DW_FORM_name (form);
4083
4084 if (name != NULL)
4085 return name;
4086
4087 return "DW_FORM_<unknown>";
4088 }
4089 \f
4090 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4091 instance of an inlined instance of a decl which is local to an inline
4092 function, so we have to trace all of the way back through the origin chain
4093 to find out what sort of node actually served as the original seed for the
4094 given block. */
4095
4096 static tree
4097 decl_ultimate_origin (const_tree decl)
4098 {
4099 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4100 return NULL_TREE;
4101
4102 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4103 we're trying to output the abstract instance of this function. */
4104 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4105 return NULL_TREE;
4106
4107 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4108 most distant ancestor, this should never happen. */
4109 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4110
4111 return DECL_ABSTRACT_ORIGIN (decl);
4112 }
4113
4114 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4115 of a virtual function may refer to a base class, so we check the 'this'
4116 parameter. */
4117
4118 static tree
4119 decl_class_context (tree decl)
4120 {
4121 tree context = NULL_TREE;
4122
4123 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4124 context = DECL_CONTEXT (decl);
4125 else
4126 context = TYPE_MAIN_VARIANT
4127 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4128
4129 if (context && !TYPE_P (context))
4130 context = NULL_TREE;
4131
4132 return context;
4133 }
4134 \f
4135 /* Add an attribute/value pair to a DIE. */
4136
4137 static inline void
4138 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4139 {
4140 /* Maybe this should be an assert? */
4141 if (die == NULL)
4142 return;
4143
4144 if (flag_checking)
4145 {
4146 /* Check we do not add duplicate attrs. Can't use get_AT here
4147 because that recurses to the specification/abstract origin DIE. */
4148 dw_attr_node *a;
4149 unsigned ix;
4150 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4151 gcc_assert (a->dw_attr != attr->dw_attr);
4152 }
4153
4154 vec_safe_reserve (die->die_attr, 1);
4155 vec_safe_push (die->die_attr, *attr);
4156 }
4157
4158 static inline enum dw_val_class
4159 AT_class (dw_attr_node *a)
4160 {
4161 return a->dw_attr_val.val_class;
4162 }
4163
4164 /* Return the index for any attribute that will be referenced with a
4165 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4166 are stored in dw_attr_val.v.val_str so that they can be
4167 reference-counted for pruning. */
4168
4169 static inline unsigned int
4170 AT_index (dw_attr_node *a)
4171 {
4172 if (AT_class (a) == dw_val_class_str)
4173 return a->dw_attr_val.v.val_str->index;
4174 else if (a->dw_attr_val.val_entry != NULL)
4175 return a->dw_attr_val.val_entry->index;
4176 return NOT_INDEXED;
4177 }
4178
4179 /* Add a flag value attribute to a DIE. */
4180
4181 static inline void
4182 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4183 {
4184 dw_attr_node attr;
4185
4186 attr.dw_attr = attr_kind;
4187 attr.dw_attr_val.val_class = dw_val_class_flag;
4188 attr.dw_attr_val.val_entry = NULL;
4189 attr.dw_attr_val.v.val_flag = flag;
4190 add_dwarf_attr (die, &attr);
4191 }
4192
4193 static inline unsigned
4194 AT_flag (dw_attr_node *a)
4195 {
4196 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4197 return a->dw_attr_val.v.val_flag;
4198 }
4199
4200 /* Add a signed integer attribute value to a DIE. */
4201
4202 static inline void
4203 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4204 {
4205 dw_attr_node attr;
4206
4207 attr.dw_attr = attr_kind;
4208 attr.dw_attr_val.val_class = dw_val_class_const;
4209 attr.dw_attr_val.val_entry = NULL;
4210 attr.dw_attr_val.v.val_int = int_val;
4211 add_dwarf_attr (die, &attr);
4212 }
4213
4214 static inline HOST_WIDE_INT
4215 AT_int (dw_attr_node *a)
4216 {
4217 gcc_assert (a && (AT_class (a) == dw_val_class_const
4218 || AT_class (a) == dw_val_class_const_implicit));
4219 return a->dw_attr_val.v.val_int;
4220 }
4221
4222 /* Add an unsigned integer attribute value to a DIE. */
4223
4224 static inline void
4225 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4226 unsigned HOST_WIDE_INT unsigned_val)
4227 {
4228 dw_attr_node attr;
4229
4230 attr.dw_attr = attr_kind;
4231 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4232 attr.dw_attr_val.val_entry = NULL;
4233 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4234 add_dwarf_attr (die, &attr);
4235 }
4236
4237 static inline unsigned HOST_WIDE_INT
4238 AT_unsigned (dw_attr_node *a)
4239 {
4240 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4241 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4242 return a->dw_attr_val.v.val_unsigned;
4243 }
4244
4245 /* Add an unsigned wide integer attribute value to a DIE. */
4246
4247 static inline void
4248 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4249 const wide_int& w)
4250 {
4251 dw_attr_node attr;
4252
4253 attr.dw_attr = attr_kind;
4254 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4255 attr.dw_attr_val.val_entry = NULL;
4256 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4257 *attr.dw_attr_val.v.val_wide = w;
4258 add_dwarf_attr (die, &attr);
4259 }
4260
4261 /* Add an unsigned double integer attribute value to a DIE. */
4262
4263 static inline void
4264 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4265 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4266 {
4267 dw_attr_node attr;
4268
4269 attr.dw_attr = attr_kind;
4270 attr.dw_attr_val.val_class = dw_val_class_const_double;
4271 attr.dw_attr_val.val_entry = NULL;
4272 attr.dw_attr_val.v.val_double.high = high;
4273 attr.dw_attr_val.v.val_double.low = low;
4274 add_dwarf_attr (die, &attr);
4275 }
4276
4277 /* Add an array-of-bytes (dw_val_class_vec) attribute value to a DIE. */
4278
4279 static inline void
4280 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4281 unsigned int length, unsigned int elt_size, unsigned char *array)
4282 {
4283 dw_attr_node attr;
4284
4285 attr.dw_attr = attr_kind;
4286 attr.dw_attr_val.val_class = dw_val_class_vec;
4287 attr.dw_attr_val.val_entry = NULL;
4288 attr.dw_attr_val.v.val_vec.length = length;
4289 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4290 attr.dw_attr_val.v.val_vec.array = array;
4291 add_dwarf_attr (die, &attr);
4292 }
4293
4294 /* Add an 8-byte data attribute value to a DIE. */
4295
4296 static inline void
4297 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4298 unsigned char data8[8])
4299 {
4300 dw_attr_node attr;
4301
4302 attr.dw_attr = attr_kind;
4303 attr.dw_attr_val.val_class = dw_val_class_data8;
4304 attr.dw_attr_val.val_entry = NULL;
4305 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4306 add_dwarf_attr (die, &attr);
4307 }
4308
4309 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4310 dwarf_split_debug_info, address attributes in dies destined for the
4311 final executable have force_direct set to avoid using indexed
4312 references. */
4313
4314 static inline void
4315 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4316 bool force_direct)
4317 {
4318 dw_attr_node attr;
4319 char * lbl_id;
4320
4321 lbl_id = xstrdup (lbl_low);
4322 attr.dw_attr = DW_AT_low_pc;
4323 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4324 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4325 if (dwarf_split_debug_info && !force_direct)
4326 attr.dw_attr_val.val_entry
4327 = add_addr_table_entry (lbl_id, ate_kind_label);
4328 else
4329 attr.dw_attr_val.val_entry = NULL;
4330 add_dwarf_attr (die, &attr);
4331
4332 attr.dw_attr = DW_AT_high_pc;
4333 if (dwarf_version < 4)
4334 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4335 else
4336 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4337 lbl_id = xstrdup (lbl_high);
4338 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4339 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4340 && dwarf_split_debug_info && !force_direct)
4341 attr.dw_attr_val.val_entry
4342 = add_addr_table_entry (lbl_id, ate_kind_label);
4343 else
4344 attr.dw_attr_val.val_entry = NULL;
4345 add_dwarf_attr (die, &attr);
4346 }
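
/* Illustrative sketch, kept out of the build with #if 0: attaching the
   bounds of a function body to its DIE.  The label names are
   hypothetical.  For DWARF 4 and later the high-pc attribute is classed
   dw_val_class_high_pc and is later emitted as an offset from
   DW_AT_low_pc; for older versions it stays a plain label address.  */
#if 0
static void
example_low_high_pc (dw_die_ref subprogram_die)
{
  add_AT_low_high_pc (subprogram_die, "LFB0", "LFE0",
                      /*force_direct=*/false);
}
#endif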
4347
4348 /* Hash and equality functions for debug_str_hash. */
4349
4350 hashval_t
4351 indirect_string_hasher::hash (indirect_string_node *x)
4352 {
4353 return htab_hash_string (x->str);
4354 }
4355
4356 bool
4357 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4358 {
4359 return strcmp (x1->str, x2) == 0;
4360 }
4361
4362 /* Find or add STR in the given string hash table and bump its refcount. */
4363
4364 static struct indirect_string_node *
4365 find_AT_string_in_table (const char *str,
4366 hash_table<indirect_string_hasher> *table)
4367 {
4368 struct indirect_string_node *node;
4369
4370 indirect_string_node **slot
4371 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4372 if (*slot == NULL)
4373 {
4374 node = ggc_cleared_alloc<indirect_string_node> ();
4375 node->str = ggc_strdup (str);
4376 *slot = node;
4377 }
4378 else
4379 node = *slot;
4380
4381 node->refcount++;
4382 return node;
4383 }
4384
4385 /* Add STR to the indirect string hash table. */
4386
4387 static struct indirect_string_node *
4388 find_AT_string (const char *str)
4389 {
4390 if (! debug_str_hash)
4391 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4392
4393 return find_AT_string_in_table (str, debug_str_hash);
4394 }
4395
4396 /* Add a string attribute value to a DIE. */
4397
4398 static inline void
4399 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4400 {
4401 dw_attr_node attr;
4402 struct indirect_string_node *node;
4403
4404 node = find_AT_string (str);
4405
4406 attr.dw_attr = attr_kind;
4407 attr.dw_attr_val.val_class = dw_val_class_str;
4408 attr.dw_attr_val.val_entry = NULL;
4409 attr.dw_attr_val.v.val_str = node;
4410 add_dwarf_attr (die, &attr);
4411 }
4412
4413 static inline const char *
4414 AT_string (dw_attr_node *a)
4415 {
4416 gcc_assert (a && AT_class (a) == dw_val_class_str);
4417 return a->dw_attr_val.v.val_str->str;
4418 }
4419
4420 /* Call this function directly to bypass AT_string_form's logic to put
4421 the string inline in the die. */
4422
4423 static void
4424 set_indirect_string (struct indirect_string_node *node)
4425 {
4426 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4427 /* If the string is already indirect, this is a no-op. */
4428 if (node->form == DW_FORM_strp
4429 || node->form == DW_FORM_line_strp
4430 || node->form == DW_FORM_GNU_str_index)
4431 {
4432 gcc_assert (node->label);
4433 return;
4434 }
4435 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4436 ++dw2_string_counter;
4437 node->label = xstrdup (label);
4438
4439 if (!dwarf_split_debug_info)
4440 {
4441 node->form = DW_FORM_strp;
4442 node->index = NOT_INDEXED;
4443 }
4444 else
4445 {
4446 node->form = DW_FORM_GNU_str_index;
4447 node->index = NO_INDEX_ASSIGNED;
4448 }
4449 }
4450
4451 /* A helper function for dwarf2out_finish, called to reset indirect
4452 string decisions done for early LTO dwarf output before fat object
4453 dwarf output. */
4454
4455 int
4456 reset_indirect_string (indirect_string_node **h, void *)
4457 {
4458 struct indirect_string_node *node = *h;
4459 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4460 {
4461 free (node->label);
4462 node->label = NULL;
4463 node->form = (dwarf_form) 0;
4464 node->index = 0;
4465 }
4466 return 1;
4467 }
4468
4469 /* Find out whether a string should be output inline in DIE
4470 or out-of-line in .debug_str section. */
4471
4472 static enum dwarf_form
4473 find_string_form (struct indirect_string_node *node)
4474 {
4475 unsigned int len;
4476
4477 if (node->form)
4478 return node->form;
4479
4480 len = strlen (node->str) + 1;
4481
4482 /* If the string is shorter or equal to the size of the reference, it is
4483 always better to put it inline. */
4484 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4485 return node->form = DW_FORM_string;
4486
4487 /* If we cannot expect the linker to merge strings in .debug_str
4488 section, only put it into .debug_str if it is worthwhile even within
4489 this single module. */
4490 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4491 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4492 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4493 return node->form = DW_FORM_string;
4494
4495 set_indirect_string (node);
4496
4497 return node->form;
4498 }
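
/* Worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no SECTION_MERGE support: a 3-byte string fits within the size of
   a reference, so it always stays DW_FORM_string.  A 20-byte string
   referenced twice costs (20 - 4) * 2 = 32 bytes of inline data beyond
   what two offsets would take, which exceeds the single 20-byte copy
   that .debug_str would hold, so it is made indirect. */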
4499
4500 /* Find out whether the string referenced from the attribute should be
4501 output inline in DIE or out-of-line in .debug_str section. */
4502
4503 static enum dwarf_form
4504 AT_string_form (dw_attr_node *a)
4505 {
4506 gcc_assert (a && AT_class (a) == dw_val_class_str);
4507 return find_string_form (a->dw_attr_val.v.val_str);
4508 }
4509
4510 /* Add a DIE reference attribute value to a DIE. */
4511
4512 static inline void
4513 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4514 {
4515 dw_attr_node attr;
4516 gcc_checking_assert (targ_die != NULL);
4517
4518 /* With LTO we can end up trying to reference something we didn't create
4519 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4520 if (targ_die == NULL)
4521 return;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4527 attr.dw_attr_val.v.val_die_ref.external = 0;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Change DIE reference REF to point to NEW_DIE instead. */
4532
4533 static inline void
4534 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4535 {
4536 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4537 ref->dw_attr_val.v.val_die_ref.die = new_die;
4538 ref->dw_attr_val.v.val_die_ref.external = 0;
4539 }
4540
4541 /* Add an AT_specification attribute to a DIE, and also make the back
4542 pointer from the specification to the definition. */
4543
4544 static inline void
4545 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4546 {
4547 add_AT_die_ref (die, DW_AT_specification, targ_die);
4548 gcc_assert (!targ_die->die_definition);
4549 targ_die->die_definition = die;
4550 }
4551
4552 static inline dw_die_ref
4553 AT_ref (dw_attr_node *a)
4554 {
4555 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4556 return a->dw_attr_val.v.val_die_ref.die;
4557 }
4558
4559 static inline int
4560 AT_ref_external (dw_attr_node *a)
4561 {
4562 if (a && AT_class (a) == dw_val_class_die_ref)
4563 return a->dw_attr_val.v.val_die_ref.external;
4564
4565 return 0;
4566 }
4567
4568 static inline void
4569 set_AT_ref_external (dw_attr_node *a, int i)
4570 {
4571 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4572 a->dw_attr_val.v.val_die_ref.external = i;
4573 }
4574
4575 /* Add an FDE reference attribute value to a DIE. */
4576
4577 static inline void
4578 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4579 {
4580 dw_attr_node attr;
4581
4582 attr.dw_attr = attr_kind;
4583 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4584 attr.dw_attr_val.val_entry = NULL;
4585 attr.dw_attr_val.v.val_fde_index = targ_fde;
4586 add_dwarf_attr (die, &attr);
4587 }
4588
4589 /* Add a location description attribute value to a DIE. */
4590
4591 static inline void
4592 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4593 {
4594 dw_attr_node attr;
4595
4596 attr.dw_attr = attr_kind;
4597 attr.dw_attr_val.val_class = dw_val_class_loc;
4598 attr.dw_attr_val.val_entry = NULL;
4599 attr.dw_attr_val.v.val_loc = loc;
4600 add_dwarf_attr (die, &attr);
4601 }
4602
4603 static inline dw_loc_descr_ref
4604 AT_loc (dw_attr_node *a)
4605 {
4606 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4607 return a->dw_attr_val.v.val_loc;
4608 }
4609
4610 static inline void
4611 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4612 {
4613 dw_attr_node attr;
4614
4615 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4616 return;
4617
4618 attr.dw_attr = attr_kind;
4619 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4620 attr.dw_attr_val.val_entry = NULL;
4621 attr.dw_attr_val.v.val_loc_list = loc_list;
4622 add_dwarf_attr (die, &attr);
4623 have_location_lists = true;
4624 }
4625
4626 static inline dw_loc_list_ref
4627 AT_loc_list (dw_attr_node *a)
4628 {
4629 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4630 return a->dw_attr_val.v.val_loc_list;
4631 }
4632
4633 static inline dw_loc_list_ref *
4634 AT_loc_list_ptr (dw_attr_node *a)
4635 {
4636 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4637 return &a->dw_attr_val.v.val_loc_list;
4638 }
4639
4640 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4641 {
4642 static hashval_t hash (addr_table_entry *);
4643 static bool equal (addr_table_entry *, addr_table_entry *);
4644 };
4645
4646 /* Table of entries into the .debug_addr section. */
4647
4648 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4649
4650 /* Hash an addr_table_entry. */
4651
4652 hashval_t
4653 addr_hasher::hash (addr_table_entry *a)
4654 {
4655 inchash::hash hstate;
4656 switch (a->kind)
4657 {
4658 case ate_kind_rtx:
4659 hstate.add_int (0);
4660 break;
4661 case ate_kind_rtx_dtprel:
4662 hstate.add_int (1);
4663 break;
4664 case ate_kind_label:
4665 return htab_hash_string (a->addr.label);
4666 default:
4667 gcc_unreachable ();
4668 }
4669 inchash::add_rtx (a->addr.rtl, hstate);
4670 return hstate.end ();
4671 }
4672
4673 /* Determine equality for two addr_table_entries. */
4674
4675 bool
4676 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4677 {
4678 if (a1->kind != a2->kind)
4679 return 0;
4680 switch (a1->kind)
4681 {
4682 case ate_kind_rtx:
4683 case ate_kind_rtx_dtprel:
4684 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4685 case ate_kind_label:
4686 return strcmp (a1->addr.label, a2->addr.label) == 0;
4687 default:
4688 gcc_unreachable ();
4689 }
4690 }
4691
4692 /* Initialize an addr_table_entry. */
4693
4694 void
4695 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4696 {
4697 e->kind = kind;
4698 switch (kind)
4699 {
4700 case ate_kind_rtx:
4701 case ate_kind_rtx_dtprel:
4702 e->addr.rtl = (rtx) addr;
4703 break;
4704 case ate_kind_label:
4705 e->addr.label = (char *) addr;
4706 break;
4707 }
4708 e->refcount = 0;
4709 e->index = NO_INDEX_ASSIGNED;
4710 }
4711
4712 /* Look up or create the address table entry for ADDR of kind KIND and
4713 bump its reference count. Defer setting an index until output time. */
4714
4715 static addr_table_entry *
4716 add_addr_table_entry (void *addr, enum ate_kind kind)
4717 {
4718 addr_table_entry *node;
4719 addr_table_entry finder;
4720
4721 gcc_assert (dwarf_split_debug_info);
4722 if (! addr_index_table)
4723 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4724 init_addr_table_entry (&finder, kind, addr);
4725 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4726
4727 if (*slot == HTAB_EMPTY_ENTRY)
4728 {
4729 node = ggc_cleared_alloc<addr_table_entry> ();
4730 init_addr_table_entry (node, kind, addr);
4731 *slot = node;
4732 }
4733 else
4734 node = *slot;
4735
4736 node->refcount++;
4737 return node;
4738 }
4739
4740 /* Remove an entry from the addr table by decrementing its refcount.
4741 Strictly, decrementing the refcount would be enough, but the
4742 assertion that the entry is actually in the table has found
4743 bugs. */
4744
4745 static void
4746 remove_addr_table_entry (addr_table_entry *entry)
4747 {
4748 gcc_assert (dwarf_split_debug_info && addr_index_table);
4749 /* After an index is assigned, the table is frozen. */
4750 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4751 entry->refcount--;
4752 }
4753
4754 /* Given a location list, remove all addresses it refers to from the
4755 address_table. */
4756
4757 static void
4758 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4759 {
4760 for (; descr; descr = descr->dw_loc_next)
4761 if (descr->dw_loc_oprnd1.val_entry != NULL)
4762 {
4763 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4764 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4765 }
4766 }
4767
4768 /* A helper function for dwarf2out_finish called through
4769 htab_traverse. Assign an addr_table_entry its index. All entries
4770 must be collected into the table when this function is called,
4771 because the indexing code relies on htab_traverse to traverse nodes
4772 in the same order for each run. */
4773
4774 int
4775 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4776 {
4777 addr_table_entry *node = *h;
4778
4779 /* Don't index unreferenced nodes. */
4780 if (node->refcount == 0)
4781 return 1;
4782
4783 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4784 node->index = *index;
4785 *index += 1;
4786
4787 return 1;
4788 }
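
/* Illustrative sketch, kept out of the build with #if 0, of the
   .debug_addr entry life cycle under split DWARF: entries are
   reference-counted as attributes come and go, and only entries still
   referenced when index_addr_table_entry runs are assigned a slot.
   The label string is hypothetical.  */
#if 0
static void
example_addr_table_lifecycle (void)
{
  addr_table_entry *e
    = add_addr_table_entry (xstrdup ("LVL42"), ate_kind_label);

  /* If the referencing location list is later discarded...  */
  remove_addr_table_entry (e);

  /* ...the refcount drops back to zero, index_addr_table_entry skips
     the entry, and no .debug_addr slot is emitted for it.  */
}
#endif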
4789
4790 /* Add an address constant attribute value to a DIE. When using
4791 dwarf_split_debug_info, address attributes in dies destined for the
4792 final executable should be direct references--setting the parameter
4793 force_direct ensures this behavior. */
4794
4795 static inline void
4796 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4797 bool force_direct)
4798 {
4799 dw_attr_node attr;
4800
4801 attr.dw_attr = attr_kind;
4802 attr.dw_attr_val.val_class = dw_val_class_addr;
4803 attr.dw_attr_val.v.val_addr = addr;
4804 if (dwarf_split_debug_info && !force_direct)
4805 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4806 else
4807 attr.dw_attr_val.val_entry = NULL;
4808 add_dwarf_attr (die, &attr);
4809 }
4810
4811 /* Get the RTX from an address DIE attribute. */
4812
4813 static inline rtx
4814 AT_addr (dw_attr_node *a)
4815 {
4816 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4817 return a->dw_attr_val.v.val_addr;
4818 }
4819
4820 /* Add a file attribute value to a DIE. */
4821
4822 static inline void
4823 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4824 struct dwarf_file_data *fd)
4825 {
4826 dw_attr_node attr;
4827
4828 attr.dw_attr = attr_kind;
4829 attr.dw_attr_val.val_class = dw_val_class_file;
4830 attr.dw_attr_val.val_entry = NULL;
4831 attr.dw_attr_val.v.val_file = fd;
4832 add_dwarf_attr (die, &attr);
4833 }
4834
4835 /* Get the dwarf_file_data from a file DIE attribute. */
4836
4837 static inline struct dwarf_file_data *
4838 AT_file (dw_attr_node *a)
4839 {
4840 gcc_assert (a && (AT_class (a) == dw_val_class_file
4841 || AT_class (a) == dw_val_class_file_implicit));
4842 return a->dw_attr_val.v.val_file;
4843 }
4844
4845 /* Add a vms delta attribute value to a DIE. */
4846
4847 static inline void
4848 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4849 const char *lbl1, const char *lbl2)
4850 {
4851 dw_attr_node attr;
4852
4853 attr.dw_attr = attr_kind;
4854 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4855 attr.dw_attr_val.val_entry = NULL;
4856 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4857 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4858 add_dwarf_attr (die, &attr);
4859 }
4860
4861 /* Add a label identifier attribute value to a DIE. */
4862
4863 static inline void
4864 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4865 const char *lbl_id)
4866 {
4867 dw_attr_node attr;
4868
4869 attr.dw_attr = attr_kind;
4870 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4871 attr.dw_attr_val.val_entry = NULL;
4872 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4873 if (dwarf_split_debug_info)
4874 attr.dw_attr_val.val_entry
4875 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4876 ate_kind_label);
4877 add_dwarf_attr (die, &attr);
4878 }
4879
4880 /* Add a section offset attribute value to a DIE, an offset into the
4881 debug_line section. */
4882
4883 static inline void
4884 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4885 const char *label)
4886 {
4887 dw_attr_node attr;
4888
4889 attr.dw_attr = attr_kind;
4890 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4891 attr.dw_attr_val.val_entry = NULL;
4892 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4893 add_dwarf_attr (die, &attr);
4894 }
4895
4896 /* Add a section offset attribute value to a DIE, an offset into the
4897 debug_loclists section. */
4898
4899 static inline void
4900 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4901 const char *label)
4902 {
4903 dw_attr_node attr;
4904
4905 attr.dw_attr = attr_kind;
4906 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4907 attr.dw_attr_val.val_entry = NULL;
4908 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4909 add_dwarf_attr (die, &attr);
4910 }
4911
4912 /* Add a section offset attribute value to a DIE, an offset into the
4913 debug_macinfo section. */
4914
4915 static inline void
4916 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4917 const char *label)
4918 {
4919 dw_attr_node attr;
4920
4921 attr.dw_attr = attr_kind;
4922 attr.dw_attr_val.val_class = dw_val_class_macptr;
4923 attr.dw_attr_val.val_entry = NULL;
4924 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4925 add_dwarf_attr (die, &attr);
4926 }
4927
4928 /* Add an offset attribute value to a DIE. */
4929
4930 static inline void
4931 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4932 unsigned HOST_WIDE_INT offset)
4933 {
4934 dw_attr_node attr;
4935
4936 attr.dw_attr = attr_kind;
4937 attr.dw_attr_val.val_class = dw_val_class_offset;
4938 attr.dw_attr_val.val_entry = NULL;
4939 attr.dw_attr_val.v.val_offset = offset;
4940 add_dwarf_attr (die, &attr);
4941 }
4942
4943 /* Add a range_list attribute value to a DIE. When using
4944 dwarf_split_debug_info, address attributes in dies destined for the
4945 final executable should be direct references--setting the parameter
4946 force_direct ensures this behavior. */
4947
4948 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4949 #define RELOCATED_OFFSET (NULL)
4950
4951 static void
4952 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4953 long unsigned int offset, bool force_direct)
4954 {
4955 dw_attr_node attr;
4956
4957 attr.dw_attr = attr_kind;
4958 attr.dw_attr_val.val_class = dw_val_class_range_list;
4959 /* For the range_list attribute, use val_entry to store whether the
4960 offset should follow split-debug-info or normal semantics. This
4961 value is read in output_range_list_offset. */
4962 if (dwarf_split_debug_info && !force_direct)
4963 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4964 else
4965 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4966 attr.dw_attr_val.v.val_offset = offset;
4967 add_dwarf_attr (die, &attr);
4968 }
4969
4970 /* Return the start label of a delta attribute. */
4971
4972 static inline const char *
4973 AT_vms_delta1 (dw_attr_node *a)
4974 {
4975 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4976 return a->dw_attr_val.v.val_vms_delta.lbl1;
4977 }
4978
4979 /* Return the end label of a delta attribute. */
4980
4981 static inline const char *
4982 AT_vms_delta2 (dw_attr_node *a)
4983 {
4984 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4985 return a->dw_attr_val.v.val_vms_delta.lbl2;
4986 }
4987
4988 static inline const char *
4989 AT_lbl (dw_attr_node *a)
4990 {
4991 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
4992 || AT_class (a) == dw_val_class_lineptr
4993 || AT_class (a) == dw_val_class_macptr
4994 || AT_class (a) == dw_val_class_loclistsptr
4995 || AT_class (a) == dw_val_class_high_pc));
4996 return a->dw_attr_val.v.val_lbl_id;
4997 }
4998
4999 /* Get the attribute of kind ATTR_KIND, following specification/abstract-origin links. */
5000
5001 static dw_attr_node *
5002 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5003 {
5004 dw_attr_node *a;
5005 unsigned ix;
5006 dw_die_ref spec = NULL;
5007
5008 if (! die)
5009 return NULL;
5010
5011 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5012 if (a->dw_attr == attr_kind)
5013 return a;
5014 else if (a->dw_attr == DW_AT_specification
5015 || a->dw_attr == DW_AT_abstract_origin)
5016 spec = AT_ref (a);
5017
5018 if (spec)
5019 return get_AT (spec, attr_kind);
5020
5021 return NULL;
5022 }
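
/* Illustrative sketch, kept out of the build with #if 0: get_AT looks
   through DW_AT_specification and DW_AT_abstract_origin, so an
   attribute present only on a declaration DIE is still visible from
   the DIE of its definition.  */
#if 0
static void
example_get_AT_indirection (dw_die_ref decl_die, dw_die_ref def_die)
{
  add_AT_flag (decl_die, DW_AT_external, 1);
  add_AT_specification (def_die, decl_die);

  /* Found on DECL_DIE via the DW_AT_specification link.  */
  gcc_assert (get_AT (def_die, DW_AT_external) != NULL);
}
#endif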
5023
5024 /* Returns the parent of the declaration of DIE. */
5025
5026 static dw_die_ref
5027 get_die_parent (dw_die_ref die)
5028 {
5029 dw_die_ref t;
5030
5031 if (!die)
5032 return NULL;
5033
5034 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5035 || (t = get_AT_ref (die, DW_AT_specification)))
5036 die = t;
5037
5038 return die->die_parent;
5039 }
5040
5041 /* Return the "low pc" attribute value, typically associated with a subprogram
5042 DIE. Return null if the "low pc" attribute is either not present, or if it
5043 cannot be represented as an assembler label identifier. */
5044
5045 static inline const char *
5046 get_AT_low_pc (dw_die_ref die)
5047 {
5048 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5049
5050 return a ? AT_lbl (a) : NULL;
5051 }
5052
5053 /* Return the "high pc" attribute value, typically associated with a subprogram
5054 DIE. Return null if the "high pc" attribute is either not present, or if it
5055 cannot be represented as an assembler label identifier. */
5056
5057 static inline const char *
5058 get_AT_hi_pc (dw_die_ref die)
5059 {
5060 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5061
5062 return a ? AT_lbl (a) : NULL;
5063 }
5064
5065 /* Return the value of the string attribute designated by ATTR_KIND, or
5066 NULL if it is not present. */
5067
5068 static inline const char *
5069 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5070 {
5071 dw_attr_node *a = get_AT (die, attr_kind);
5072
5073 return a ? AT_string (a) : NULL;
5074 }
5075
5076 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
5077 if it is not present. */
5078
5079 static inline int
5080 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5081 {
5082 dw_attr_node *a = get_AT (die, attr_kind);
5083
5084 return a ? AT_flag (a) : 0;
5085 }
5086
5087 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5088 if it is not present. */
5089
5090 static inline unsigned
5091 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5092 {
5093 dw_attr_node *a = get_AT (die, attr_kind);
5094
5095 return a ? AT_unsigned (a) : 0;
5096 }
5097
5098 static inline dw_die_ref
5099 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5100 {
5101 dw_attr_node *a = get_AT (die, attr_kind);
5102
5103 return a ? AT_ref (a) : NULL;
5104 }
5105
5106 static inline struct dwarf_file_data *
5107 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5108 {
5109 dw_attr_node *a = get_AT (die, attr_kind);
5110
5111 return a ? AT_file (a) : NULL;
5112 }
5113
5114 /* Return TRUE if the language is C++. */
5115
5116 static inline bool
5117 is_cxx (void)
5118 {
5119 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5120
5121 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5122 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5123 }
5124
5125 /* Return TRUE if DECL was created by the C++ frontend. */
5126
5127 static bool
5128 is_cxx (const_tree decl)
5129 {
5130 if (in_lto_p)
5131 {
5132 const_tree context = get_ultimate_context (decl);
5133 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5134 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5135 }
5136 return is_cxx ();
5137 }
5138
5139 /* Return TRUE if the language is Fortran. */
5140
5141 static inline bool
5142 is_fortran (void)
5143 {
5144 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5145
5146 return (lang == DW_LANG_Fortran77
5147 || lang == DW_LANG_Fortran90
5148 || lang == DW_LANG_Fortran95
5149 || lang == DW_LANG_Fortran03
5150 || lang == DW_LANG_Fortran08);
5151 }
5152
5153 static inline bool
5154 is_fortran (const_tree decl)
5155 {
5156 if (in_lto_p)
5157 {
5158 const_tree context = get_ultimate_context (decl);
5159 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5160 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5161 "GNU Fortran", 11) == 0
5162 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5163 "GNU F77") == 0);
5164 }
5165 return is_fortran ();
5166 }
5167
5168 /* Return TRUE if the language is Ada. */
5169
5170 static inline bool
5171 is_ada (void)
5172 {
5173 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5174
5175 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5176 }
5177
5178 /* Remove the specified attribute if present. Return TRUE if removal
5179 was successful. */
5180
5181 static bool
5182 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5183 {
5184 dw_attr_node *a;
5185 unsigned ix;
5186
5187 if (! die)
5188 return false;
5189
5190 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5191 if (a->dw_attr == attr_kind)
5192 {
5193 if (AT_class (a) == dw_val_class_str)
5194 if (a->dw_attr_val.v.val_str->refcount)
5195 a->dw_attr_val.v.val_str->refcount--;
5196
5197 /* vec::ordered_remove should help reduce the number of abbrevs
5198 that are needed. */
5199 die->die_attr->ordered_remove (ix);
5200 return true;
5201 }
5202 return false;
5203 }
5204
5205 /* Remove CHILD from its parent. PREV must have the property that
5206 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but does not otherwise alter CHILD. */
5207
5208 static void
5209 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5210 {
5211 gcc_assert (child->die_parent == prev->die_parent);
5212 gcc_assert (prev->die_sib == child);
5213 if (prev == child)
5214 {
5215 gcc_assert (child->die_parent->die_child == child);
5216 prev = NULL;
5217 }
5218 else
5219 prev->die_sib = child->die_sib;
5220 if (child->die_parent->die_child == child)
5221 child->die_parent->die_child = prev;
5222 child->die_sib = NULL;
5223 }
5224
5225 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5226 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but does not otherwise alter OLD_CHILD. */
5227
5228 static void
5229 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5230 {
5231 dw_die_ref parent = old_child->die_parent;
5232
5233 gcc_assert (parent == prev->die_parent);
5234 gcc_assert (prev->die_sib == old_child);
5235
5236 new_child->die_parent = parent;
5237 if (prev == old_child)
5238 {
5239 gcc_assert (parent->die_child == old_child);
5240 new_child->die_sib = new_child;
5241 }
5242 else
5243 {
5244 prev->die_sib = new_child;
5245 new_child->die_sib = old_child->die_sib;
5246 }
5247 if (old_child->die_parent->die_child == old_child)
5248 old_child->die_parent->die_child = new_child;
5249 old_child->die_sib = NULL;
5250 }
5251
5252 /* Move all children from OLD_PARENT to NEW_PARENT. */
5253
5254 static void
5255 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5256 {
5257 dw_die_ref c;
5258 new_parent->die_child = old_parent->die_child;
5259 old_parent->die_child = NULL;
5260 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5261 }
5262
5263 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no child
5264 matches TAG. */
5265
5266 static void
5267 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5268 {
5269 dw_die_ref c;
5270
5271 c = die->die_child;
5272 if (c) do {
5273 dw_die_ref prev = c;
5274 c = c->die_sib;
5275 while (c->die_tag == tag)
5276 {
5277 remove_child_with_prev (c, prev);
5278 c->die_parent = NULL;
5279 /* Might have removed every child. */
5280 if (die->die_child == NULL)
5281 return;
5282 c = prev->die_sib;
5283 }
5284 } while (c != die->die_child);
5285 }
5286
5287 /* Add a CHILD_DIE as the last child of DIE. */
5288
5289 static void
5290 add_child_die (dw_die_ref die, dw_die_ref child_die)
5291 {
5292 /* FIXME this should probably be an assert. */
5293 if (! die || ! child_die)
5294 return;
5295 gcc_assert (die != child_die);
5296
5297 child_die->die_parent = die;
5298 if (die->die_child)
5299 {
5300 child_die->die_sib = die->die_child->die_sib;
5301 die->die_child->die_sib = child_die;
5302 }
5303 else
5304 child_die->die_sib = child_die;
5305 die->die_child = child_die;
5306 }
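
/* The children of a DIE form a circular, singly-linked list threaded through
   die_sib: DIE->die_child points to the last (most recently added) child and
   DIE->die_child->die_sib to the first one.  For instance, after adding
   children A, B and C in that order the links look roughly like

     die->die_child --> C
     A->die_sib == B,  B->die_sib == C,  C->die_sib == A

   which is the invariant that remove_child_with_prev, replace_child,
   add_child_die_after and FOR_EACH_CHILD all rely on.  */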
5307
5308 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5309
5310 static void
5311 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5312 dw_die_ref after_die)
5313 {
5314 gcc_assert (die
5315 && child_die
5316 && after_die
5317 && die->die_child
5318 && die != child_die);
5319
5320 child_die->die_parent = die;
5321 child_die->die_sib = after_die->die_sib;
5322 after_die->die_sib = child_die;
5323 if (die->die_child == after_die)
5324 die->die_child = child_die;
5325 }
5326
5327 /* Unassociate CHILD from its parent, and make its parent be
5328 NEW_PARENT. */
5329
5330 static void
5331 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5332 {
5333 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5334 if (p->die_sib == child)
5335 {
5336 remove_child_with_prev (child, p);
5337 break;
5338 }
5339 add_child_die (new_parent, child);
5340 }
5341
5342 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5343 is the specification, to the end of PARENT's list of children.
5344 This is done by removing and re-adding it. */
5345
5346 static void
5347 splice_child_die (dw_die_ref parent, dw_die_ref child)
5348 {
5349 /* We want the declaration DIE from inside the class, not the
5350 specification DIE at toplevel. */
5351 if (child->die_parent != parent)
5352 {
5353 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5354
5355 if (tmp)
5356 child = tmp;
5357 }
5358
5359 gcc_assert (child->die_parent == parent
5360 || (child->die_parent
5361 == get_AT_ref (parent, DW_AT_specification)));
5362
5363 reparent_child (child, parent);
5364 }
5365
5366 /* Create and return a new die with TAG_VALUE as tag. */
5367
5368 static inline dw_die_ref
5369 new_die_raw (enum dwarf_tag tag_value)
5370 {
5371 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5372 die->die_tag = tag_value;
5373 return die;
5374 }
5375
5376 /* Create and return a new die with a parent of PARENT_DIE. If
5377 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5378 associated tree T must be supplied to determine parenthood
5379 later. */
5380
5381 static inline dw_die_ref
5382 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5383 {
5384 dw_die_ref die = new_die_raw (tag_value);
5385
5386 if (parent_die != NULL)
5387 add_child_die (parent_die, die);
5388 else
5389 {
5390 limbo_die_node *limbo_node;
5391
5392 /* No DIEs created after early dwarf should end up in limbo,
5393 because the limbo list should not persist past LTO
5394 streaming. */
5395 if (tag_value != DW_TAG_compile_unit
5396 /* These are allowed because they're generated while
5397 breaking out COMDAT units late. */
5398 && tag_value != DW_TAG_type_unit
5399 && tag_value != DW_TAG_skeleton_unit
5400 && !early_dwarf
5401 /* Allow nested functions to live in limbo because they will
5402 only temporarily live there, as decls_for_scope will fix
5403 them up. */
5404 && (TREE_CODE (t) != FUNCTION_DECL
5405 || !decl_function_context (t))
5406 /* Same as nested functions above but for types. Types that
5407 are local to a function will be fixed in
5408 decls_for_scope. */
5409 && (!RECORD_OR_UNION_TYPE_P (t)
5410 || !TYPE_CONTEXT (t)
5411 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5412 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5413 especially in the ltrans stage, but once we implement LTO
5414 dwarf streaming, we should remove this exception. */
5415 && !in_lto_p)
5416 {
5417 fprintf (stderr, "symbol ended up in limbo too late:");
5418 debug_generic_stmt (t);
5419 gcc_unreachable ();
5420 }
5421
5422 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5423 limbo_node->die = die;
5424 limbo_node->created_for = t;
5425 limbo_node->next = limbo_die_list;
5426 limbo_die_list = limbo_node;
5427 }
5428
5429 return die;
5430 }
5431
5432 /* Return the DIE associated with the given type specifier. */
5433
5434 static inline dw_die_ref
5435 lookup_type_die (tree type)
5436 {
5437 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5438 if (die && die->removed)
5439 {
5440 TYPE_SYMTAB_DIE (type) = NULL;
5441 return NULL;
5442 }
5443 return die;
5444 }
5445
5446 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5447 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5448 anonymous type instead of the one of the naming typedef. */
5449
5450 static inline dw_die_ref
5451 strip_naming_typedef (tree type, dw_die_ref type_die)
5452 {
5453 if (type
5454 && TREE_CODE (type) == RECORD_TYPE
5455 && type_die
5456 && type_die->die_tag == DW_TAG_typedef
5457 && is_naming_typedef_decl (TYPE_NAME (type)))
5458 type_die = get_AT_ref (type_die, DW_AT_type);
5459 return type_die;
5460 }
5461
5462 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5463 typedef[1], return the DIE of the anonymous type instead of the one of
5464 the naming typedef. This is because in gen_typedef_die we equated
5465 the anonymous struct named by the typedef with the DIE of the
5466 naming typedef. So by default, lookup_type_die on an anonymous
5467 struct yields the DIE of the naming typedef.
5468
5469 [1]: Read the comment of is_naming_typedef_decl to learn about what
5470 a naming typedef is. */
5471
5472 static inline dw_die_ref
5473 lookup_type_die_strip_naming_typedef (tree type)
5474 {
5475 dw_die_ref die = lookup_type_die (type);
5476 return strip_naming_typedef (type, die);
5477 }
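
/* As a concrete illustration, for a declaration along the lines of

     typedef struct { int i; } T;

   T acts as the naming typedef of the anonymous struct, so lookup_type_die
   on the struct type yields the DW_TAG_typedef DIE; this wrapper then
   follows that DIE's DW_AT_type to hand back the DIE of the anonymous
   struct itself.  */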
5478
5479 /* Equate a DIE to a given type specifier. */
5480
5481 static inline void
5482 equate_type_number_to_die (tree type, dw_die_ref type_die)
5483 {
5484 TYPE_SYMTAB_DIE (type) = type_die;
5485 }
5486
5487 /* Returns a hash value for X (which really is a die_struct). */
5488
5489 inline hashval_t
5490 decl_die_hasher::hash (die_node *x)
5491 {
5492 return (hashval_t) x->decl_id;
5493 }
5494
5495 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5496
5497 inline bool
5498 decl_die_hasher::equal (die_node *x, tree y)
5499 {
5500 return (x->decl_id == DECL_UID (y));
5501 }
5502
5503 /* Return the DIE associated with a given declaration. */
5504
5505 static inline dw_die_ref
5506 lookup_decl_die (tree decl)
5507 {
5508 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5509 NO_INSERT);
5510 if (!die)
5511 return NULL;
5512 if ((*die)->removed)
5513 {
5514 decl_die_table->clear_slot (die);
5515 return NULL;
5516 }
5517 return *die;
5518 }
5519
5520
5521 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5522 style reference. Return true if we found one referring to a DIE for
5523 DECL, otherwise return false. */
5524
5525 static bool
5526 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5527 unsigned HOST_WIDE_INT *off)
5528 {
5529 dw_die_ref die;
5530
5531 if (flag_wpa && !decl_die_table)
5532 return false;
5533
5534 if (TREE_CODE (decl) == BLOCK)
5535 die = BLOCK_DIE (decl);
5536 else
5537 die = lookup_decl_die (decl);
5538 if (!die)
5539 return false;
5540
5541 /* During WPA stage we currently use DIEs to store the
5542 decl <-> label + offset map. That's quite inefficient but it
5543 works for now. */
5544 if (flag_wpa)
5545 {
5546 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5547 if (!ref)
5548 {
5549 gcc_assert (die == comp_unit_die ());
5550 return false;
5551 }
5552 *off = ref->die_offset;
5553 *sym = ref->die_id.die_symbol;
5554 return true;
5555 }
5556
5557 /* Similar to get_ref_die_offset_label, but using the "correct"
5558 label. */
5559 *off = die->die_offset;
5560 while (die->die_parent)
5561 die = die->die_parent;
5562 /* For the containing CU DIE we compute a die_symbol in
5563 compute_comp_unit_symbol. */
5564 gcc_assert (die->die_tag == DW_TAG_compile_unit
5565 && die->die_id.die_symbol != NULL);
5566 *sym = die->die_id.die_symbol;
5567 return true;
5568 }
5569
5570 /* Add to DIE a reference of kind ATTR_KIND to the DIE found at SYMBOL + OFFSET. */
5571
5572 static void
5573 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5574 const char *symbol, HOST_WIDE_INT offset)
5575 {
5576 /* Create a fake DIE that contains the reference. Don't use
5577 new_die because we don't want to end up in the limbo list. */
5578 dw_die_ref ref = new_die_raw (die->die_tag);
5579 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5580 ref->die_offset = offset;
5581 ref->with_offset = 1;
5582 add_AT_die_ref (die, attr_kind, ref);
5583 }
5584
5585 /* Create a DIE for DECL if required and add a reference to a DIE
5586 at SYMBOL + OFFSET which contains attributes dumped early. */
5587
5588 static void
5589 dwarf2out_register_external_die (tree decl, const char *sym,
5590 unsigned HOST_WIDE_INT off)
5591 {
5592 if (debug_info_level == DINFO_LEVEL_NONE)
5593 return;
5594
5595 if (flag_wpa && !decl_die_table)
5596 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5597
5598 dw_die_ref die
5599 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5600 gcc_assert (!die);
5601
5602 tree ctx;
5603 dw_die_ref parent = NULL;
5604 /* Need to look up a DIE for the decl's context - the containing
5605 function or translation unit. */
5606 if (TREE_CODE (decl) == BLOCK)
5607 {
5608 ctx = BLOCK_SUPERCONTEXT (decl);
5609 /* ??? We do not output DIEs for all scopes, thus skip as
5610 many scopes as needed. */
5611 while (TREE_CODE (ctx) == BLOCK
5612 && !BLOCK_DIE (ctx))
5613 ctx = BLOCK_SUPERCONTEXT (ctx);
5614 }
5615 else
5616 ctx = DECL_CONTEXT (decl);
5617 while (ctx && TYPE_P (ctx))
5618 ctx = TYPE_CONTEXT (ctx);
5619 if (ctx)
5620 {
5621 if (TREE_CODE (ctx) == BLOCK)
5622 parent = BLOCK_DIE (ctx);
5623 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5624 /* Keep the 1:1 association during WPA. */
5625 && !flag_wpa)
5626 /* Otherwise all late annotations go to the main CU which
5627 imports the original CUs. */
5628 parent = comp_unit_die ();
5629 else if (TREE_CODE (ctx) == FUNCTION_DECL
5630 && TREE_CODE (decl) != PARM_DECL
5631 && TREE_CODE (decl) != BLOCK)
5632 /* Leave function local entities parent determination to when
5633 we process scope vars. */
5634 ;
5635 else
5636 parent = lookup_decl_die (ctx);
5637 }
5638 else
5639 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5640 Handle this case gracefully by parenting such DIEs in the compilation unit. */
5641 parent = comp_unit_die ();
5642 /* Create a DIE "stub". */
5643 switch (TREE_CODE (decl))
5644 {
5645 case TRANSLATION_UNIT_DECL:
5646 if (! flag_wpa)
5647 {
5648 die = comp_unit_die ();
5649 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5650 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5651 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5652 to create a DIE for the original CUs. */
5653 return;
5654 }
5655 /* Keep the 1:1 association during WPA. */
5656 die = new_die (DW_TAG_compile_unit, NULL, decl);
5657 break;
5658 case NAMESPACE_DECL:
5659 if (is_fortran (decl))
5660 die = new_die (DW_TAG_module, parent, decl);
5661 else
5662 die = new_die (DW_TAG_namespace, parent, decl);
5663 break;
5664 case FUNCTION_DECL:
5665 die = new_die (DW_TAG_subprogram, parent, decl);
5666 break;
5667 case VAR_DECL:
5668 die = new_die (DW_TAG_variable, parent, decl);
5669 break;
5670 case RESULT_DECL:
5671 die = new_die (DW_TAG_variable, parent, decl);
5672 break;
5673 case PARM_DECL:
5674 die = new_die (DW_TAG_formal_parameter, parent, decl);
5675 break;
5676 case CONST_DECL:
5677 die = new_die (DW_TAG_constant, parent, decl);
5678 break;
5679 case LABEL_DECL:
5680 die = new_die (DW_TAG_label, parent, decl);
5681 break;
5682 case BLOCK:
5683 die = new_die (DW_TAG_lexical_block, parent, decl);
5684 break;
5685 default:
5686 gcc_unreachable ();
5687 }
5688 if (TREE_CODE (decl) == BLOCK)
5689 BLOCK_DIE (decl) = die;
5690 else
5691 equate_decl_number_to_die (decl, die);
5692
5693 /* Add a reference to the DIE providing early debug at $sym + off. */
5694 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5695 }
5696
5697 /* Returns a hash value for X (which really is a var_loc_list). */
5698
5699 inline hashval_t
5700 decl_loc_hasher::hash (var_loc_list *x)
5701 {
5702 return (hashval_t) x->decl_id;
5703 }
5704
5705 /* Return nonzero if decl_id of var_loc_list X is the same as
5706 UID of decl *Y. */
5707
5708 inline bool
5709 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5710 {
5711 return (x->decl_id == DECL_UID (y));
5712 }
5713
5714 /* Return the var_loc list associated with a given declaration. */
5715
5716 static inline var_loc_list *
5717 lookup_decl_loc (const_tree decl)
5718 {
5719 if (!decl_loc_table)
5720 return NULL;
5721 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5722 }
5723
5724 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5725
5726 inline hashval_t
5727 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5728 {
5729 return (hashval_t) x->decl_id;
5730 }
5731
5732 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5733 UID of decl *Y. */
5734
5735 inline bool
5736 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5737 {
5738 return (x->decl_id == DECL_UID (y));
5739 }
5740
5741 /* Equate a DIE to a particular declaration. */
5742
5743 static void
5744 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5745 {
5746 unsigned int decl_id = DECL_UID (decl);
5747
5748 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5749 decl_die->decl_id = decl_id;
5750 }
5751
5752 /* Return how many bits the PIECE EXPR_LIST covers. */
5753
5754 static HOST_WIDE_INT
5755 decl_piece_bitsize (rtx piece)
5756 {
5757 int ret = (int) GET_MODE (piece);
5758 if (ret)
5759 return ret;
5760 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5761 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5762 return INTVAL (XEXP (XEXP (piece, 0), 0));
5763 }
5764
5765 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
5766
5767 static rtx *
5768 decl_piece_varloc_ptr (rtx piece)
5769 {
5770 if ((int) GET_MODE (piece))
5771 return &XEXP (piece, 0);
5772 else
5773 return &XEXP (XEXP (piece, 0), 1);
5774 }
5775
5776 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5777 NEXT is the chain of following piece nodes. */
5778
5779 static rtx_expr_list *
5780 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5781 {
5782 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5783 return alloc_EXPR_LIST (bitsize, loc_note, next);
5784 else
5785 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5786 GEN_INT (bitsize),
5787 loc_note), next);
5788 }
5789
5790 /* Return rtx that should be stored into loc field for
5791 LOC_NOTE and BITPOS/BITSIZE. */
5792
5793 static rtx
5794 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5795 HOST_WIDE_INT bitsize)
5796 {
5797 if (bitsize != -1)
5798 {
5799 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5800 if (bitpos != 0)
5801 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5802 }
5803 return loc_note;
5804 }
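
/* As an illustration of the encoding above: a location note describing bits
   [32, 48) of a decl (BITPOS 32, BITSIZE 16) comes out of
   construct_piece_list roughly as

     EXPR_LIST (32, NULL_RTX, EXPR_LIST (16, loc_note, NULL_RTX))

   i.e. a 32-bit padding piece with no location followed by the 16-bit piece
   itself, each bit count smuggled into the EXPR_LIST mode field while it
   fits below MAX_MACHINE_MODE; larger counts go through the CONCAT form
   that decl_piece_node and decl_piece_bitsize handle.  */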
5805
5806 /* This function either modifies location piece list *DEST in
5807 place (if SRC and INNER are NULL), or copies location piece list
5808 *SRC to *DEST while modifying it. Location BITPOS is modified
5809 to contain LOC_NOTE; any pieces overlapping it are removed (or,
5810 when copying, not copied) and if needed some padding around it is added.
5811 When modifying in place, DEST should point to the EXPR_LIST where
5812 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
5813 to the start of the whole list and INNER points to the EXPR_LIST
5814 where earlier pieces cover PIECE_BITPOS bits. */
5815
5816 static void
5817 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5818 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5819 HOST_WIDE_INT bitsize, rtx loc_note)
5820 {
5821 HOST_WIDE_INT diff;
5822 bool copy = inner != NULL;
5823
5824 if (copy)
5825 {
5826 /* First copy all nodes preceding the current bitpos. */
5827 while (src != inner)
5828 {
5829 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5830 decl_piece_bitsize (*src), NULL_RTX);
5831 dest = &XEXP (*dest, 1);
5832 src = &XEXP (*src, 1);
5833 }
5834 }
5835 /* Add padding if needed. */
5836 if (bitpos != piece_bitpos)
5837 {
5838 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5839 copy ? NULL_RTX : *dest);
5840 dest = &XEXP (*dest, 1);
5841 }
5842 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5843 {
5844 gcc_assert (!copy);
5845 /* A piece with the correct bitpos and bitsize already exists;
5846 just update the location for it and return. */
5847 *decl_piece_varloc_ptr (*dest) = loc_note;
5848 return;
5849 }
5850 /* Add the piece that changed. */
5851 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5852 dest = &XEXP (*dest, 1);
5853 /* Skip over pieces that overlap it. */
5854 diff = bitpos - piece_bitpos + bitsize;
5855 if (!copy)
5856 src = dest;
5857 while (diff > 0 && *src)
5858 {
5859 rtx piece = *src;
5860 diff -= decl_piece_bitsize (piece);
5861 if (copy)
5862 src = &XEXP (piece, 1);
5863 else
5864 {
5865 *src = XEXP (piece, 1);
5866 free_EXPR_LIST_node (piece);
5867 }
5868 }
5869 /* Add padding if needed. */
5870 if (diff < 0 && *src)
5871 {
5872 if (!copy)
5873 dest = src;
5874 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5875 dest = &XEXP (*dest, 1);
5876 }
5877 if (!copy)
5878 return;
5879 /* Finally copy all nodes following it. */
5880 while (*src)
5881 {
5882 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5883 decl_piece_bitsize (*src), NULL_RTX);
5884 dest = &XEXP (*dest, 1);
5885 src = &XEXP (*src, 1);
5886 }
5887 }
5888
5889 /* Add a variable location node to the linked list for DECL. */
5890
5891 static struct var_loc_node *
5892 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5893 {
5894 unsigned int decl_id;
5895 var_loc_list *temp;
5896 struct var_loc_node *loc = NULL;
5897 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5898
5899 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5900 {
5901 tree realdecl = DECL_DEBUG_EXPR (decl);
5902 if (handled_component_p (realdecl)
5903 || (TREE_CODE (realdecl) == MEM_REF
5904 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5905 {
5906 HOST_WIDE_INT maxsize;
5907 bool reverse;
5908 tree innerdecl
5909 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
5910 &reverse);
5911 if (!DECL_P (innerdecl)
5912 || DECL_IGNORED_P (innerdecl)
5913 || TREE_STATIC (innerdecl)
5914 || bitsize <= 0
5915 || bitpos + bitsize > 256
5916 || bitsize != maxsize)
5917 return NULL;
5918 decl = innerdecl;
5919 }
5920 }
5921
5922 decl_id = DECL_UID (decl);
5923 var_loc_list **slot
5924 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5925 if (*slot == NULL)
5926 {
5927 temp = ggc_cleared_alloc<var_loc_list> ();
5928 temp->decl_id = decl_id;
5929 *slot = temp;
5930 }
5931 else
5932 temp = *slot;
5933
5934 /* For PARM_DECLs try to keep around the original incoming value,
5935 even if that means we'll emit a zero-range .debug_loc entry. */
5936 if (temp->last
5937 && temp->first == temp->last
5938 && TREE_CODE (decl) == PARM_DECL
5939 && NOTE_P (temp->first->loc)
5940 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5941 && DECL_INCOMING_RTL (decl)
5942 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5943 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5944 == GET_CODE (DECL_INCOMING_RTL (decl))
5945 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5946 && (bitsize != -1
5947 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5948 NOTE_VAR_LOCATION_LOC (loc_note))
5949 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5950 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5951 {
5952 loc = ggc_cleared_alloc<var_loc_node> ();
5953 temp->first->next = loc;
5954 temp->last = loc;
5955 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5956 }
5957 else if (temp->last)
5958 {
5959 struct var_loc_node *last = temp->last, *unused = NULL;
5960 rtx *piece_loc = NULL, last_loc_note;
5961 HOST_WIDE_INT piece_bitpos = 0;
5962 if (last->next)
5963 {
5964 last = last->next;
5965 gcc_assert (last->next == NULL);
5966 }
5967 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5968 {
5969 piece_loc = &last->loc;
5970 do
5971 {
5972 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5973 if (piece_bitpos + cur_bitsize > bitpos)
5974 break;
5975 piece_bitpos += cur_bitsize;
5976 piece_loc = &XEXP (*piece_loc, 1);
5977 }
5978 while (*piece_loc);
5979 }
5980 /* TEMP->LAST here points either to the last-but-one or to the
5981 last element in the chained list; LAST points to the
5982 last element. */
5983 if (label && strcmp (last->label, label) == 0)
5984 {
5985 /* For SRA-optimized variables, if there weren't any real
5986 insns since the last note, just modify the last node. */
5987 if (piece_loc != NULL)
5988 {
5989 adjust_piece_list (piece_loc, NULL, NULL,
5990 bitpos, piece_bitpos, bitsize, loc_note);
5991 return NULL;
5992 }
5993 /* If the last note doesn't cover any instructions, remove it. */
5994 if (temp->last != last)
5995 {
5996 temp->last->next = NULL;
5997 unused = last;
5998 last = temp->last;
5999 gcc_assert (strcmp (last->label, label) != 0);
6000 }
6001 else
6002 {
6003 gcc_assert (temp->first == temp->last
6004 || (temp->first->next == temp->last
6005 && TREE_CODE (decl) == PARM_DECL));
6006 memset (temp->last, '\0', sizeof (*temp->last));
6007 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6008 return temp->last;
6009 }
6010 }
6011 if (bitsize == -1 && NOTE_P (last->loc))
6012 last_loc_note = last->loc;
6013 else if (piece_loc != NULL
6014 && *piece_loc != NULL_RTX
6015 && piece_bitpos == bitpos
6016 && decl_piece_bitsize (*piece_loc) == bitsize)
6017 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6018 else
6019 last_loc_note = NULL_RTX;
6020 /* If the current location is the same as the end of the list,
6021 and either both or neither of the locations is uninitialized,
6022 we have nothing to do. */
6023 if (last_loc_note == NULL_RTX
6024 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6025 NOTE_VAR_LOCATION_LOC (loc_note)))
6026 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6027 != NOTE_VAR_LOCATION_STATUS (loc_note))
6028 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6029 == VAR_INIT_STATUS_UNINITIALIZED)
6030 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6031 == VAR_INIT_STATUS_UNINITIALIZED))))
6032 {
6033 /* Add LOC to the end of list and update LAST. If the last
6034 element of the list has been removed above, reuse its
6035 memory for the new node, otherwise allocate a new one. */
6036 if (unused)
6037 {
6038 loc = unused;
6039 memset (loc, '\0', sizeof (*loc));
6040 }
6041 else
6042 loc = ggc_cleared_alloc<var_loc_node> ();
6043 if (bitsize == -1 || piece_loc == NULL)
6044 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6045 else
6046 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6047 bitpos, piece_bitpos, bitsize, loc_note);
6048 last->next = loc;
6049 /* Ensure TEMP->LAST will point either to the new last but one
6050 element of the chain, or to the last element in it. */
6051 if (last != temp->last)
6052 temp->last = last;
6053 }
6054 else if (unused)
6055 ggc_free (unused);
6056 }
6057 else
6058 {
6059 loc = ggc_cleared_alloc<var_loc_node> ();
6060 temp->first = loc;
6061 temp->last = loc;
6062 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6063 }
6064 return loc;
6065 }
6066 \f
6067 /* Keep track of the number of spaces used to indent the
6068 output of the debugging routines that print the structure of
6069 the DIE internal representation. */
6070 static int print_indent;
6071
6072 /* Indent the line the number of spaces given by print_indent. */
6073
6074 static inline void
6075 print_spaces (FILE *outfile)
6076 {
6077 fprintf (outfile, "%*s", print_indent, "");
6078 }
6079
6080 /* Print a type signature in hex. */
6081
6082 static inline void
6083 print_signature (FILE *outfile, char *sig)
6084 {
6085 int i;
6086
6087 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6088 fprintf (outfile, "%02x", sig[i] & 0xff);
6089 }
6090
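/* Print the discriminant value DISCR_VALUE to OUTFILE: as an unsigned
   number when its POS flag is set, as a signed number otherwise.  */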
6091 static inline void
6092 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6093 {
6094 if (discr_value->pos)
6095 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6096 else
6097 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6098 }
6099
6100 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6101
6102 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6103 RECURSE, output location descriptor operations. */
6104
6105 static void
6106 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6107 {
6108 switch (val->val_class)
6109 {
6110 case dw_val_class_addr:
6111 fprintf (outfile, "address");
6112 break;
6113 case dw_val_class_offset:
6114 fprintf (outfile, "offset");
6115 break;
6116 case dw_val_class_loc:
6117 fprintf (outfile, "location descriptor");
6118 if (val->v.val_loc == NULL)
6119 fprintf (outfile, " -> <null>\n");
6120 else if (recurse)
6121 {
6122 fprintf (outfile, ":\n");
6123 print_indent += 4;
6124 print_loc_descr (val->v.val_loc, outfile);
6125 print_indent -= 4;
6126 }
6127 else
6128 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6129 break;
6130 case dw_val_class_loc_list:
6131 fprintf (outfile, "location list -> label:%s",
6132 val->v.val_loc_list->ll_symbol);
6133 break;
6134 case dw_val_class_range_list:
6135 fprintf (outfile, "range list");
6136 break;
6137 case dw_val_class_const:
6138 case dw_val_class_const_implicit:
6139 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6140 break;
6141 case dw_val_class_unsigned_const:
6142 case dw_val_class_unsigned_const_implicit:
6143 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6144 break;
6145 case dw_val_class_const_double:
6146 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6147 HOST_WIDE_INT_PRINT_UNSIGNED")",
6148 val->v.val_double.high,
6149 val->v.val_double.low);
6150 break;
6151 case dw_val_class_wide_int:
6152 {
6153 int i = val->v.val_wide->get_len ();
6154 fprintf (outfile, "constant (");
6155 gcc_assert (i > 0);
6156 if (val->v.val_wide->elt (i - 1) == 0)
6157 fprintf (outfile, "0x");
6158 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6159 val->v.val_wide->elt (--i));
6160 while (--i >= 0)
6161 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6162 val->v.val_wide->elt (i));
6163 fprintf (outfile, ")");
6164 break;
6165 }
6166 case dw_val_class_vec:
6167 fprintf (outfile, "floating-point or vector constant");
6168 break;
6169 case dw_val_class_flag:
6170 fprintf (outfile, "%u", val->v.val_flag);
6171 break;
6172 case dw_val_class_die_ref:
6173 if (val->v.val_die_ref.die != NULL)
6174 {
6175 dw_die_ref die = val->v.val_die_ref.die;
6176
6177 if (die->comdat_type_p)
6178 {
6179 fprintf (outfile, "die -> signature: ");
6180 print_signature (outfile,
6181 die->die_id.die_type_node->signature);
6182 }
6183 else if (die->die_id.die_symbol)
6184 {
6185 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6186 if (die->with_offset)
6187 fprintf (outfile, " + %ld", die->die_offset);
6188 }
6189 else
6190 fprintf (outfile, "die -> %ld", die->die_offset);
6191 fprintf (outfile, " (%p)", (void *) die);
6192 }
6193 else
6194 fprintf (outfile, "die -> <null>");
6195 break;
6196 case dw_val_class_vms_delta:
6197 fprintf (outfile, "delta: @slotcount(%s-%s)",
6198 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6199 break;
6200 case dw_val_class_lbl_id:
6201 case dw_val_class_lineptr:
6202 case dw_val_class_macptr:
6203 case dw_val_class_loclistsptr:
6204 case dw_val_class_high_pc:
6205 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6206 break;
6207 case dw_val_class_str:
6208 if (val->v.val_str->str != NULL)
6209 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6210 else
6211 fprintf (outfile, "<null>");
6212 break;
6213 case dw_val_class_file:
6214 case dw_val_class_file_implicit:
6215 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6216 val->v.val_file->emitted_number);
6217 break;
6218 case dw_val_class_data8:
6219 {
6220 int i;
6221
6222 for (i = 0; i < 8; i++)
6223 fprintf (outfile, "%02x", val->v.val_data8[i]);
6224 break;
6225 }
6226 case dw_val_class_discr_value:
6227 print_discr_value (outfile, &val->v.val_discr_value);
6228 break;
6229 case dw_val_class_discr_list:
6230 for (dw_discr_list_ref node = val->v.val_discr_list;
6231 node != NULL;
6232 node = node->dw_discr_next)
6233 {
6234 if (node->dw_discr_range)
6235 {
6236 fprintf (outfile, " .. ");
6237 print_discr_value (outfile, &node->dw_discr_lower_bound);
6238 print_discr_value (outfile, &node->dw_discr_upper_bound);
6239 }
6240 else
6241 print_discr_value (outfile, &node->dw_discr_lower_bound);
6242
6243 if (node->dw_discr_next != NULL)
6244 fprintf (outfile, " | ");
6245 }
6246 default:
6247 break;
6248 }
6249 }
6250
6251 /* Likewise, for a DIE attribute. */
6252
6253 static void
6254 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6255 {
6256 print_dw_val (&a->dw_attr_val, recurse, outfile);
6257 }
6258
6259
6260 /* Print the list of operands in the LOC location description to OUTFILE. This
6261 routine is a debugging aid only. */
6262
6263 static void
6264 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6265 {
6266 dw_loc_descr_ref l = loc;
6267
6268 if (loc == NULL)
6269 {
6270 print_spaces (outfile);
6271 fprintf (outfile, "<null>\n");
6272 return;
6273 }
6274
6275 for (l = loc; l != NULL; l = l->dw_loc_next)
6276 {
6277 print_spaces (outfile);
6278 fprintf (outfile, "(%p) %s",
6279 (void *) l,
6280 dwarf_stack_op_name (l->dw_loc_opc));
6281 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6282 {
6283 fprintf (outfile, " ");
6284 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6285 }
6286 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6287 {
6288 fprintf (outfile, ", ");
6289 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6290 }
6291 fprintf (outfile, "\n");
6292 }
6293 }
6294
6295 /* Print the information associated with a given DIE, and its children.
6296 This routine is a debugging aid only. */
6297
6298 static void
6299 print_die (dw_die_ref die, FILE *outfile)
6300 {
6301 dw_attr_node *a;
6302 dw_die_ref c;
6303 unsigned ix;
6304
6305 print_spaces (outfile);
6306 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6307 die->die_offset, dwarf_tag_name (die->die_tag),
6308 (void*) die);
6309 print_spaces (outfile);
6310 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6311 fprintf (outfile, " offset: %ld", die->die_offset);
6312 fprintf (outfile, " mark: %d\n", die->die_mark);
6313
6314 if (die->comdat_type_p)
6315 {
6316 print_spaces (outfile);
6317 fprintf (outfile, " signature: ");
6318 print_signature (outfile, die->die_id.die_type_node->signature);
6319 fprintf (outfile, "\n");
6320 }
6321
6322 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6323 {
6324 print_spaces (outfile);
6325 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6326
6327 print_attribute (a, true, outfile);
6328 fprintf (outfile, "\n");
6329 }
6330
6331 if (die->die_child != NULL)
6332 {
6333 print_indent += 4;
6334 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6335 print_indent -= 4;
6336 }
6337 if (print_indent == 0)
6338 fprintf (outfile, "\n");
6339 }
6340
6341 /* Print the list of operations in the LOC location description. */
6342
6343 DEBUG_FUNCTION void
6344 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6345 {
6346 print_loc_descr (loc, stderr);
6347 }
6348
6349 /* Print the information collected for a given DIE. */
6350
6351 DEBUG_FUNCTION void
6352 debug_dwarf_die (dw_die_ref die)
6353 {
6354 print_die (die, stderr);
6355 }
6356
6357 DEBUG_FUNCTION void
6358 debug (die_struct &ref)
6359 {
6360 print_die (&ref, stderr);
6361 }
6362
6363 DEBUG_FUNCTION void
6364 debug (die_struct *ptr)
6365 {
6366 if (ptr)
6367 debug (*ptr);
6368 else
6369 fprintf (stderr, "<nil>\n");
6370 }
6371
6372
6373 /* Print all DWARF information collected for the compilation unit.
6374 This routine is a debugging aid only. */
6375
6376 DEBUG_FUNCTION void
6377 debug_dwarf (void)
6378 {
6379 print_indent = 0;
6380 print_die (comp_unit_die (), stderr);
6381 }
6382
6383 /* Verify the DIE tree structure. */
6384
6385 DEBUG_FUNCTION void
6386 verify_die (dw_die_ref die)
6387 {
6388 gcc_assert (!die->die_mark);
6389 if (die->die_parent == NULL
6390 && die->die_sib == NULL)
6391 return;
6392 /* Verify the die_sib list is cyclic. */
6393 dw_die_ref x = die;
6394 do
6395 {
6396 x->die_mark = 1;
6397 x = x->die_sib;
6398 }
6399 while (x && !x->die_mark);
6400 gcc_assert (x == die);
6401 x = die;
6402 do
6403 {
6404 /* Verify all dies have the same parent. */
6405 gcc_assert (x->die_parent == die->die_parent);
6406 if (x->die_child)
6407 {
6408 /* Verify the child has the proper parent and recurse. */
6409 gcc_assert (x->die_child->die_parent == x);
6410 verify_die (x->die_child);
6411 }
6412 x->die_mark = 0;
6413 x = x->die_sib;
6414 }
6415 while (x && x->die_mark);
6416 }
6417
6418 /* Sanity checks on DIEs. */
6419
6420 static void
6421 check_die (dw_die_ref die)
6422 {
6423 unsigned ix;
6424 dw_attr_node *a;
6425 bool inline_found = false;
6426 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6427 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6428 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6429 {
6430 switch (a->dw_attr)
6431 {
6432 case DW_AT_inline:
6433 if (a->dw_attr_val.v.val_unsigned)
6434 inline_found = true;
6435 break;
6436 case DW_AT_location:
6437 ++n_location;
6438 break;
6439 case DW_AT_low_pc:
6440 ++n_low_pc;
6441 break;
6442 case DW_AT_high_pc:
6443 ++n_high_pc;
6444 break;
6445 case DW_AT_artificial:
6446 ++n_artificial;
6447 break;
6448 case DW_AT_decl_column:
6449 ++n_decl_column;
6450 break;
6451 case DW_AT_decl_line:
6452 ++n_decl_line;
6453 break;
6454 case DW_AT_decl_file:
6455 ++n_decl_file;
6456 break;
6457 default:
6458 break;
6459 }
6460 }
6461 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6462 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6463 {
6464 fprintf (stderr, "Duplicate attributes in DIE:\n");
6465 debug_dwarf_die (die);
6466 gcc_unreachable ();
6467 }
6468 if (inline_found)
6469 {
6470 /* A debugging information entry that is a member of an abstract
6471 instance tree [that has DW_AT_inline] should not contain any
6472 attributes which describe aspects of the subroutine which vary
6473 between distinct inlined expansions or distinct out-of-line
6474 expansions. */
6475 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6476 gcc_assert (a->dw_attr != DW_AT_low_pc
6477 && a->dw_attr != DW_AT_high_pc
6478 && a->dw_attr != DW_AT_location
6479 && a->dw_attr != DW_AT_frame_base
6480 && a->dw_attr != DW_AT_call_all_calls
6481 && a->dw_attr != DW_AT_GNU_all_call_sites);
6482 }
6483 }
6484 \f
6485 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6486 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6487 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6488
6489 /* Calculate the checksum of a location expression. */
6490
6491 static inline void
6492 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6493 {
6494 int tem;
6495 inchash::hash hstate;
6496 hashval_t hash;
6497
6498 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6499 CHECKSUM (tem);
6500 hash_loc_operands (loc, hstate);
6501 hash = hstate.end();
6502 CHECKSUM (hash);
6503 }
6504
6505 /* Calculate the checksum of an attribute. */
6506
6507 static void
6508 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6509 {
6510 dw_loc_descr_ref loc;
6511 rtx r;
6512
6513 CHECKSUM (at->dw_attr);
6514
6515 /* We don't care that this was compiled with a different compiler
6516 snapshot; if the output is the same, that's what matters. */
6517 if (at->dw_attr == DW_AT_producer)
6518 return;
6519
6520 switch (AT_class (at))
6521 {
6522 case dw_val_class_const:
6523 case dw_val_class_const_implicit:
6524 CHECKSUM (at->dw_attr_val.v.val_int);
6525 break;
6526 case dw_val_class_unsigned_const:
6527 case dw_val_class_unsigned_const_implicit:
6528 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6529 break;
6530 case dw_val_class_const_double:
6531 CHECKSUM (at->dw_attr_val.v.val_double);
6532 break;
6533 case dw_val_class_wide_int:
6534 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6535 get_full_len (*at->dw_attr_val.v.val_wide)
6536 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6537 break;
6538 case dw_val_class_vec:
6539 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6540 (at->dw_attr_val.v.val_vec.length
6541 * at->dw_attr_val.v.val_vec.elt_size));
6542 break;
6543 case dw_val_class_flag:
6544 CHECKSUM (at->dw_attr_val.v.val_flag);
6545 break;
6546 case dw_val_class_str:
6547 CHECKSUM_STRING (AT_string (at));
6548 break;
6549
6550 case dw_val_class_addr:
6551 r = AT_addr (at);
6552 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6553 CHECKSUM_STRING (XSTR (r, 0));
6554 break;
6555
6556 case dw_val_class_offset:
6557 CHECKSUM (at->dw_attr_val.v.val_offset);
6558 break;
6559
6560 case dw_val_class_loc:
6561 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6562 loc_checksum (loc, ctx);
6563 break;
6564
6565 case dw_val_class_die_ref:
6566 die_checksum (AT_ref (at), ctx, mark);
6567 break;
6568
6569 case dw_val_class_fde_ref:
6570 case dw_val_class_vms_delta:
6571 case dw_val_class_lbl_id:
6572 case dw_val_class_lineptr:
6573 case dw_val_class_macptr:
6574 case dw_val_class_loclistsptr:
6575 case dw_val_class_high_pc:
6576 break;
6577
6578 case dw_val_class_file:
6579 case dw_val_class_file_implicit:
6580 CHECKSUM_STRING (AT_file (at)->filename);
6581 break;
6582
6583 case dw_val_class_data8:
6584 CHECKSUM (at->dw_attr_val.v.val_data8);
6585 break;
6586
6587 default:
6588 break;
6589 }
6590 }
6591
6592 /* Calculate the checksum of a DIE. */
6593
6594 static void
6595 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6596 {
6597 dw_die_ref c;
6598 dw_attr_node *a;
6599 unsigned ix;
6600
6601 /* To avoid infinite recursion. */
6602 if (die->die_mark)
6603 {
6604 CHECKSUM (die->die_mark);
6605 return;
6606 }
6607 die->die_mark = ++(*mark);
6608
6609 CHECKSUM (die->die_tag);
6610
6611 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6612 attr_checksum (a, ctx, mark);
6613
6614 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6615 }
6616
6617 #undef CHECKSUM
6618 #undef CHECKSUM_BLOCK
6619 #undef CHECKSUM_STRING
6620
6621 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6622 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6623 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6624 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6625 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6626 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6627 #define CHECKSUM_ATTR(FOO) \
6628 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6629
6630 /* Calculate the checksum of a number in signed LEB128 format. */
6631
6632 static void
6633 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6634 {
6635 unsigned char byte;
6636 bool more;
6637
6638 while (1)
6639 {
6640 byte = (value & 0x7f);
6641 value >>= 7;
6642 more = !((value == 0 && (byte & 0x40) == 0)
6643 || (value == -1 && (byte & 0x40) != 0));
6644 if (more)
6645 byte |= 0x80;
6646 CHECKSUM (byte);
6647 if (!more)
6648 break;
6649 }
6650 }
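
/* For reference, the byte sequence this feeds into the checksum matches the
   usual signed LEB128 encoding, e.g.

     2    => 0x02          -2   => 0x7e
     127  => 0xff 0x00     -127 => 0x81 0x7f
     128  => 0x80 0x01     -128 => 0x80 0x7f  */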
6651
6652 /* Calculate the checksum of a number in unsigned LEB128 format. */
6653
6654 static void
6655 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6656 {
6657 while (1)
6658 {
6659 unsigned char byte = (value & 0x7f);
6660 value >>= 7;
6661 if (value != 0)
6662 /* More bytes to follow. */
6663 byte |= 0x80;
6664 CHECKSUM (byte);
6665 if (value == 0)
6666 break;
6667 }
6668 }
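
/* Likewise for the unsigned variant, e.g.

     2      => 0x02
     127    => 0x7f
     128    => 0x80 0x01
     624485 => 0xe5 0x8e 0x26  */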
6669
6670 /* Checksum the context of the DIE. This adds the names of any
6671 surrounding namespaces or structures to the checksum. */
6672
6673 static void
6674 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6675 {
6676 const char *name;
6677 dw_die_ref spec;
6678 int tag = die->die_tag;
6679
6680 if (tag != DW_TAG_namespace
6681 && tag != DW_TAG_structure_type
6682 && tag != DW_TAG_class_type)
6683 return;
6684
6685 name = get_AT_string (die, DW_AT_name);
6686
6687 spec = get_AT_ref (die, DW_AT_specification);
6688 if (spec != NULL)
6689 die = spec;
6690
6691 if (die->die_parent != NULL)
6692 checksum_die_context (die->die_parent, ctx);
6693
6694 CHECKSUM_ULEB128 ('C');
6695 CHECKSUM_ULEB128 (tag);
6696 if (name != NULL)
6697 CHECKSUM_STRING (name);
6698 }
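
/* For example, checksumming the context of a member DIE nested as

     namespace N { struct S { ... }; }

   first recurses to the outermost enclosing scope and then appends,
   outermost first, 'C' DW_TAG_namespace "N" followed by
   'C' DW_TAG_structure_type "S" to the checksum.  */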
6699
6700 /* Calculate the checksum of a location expression. */
6701
6702 static inline void
6703 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6704 {
6705 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6706 were emitted as a DW_FORM_sdata instead of a location expression. */
6707 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6708 {
6709 CHECKSUM_ULEB128 (DW_FORM_sdata);
6710 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6711 return;
6712 }
6713
6714 /* Otherwise, just checksum the raw location expression. */
6715 while (loc != NULL)
6716 {
6717 inchash::hash hstate;
6718 hashval_t hash;
6719
6720 CHECKSUM_ULEB128 (loc->dtprel);
6721 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6722 hash_loc_operands (loc, hstate);
6723 hash = hstate.end ();
6724 CHECKSUM (hash);
6725 loc = loc->dw_loc_next;
6726 }
6727 }
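
/* Concretely, a DW_AT_data_member_location consisting of the single
   operation DW_OP_plus_uconst 8 is checksummed as DW_FORM_sdata followed by
   sleb128 (8), i.e. exactly the bytes a plain constant 8 would contribute,
   so the resulting type signature does not depend on which of the two
   representations the producer happened to emit.  */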
6728
6729 /* Calculate the checksum of an attribute. */
6730
6731 static void
6732 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6733 struct md5_ctx *ctx, int *mark)
6734 {
6735 dw_loc_descr_ref loc;
6736 rtx r;
6737
6738 if (AT_class (at) == dw_val_class_die_ref)
6739 {
6740 dw_die_ref target_die = AT_ref (at);
6741
6742 /* For pointer and reference types, we checksum only the (qualified)
6743 name of the target type (if there is a name). For friend entries,
6744 we checksum only the (qualified) name of the target type or function.
6745 This allows the checksum to remain the same whether the target type
6746 is complete or not. */
6747 if ((at->dw_attr == DW_AT_type
6748 && (tag == DW_TAG_pointer_type
6749 || tag == DW_TAG_reference_type
6750 || tag == DW_TAG_rvalue_reference_type
6751 || tag == DW_TAG_ptr_to_member_type))
6752 || (at->dw_attr == DW_AT_friend
6753 && tag == DW_TAG_friend))
6754 {
6755 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6756
6757 if (name_attr != NULL)
6758 {
6759 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6760
6761 if (decl == NULL)
6762 decl = target_die;
6763 CHECKSUM_ULEB128 ('N');
6764 CHECKSUM_ULEB128 (at->dw_attr);
6765 if (decl->die_parent != NULL)
6766 checksum_die_context (decl->die_parent, ctx);
6767 CHECKSUM_ULEB128 ('E');
6768 CHECKSUM_STRING (AT_string (name_attr));
6769 return;
6770 }
6771 }
6772
6773 /* For all other references to another DIE, we check to see if the
6774 target DIE has already been visited. If it has, we emit a
6775 backward reference; if not, we descend recursively. */
6776 if (target_die->die_mark > 0)
6777 {
6778 CHECKSUM_ULEB128 ('R');
6779 CHECKSUM_ULEB128 (at->dw_attr);
6780 CHECKSUM_ULEB128 (target_die->die_mark);
6781 }
6782 else
6783 {
6784 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6785
6786 if (decl == NULL)
6787 decl = target_die;
6788 target_die->die_mark = ++(*mark);
6789 CHECKSUM_ULEB128 ('T');
6790 CHECKSUM_ULEB128 (at->dw_attr);
6791 if (decl->die_parent != NULL)
6792 checksum_die_context (decl->die_parent, ctx);
6793 die_checksum_ordered (target_die, ctx, mark);
6794 }
6795 return;
6796 }
6797
6798 CHECKSUM_ULEB128 ('A');
6799 CHECKSUM_ULEB128 (at->dw_attr);
6800
6801 switch (AT_class (at))
6802 {
6803 case dw_val_class_const:
6804 case dw_val_class_const_implicit:
6805 CHECKSUM_ULEB128 (DW_FORM_sdata);
6806 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6807 break;
6808
6809 case dw_val_class_unsigned_const:
6810 case dw_val_class_unsigned_const_implicit:
6811 CHECKSUM_ULEB128 (DW_FORM_sdata);
6812 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6813 break;
6814
6815 case dw_val_class_const_double:
6816 CHECKSUM_ULEB128 (DW_FORM_block);
6817 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6818 CHECKSUM (at->dw_attr_val.v.val_double);
6819 break;
6820
6821 case dw_val_class_wide_int:
6822 CHECKSUM_ULEB128 (DW_FORM_block);
6823 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6824 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6825 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6826 get_full_len (*at->dw_attr_val.v.val_wide)
6827 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6828 break;
6829
6830 case dw_val_class_vec:
6831 CHECKSUM_ULEB128 (DW_FORM_block);
6832 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6833 * at->dw_attr_val.v.val_vec.elt_size);
6834 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6835 (at->dw_attr_val.v.val_vec.length
6836 * at->dw_attr_val.v.val_vec.elt_size));
6837 break;
6838
6839 case dw_val_class_flag:
6840 CHECKSUM_ULEB128 (DW_FORM_flag);
6841 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6842 break;
6843
6844 case dw_val_class_str:
6845 CHECKSUM_ULEB128 (DW_FORM_string);
6846 CHECKSUM_STRING (AT_string (at));
6847 break;
6848
6849 case dw_val_class_addr:
6850 r = AT_addr (at);
6851 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6852 CHECKSUM_ULEB128 (DW_FORM_string);
6853 CHECKSUM_STRING (XSTR (r, 0));
6854 break;
6855
6856 case dw_val_class_offset:
6857 CHECKSUM_ULEB128 (DW_FORM_sdata);
6858 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6859 break;
6860
6861 case dw_val_class_loc:
6862 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6863 loc_checksum_ordered (loc, ctx);
6864 break;
6865
6866 case dw_val_class_fde_ref:
6867 case dw_val_class_lbl_id:
6868 case dw_val_class_lineptr:
6869 case dw_val_class_macptr:
6870 case dw_val_class_loclistsptr:
6871 case dw_val_class_high_pc:
6872 break;
6873
6874 case dw_val_class_file:
6875 case dw_val_class_file_implicit:
6876 CHECKSUM_ULEB128 (DW_FORM_string);
6877 CHECKSUM_STRING (AT_file (at)->filename);
6878 break;
6879
6880 case dw_val_class_data8:
6881 CHECKSUM (at->dw_attr_val.v.val_data8);
6882 break;
6883
6884 default:
6885 break;
6886 }
6887 }
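
/* Summary of the single-letter markers the DWARF-4 type-signature checksum
   above mixes in (they roughly follow the DWARF 4 "Type Signature
   Computation" algorithm):

     'A'  an ordinary attribute, followed by a form code and its value
     'C'  one entry of the enclosing context (namespace/struct/class name)
     'D'  the start of a DIE, followed by its tag
     'E'  terminates the context prefix of a by-name reference
     'N'  a reference checksummed by (qualified) name only
     'R'  a backward reference to an already-visited DIE
     'T'  a reference followed by a recursive checksum of the target DIE  */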
6888
6889 struct checksum_attributes
6890 {
6891 dw_attr_node *at_name;
6892 dw_attr_node *at_type;
6893 dw_attr_node *at_friend;
6894 dw_attr_node *at_accessibility;
6895 dw_attr_node *at_address_class;
6896 dw_attr_node *at_alignment;
6897 dw_attr_node *at_allocated;
6898 dw_attr_node *at_artificial;
6899 dw_attr_node *at_associated;
6900 dw_attr_node *at_binary_scale;
6901 dw_attr_node *at_bit_offset;
6902 dw_attr_node *at_bit_size;
6903 dw_attr_node *at_bit_stride;
6904 dw_attr_node *at_byte_size;
6905 dw_attr_node *at_byte_stride;
6906 dw_attr_node *at_const_value;
6907 dw_attr_node *at_containing_type;
6908 dw_attr_node *at_count;
6909 dw_attr_node *at_data_location;
6910 dw_attr_node *at_data_member_location;
6911 dw_attr_node *at_decimal_scale;
6912 dw_attr_node *at_decimal_sign;
6913 dw_attr_node *at_default_value;
6914 dw_attr_node *at_digit_count;
6915 dw_attr_node *at_discr;
6916 dw_attr_node *at_discr_list;
6917 dw_attr_node *at_discr_value;
6918 dw_attr_node *at_encoding;
6919 dw_attr_node *at_endianity;
6920 dw_attr_node *at_explicit;
6921 dw_attr_node *at_is_optional;
6922 dw_attr_node *at_location;
6923 dw_attr_node *at_lower_bound;
6924 dw_attr_node *at_mutable;
6925 dw_attr_node *at_ordering;
6926 dw_attr_node *at_picture_string;
6927 dw_attr_node *at_prototyped;
6928 dw_attr_node *at_small;
6929 dw_attr_node *at_segment;
6930 dw_attr_node *at_string_length;
6931 dw_attr_node *at_string_length_bit_size;
6932 dw_attr_node *at_string_length_byte_size;
6933 dw_attr_node *at_threads_scaled;
6934 dw_attr_node *at_upper_bound;
6935 dw_attr_node *at_use_location;
6936 dw_attr_node *at_use_UTF8;
6937 dw_attr_node *at_variable_parameter;
6938 dw_attr_node *at_virtuality;
6939 dw_attr_node *at_visibility;
6940 dw_attr_node *at_vtable_elem_location;
6941 };
6942
6943 /* Collect the attributes that we will want to use for the checksum. */
6944
6945 static void
6946 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6947 {
6948 dw_attr_node *a;
6949 unsigned ix;
6950
6951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6952 {
6953 switch (a->dw_attr)
6954 {
6955 case DW_AT_name:
6956 attrs->at_name = a;
6957 break;
6958 case DW_AT_type:
6959 attrs->at_type = a;
6960 break;
6961 case DW_AT_friend:
6962 attrs->at_friend = a;
6963 break;
6964 case DW_AT_accessibility:
6965 attrs->at_accessibility = a;
6966 break;
6967 case DW_AT_address_class:
6968 attrs->at_address_class = a;
6969 break;
6970 case DW_AT_alignment:
6971 attrs->at_alignment = a;
6972 break;
6973 case DW_AT_allocated:
6974 attrs->at_allocated = a;
6975 break;
6976 case DW_AT_artificial:
6977 attrs->at_artificial = a;
6978 break;
6979 case DW_AT_associated:
6980 attrs->at_associated = a;
6981 break;
6982 case DW_AT_binary_scale:
6983 attrs->at_binary_scale = a;
6984 break;
6985 case DW_AT_bit_offset:
6986 attrs->at_bit_offset = a;
6987 break;
6988 case DW_AT_bit_size:
6989 attrs->at_bit_size = a;
6990 break;
6991 case DW_AT_bit_stride:
6992 attrs->at_bit_stride = a;
6993 break;
6994 case DW_AT_byte_size:
6995 attrs->at_byte_size = a;
6996 break;
6997 case DW_AT_byte_stride:
6998 attrs->at_byte_stride = a;
6999 break;
7000 case DW_AT_const_value:
7001 attrs->at_const_value = a;
7002 break;
7003 case DW_AT_containing_type:
7004 attrs->at_containing_type = a;
7005 break;
7006 case DW_AT_count:
7007 attrs->at_count = a;
7008 break;
7009 case DW_AT_data_location:
7010 attrs->at_data_location = a;
7011 break;
7012 case DW_AT_data_member_location:
7013 attrs->at_data_member_location = a;
7014 break;
7015 case DW_AT_decimal_scale:
7016 attrs->at_decimal_scale = a;
7017 break;
7018 case DW_AT_decimal_sign:
7019 attrs->at_decimal_sign = a;
7020 break;
7021 case DW_AT_default_value:
7022 attrs->at_default_value = a;
7023 break;
7024 case DW_AT_digit_count:
7025 attrs->at_digit_count = a;
7026 break;
7027 case DW_AT_discr:
7028 attrs->at_discr = a;
7029 break;
7030 case DW_AT_discr_list:
7031 attrs->at_discr_list = a;
7032 break;
7033 case DW_AT_discr_value:
7034 attrs->at_discr_value = a;
7035 break;
7036 case DW_AT_encoding:
7037 attrs->at_encoding = a;
7038 break;
7039 case DW_AT_endianity:
7040 attrs->at_endianity = a;
7041 break;
7042 case DW_AT_explicit:
7043 attrs->at_explicit = a;
7044 break;
7045 case DW_AT_is_optional:
7046 attrs->at_is_optional = a;
7047 break;
7048 case DW_AT_location:
7049 attrs->at_location = a;
7050 break;
7051 case DW_AT_lower_bound:
7052 attrs->at_lower_bound = a;
7053 break;
7054 case DW_AT_mutable:
7055 attrs->at_mutable = a;
7056 break;
7057 case DW_AT_ordering:
7058 attrs->at_ordering = a;
7059 break;
7060 case DW_AT_picture_string:
7061 attrs->at_picture_string = a;
7062 break;
7063 case DW_AT_prototyped:
7064 attrs->at_prototyped = a;
7065 break;
7066 case DW_AT_small:
7067 attrs->at_small = a;
7068 break;
7069 case DW_AT_segment:
7070 attrs->at_segment = a;
7071 break;
7072 case DW_AT_string_length:
7073 attrs->at_string_length = a;
7074 break;
7075 case DW_AT_string_length_bit_size:
7076 attrs->at_string_length_bit_size = a;
7077 break;
7078 case DW_AT_string_length_byte_size:
7079 attrs->at_string_length_byte_size = a;
7080 break;
7081 case DW_AT_threads_scaled:
7082 attrs->at_threads_scaled = a;
7083 break;
7084 case DW_AT_upper_bound:
7085 attrs->at_upper_bound = a;
7086 break;
7087 case DW_AT_use_location:
7088 attrs->at_use_location = a;
7089 break;
7090 case DW_AT_use_UTF8:
7091 attrs->at_use_UTF8 = a;
7092 break;
7093 case DW_AT_variable_parameter:
7094 attrs->at_variable_parameter = a;
7095 break;
7096 case DW_AT_virtuality:
7097 attrs->at_virtuality = a;
7098 break;
7099 case DW_AT_visibility:
7100 attrs->at_visibility = a;
7101 break;
7102 case DW_AT_vtable_elem_location:
7103 attrs->at_vtable_elem_location = a;
7104 break;
7105 default:
7106 break;
7107 }
7108 }
7109 }
7110
7111 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7112
7113 static void
7114 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7115 {
7116 dw_die_ref c;
7117 dw_die_ref decl;
7118 struct checksum_attributes attrs;
7119
7120 CHECKSUM_ULEB128 ('D');
7121 CHECKSUM_ULEB128 (die->die_tag);
7122
7123 memset (&attrs, 0, sizeof (attrs));
7124
7125 decl = get_AT_ref (die, DW_AT_specification);
7126 if (decl != NULL)
7127 collect_checksum_attributes (&attrs, decl);
7128 collect_checksum_attributes (&attrs, die);
7129
7130 CHECKSUM_ATTR (attrs.at_name);
7131 CHECKSUM_ATTR (attrs.at_accessibility);
7132 CHECKSUM_ATTR (attrs.at_address_class);
7133 CHECKSUM_ATTR (attrs.at_allocated);
7134 CHECKSUM_ATTR (attrs.at_artificial);
7135 CHECKSUM_ATTR (attrs.at_associated);
7136 CHECKSUM_ATTR (attrs.at_binary_scale);
7137 CHECKSUM_ATTR (attrs.at_bit_offset);
7138 CHECKSUM_ATTR (attrs.at_bit_size);
7139 CHECKSUM_ATTR (attrs.at_bit_stride);
7140 CHECKSUM_ATTR (attrs.at_byte_size);
7141 CHECKSUM_ATTR (attrs.at_byte_stride);
7142 CHECKSUM_ATTR (attrs.at_const_value);
7143 CHECKSUM_ATTR (attrs.at_containing_type);
7144 CHECKSUM_ATTR (attrs.at_count);
7145 CHECKSUM_ATTR (attrs.at_data_location);
7146 CHECKSUM_ATTR (attrs.at_data_member_location);
7147 CHECKSUM_ATTR (attrs.at_decimal_scale);
7148 CHECKSUM_ATTR (attrs.at_decimal_sign);
7149 CHECKSUM_ATTR (attrs.at_default_value);
7150 CHECKSUM_ATTR (attrs.at_digit_count);
7151 CHECKSUM_ATTR (attrs.at_discr);
7152 CHECKSUM_ATTR (attrs.at_discr_list);
7153 CHECKSUM_ATTR (attrs.at_discr_value);
7154 CHECKSUM_ATTR (attrs.at_encoding);
7155 CHECKSUM_ATTR (attrs.at_endianity);
7156 CHECKSUM_ATTR (attrs.at_explicit);
7157 CHECKSUM_ATTR (attrs.at_is_optional);
7158 CHECKSUM_ATTR (attrs.at_location);
7159 CHECKSUM_ATTR (attrs.at_lower_bound);
7160 CHECKSUM_ATTR (attrs.at_mutable);
7161 CHECKSUM_ATTR (attrs.at_ordering);
7162 CHECKSUM_ATTR (attrs.at_picture_string);
7163 CHECKSUM_ATTR (attrs.at_prototyped);
7164 CHECKSUM_ATTR (attrs.at_small);
7165 CHECKSUM_ATTR (attrs.at_segment);
7166 CHECKSUM_ATTR (attrs.at_string_length);
7167 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7168 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7169 CHECKSUM_ATTR (attrs.at_threads_scaled);
7170 CHECKSUM_ATTR (attrs.at_upper_bound);
7171 CHECKSUM_ATTR (attrs.at_use_location);
7172 CHECKSUM_ATTR (attrs.at_use_UTF8);
7173 CHECKSUM_ATTR (attrs.at_variable_parameter);
7174 CHECKSUM_ATTR (attrs.at_virtuality);
7175 CHECKSUM_ATTR (attrs.at_visibility);
7176 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7177 CHECKSUM_ATTR (attrs.at_type);
7178 CHECKSUM_ATTR (attrs.at_friend);
7179 CHECKSUM_ATTR (attrs.at_alignment);
7180
7181 /* Checksum the child DIEs. */
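/* (Note, as an aid to reading the loop below: the children of a DIE form a
   circular list -- die->die_child points to the last child and its die_sib
   link leads back to the first -- which is why the loop advances to
   c->die_sib before visiting each child.)  */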
7182 c = die->die_child;
7183 if (c) do {
7184 dw_attr_node *name_attr;
7185
7186 c = c->die_sib;
7187 name_attr = get_AT (c, DW_AT_name);
7188 if (is_template_instantiation (c))
7189 {
7190 /* Ignore instantiations of member type and function templates. */
7191 }
7192 else if (name_attr != NULL
7193 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7194 {
7195 /* Use a shallow checksum for named nested types and member
7196 functions. */
7197 CHECKSUM_ULEB128 ('S');
7198 CHECKSUM_ULEB128 (c->die_tag);
7199 CHECKSUM_STRING (AT_string (name_attr));
7200 }
7201 else
7202 {
7203 /* Use a deep checksum for other children. */
7204 /* Mark this DIE so it gets processed when unmarking. */
7205 if (c->die_mark == 0)
7206 c->die_mark = -1;
7207 die_checksum_ordered (c, ctx, mark);
7208 }
7209 } while (c != die->die_child);
7210
7211 CHECKSUM_ULEB128 (0);
7212 }
7213
7214 /* Add a type name and tag to a hash. */
7215 static void
7216 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7217 {
7218 CHECKSUM_ULEB128 (tag);
7219 CHECKSUM_STRING (name);
7220 }
7221
7222 #undef CHECKSUM
7223 #undef CHECKSUM_STRING
7224 #undef CHECKSUM_ATTR
7225 #undef CHECKSUM_LEB128
7226 #undef CHECKSUM_ULEB128
7227
7228 /* Generate the type signature for DIE. This is computed by generating an
7229 MD5 checksum over the DIE's tag, its relevant attributes, and its
7230 children. Attributes that are references to other DIEs are processed
7231 by recursion, using the MARK field to prevent infinite recursion.
7232 If the DIE is nested inside a namespace or another type, we also
7233 need to include that context in the signature. The lower 64 bits
7234 of the resulting MD5 checksum comprise the signature. */
7235
7236 static void
7237 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7238 {
7239 int mark;
7240 const char *name;
7241 unsigned char checksum[16];
7242 struct md5_ctx ctx;
7243 dw_die_ref decl;
7244 dw_die_ref parent;
7245
7246 name = get_AT_string (die, DW_AT_name);
7247 decl = get_AT_ref (die, DW_AT_specification);
7248 parent = get_die_parent (die);
7249
7250 /* First, compute a signature for just the type name (and its surrounding
7251 context, if any). This is stored in the type unit DIE for link-time
7252 ODR (one-definition rule) checking. */
7253
7254 if (is_cxx () && name != NULL)
7255 {
7256 md5_init_ctx (&ctx);
7257
7258 /* Checksum the names of surrounding namespaces and structures. */
7259 if (parent != NULL)
7260 checksum_die_context (parent, &ctx);
7261
7262 /* Checksum the current DIE. */
7263 die_odr_checksum (die->die_tag, name, &ctx);
7264 md5_finish_ctx (&ctx, checksum);
7265
7266 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7267 }
7268
7269 /* Next, compute the complete type signature. */
7270
7271 md5_init_ctx (&ctx);
7272 mark = 1;
7273 die->die_mark = mark;
7274
7275 /* Checksum the names of surrounding namespaces and structures. */
7276 if (parent != NULL)
7277 checksum_die_context (parent, &ctx);
7278
7279 /* Checksum the DIE and its children. */
7280 die_checksum_ordered (die, &ctx, &mark);
7281 unmark_all_dies (die);
7282 md5_finish_ctx (&ctx, checksum);
7283
7284 /* Store the signature in the type node and link the type DIE and the
7285 type node together. */
7286 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7287 DWARF_TYPE_SIGNATURE_SIZE);
7288 die->comdat_type_p = true;
7289 die->die_id.die_type_node = type_node;
7290 type_node->type_die = die;
7291
7292 /* If the DIE is a specification, link its declaration to the type node
7293 as well. */
7294 if (decl != NULL)
7295 {
7296 decl->comdat_type_p = true;
7297 decl->die_id.die_type_node = type_node;
7298 }
7299 }
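
/* As an illustrative sketch only (not part of GCC): the signature scheme
   above boils down to hashing a byte stream with libiberty's MD5 routines
   and keeping the low DWARF_TYPE_SIGNATURE_SIZE (8) bytes of the digest.
   The helper below is hypothetical and kept out of the build with #if 0;
   BUF and LEN stand in for the serialized DIE contents.  */
#if 0
static void
example_signature_from_bytes (const unsigned char *buf, size_t len,
                              unsigned char sig[DWARF_TYPE_SIGNATURE_SIZE])
{
  unsigned char checksum[16];
  struct md5_ctx ctx;

  md5_init_ctx (&ctx);
  md5_process_bytes (buf, len, &ctx);
  md5_finish_ctx (&ctx, checksum);

  /* Keep only the trailing bytes of the 16-byte digest, as
     generate_type_signature does.  */
  memcpy (sig, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
          DWARF_TYPE_SIGNATURE_SIZE);
}
#endif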
7300
7301 /* Do the location expressions look the same? */
7302 static inline int
7303 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7304 {
7305 return loc1->dw_loc_opc == loc2->dw_loc_opc
7306 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7307 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7308 }
7309
7310 /* Do the values look the same? */
7311 static int
7312 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7313 {
7314 dw_loc_descr_ref loc1, loc2;
7315 rtx r1, r2;
7316
7317 if (v1->val_class != v2->val_class)
7318 return 0;
7319
7320 switch (v1->val_class)
7321 {
7322 case dw_val_class_const:
7323 case dw_val_class_const_implicit:
7324 return v1->v.val_int == v2->v.val_int;
7325 case dw_val_class_unsigned_const:
7326 case dw_val_class_unsigned_const_implicit:
7327 return v1->v.val_unsigned == v2->v.val_unsigned;
7328 case dw_val_class_const_double:
7329 return v1->v.val_double.high == v2->v.val_double.high
7330 && v1->v.val_double.low == v2->v.val_double.low;
7331 case dw_val_class_wide_int:
7332 return *v1->v.val_wide == *v2->v.val_wide;
7333 case dw_val_class_vec:
7334 if (v1->v.val_vec.length != v2->v.val_vec.length
7335 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7336 return 0;
7337 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7338 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7339 return 0;
7340 return 1;
7341 case dw_val_class_flag:
7342 return v1->v.val_flag == v2->v.val_flag;
7343 case dw_val_class_str:
7344 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7345
7346 case dw_val_class_addr:
7347 r1 = v1->v.val_addr;
7348 r2 = v2->v.val_addr;
7349 if (GET_CODE (r1) != GET_CODE (r2))
7350 return 0;
7351 return rtx_equal_p (r1, r2);
7352
7353 case dw_val_class_offset:
7354 return v1->v.val_offset == v2->v.val_offset;
7355
7356 case dw_val_class_loc:
7357 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7358 loc1 && loc2;
7359 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7360 if (!same_loc_p (loc1, loc2, mark))
7361 return 0;
7362 return !loc1 && !loc2;
7363
7364 case dw_val_class_die_ref:
7365 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7366
7367 case dw_val_class_fde_ref:
7368 case dw_val_class_vms_delta:
7369 case dw_val_class_lbl_id:
7370 case dw_val_class_lineptr:
7371 case dw_val_class_macptr:
7372 case dw_val_class_loclistsptr:
7373 case dw_val_class_high_pc:
7374 return 1;
7375
7376 case dw_val_class_file:
7377 case dw_val_class_file_implicit:
7378 return v1->v.val_file == v2->v.val_file;
7379
7380 case dw_val_class_data8:
7381 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7382
7383 default:
7384 return 1;
7385 }
7386 }
7387
7388 /* Do the attributes look the same? */
7389
7390 static int
7391 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7392 {
7393 if (at1->dw_attr != at2->dw_attr)
7394 return 0;
7395
7396 /* We don't care that this was compiled with a different compiler
7397 snapshot; if the output is the same, that's what matters. */
7398 if (at1->dw_attr == DW_AT_producer)
7399 return 1;
7400
7401 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7402 }
7403
7404 /* Do the dies look the same? */
7405
7406 static int
7407 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7408 {
7409 dw_die_ref c1, c2;
7410 dw_attr_node *a1;
7411 unsigned ix;
7412
7413 /* To avoid infinite recursion. */
7414 if (die1->die_mark)
7415 return die1->die_mark == die2->die_mark;
7416 die1->die_mark = die2->die_mark = ++(*mark);
7417
7418 if (die1->die_tag != die2->die_tag)
7419 return 0;
7420
7421 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7422 return 0;
7423
7424 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7425 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7426 return 0;
7427
7428 c1 = die1->die_child;
7429 c2 = die2->die_child;
7430 if (! c1)
7431 {
7432 if (c2)
7433 return 0;
7434 }
7435 else
7436 for (;;)
7437 {
7438 if (!same_die_p (c1, c2, mark))
7439 return 0;
7440 c1 = c1->die_sib;
7441 c2 = c2->die_sib;
7442 if (c1 == die1->die_child)
7443 {
7444 if (c2 == die2->die_child)
7445 break;
7446 else
7447 return 0;
7448 }
7449 }
7450
7451 return 1;
7452 }
7453
7454 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7455 children, and set die_symbol. */
7456
7457 static void
7458 compute_comp_unit_symbol (dw_die_ref unit_die)
7459 {
7460 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7461 const char *base = die_name ? lbasename (die_name) : "anonymous";
7462 char *name = XALLOCAVEC (char, strlen (base) + 64);
7463 char *p;
7464 int i, mark;
7465 unsigned char checksum[16];
7466 struct md5_ctx ctx;
7467
7468 /* Compute the checksum of the DIE, then append part of it as hex digits to
7469 the name of the unit. */
7470
7471 md5_init_ctx (&ctx);
7472 mark = 0;
7473 die_checksum (unit_die, &ctx, &mark);
7474 unmark_all_dies (unit_die);
7475 md5_finish_ctx (&ctx, checksum);
7476
7477 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7478 not start with a letter but with anything valid for filenames, and
7479 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7480 character is not a letter. */
7481 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7482 clean_symbol_name (name);
7483
7484 p = name + strlen (name);
7485 for (i = 0; i < 4; i++)
7486 {
7487 sprintf (p, "%.2x", checksum[i]);
7488 p += 2;
7489 }
7490
7491 unit_die->die_id.die_symbol = xstrdup (name);
7492 }
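
/* For example (hypothetical digest bytes): a unit whose DW_AT_name is
   "foo.c" and whose checksum starts with de ad be ef gets a die_symbol of
   the form "foo.c.deadbeef", modulo whatever character rewriting
   clean_symbol_name applies to make the prefix a valid assembler label.  */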
7493
7494 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7495
7496 static int
7497 is_type_die (dw_die_ref die)
7498 {
7499 switch (die->die_tag)
7500 {
7501 case DW_TAG_array_type:
7502 case DW_TAG_class_type:
7503 case DW_TAG_interface_type:
7504 case DW_TAG_enumeration_type:
7505 case DW_TAG_pointer_type:
7506 case DW_TAG_reference_type:
7507 case DW_TAG_rvalue_reference_type:
7508 case DW_TAG_string_type:
7509 case DW_TAG_structure_type:
7510 case DW_TAG_subroutine_type:
7511 case DW_TAG_union_type:
7512 case DW_TAG_ptr_to_member_type:
7513 case DW_TAG_set_type:
7514 case DW_TAG_subrange_type:
7515 case DW_TAG_base_type:
7516 case DW_TAG_const_type:
7517 case DW_TAG_file_type:
7518 case DW_TAG_packed_type:
7519 case DW_TAG_volatile_type:
7520 case DW_TAG_typedef:
7521 return 1;
7522 default:
7523 return 0;
7524 }
7525 }
7526
7527 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7528 Basically, we want to choose the bits that are likely to be shared between
7529 compilations (types) and leave out the bits that are specific to individual
7530 compilations (functions). */
7531
7532 static int
7533 is_comdat_die (dw_die_ref c)
7534 {
7535 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7536 we do for stabs. The advantage is a greater likelihood of sharing between
7537 objects that don't include headers in the same order (and therefore would
7538 put the base types in a different comdat). jason 8/28/00 */
7539
7540 if (c->die_tag == DW_TAG_base_type)
7541 return 0;
7542
7543 if (c->die_tag == DW_TAG_pointer_type
7544 || c->die_tag == DW_TAG_reference_type
7545 || c->die_tag == DW_TAG_rvalue_reference_type
7546 || c->die_tag == DW_TAG_const_type
7547 || c->die_tag == DW_TAG_volatile_type)
7548 {
7549 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7550
7551 return t ? is_comdat_die (t) : 0;
7552 }
7553
7554 return is_type_die (c);
7555 }
7556
7557 /* Returns true iff C is a compile-unit DIE. */
7558
7559 static inline bool
7560 is_cu_die (dw_die_ref c)
7561 {
7562 return c && (c->die_tag == DW_TAG_compile_unit
7563 || c->die_tag == DW_TAG_skeleton_unit);
7564 }
7565
7566 /* Returns true iff C is a unit DIE of some sort. */
7567
7568 static inline bool
7569 is_unit_die (dw_die_ref c)
7570 {
7571 return c && (c->die_tag == DW_TAG_compile_unit
7572 || c->die_tag == DW_TAG_partial_unit
7573 || c->die_tag == DW_TAG_type_unit
7574 || c->die_tag == DW_TAG_skeleton_unit);
7575 }
7576
7577 /* Returns true iff C is a namespace DIE. */
7578
7579 static inline bool
7580 is_namespace_die (dw_die_ref c)
7581 {
7582 return c && c->die_tag == DW_TAG_namespace;
7583 }
7584
7585 /* Returns true iff C is a class or structure DIE. */
7586
7587 static inline bool
7588 is_class_die (dw_die_ref c)
7589 {
7590 return c && (c->die_tag == DW_TAG_class_type
7591 || c->die_tag == DW_TAG_structure_type);
7592 }
7593
7594 /* Return non-zero if this DIE is a template parameter. */
7595
7596 static inline bool
7597 is_template_parameter (dw_die_ref die)
7598 {
7599 switch (die->die_tag)
7600 {
7601 case DW_TAG_template_type_param:
7602 case DW_TAG_template_value_param:
7603 case DW_TAG_GNU_template_template_param:
7604 case DW_TAG_GNU_template_parameter_pack:
7605 return true;
7606 default:
7607 return false;
7608 }
7609 }
7610
7611 /* Return non-zero if this DIE represents a template instantiation. */
7612
7613 static inline bool
7614 is_template_instantiation (dw_die_ref die)
7615 {
7616 dw_die_ref c;
7617
7618 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7619 return false;
7620 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7621 return false;
7622 }
7623
7624 static char *
7625 gen_internal_sym (const char *prefix)
7626 {
7627 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7628
7629 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7630 return xstrdup (buf);
7631 }
7632
7633 /* Return non-zero if this DIE is a declaration. */
7634
7635 static int
7636 is_declaration_die (dw_die_ref die)
7637 {
7638 dw_attr_node *a;
7639 unsigned ix;
7640
7641 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7642 if (a->dw_attr == DW_AT_declaration)
7643 return 1;
7644
7645 return 0;
7646 }
7647
7648 /* Return non-zero if this DIE is nested inside a subprogram. */
7649
7650 static int
7651 is_nested_in_subprogram (dw_die_ref die)
7652 {
7653 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7654
7655 if (decl == NULL)
7656 decl = die;
7657 return local_scope_p (decl);
7658 }
7659
7660 /* Return non-zero if this DIE contains a defining declaration of a
7661 subprogram. */
7662
7663 static int
7664 contains_subprogram_definition (dw_die_ref die)
7665 {
7666 dw_die_ref c;
7667
7668 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7669 return 1;
7670 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7671 return 0;
7672 }
7673
7674 /* Return non-zero if this is a type DIE that should be moved to a
7675 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7676 unit type. */
7677
7678 static int
7679 should_move_die_to_comdat (dw_die_ref die)
7680 {
7681 switch (die->die_tag)
7682 {
7683 case DW_TAG_class_type:
7684 case DW_TAG_structure_type:
7685 case DW_TAG_enumeration_type:
7686 case DW_TAG_union_type:
7687 /* Don't move declarations, inlined instances, types nested in a
7688 subprogram, or types that contain subprogram definitions. */
7689 if (is_declaration_die (die)
7690 || get_AT (die, DW_AT_abstract_origin)
7691 || is_nested_in_subprogram (die)
7692 || contains_subprogram_definition (die))
7693 return 0;
7694 return 1;
7695 case DW_TAG_array_type:
7696 case DW_TAG_interface_type:
7697 case DW_TAG_pointer_type:
7698 case DW_TAG_reference_type:
7699 case DW_TAG_rvalue_reference_type:
7700 case DW_TAG_string_type:
7701 case DW_TAG_subroutine_type:
7702 case DW_TAG_ptr_to_member_type:
7703 case DW_TAG_set_type:
7704 case DW_TAG_subrange_type:
7705 case DW_TAG_base_type:
7706 case DW_TAG_const_type:
7707 case DW_TAG_file_type:
7708 case DW_TAG_packed_type:
7709 case DW_TAG_volatile_type:
7710 case DW_TAG_typedef:
7711 default:
7712 return 0;
7713 }
7714 }
7715
7716 /* Make a clone of DIE. */
7717
7718 static dw_die_ref
7719 clone_die (dw_die_ref die)
7720 {
7721 dw_die_ref clone = new_die_raw (die->die_tag);
7722 dw_attr_node *a;
7723 unsigned ix;
7724
7725 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7726 add_dwarf_attr (clone, a);
7727
7728 return clone;
7729 }
7730
7731 /* Make a clone of the tree rooted at DIE. */
7732
7733 static dw_die_ref
7734 clone_tree (dw_die_ref die)
7735 {
7736 dw_die_ref c;
7737 dw_die_ref clone = clone_die (die);
7738
7739 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7740
7741 return clone;
7742 }
7743
7744 /* Make a clone of DIE as a declaration. */
7745
7746 static dw_die_ref
7747 clone_as_declaration (dw_die_ref die)
7748 {
7749 dw_die_ref clone;
7750 dw_die_ref decl;
7751 dw_attr_node *a;
7752 unsigned ix;
7753
7754 /* If the DIE is already a declaration, just clone it. */
7755 if (is_declaration_die (die))
7756 return clone_die (die);
7757
7758 /* If the DIE is a specification, just clone its declaration DIE. */
7759 decl = get_AT_ref (die, DW_AT_specification);
7760 if (decl != NULL)
7761 {
7762 clone = clone_die (decl);
7763 if (die->comdat_type_p)
7764 add_AT_die_ref (clone, DW_AT_signature, die);
7765 return clone;
7766 }
7767
7768 clone = new_die_raw (die->die_tag);
7769
7770 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7771 {
7772 /* We don't want to copy over all attributes.
7773 For example we don't want DW_AT_byte_size because otherwise we will no
7774 longer have a declaration and GDB will treat it as a definition. */
7775
7776 switch (a->dw_attr)
7777 {
7778 case DW_AT_abstract_origin:
7779 case DW_AT_artificial:
7780 case DW_AT_containing_type:
7781 case DW_AT_external:
7782 case DW_AT_name:
7783 case DW_AT_type:
7784 case DW_AT_virtuality:
7785 case DW_AT_linkage_name:
7786 case DW_AT_MIPS_linkage_name:
7787 add_dwarf_attr (clone, a);
7788 break;
7789 case DW_AT_byte_size:
7790 case DW_AT_alignment:
7791 default:
7792 break;
7793 }
7794 }
7795
7796 if (die->comdat_type_p)
7797 add_AT_die_ref (clone, DW_AT_signature, die);
7798
7799 add_AT_flag (clone, DW_AT_declaration, 1);
7800 return clone;
7801 }
7802
7803
7804 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7805
7806 struct decl_table_entry
7807 {
7808 dw_die_ref orig;
7809 dw_die_ref copy;
7810 };
7811
7812 /* Helpers to manipulate hash table of copied declarations. */
7813
7814 /* Hashtable helpers. */
7815
7816 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7817 {
7818 typedef die_struct *compare_type;
7819 static inline hashval_t hash (const decl_table_entry *);
7820 static inline bool equal (const decl_table_entry *, const die_struct *);
7821 };
7822
7823 inline hashval_t
7824 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7825 {
7826 return htab_hash_pointer (entry->orig);
7827 }
7828
7829 inline bool
7830 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7831 const die_struct *entry2)
7832 {
7833 return entry1->orig == entry2;
7834 }
7835
7836 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7837
7838 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7839 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7840 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7841 to check if the ancestor has already been copied into UNIT. */
7842
7843 static dw_die_ref
7844 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7845 decl_hash_type *decl_table)
7846 {
7847 dw_die_ref parent = die->die_parent;
7848 dw_die_ref new_parent = unit;
7849 dw_die_ref copy;
7850 decl_table_entry **slot = NULL;
7851 struct decl_table_entry *entry = NULL;
7852
7853 if (decl_table)
7854 {
7855 /* Check if the entry has already been copied to UNIT. */
7856 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7857 INSERT);
7858 if (*slot != HTAB_EMPTY_ENTRY)
7859 {
7860 entry = *slot;
7861 return entry->copy;
7862 }
7863
7864 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7865 entry = XCNEW (struct decl_table_entry);
7866 entry->orig = die;
7867 entry->copy = NULL;
7868 *slot = entry;
7869 }
7870
7871 if (parent != NULL)
7872 {
7873 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7874 if (spec != NULL)
7875 parent = spec;
7876 if (!is_unit_die (parent))
7877 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7878 }
7879
7880 copy = clone_as_declaration (die);
7881 add_child_die (new_parent, copy);
7882
7883 if (decl_table)
7884 {
7885 /* Record the pointer to the copy. */
7886 entry->copy = copy;
7887 }
7888
7889 return copy;
7890 }

7891 /* Copy the declaration context to the new type unit DIE. This includes
7892 any surrounding namespace or type declarations. If the DIE has a
7893 DW_AT_specification attribute, the attributes and children attached to
7894 the specification are copied as well, and a pointer to the original
7895 parent of the declaration DIE is returned. Returns NULL otherwise. */
7896
7897 static dw_die_ref
7898 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7899 {
7900 dw_die_ref decl;
7901 dw_die_ref new_decl;
7902 dw_die_ref orig_parent = NULL;
7903
7904 decl = get_AT_ref (die, DW_AT_specification);
7905 if (decl == NULL)
7906 decl = die;
7907 else
7908 {
7909 unsigned ix;
7910 dw_die_ref c;
7911 dw_attr_node *a;
7912
7913 /* The original DIE will be changed to a declaration, and must
7914 be moved to be a child of the original declaration DIE. */
7915 orig_parent = decl->die_parent;
7916
7917 /* Copy the type node pointer from the new DIE to the original
7918 declaration DIE so we can forward references later. */
7919 decl->comdat_type_p = true;
7920 decl->die_id.die_type_node = die->die_id.die_type_node;
7921
7922 remove_AT (die, DW_AT_specification);
7923
7924 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7925 {
7926 if (a->dw_attr != DW_AT_name
7927 && a->dw_attr != DW_AT_declaration
7928 && a->dw_attr != DW_AT_external)
7929 add_dwarf_attr (die, a);
7930 }
7931
7932 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7933 }
7934
7935 if (decl->die_parent != NULL
7936 && !is_unit_die (decl->die_parent))
7937 {
7938 new_decl = copy_ancestor_tree (unit, decl, NULL);
7939 if (new_decl != NULL)
7940 {
7941 remove_AT (new_decl, DW_AT_signature);
7942 add_AT_specification (die, new_decl);
7943 }
7944 }
7945
7946 return orig_parent;
7947 }
7948
7949 /* Generate the skeleton ancestor tree for the given NODE, then clone
7950 the DIE and add the clone into the tree. */
7951
7952 static void
7953 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7954 {
7955 if (node->new_die != NULL)
7956 return;
7957
7958 node->new_die = clone_as_declaration (node->old_die);
7959
7960 if (node->parent != NULL)
7961 {
7962 generate_skeleton_ancestor_tree (node->parent);
7963 add_child_die (node->parent->new_die, node->new_die);
7964 }
7965 }
7966
7967 /* Generate a skeleton tree of DIEs containing any declarations that are
7968 found in the original tree. We traverse the tree looking for declaration
7969 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7970
7971 static void
7972 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7973 {
7974 skeleton_chain_node node;
7975 dw_die_ref c;
7976 dw_die_ref first;
7977 dw_die_ref prev = NULL;
7978 dw_die_ref next = NULL;
7979
7980 node.parent = parent;
7981
7982 first = c = parent->old_die->die_child;
7983 if (c)
7984 next = c->die_sib;
7985 if (c) do {
7986 if (prev == NULL || prev->die_sib == c)
7987 prev = c;
7988 c = next;
7989 next = (c == first ? NULL : c->die_sib);
7990 node.old_die = c;
7991 node.new_die = NULL;
7992 if (is_declaration_die (c))
7993 {
7994 if (is_template_instantiation (c))
7995 {
7996 /* Instantiated templates do not need to be cloned into the
7997 type unit. Just move the DIE and its children back to
7998 the skeleton tree (in the main CU). */
7999 remove_child_with_prev (c, prev);
8000 add_child_die (parent->new_die, c);
8001 c = prev;
8002 }
8003 else if (c->comdat_type_p)
8004 {
8005 /* This is the skeleton of a type broken out by an earlier
8006 break_out_comdat_types call. Clone the existing DIE, but keep the children
8007 under the original (which is in the main CU). */
8008 dw_die_ref clone = clone_die (c);
8009
8010 replace_child (c, clone, prev);
8011 generate_skeleton_ancestor_tree (parent);
8012 add_child_die (parent->new_die, c);
8013 c = clone;
8014 continue;
8015 }
8016 else
8017 {
8018 /* Clone the existing DIE, move the original to the skeleton
8019 tree (which is in the main CU), and put the clone, with
8020 all the original's children, where the original came from
8021 (which is about to be moved to the type unit). */
8022 dw_die_ref clone = clone_die (c);
8023 move_all_children (c, clone);
8024
8025 /* If the original has a DW_AT_object_pointer attribute,
8026 it would now point to a child DIE just moved to the
8027 cloned tree, so we need to remove that attribute from
8028 the original. */
8029 remove_AT (c, DW_AT_object_pointer);
8030
8031 replace_child (c, clone, prev);
8032 generate_skeleton_ancestor_tree (parent);
8033 add_child_die (parent->new_die, c);
8034 node.old_die = clone;
8035 node.new_die = c;
8036 c = clone;
8037 }
8038 }
8039 generate_skeleton_bottom_up (&node);
8040 } while (next != NULL);
8041 }
8042
8043 /* Wrapper function for generate_skeleton_bottom_up. */
8044
8045 static dw_die_ref
8046 generate_skeleton (dw_die_ref die)
8047 {
8048 skeleton_chain_node node;
8049
8050 node.old_die = die;
8051 node.new_die = NULL;
8052 node.parent = NULL;
8053
8054 /* If this type definition is nested inside another type,
8055 and is not an instantiation of a template, always leave
8056 at least a declaration in its place. */
8057 if (die->die_parent != NULL
8058 && is_type_die (die->die_parent)
8059 && !is_template_instantiation (die))
8060 node.new_die = clone_as_declaration (die);
8061
8062 generate_skeleton_bottom_up (&node);
8063 return node.new_die;
8064 }
8065
8066 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8067 declaration. The original DIE is moved to a new compile unit so that
8068 existing references to it follow it to the new location. If any of the
8069 original DIE's descendants is a declaration, we need to replace the
8070 original DIE with a skeleton tree and move the declarations back into the
8071 skeleton tree. */
8072
8073 static dw_die_ref
8074 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8075 dw_die_ref prev)
8076 {
8077 dw_die_ref skeleton, orig_parent;
8078
8079 /* Copy the declaration context to the type unit DIE. If the returned
8080 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8081 that DIE. */
8082 orig_parent = copy_declaration_context (unit, child);
8083
8084 skeleton = generate_skeleton (child);
8085 if (skeleton == NULL)
8086 remove_child_with_prev (child, prev);
8087 else
8088 {
8089 skeleton->comdat_type_p = true;
8090 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8091
8092 /* If the original DIE was a specification, we need to put
8093 the skeleton under the parent DIE of the declaration.
8094 This leaves the original declaration in the tree, but
8095 it will be pruned later since there are no longer any
8096 references to it. */
8097 if (orig_parent != NULL)
8098 {
8099 remove_child_with_prev (child, prev);
8100 add_child_die (orig_parent, skeleton);
8101 }
8102 else
8103 replace_child (child, skeleton, prev);
8104 }
8105
8106 return skeleton;
8107 }
8108
8109 static void
8110 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8111 comdat_type_node *type_node,
8112 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8113
8114 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8115 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8116 DWARF procedure references in the DW_AT_location attribute. */
8117
8118 static dw_die_ref
8119 copy_dwarf_procedure (dw_die_ref die,
8120 comdat_type_node *type_node,
8121 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8122 {
8123 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8124
8125 /* DWARF procedures are not supposed to have children... */
8126 gcc_assert (die->die_child == NULL);
8127
8128 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8129 gcc_assert (vec_safe_length (die->die_attr) == 1
8130 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8131
8132 /* Do not copy DWARF procedures more than once. */
8133 bool existed;
8134 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8135 if (existed)
8136 return die_copy;
8137
8138 die_copy = clone_die (die);
8139 add_child_die (type_node->root_die, die_copy);
8140 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8141 return die_copy;
8142 }
8143
8144 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8145 procedures in DIE's attributes. */
8146
8147 static void
8148 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8149 comdat_type_node *type_node,
8150 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8151 {
8152 dw_attr_node *a;
8153 unsigned i;
8154
8155 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8156 {
8157 dw_loc_descr_ref loc;
8158
8159 if (a->dw_attr_val.val_class != dw_val_class_loc)
8160 continue;
8161
8162 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8163 {
8164 switch (loc->dw_loc_opc)
8165 {
8166 case DW_OP_call2:
8167 case DW_OP_call4:
8168 case DW_OP_call_ref:
8169 gcc_assert (loc->dw_loc_oprnd1.val_class
8170 == dw_val_class_die_ref);
8171 loc->dw_loc_oprnd1.v.val_die_ref.die
8172 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8173 type_node,
8174 copied_dwarf_procs);
8175
8176 default:
8177 break;
8178 }
8179 }
8180 }
8181 }
8182
8183 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8184 rewrite references to point to the copies.
8185
8186 References are looked for in DIE's attributes and recursively in the
8187 attributes of all its children that are location descriptions.
8188 COPIED_DWARF_PROCS maps old DWARF procedures to their copies; it is used
8189 to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8190
8191 static void
8192 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8193 comdat_type_node *type_node,
8194 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8195 {
8196 dw_die_ref c;
8197
8198 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8199 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8200 type_node,
8201 copied_dwarf_procs));
8202 }
8203
8204 /* Traverse the DIE and set up additional .debug_types or .debug_info
8205 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8206 section. */
8207
8208 static void
8209 break_out_comdat_types (dw_die_ref die)
8210 {
8211 dw_die_ref c;
8212 dw_die_ref first;
8213 dw_die_ref prev = NULL;
8214 dw_die_ref next = NULL;
8215 dw_die_ref unit = NULL;
8216
8217 first = c = die->die_child;
8218 if (c)
8219 next = c->die_sib;
8220 if (c) do {
8221 if (prev == NULL || prev->die_sib == c)
8222 prev = c;
8223 c = next;
8224 next = (c == first ? NULL : c->die_sib);
8225 if (should_move_die_to_comdat (c))
8226 {
8227 dw_die_ref replacement;
8228 comdat_type_node *type_node;
8229
8230 /* Break out nested types into their own type units. */
8231 break_out_comdat_types (c);
8232
8233 /* Create a new type unit DIE as the root for the new tree, and
8234 add it to the list of comdat types. */
8235 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8236 add_AT_unsigned (unit, DW_AT_language,
8237 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8238 type_node = ggc_cleared_alloc<comdat_type_node> ();
8239 type_node->root_die = unit;
8240 type_node->next = comdat_type_list;
8241 comdat_type_list = type_node;
8242
8243 /* Generate the type signature. */
8244 generate_type_signature (c, type_node);
8245
8246 /* Copy the declaration context, attributes, and children of the
8247 declaration into the new type unit DIE, then remove this DIE
8248 from the main CU (or replace it with a skeleton if necessary). */
8249 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8250 type_node->skeleton_die = replacement;
8251
8252 /* Add the DIE to the new compunit. */
8253 add_child_die (unit, c);
8254
8255 /* Types can reference DWARF procedures for type size or data location
8256 expressions. Calls in DWARF expressions cannot target procedures
8257 that are not in the same section. So we must copy DWARF procedures
8258 along with this type and then rewrite references to them. */
8259 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8260 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8261
8262 if (replacement != NULL)
8263 c = replacement;
8264 }
8265 else if (c->die_tag == DW_TAG_namespace
8266 || c->die_tag == DW_TAG_class_type
8267 || c->die_tag == DW_TAG_structure_type
8268 || c->die_tag == DW_TAG_union_type)
8269 {
8270 /* Look for nested types that can be broken out. */
8271 break_out_comdat_types (c);
8272 }
8273 } while (next != NULL);
8274 }
8275
8276 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8277 Enter all the cloned children into the hash table decl_table. */
8278
8279 static dw_die_ref
8280 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8281 {
8282 dw_die_ref c;
8283 dw_die_ref clone;
8284 struct decl_table_entry *entry;
8285 decl_table_entry **slot;
8286
8287 if (die->die_tag == DW_TAG_subprogram)
8288 clone = clone_as_declaration (die);
8289 else
8290 clone = clone_die (die);
8291
8292 slot = decl_table->find_slot_with_hash (die,
8293 htab_hash_pointer (die), INSERT);
8294
8295 /* Assert that DIE isn't in the hash table yet. If it were already
8296 there, its ancestors would necessarily be there as well, and
8297 clone_tree_partial wouldn't have been called. */
8298 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8299
8300 entry = XCNEW (struct decl_table_entry);
8301 entry->orig = die;
8302 entry->copy = clone;
8303 *slot = entry;
8304
8305 if (die->die_tag != DW_TAG_subprogram)
8306 FOR_EACH_CHILD (die, c,
8307 add_child_die (clone, clone_tree_partial (c, decl_table)));
8308
8309 return clone;
8310 }
8311
8312 /* Walk the DIE and its children, looking for references to incomplete
8313 or trivial types that are unmarked (i.e., that are not in the current
8314 type_unit). */
8315
8316 static void
8317 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8318 {
8319 dw_die_ref c;
8320 dw_attr_node *a;
8321 unsigned ix;
8322
8323 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8324 {
8325 if (AT_class (a) == dw_val_class_die_ref)
8326 {
8327 dw_die_ref targ = AT_ref (a);
8328 decl_table_entry **slot;
8329 struct decl_table_entry *entry;
8330
8331 if (targ->die_mark != 0 || targ->comdat_type_p)
8332 continue;
8333
8334 slot = decl_table->find_slot_with_hash (targ,
8335 htab_hash_pointer (targ),
8336 INSERT);
8337
8338 if (*slot != HTAB_EMPTY_ENTRY)
8339 {
8340 /* TARG has already been copied, so we just need to
8341 modify the reference to point to the copy. */
8342 entry = *slot;
8343 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8344 }
8345 else
8346 {
8347 dw_die_ref parent = unit;
8348 dw_die_ref copy = clone_die (targ);
8349
8350 /* Record in DECL_TABLE that TARG has been copied.
8351 Need to do this now, before the recursive call,
8352 because DECL_TABLE may be expanded and SLOT
8353 would no longer be a valid pointer. */
8354 entry = XCNEW (struct decl_table_entry);
8355 entry->orig = targ;
8356 entry->copy = copy;
8357 *slot = entry;
8358
8359 /* If TARG is not a declaration DIE, we need to copy its
8360 children. */
8361 if (!is_declaration_die (targ))
8362 {
8363 FOR_EACH_CHILD (
8364 targ, c,
8365 add_child_die (copy,
8366 clone_tree_partial (c, decl_table)));
8367 }
8368
8369 /* Make sure the cloned tree is marked as part of the
8370 type unit. */
8371 mark_dies (copy);
8372
8373 /* If TARG has surrounding context, copy its ancestor tree
8374 into the new type unit. */
8375 if (targ->die_parent != NULL
8376 && !is_unit_die (targ->die_parent))
8377 parent = copy_ancestor_tree (unit, targ->die_parent,
8378 decl_table);
8379
8380 add_child_die (parent, copy);
8381 a->dw_attr_val.v.val_die_ref.die = copy;
8382
8383 /* Make sure the newly-copied DIE is walked. If it was
8384 installed in a previously-added context, it won't
8385 get visited otherwise. */
8386 if (parent != unit)
8387 {
8388 /* Find the highest point of the newly-added tree,
8389 mark each node along the way, and walk from there. */
8390 parent->die_mark = 1;
8391 while (parent->die_parent
8392 && parent->die_parent->die_mark == 0)
8393 {
8394 parent = parent->die_parent;
8395 parent->die_mark = 1;
8396 }
8397 copy_decls_walk (unit, parent, decl_table);
8398 }
8399 }
8400 }
8401 }
8402
8403 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8404 }
8405
8406 /* Copy declarations for "unworthy" types into the new comdat section.
8407 Incomplete types, modified types, and certain other types aren't broken
8408 out into comdat sections of their own, so they don't have a signature,
8409 and we need to copy the declaration into the same section so that we
8410 don't have an external reference. */
8411
8412 static void
8413 copy_decls_for_unworthy_types (dw_die_ref unit)
8414 {
8415 mark_dies (unit);
8416 decl_hash_type decl_table (10);
8417 copy_decls_walk (unit, unit, &decl_table);
8418 unmark_dies (unit);
8419 }
8420
8421 /* Traverse the DIE and add a sibling attribute if it may have the
8422 effect of speeding up access to siblings. To save some space,
8423 avoid generating sibling attributes for DIEs without children. */
8424
8425 static void
8426 add_sibling_attributes (dw_die_ref die)
8427 {
8428 dw_die_ref c;
8429
8430 if (! die->die_child)
8431 return;
8432
8433 if (die->die_parent && die != die->die_parent->die_child)
8434 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8435
8436 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8437 }
8438
8439 /* Output all location lists for the DIE and its children. */
8440
8441 static void
8442 output_location_lists (dw_die_ref die)
8443 {
8444 dw_die_ref c;
8445 dw_attr_node *a;
8446 unsigned ix;
8447
8448 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8449 if (AT_class (a) == dw_val_class_loc_list)
8450 output_loc_list (AT_loc_list (a));
8451
8452 FOR_EACH_CHILD (die, c, output_location_lists (c));
8453 }
8454
8455 /* During assign_location_list_indexes and output_loclists_offsets this is
8456 the current index; afterwards it is the number of assigned indexes (i.e. how
8457 large the .debug_loclists* offset table should be). */
8458 static unsigned int loc_list_idx;
8459
8460 /* Output all location list offsets for the DIE and its children. */
8461
8462 static void
8463 output_loclists_offsets (dw_die_ref die)
8464 {
8465 dw_die_ref c;
8466 dw_attr_node *a;
8467 unsigned ix;
8468
8469 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8470 if (AT_class (a) == dw_val_class_loc_list)
8471 {
8472 dw_loc_list_ref l = AT_loc_list (a);
8473 if (l->offset_emitted)
8474 continue;
8475 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8476 loc_section_label, NULL);
8477 gcc_assert (l->hash == loc_list_idx);
8478 loc_list_idx++;
8479 l->offset_emitted = true;
8480 }
8481
8482 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8483 }
8484
8485 /* Recursively set indexes of location lists. */
8486
8487 static void
8488 assign_location_list_indexes (dw_die_ref die)
8489 {
8490 dw_die_ref c;
8491 dw_attr_node *a;
8492 unsigned ix;
8493
8494 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8495 if (AT_class (a) == dw_val_class_loc_list)
8496 {
8497 dw_loc_list_ref list = AT_loc_list (a);
8498 if (!list->num_assigned)
8499 {
8500 list->num_assigned = true;
8501 list->hash = loc_list_idx++;
8502 }
8503 }
8504
8505 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8506 }
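
/* Taken together, assign_location_list_indexes and output_loclists_offsets
   implement the DWARF 5 split-DWARF scheme in which a DIE refers to its
   location list by a small ULEB128 index (DW_FORM_loclistx) and a separate
   offset table maps each index to the list's position in .debug_loclists.  */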
8507
8508 /* We want to limit the number of external references, because they are
8509 larger than local references: a relocation takes multiple words, and
8510 even a sig8 reference is always eight bytes, whereas a local reference
8511 can be as small as one byte (though GCC usually emits 4-byte DW_FORM_ref4).
8512 So if we encounter multiple external references to the same type DIE, we
8513 make a local typedef stub for it and redirect all references there.
8514
8515 This is the element of the hash table for keeping track of these
8516 references. */
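
/* A rough, hypothetical size comparison: fifty DW_FORM_ref_sig8 references
   to one external type cost 400 bytes, while a single local stub DIE of a
   dozen or so bytes plus fifty 4-byte DW_FORM_ref4 references cost a little
   over 200 bytes.  */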
8517
8518 struct external_ref
8519 {
8520 dw_die_ref type;
8521 dw_die_ref stub;
8522 unsigned n_refs;
8523 };
8524
8525 /* Hashtable helpers. */
8526
8527 struct external_ref_hasher : free_ptr_hash <external_ref>
8528 {
8529 static inline hashval_t hash (const external_ref *);
8530 static inline bool equal (const external_ref *, const external_ref *);
8531 };
8532
8533 inline hashval_t
8534 external_ref_hasher::hash (const external_ref *r)
8535 {
8536 dw_die_ref die = r->type;
8537 hashval_t h = 0;
8538
8539 /* We can't use the address of the DIE for hashing, because
8540 that will make the order of the stub DIEs non-deterministic. */
8541 if (! die->comdat_type_p)
8542 /* We have a symbol; use it to compute a hash. */
8543 h = htab_hash_string (die->die_id.die_symbol);
8544 else
8545 {
8546 /* We have a type signature; use a subset of the bits as the hash.
8547 The 8-byte signature is at least as large as hashval_t. */
8548 comdat_type_node *type_node = die->die_id.die_type_node;
8549 memcpy (&h, type_node->signature, sizeof (h));
8550 }
8551 return h;
8552 }
8553
8554 inline bool
8555 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8556 {
8557 return r1->type == r2->type;
8558 }
8559
8560 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8561
8562 /* Return a pointer to the external_ref for references to DIE. */
8563
8564 static struct external_ref *
8565 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8566 {
8567 struct external_ref ref, *ref_p;
8568 external_ref **slot;
8569
8570 ref.type = die;
8571 slot = map->find_slot (&ref, INSERT);
8572 if (*slot != HTAB_EMPTY_ENTRY)
8573 return *slot;
8574
8575 ref_p = XCNEW (struct external_ref);
8576 ref_p->type = die;
8577 *slot = ref_p;
8578 return ref_p;
8579 }
8580
8581 /* Subroutine of optimize_external_refs, below.
8582
8583 If we see a type skeleton, record it as our stub. If we see external
8584 references, remember how many we've seen. */
8585
8586 static void
8587 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8588 {
8589 dw_die_ref c;
8590 dw_attr_node *a;
8591 unsigned ix;
8592 struct external_ref *ref_p;
8593
8594 if (is_type_die (die)
8595 && (c = get_AT_ref (die, DW_AT_signature)))
8596 {
8597 /* This is a local skeleton; use it for local references. */
8598 ref_p = lookup_external_ref (map, c);
8599 ref_p->stub = die;
8600 }
8601
8602 /* Scan the DIE references, and remember any that refer to DIEs from
8603 other CUs (i.e. those which are not marked). */
8604 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8605 if (AT_class (a) == dw_val_class_die_ref
8606 && (c = AT_ref (a))->die_mark == 0
8607 && is_type_die (c))
8608 {
8609 ref_p = lookup_external_ref (map, c);
8610 ref_p->n_refs++;
8611 }
8612
8613 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8614 }
8615
8616 /* Hash table traversal callback for optimize_external_refs, below. SLOT
8617 points to an external_ref, DATA is the CU we're processing. If we don't
8618 already have a local stub, and we have multiple refs, build a stub. */
8619
8620 int
8621 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8622 {
8623 struct external_ref *ref_p = *slot;
8624
8625 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8626 {
8627 /* We have multiple references to this type, so build a small stub.
8628 Both of these forms are a bit dodgy from the perspective of the
8629 DWARF standard, since technically they should have names. */
8630 dw_die_ref cu = data;
8631 dw_die_ref type = ref_p->type;
8632 dw_die_ref stub = NULL;
8633
8634 if (type->comdat_type_p)
8635 {
8636 /* If we refer to this type via sig8, use AT_signature. */
8637 stub = new_die (type->die_tag, cu, NULL_TREE);
8638 add_AT_die_ref (stub, DW_AT_signature, type);
8639 }
8640 else
8641 {
8642 /* Otherwise, use a typedef with no name. */
8643 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8644 add_AT_die_ref (stub, DW_AT_type, type);
8645 }
8646
8647 stub->die_mark++;
8648 ref_p->stub = stub;
8649 }
8650 return 1;
8651 }
8652
8653 /* DIE is a unit; look through all the DIE references to see if there are
8654 any external references to types, and if so, create local stubs for
8655 them which will be applied in build_abbrev_table. This is useful because
8656 references to local DIEs are smaller. */
8657
8658 static external_ref_hash_type *
8659 optimize_external_refs (dw_die_ref die)
8660 {
8661 external_ref_hash_type *map = new external_ref_hash_type (10);
8662 optimize_external_refs_1 (die, map);
8663 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8664 return map;
8665 }
8666
8667 /* The following 3 variables are temporaries that are computed only during the
8668 build_abbrev_table call and used and released during the following
8669 optimize_abbrev_table call. */
8670
8671 /* First abbrev_id that can be optimized based on usage. */
8672 static unsigned int abbrev_opt_start;
8673
8674 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8675 abbrev_id smaller than this, because they must be already sized
8676 during build_abbrev_table). */
8677 static unsigned int abbrev_opt_base_type_end;
8678
8679 /* Vector of usage counts during build_abbrev_table. Indexed by
8680 abbrev_id - abbrev_opt_start. */
8681 static vec<unsigned int> abbrev_usage_count;
8682
8683 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8684 static vec<dw_die_ref> sorted_abbrev_dies;
8685
8686 /* The format of each DIE (and its attribute value pairs) is encoded in an
8687 abbreviation table. This routine builds the abbreviation table and assigns
8688 a unique abbreviation id for each abbreviation entry. The children of each
8689 die are visited recursively. */
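
/* For example, if many DW_TAG_variable DIEs all carry DW_AT_name as
   DW_FORM_strp and DW_AT_type as DW_FORM_ref4, they share one abbreviation:
   .debug_abbrev describes the tag/attribute/form list once, and each DIE in
   .debug_info then starts with that abbreviation's ULEB128 id followed only
   by its attribute values.  */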
8690
8691 static void
8692 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8693 {
8694 unsigned int abbrev_id = 0;
8695 dw_die_ref c;
8696 dw_attr_node *a;
8697 unsigned ix;
8698 dw_die_ref abbrev;
8699
8700 /* Scan the DIE references, and replace any that refer to
8701 DIEs from other CUs (i.e. those which are not marked) with
8702 the local stubs we built in optimize_external_refs. */
8703 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8704 if (AT_class (a) == dw_val_class_die_ref
8705 && (c = AT_ref (a))->die_mark == 0)
8706 {
8707 struct external_ref *ref_p;
8708 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8709
8710 ref_p = lookup_external_ref (extern_map, c);
8711 if (ref_p->stub && ref_p->stub != die)
8712 change_AT_die_ref (a, ref_p->stub);
8713 else
8714 /* We aren't changing this reference, so mark it external. */
8715 set_AT_ref_external (a, 1);
8716 }
8717
8718 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8719 {
8720 dw_attr_node *die_a, *abbrev_a;
8721 unsigned ix;
8722 bool ok = true;
8723
8724 if (abbrev_id == 0)
8725 continue;
8726 if (abbrev->die_tag != die->die_tag)
8727 continue;
8728 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8729 continue;
8730
8731 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8732 continue;
8733
8734 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8735 {
8736 abbrev_a = &(*abbrev->die_attr)[ix];
8737 if ((abbrev_a->dw_attr != die_a->dw_attr)
8738 || (value_format (abbrev_a) != value_format (die_a)))
8739 {
8740 ok = false;
8741 break;
8742 }
8743 }
8744 if (ok)
8745 break;
8746 }
8747
8748 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8749 {
8750 vec_safe_push (abbrev_die_table, die);
8751 if (abbrev_opt_start)
8752 abbrev_usage_count.safe_push (0);
8753 }
8754 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8755 {
8756 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8757 sorted_abbrev_dies.safe_push (die);
8758 }
8759
8760 die->die_abbrev = abbrev_id;
8761 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8762 }
8763
8764 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8765 by die_abbrev's usage count, from the most commonly used
8766 abbreviation to the least. */
8767
8768 static int
8769 die_abbrev_cmp (const void *p1, const void *p2)
8770 {
8771 dw_die_ref die1 = *(const dw_die_ref *) p1;
8772 dw_die_ref die2 = *(const dw_die_ref *) p2;
8773
8774 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8775 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8776
8777 if (die1->die_abbrev >= abbrev_opt_base_type_end
8778 && die2->die_abbrev >= abbrev_opt_base_type_end)
8779 {
8780 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8781 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8782 return -1;
8783 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8784 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8785 return 1;
8786 }
8787
8788 /* Stabilize the sort. */
8789 if (die1->die_abbrev < die2->die_abbrev)
8790 return -1;
8791 if (die1->die_abbrev > die2->die_abbrev)
8792 return 1;
8793
8794 return 0;
8795 }
8796
8797 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
8798 class attributes of the DIEs between sorted_abbrev_dies[first_id] and
8799 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
8800 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
8801
8802 static void
8803 optimize_implicit_const (unsigned int first_id, unsigned int end,
8804 vec<bool> &implicit_consts)
8805 {
8806 /* It never makes sense if there is just one DIE using the abbreviation. */
8807 if (end < first_id + 2)
8808 return;
8809
8810 dw_attr_node *a;
8811 unsigned ix, i;
8812 dw_die_ref die = sorted_abbrev_dies[first_id];
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (implicit_consts[ix])
8815 {
8816 enum dw_val_class new_class = dw_val_class_none;
8817 switch (AT_class (a))
8818 {
8819 case dw_val_class_unsigned_const:
8820 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8821 continue;
8822
8823 /* The .debug_abbrev section will grow by
8824 size_of_sleb128 (AT_unsigned (a)) bytes, while we save
8825 constant_size (AT_unsigned (a)) bytes per DIE using that abbreviation. */
8826 if (constant_size (AT_unsigned (a)) * (end - first_id)
8827 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8828 continue;
8829
8830 new_class = dw_val_class_unsigned_const_implicit;
8831 break;
8832
8833 case dw_val_class_const:
8834 new_class = dw_val_class_const_implicit;
8835 break;
8836
8837 case dw_val_class_file:
8838 new_class = dw_val_class_file_implicit;
8839 break;
8840
8841 default:
8842 continue;
8843 }
8844 for (i = first_id; i < end; i++)
8845 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8846 = new_class;
8847 }
8848 }
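
/* Worked example: if five DIEs share an abbreviation whose attribute value
   is 4 (constant_size == 1, size_of_sleb128 == 1), the test above passes
   (1 * 5 > 1), so the value moves into .debug_abbrev as a single SLEB128
   byte (DW_FORM_implicit_const) and five data bytes disappear from
   .debug_info.  */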
8849
8850 /* Attempt to optimize the abbreviation table, for abbreviations with
8851 id abbrev_opt_start and above. */
8852
8853 static void
8854 optimize_abbrev_table (void)
8855 {
8856 if (abbrev_opt_start
8857 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8858 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8859 {
8860 auto_vec<bool, 32> implicit_consts;
8861 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8862
8863 unsigned int abbrev_id = abbrev_opt_start - 1;
8864 unsigned int first_id = ~0U;
8865 unsigned int last_abbrev_id = 0;
8866 unsigned int i;
8867 dw_die_ref die;
8868 if (abbrev_opt_base_type_end > abbrev_opt_start)
8869 abbrev_id = abbrev_opt_base_type_end - 1;
8870 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8871 most commonly used abbreviations come first. */
8872 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8873 {
8874 dw_attr_node *a;
8875 unsigned ix;
8876
8877 /* If calc_base_type_die_sizes has been called, the CU and
8878 base types after it can't be optimized, because we've already
8879 calculated their DIE offsets. We've sorted them first. */
8880 if (die->die_abbrev < abbrev_opt_base_type_end)
8881 continue;
8882 if (die->die_abbrev != last_abbrev_id)
8883 {
8884 last_abbrev_id = die->die_abbrev;
8885 if (dwarf_version >= 5 && first_id != ~0U)
8886 optimize_implicit_const (first_id, i, implicit_consts);
8887 abbrev_id++;
8888 (*abbrev_die_table)[abbrev_id] = die;
8889 if (dwarf_version >= 5)
8890 {
8891 first_id = i;
8892 implicit_consts.truncate (0);
8893
8894 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8895 switch (AT_class (a))
8896 {
8897 case dw_val_class_const:
8898 case dw_val_class_unsigned_const:
8899 case dw_val_class_file:
8900 implicit_consts.safe_push (true);
8901 break;
8902 default:
8903 implicit_consts.safe_push (false);
8904 break;
8905 }
8906 }
8907 }
8908 else if (dwarf_version >= 5)
8909 {
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 if (!implicit_consts[ix])
8912 continue;
8913 else
8914 {
8915 dw_attr_node *other_a
8916 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8917 if (!dw_val_equal_p (&a->dw_attr_val,
8918 &other_a->dw_attr_val))
8919 implicit_consts[ix] = false;
8920 }
8921 }
8922 die->die_abbrev = abbrev_id;
8923 }
8924 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8925 if (dwarf_version >= 5 && first_id != ~0U)
8926 optimize_implicit_const (first_id, i, implicit_consts);
8927 }
8928
8929 abbrev_opt_start = 0;
8930 abbrev_opt_base_type_end = 0;
8931 abbrev_usage_count.release ();
8932 sorted_abbrev_dies.release ();
8933 }
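/* A minimal sketch, not part of the GCC sources, of why the sort above
   helps: every DIE starts with its abbreviation code encoded as a
   uleb128, so codes 1..127 cost one byte and larger codes cost more.
   The hypothetical helper below (kept out of the build by #if 0) shows
   the per-code cost that the reordering minimizes for the most common
   abbreviations.  */
#if 0
static unsigned
example_uleb128_size (unsigned long value)
{
  unsigned size = 1;
  while (value >= 0x80)		/* 7 payload bits per byte.  */
    {
      value >>= 7;
      size++;
    }
  return size;
}
/* example_uleb128_size (127) == 1 while example_uleb128_size (128) == 2,
   which is why, before DWARF 5, the optimization is only attempted once
   the table has grown past 127 entries.  */
#endif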
8934 \f
8935 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8936
8937 static int
8938 constant_size (unsigned HOST_WIDE_INT value)
8939 {
8940 int log;
8941
8942 if (value == 0)
8943 log = 0;
8944 else
8945 log = floor_log2 (value);
8946
8947 log = log / 8;
8948 log = 1 << (floor_log2 (log) + 1);
8949
8950 return log;
8951 }
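/* Worked examples of the computation above (illustrative only, tracing
   the arithmetic rather than prescribing it): for VALUE == 0x2a,
   floor_log2 is 5, 5/8 is 0, and 1 << (floor_log2 (0) + 1) == 1 << 0,
   i.e. 1 byte; for VALUE == 0x1234, floor_log2 is 12, 12/8 is 1, and
   1 << 1 == 2 bytes; for VALUE == 0x12345678, floor_log2 is 28, 28/8
   is 3, and 1 << (floor_log2 (3) + 1) == 1 << 2 == 4 bytes.  The
   result is always 1, 2, 4 or 8, matching the DW_FORM_data1/2/4/8
   choices made in value_format.  */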
8952
8953 /* Return the size of a DIE as it is represented in the
8954 .debug_info section. */
8955
8956 static unsigned long
8957 size_of_die (dw_die_ref die)
8958 {
8959 unsigned long size = 0;
8960 dw_attr_node *a;
8961 unsigned ix;
8962 enum dwarf_form form;
8963
8964 size += size_of_uleb128 (die->die_abbrev);
8965 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8966 {
8967 switch (AT_class (a))
8968 {
8969 case dw_val_class_addr:
8970 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8971 {
8972 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8973 size += size_of_uleb128 (AT_index (a));
8974 }
8975 else
8976 size += DWARF2_ADDR_SIZE;
8977 break;
8978 case dw_val_class_offset:
8979 size += DWARF_OFFSET_SIZE;
8980 break;
8981 case dw_val_class_loc:
8982 {
8983 unsigned long lsize = size_of_locs (AT_loc (a));
8984
8985 /* Block length. */
8986 if (dwarf_version >= 4)
8987 size += size_of_uleb128 (lsize);
8988 else
8989 size += constant_size (lsize);
8990 size += lsize;
8991 }
8992 break;
8993 case dw_val_class_loc_list:
8994 if (dwarf_split_debug_info && dwarf_version >= 5)
8995 {
8996 gcc_assert (AT_loc_list (a)->num_assigned);
8997 size += size_of_uleb128 (AT_loc_list (a)->hash);
8998 }
8999 else
9000 size += DWARF_OFFSET_SIZE;
9001 break;
9002 case dw_val_class_range_list:
9003 if (value_format (a) == DW_FORM_rnglistx)
9004 {
9005 gcc_assert (rnglist_idx);
9006 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9007 size += size_of_uleb128 (r->idx);
9008 }
9009 else
9010 size += DWARF_OFFSET_SIZE;
9011 break;
9012 case dw_val_class_const:
9013 size += size_of_sleb128 (AT_int (a));
9014 break;
9015 case dw_val_class_unsigned_const:
9016 {
9017 int csize = constant_size (AT_unsigned (a));
9018 if (dwarf_version == 3
9019 && a->dw_attr == DW_AT_data_member_location
9020 && csize >= 4)
9021 size += size_of_uleb128 (AT_unsigned (a));
9022 else
9023 size += csize;
9024 }
9025 break;
9026 case dw_val_class_const_implicit:
9027 case dw_val_class_unsigned_const_implicit:
9028 case dw_val_class_file_implicit:
9029 /* These occupy no size in the DIE, just an extra sleb128 in
9030 .debug_abbrev. */
9031 break;
9032 case dw_val_class_const_double:
9033 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9034 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9035 size++; /* block */
9036 break;
9037 case dw_val_class_wide_int:
9038 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9039 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9040 if (get_full_len (*a->dw_attr_val.v.val_wide)
9041 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9042 size++; /* block */
9043 break;
9044 case dw_val_class_vec:
9045 size += constant_size (a->dw_attr_val.v.val_vec.length
9046 * a->dw_attr_val.v.val_vec.elt_size)
9047 + a->dw_attr_val.v.val_vec.length
9048 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9049 break;
9050 case dw_val_class_flag:
9051 if (dwarf_version >= 4)
9052 /* Currently all add_AT_flag calls pass in 1 as last argument,
9053 so DW_FORM_flag_present can be used. If that ever changes,
9054 we'll need to use DW_FORM_flag and have some optimization
9055 in build_abbrev_table that will change those to
9056 DW_FORM_flag_present if it is set to 1 in all DIEs using
9057 the same abbrev entry. */
9058 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9059 else
9060 size += 1;
9061 break;
9062 case dw_val_class_die_ref:
9063 if (AT_ref_external (a))
9064 {
9065 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9066 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9067 is sized by target address length, whereas in DWARF3
9068 it's always sized as an offset. */
9069 if (use_debug_types)
9070 size += DWARF_TYPE_SIGNATURE_SIZE;
9071 else if (dwarf_version == 2)
9072 size += DWARF2_ADDR_SIZE;
9073 else
9074 size += DWARF_OFFSET_SIZE;
9075 }
9076 else
9077 size += DWARF_OFFSET_SIZE;
9078 break;
9079 case dw_val_class_fde_ref:
9080 size += DWARF_OFFSET_SIZE;
9081 break;
9082 case dw_val_class_lbl_id:
9083 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9084 {
9085 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9086 size += size_of_uleb128 (AT_index (a));
9087 }
9088 else
9089 size += DWARF2_ADDR_SIZE;
9090 break;
9091 case dw_val_class_lineptr:
9092 case dw_val_class_macptr:
9093 case dw_val_class_loclistsptr:
9094 size += DWARF_OFFSET_SIZE;
9095 break;
9096 case dw_val_class_str:
9097 form = AT_string_form (a);
9098 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9099 size += DWARF_OFFSET_SIZE;
9100 else if (form == DW_FORM_GNU_str_index)
9101 size += size_of_uleb128 (AT_index (a));
9102 else
9103 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9104 break;
9105 case dw_val_class_file:
9106 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9107 break;
9108 case dw_val_class_data8:
9109 size += 8;
9110 break;
9111 case dw_val_class_vms_delta:
9112 size += DWARF_OFFSET_SIZE;
9113 break;
9114 case dw_val_class_high_pc:
9115 size += DWARF2_ADDR_SIZE;
9116 break;
9117 case dw_val_class_discr_value:
9118 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9119 break;
9120 case dw_val_class_discr_list:
9121 {
9122 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9123
9124 /* This is a block, so we have the block length and then its
9125 data. */
9126 size += constant_size (block_size) + block_size;
9127 }
9128 break;
9129 default:
9130 gcc_unreachable ();
9131 }
9132 }
9133
9134 return size;
9135 }
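/* A minimal sketch, not part of the GCC sources, of how the sizes above
   add up for one hypothetical DIE, assuming 32-bit DWARF
   (DWARF_OFFSET_SIZE == 4).  The attribute/form pairs themselves are
   charged to .debug_abbrev, not to each DIE.  */
#if 0
unsigned long example_die_size
  = (1	 /* uleb128 abbreviation code, assuming a code below 128.  */
     + 4 /* DW_AT_name as DW_FORM_strp: a .debug_str offset.  */
     + 1 /* DW_AT_decl_file as DW_FORM_data1: a small file number.  */
     + 4 /* DW_AT_type as a 4-byte CU-relative reference.  */);
/* == 10 bytes contributed to next_die_offset by calc_die_sizes.  */
#endif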
9136
9137 /* Size the debugging information associated with a given DIE. Visits the
9138 DIE's children recursively. Updates the global variable next_die_offset
9139 each time through. Uses the current value of next_die_offset to update the
9140 die_offset field in each DIE. */
9141
9142 static void
9143 calc_die_sizes (dw_die_ref die)
9144 {
9145 dw_die_ref c;
9146
9147 gcc_assert (die->die_offset == 0
9148 || (unsigned long int) die->die_offset == next_die_offset);
9149 die->die_offset = next_die_offset;
9150 next_die_offset += size_of_die (die);
9151
9152 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9153
9154 if (die->die_child != NULL)
9155 /* Count the null byte used to terminate sibling lists. */
9156 next_die_offset += 1;
9157 }
9158
9159 /* Size just the base type children at the start of the CU.
9160 This is needed because build_abbrev_table needs to size location
9161 expressions, and sizing of type-based stack ops needs to know the
9162 die_offset values for the base types. */
9163
9164 static void
9165 calc_base_type_die_sizes (void)
9166 {
9167 unsigned long die_offset = (dwarf_split_debug_info
9168 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9169 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9170 unsigned int i;
9171 dw_die_ref base_type;
9172 #if ENABLE_ASSERT_CHECKING
9173 dw_die_ref prev = comp_unit_die ()->die_child;
9174 #endif
9175
9176 die_offset += size_of_die (comp_unit_die ());
9177 for (i = 0; base_types.iterate (i, &base_type); i++)
9178 {
9179 #if ENABLE_ASSERT_CHECKING
9180 gcc_assert (base_type->die_offset == 0
9181 && prev->die_sib == base_type
9182 && base_type->die_child == NULL
9183 && base_type->die_abbrev);
9184 prev = base_type;
9185 #endif
9186 if (abbrev_opt_start
9187 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9188 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9189 base_type->die_offset = die_offset;
9190 die_offset += size_of_die (base_type);
9191 }
9192 }
9193
9194 /* Set the marks for a die and its children. We do this so
9195 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9196 DIEs in the same CU will be marked. We used to clear out the offset
9197 and use that as the flag, but ran into ordering problems. */
9198
9199 static void
9200 mark_dies (dw_die_ref die)
9201 {
9202 dw_die_ref c;
9203
9204 gcc_assert (!die->die_mark);
9205
9206 die->die_mark = 1;
9207 FOR_EACH_CHILD (die, c, mark_dies (c));
9208 }
9209
9210 /* Clear the marks for a die and its children. */
9211
9212 static void
9213 unmark_dies (dw_die_ref die)
9214 {
9215 dw_die_ref c;
9216
9217 if (! use_debug_types)
9218 gcc_assert (die->die_mark);
9219
9220 die->die_mark = 0;
9221 FOR_EACH_CHILD (die, c, unmark_dies (c));
9222 }
9223
9224 /* Clear the marks for a die, its children and referred dies. */
9225
9226 static void
9227 unmark_all_dies (dw_die_ref die)
9228 {
9229 dw_die_ref c;
9230 dw_attr_node *a;
9231 unsigned ix;
9232
9233 if (!die->die_mark)
9234 return;
9235 die->die_mark = 0;
9236
9237 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9238
9239 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9240 if (AT_class (a) == dw_val_class_die_ref)
9241 unmark_all_dies (AT_ref (a));
9242 }
9243
9244 /* Determine whether the entry should appear in the final output file. It
9245 may be from a pruned type. */
9246
9247 static bool
9248 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9249 {
9250 /* By limiting gnu pubnames to definitions only, gold can generate a
9251 gdb index without entries for declarations, which don't include
9252 enough information to be useful. */
9253 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9254 return false;
9255
9256 if (table == pubname_table)
9257 {
9258 /* Enumerator names are part of the pubname table, but the
9259 parent DW_TAG_enumeration_type die may have been pruned.
9260 Don't output them if that is the case. */
9261 if (p->die->die_tag == DW_TAG_enumerator &&
9262 (p->die->die_parent == NULL
9263 || !p->die->die_parent->die_perennial_p))
9264 return false;
9265
9266 /* Everything else in the pubname table is included. */
9267 return true;
9268 }
9269
9270 /* The pubtypes table shouldn't include types that have been
9271 pruned. */
9272 return (p->die->die_offset != 0
9273 || !flag_eliminate_unused_debug_types);
9274 }
9275
9276 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9277 generated for the compilation unit. */
9278
9279 static unsigned long
9280 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9281 {
9282 unsigned long size;
9283 unsigned i;
9284 pubname_entry *p;
9285 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9286
9287 size = DWARF_PUBNAMES_HEADER_SIZE;
9288 FOR_EACH_VEC_ELT (*names, i, p)
9289 if (include_pubname_in_output (names, p))
9290 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9291
9292 size += DWARF_OFFSET_SIZE;
9293 return size;
9294 }
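/* Illustrative arithmetic (not normative): with 32-bit DWARF each
   included entry costs DWARF_OFFSET_SIZE (4) for the DIE offset, the
   name's length plus its terminating NUL, and one extra flag byte when
   -ggnu-pubnames (debug_generate_pub_sections == 2) is in effect.  A
   hypothetical entry for "vector" would therefore take
   4 + 6 + 1 + 1 == 12 bytes, and the table as a whole ends with one
   further DWARF_OFFSET_SIZE worth of zero as its terminator.  */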
9295
9296 /* Return the size of the information in the .debug_aranges section. */
9297
9298 static unsigned long
9299 size_of_aranges (void)
9300 {
9301 unsigned long size;
9302
9303 size = DWARF_ARANGES_HEADER_SIZE;
9304
9305 /* Count the address/length pair for this compilation unit. */
9306 if (text_section_used)
9307 size += 2 * DWARF2_ADDR_SIZE;
9308 if (cold_text_section_used)
9309 size += 2 * DWARF2_ADDR_SIZE;
9310 if (have_multiple_function_sections)
9311 {
9312 unsigned fde_idx;
9313 dw_fde_ref fde;
9314
9315 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9316 {
9317 if (DECL_IGNORED_P (fde->decl))
9318 continue;
9319 if (!fde->in_std_section)
9320 size += 2 * DWARF2_ADDR_SIZE;
9321 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9322 size += 2 * DWARF2_ADDR_SIZE;
9323 }
9324 }
9325
9326 /* Count the two zero words used to terminate the address range table. */
9327 size += 2 * DWARF2_ADDR_SIZE;
9328 return size;
9329 }
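/* Illustrative arithmetic (not normative): for a CU whose code lives
   entirely in .text, with 8-byte addresses, the section costs
   DWARF_ARANGES_HEADER_SIZE + 2 * 8 (the .text address/length pair)
   + 2 * 8 (the terminating pair of zero words).  Each extra
   non-standard-section FDE counted above adds another 2 * 8 bytes.  */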
9330 \f
9331 /* Select the encoding of an attribute value. */
9332
9333 static enum dwarf_form
9334 value_format (dw_attr_node *a)
9335 {
9336 switch (AT_class (a))
9337 {
9338 case dw_val_class_addr:
9339 /* Only very few attributes allow DW_FORM_addr. */
9340 switch (a->dw_attr)
9341 {
9342 case DW_AT_low_pc:
9343 case DW_AT_high_pc:
9344 case DW_AT_entry_pc:
9345 case DW_AT_trampoline:
9346 return (AT_index (a) == NOT_INDEXED
9347 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9348 default:
9349 break;
9350 }
9351 switch (DWARF2_ADDR_SIZE)
9352 {
9353 case 1:
9354 return DW_FORM_data1;
9355 case 2:
9356 return DW_FORM_data2;
9357 case 4:
9358 return DW_FORM_data4;
9359 case 8:
9360 return DW_FORM_data8;
9361 default:
9362 gcc_unreachable ();
9363 }
9364 case dw_val_class_loc_list:
9365 if (dwarf_split_debug_info
9366 && dwarf_version >= 5
9367 && AT_loc_list (a)->num_assigned)
9368 return DW_FORM_loclistx;
9369 /* FALLTHRU */
9370 case dw_val_class_range_list:
9371 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9372 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9373 care about the sizes of the .debug* sections in shared libraries and
9374 executables and don't take into account relocations that affect just
9375 relocatable objects - for DW_FORM_rnglistx we'd also have to emit an
9376 offset table in the .debug_rnglists section. */
9377 if (dwarf_split_debug_info
9378 && dwarf_version >= 5
9379 && AT_class (a) == dw_val_class_range_list
9380 && rnglist_idx
9381 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9382 return DW_FORM_rnglistx;
9383 if (dwarf_version >= 4)
9384 return DW_FORM_sec_offset;
9385 /* FALLTHRU */
9386 case dw_val_class_vms_delta:
9387 case dw_val_class_offset:
9388 switch (DWARF_OFFSET_SIZE)
9389 {
9390 case 4:
9391 return DW_FORM_data4;
9392 case 8:
9393 return DW_FORM_data8;
9394 default:
9395 gcc_unreachable ();
9396 }
9397 case dw_val_class_loc:
9398 if (dwarf_version >= 4)
9399 return DW_FORM_exprloc;
9400 switch (constant_size (size_of_locs (AT_loc (a))))
9401 {
9402 case 1:
9403 return DW_FORM_block1;
9404 case 2:
9405 return DW_FORM_block2;
9406 case 4:
9407 return DW_FORM_block4;
9408 default:
9409 gcc_unreachable ();
9410 }
9411 case dw_val_class_const:
9412 return DW_FORM_sdata;
9413 case dw_val_class_unsigned_const:
9414 switch (constant_size (AT_unsigned (a)))
9415 {
9416 case 1:
9417 return DW_FORM_data1;
9418 case 2:
9419 return DW_FORM_data2;
9420 case 4:
9421 /* In DWARF3 DW_AT_data_member_location with
9422 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9423 constant, so we need to use DW_FORM_udata if we need
9424 a large constant. */
9425 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9426 return DW_FORM_udata;
9427 return DW_FORM_data4;
9428 case 8:
9429 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9430 return DW_FORM_udata;
9431 return DW_FORM_data8;
9432 default:
9433 gcc_unreachable ();
9434 }
9435 case dw_val_class_const_implicit:
9436 case dw_val_class_unsigned_const_implicit:
9437 case dw_val_class_file_implicit:
9438 return DW_FORM_implicit_const;
9439 case dw_val_class_const_double:
9440 switch (HOST_BITS_PER_WIDE_INT)
9441 {
9442 case 8:
9443 return DW_FORM_data2;
9444 case 16:
9445 return DW_FORM_data4;
9446 case 32:
9447 return DW_FORM_data8;
9448 case 64:
9449 if (dwarf_version >= 5)
9450 return DW_FORM_data16;
9451 /* FALLTHRU */
9452 default:
9453 return DW_FORM_block1;
9454 }
9455 case dw_val_class_wide_int:
9456 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9457 {
9458 case 8:
9459 return DW_FORM_data1;
9460 case 16:
9461 return DW_FORM_data2;
9462 case 32:
9463 return DW_FORM_data4;
9464 case 64:
9465 return DW_FORM_data8;
9466 case 128:
9467 if (dwarf_version >= 5)
9468 return DW_FORM_data16;
9469 /* FALLTHRU */
9470 default:
9471 return DW_FORM_block1;
9472 }
9473 case dw_val_class_vec:
9474 switch (constant_size (a->dw_attr_val.v.val_vec.length
9475 * a->dw_attr_val.v.val_vec.elt_size))
9476 {
9477 case 1:
9478 return DW_FORM_block1;
9479 case 2:
9480 return DW_FORM_block2;
9481 case 4:
9482 return DW_FORM_block4;
9483 default:
9484 gcc_unreachable ();
9485 }
9486 case dw_val_class_flag:
9487 if (dwarf_version >= 4)
9488 {
9489 /* Currently all add_AT_flag calls pass in 1 as last argument,
9490 so DW_FORM_flag_present can be used. If that ever changes,
9491 we'll need to use DW_FORM_flag and have some optimization
9492 in build_abbrev_table that will change those to
9493 DW_FORM_flag_present if it is set to 1 in all DIEs using
9494 the same abbrev entry. */
9495 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9496 return DW_FORM_flag_present;
9497 }
9498 return DW_FORM_flag;
9499 case dw_val_class_die_ref:
9500 if (AT_ref_external (a))
9501 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9502 else
9503 return DW_FORM_ref;
9504 case dw_val_class_fde_ref:
9505 return DW_FORM_data;
9506 case dw_val_class_lbl_id:
9507 return (AT_index (a) == NOT_INDEXED
9508 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9509 case dw_val_class_lineptr:
9510 case dw_val_class_macptr:
9511 case dw_val_class_loclistsptr:
9512 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9513 case dw_val_class_str:
9514 return AT_string_form (a);
9515 case dw_val_class_file:
9516 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9517 {
9518 case 1:
9519 return DW_FORM_data1;
9520 case 2:
9521 return DW_FORM_data2;
9522 case 4:
9523 return DW_FORM_data4;
9524 default:
9525 gcc_unreachable ();
9526 }
9527
9528 case dw_val_class_data8:
9529 return DW_FORM_data8;
9530
9531 case dw_val_class_high_pc:
9532 switch (DWARF2_ADDR_SIZE)
9533 {
9534 case 1:
9535 return DW_FORM_data1;
9536 case 2:
9537 return DW_FORM_data2;
9538 case 4:
9539 return DW_FORM_data4;
9540 case 8:
9541 return DW_FORM_data8;
9542 default:
9543 gcc_unreachable ();
9544 }
9545
9546 case dw_val_class_discr_value:
9547 return (a->dw_attr_val.v.val_discr_value.pos
9548 ? DW_FORM_udata
9549 : DW_FORM_sdata);
9550 case dw_val_class_discr_list:
9551 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9552 {
9553 case 1:
9554 return DW_FORM_block1;
9555 case 2:
9556 return DW_FORM_block2;
9557 case 4:
9558 return DW_FORM_block4;
9559 default:
9560 gcc_unreachable ();
9561 }
9562
9563 default:
9564 gcc_unreachable ();
9565 }
9566 }
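/* For illustration (a hypothetical attribute, not a fixed rule): an
   unsigned constant attribute with value 40 has constant_size 1 and so
   is encoded as DW_FORM_data1, while the same attribute with value 300
   has constant_size 2 and becomes DW_FORM_data2; the DWARF 3
   DW_AT_data_member_location special case above instead falls back to
   DW_FORM_udata once the constant would need four or more bytes.  */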
9567
9568 /* Output the encoding of an attribute value. */
9569
9570 static void
9571 output_value_format (dw_attr_node *a)
9572 {
9573 enum dwarf_form form = value_format (a);
9574
9575 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9576 }
9577
9578 /* Given a DIE and its abbreviation code, output the corresponding abbreviation table entry. */
9579
9580 static void
9581 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9582 {
9583 unsigned ix;
9584 dw_attr_node *a_attr;
9585
9586 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9587 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9588 dwarf_tag_name (abbrev->die_tag));
9589
9590 if (abbrev->die_child != NULL)
9591 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9592 else
9593 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9594
9595 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9596 {
9597 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9598 dwarf_attr_name (a_attr->dw_attr));
9599 output_value_format (a_attr);
9600 if (value_format (a_attr) == DW_FORM_implicit_const)
9601 {
9602 if (AT_class (a_attr) == dw_val_class_file_implicit)
9603 {
9604 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9605 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9606 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9607 }
9608 else
9609 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9610 }
9611 }
9612
9613 dw2_asm_output_data (1, 0, NULL);
9614 dw2_asm_output_data (1, 0, NULL);
9615 }
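/* For illustration, a hypothetical abbreviation entry as the code above
   lays it out in .debug_abbrev (every number a uleb128 unless noted):

     2                          abbreviation code
     DW_TAG_variable            tag
     DW_children_no             one-byte children flag
     DW_AT_name,  DW_FORM_strp
     DW_AT_type,  DW_FORM_ref4
     0, 0                       attribute/form list terminator

   A DW_FORM_implicit_const pair is additionally followed by its
   sleb128 value, as handled above.  */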
9616
9617
9618 /* Output the .debug_abbrev section which defines the DIE abbreviation
9619 table. */
9620
9621 static void
9622 output_abbrev_section (void)
9623 {
9624 unsigned int abbrev_id;
9625 dw_die_ref abbrev;
9626
9627 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9628 if (abbrev_id != 0)
9629 output_die_abbrevs (abbrev_id, abbrev);
9630
9631 /* Terminate the table. */
9632 dw2_asm_output_data (1, 0, NULL);
9633 }
9634
9635 /* Return a new location list, given the begin and end range, and the
9636 expression. */
9637
9638 static inline dw_loc_list_ref
9639 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9640 const char *section)
9641 {
9642 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9643
9644 retlist->begin = begin;
9645 retlist->begin_entry = NULL;
9646 retlist->end = end;
9647 retlist->expr = expr;
9648 retlist->section = section;
9649
9650 return retlist;
9651 }
9652
9653 /* Generate a new internal symbol for this location list node, if it
9654 hasn't got one yet. */
9655
9656 static inline void
9657 gen_llsym (dw_loc_list_ref list)
9658 {
9659 gcc_assert (!list->ll_symbol);
9660 list->ll_symbol = gen_internal_sym ("LLST");
9661 }
9662
9663 /* Output the location list given to us. */
9664
9665 static void
9666 output_loc_list (dw_loc_list_ref list_head)
9667 {
9668 if (list_head->emitted)
9669 return;
9670 list_head->emitted = true;
9671
9672 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9673
9674 dw_loc_list_ref curr = list_head;
9675 const char *last_section = NULL;
9676 const char *base_label = NULL;
9677
9678 /* Walk the location list, and output each range + expression. */
9679 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9680 {
9681 unsigned long size;
9682 /* Don't output an entry that starts and ends at the same address. */
9683 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9684 continue;
9685 size = size_of_locs (curr->expr);
9686 /* If the expression is too large, drop it on the floor. We could
9687 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9688 in the expression, but >= 64KB expressions for a single value
9689 in a single range are unlikely to be very useful. */
9690 if (dwarf_version < 5 && size > 0xffff)
9691 continue;
9692 if (dwarf_version >= 5)
9693 {
9694 if (dwarf_split_debug_info)
9695 {
9696 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
9697 uleb128 index into .debug_addr and uleb128 length. */
9698 dw2_asm_output_data (1, DW_LLE_startx_length,
9699 "DW_LLE_startx_length (%s)",
9700 list_head->ll_symbol);
9701 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9702 "Location list range start index "
9703 "(%s)", curr->begin);
9704 /* FIXME: This will ICE if HAVE_AS_LEB128 is not defined.
9705 For that case we probably need to emit DW_LLE_startx_endx,
9706 but we'd need 2 .debug_addr entries rather than just one. */
9707 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9708 "Location list length (%s)",
9709 list_head->ll_symbol);
9710 }
9711 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9712 {
9713 /* If all code is in .text section, the base address is
9714 already provided by the CU attributes. Use
9715 DW_LLE_offset_pair where both addresses are uleb128 encoded
9716 offsets against that base. */
9717 dw2_asm_output_data (1, DW_LLE_offset_pair,
9718 "DW_LLE_offset_pair (%s)",
9719 list_head->ll_symbol);
9720 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9721 "Location list begin address (%s)",
9722 list_head->ll_symbol);
9723 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9724 "Location list end address (%s)",
9725 list_head->ll_symbol);
9726 }
9727 else if (HAVE_AS_LEB128)
9728 {
9729 /* Otherwise, find out how many consecutive entries could share
9730 the same base entry. If just one, emit DW_LLE_start_length,
9731 otherwise emit DW_LLE_base_address for the base address
9732 followed by a series of DW_LLE_offset_pair. */
9733 if (last_section == NULL || curr->section != last_section)
9734 {
9735 dw_loc_list_ref curr2;
9736 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9737 curr2 = curr2->dw_loc_next)
9738 {
9739 if (strcmp (curr2->begin, curr2->end) == 0
9740 && !curr2->force)
9741 continue;
9742 break;
9743 }
9744 if (curr2 == NULL || curr->section != curr2->section)
9745 last_section = NULL;
9746 else
9747 {
9748 last_section = curr->section;
9749 base_label = curr->begin;
9750 dw2_asm_output_data (1, DW_LLE_base_address,
9751 "DW_LLE_base_address (%s)",
9752 list_head->ll_symbol);
9753 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9754 "Base address (%s)",
9755 list_head->ll_symbol);
9756 }
9757 }
9758 /* Only one entry with the same base address. Use
9759 DW_LLE_start_length with absolute address and uleb128
9760 length. */
9761 if (last_section == NULL)
9762 {
9763 dw2_asm_output_data (1, DW_LLE_start_length,
9764 "DW_LLE_start_length (%s)",
9765 list_head->ll_symbol);
9766 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9767 "Location list begin address (%s)",
9768 list_head->ll_symbol);
9769 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9770 "Location list length "
9771 "(%s)", list_head->ll_symbol);
9772 }
9773 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9774 DW_LLE_base_address. */
9775 else
9776 {
9777 dw2_asm_output_data (1, DW_LLE_offset_pair,
9778 "DW_LLE_offset_pair (%s)",
9779 list_head->ll_symbol);
9780 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9781 "Location list begin address "
9782 "(%s)", list_head->ll_symbol);
9783 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9784 "Location list end address "
9785 "(%s)", list_head->ll_symbol);
9786 }
9787 }
9788 /* The assembler does not support the .uleb128 directive. Emit
9789 DW_LLE_start_end with a pair of absolute addresses. */
9790 else
9791 {
9792 dw2_asm_output_data (1, DW_LLE_start_end,
9793 "DW_LLE_start_end (%s)",
9794 list_head->ll_symbol);
9795 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9796 "Location list begin address (%s)",
9797 list_head->ll_symbol);
9798 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9799 "Location list end address (%s)",
9800 list_head->ll_symbol);
9801 }
9802 }
9803 else if (dwarf_split_debug_info)
9804 {
9805 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
9806 and 4 byte length. */
9807 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9808 "Location list start/length entry (%s)",
9809 list_head->ll_symbol);
9810 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9811 "Location list range start index (%s)",
9812 curr->begin);
9813 /* The length field is 4 bytes. If we ever need to support
9814 an 8-byte length, we can add a new DW_LLE code or fall back
9815 to DW_LLE_GNU_start_end_entry. */
9816 dw2_asm_output_delta (4, curr->end, curr->begin,
9817 "Location list range length (%s)",
9818 list_head->ll_symbol);
9819 }
9820 else if (!have_multiple_function_sections)
9821 {
9822 /* Pair of relative addresses against the start of the text section. */
9823 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9824 "Location list begin address (%s)",
9825 list_head->ll_symbol);
9826 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9827 "Location list end address (%s)",
9828 list_head->ll_symbol);
9829 }
9830 else
9831 {
9832 /* Pair of absolute addresses. */
9833 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9834 "Location list begin address (%s)",
9835 list_head->ll_symbol);
9836 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9837 "Location list end address (%s)",
9838 list_head->ll_symbol);
9839 }
9840
9841 /* Output the block length for this list of location operations. */
9842 if (dwarf_version >= 5)
9843 dw2_asm_output_data_uleb128 (size, "Location expression size");
9844 else
9845 {
9846 gcc_assert (size <= 0xffff);
9847 dw2_asm_output_data (2, size, "Location expression size");
9848 }
9849
9850 output_loc_sequence (curr->expr, -1);
9851 }
9852
9853 /* And finally list termination. */
9854 if (dwarf_version >= 5)
9855 dw2_asm_output_data (1, DW_LLE_end_of_list,
9856 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9857 else if (dwarf_split_debug_info)
9858 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9859 "Location list terminator (%s)",
9860 list_head->ll_symbol);
9861 else
9862 {
9863 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9864 "Location list terminator begin (%s)",
9865 list_head->ll_symbol);
9866 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9867 "Location list terminator end (%s)",
9868 list_head->ll_symbol);
9869 }
9870 }
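/* Summary of the DWARF 5 cases handled above (descriptive only; the
   code above is authoritative):

     -gsplit-dwarf                       DW_LLE_startx_length
     single text section, .uleb128 ok    DW_LLE_offset_pair
     multiple sections, .uleb128 ok      DW_LLE_base_address followed by
                                         DW_LLE_offset_pair entries, or
                                         DW_LLE_start_length for a lone
                                         range in its section
     no .uleb128 support                 DW_LLE_start_end

   and every list is closed with DW_LLE_end_of_list.  Pre-DWARF-5 lists
   instead use the GNU split-dwarf entry kinds or raw address pairs.  */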
9871
9872 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9873 section. Emit a relocated reference if val_entry is NULL, otherwise,
9874 emit an indirect reference. */
9875
9876 static void
9877 output_range_list_offset (dw_attr_node *a)
9878 {
9879 const char *name = dwarf_attr_name (a->dw_attr);
9880
9881 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9882 {
9883 if (dwarf_version >= 5)
9884 {
9885 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9886 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9887 debug_ranges_section, "%s", name);
9888 }
9889 else
9890 {
9891 char *p = strchr (ranges_section_label, '\0');
9892 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9893 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9894 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9895 debug_ranges_section, "%s", name);
9896 *p = '\0';
9897 }
9898 }
9899 else if (dwarf_version >= 5)
9900 {
9901 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9902 gcc_assert (rnglist_idx);
9903 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9904 }
9905 else
9906 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9907 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9908 "%s (offset from %s)", name, ranges_section_label);
9909 }
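/* Illustrative arithmetic (not normative): in the pre-DWARF-5 branches
   above, each .debug_ranges entry is a begin/end address pair, hence
   the factor 2 * DWARF2_ADDR_SIZE; a DIE whose val_offset is 3, with
   8-byte addresses, therefore refers to byte 3 * 2 * 8 == 48 past
   ranges_section_label.  */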
9910
9911 /* Output the offset into the debug_loc section. */
9912
9913 static void
9914 output_loc_list_offset (dw_attr_node *a)
9915 {
9916 char *sym = AT_loc_list (a)->ll_symbol;
9917
9918 gcc_assert (sym);
9919 if (!dwarf_split_debug_info)
9920 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9921 "%s", dwarf_attr_name (a->dw_attr));
9922 else if (dwarf_version >= 5)
9923 {
9924 gcc_assert (AT_loc_list (a)->num_assigned);
9925 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9926 dwarf_attr_name (a->dw_attr),
9927 sym);
9928 }
9929 else
9930 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9931 "%s", dwarf_attr_name (a->dw_attr));
9932 }
9933
9934 /* Output an attribute's index or value appropriately. */
9935
9936 static void
9937 output_attr_index_or_value (dw_attr_node *a)
9938 {
9939 const char *name = dwarf_attr_name (a->dw_attr);
9940
9941 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9942 {
9943 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9944 return;
9945 }
9946 switch (AT_class (a))
9947 {
9948 case dw_val_class_addr:
9949 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9950 break;
9951 case dw_val_class_high_pc:
9952 case dw_val_class_lbl_id:
9953 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9954 break;
9955 default:
9956 gcc_unreachable ();
9957 }
9958 }
9959
9960 /* Output a type signature. */
9961
9962 static inline void
9963 output_signature (const char *sig, const char *name)
9964 {
9965 int i;
9966
9967 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9968 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9969 }
9970
9971 /* Output a discriminant value. */
9972
9973 static inline void
9974 output_discr_value (dw_discr_value *discr_value, const char *name)
9975 {
9976 if (discr_value->pos)
9977 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9978 else
9979 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9980 }
9981
9982 /* Output the DIE and its attributes. Called recursively to generate
9983 the definitions of each child DIE. */
9984
9985 static void
9986 output_die (dw_die_ref die)
9987 {
9988 dw_attr_node *a;
9989 dw_die_ref c;
9990 unsigned long size;
9991 unsigned ix;
9992
9993 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
9994 (unsigned long)die->die_offset,
9995 dwarf_tag_name (die->die_tag));
9996
9997 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9998 {
9999 const char *name = dwarf_attr_name (a->dw_attr);
10000
10001 switch (AT_class (a))
10002 {
10003 case dw_val_class_addr:
10004 output_attr_index_or_value (a);
10005 break;
10006
10007 case dw_val_class_offset:
10008 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10009 "%s", name);
10010 break;
10011
10012 case dw_val_class_range_list:
10013 output_range_list_offset (a);
10014 break;
10015
10016 case dw_val_class_loc:
10017 size = size_of_locs (AT_loc (a));
10018
10019 /* Output the block length for this list of location operations. */
10020 if (dwarf_version >= 4)
10021 dw2_asm_output_data_uleb128 (size, "%s", name);
10022 else
10023 dw2_asm_output_data (constant_size (size), size, "%s", name);
10024
10025 output_loc_sequence (AT_loc (a), -1);
10026 break;
10027
10028 case dw_val_class_const:
10029 /* ??? It would be slightly more efficient to use a scheme like the one
10030 used for unsigned constants below, but gdb 4.x does not sign
10031 extend. Gdb 5.x does sign extend. */
10032 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10033 break;
10034
10035 case dw_val_class_unsigned_const:
10036 {
10037 int csize = constant_size (AT_unsigned (a));
10038 if (dwarf_version == 3
10039 && a->dw_attr == DW_AT_data_member_location
10040 && csize >= 4)
10041 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10042 else
10043 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10044 }
10045 break;
10046
10047 case dw_val_class_const_implicit:
10048 if (flag_debug_asm)
10049 fprintf (asm_out_file, "\t\t\t%s %s ("
10050 HOST_WIDE_INT_PRINT_DEC ")\n",
10051 ASM_COMMENT_START, name, AT_int (a));
10052 break;
10053
10054 case dw_val_class_unsigned_const_implicit:
10055 if (flag_debug_asm)
10056 fprintf (asm_out_file, "\t\t\t%s %s ("
10057 HOST_WIDE_INT_PRINT_HEX ")\n",
10058 ASM_COMMENT_START, name, AT_unsigned (a));
10059 break;
10060
10061 case dw_val_class_const_double:
10062 {
10063 unsigned HOST_WIDE_INT first, second;
10064
10065 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10066 dw2_asm_output_data (1,
10067 HOST_BITS_PER_DOUBLE_INT
10068 / HOST_BITS_PER_CHAR,
10069 NULL);
10070
10071 if (WORDS_BIG_ENDIAN)
10072 {
10073 first = a->dw_attr_val.v.val_double.high;
10074 second = a->dw_attr_val.v.val_double.low;
10075 }
10076 else
10077 {
10078 first = a->dw_attr_val.v.val_double.low;
10079 second = a->dw_attr_val.v.val_double.high;
10080 }
10081
10082 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10083 first, "%s", name);
10084 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10085 second, NULL);
10086 }
10087 break;
10088
10089 case dw_val_class_wide_int:
10090 {
10091 int i;
10092 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10093 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10094 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10095 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10096 * l, NULL);
10097
10098 if (WORDS_BIG_ENDIAN)
10099 for (i = len - 1; i >= 0; --i)
10100 {
10101 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10102 "%s", name);
10103 name = "";
10104 }
10105 else
10106 for (i = 0; i < len; ++i)
10107 {
10108 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10109 "%s", name);
10110 name = "";
10111 }
10112 }
10113 break;
10114
10115 case dw_val_class_vec:
10116 {
10117 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10118 unsigned int len = a->dw_attr_val.v.val_vec.length;
10119 unsigned int i;
10120 unsigned char *p;
10121
10122 dw2_asm_output_data (constant_size (len * elt_size),
10123 len * elt_size, "%s", name);
10124 if (elt_size > sizeof (HOST_WIDE_INT))
10125 {
10126 elt_size /= 2;
10127 len *= 2;
10128 }
10129 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10130 i < len;
10131 i++, p += elt_size)
10132 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10133 "fp or vector constant word %u", i);
10134 break;
10135 }
10136
10137 case dw_val_class_flag:
10138 if (dwarf_version >= 4)
10139 {
10140 /* Currently all add_AT_flag calls pass in 1 as last argument,
10141 so DW_FORM_flag_present can be used. If that ever changes,
10142 we'll need to use DW_FORM_flag and have some optimization
10143 in build_abbrev_table that will change those to
10144 DW_FORM_flag_present if it is set to 1 in all DIEs using
10145 the same abbrev entry. */
10146 gcc_assert (AT_flag (a) == 1);
10147 if (flag_debug_asm)
10148 fprintf (asm_out_file, "\t\t\t%s %s\n",
10149 ASM_COMMENT_START, name);
10150 break;
10151 }
10152 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10153 break;
10154
10155 case dw_val_class_loc_list:
10156 output_loc_list_offset (a);
10157 break;
10158
10159 case dw_val_class_die_ref:
10160 if (AT_ref_external (a))
10161 {
10162 if (AT_ref (a)->comdat_type_p)
10163 {
10164 comdat_type_node *type_node
10165 = AT_ref (a)->die_id.die_type_node;
10166
10167 gcc_assert (type_node);
10168 output_signature (type_node->signature, name);
10169 }
10170 else
10171 {
10172 const char *sym = AT_ref (a)->die_id.die_symbol;
10173 int size;
10174
10175 gcc_assert (sym);
10176 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10177 length, whereas in DWARF3 it's always sized as an
10178 offset. */
10179 if (dwarf_version == 2)
10180 size = DWARF2_ADDR_SIZE;
10181 else
10182 size = DWARF_OFFSET_SIZE;
10183 /* ??? We cannot unconditionally output die_offset if
10184 non-zero - others might create references to those
10185 DIEs via symbols.
10186 And we do not clear the DIE offset after outputting it
10187 (and the label refers to the actual DIE, not to the
10188 DWARF CU header, which is the case in which label + offset
10189 would be the correct thing to output).
10190 ??? This is the reason for the with_offset flag. */
10191 if (AT_ref (a)->with_offset)
10192 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10193 debug_info_section, "%s", name);
10194 else
10195 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10196 name);
10197 }
10198 }
10199 else
10200 {
10201 gcc_assert (AT_ref (a)->die_offset);
10202 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10203 "%s", name);
10204 }
10205 break;
10206
10207 case dw_val_class_fde_ref:
10208 {
10209 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10210
10211 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10212 a->dw_attr_val.v.val_fde_index * 2);
10213 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10214 "%s", name);
10215 }
10216 break;
10217
10218 case dw_val_class_vms_delta:
10219 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10220 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10221 AT_vms_delta2 (a), AT_vms_delta1 (a),
10222 "%s", name);
10223 #else
10224 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10225 AT_vms_delta2 (a), AT_vms_delta1 (a),
10226 "%s", name);
10227 #endif
10228 break;
10229
10230 case dw_val_class_lbl_id:
10231 output_attr_index_or_value (a);
10232 break;
10233
10234 case dw_val_class_lineptr:
10235 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10236 debug_line_section, "%s", name);
10237 break;
10238
10239 case dw_val_class_macptr:
10240 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10241 debug_macinfo_section, "%s", name);
10242 break;
10243
10244 case dw_val_class_loclistsptr:
10245 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10246 debug_loc_section, "%s", name);
10247 break;
10248
10249 case dw_val_class_str:
10250 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10251 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10252 a->dw_attr_val.v.val_str->label,
10253 debug_str_section,
10254 "%s: \"%s\"", name, AT_string (a));
10255 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10256 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10257 a->dw_attr_val.v.val_str->label,
10258 debug_line_str_section,
10259 "%s: \"%s\"", name, AT_string (a));
10260 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10261 dw2_asm_output_data_uleb128 (AT_index (a),
10262 "%s: \"%s\"", name, AT_string (a));
10263 else
10264 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10265 break;
10266
10267 case dw_val_class_file:
10268 {
10269 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10270
10271 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10272 a->dw_attr_val.v.val_file->filename);
10273 break;
10274 }
10275
10276 case dw_val_class_file_implicit:
10277 if (flag_debug_asm)
10278 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10279 ASM_COMMENT_START, name,
10280 maybe_emit_file (a->dw_attr_val.v.val_file),
10281 a->dw_attr_val.v.val_file->filename);
10282 break;
10283
10284 case dw_val_class_data8:
10285 {
10286 int i;
10287
10288 for (i = 0; i < 8; i++)
10289 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10290 i == 0 ? "%s" : NULL, name);
10291 break;
10292 }
10293
10294 case dw_val_class_high_pc:
10295 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10296 get_AT_low_pc (die), "DW_AT_high_pc");
10297 break;
10298
10299 case dw_val_class_discr_value:
10300 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10301 break;
10302
10303 case dw_val_class_discr_list:
10304 {
10305 dw_discr_list_ref list = AT_discr_list (a);
10306 const int size = size_of_discr_list (list);
10307
10308 /* This is a block, so output its length first. */
10309 dw2_asm_output_data (constant_size (size), size,
10310 "%s: block size", name);
10311
10312 for (; list != NULL; list = list->dw_discr_next)
10313 {
10314 /* One byte for the discriminant value descriptor, and then as
10315 many LEB128 numbers as required. */
10316 if (list->dw_discr_range)
10317 dw2_asm_output_data (1, DW_DSC_range,
10318 "%s: DW_DSC_range", name);
10319 else
10320 dw2_asm_output_data (1, DW_DSC_label,
10321 "%s: DW_DSC_label", name);
10322
10323 output_discr_value (&list->dw_discr_lower_bound, name);
10324 if (list->dw_discr_range)
10325 output_discr_value (&list->dw_discr_upper_bound, name);
10326 }
10327 break;
10328 }
10329
10330 default:
10331 gcc_unreachable ();
10332 }
10333 }
10334
10335 FOR_EACH_CHILD (die, c, output_die (c));
10336
10337 /* Add null byte to terminate sibling list. */
10338 if (die->die_child != NULL)
10339 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10340 (unsigned long) die->die_offset);
10341 }
10342
10343 /* Output the compilation unit header that appears at the beginning of the
10344 .debug_info section and precedes the DIE descriptions. */
10345
10346 static void
10347 output_compilation_unit_header (enum dwarf_unit_type ut)
10348 {
10349 if (!XCOFF_DEBUGGING_INFO)
10350 {
10351 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10352 dw2_asm_output_data (4, 0xffffffff,
10353 "Initial length escape value indicating 64-bit DWARF extension");
10354 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10355 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10356 "Length of Compilation Unit Info");
10357 }
10358
10359 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10360 if (dwarf_version >= 5)
10361 {
10362 const char *name;
10363 switch (ut)
10364 {
10365 case DW_UT_compile: name = "DW_UT_compile"; break;
10366 case DW_UT_type: name = "DW_UT_type"; break;
10367 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10368 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10369 default: gcc_unreachable ();
10370 }
10371 dw2_asm_output_data (1, ut, "%s", name);
10372 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10373 }
10374 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10375 debug_abbrev_section,
10376 "Offset Into Abbrev. Section");
10377 if (dwarf_version < 5)
10378 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10379 }
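/* A minimal sketch, not part of the GCC sources, of the two header
   layouts the function above emits, assuming 32-bit DWARF and
   <stdint.h> types; GCC itself writes the fields one assembler
   directive at a time rather than building such structs.  */
#if 0
struct example_cu_header_v2_to_v4
{
  uint32_t unit_length;		   /* Excludes the length field itself.  */
  uint16_t version;		   /* 2, 3 or 4.  */
  uint32_t debug_abbrev_offset;	   /* Into .debug_abbrev.  */
  uint8_t  address_size;	   /* DWARF2_ADDR_SIZE.  */
};

struct example_cu_header_v5
{
  uint32_t unit_length;
  uint16_t version;		   /* 5.  */
  uint8_t  unit_type;		   /* DW_UT_compile, DW_UT_split_compile, ...  */
  uint8_t  address_size;
  uint32_t debug_abbrev_offset;
};
#endif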
10380
10381 /* Output the compilation unit DIE and its children. */
10382
10383 static void
10384 output_comp_unit (dw_die_ref die, int output_if_empty,
10385 const unsigned char *dwo_id)
10386 {
10387 const char *secname, *oldsym;
10388 char *tmp;
10389
10390 /* Unless we are outputting the main CU, we may throw away empty ones. */
10391 if (!output_if_empty && die->die_child == NULL)
10392 return;
10393
10394 /* Even if there are no children of this DIE, we must output the information
10395 about the compilation unit. Otherwise, on an empty translation unit, we
10396 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10397 will then complain when examining the file. First mark all the DIEs in
10398 this CU so we know which get local refs. */
10399 mark_dies (die);
10400
10401 external_ref_hash_type *extern_map = optimize_external_refs (die);
10402
10403 /* For now, optimize only the main CU; in order to optimize the rest
10404 we'd need to see all of them earlier. Leave the rest for post-linking
10405 tools like DWZ. */
10406 if (die == comp_unit_die ())
10407 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10408
10409 build_abbrev_table (die, extern_map);
10410
10411 optimize_abbrev_table ();
10412
10413 delete extern_map;
10414
10415 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10416 next_die_offset = (dwo_id
10417 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10418 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10419 calc_die_sizes (die);
10420
10421 oldsym = die->die_id.die_symbol;
10422 if (oldsym && die->comdat_type_p)
10423 {
10424 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10425
10426 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10427 secname = tmp;
10428 die->die_id.die_symbol = NULL;
10429 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10430 }
10431 else
10432 {
10433 switch_to_section (debug_info_section);
10434 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10435 info_section_emitted = true;
10436 }
10437
10438 /* For LTO cross unit DIE refs we want a symbol on the start of the
10439 debuginfo section, not on the CU DIE. */
10440 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10441 {
10442 /* ??? No way to get visibility assembled without a decl. */
10443 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10444 get_identifier (oldsym), char_type_node);
10445 TREE_PUBLIC (decl) = true;
10446 TREE_STATIC (decl) = true;
10447 DECL_ARTIFICIAL (decl) = true;
10448 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10449 DECL_VISIBILITY_SPECIFIED (decl) = true;
10450 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10451 #ifdef ASM_WEAKEN_LABEL
10452 /* We prefer a .weak because that handles duplicates from duplicate
10453 archive members in a graceful way. */
10454 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10455 #else
10456 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10457 #endif
10458 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10459 }
10460
10461 /* Output debugging information. */
10462 output_compilation_unit_header (dwo_id
10463 ? DW_UT_split_compile : DW_UT_compile);
10464 if (dwarf_version >= 5)
10465 {
10466 if (dwo_id != NULL)
10467 for (int i = 0; i < 8; i++)
10468 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10469 }
10470 output_die (die);
10471
10472 /* Leave the marks on the main CU, so we can check them in
10473 output_pubnames. */
10474 if (oldsym)
10475 {
10476 unmark_dies (die);
10477 die->die_id.die_symbol = oldsym;
10478 }
10479 }
10480
10481 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10482 and .debug_pubtypes. This is configured per-target, but can be
10483 overridden by the -gpubnames or -gno-pubnames options. */
10484
10485 static inline bool
10486 want_pubnames (void)
10487 {
10488 if (debug_info_level <= DINFO_LEVEL_TERSE)
10489 return false;
10490 if (debug_generate_pub_sections != -1)
10491 return debug_generate_pub_sections;
10492 return targetm.want_debug_pub_sections;
10493 }
10494
10495 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10496
10497 static void
10498 add_AT_pubnames (dw_die_ref die)
10499 {
10500 if (want_pubnames ())
10501 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10502 }
10503
10504 /* Add a string attribute value to a skeleton DIE. */
10505
10506 static inline void
10507 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10508 const char *str)
10509 {
10510 dw_attr_node attr;
10511 struct indirect_string_node *node;
10512
10513 if (! skeleton_debug_str_hash)
10514 skeleton_debug_str_hash
10515 = hash_table<indirect_string_hasher>::create_ggc (10);
10516
10517 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10518 find_string_form (node);
10519 if (node->form == DW_FORM_GNU_str_index)
10520 node->form = DW_FORM_strp;
10521
10522 attr.dw_attr = attr_kind;
10523 attr.dw_attr_val.val_class = dw_val_class_str;
10524 attr.dw_attr_val.val_entry = NULL;
10525 attr.dw_attr_val.v.val_str = node;
10526 add_dwarf_attr (die, &attr);
10527 }
10528
10529 /* Helper function to generate top-level dies for skeleton debug_info and
10530 debug_types. */
10531
10532 static void
10533 add_top_level_skeleton_die_attrs (dw_die_ref die)
10534 {
10535 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10536 const char *comp_dir = comp_dir_string ();
10537
10538 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10539 if (comp_dir != NULL)
10540 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10541 add_AT_pubnames (die);
10542 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10543 }
10544
10545 /* Output skeleton debug sections that point to the dwo file. */
10546
10547 static void
10548 output_skeleton_debug_sections (dw_die_ref comp_unit,
10549 const unsigned char *dwo_id)
10550 {
10551 /* These attributes will be found in the full debug_info section. */
10552 remove_AT (comp_unit, DW_AT_producer);
10553 remove_AT (comp_unit, DW_AT_language);
10554
10555 switch_to_section (debug_skeleton_info_section);
10556 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10557
10558 /* Produce the skeleton compilation-unit header. This one differs enough from
10559 a normal CU header that it's better not to call
10560 output_compilation_unit_header. */
10561 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10562 dw2_asm_output_data (4, 0xffffffff,
10563 "Initial length escape value indicating 64-bit "
10564 "DWARF extension");
10565
10566 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10567 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10568 - DWARF_INITIAL_LENGTH_SIZE
10569 + size_of_die (comp_unit),
10570 "Length of Compilation Unit Info");
10571 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10572 if (dwarf_version >= 5)
10573 {
10574 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10575 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10576 }
10577 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10578 debug_skeleton_abbrev_section,
10579 "Offset Into Abbrev. Section");
10580 if (dwarf_version < 5)
10581 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10582 else
10583 for (int i = 0; i < 8; i++)
10584 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10585
10586 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10587 output_die (comp_unit);
10588
10589 /* Build the skeleton debug_abbrev section. */
10590 switch_to_section (debug_skeleton_abbrev_section);
10591 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10592
10593 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10594
10595 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10596 }
10597
10598 /* Output a comdat type unit DIE and its children. */
10599
10600 static void
10601 output_comdat_type_unit (comdat_type_node *node)
10602 {
10603 const char *secname;
10604 char *tmp;
10605 int i;
10606 #if defined (OBJECT_FORMAT_ELF)
10607 tree comdat_key;
10608 #endif
10609
10610 /* First mark all the DIEs in this CU so we know which get local refs. */
10611 mark_dies (node->root_die);
10612
10613 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10614
10615 build_abbrev_table (node->root_die, extern_map);
10616
10617 delete extern_map;
10618 extern_map = NULL;
10619
10620 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10621 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10622 calc_die_sizes (node->root_die);
10623
10624 #if defined (OBJECT_FORMAT_ELF)
10625 if (dwarf_version >= 5)
10626 {
10627 if (!dwarf_split_debug_info)
10628 secname = ".debug_info";
10629 else
10630 secname = ".debug_info.dwo";
10631 }
10632 else if (!dwarf_split_debug_info)
10633 secname = ".debug_types";
10634 else
10635 secname = ".debug_types.dwo";
10636
10637 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10638 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10639 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10640 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10641 comdat_key = get_identifier (tmp);
10642 targetm.asm_out.named_section (secname,
10643 SECTION_DEBUG | SECTION_LINKONCE,
10644 comdat_key);
10645 #else
10646 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10647 sprintf (tmp, (dwarf_version >= 5
10648 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10649 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10650 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10651 secname = tmp;
10652 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10653 #endif
10654
10655 /* Output debugging information. */
10656 output_compilation_unit_header (dwarf_split_debug_info
10657 ? DW_UT_split_type : DW_UT_type);
10658 output_signature (node->signature, "Type Signature");
10659 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10660 "Offset to Type DIE");
10661 output_die (node->root_die);
10662
10663 unmark_dies (node->root_die);
10664 }
10665
10666 /* Return the DWARF2/3 pubname associated with a decl. */
10667
10668 static const char *
10669 dwarf2_name (tree decl, int scope)
10670 {
10671 if (DECL_NAMELESS (decl))
10672 return NULL;
10673 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10674 }
10675
10676 /* Add a new entry to .debug_pubnames if appropriate. */
10677
10678 static void
10679 add_pubname_string (const char *str, dw_die_ref die)
10680 {
10681 pubname_entry e;
10682
10683 e.die = die;
10684 e.name = xstrdup (str);
10685 vec_safe_push (pubname_table, e);
10686 }
10687
10688 static void
10689 add_pubname (tree decl, dw_die_ref die)
10690 {
10691 if (!want_pubnames ())
10692 return;
10693
10694 /* Don't add items to the table when we expect that the consumer will have
10695 just read the enclosing die. For example, if the consumer is looking at a
10696 class_member, it will either be inside the class already, or will have just
10697 looked up the class to find the member. Either way, searching the class is
10698 faster than searching the index. */
10699 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10700 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10701 {
10702 const char *name = dwarf2_name (decl, 1);
10703
10704 if (name)
10705 add_pubname_string (name, die);
10706 }
10707 }
10708
10709 /* Add an enumerator to the pubnames section. */
10710
10711 static void
10712 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10713 {
10714 pubname_entry e;
10715
10716 gcc_assert (scope_name);
10717 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10718 e.die = die;
10719 vec_safe_push (pubname_table, e);
10720 }
10721
10722 /* Add a new entry to .debug_pubtypes if appropriate. */
10723
10724 static void
10725 add_pubtype (tree decl, dw_die_ref die)
10726 {
10727 pubname_entry e;
10728
10729 if (!want_pubnames ())
10730 return;
10731
10732 if ((TREE_PUBLIC (decl)
10733 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10734 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10735 {
10736 tree scope = NULL;
10737 const char *scope_name = "";
10738 const char *sep = is_cxx () ? "::" : ".";
10739 const char *name;
10740
10741 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10742 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10743 {
10744 scope_name = lang_hooks.dwarf_name (scope, 1);
10745 if (scope_name != NULL && scope_name[0] != '\0')
10746 scope_name = concat (scope_name, sep, NULL);
10747 else
10748 scope_name = "";
10749 }
10750
10751 if (TYPE_P (decl))
10752 name = type_tag (decl);
10753 else
10754 name = lang_hooks.dwarf_name (decl, 1);
10755
10756 /* If we don't have a name for the type, there's no point in adding
10757 it to the table. */
10758 if (name != NULL && name[0] != '\0')
10759 {
10760 e.die = die;
10761 e.name = concat (scope_name, name, NULL);
10762 vec_safe_push (pubtype_table, e);
10763 }
10764
10765 /* Although it might be more consistent to add the pubinfo for the
10766 enumerators as their dies are created, they should only be added if the
10767 enum type meets the criteria above. So rather than re-check the parent
10768 enum type whenever an enumerator die is created, just output them all
10769 here. This isn't protected by the name conditional because anonymous
10770 enums don't have names. */
10771 if (die->die_tag == DW_TAG_enumeration_type)
10772 {
10773 dw_die_ref c;
10774
10775 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10776 }
10777 }
10778 }
10779
10780 /* Output a single entry in the pubnames table. */
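/* Illustrative sketch of one emitted entry, derived from the code below:
   a DWARF_OFFSET_SIZE "DIE offset"; then, only when
   debug_generate_pub_sections == 2 (the GNU pubnames variant used for
   the GDB index), a single flag byte taken from the top bits of FLAGS
   (flags >> GDB_INDEX_CU_BITSIZE) encoding the symbol kind and the
   static bit; and finally the NUL-terminated name. For example, a
   file-static C function would get kind FUNCTION with the static bit
   set. */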
10781
10782 static void
10783 output_pubname (dw_offset die_offset, pubname_entry *entry)
10784 {
10785 dw_die_ref die = entry->die;
10786 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10787
10788 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10789
10790 if (debug_generate_pub_sections == 2)
10791 {
10792 /* This logic follows gdb's method for determining the value of the flag
10793 byte. */
10794 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10795 switch (die->die_tag)
10796 {
10797 case DW_TAG_typedef:
10798 case DW_TAG_base_type:
10799 case DW_TAG_subrange_type:
10800 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10801 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10802 break;
10803 case DW_TAG_enumerator:
10804 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10805 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10806 if (!is_cxx ())
10807 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10808 break;
10809 case DW_TAG_subprogram:
10810 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10811 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10812 if (!is_ada ())
10813 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10814 break;
10815 case DW_TAG_constant:
10816 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10817 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10818 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10819 break;
10820 case DW_TAG_variable:
10821 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10822 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10823 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10824 break;
10825 case DW_TAG_namespace:
10826 case DW_TAG_imported_declaration:
10827 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10828 break;
10829 case DW_TAG_class_type:
10830 case DW_TAG_interface_type:
10831 case DW_TAG_structure_type:
10832 case DW_TAG_union_type:
10833 case DW_TAG_enumeration_type:
10834 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10835 if (!is_cxx ())
10836 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10837 break;
10838 default:
10839 /* An unusual tag. Leave the flag-byte empty. */
10840 break;
10841 }
10842 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10843 "GDB-index flags");
10844 }
10845
10846 dw2_asm_output_nstring (entry->name, -1, "external name");
10847 }
10848
10849
10850 /* Output the public names table used to speed up access to externally
10851 visible names; or the public types table used to find type definitions. */
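/* Rough layout of the section emitted below (illustrative only):
     unit length (preceded by the 0xffffffff escape for 64-bit DWARF)
     2-byte version, always 2
     DWARF_OFFSET_SIZE offset of the (skeleton) compilation unit
     DWARF_OFFSET_SIZE length of the compilation unit
     one entry per included pubname/pubtype (see output_pubname)
     a terminating zero offset.  */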
10852
10853 static void
10854 output_pubnames (vec<pubname_entry, va_gc> *names)
10855 {
10856 unsigned i;
10857 unsigned long pubnames_length = size_of_pubnames (names);
10858 pubname_entry *pub;
10859
10860 if (!XCOFF_DEBUGGING_INFO)
10861 {
10862 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10863 dw2_asm_output_data (4, 0xffffffff,
10864 "Initial length escape value indicating 64-bit DWARF extension");
10865 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10866 "Pub Info Length");
10867 }
10868
10869 /* Version number for pubnames/pubtypes is independent of dwarf version. */
10870 dw2_asm_output_data (2, 2, "DWARF Version");
10871
10872 if (dwarf_split_debug_info)
10873 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10874 debug_skeleton_info_section,
10875 "Offset of Compilation Unit Info");
10876 else
10877 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10878 debug_info_section,
10879 "Offset of Compilation Unit Info");
10880 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10881 "Compilation Unit Length");
10882
10883 FOR_EACH_VEC_ELT (*names, i, pub)
10884 {
10885 if (include_pubname_in_output (names, pub))
10886 {
10887 dw_offset die_offset = pub->die->die_offset;
10888
10889 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10890 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10891 gcc_assert (pub->die->die_mark);
10892
10893 /* If we're putting types in their own .debug_types sections,
10894 the .debug_pubtypes table will still point to the compile
10895 unit (not the type unit), so we want to use the offset of
10896 the skeleton DIE (if there is one). */
10897 if (pub->die->comdat_type_p && names == pubtype_table)
10898 {
10899 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10900
10901 if (type_node != NULL)
10902 die_offset = (type_node->skeleton_die != NULL
10903 ? type_node->skeleton_die->die_offset
10904 : comp_unit_die ()->die_offset);
10905 }
10906
10907 output_pubname (die_offset, pub);
10908 }
10909 }
10910
10911 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10912 }
10913
10914 /* Output public names and types tables if necessary. */
10915
10916 static void
10917 output_pubtables (void)
10918 {
10919 if (!want_pubnames () || !info_section_emitted)
10920 return;
10921
10922 switch_to_section (debug_pubnames_section);
10923 output_pubnames (pubname_table);
10924 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10925 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10926 simply won't look for the section. */
10927 switch_to_section (debug_pubtypes_section);
10928 output_pubnames (pubtype_table);
10929 }
10930
10931
10932 /* Output the information that goes into the .debug_aranges table.
10933 Namely, define the beginning and ending addresses of the text
10934 sections generated for this compilation unit. */
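/* Rough layout of the table emitted below (illustrative only):
     unit length (preceded by the 0xffffffff escape for 64-bit DWARF)
     2-byte version, always 2
     DWARF_OFFSET_SIZE offset of the (skeleton) compilation unit
     1-byte address size, 1-byte segment descriptor size (0)
     zero padding up to a 2 * DWARF2_ADDR_SIZE boundary
     (address, length) pairs, DWARF2_ADDR_SIZE bytes each
     a terminating (0, 0) pair.  */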
10935
10936 static void
10937 output_aranges (void)
10938 {
10939 unsigned i;
10940 unsigned long aranges_length = size_of_aranges ();
10941
10942 if (!XCOFF_DEBUGGING_INFO)
10943 {
10944 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10945 dw2_asm_output_data (4, 0xffffffff,
10946 "Initial length escape value indicating 64-bit DWARF extension");
10947 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10948 "Length of Address Ranges Info");
10949 }
10950
10951 /* Version number for aranges is still 2, even up to DWARF5. */
10952 dw2_asm_output_data (2, 2, "DWARF Version");
10953 if (dwarf_split_debug_info)
10954 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10955 debug_skeleton_info_section,
10956 "Offset of Compilation Unit Info");
10957 else
10958 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10959 debug_info_section,
10960 "Offset of Compilation Unit Info");
10961 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10962 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10963
10964 /* We need to align to twice the pointer size here. */
10965 if (DWARF_ARANGES_PAD_SIZE)
10966 {
10967 /* Pad using 2-byte words so that the padding is correct for any
10968 pointer size. */
10969 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10970 2 * DWARF2_ADDR_SIZE);
10971 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10972 dw2_asm_output_data (2, 0, NULL);
10973 }
10974
10975 /* It is necessary not to output these entries if the sections were
10976 not used; if the sections were not used, the length will be 0 and
10977 the address may end up as 0 if the section is discarded by ld
10978 --gc-sections, leaving an invalid (0, 0) entry that can be
10979 confused with the terminator. */
10980 if (text_section_used)
10981 {
10982 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10983 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10984 text_section_label, "Length");
10985 }
10986 if (cold_text_section_used)
10987 {
10988 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
10989 "Address");
10990 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
10991 cold_text_section_label, "Length");
10992 }
10993
10994 if (have_multiple_function_sections)
10995 {
10996 unsigned fde_idx;
10997 dw_fde_ref fde;
10998
10999 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11000 {
11001 if (DECL_IGNORED_P (fde->decl))
11002 continue;
11003 if (!fde->in_std_section)
11004 {
11005 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11006 "Address");
11007 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11008 fde->dw_fde_begin, "Length");
11009 }
11010 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11011 {
11012 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11013 "Address");
11014 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11015 fde->dw_fde_second_begin, "Length");
11016 }
11017 }
11018 }
11019
11020 /* Output the terminator words. */
11021 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11022 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11023 }
11024
11025 /* Add a new entry to .debug_ranges. Return its index into
11026 ranges_table vector. */
11027
11028 static unsigned int
11029 add_ranges_num (int num, bool maybe_new_sec)
11030 {
11031 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11032 vec_safe_push (ranges_table, r);
11033 return vec_safe_length (ranges_table) - 1;
11034 }
11035
11036 /* Add a new entry to .debug_ranges corresponding to a block, or a
11037 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11038 this entry might be in a different section from the previous range. */
11039
11040 static unsigned int
11041 add_ranges (const_tree block, bool maybe_new_sec)
11042 {
11043 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11044 }
11045
11046 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11047 chain, or a middle entry of a chain that will be directly referred to. */
11048
11049 static void
11050 note_rnglist_head (unsigned int offset)
11051 {
11052 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11053 return;
11054 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11055 }
11056
11057 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11058 When using dwarf_split_debug_info, address attributes in dies destined
11059 for the final executable should be direct references--setting the
11060 parameter force_direct ensures this behavior. */
11061
11062 static void
11063 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11064 bool *added, bool force_direct)
11065 {
11066 unsigned int in_use = vec_safe_length (ranges_by_label);
11067 unsigned int offset;
11068 dw_ranges_by_label rbl = { begin, end };
11069 vec_safe_push (ranges_by_label, rbl);
11070 offset = add_ranges_num (-(int)in_use - 1, true);
11071 if (!*added)
11072 {
11073 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11074 *added = true;
11075 note_rnglist_head (offset);
11076 }
11077 }
11078
11079 /* Emit .debug_ranges section. */
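/* Illustrative note: each block below contributes a (begin, end) address
   pair, emitted either as deltas from text_section_label (single text
   section, where the CU base address defaults to DW_AT_low_pc) or as
   absolute addresses (multiple function sections, base address 0), and
   every range list is terminated by a (0, 0) pair. */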
11080
11081 static void
11082 output_ranges (void)
11083 {
11084 unsigned i;
11085 static const char *const start_fmt = "Offset %#x";
11086 const char *fmt = start_fmt;
11087 dw_ranges *r;
11088
11089 switch_to_section (debug_ranges_section);
11090 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11091 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11092 {
11093 int block_num = r->num;
11094
11095 if (block_num > 0)
11096 {
11097 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11098 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11099
11100 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11101 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11102
11103 /* If all code is in the text section, then the compilation
11104 unit base address defaults to DW_AT_low_pc, which is the
11105 base of the text section. */
11106 if (!have_multiple_function_sections)
11107 {
11108 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11109 text_section_label,
11110 fmt, i * 2 * DWARF2_ADDR_SIZE);
11111 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11112 text_section_label, NULL);
11113 }
11114
11115 /* Otherwise, the compilation unit base address is zero,
11116 which allows us to use absolute addresses, and not worry
11117 about whether the target supports cross-section
11118 arithmetic. */
11119 else
11120 {
11121 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11122 fmt, i * 2 * DWARF2_ADDR_SIZE);
11123 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11124 }
11125
11126 fmt = NULL;
11127 }
11128
11129 /* Negative block_num stands for an index into ranges_by_label. */
11130 else if (block_num < 0)
11131 {
11132 int lab_idx = - block_num - 1;
11133
11134 if (!have_multiple_function_sections)
11135 {
11136 gcc_unreachable ();
11137 #if 0
11138 /* If we ever use add_ranges_by_labels () for a single
11139 function section, all we have to do is take out the
11140 #if 0 above and the matching #endif below. */
11141 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11142 (*ranges_by_label)[lab_idx].begin,
11143 text_section_label,
11144 fmt, i * 2 * DWARF2_ADDR_SIZE);
11145 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11146 (*ranges_by_label)[lab_idx].end,
11147 text_section_label, NULL);
11148 #endif
11149 }
11150 else
11151 {
11152 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11153 (*ranges_by_label)[lab_idx].begin,
11154 fmt, i * 2 * DWARF2_ADDR_SIZE);
11155 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11156 (*ranges_by_label)[lab_idx].end,
11157 NULL);
11158 }
11159 }
11160 else
11161 {
11162 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11163 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11164 fmt = start_fmt;
11165 }
11166 }
11167 }
11168
11169 /* Non-zero if .debug_line_str should be used for .debug_line section
11170 strings or strings that are likely shareable with those. */
11171 #define DWARF5_USE_DEBUG_LINE_STR \
11172 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11173 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11174 /* FIXME: there is no .debug_line_str.dwo section, \
11175 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11176 && !dwarf_split_debug_info)
11177
11178 /* Assign .debug_rnglists indexes. */
11179
11180 static void
11181 index_rnglists (void)
11182 {
11183 unsigned i;
11184 dw_ranges *r;
11185
11186 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11187 if (r->label)
11188 r->idx = rnglist_idx++;
11189 }
11190
11191 /* Emit .debug_rnglists section. */
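/* Illustrative summary of the DWARF 5 entry kinds emitted below:
     DW_RLE_base_address  address            -- set the base for offset pairs
     DW_RLE_offset_pair   uleb128, uleb128   -- begin/end relative to the base
     DW_RLE_start_length  address, uleb128   -- begin address plus length
     DW_RLE_start_end     address, address   -- absolute begin/end
     DW_RLE_end_of_list                      -- terminate the list.  */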
11192
11193 static void
11194 output_rnglists (unsigned generation)
11195 {
11196 unsigned i;
11197 dw_ranges *r;
11198 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11199 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11200 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11201
11202 switch_to_section (debug_ranges_section);
11203 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11204 /* There are up to 4 unique ranges labels per generation.
11205 See also init_sections_and_labels. */
11206 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11207 2 + generation * 4);
11208 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11209 3 + generation * 4);
11210 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11211 dw2_asm_output_data (4, 0xffffffff,
11212 "Initial length escape value indicating "
11213 "64-bit DWARF extension");
11214 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11215 "Length of Range Lists");
11216 ASM_OUTPUT_LABEL (asm_out_file, l1);
11217 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11218 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11219 dw2_asm_output_data (1, 0, "Segment Size");
11220 /* Emit the offset table only for -gsplit-dwarf. When relocation sizes
11221 are not a concern and what matters is primarily the size of the .debug*
11222 sections in linked shared libraries and executables, the offset table
11223 plus the corresponding DW_FORM_rnglistx uleb128 indexes into it are
11224 usually larger than plain DW_FORM_sec_offset offsets into the
11225 .debug_rnglists section. */
11226 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11227 "Offset Entry Count");
11228 if (dwarf_split_debug_info)
11229 {
11230 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11231 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11232 if (r->label)
11233 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11234 ranges_base_label, NULL);
11235 }
11236
11237 const char *lab = "";
11238 unsigned int len = vec_safe_length (ranges_table);
11239 const char *base = NULL;
11240 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11241 {
11242 int block_num = r->num;
11243
11244 if (r->label)
11245 {
11246 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11247 lab = r->label;
11248 }
11249 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11250 base = NULL;
11251 if (block_num > 0)
11252 {
11253 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11254 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11255
11256 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11257 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11258
11259 if (HAVE_AS_LEB128)
11260 {
11261 /* If all code is in the text section, then the compilation
11262 unit base address defaults to DW_AT_low_pc, which is the
11263 base of the text section. */
11264 if (!have_multiple_function_sections)
11265 {
11266 dw2_asm_output_data (1, DW_RLE_offset_pair,
11267 "DW_RLE_offset_pair (%s)", lab);
11268 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11269 "Range begin address (%s)", lab);
11270 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11271 "Range end address (%s)", lab);
11272 continue;
11273 }
11274 if (base == NULL)
11275 {
11276 dw_ranges *r2 = NULL;
11277 if (i < len - 1)
11278 r2 = &(*ranges_table)[i + 1];
11279 if (r2
11280 && r2->num != 0
11281 && r2->label == NULL
11282 && !r2->maybe_new_sec)
11283 {
11284 dw2_asm_output_data (1, DW_RLE_base_address,
11285 "DW_RLE_base_address (%s)", lab);
11286 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11287 "Base address (%s)", lab);
11288 strcpy (basebuf, blabel);
11289 base = basebuf;
11290 }
11291 }
11292 if (base)
11293 {
11294 dw2_asm_output_data (1, DW_RLE_offset_pair,
11295 "DW_RLE_offset_pair (%s)", lab);
11296 dw2_asm_output_delta_uleb128 (blabel, base,
11297 "Range begin address (%s)", lab);
11298 dw2_asm_output_delta_uleb128 (elabel, base,
11299 "Range end address (%s)", lab);
11300 continue;
11301 }
11302 dw2_asm_output_data (1, DW_RLE_start_length,
11303 "DW_RLE_start_length (%s)", lab);
11304 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11305 "Range begin address (%s)", lab);
11306 dw2_asm_output_delta_uleb128 (elabel, blabel,
11307 "Range length (%s)", lab);
11308 }
11309 else
11310 {
11311 dw2_asm_output_data (1, DW_RLE_start_end,
11312 "DW_RLE_start_end (%s)", lab);
11313 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11314 "Range begin address (%s)", lab);
11315 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11316 "Range end address (%s)", lab);
11317 }
11318 }
11319
11320 /* Negative block_num stands for an index into ranges_by_label. */
11321 else if (block_num < 0)
11322 {
11323 int lab_idx = - block_num - 1;
11324 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11325 const char *elabel = (*ranges_by_label)[lab_idx].end;
11326
11327 if (!have_multiple_function_sections)
11328 gcc_unreachable ();
11329 if (HAVE_AS_LEB128)
11330 {
11331 dw2_asm_output_data (1, DW_RLE_start_length,
11332 "DW_RLE_start_length (%s)", lab);
11333 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11334 "Range begin address (%s)", lab);
11335 dw2_asm_output_delta_uleb128 (elabel, blabel,
11336 "Range length (%s)", lab);
11337 }
11338 else
11339 {
11340 dw2_asm_output_data (1, DW_RLE_start_end,
11341 "DW_RLE_start_end (%s)", lab);
11342 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11343 "Range begin address (%s)", lab);
11344 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11345 "Range end address (%s)", lab);
11346 }
11347 }
11348 else
11349 dw2_asm_output_data (1, DW_RLE_end_of_list,
11350 "DW_RLE_end_of_list (%s)", lab);
11351 }
11352 ASM_OUTPUT_LABEL (asm_out_file, l2);
11353 }
11354
11355 /* Data structure containing information about input files. */
11356 struct file_info
11357 {
11358 const char *path; /* Complete file name. */
11359 const char *fname; /* File name part. */
11360 int length; /* Length of entire string. */
11361 struct dwarf_file_data * file_idx; /* Index in input file table. */
11362 int dir_idx; /* Index in directory table. */
11363 };
11364
11365 /* Data structure containing information about directories with source
11366 files. */
11367 struct dir_info
11368 {
11369 const char *path; /* Path including directory name. */
11370 int length; /* Path length. */
11371 int prefix; /* Index of directory entry which is a prefix. */
11372 int count; /* Number of files in this directory. */
11373 int dir_idx; /* Index of directory used as base. */
11374 };
11375
11376 /* Callback function for file_info comparison. We sort by looking at
11377 the directories in the path. */
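/* Illustrative example: "stdio.h" (no directory component) sorts before
   "src/foo.c" and "src/util/bar.c"; the latter two are ordered roughly by
   comparing their directory prefixes character by character, so files
   sharing a directory end up adjacent. Only a consistent total order
   matters here, since the result is fed to qsort. */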
11378
11379 static int
11380 file_info_cmp (const void *p1, const void *p2)
11381 {
11382 const struct file_info *const s1 = (const struct file_info *) p1;
11383 const struct file_info *const s2 = (const struct file_info *) p2;
11384 const unsigned char *cp1;
11385 const unsigned char *cp2;
11386
11387 /* Take care of file names without directories. We need to make sure that
11388 we return consistent values to qsort, since some implementations get
11389 confused if we return the same value when identical operands are passed
11390 in opposite orders. So if neither has a directory, return 0; otherwise
11391 return 1 or -1 depending on which one has the directory. */
11392 if ((s1->path == s1->fname || s2->path == s2->fname))
11393 return (s2->path == s2->fname) - (s1->path == s1->fname);
11394
11395 cp1 = (const unsigned char *) s1->path;
11396 cp2 = (const unsigned char *) s2->path;
11397
11398 while (1)
11399 {
11400 ++cp1;
11401 ++cp2;
11402 /* Reached the end of either path? If so, handle as above. */
11403 if ((cp1 == (const unsigned char *) s1->fname)
11404 || (cp2 == (const unsigned char *) s2->fname))
11405 return ((cp2 == (const unsigned char *) s2->fname)
11406 - (cp1 == (const unsigned char *) s1->fname));
11407
11408 /* Character of current path component the same? */
11409 else if (*cp1 != *cp2)
11410 return *cp1 - *cp2;
11411 }
11412 }
11413
11414 struct file_name_acquire_data
11415 {
11416 struct file_info *files;
11417 int used_files;
11418 int max_files;
11419 };
11420
11421 /* Traversal function for the hash table. */
11422
11423 int
11424 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11425 {
11426 struct dwarf_file_data *d = *slot;
11427 struct file_info *fi;
11428 const char *f;
11429
11430 gcc_assert (fnad->max_files >= d->emitted_number);
11431
11432 if (! d->emitted_number)
11433 return 1;
11434
11435 gcc_assert (fnad->max_files != fnad->used_files);
11436
11437 fi = fnad->files + fnad->used_files++;
11438
11439 /* Skip all leading "./". */
11440 f = d->filename;
11441 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11442 f += 2;
11443
11444 /* Create a new array entry. */
11445 fi->path = f;
11446 fi->length = strlen (f);
11447 fi->file_idx = d;
11448
11449 /* Search for the file name part. */
11450 f = strrchr (f, DIR_SEPARATOR);
11451 #if defined (DIR_SEPARATOR_2)
11452 {
11453 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11454
11455 if (g != NULL)
11456 {
11457 if (f == NULL || f < g)
11458 f = g;
11459 }
11460 }
11461 #endif
11462
11463 fi->fname = f == NULL ? fi->path : f + 1;
11464 return 1;
11465 }
11466
11467 /* Helper function for output_file_names. Emit a FORM-encoded
11468 string STR, with assembly comment start ENTRY_KIND and
11469 index IDX. */
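/* Illustrative example: with DW_FORM_string the string is emitted inline
   as a NUL-terminated string; with DW_FORM_line_strp only a
   DWARF_OFFSET_SIZE offset is emitted here and the string itself is
   registered for the .debug_line_str section. */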
11470
11471 static void
11472 output_line_string (enum dwarf_form form, const char *str,
11473 const char *entry_kind, unsigned int idx)
11474 {
11475 switch (form)
11476 {
11477 case DW_FORM_string:
11478 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11479 break;
11480 case DW_FORM_line_strp:
11481 if (!debug_line_str_hash)
11482 debug_line_str_hash
11483 = hash_table<indirect_string_hasher>::create_ggc (10);
11484
11485 struct indirect_string_node *node;
11486 node = find_AT_string_in_table (str, debug_line_str_hash);
11487 set_indirect_string (node);
11488 node->form = form;
11489 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11490 debug_line_str_section, "%s: %#x: \"%s\"",
11491 entry_kind, 0, node->str);
11492 break;
11493 default:
11494 gcc_unreachable ();
11495 }
11496 }
11497
11498 /* Output the directory table and the file name table. We try to minimize
11499 the total amount of memory needed. A heuristic is used to avoid large
11500 slowdowns with many input files. */
11501
11502 static void
11503 output_file_names (void)
11504 {
11505 struct file_name_acquire_data fnad;
11506 int numfiles;
11507 struct file_info *files;
11508 struct dir_info *dirs;
11509 int *saved;
11510 int *savehere;
11511 int *backmap;
11512 int ndirs;
11513 int idx_offset;
11514 int i;
11515
11516 if (!last_emitted_file)
11517 {
11518 if (dwarf_version >= 5)
11519 {
11520 dw2_asm_output_data (1, 0, "Directory entry format count");
11521 dw2_asm_output_data_uleb128 (0, "Directories count");
11522 dw2_asm_output_data (1, 0, "File name entry format count");
11523 dw2_asm_output_data_uleb128 (0, "File names count");
11524 }
11525 else
11526 {
11527 dw2_asm_output_data (1, 0, "End directory table");
11528 dw2_asm_output_data (1, 0, "End file name table");
11529 }
11530 return;
11531 }
11532
11533 numfiles = last_emitted_file->emitted_number;
11534
11535 /* Allocate the various arrays we need. */
11536 files = XALLOCAVEC (struct file_info, numfiles);
11537 dirs = XALLOCAVEC (struct dir_info, numfiles);
11538
11539 fnad.files = files;
11540 fnad.used_files = 0;
11541 fnad.max_files = numfiles;
11542 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11543 gcc_assert (fnad.used_files == fnad.max_files);
11544
11545 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11546
11547 /* Find all the different directories used. */
11548 dirs[0].path = files[0].path;
11549 dirs[0].length = files[0].fname - files[0].path;
11550 dirs[0].prefix = -1;
11551 dirs[0].count = 1;
11552 dirs[0].dir_idx = 0;
11553 files[0].dir_idx = 0;
11554 ndirs = 1;
11555
11556 for (i = 1; i < numfiles; i++)
11557 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11558 && memcmp (dirs[ndirs - 1].path, files[i].path,
11559 dirs[ndirs - 1].length) == 0)
11560 {
11561 /* Same directory as last entry. */
11562 files[i].dir_idx = ndirs - 1;
11563 ++dirs[ndirs - 1].count;
11564 }
11565 else
11566 {
11567 int j;
11568
11569 /* This is a new directory. */
11570 dirs[ndirs].path = files[i].path;
11571 dirs[ndirs].length = files[i].fname - files[i].path;
11572 dirs[ndirs].count = 1;
11573 dirs[ndirs].dir_idx = ndirs;
11574 files[i].dir_idx = ndirs;
11575
11576 /* Search for a prefix. */
11577 dirs[ndirs].prefix = -1;
11578 for (j = 0; j < ndirs; j++)
11579 if (dirs[j].length < dirs[ndirs].length
11580 && dirs[j].length > 1
11581 && (dirs[ndirs].prefix == -1
11582 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11583 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11584 dirs[ndirs].prefix = j;
11585
11586 ++ndirs;
11587 }
11588
11589 /* Now to the actual work. We have to find a subset of the directories
11590 which allows expressing the file names, via references to the directory
11591 table, in the fewest characters. We do not do an exhaustive search where
11592 we would have to check every combination of every possible prefix.
11593 Instead we use a heuristic which provides nearly optimal results in most
11594 cases and is never far off. */
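/* Illustrative example (hypothetical paths): with files
   /usr/include/stdio.h and /usr/include/sys/types.h, using the directory
   "/usr/include/" as a base lets both file entries drop that prefix; the
   loop below weighs the characters saved across all affected files
   ((savehere[j] - saved[j]) * dirs[j].count) against dirs[i].length + 1,
   the cost attributed to the directory entry itself. */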
11595 saved = XALLOCAVEC (int, ndirs);
11596 savehere = XALLOCAVEC (int, ndirs);
11597
11598 memset (saved, '\0', ndirs * sizeof (saved[0]));
11599 for (i = 0; i < ndirs; i++)
11600 {
11601 int j;
11602 int total;
11603
11604 /* We can always save some space for the current directory. But this
11605 does not mean it will be enough to justify adding the directory. */
11606 savehere[i] = dirs[i].length;
11607 total = (savehere[i] - saved[i]) * dirs[i].count;
11608
11609 for (j = i + 1; j < ndirs; j++)
11610 {
11611 savehere[j] = 0;
11612 if (saved[j] < dirs[i].length)
11613 {
11614 /* Determine whether the dirs[i] path is a prefix of the
11615 dirs[j] path. */
11616 int k;
11617
11618 k = dirs[j].prefix;
11619 while (k != -1 && k != (int) i)
11620 k = dirs[k].prefix;
11621
11622 if (k == (int) i)
11623 {
11624 /* Yes it is. We can possibly save some memory by
11625 writing the filenames in dirs[j] relative to
11626 dirs[i]. */
11627 savehere[j] = dirs[i].length;
11628 total += (savehere[j] - saved[j]) * dirs[j].count;
11629 }
11630 }
11631 }
11632
11633 /* Check whether we can save enough to justify adding the dirs[i]
11634 directory. */
11635 if (total > dirs[i].length + 1)
11636 {
11637 /* It's worthwhile adding. */
11638 for (j = i; j < ndirs; j++)
11639 if (savehere[j] > 0)
11640 {
11641 /* Remember how much we saved for this directory so far. */
11642 saved[j] = savehere[j];
11643
11644 /* Remember the prefix directory. */
11645 dirs[j].dir_idx = i;
11646 }
11647 }
11648 }
11649
11650 /* Emit the directory name table. */
11651 idx_offset = dirs[0].length > 0 ? 1 : 0;
11652 enum dwarf_form str_form = DW_FORM_string;
11653 enum dwarf_form idx_form = DW_FORM_udata;
11654 if (dwarf_version >= 5)
11655 {
11656 const char *comp_dir = comp_dir_string ();
11657 if (comp_dir == NULL)
11658 comp_dir = "";
11659 dw2_asm_output_data (1, 1, "Directory entry format count");
11660 if (DWARF5_USE_DEBUG_LINE_STR)
11661 str_form = DW_FORM_line_strp;
11662 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11663 dw2_asm_output_data_uleb128 (str_form, "%s",
11664 get_DW_FORM_name (str_form));
11665 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11666 if (str_form == DW_FORM_string)
11667 {
11668 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11669 for (i = 1 - idx_offset; i < ndirs; i++)
11670 dw2_asm_output_nstring (dirs[i].path,
11671 dirs[i].length
11672 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11673 "Directory Entry: %#x", i + idx_offset);
11674 }
11675 else
11676 {
11677 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11678 for (i = 1 - idx_offset; i < ndirs; i++)
11679 {
11680 const char *str
11681 = ggc_alloc_string (dirs[i].path,
11682 dirs[i].length
11683 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11684 output_line_string (str_form, str, "Directory Entry",
11685 (unsigned) i + idx_offset);
11686 }
11687 }
11688 }
11689 else
11690 {
11691 for (i = 1 - idx_offset; i < ndirs; i++)
11692 dw2_asm_output_nstring (dirs[i].path,
11693 dirs[i].length
11694 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11695 "Directory Entry: %#x", i + idx_offset);
11696
11697 dw2_asm_output_data (1, 0, "End directory table");
11698 }
11699
11700 /* We have to emit them in the order of emitted_number since that's
11701 used in the debug info generation. To do this efficiently we
11702 generate a back-mapping of the indices first. */
11703 backmap = XALLOCAVEC (int, numfiles);
11704 for (i = 0; i < numfiles; i++)
11705 backmap[files[i].file_idx->emitted_number - 1] = i;
11706
11707 if (dwarf_version >= 5)
11708 {
11709 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11710 if (filename0 == NULL)
11711 filename0 = "";
11712 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11713 DW_FORM_data2. Choose one based on the number of directories
11714 and how much space the indexes would occupy in each encoding.
11715 If we have at most 256 directories, all indexes fit into
11716 a single byte, so DW_FORM_data1 is most compact (with at most
11717 128 directories, DW_FORM_udata would be as compact as that,
11718 but no shorter and slower to decode). */
11719 if (ndirs + idx_offset <= 256)
11720 idx_form = DW_FORM_data1;
11721 /* If there are more than 65536 directories, we have to use
11722 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11723 Otherwise, compute how much space all the indexes would occupy
11724 if they used DW_FORM_udata (SUM), compare that to the size of
11725 the DW_FORM_data2 encoding, and pick the more efficient one. */
11726 else if (ndirs + idx_offset <= 65536)
11727 {
11728 unsigned HOST_WIDE_INT sum = 1;
11729 for (i = 0; i < numfiles; i++)
11730 {
11731 int file_idx = backmap[i];
11732 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11733 sum += size_of_uleb128 (dir_idx);
11734 }
11735 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11736 idx_form = DW_FORM_data2;
11737 }
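/* Illustrative arithmetic for the comparison above (hypothetical numbers):
   with, say, 300 directories, indexes of 128 and above need two bytes as
   DW_FORM_udata, so SUM can approach 2 * (numfiles + 1); DW_FORM_data2
   always costs exactly 2 bytes per file entry, so it is chosen once the
   uleb128 total would be at least as large. */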
11738 #ifdef VMS_DEBUGGING_INFO
11739 dw2_asm_output_data (1, 4, "File name entry format count");
11740 #else
11741 dw2_asm_output_data (1, 2, "File name entry format count");
11742 #endif
11743 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11744 dw2_asm_output_data_uleb128 (str_form, "%s",
11745 get_DW_FORM_name (str_form));
11746 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11747 "DW_LNCT_directory_index");
11748 dw2_asm_output_data_uleb128 (idx_form, "%s",
11749 get_DW_FORM_name (idx_form));
11750 #ifdef VMS_DEBUGGING_INFO
11751 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11752 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11753 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11754 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11755 #endif
11756 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11757
11758 output_line_string (str_form, filename0, "File Entry", 0);
11759
11760 /* Include directory index. */
11761 if (idx_form != DW_FORM_udata)
11762 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11763 0, NULL);
11764 else
11765 dw2_asm_output_data_uleb128 (0, NULL);
11766
11767 #ifdef VMS_DEBUGGING_INFO
11768 dw2_asm_output_data_uleb128 (0, NULL);
11769 dw2_asm_output_data_uleb128 (0, NULL);
11770 #endif
11771 }
11772
11773 /* Now write all the file names. */
11774 for (i = 0; i < numfiles; i++)
11775 {
11776 int file_idx = backmap[i];
11777 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11778
11779 #ifdef VMS_DEBUGGING_INFO
11780 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11781
11782 /* Setting these fields can lead to debugger miscomparisons,
11783 but VMS Debug requires them to be set correctly. */
11784
11785 int ver;
11786 long long cdt;
11787 long siz;
11788 int maxfilelen = (strlen (files[file_idx].path)
11789 + dirs[dir_idx].length
11790 + MAX_VMS_VERSION_LEN + 1);
11791 char *filebuf = XALLOCAVEC (char, maxfilelen);
11792
11793 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11794 snprintf (filebuf, maxfilelen, "%s;%d",
11795 files[file_idx].path + dirs[dir_idx].length, ver);
11796
11797 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11798
11799 /* Include directory index. */
11800 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11801 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11802 dir_idx + idx_offset, NULL);
11803 else
11804 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11805
11806 /* Modification time. */
11807 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11808 &cdt, 0, 0, 0) == 0)
11809 ? cdt : 0, NULL);
11810
11811 /* File length in bytes. */
11812 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11813 0, &siz, 0, 0) == 0)
11814 ? siz : 0, NULL);
11815 #else
11816 output_line_string (str_form,
11817 files[file_idx].path + dirs[dir_idx].length,
11818 "File Entry", (unsigned) i + 1);
11819
11820 /* Include directory index. */
11821 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11822 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11823 dir_idx + idx_offset, NULL);
11824 else
11825 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11826
11827 if (dwarf_version >= 5)
11828 continue;
11829
11830 /* Modification time. */
11831 dw2_asm_output_data_uleb128 (0, NULL);
11832
11833 /* File length in bytes. */
11834 dw2_asm_output_data_uleb128 (0, NULL);
11835 #endif /* VMS_DEBUGGING_INFO */
11836 }
11837
11838 if (dwarf_version < 5)
11839 dw2_asm_output_data (1, 0, "End file name table");
11840 }
11841
11842
11843 /* Output one line number table into the .debug_line section. */
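/* Illustrative summary of the line-number opcodes this function emits:
   DW_LNE_set_address (0x00, uleb128 length, opcode, address) to start at
   a label, a single special opcode (DWARF_LINE_OPCODE_BASE + delta) for
   small line deltas, DW_LNS_advance_line followed by DW_LNS_copy for
   large ones, DW_LNS_set_file / DW_LNS_set_column / DW_LNS_negate_stmt /
   DW_LNE_set_discriminator as needed, and a final DW_LNE_end_sequence. */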
11844
11845 static void
11846 output_one_line_info_table (dw_line_info_table *table)
11847 {
11848 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11849 unsigned int current_line = 1;
11850 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11851 dw_line_info_entry *ent;
11852 size_t i;
11853
11854 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11855 {
11856 switch (ent->opcode)
11857 {
11858 case LI_set_address:
11859 /* ??? Unfortunately, we have little choice here currently, and
11860 must always use the most general form. GCC does not know the
11861 address delta itself, so we can't use DW_LNS_advance_pc. Many
11862 ports do have length attributes which will give an upper bound
11863 on the address range. We could perhaps use length attributes
11864 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11865 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11866
11867 /* This can handle any delta. This takes
11868 4+DWARF2_ADDR_SIZE bytes. */
11869 dw2_asm_output_data (1, 0, "set address %s", line_label);
11870 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11871 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11872 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11873 break;
11874
11875 case LI_set_line:
11876 if (ent->val == current_line)
11877 {
11878 /* We still need to start a new row, so output a copy insn. */
11879 dw2_asm_output_data (1, DW_LNS_copy,
11880 "copy line %u", current_line);
11881 }
11882 else
11883 {
11884 int line_offset = ent->val - current_line;
11885 int line_delta = line_offset - DWARF_LINE_BASE;
11886
11887 current_line = ent->val;
11888 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11889 {
11890 /* This can handle deltas from -10 to 234, using the current
11891 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11892 This takes 1 byte. */
11893 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11894 "line %u", current_line);
11895 }
11896 else
11897 {
11898 /* This can handle any delta. This takes at least 4 bytes,
11899 depending on the value being encoded. */
11900 dw2_asm_output_data (1, DW_LNS_advance_line,
11901 "advance to line %u", current_line);
11902 dw2_asm_output_data_sleb128 (line_offset, NULL);
11903 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11904 }
11905 }
11906 break;
11907
11908 case LI_set_file:
11909 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11910 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11911 break;
11912
11913 case LI_set_column:
11914 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11915 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11916 break;
11917
11918 case LI_negate_stmt:
11919 current_is_stmt = !current_is_stmt;
11920 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11921 "is_stmt %d", current_is_stmt);
11922 break;
11923
11924 case LI_set_prologue_end:
11925 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11926 "set prologue end");
11927 break;
11928
11929 case LI_set_epilogue_begin:
11930 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11931 "set epilogue begin");
11932 break;
11933
11934 case LI_set_discriminator:
11935 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11936 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11937 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11938 dw2_asm_output_data_uleb128 (ent->val, NULL);
11939 break;
11940 }
11941 }
11942
11943 /* Emit debug info for the address of the end of the table. */
11944 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11945 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11946 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11947 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11948
11949 dw2_asm_output_data (1, 0, "end sequence");
11950 dw2_asm_output_data_uleb128 (1, NULL);
11951 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
11952 }
11953
11954 /* Output the source line number correspondence information. This
11955 information goes into the .debug_line section. */
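/* Rough layout of the .debug_line header emitted below (illustrative):
     unit length, 2-byte version
     (DWARF 5 only) address size and segment selector size
     header ("prologue") length
     minimum instruction length (1), maximum ops per insn (DWARF 4+)
     default is_stmt, line base, line range, opcode base
     argument counts for the standard opcodes
     directory and file tables (see output_file_names)
   followed by one line-number program per text section in use. */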
11956
11957 static void
11958 output_line_info (bool prologue_only)
11959 {
11960 static unsigned int generation;
11961 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11962 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11963 bool saw_one = false;
11964 int opc;
11965
11966 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11967 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11968 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11969 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11970
11971 if (!XCOFF_DEBUGGING_INFO)
11972 {
11973 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11974 dw2_asm_output_data (4, 0xffffffff,
11975 "Initial length escape value indicating 64-bit DWARF extension");
11976 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11977 "Length of Source Line Info");
11978 }
11979
11980 ASM_OUTPUT_LABEL (asm_out_file, l1);
11981
11982 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11983 if (dwarf_version >= 5)
11984 {
11985 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11986 dw2_asm_output_data (1, 0, "Segment Size");
11987 }
11988 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
11989 ASM_OUTPUT_LABEL (asm_out_file, p1);
11990
11991 /* Define the architecture-dependent minimum instruction length (in bytes).
11992 In this implementation of DWARF, this field is used for information
11993 purposes only. Since GCC generates assembly language, we have no
11994 a priori knowledge of how many instruction bytes are generated for each
11995 source line, and therefore can use only the DW_LNE_set_address and
11996 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
11997 this as '1', which is "correct enough" for all architectures,
11998 and don't let the target override. */
11999 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12000
12001 if (dwarf_version >= 4)
12002 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12003 "Maximum Operations Per Instruction");
12004 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12005 "Default is_stmt_start flag");
12006 dw2_asm_output_data (1, DWARF_LINE_BASE,
12007 "Line Base Value (Special Opcodes)");
12008 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12009 "Line Range Value (Special Opcodes)");
12010 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12011 "Special Opcode Base");
12012
12013 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12014 {
12015 int n_op_args;
12016 switch (opc)
12017 {
12018 case DW_LNS_advance_pc:
12019 case DW_LNS_advance_line:
12020 case DW_LNS_set_file:
12021 case DW_LNS_set_column:
12022 case DW_LNS_fixed_advance_pc:
12023 case DW_LNS_set_isa:
12024 n_op_args = 1;
12025 break;
12026 default:
12027 n_op_args = 0;
12028 break;
12029 }
12030
12031 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12032 opc, n_op_args);
12033 }
12034
12035 /* Write out the information about the files we use. */
12036 output_file_names ();
12037 ASM_OUTPUT_LABEL (asm_out_file, p2);
12038 if (prologue_only)
12039 {
12040 /* Output the marker for the end of the line number info. */
12041 ASM_OUTPUT_LABEL (asm_out_file, l2);
12042 return;
12043 }
12044
12045 if (separate_line_info)
12046 {
12047 dw_line_info_table *table;
12048 size_t i;
12049
12050 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12051 if (table->in_use)
12052 {
12053 output_one_line_info_table (table);
12054 saw_one = true;
12055 }
12056 }
12057 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12058 {
12059 output_one_line_info_table (cold_text_section_line_info);
12060 saw_one = true;
12061 }
12062
12063 /* ??? Some Darwin linkers crash on a .debug_line section with no
12064 sequences. Further, merely a DW_LNE_end_sequence entry is not
12065 sufficient -- the address column must also be initialized.
12066 Make sure to output at least one set_address/end_sequence pair,
12067 choosing .text since that section is always present. */
12068 if (text_section_line_info->in_use || !saw_one)
12069 output_one_line_info_table (text_section_line_info);
12070
12071 /* Output the marker for the end of the line number info. */
12072 ASM_OUTPUT_LABEL (asm_out_file, l2);
12073 }
12074 \f
12075 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12076
12077 static inline bool
12078 need_endianity_attribute_p (bool reverse)
12079 {
12080 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12081 }
12082
12083 /* Given a pointer to a tree node for some base type, return a pointer to
12084 a DIE that describes the given type. REVERSE is true if the type is
12085 to be interpreted in the reverse storage order wrt the target order.
12086
12087 This routine must only be called for GCC type nodes that correspond to
12088 Dwarf base (fundamental) types. */
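/* Illustrative examples (typical targets, not normative): a plain C
   "unsigned int" yields DW_TAG_base_type with DW_AT_encoding
   DW_ATE_unsigned and DW_AT_byte_size 4; "char" yields
   DW_ATE_signed_char or DW_ATE_unsigned_char; C++ "char16_t"/"char32_t"
   get DW_ATE_UTF when DWARF 4+ or -gno-strict-dwarf is in effect. */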
12089
12090 static dw_die_ref
12091 base_type_die (tree type, bool reverse)
12092 {
12093 dw_die_ref base_type_result;
12094 enum dwarf_type encoding;
12095 bool fpt_used = false;
12096 struct fixed_point_type_info fpt_info;
12097 tree type_bias = NULL_TREE;
12098
12099 /* If this is a subtype that should not be emitted as a subrange type,
12100 use the base type. See subrange_type_for_debug_p. */
12101 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12102 type = TREE_TYPE (type);
12103
12104 switch (TREE_CODE (type))
12105 {
12106 case INTEGER_TYPE:
12107 if ((dwarf_version >= 4 || !dwarf_strict)
12108 && TYPE_NAME (type)
12109 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12110 && DECL_IS_BUILTIN (TYPE_NAME (type))
12111 && DECL_NAME (TYPE_NAME (type)))
12112 {
12113 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12114 if (strcmp (name, "char16_t") == 0
12115 || strcmp (name, "char32_t") == 0)
12116 {
12117 encoding = DW_ATE_UTF;
12118 break;
12119 }
12120 }
12121 if ((dwarf_version >= 3 || !dwarf_strict)
12122 && lang_hooks.types.get_fixed_point_type_info)
12123 {
12124 memset (&fpt_info, 0, sizeof (fpt_info));
12125 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12126 {
12127 fpt_used = true;
12128 encoding = ((TYPE_UNSIGNED (type))
12129 ? DW_ATE_unsigned_fixed
12130 : DW_ATE_signed_fixed);
12131 break;
12132 }
12133 }
12134 if (TYPE_STRING_FLAG (type))
12135 {
12136 if (TYPE_UNSIGNED (type))
12137 encoding = DW_ATE_unsigned_char;
12138 else
12139 encoding = DW_ATE_signed_char;
12140 }
12141 else if (TYPE_UNSIGNED (type))
12142 encoding = DW_ATE_unsigned;
12143 else
12144 encoding = DW_ATE_signed;
12145
12146 if (!dwarf_strict
12147 && lang_hooks.types.get_type_bias)
12148 type_bias = lang_hooks.types.get_type_bias (type);
12149 break;
12150
12151 case REAL_TYPE:
12152 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12153 {
12154 if (dwarf_version >= 3 || !dwarf_strict)
12155 encoding = DW_ATE_decimal_float;
12156 else
12157 encoding = DW_ATE_lo_user;
12158 }
12159 else
12160 encoding = DW_ATE_float;
12161 break;
12162
12163 case FIXED_POINT_TYPE:
12164 if (!(dwarf_version >= 3 || !dwarf_strict))
12165 encoding = DW_ATE_lo_user;
12166 else if (TYPE_UNSIGNED (type))
12167 encoding = DW_ATE_unsigned_fixed;
12168 else
12169 encoding = DW_ATE_signed_fixed;
12170 break;
12171
12172 /* Dwarf2 doesn't know anything about complex ints, so use
12173 a user-defined type for them. */
12174 case COMPLEX_TYPE:
12175 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12176 encoding = DW_ATE_complex_float;
12177 else
12178 encoding = DW_ATE_lo_user;
12179 break;
12180
12181 case BOOLEAN_TYPE:
12182 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12183 encoding = DW_ATE_boolean;
12184 break;
12185
12186 default:
12187 /* No other TREE_CODEs are Dwarf fundamental types. */
12188 gcc_unreachable ();
12189 }
12190
12191 base_type_result = new_die_raw (DW_TAG_base_type);
12192
12193 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12194 int_size_in_bytes (type));
12195 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12196
12197 if (need_endianity_attribute_p (reverse))
12198 add_AT_unsigned (base_type_result, DW_AT_endianity,
12199 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12200
12201 add_alignment_attribute (base_type_result, type);
12202
12203 if (fpt_used)
12204 {
12205 switch (fpt_info.scale_factor_kind)
12206 {
12207 case fixed_point_scale_factor_binary:
12208 add_AT_int (base_type_result, DW_AT_binary_scale,
12209 fpt_info.scale_factor.binary);
12210 break;
12211
12212 case fixed_point_scale_factor_decimal:
12213 add_AT_int (base_type_result, DW_AT_decimal_scale,
12214 fpt_info.scale_factor.decimal);
12215 break;
12216
12217 case fixed_point_scale_factor_arbitrary:
12218 /* Arbitrary scale factors cannot be described in standard DWARF,
12219 yet. */
12220 if (!dwarf_strict)
12221 {
12222 /* Describe the scale factor as a rational constant. */
12223 const dw_die_ref scale_factor
12224 = new_die (DW_TAG_constant, comp_unit_die (), type);
12225
12226 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12227 fpt_info.scale_factor.arbitrary.numerator);
12228 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12229 fpt_info.scale_factor.arbitrary.denominator);
12230
12231 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12232 }
12233 break;
12234
12235 default:
12236 gcc_unreachable ();
12237 }
12238 }
12239
12240 if (type_bias)
12241 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12242 dw_scalar_form_constant
12243 | dw_scalar_form_exprloc
12244 | dw_scalar_form_reference,
12245 NULL);
12246
12247 return base_type_result;
12248 }
12249
12250 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12251 named 'auto' in its type: return true for it, false otherwise. */
12252
12253 static inline bool
12254 is_cxx_auto (tree type)
12255 {
12256 if (is_cxx ())
12257 {
12258 tree name = TYPE_IDENTIFIER (type);
12259 if (name == get_identifier ("auto")
12260 || name == get_identifier ("decltype(auto)"))
12261 return true;
12262 }
12263 return false;
12264 }
12265
12266 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12267 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12268
12269 static inline int
12270 is_base_type (tree type)
12271 {
12272 switch (TREE_CODE (type))
12273 {
12274 case INTEGER_TYPE:
12275 case REAL_TYPE:
12276 case FIXED_POINT_TYPE:
12277 case COMPLEX_TYPE:
12278 case BOOLEAN_TYPE:
12279 case POINTER_BOUNDS_TYPE:
12280 return 1;
12281
12282 case VOID_TYPE:
12283 case ARRAY_TYPE:
12284 case RECORD_TYPE:
12285 case UNION_TYPE:
12286 case QUAL_UNION_TYPE:
12287 case ENUMERAL_TYPE:
12288 case FUNCTION_TYPE:
12289 case METHOD_TYPE:
12290 case POINTER_TYPE:
12291 case REFERENCE_TYPE:
12292 case NULLPTR_TYPE:
12293 case OFFSET_TYPE:
12294 case LANG_TYPE:
12295 case VECTOR_TYPE:
12296 return 0;
12297
12298 default:
12299 if (is_cxx_auto (type))
12300 return 0;
12301 gcc_unreachable ();
12302 }
12303
12304 return 0;
12305 }
12306
12307 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12308 node, return the size in bits for the type if it is a constant, zero if
12309 the type has no size, the alignment for the type if its size is not
12310 constant, or BITS_PER_WORD if the type actually turns out to be an
12311 ERROR_MARK node. */
12312
12313 static inline unsigned HOST_WIDE_INT
12314 simple_type_size_in_bits (const_tree type)
12315 {
12316 if (TREE_CODE (type) == ERROR_MARK)
12317 return BITS_PER_WORD;
12318 else if (TYPE_SIZE (type) == NULL_TREE)
12319 return 0;
12320 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12321 return tree_to_uhwi (TYPE_SIZE (type));
12322 else
12323 return TYPE_ALIGN (type);
12324 }
12325
12326 /* Similarly, but return an offset_int instead of UHWI. */
12327
12328 static inline offset_int
12329 offset_int_type_size_in_bits (const_tree type)
12330 {
12331 if (TREE_CODE (type) == ERROR_MARK)
12332 return BITS_PER_WORD;
12333 else if (TYPE_SIZE (type) == NULL_TREE)
12334 return 0;
12335 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12336 return wi::to_offset (TYPE_SIZE (type));
12337 else
12338 return TYPE_ALIGN (type);
12339 }
12340
12341 /* Given a pointer to a tree node for a subrange type, return a pointer
12342 to a DIE that describes the given type. */
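/* Illustrative example: an Ada subtype such as "range 1 .. 10" becomes
   DW_TAG_subrange_type with DW_AT_lower_bound 1 and DW_AT_upper_bound 10,
   plus DW_AT_byte_size when its size differs from the base type's and,
   outside of strict DWARF, DW_AT_GNU_bias for biased representations. */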
12343
12344 static dw_die_ref
12345 subrange_type_die (tree type, tree low, tree high, tree bias,
12346 dw_die_ref context_die)
12347 {
12348 dw_die_ref subrange_die;
12349 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12350
12351 if (context_die == NULL)
12352 context_die = comp_unit_die ();
12353
12354 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12355
12356 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12357 {
12358 /* The sizes of the subrange type and its base type do not match,
12359 so we need to generate a size attribute for the subrange type. */
12360 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12361 }
12362
12363 add_alignment_attribute (subrange_die, type);
12364
12365 if (low)
12366 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12367 if (high)
12368 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12369 if (bias && !dwarf_strict)
12370 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12371 dw_scalar_form_constant
12372 | dw_scalar_form_exprloc
12373 | dw_scalar_form_reference,
12374 NULL);
12375
12376 return subrange_die;
12377 }
12378
12379 /* Returns the (const and/or volatile) cv_qualifiers associated with
12380 the decl node. This will normally be augmented with the
12381 cv_qualifiers of the underlying type in add_type_attribute. */
12382
12383 static int
12384 decl_quals (const_tree decl)
12385 {
12386 return ((TREE_READONLY (decl)
12387 /* The C++ front-end correctly marks reference-typed
12388 variables as readonly, but from a language (and debug
12389 info) standpoint they are not const-qualified. */
12390 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12391 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12392 | (TREE_THIS_VOLATILE (decl)
12393 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12394 }
12395
12396 /* Determine the TYPE whose qualifiers match the largest strict subset
12397 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12398 qualifiers outside QUAL_MASK. */
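/* Illustrative example: for a "const volatile int" (TYPE_QUALS having
   both const and volatile), if a plain "const int" variant of the type
   exists, this returns TYPE_QUAL_CONST, letting the caller
   (modified_type_die) reuse that variant's DIE and add only the missing
   volatile wrapper. */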
12399
12400 static int
12401 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12402 {
12403 tree t;
12404 int best_rank = 0, best_qual = 0, max_rank;
12405
12406 type_quals &= qual_mask;
12407 max_rank = popcount_hwi (type_quals) - 1;
12408
12409 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12410 t = TYPE_NEXT_VARIANT (t))
12411 {
12412 int q = TYPE_QUALS (t) & qual_mask;
12413
12414 if ((q & type_quals) == q && q != type_quals
12415 && check_base_type (t, type))
12416 {
12417 int rank = popcount_hwi (q);
12418
12419 if (rank > best_rank)
12420 {
12421 best_rank = rank;
12422 best_qual = q;
12423 }
12424 }
12425 }
12426
12427 return best_qual;
12428 }
12429
12430 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12431 static const dwarf_qual_info_t dwarf_qual_info[] =
12432 {
12433 { TYPE_QUAL_CONST, DW_TAG_const_type },
12434 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12435 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12436 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12437 };
12438 static const unsigned int dwarf_qual_info_size
12439 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12440
12441 /* If DIE is a qualified DIE of some base DIE with the same parent,
12442 return the base DIE, otherwise return NULL. OR the qualifiers added
12443 compared to the returned DIE into *MASK. */
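/* Illustrative example: for a DW_TAG_const_type DIE whose only attribute
   is DW_AT_type pointing to a sibling DW_TAG_volatile_type DIE, a call
   with DEPTH >= 1 returns the DIE underneath both wrappers and ORs
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE into *MASK. */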
12444
12445 static dw_die_ref
12446 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12447 {
12448 unsigned int i;
12449 for (i = 0; i < dwarf_qual_info_size; i++)
12450 if (die->die_tag == dwarf_qual_info[i].t)
12451 break;
12452 if (i == dwarf_qual_info_size)
12453 return NULL;
12454 if (vec_safe_length (die->die_attr) != 1)
12455 return NULL;
12456 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12457 if (type == NULL || type->die_parent != die->die_parent)
12458 return NULL;
12459 *mask |= dwarf_qual_info[i].q;
12460 if (depth)
12461 {
12462 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12463 if (ret)
12464 return ret;
12465 }
12466 return type;
12467 }
12468
12469 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12470 entry that chains the modifiers specified by CV_QUALS in front of the
12471 given type. REVERSE is true if the type is to be interpreted in the
12472 reverse storage order wrt the target order. */
12473
12474 static dw_die_ref
12475 modified_type_die (tree type, int cv_quals, bool reverse,
12476 dw_die_ref context_die)
12477 {
12478 enum tree_code code = TREE_CODE (type);
12479 dw_die_ref mod_type_die;
12480 dw_die_ref sub_die = NULL;
12481 tree item_type = NULL;
12482 tree qualified_type;
12483 tree name, low, high;
12484 dw_die_ref mod_scope;
12485 /* Only these cv-qualifiers are currently handled. */
12486 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12487 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC
12488 | ENCODE_QUAL_ADDR_SPACE (~0U));
12489 const bool reverse_base_type
12490 = need_endianity_attribute_p (reverse) && is_base_type (type);
12491
12492 if (code == ERROR_MARK)
12493 return NULL;
12494
12495 if (lang_hooks.types.get_debug_type)
12496 {
12497 tree debug_type = lang_hooks.types.get_debug_type (type);
12498
12499 if (debug_type != NULL_TREE && debug_type != type)
12500 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12501 }
12502
12503 cv_quals &= cv_qual_mask;
12504
12505 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
12506 tag modifier (and not an attribute) that old consumers won't be able
12507 to handle. */
12508 if (dwarf_version < 3)
12509 cv_quals &= ~TYPE_QUAL_RESTRICT;
12510
12511 /* Likewise, DW_TAG_atomic_type is only available from DWARFv5 on. */
12512 if (dwarf_version < 5)
12513 cv_quals &= ~TYPE_QUAL_ATOMIC;
12514
12515 /* See if we already have the appropriately qualified variant of
12516 this type. */
12517 qualified_type = get_qualified_type (type, cv_quals);
12518
12519 if (qualified_type == sizetype)
12520 {
12521 /* Try not to expose the internal sizetype type's name. */
12522 if (TYPE_NAME (qualified_type)
12523 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12524 {
12525 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12526
12527 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12528 && (TYPE_PRECISION (t)
12529 == TYPE_PRECISION (qualified_type))
12530 && (TYPE_UNSIGNED (t)
12531 == TYPE_UNSIGNED (qualified_type)));
12532 qualified_type = t;
12533 }
12534 else if (qualified_type == sizetype
12535 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
12536 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
12537 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
12538 qualified_type = size_type_node;
12539 }
12540
12541 /* If we do, then we can just use its DIE, if it exists. */
12542 if (qualified_type)
12543 {
12544 mod_type_die = lookup_type_die (qualified_type);
12545
12546 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
12547 dealt with specially: the DIE with the attribute, if it exists, is
12548 placed immediately after the regular DIE for the same base type. */
12549 if (mod_type_die
12550 && (!reverse_base_type
12551 || ((mod_type_die = mod_type_die->die_sib) != NULL
12552 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
12553 return mod_type_die;
12554 }
12555
12556 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12557
12558 /* Handle C typedef types. */
12559 if (name
12560 && TREE_CODE (name) == TYPE_DECL
12561 && DECL_ORIGINAL_TYPE (name)
12562 && !DECL_ARTIFICIAL (name))
12563 {
12564 tree dtype = TREE_TYPE (name);
12565
12566 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
12567 if (qualified_type == dtype && !reverse_base_type)
12568 {
12569 tree origin = decl_ultimate_origin (name);
12570
12571 /* Typedef variants that have an abstract origin don't get their own
12572 type DIE (see gen_typedef_die), so fall back on the ultimate
12573 abstract origin instead. */
12574 if (origin != NULL && origin != name)
12575 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
12576 context_die);
12577
12578 /* For a named type, use the typedef. */
12579 gen_type_die (qualified_type, context_die);
12580 return lookup_type_die (qualified_type);
12581 }
12582 else
12583 {
12584 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12585 dquals &= cv_qual_mask;
12586 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12587 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12588 /* cv-unqualified version of named type. Just use
12589 the unnamed type to which it refers. */
12590 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12591 reverse, context_die);
12592 /* Else cv-qualified version of named type; fall through. */
12593 }
12594 }
12595
12596 mod_scope = scope_die_for (type, context_die);
12597
12598 if (cv_quals)
12599 {
12600 int sub_quals = 0, first_quals = 0;
12601 unsigned i;
12602 dw_die_ref first = NULL, last = NULL;
12603
12604 /* Determine a lesser qualified type that most closely matches
12605 this one. Then generate DW_TAG_* entries for the remaining
12606 qualifiers. */
12607 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12608 cv_qual_mask);
12609 if (sub_quals && use_debug_types)
12610 {
12611 bool needed = false;
12612 /* If emitting type units, make sure the order of qualifiers
12613 is canonical. Thus, start from unqualified type if
12614 an earlier qualifier is missing in sub_quals, but some later
12615 one is present there. */
12616 for (i = 0; i < dwarf_qual_info_size; i++)
12617 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12618 needed = true;
12619 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12620 {
12621 sub_quals = 0;
12622 break;
12623 }
12624 }
12625 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12626 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12627 {
12628 /* As not all intermediate qualified DIEs have corresponding
12629 tree types, ensure that qualified DIEs in the same scope
12630 as their DW_AT_type are emitted after their DW_AT_type,
12631 only with other qualified DIEs for the same type possibly
12632 in between them. Determine the range of such qualified
12633 DIEs now (first being the base type, last being corresponding
12634 last qualified DIE for it). */
12635 unsigned int count = 0;
12636 first = qualified_die_p (mod_type_die, &first_quals,
12637 dwarf_qual_info_size);
12638 if (first == NULL)
12639 first = mod_type_die;
12640 gcc_assert ((first_quals & ~sub_quals) == 0);
12641 for (count = 0, last = first;
12642 count < (1U << dwarf_qual_info_size);
12643 count++, last = last->die_sib)
12644 {
12645 int quals = 0;
12646 if (last == mod_scope->die_child)
12647 break;
12648 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12649 != first)
12650 break;
12651 }
12652 }
12653
12654 for (i = 0; i < dwarf_qual_info_size; i++)
12655 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12656 {
12657 dw_die_ref d;
12658 if (first && first != last)
12659 {
12660 for (d = first->die_sib; ; d = d->die_sib)
12661 {
12662 int quals = 0;
12663 qualified_die_p (d, &quals, dwarf_qual_info_size);
12664 if (quals == (first_quals | dwarf_qual_info[i].q))
12665 break;
12666 if (d == last)
12667 {
12668 d = NULL;
12669 break;
12670 }
12671 }
12672 if (d)
12673 {
12674 mod_type_die = d;
12675 continue;
12676 }
12677 }
12678 if (first)
12679 {
12680 d = new_die_raw (dwarf_qual_info[i].t);
12681 add_child_die_after (mod_scope, d, last);
12682 last = d;
12683 }
12684 else
12685 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12686 if (mod_type_die)
12687 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12688 mod_type_die = d;
12689 first_quals |= dwarf_qual_info[i].q;
12690 }
12691 }
12692 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12693 {
12694 dwarf_tag tag = DW_TAG_pointer_type;
12695 if (code == REFERENCE_TYPE)
12696 {
12697 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12698 tag = DW_TAG_rvalue_reference_type;
12699 else
12700 tag = DW_TAG_reference_type;
12701 }
12702 mod_type_die = new_die (tag, mod_scope, type);
12703
12704 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12705 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12706 add_alignment_attribute (mod_type_die, type);
12707 item_type = TREE_TYPE (type);
12708
12709 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12710 if (!ADDR_SPACE_GENERIC_P (as))
12711 {
12712 int action = targetm.addr_space.debug (as);
12713 if (action >= 0)
12714 {
12715 /* Positive values indicate an address_class. */
12716 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12717 }
12718 else
12719 {
12720 /* Negative values indicate an (inverted) segment base reg. */
12721 dw_loc_descr_ref d
12722 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12723 add_AT_loc (mod_type_die, DW_AT_segment, d);
12724 }
12725 }
12726 }
12727 else if (code == INTEGER_TYPE
12728 && TREE_TYPE (type) != NULL_TREE
12729 && subrange_type_for_debug_p (type, &low, &high))
12730 {
12731 tree bias = NULL_TREE;
12732 if (lang_hooks.types.get_type_bias)
12733 bias = lang_hooks.types.get_type_bias (type);
12734 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12735 item_type = TREE_TYPE (type);
12736 }
12737 else if (is_base_type (type))
12738 {
12739 mod_type_die = base_type_die (type, reverse);
12740
12741 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
12742 if (reverse_base_type)
12743 {
12744 dw_die_ref after_die
12745 = modified_type_die (type, cv_quals, false, context_die);
12746 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
12747 }
12748 else
12749 add_child_die (comp_unit_die (), mod_type_die);
12750
12751 add_pubtype (type, mod_type_die);
12752 }
12753 else
12754 {
12755 gen_type_die (type, context_die);
12756
12757 /* We have to get the type_main_variant here (and pass that to the
12758 `lookup_type_die' routine) because the ..._TYPE node we have
12759 might simply be a *copy* of some original type node (where the
12760 copy was created to help us keep track of typedef names) and
12761 that copy might have a different TYPE_UID from the original
12762 ..._TYPE node. */
12763 if (TREE_CODE (type) == FUNCTION_TYPE
12764 || TREE_CODE (type) == METHOD_TYPE)
12765 {
12766 /* For function/method types, can't just use type_main_variant here,
12767 because that can have different ref-qualifiers for C++,
12768 but try to canonicalize. */
12769 tree main = TYPE_MAIN_VARIANT (type);
12770 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12771 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
12772 && check_base_type (t, main)
12773 && check_lang_type (t, type))
12774 return lookup_type_die (t);
12775 return lookup_type_die (type);
12776 }
12777 else if (TREE_CODE (type) != VECTOR_TYPE
12778 && TREE_CODE (type) != ARRAY_TYPE)
12779 return lookup_type_die (type_main_variant (type));
12780 else
12781 /* Vectors have the debugging information in the type,
12782 not the main variant. */
12783 return lookup_type_die (type);
12784 }
12785
12786 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12787 don't output a DW_TAG_typedef, since there isn't one in the
12788 user's program; just attach a DW_AT_name to the type.
12789 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12790 if the base type already has the same name. */
12791 if (name
12792 && ((TREE_CODE (name) != TYPE_DECL
12793 && (qualified_type == TYPE_MAIN_VARIANT (type)
12794 || (cv_quals == TYPE_UNQUALIFIED)))
12795 || (TREE_CODE (name) == TYPE_DECL
12796 && TREE_TYPE (name) == qualified_type
12797 && DECL_NAME (name))))
12798 {
12799 if (TREE_CODE (name) == TYPE_DECL)
12800 /* Could just call add_name_and_src_coords_attributes here,
12801 but since this is a builtin type it doesn't have any
12802 useful source coordinates anyway. */
12803 name = DECL_NAME (name);
12804 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12805 }
12806 /* This probably indicates a bug. */
12807 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12808 {
12809 name = TYPE_IDENTIFIER (type);
12810 add_name_attribute (mod_type_die,
12811 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12812 }
12813
12814 if (qualified_type && !reverse_base_type)
12815 equate_type_number_to_die (qualified_type, mod_type_die);
12816
12817 if (item_type)
12818 /* We must do this after the equate_type_number_to_die call, in case
12819 this is a recursive type. This ensures that the modified_type_die
12820 recursion will terminate even if the type is recursive. Recursive
12821 types are possible in Ada. */
12822 sub_die = modified_type_die (item_type,
12823 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12824 reverse,
12825 context_die);
12826
12827 if (sub_die != NULL)
12828 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12829
12830 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12831 if (TYPE_ARTIFICIAL (type))
12832 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12833
12834 return mod_type_die;
12835 }
12836
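/* For illustration (not from the original sources): given a declaration
   such as

       const volatile int x;

   with no pre-existing qualified variants or DIEs, the qualifier loop in
   modified_type_die walks dwarf_qual_info in order, first wrapping the
   base type DIE in DW_TAG_const_type and then wrapping that in
   DW_TAG_volatile_type, so the DW_AT_type of X ends up pointing at the
   volatile DIE, which points at the const DIE, which points at the base
   type.  */
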
12837 /* Generate DIEs for the generic parameters of T.
12838 T must be either a generic type or a generic function.
12839 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12840
12841 static void
12842 gen_generic_params_dies (tree t)
12843 {
12844 tree parms, args;
12845 int parms_num, i;
12846 dw_die_ref die = NULL;
12847 int non_default;
12848
12849 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12850 return;
12851
12852 if (TYPE_P (t))
12853 die = lookup_type_die (t);
12854 else if (DECL_P (t))
12855 die = lookup_decl_die (t);
12856
12857 gcc_assert (die);
12858
12859 parms = lang_hooks.get_innermost_generic_parms (t);
12860 if (!parms)
12861 /* T has no generic parameters. It means T is neither a generic type
12862 nor a generic function. End of story. */
12863 return;
12864
12865 parms_num = TREE_VEC_LENGTH (parms);
12866 args = lang_hooks.get_innermost_generic_args (t);
12867 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12868 non_default = int_cst_value (TREE_CHAIN (args));
12869 else
12870 non_default = TREE_VEC_LENGTH (args);
12871 for (i = 0; i < parms_num; i++)
12872 {
12873 tree parm, arg, arg_pack_elems;
12874 dw_die_ref parm_die;
12875
12876 parm = TREE_VEC_ELT (parms, i);
12877 arg = TREE_VEC_ELT (args, i);
12878 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12879 gcc_assert (parm && TREE_VALUE (parm) && arg);
12880
12881 if (parm && TREE_VALUE (parm) && arg)
12882 {
12883 /* If PARM represents a template parameter pack,
12884 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12885 by DW_TAG_template_*_parameter DIEs for the argument
12886 pack elements of ARG. Note that ARG would then be
12887 an argument pack. */
12888 if (arg_pack_elems)
12889 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12890 arg_pack_elems,
12891 die);
12892 else
12893 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12894 true /* emit name */, die);
12895 if (i >= non_default)
12896 add_AT_flag (parm_die, DW_AT_default_value, 1);
12897 }
12898 }
12899 }
12900
12901 /* Create and return a DIE for PARM which should be
12902 the representation of a generic type parameter.
12903 For instance, in the C++ front end, PARM would be a template parameter.
12904 ARG is the argument to PARM.
12905 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
12906 name of PARM.
12907 PARENT_DIE is the parent DIE to which the newly created DIE should be added
12908 as a child node. */
12909
12910 static dw_die_ref
12911 generic_parameter_die (tree parm, tree arg,
12912 bool emit_name_p,
12913 dw_die_ref parent_die)
12914 {
12915 dw_die_ref tmpl_die = NULL;
12916 const char *name = NULL;
12917
12918 if (!parm || !DECL_NAME (parm) || !arg)
12919 return NULL;
12920
12921 /* We support non-type generic parameters and arguments,
12922 type generic parameters and arguments, as well as
12923 generic generic parameters (a.k.a. template template parameters in C++)
12924 and arguments. */
12925 if (TREE_CODE (parm) == PARM_DECL)
12926 /* PARM is a nontype generic parameter */
12927 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12928 else if (TREE_CODE (parm) == TYPE_DECL)
12929 /* PARM is a type generic parameter. */
12930 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12931 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12932 /* PARM is a generic generic parameter.
12933 Its DIE is a GNU extension. It shall have a
12934 DW_AT_name attribute to represent the name of the template template
12935 parameter, and a DW_AT_GNU_template_name attribute to represent the
12936 name of the template template argument. */
12937 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12938 parent_die, parm);
12939 else
12940 gcc_unreachable ();
12941
12942 if (tmpl_die)
12943 {
12944 tree tmpl_type;
12945
12946 /* If PARM is a generic parameter pack, it means we are
12947 emitting debug info for a template argument pack element.
12948 In other words, ARG is a template argument pack element.
12949 In that case, we don't emit any DW_AT_name attribute for
12950 the DIE. */
12951 if (emit_name_p)
12952 {
12953 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12954 gcc_assert (name);
12955 add_AT_string (tmpl_die, DW_AT_name, name);
12956 }
12957
12958 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12959 {
12960 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12961 TMPL_DIE should have a child DW_AT_type attribute that is set
12962 to the type of the argument to PARM, which is ARG.
12963 If PARM is a type generic parameter, TMPL_DIE should have a
12964 child DW_AT_type that is set to ARG. */
12965 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12966 add_type_attribute (tmpl_die, tmpl_type,
12967 (TREE_THIS_VOLATILE (tmpl_type)
12968 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12969 false, parent_die);
12970 }
12971 else
12972 {
12973 /* So TMPL_DIE is a DIE representing a generic generic template
12974 parameter, a.k.a. a template template parameter in C++,
12975 and ARG is a template. */
12976
12977 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12978 to the name of the argument. */
12979 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12980 if (name)
12981 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12982 }
12983
12984 if (TREE_CODE (parm) == PARM_DECL)
12985 /* So PARM is a non-type generic parameter.
12986 DWARF3 5.6.8 says we must set a DW_AT_const_value child
12987 attribute of TMPL_DIE whose value represents the value
12988 of ARG.
12989 We must be careful here:
12990 the value of ARG might reference some function decls.
12991 We might currently be emitting debug info for a generic
12992 type; since types are emitted before function decls, we
12993 don't know whether the function decls referenced by ARG
12994 will actually be emitted after cgraph computations.
12995 So we must defer the generation of the DW_AT_const_value
12996 until after cgraph is ready. */
12997 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
12998 }
12999
13000 return tmpl_die;
13001 }
13002
13003 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13004 PARM_PACK, which must be a template parameter pack. The returned DIE
13005 will be a child DIE of PARENT_DIE. */
13006
13007 static dw_die_ref
13008 template_parameter_pack_die (tree parm_pack,
13009 tree parm_pack_args,
13010 dw_die_ref parent_die)
13011 {
13012 dw_die_ref die;
13013 int j;
13014
13015 gcc_assert (parent_die && parm_pack);
13016
13017 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13018 add_name_and_src_coords_attributes (die, parm_pack);
13019 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13020 generic_parameter_die (parm_pack,
13021 TREE_VEC_ELT (parm_pack_args, j),
13022 false /* Don't emit DW_AT_name */,
13023 die);
13024 return die;
13025 }
13026
13027 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13028 an enumerated type. */
13029
13030 static inline int
13031 type_is_enum (const_tree type)
13032 {
13033 return TREE_CODE (type) == ENUMERAL_TYPE;
13034 }
13035
13036 /* Return the DBX register number described by a given RTL node. */
13037
13038 static unsigned int
13039 dbx_reg_number (const_rtx rtl)
13040 {
13041 unsigned regno = REGNO (rtl);
13042
13043 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13044
13045 #ifdef LEAF_REG_REMAP
13046 if (crtl->uses_only_leaf_regs)
13047 {
13048 int leaf_reg = LEAF_REG_REMAP (regno);
13049 if (leaf_reg != -1)
13050 regno = (unsigned) leaf_reg;
13051 }
13052 #endif
13053
13054 regno = DBX_REGISTER_NUMBER (regno);
13055 gcc_assert (regno != INVALID_REGNUM);
13056 return regno;
13057 }
13058
13059 /* Optionally add a DW_OP_piece term to a location description expression.
13060 DW_OP_piece is only added if the location description expression
13061 doesn't already end with DW_OP_piece. */
13062
13063 static void
13064 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13065 {
13066 dw_loc_descr_ref loc;
13067
13068 if (*list_head != NULL)
13069 {
13070 /* Find the end of the chain. */
13071 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13072 ;
13073
13074 if (loc->dw_loc_opc != DW_OP_piece)
13075 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13076 }
13077 }
13078
13079 /* Return a location descriptor that designates a machine register or
13080 zero if there is none. */
13081
13082 static dw_loc_descr_ref
13083 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13084 {
13085 rtx regs;
13086
13087 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13088 return 0;
13089
13090 /* We only use "frame base" when we're sure we're talking about the
13091 post-prologue local stack frame. We do this by *not* running
13092 register elimination until this point, and recognizing the special
13093 argument pointer and soft frame pointer rtx's.
13094 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13095 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13096 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13097 {
13098 dw_loc_descr_ref result = NULL;
13099
13100 if (dwarf_version >= 4 || !dwarf_strict)
13101 {
13102 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13103 initialized);
13104 if (result)
13105 add_loc_descr (&result,
13106 new_loc_descr (DW_OP_stack_value, 0, 0));
13107 }
13108 return result;
13109 }
13110
13111 regs = targetm.dwarf_register_span (rtl);
13112
13113 if (REG_NREGS (rtl) > 1 || regs)
13114 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13115 else
13116 {
13117 unsigned int dbx_regnum = dbx_reg_number (rtl);
13118 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13119 return 0;
13120 return one_reg_loc_descriptor (dbx_regnum, initialized);
13121 }
13122 }
13123
13124 /* Return a location descriptor that designates a machine register for
13125 a given hard register number. */
13126
13127 static dw_loc_descr_ref
13128 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13129 {
13130 dw_loc_descr_ref reg_loc_descr;
13131
13132 if (regno <= 31)
13133 reg_loc_descr
13134 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13135 else
13136 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13137
13138 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13139 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13140
13141 return reg_loc_descr;
13142 }
13143
13144 /* Given an RTL of a register, return a location descriptor that
13145 designates a value that spans more than one register. */
13146
13147 static dw_loc_descr_ref
13148 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13149 enum var_init_status initialized)
13150 {
13151 int size, i;
13152 dw_loc_descr_ref loc_result = NULL;
13153
13154 /* Simple, contiguous registers. */
13155 if (regs == NULL_RTX)
13156 {
13157 unsigned reg = REGNO (rtl);
13158 int nregs;
13159
13160 #ifdef LEAF_REG_REMAP
13161 if (crtl->uses_only_leaf_regs)
13162 {
13163 int leaf_reg = LEAF_REG_REMAP (reg);
13164 if (leaf_reg != -1)
13165 reg = (unsigned) leaf_reg;
13166 }
13167 #endif
13168
13169 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13170 nregs = REG_NREGS (rtl);
13171
13172 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
13173
13174 loc_result = NULL;
13175 while (nregs--)
13176 {
13177 dw_loc_descr_ref t;
13178
13179 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13180 VAR_INIT_STATUS_INITIALIZED);
13181 add_loc_descr (&loc_result, t);
13182 add_loc_descr_op_piece (&loc_result, size);
13183 ++reg;
13184 }
13185 return loc_result;
13186 }
13187
13188 /* Now onto the awkward case: register sets in non-contiguous locations. */
13189
13190 gcc_assert (GET_CODE (regs) == PARALLEL);
13191
13192 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
13193 loc_result = NULL;
13194
13195 for (i = 0; i < XVECLEN (regs, 0); ++i)
13196 {
13197 dw_loc_descr_ref t;
13198
13199 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13200 VAR_INIT_STATUS_INITIALIZED);
13201 add_loc_descr (&loc_result, t);
13202 add_loc_descr_op_piece (&loc_result, size);
13203 }
13204
13205 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13206 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13207 return loc_result;
13208 }
13209
13210 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13211
13212 /* Return a location descriptor that designates a constant i,
13213 as a compound operation from constant (i >> shift), constant shift
13214 and DW_OP_shl. */
13215
13216 static dw_loc_descr_ref
13217 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13218 {
13219 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13220 add_loc_descr (&ret, int_loc_descriptor (shift));
13221 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13222 return ret;
13223 }
13224
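/* Minimal stand-alone sketch (not part of dwarf2out.c): the decomposition
   above only discards bits that are already zero, so the consumer rebuilds
   the original constant exactly.  E.g. 0x12000000 has its low 24 bits
   clear and can be emitted as DW_OP_lit18 DW_OP_lit24 DW_OP_shl (3 bytes)
   instead of DW_OP_const4u 0x12000000 (5 bytes).  */

static long long
int_shift_decomposition_example (void)
{
  long long i = 0x12000000LL;
  int shift = 24;	/* I has at least 24 trailing zero bits.  */

  /* The value DW_OP_lit18 DW_OP_lit24 DW_OP_shl leaves on the DWARF
     stack; it equals I exactly.  */
  return (i >> shift) << shift;
}
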
13225 /* Return a location descriptor that designates a constant. */
13226
13227 static dw_loc_descr_ref
13228 int_loc_descriptor (HOST_WIDE_INT i)
13229 {
13230 enum dwarf_location_atom op;
13231
13232 /* Pick the smallest representation of a constant, rather than just
13233 defaulting to the LEB encoding. */
13234 if (i >= 0)
13235 {
13236 int clz = clz_hwi (i);
13237 int ctz = ctz_hwi (i);
13238 if (i <= 31)
13239 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13240 else if (i <= 0xff)
13241 op = DW_OP_const1u;
13242 else if (i <= 0xffff)
13243 op = DW_OP_const2u;
13244 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13245 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13246 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13247 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13248 while DW_OP_const4u is 5 bytes. */
13249 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13250 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13251 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13252 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13253 while DW_OP_const4u is 5 bytes. */
13254 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13255
13256 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13257 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13258 <= 4)
13259 {
13260 /* As i >= 2**31, the double cast above will yield a negative number.
13261 Since wrapping is defined in DWARF expressions we can output big
13262 positive integers as small negative ones, regardless of the size
13263 of host wide ints.
13264
13265 Here, since the evaluator will handle 32-bit values and since i >=
13266 2**31, we know it's going to be interpreted as a negative literal:
13267 store it this way if we can do better than 5 bytes this way. */
13268 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13269 }
13270 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13271 op = DW_OP_const4u;
13272
13273 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13274 least 6 bytes: see if we can do better before falling back to it. */
13275 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13276 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13277 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13278 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13279 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13280 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13281 >= HOST_BITS_PER_WIDE_INT)
13282 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13283 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13284 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13285 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13286 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13287 && size_of_uleb128 (i) > 6)
13288 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13289 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13290 else
13291 op = DW_OP_constu;
13292 }
13293 else
13294 {
13295 if (i >= -0x80)
13296 op = DW_OP_const1s;
13297 else if (i >= -0x8000)
13298 op = DW_OP_const2s;
13299 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13300 {
13301 if (size_of_int_loc_descriptor (i) < 5)
13302 {
13303 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13304 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13305 return ret;
13306 }
13307 op = DW_OP_const4s;
13308 }
13309 else
13310 {
13311 if (size_of_int_loc_descriptor (i)
13312 < (unsigned long) 1 + size_of_sleb128 (i))
13313 {
13314 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13315 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13316 return ret;
13317 }
13318 op = DW_OP_consts;
13319 }
13320 }
13321
13322 return new_loc_descr (op, i, 0);
13323 }
13324
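/* Minimal stand-alone sketch (not part of dwarf2out.c) of the ULEB128
   size computation that the byte counts in the comments above rely on:
   each ULEB128 byte carries 7 value bits.  E.g. for i = 1ULL << 38,
   DW_OP_constu needs 1 + 6 = 7 bytes, while the shift form
   DW_OP_lit16 DW_OP_const1u 34 DW_OP_shl rebuilds the same value in
   4 bytes.  */

static unsigned int
uleb128_size_example (unsigned long long value)
{
  unsigned int size = 0;

  do
    {
      value >>= 7;	/* Consume 7 value bits per output byte.  */
      size++;
    }
  while (value != 0);

  return size;		/* uleb128_size_example (1ULL << 38) == 6.  */
}
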
13325 /* Likewise, for unsigned constants. */
13326
13327 static dw_loc_descr_ref
13328 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13329 {
13330 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13331 const unsigned HOST_WIDE_INT max_uint
13332 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13333
13334 /* If possible, use the clever signed constants handling. */
13335 if (i <= max_int)
13336 return int_loc_descriptor ((HOST_WIDE_INT) i);
13337
13338 /* Here, we are left with positive numbers that cannot be represented as
13339 HOST_WIDE_INT, i.e.:
13340 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13341
13342 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
13343 lot of bytes, whereas it may be better to output a negative integer:
13344 thanks to integer wrapping, we know that:
13345 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13346 = x - 2 * (max (HOST_WIDE_INT) + 1)
13347 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13348 small negative integers. Let's try that in cases where it will clearly improve
13349 the encoding: there is no gain turning DW_OP_const4u into
13350 DW_OP_const4s. */
13351 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13352 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13353 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13354 {
13355 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13356
13357 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13358 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13359 const HOST_WIDE_INT second_shift
13360 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13361
13362 /* So we finally have:
13363 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13364 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13365 return int_loc_descriptor (second_shift);
13366 }
13367
13368 /* Last chance: fallback to a simple constant operation. */
13369 return new_loc_descr
13370 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13371 ? DW_OP_const4u
13372 : DW_OP_const8u,
13373 i, 0);
13374 }
13375
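/* Minimal stand-alone sketch (not part of dwarf2out.c) of the wrapping
   argument above, for a target whose DWARF address size matches a 64-bit
   HOST_WIDE_INT.  The DWARF evaluator works modulo 2**64, so a large
   unsigned constant and its small negative counterpart denote the same
   stack value, and the signed form is much shorter to encode.  */

static unsigned long long
uint_wrapping_example (void)
{
  long long small = -256;	/* fits DW_OP_const2s: 3 bytes.  */

  /* Equals 0xffffffffffffff00, which DW_OP_const8u would need 9 bytes
     to encode.  */
  return (unsigned long long) small;
}
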
13376 /* Generate and return a location description that computes the unsigned
13377 comparison of the two stack top entries (a OP b where b is the top-most
13378 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13379 LE_EXPR, GT_EXPR or GE_EXPR. */
13380
13381 static dw_loc_descr_ref
13382 uint_comparison_loc_list (enum tree_code kind)
13383 {
13384 enum dwarf_location_atom op, flip_op;
13385 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13386
13387 switch (kind)
13388 {
13389 case LT_EXPR:
13390 op = DW_OP_lt;
13391 break;
13392 case LE_EXPR:
13393 op = DW_OP_le;
13394 break;
13395 case GT_EXPR:
13396 op = DW_OP_gt;
13397 break;
13398 case GE_EXPR:
13399 op = DW_OP_ge;
13400 break;
13401 default:
13402 gcc_unreachable ();
13403 }
13404
13405 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13406 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13407
13408 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13409 possible to perform unsigned comparisons: we just have to distinguish
13410 two cases:
13411
13412 1. when a and b have the same sign (as signed integers); then we should
13413 return: a OP(signed) b;
13414
13415 2. when a is a negative signed integer while b is a positive one, then a
13416 is a greater unsigned integer than b; likewise when a and b's roles
13417 are flipped.
13418
13419 So first, compare the sign of the two operands. */
13420 ret = new_loc_descr (DW_OP_over, 0, 0);
13421 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13422 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13423 /* If they have different signs (i.e. they have different sign bits), then
13424 the stack top value has now the sign bit set and thus it's smaller than
13425 zero. */
13426 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13427 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13428 add_loc_descr (&ret, bra_node);
13429
13430 /* We are in case 1. At this point, we know both operands have the same
13431 sign, so it's safe to use the built-in signed comparison. */
13432 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13433 add_loc_descr (&ret, jmp_node);
13434
13435 /* We are in case 2. Here, we know both operands do not have the same sign,
13436 so we have to flip the signed comparison. */
13437 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13438 tmp = new_loc_descr (flip_op, 0, 0);
13439 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13440 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13441 add_loc_descr (&ret, tmp);
13442
13443 /* This dummy operation is necessary to make the two branches join. */
13444 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13445 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13446 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13447 add_loc_descr (&ret, tmp);
13448
13449 return ret;
13450 }
13451
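/* Minimal stand-alone sketch (not part of dwarf2out.c) of the same sign
   trick written directly in C, assuming 32-bit int.  When the operands
   have the same sign bit, the signed comparison already gives the
   unsigned answer; when the sign bits differ, the operand that is
   negative as a signed integer is the larger unsigned one, so the signed
   comparison has to be flipped.  */

static int
unsigned_less_than_via_signed_ops (int a, int b)
{
  if ((a ^ b) < 0)	/* Sign bits differ: case 2 above.  */
    return a > b;
  return a < b;		/* Same sign: case 1 above.  */
}

/* E.g. for a = -1 (0xffffffff as unsigned) and b = 1, the sign bits
   differ and the flipped test a > b correctly reports that a is not
   below b as an unsigned integer.  */
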
13452 /* Likewise, but takes the location description lists (might be destructive on
13453 them). Return NULL if either is NULL or if concatenation fails. */
13454
13455 static dw_loc_list_ref
13456 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13457 enum tree_code kind)
13458 {
13459 if (left == NULL || right == NULL)
13460 return NULL;
13461
13462 add_loc_list (&left, right);
13463 if (left == NULL)
13464 return NULL;
13465
13466 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13467 return left;
13468 }
13469
13470 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13471 without actually allocating it. */
13472
13473 static unsigned long
13474 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13475 {
13476 return size_of_int_loc_descriptor (i >> shift)
13477 + size_of_int_loc_descriptor (shift)
13478 + 1;
13479 }
13480
13481 /* Return size_of_locs (int_loc_descriptor (i)) without
13482 actually allocating it. */
13483
13484 static unsigned long
13485 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13486 {
13487 unsigned long s;
13488
13489 if (i >= 0)
13490 {
13491 int clz, ctz;
13492 if (i <= 31)
13493 return 1;
13494 else if (i <= 0xff)
13495 return 2;
13496 else if (i <= 0xffff)
13497 return 3;
13498 clz = clz_hwi (i);
13499 ctz = ctz_hwi (i);
13500 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13501 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13502 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13503 - clz - 5);
13504 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13505 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13506 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13507 - clz - 8);
13508 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13509 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13510 <= 4)
13511 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13512 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13513 return 5;
13514 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13515 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13516 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13517 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13518 - clz - 8);
13519 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13520 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13521 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13522 - clz - 16);
13523 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13524 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13525 && s > 6)
13526 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13527 - clz - 32);
13528 else
13529 return 1 + s;
13530 }
13531 else
13532 {
13533 if (i >= -0x80)
13534 return 2;
13535 else if (i >= -0x8000)
13536 return 3;
13537 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13538 {
13539 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13540 {
13541 s = size_of_int_loc_descriptor (-i) + 1;
13542 if (s < 5)
13543 return s;
13544 }
13545 return 5;
13546 }
13547 else
13548 {
13549 unsigned long r = 1 + size_of_sleb128 (i);
13550 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13551 {
13552 s = size_of_int_loc_descriptor (-i) + 1;
13553 if (s < r)
13554 return s;
13555 }
13556 return r;
13557 }
13558 }
13559 }
13560
13561 /* Return a location description representing the "address" of an integer
13562 value. This can appear only as a top-level expression. */
13563
13564 static dw_loc_descr_ref
13565 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13566 {
13567 int litsize;
13568 dw_loc_descr_ref loc_result = NULL;
13569
13570 if (!(dwarf_version >= 4 || !dwarf_strict))
13571 return NULL;
13572
13573 litsize = size_of_int_loc_descriptor (i);
13574 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13575 is more compact. For DW_OP_stack_value we need:
13576 litsize + 1 (DW_OP_stack_value)
13577 and for DW_OP_implicit_value:
13578 1 (DW_OP_implicit_value) + 1 (length) + size. */
13579 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13580 {
13581 loc_result = int_loc_descriptor (i);
13582 add_loc_descr (&loc_result,
13583 new_loc_descr (DW_OP_stack_value, 0, 0));
13584 return loc_result;
13585 }
13586
13587 loc_result = new_loc_descr (DW_OP_implicit_value,
13588 size, 0);
13589 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13590 loc_result->dw_loc_oprnd2.v.val_int = i;
13591 return loc_result;
13592 }
13593
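/* Worked size comparison (illustrative only) for the choice made above.
   For the 4-byte constant 7 on a target with 8-byte DWARF addresses:

     DW_OP_lit7 DW_OP_stack_value         1 + 1 = 2 bytes
     DW_OP_implicit_value 4 <4 bytes>     1 + 1 + 4 = 6 bytes

   so the stack-value form is chosen.  A 16-byte constant, by contrast,
   no longer fits the address-sized stack slot, so DW_OP_implicit_value
   is used regardless of size.  */
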
13594 /* Return a location descriptor that designates a base+offset location. */
13595
13596 static dw_loc_descr_ref
13597 based_loc_descr (rtx reg, HOST_WIDE_INT offset,
13598 enum var_init_status initialized)
13599 {
13600 unsigned int regno;
13601 dw_loc_descr_ref result;
13602 dw_fde_ref fde = cfun->fde;
13603
13604 /* We only use "frame base" when we're sure we're talking about the
13605 post-prologue local stack frame. We do this by *not* running
13606 register elimination until this point, and recognizing the special
13607 argument pointer and soft frame pointer rtx's. */
13608 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13609 {
13610 rtx elim = (ira_use_lra_p
13611 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13612 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13613
13614 if (elim != reg)
13615 {
13616 if (GET_CODE (elim) == PLUS)
13617 {
13618 offset += INTVAL (XEXP (elim, 1));
13619 elim = XEXP (elim, 0);
13620 }
13621 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13622 && (elim == hard_frame_pointer_rtx
13623 || elim == stack_pointer_rtx))
13624 || elim == (frame_pointer_needed
13625 ? hard_frame_pointer_rtx
13626 : stack_pointer_rtx));
13627
13628 /* If drap register is used to align stack, use frame
13629 pointer + offset to access stack variables. If stack
13630 is aligned without drap, use stack pointer + offset to
13631 access stack variables. */
13632 if (crtl->stack_realign_tried
13633 && reg == frame_pointer_rtx)
13634 {
13635 int base_reg
13636 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13637 ? HARD_FRAME_POINTER_REGNUM
13638 : REGNO (elim));
13639 return new_reg_loc_descr (base_reg, offset);
13640 }
13641
13642 gcc_assert (frame_pointer_fb_offset_valid);
13643 offset += frame_pointer_fb_offset;
13644 return new_loc_descr (DW_OP_fbreg, offset, 0);
13645 }
13646 }
13647
13648 regno = REGNO (reg);
13649 #ifdef LEAF_REG_REMAP
13650 if (crtl->uses_only_leaf_regs)
13651 {
13652 int leaf_reg = LEAF_REG_REMAP (regno);
13653 if (leaf_reg != -1)
13654 regno = (unsigned) leaf_reg;
13655 }
13656 #endif
13657 regno = DWARF_FRAME_REGNUM (regno);
13658
13659 if (!optimize && fde
13660 && (fde->drap_reg == regno || fde->vdrap_reg == regno))
13661 {
13662 /* Use cfa+offset to represent the location of arguments passed
13663 on the stack when drap is used to align stack.
13664 Only do this when not optimizing; for optimized code var-tracking
13665 is supposed to track where the arguments live, and the register
13666 used as vdrap or drap in some spot might be used for something
13667 else in other parts of the routine. */
13668 return new_loc_descr (DW_OP_fbreg, offset, 0);
13669 }
13670
13671 if (regno <= 31)
13672 result = new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + regno),
13673 offset, 0);
13674 else
13675 result = new_loc_descr (DW_OP_bregx, regno, offset);
13676
13677 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13678 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13679
13680 return result;
13681 }
13682
13683 /* Return true if this RTL expression describes a base+offset calculation. */
13684
13685 static inline int
13686 is_based_loc (const_rtx rtl)
13687 {
13688 return (GET_CODE (rtl) == PLUS
13689 && ((REG_P (XEXP (rtl, 0))
13690 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13691 && CONST_INT_P (XEXP (rtl, 1)))));
13692 }
13693
13694 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13695 failed. */
13696
13697 static dw_loc_descr_ref
13698 tls_mem_loc_descriptor (rtx mem)
13699 {
13700 tree base;
13701 dw_loc_descr_ref loc_result;
13702
13703 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13704 return NULL;
13705
13706 base = get_base_address (MEM_EXPR (mem));
13707 if (base == NULL
13708 || !VAR_P (base)
13709 || !DECL_THREAD_LOCAL_P (base))
13710 return NULL;
13711
13712 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13713 if (loc_result == NULL)
13714 return NULL;
13715
13716 if (MEM_OFFSET (mem))
13717 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13718
13719 return loc_result;
13720 }
13721
13722 /* Output debug info about the reason why we failed to expand an expression
13723 as a DWARF expression. */
13724
13725 static void
13726 expansion_failed (tree expr, rtx rtl, char const *reason)
13727 {
13728 if (dump_file && (dump_flags & TDF_DETAILS))
13729 {
13730 fprintf (dump_file, "Failed to expand as dwarf: ");
13731 if (expr)
13732 print_generic_expr (dump_file, expr, dump_flags);
13733 if (rtl)
13734 {
13735 fprintf (dump_file, "\n");
13736 print_rtl (dump_file, rtl);
13737 }
13738 fprintf (dump_file, "\nReason: %s\n", reason);
13739 }
13740 }
13741
13742 /* Helper function for const_ok_for_output. */
13743
13744 static bool
13745 const_ok_for_output_1 (rtx rtl)
13746 {
13747 if (targetm.const_not_ok_for_debug_p (rtl))
13748 {
13749 if (GET_CODE (rtl) != UNSPEC)
13750 {
13751 expansion_failed (NULL_TREE, rtl,
13752 "Expression rejected for debug by the backend.\n");
13753 return false;
13754 }
13755
13756 /* If delegitimize_address couldn't do anything with the UNSPEC, and
13757 the target hook doesn't explicitly allow it in debug info, assume
13758 we can't express it in the debug info. */
13759 /* Don't complain about TLS UNSPECs, those are just too hard to
13760 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13761 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13762 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13763 if (flag_checking
13764 && (XVECLEN (rtl, 0) == 0
13765 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13766 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13767 inform (current_function_decl
13768 ? DECL_SOURCE_LOCATION (current_function_decl)
13769 : UNKNOWN_LOCATION,
13770 #if NUM_UNSPEC_VALUES > 0
13771 "non-delegitimized UNSPEC %s (%d) found in variable location",
13772 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13773 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13774 XINT (rtl, 1));
13775 #else
13776 "non-delegitimized UNSPEC %d found in variable location",
13777 XINT (rtl, 1));
13778 #endif
13779 expansion_failed (NULL_TREE, rtl,
13780 "UNSPEC hasn't been delegitimized.\n");
13781 return false;
13782 }
13783
13784 if (CONST_POLY_INT_P (rtl))
13785 return false;
13786
13787 if (targetm.const_not_ok_for_debug_p (rtl))
13788 {
13789 expansion_failed (NULL_TREE, rtl,
13790 "Expression rejected for debug by the backend.\n");
13791 return false;
13792 }
13793
13794 /* FIXME: Refer to PR60655. It is possible for simplification
13795 of rtl expressions in var tracking to produce such expressions.
13796 We should really identify / validate expressions
13797 enclosed in CONST that can be handled by assemblers on various
13798 targets and only handle legitimate cases here. */
13799 switch (GET_CODE (rtl))
13800 {
13801 case SYMBOL_REF:
13802 break;
13803 case NOT:
13804 case NEG:
13805 return false;
13806 default:
13807 return true;
13808 }
13809
13810 if (CONSTANT_POOL_ADDRESS_P (rtl))
13811 {
13812 bool marked;
13813 get_pool_constant_mark (rtl, &marked);
13814 /* If all references to this pool constant were optimized away,
13815 it was not output and thus we can't represent it. */
13816 if (!marked)
13817 {
13818 expansion_failed (NULL_TREE, rtl,
13819 "Constant was removed from constant pool.\n");
13820 return false;
13821 }
13822 }
13823
13824 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13825 return false;
13826
13827 /* Avoid references to external symbols in debug info: on several targets
13828 the linker might even refuse to link when linking a shared library, and in
13829 many other cases the relocations for .debug_info/.debug_loc are dropped, so
13830 the address becomes zero anyway. Hidden symbols, which are guaranteed to be
13831 defined within the same shared library or executable, are fine. */
13832 if (SYMBOL_REF_EXTERNAL_P (rtl))
13833 {
13834 tree decl = SYMBOL_REF_DECL (rtl);
13835
13836 if (decl == NULL || !targetm.binds_local_p (decl))
13837 {
13838 expansion_failed (NULL_TREE, rtl,
13839 "Symbol not defined in current TU.\n");
13840 return false;
13841 }
13842 }
13843
13844 return true;
13845 }
13846
13847 /* Return true if constant RTL can be emitted in DW_OP_addr or
13848 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13849 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13850
13851 static bool
13852 const_ok_for_output (rtx rtl)
13853 {
13854 if (GET_CODE (rtl) == SYMBOL_REF)
13855 return const_ok_for_output_1 (rtl);
13856
13857 if (GET_CODE (rtl) == CONST)
13858 {
13859 subrtx_var_iterator::array_type array;
13860 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13861 if (!const_ok_for_output_1 (*iter))
13862 return false;
13863 return true;
13864 }
13865
13866 return true;
13867 }
13868
13869 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13870 if possible, NULL otherwise. */
13871
13872 static dw_die_ref
13873 base_type_for_mode (machine_mode mode, bool unsignedp)
13874 {
13875 dw_die_ref type_die;
13876 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13877
13878 if (type == NULL)
13879 return NULL;
13880 switch (TREE_CODE (type))
13881 {
13882 case INTEGER_TYPE:
13883 case REAL_TYPE:
13884 break;
13885 default:
13886 return NULL;
13887 }
13888 type_die = lookup_type_die (type);
13889 if (!type_die)
13890 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13891 comp_unit_die ());
13892 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13893 return NULL;
13894 return type_die;
13895 }
13896
13897 /* For OP, a descriptor assumed to be in unsigned MODE, convert it to an
13898 unsigned type matching MODE, or, if MODE is narrower than or as wide as
13899 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
13900 possible. */
13901
13902 static dw_loc_descr_ref
13903 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
13904 {
13905 machine_mode outer_mode = mode;
13906 dw_die_ref type_die;
13907 dw_loc_descr_ref cvt;
13908
13909 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13910 {
13911 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13912 return op;
13913 }
13914 type_die = base_type_for_mode (outer_mode, 1);
13915 if (type_die == NULL)
13916 return NULL;
13917 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13918 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13919 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13920 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13921 add_loc_descr (&op, cvt);
13922 return op;
13923 }
13924
13925 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13926
13927 static dw_loc_descr_ref
13928 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13929 dw_loc_descr_ref op1)
13930 {
13931 dw_loc_descr_ref ret = op0;
13932 add_loc_descr (&ret, op1);
13933 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13934 if (STORE_FLAG_VALUE != 1)
13935 {
13936 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
13937 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13938 }
13939 return ret;
13940 }
13941
13942 /* Subroutine of scompare_loc_descriptor for the case in which we're
13943 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
13944 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
13945
13946 static dw_loc_descr_ref
13947 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
13948 scalar_int_mode op_mode,
13949 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
13950 {
13951 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
13952 dw_loc_descr_ref cvt;
13953
13954 if (type_die == NULL)
13955 return NULL;
13956 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13957 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13958 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13959 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13960 add_loc_descr (&op0, cvt);
13961 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13962 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13963 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13964 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13965 add_loc_descr (&op1, cvt);
13966 return compare_loc_descriptor (op, op0, op1);
13967 }
13968
13969 /* Subroutine of scompare_loc_descriptor for the case in which we're
13970 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
13971 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
13972
13973 static dw_loc_descr_ref
13974 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
13975 scalar_int_mode op_mode,
13976 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
13977 {
13978 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
13979 /* For eq/ne, if the operands are known to be zero-extended,
13980 there is no need to do the fancy shifting up. */
13981 if (op == DW_OP_eq || op == DW_OP_ne)
13982 {
13983 dw_loc_descr_ref last0, last1;
13984 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
13985 ;
13986 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
13987 ;
13988 /* deref_size zero extends, and for constants we can check
13989 whether they are zero extended or not. */
13990 if (((last0->dw_loc_opc == DW_OP_deref_size
13991 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13992 || (CONST_INT_P (XEXP (rtl, 0))
13993 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
13994 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
13995 && ((last1->dw_loc_opc == DW_OP_deref_size
13996 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
13997 || (CONST_INT_P (XEXP (rtl, 1))
13998 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
13999 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14000 return compare_loc_descriptor (op, op0, op1);
14001
14002 /* EQ/NE comparison against constant in narrower type than
14003 DWARF2_ADDR_SIZE can be performed either as
14004 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14005 DW_OP_{eq,ne}
14006 or
14007 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14008 DW_OP_{eq,ne}. Pick whatever is shorter. */
14009 if (CONST_INT_P (XEXP (rtl, 1))
14010 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14011 && (size_of_int_loc_descriptor (shift) + 1
14012 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14013 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14014 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14015 & GET_MODE_MASK (op_mode))))
14016 {
14017 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14018 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14019 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14020 & GET_MODE_MASK (op_mode));
14021 return compare_loc_descriptor (op, op0, op1);
14022 }
14023 }
14024 add_loc_descr (&op0, int_loc_descriptor (shift));
14025 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14026 if (CONST_INT_P (XEXP (rtl, 1)))
14027 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14028 else
14029 {
14030 add_loc_descr (&op1, int_loc_descriptor (shift));
14031 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14032 }
14033 return compare_loc_descriptor (op, op0, op1);
14034 }
14035
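/* Minimal stand-alone sketch (not part of dwarf2out.c) of the shift-up
   trick above, assuming 32-bit int: multiplying both narrow operands by
   2**shift moves their sign bits to the top bit of the full-width stack
   slot, so the full-width signed comparison gives the same answer as the
   narrow one.  Here an 8-bit comparison is modelled on a 32-bit stack,
   i.e. shift == 24.  */

static int
narrow_scompare_example (void)
{
  int a = -5, b = 3;	/* Values of a signed 8-bit type.  */

  /* What DW_OP_lit24 DW_OP_shl leaves on the stack for each operand,
     written as a multiplication to stay within defined C behaviour.  */
  int shifted_a = a * (1 << 24);
  int shifted_b = b * (1 << 24);

  return (a < b) == (shifted_a < shifted_b);	/* Always 1 (true).  */
}
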
14036 /* Return location descriptor for signed comparison OP RTL. */
14037
14038 static dw_loc_descr_ref
14039 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14040 machine_mode mem_mode)
14041 {
14042 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14043 dw_loc_descr_ref op0, op1;
14044
14045 if (op_mode == VOIDmode)
14046 op_mode = GET_MODE (XEXP (rtl, 1));
14047 if (op_mode == VOIDmode)
14048 return NULL;
14049
14050 scalar_int_mode int_op_mode;
14051 if (dwarf_strict
14052 && dwarf_version < 5
14053 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14054 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14055 return NULL;
14056
14057 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14058 VAR_INIT_STATUS_INITIALIZED);
14059 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14060 VAR_INIT_STATUS_INITIALIZED);
14061
14062 if (op0 == NULL || op1 == NULL)
14063 return NULL;
14064
14065 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14066 {
14067 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14068 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14069
14070 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14071 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14072 }
14073 return compare_loc_descriptor (op, op0, op1);
14074 }
14075
14076 /* Return location descriptor for unsigned comparison OP RTL. */
14077
14078 static dw_loc_descr_ref
14079 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14080 machine_mode mem_mode)
14081 {
14082 dw_loc_descr_ref op0, op1;
14083
14084 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14085 if (test_op_mode == VOIDmode)
14086 test_op_mode = GET_MODE (XEXP (rtl, 1));
14087
14088 scalar_int_mode op_mode;
14089 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14090 return NULL;
14091
14092 if (dwarf_strict
14093 && dwarf_version < 5
14094 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14095 return NULL;
14096
14097 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14098 VAR_INIT_STATUS_INITIALIZED);
14099 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14100 VAR_INIT_STATUS_INITIALIZED);
14101
14102 if (op0 == NULL || op1 == NULL)
14103 return NULL;
14104
14105 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14106 {
14107 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14108 dw_loc_descr_ref last0, last1;
14109 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14110 ;
14111 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14112 ;
14113 if (CONST_INT_P (XEXP (rtl, 0)))
14114 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14115 /* deref_size zero extends, so no need to mask it again. */
14116 else if (last0->dw_loc_opc != DW_OP_deref_size
14117 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14118 {
14119 add_loc_descr (&op0, int_loc_descriptor (mask));
14120 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14121 }
14122 if (CONST_INT_P (XEXP (rtl, 1)))
14123 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14124 /* deref_size zero extends, so no need to mask it again. */
14125 else if (last1->dw_loc_opc != DW_OP_deref_size
14126 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14127 {
14128 add_loc_descr (&op1, int_loc_descriptor (mask));
14129 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14130 }
14131 }
14132 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14133 {
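      /* The DWARF relational operators compare their operands as signed
	 values.  To get an unsigned comparison of two DWARF2_ADDR_SIZE-wide
	 values, flip the sign bit of both operands by adding a bias of
	 2**(DWARF2_ADDR_SIZE * 8 - 1), which maps unsigned order onto
	 signed order.  */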
14134 HOST_WIDE_INT bias = 1;
14135 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14136 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14137 if (CONST_INT_P (XEXP (rtl, 1)))
14138 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14139 + INTVAL (XEXP (rtl, 1)));
14140 else
14141 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14142 bias, 0));
14143 }
14144 return compare_loc_descriptor (op, op0, op1);
14145 }
14146
14147 /* Return location descriptor for {U,S}{MIN,MAX}. */
14148
14149 static dw_loc_descr_ref
14150 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14151 machine_mode mem_mode)
14152 {
14153 enum dwarf_location_atom op;
14154 dw_loc_descr_ref op0, op1, ret;
14155 dw_loc_descr_ref bra_node, drop_node;
14156
14157 scalar_int_mode int_mode;
14158 if (dwarf_strict
14159 && dwarf_version < 5
14160 && (!is_a <scalar_int_mode> (mode, &int_mode)
14161 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14162 return NULL;
14163
14164 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14165 VAR_INIT_STATUS_INITIALIZED);
14166 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14167 VAR_INIT_STATUS_INITIALIZED);
14168
14169 if (op0 == NULL || op1 == NULL)
14170 return NULL;
14171
14172 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14173 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14174 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14175 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14176 {
14177 /* Checked by the caller. */
14178 int_mode = as_a <scalar_int_mode> (mode);
14179 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14180 {
14181 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14182 add_loc_descr (&op0, int_loc_descriptor (mask));
14183 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14184 add_loc_descr (&op1, int_loc_descriptor (mask));
14185 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14186 }
14187 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14188 {
14189 HOST_WIDE_INT bias = 1;
14190 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14191 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14192 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14193 }
14194 }
14195 else if (is_a <scalar_int_mode> (mode, &int_mode)
14196 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14197 {
14198 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14199 add_loc_descr (&op0, int_loc_descriptor (shift));
14200 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14201 add_loc_descr (&op1, int_loc_descriptor (shift));
14202 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14203 }
14204 else if (is_a <scalar_int_mode> (mode, &int_mode)
14205 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14206 {
14207 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14208 dw_loc_descr_ref cvt;
14209 if (type_die == NULL)
14210 return NULL;
14211 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14212 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14213 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14214 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14215 add_loc_descr (&op0, cvt);
14216 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14217 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14218 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14219 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14220 add_loc_descr (&op1, cvt);
14221 }
14222
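  /* The sequence built above leaves OP0, OP1 and a comparison copy of each
     (possibly masked, biased or converted for the mode) on the DWARF stack.
     Compare the two copies; DW_OP_bra then skips the DW_OP_swap when the
     first operand is already the desired result, so the final DW_OP_drop
     leaves the minimum (or maximum) on top of the stack.  */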
14223 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14224 op = DW_OP_lt;
14225 else
14226 op = DW_OP_gt;
14227 ret = op0;
14228 add_loc_descr (&ret, op1);
14229 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14230 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14231 add_loc_descr (&ret, bra_node);
14232 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14233 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14234 add_loc_descr (&ret, drop_node);
14235 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14236 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14237 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14238 && is_a <scalar_int_mode> (mode, &int_mode)
14239 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14240 ret = convert_descriptor_to_mode (int_mode, ret);
14241 return ret;
14242 }
14243
14244 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
14245    on the operands of RTL, converting both operands to TYPE_DIE first and
14246    converting the result back to an unsigned value of MODE afterwards.  */
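/* As an illustrative sketch (assuming a 32-bit DWARF address size and a
   signed DImode division, i.e. the MOD/DIV cases below), the emitted
   sequence is
     <op0> DW_OP_convert <signed DI type> <op1> DW_OP_convert <signed DI type>
     DW_OP_div DW_OP_convert <unsigned DI type>
   so the operation is carried out in the full 64-bit type even though the
   untyped DWARF stack is only 32 bits wide.  */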
14247
14248 static dw_loc_descr_ref
14249 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14250 scalar_int_mode mode, machine_mode mem_mode)
14251 {
14252 dw_loc_descr_ref cvt, op0, op1;
14253
14254 if (type_die == NULL)
14255 return NULL;
14256 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14257 VAR_INIT_STATUS_INITIALIZED);
14258 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14259 VAR_INIT_STATUS_INITIALIZED);
14260 if (op0 == NULL || op1 == NULL)
14261 return NULL;
14262 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14263 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14264 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14265 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14266 add_loc_descr (&op0, cvt);
14267 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14268 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14269 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14270 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14271 add_loc_descr (&op1, cvt);
14272 add_loc_descr (&op0, op1);
14273 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14274 return convert_descriptor_to_mode (mode, op0);
14275 }
14276
14277 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14278 const0 is DW_OP_lit0 or corresponding typed constant,
14279 const1 is DW_OP_lit1 or corresponding typed constant
14280 and constMSB is constant with just the MSB bit set
14281 for the mode):
14282 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14283 L1: const0 DW_OP_swap
14284 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14285 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14286 L3: DW_OP_drop
14287 L4: DW_OP_nop
14288
14289 CTZ is similar:
14290 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14291 L1: const0 DW_OP_swap
14292 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14293 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14294 L3: DW_OP_drop
14295 L4: DW_OP_nop
14296
14297 FFS is similar:
14298 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14299 L1: const1 DW_OP_swap
14300 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14301 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14302 L3: DW_OP_drop
14303 L4: DW_OP_nop */
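/* Worked example (illustrative only, for a QImode input, so constMSB is
   0x80): evaluating the CLZ expansion above on the value 0x10 first pushes
   a count of 0, then keeps shifting the value left while bumping the count,
   0x10 -> 0x20 -> 0x40 -> 0x80; at that point the DW_OP_and against
   constMSB becomes non-zero, the loop branches to L3 and the count 3 is
   left on the stack, which is indeed CLZ (0x10) for an 8-bit mode.  */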
14304
14305 static dw_loc_descr_ref
14306 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14307 machine_mode mem_mode)
14308 {
14309 dw_loc_descr_ref op0, ret, tmp;
14310 HOST_WIDE_INT valv;
14311 dw_loc_descr_ref l1jump, l1label;
14312 dw_loc_descr_ref l2jump, l2label;
14313 dw_loc_descr_ref l3jump, l3label;
14314 dw_loc_descr_ref l4jump, l4label;
14315 rtx msb;
14316
14317 if (GET_MODE (XEXP (rtl, 0)) != mode)
14318 return NULL;
14319
14320 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14321 VAR_INIT_STATUS_INITIALIZED);
14322 if (op0 == NULL)
14323 return NULL;
14324 ret = op0;
14325 if (GET_CODE (rtl) == CLZ)
14326 {
14327 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14328 valv = GET_MODE_BITSIZE (mode);
14329 }
14330 else if (GET_CODE (rtl) == FFS)
14331 valv = 0;
14332 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14333 valv = GET_MODE_BITSIZE (mode);
14334 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14335 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14336 add_loc_descr (&ret, l1jump);
14337 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14338 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14339 VAR_INIT_STATUS_INITIALIZED);
14340 if (tmp == NULL)
14341 return NULL;
14342 add_loc_descr (&ret, tmp);
14343 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14344 add_loc_descr (&ret, l4jump);
14345 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14346 ? const1_rtx : const0_rtx,
14347 mode, mem_mode,
14348 VAR_INIT_STATUS_INITIALIZED);
14349 if (l1label == NULL)
14350 return NULL;
14351 add_loc_descr (&ret, l1label);
14352 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14353 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14354 add_loc_descr (&ret, l2label);
14355 if (GET_CODE (rtl) != CLZ)
14356 msb = const1_rtx;
14357 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14358 msb = GEN_INT (HOST_WIDE_INT_1U
14359 << (GET_MODE_BITSIZE (mode) - 1));
14360 else
14361 msb = immed_wide_int_const
14362 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14363 GET_MODE_PRECISION (mode)), mode);
14364 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14365 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14366 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14367 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14368 else
14369 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14370 VAR_INIT_STATUS_INITIALIZED);
14371 if (tmp == NULL)
14372 return NULL;
14373 add_loc_descr (&ret, tmp);
14374 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14375 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14376 add_loc_descr (&ret, l3jump);
14377 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14378 VAR_INIT_STATUS_INITIALIZED);
14379 if (tmp == NULL)
14380 return NULL;
14381 add_loc_descr (&ret, tmp);
14382 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14383 ? DW_OP_shl : DW_OP_shr, 0, 0));
14384 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14385 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14386 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14387 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14388 add_loc_descr (&ret, l2jump);
14389 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14390 add_loc_descr (&ret, l3label);
14391 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14392 add_loc_descr (&ret, l4label);
14393 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14394 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14395 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14396 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14397 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14398 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14399 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14400 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14401 return ret;
14402 }
14403
14404 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14405 const1 is DW_OP_lit1 or corresponding typed constant):
14406 const0 DW_OP_swap
14407 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14408 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14409 L2: DW_OP_drop
14410
14411 PARITY is similar:
14412 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14413 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14414 L2: DW_OP_drop */
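/* The intent of the loop above is to add the low bit of the value to a
   running count (or, for PARITY, XOR it in) and then shift the value right
   by one until it reaches zero; e.g. (illustrative) for an input of 0xb
   (binary 1011) the result left on the stack is 3 for POPCOUNT and 1 for
   PARITY.  */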
14415
14416 static dw_loc_descr_ref
14417 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14418 machine_mode mem_mode)
14419 {
14420 dw_loc_descr_ref op0, ret, tmp;
14421 dw_loc_descr_ref l1jump, l1label;
14422 dw_loc_descr_ref l2jump, l2label;
14423
14424 if (GET_MODE (XEXP (rtl, 0)) != mode)
14425 return NULL;
14426
14427 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14428 VAR_INIT_STATUS_INITIALIZED);
14429 if (op0 == NULL)
14430 return NULL;
14431 ret = op0;
14432 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14433 VAR_INIT_STATUS_INITIALIZED);
14434 if (tmp == NULL)
14435 return NULL;
14436 add_loc_descr (&ret, tmp);
14437 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14438 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14439 add_loc_descr (&ret, l1label);
14440 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14441 add_loc_descr (&ret, l2jump);
14442 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14443 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14444 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14445 VAR_INIT_STATUS_INITIALIZED);
14446 if (tmp == NULL)
14447 return NULL;
14448 add_loc_descr (&ret, tmp);
14449 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14450 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14451 ? DW_OP_plus : DW_OP_xor, 0, 0));
14452 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14453 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14454 VAR_INIT_STATUS_INITIALIZED);
14455 add_loc_descr (&ret, tmp);
14456 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14457 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14458 add_loc_descr (&ret, l1jump);
14459 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14460 add_loc_descr (&ret, l2label);
14461 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14462 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14463 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14464 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14465 return ret;
14466 }
14467
14468 /* BSWAP (constS is initial shift count, either 56 or 24):
14469 constS const0
14470 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14471 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14472 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14473 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14474 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
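/* Worked example (illustrative, for a 32-bit value, so constS is 24):
   byte-swapping 0x11223344 extracts the bytes 0x44, 0x33, 0x22, 0x11 with
   right-shift counts 0, 8, 16, 24 and ORs them back in at left-shift counts
   24, 16, 8, 0, so the accumulator grows 0x44000000 -> 0x44330000 ->
   0x44332200 -> 0x44332211, which is left on the stack once the running
   shift count reaches zero.  */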
14475
14476 static dw_loc_descr_ref
14477 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
14478 machine_mode mem_mode)
14479 {
14480 dw_loc_descr_ref op0, ret, tmp;
14481 dw_loc_descr_ref l1jump, l1label;
14482 dw_loc_descr_ref l2jump, l2label;
14483
14484 if (BITS_PER_UNIT != 8
14485 || (GET_MODE_BITSIZE (mode) != 32
14486 && GET_MODE_BITSIZE (mode) != 64))
14487 return NULL;
14488
14489 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14490 VAR_INIT_STATUS_INITIALIZED);
14491 if (op0 == NULL)
14492 return NULL;
14493
14494 ret = op0;
14495 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14496 mode, mem_mode,
14497 VAR_INIT_STATUS_INITIALIZED);
14498 if (tmp == NULL)
14499 return NULL;
14500 add_loc_descr (&ret, tmp);
14501 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14502 VAR_INIT_STATUS_INITIALIZED);
14503 if (tmp == NULL)
14504 return NULL;
14505 add_loc_descr (&ret, tmp);
14506 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14507 add_loc_descr (&ret, l1label);
14508 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14509 mode, mem_mode,
14510 VAR_INIT_STATUS_INITIALIZED);
14511 add_loc_descr (&ret, tmp);
14512 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14513 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14514 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14515 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14516 VAR_INIT_STATUS_INITIALIZED);
14517 if (tmp == NULL)
14518 return NULL;
14519 add_loc_descr (&ret, tmp);
14520 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14521 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14522 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14523 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14524 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14525 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14526 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14527 VAR_INIT_STATUS_INITIALIZED);
14528 add_loc_descr (&ret, tmp);
14529 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14530 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14531 add_loc_descr (&ret, l2jump);
14532 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14533 VAR_INIT_STATUS_INITIALIZED);
14534 add_loc_descr (&ret, tmp);
14535 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14536 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14537 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14538 add_loc_descr (&ret, l1jump);
14539 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14540 add_loc_descr (&ret, l2label);
14541 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14542 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14543 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14544 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14545 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14546 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14547 return ret;
14548 }
14549
14550 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14551 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14552 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14553 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14554
14555 ROTATERT is similar:
14556 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14557 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14558 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
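/* For instance (illustrative, for a HImode value on a wider DWARF stack,
   so constMASK is 0xffff and BITSIZE is 16), rotating X left by N computes
   ((X << N) & 0xffff) | ((X & 0xffff) >> (16 - N)); e.g. 0x1234 rotated
   left by 4 yields 0x2341.  */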
14559
14560 static dw_loc_descr_ref
14561 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
14562 machine_mode mem_mode)
14563 {
14564 rtx rtlop1 = XEXP (rtl, 1);
14565 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14566 int i;
14567
14568 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
14569 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14570 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14571 VAR_INIT_STATUS_INITIALIZED);
14572 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14573 VAR_INIT_STATUS_INITIALIZED);
14574 if (op0 == NULL || op1 == NULL)
14575 return NULL;
14576 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14577 for (i = 0; i < 2; i++)
14578 {
14579 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14580 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14581 mode, mem_mode,
14582 VAR_INIT_STATUS_INITIALIZED);
14583 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14584 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14585 ? DW_OP_const4u
14586 : HOST_BITS_PER_WIDE_INT == 64
14587 ? DW_OP_const8u : DW_OP_constu,
14588 GET_MODE_MASK (mode), 0);
14589 else
14590 mask[i] = NULL;
14591 if (mask[i] == NULL)
14592 return NULL;
14593 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14594 }
14595 ret = op0;
14596 add_loc_descr (&ret, op1);
14597 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14598 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14599 if (GET_CODE (rtl) == ROTATERT)
14600 {
14601 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14602 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14603 GET_MODE_BITSIZE (mode), 0));
14604 }
14605 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14606 if (mask[0] != NULL)
14607 add_loc_descr (&ret, mask[0]);
14608 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14609 if (mask[1] != NULL)
14610 {
14611 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14612 add_loc_descr (&ret, mask[1]);
14613 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14614 }
14615 if (GET_CODE (rtl) == ROTATE)
14616 {
14617 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14618 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14619 GET_MODE_BITSIZE (mode), 0));
14620 }
14621 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14622 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14623 return ret;
14624 }
14625
14626 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14627 for DEBUG_PARAMETER_REF RTL. */
14628
14629 static dw_loc_descr_ref
14630 parameter_ref_descriptor (rtx rtl)
14631 {
14632 dw_loc_descr_ref ret;
14633 dw_die_ref ref;
14634
14635 if (dwarf_strict)
14636 return NULL;
14637 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14638   /* With LTO, during LTRANS lookup_decl_die returns the late DIE, which only
14639      refers to the early DIE, so referencing it adds another indirection.  This
14640      seems to confuse gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
14641 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14642 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14643 if (ref)
14644 {
14645 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14646 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14647 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14648 }
14649 else
14650 {
14651 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14652 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14653 }
14654 return ret;
14655 }
14656
14657 /* The following routine converts the RTL for a variable or parameter
14658 (resident in memory) into an equivalent Dwarf representation of a
14659 mechanism for getting the address of that same variable onto the top of a
14660 hypothetical "address evaluation" stack.
14661
14662 When creating memory location descriptors, we are effectively transforming
14663 the RTL for a memory-resident object into its Dwarf postfix expression
14664 equivalent. This routine recursively descends an RTL tree, turning
14665 it into Dwarf postfix code as it goes.
14666
14667 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14668
14669 MEM_MODE is the mode of the memory reference, needed to handle some
14670 autoincrement addressing modes.
14671
14672 Return 0 if we can't represent the location. */
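/* For example (purely illustrative), given the address RTL
   (plus (reg <frame pointer>) (const_int 8)) this routine produces the
   postfix expression DW_OP_breg<fp> 8 (or DW_OP_fbreg 8 when a frame base
   is in use), which leaves the variable's address on the DWARF evaluation
   stack.  */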
14673
14674 dw_loc_descr_ref
14675 mem_loc_descriptor (rtx rtl, machine_mode mode,
14676 machine_mode mem_mode,
14677 enum var_init_status initialized)
14678 {
14679 dw_loc_descr_ref mem_loc_result = NULL;
14680 enum dwarf_location_atom op;
14681 dw_loc_descr_ref op0, op1;
14682 rtx inner = NULL_RTX;
14683
14684 if (mode == VOIDmode)
14685 mode = GET_MODE (rtl);
14686
14687 /* Note that for a dynamically sized array, the location we will generate a
14688 description of here will be the lowest numbered location which is
14689 actually within the array. That's *not* necessarily the same as the
14690 zeroth element of the array. */
14691
14692 rtl = targetm.delegitimize_address (rtl);
14693
14694 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14695 return NULL;
14696
14697 scalar_int_mode int_mode, inner_mode, op1_mode;
14698 switch (GET_CODE (rtl))
14699 {
14700 case POST_INC:
14701 case POST_DEC:
14702 case POST_MODIFY:
14703 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14704
14705 case SUBREG:
14706 /* The case of a subreg may arise when we have a local (register)
14707 variable or a formal (register) parameter which doesn't quite fill
14708 up an entire register. For now, just assume that it is
14709 legitimate to make the Dwarf info refer to the whole register which
14710 contains the given subreg. */
14711 if (!subreg_lowpart_p (rtl))
14712 break;
14713 inner = SUBREG_REG (rtl);
14714 /* FALLTHRU */
14715 case TRUNCATE:
14716 if (inner == NULL_RTX)
14717 inner = XEXP (rtl, 0);
14718 if (is_a <scalar_int_mode> (mode, &int_mode)
14719 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14720 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14721 #ifdef POINTERS_EXTEND_UNSIGNED
14722 || (int_mode == Pmode && mem_mode != VOIDmode)
14723 #endif
14724 )
14725 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
14726 {
14727 mem_loc_result = mem_loc_descriptor (inner,
14728 inner_mode,
14729 mem_mode, initialized);
14730 break;
14731 }
14732 if (dwarf_strict && dwarf_version < 5)
14733 break;
14734 if (is_a <scalar_int_mode> (mode, &int_mode)
14735 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14736 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
14737 : GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (inner)))
14738 {
14739 dw_die_ref type_die;
14740 dw_loc_descr_ref cvt;
14741
14742 mem_loc_result = mem_loc_descriptor (inner,
14743 GET_MODE (inner),
14744 mem_mode, initialized);
14745 if (mem_loc_result == NULL)
14746 break;
14747 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14748 if (type_die == NULL)
14749 {
14750 mem_loc_result = NULL;
14751 break;
14752 }
14753 if (GET_MODE_SIZE (mode)
14754 != GET_MODE_SIZE (GET_MODE (inner)))
14755 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14756 else
14757 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14758 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14759 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14760 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14761 add_loc_descr (&mem_loc_result, cvt);
14762 if (is_a <scalar_int_mode> (mode, &int_mode)
14763 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14764 {
14765 /* Convert it to untyped afterwards. */
14766 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14767 add_loc_descr (&mem_loc_result, cvt);
14768 }
14769 }
14770 break;
14771
14772 case REG:
14773 if (!is_a <scalar_int_mode> (mode, &int_mode)
14774 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14775 && rtl != arg_pointer_rtx
14776 && rtl != frame_pointer_rtx
14777 #ifdef POINTERS_EXTEND_UNSIGNED
14778 && (int_mode != Pmode || mem_mode == VOIDmode)
14779 #endif
14780 ))
14781 {
14782 dw_die_ref type_die;
14783 unsigned int dbx_regnum;
14784
14785 if (dwarf_strict && dwarf_version < 5)
14786 break;
14787 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14788 break;
14789 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14790 if (type_die == NULL)
14791 break;
14792
14793 dbx_regnum = dbx_reg_number (rtl);
14794 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14795 break;
14796 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14797 dbx_regnum, 0);
14798 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14799 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14800 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14801 break;
14802 }
14803 /* Whenever a register number forms a part of the description of the
14804 method for calculating the (dynamic) address of a memory resident
14805 object, DWARF rules require the register number be referred to as
14806 a "base register". This distinction is not based in any way upon
14807 what category of register the hardware believes the given register
14808 belongs to. This is strictly DWARF terminology we're dealing with
14809 here. Note that in cases where the location of a memory-resident
14810 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14811 OP_CONST (0)) the actual DWARF location descriptor that we generate
14812 may just be OP_BASEREG (basereg). This may look deceptively like
14813 the object in question was allocated to a register (rather than in
14814 memory) so DWARF consumers need to be aware of the subtle
14815 distinction between OP_REG and OP_BASEREG. */
14816 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14817 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14818 else if (stack_realign_drap
14819 && crtl->drap_reg
14820 && crtl->args.internal_arg_pointer == rtl
14821 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14822 {
14823 /* If RTL is internal_arg_pointer, which has been optimized
14824 out, use DRAP instead. */
14825 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14826 VAR_INIT_STATUS_INITIALIZED);
14827 }
14828 break;
14829
14830 case SIGN_EXTEND:
14831 case ZERO_EXTEND:
14832 if (!is_a <scalar_int_mode> (mode, &int_mode)
14833 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
14834 break;
14835 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
14836 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14837 if (op0 == 0)
14838 break;
14839 else if (GET_CODE (rtl) == ZERO_EXTEND
14840 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14841 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
14842 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14843 to expand zero extend as two shifts instead of
14844 masking. */
14845 && GET_MODE_SIZE (inner_mode) <= 4)
14846 {
14847 mem_loc_result = op0;
14848 add_loc_descr (&mem_loc_result,
14849 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
14850 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14851 }
14852 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14853 {
14854 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
14855 shift *= BITS_PER_UNIT;
14856 if (GET_CODE (rtl) == SIGN_EXTEND)
14857 op = DW_OP_shra;
14858 else
14859 op = DW_OP_shr;
14860 mem_loc_result = op0;
14861 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14862 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14863 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14864 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14865 }
14866 else if (!dwarf_strict || dwarf_version >= 5)
14867 {
14868 dw_die_ref type_die1, type_die2;
14869 dw_loc_descr_ref cvt;
14870
14871 type_die1 = base_type_for_mode (inner_mode,
14872 GET_CODE (rtl) == ZERO_EXTEND);
14873 if (type_die1 == NULL)
14874 break;
14875 type_die2 = base_type_for_mode (int_mode, 1);
14876 if (type_die2 == NULL)
14877 break;
14878 mem_loc_result = op0;
14879 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14880 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14881 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14882 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14883 add_loc_descr (&mem_loc_result, cvt);
14884 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14885 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14886 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14887 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14888 add_loc_descr (&mem_loc_result, cvt);
14889 }
14890 break;
14891
14892 case MEM:
14893 {
14894 rtx new_rtl = avoid_constant_pool_reference (rtl);
14895 if (new_rtl != rtl)
14896 {
14897 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14898 initialized);
14899 if (mem_loc_result != NULL)
14900 return mem_loc_result;
14901 }
14902 }
14903 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14904 get_address_mode (rtl), mode,
14905 VAR_INIT_STATUS_INITIALIZED);
14906 if (mem_loc_result == NULL)
14907 mem_loc_result = tls_mem_loc_descriptor (rtl);
14908 if (mem_loc_result != NULL)
14909 {
14910 if (!is_a <scalar_int_mode> (mode, &int_mode)
14911 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14912 {
14913 dw_die_ref type_die;
14914 dw_loc_descr_ref deref;
14915
14916 if (dwarf_strict && dwarf_version < 5)
14917 return NULL;
14918 type_die
14919 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14920 if (type_die == NULL)
14921 return NULL;
14922 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type),
14923 GET_MODE_SIZE (mode), 0);
14924 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14925 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14926 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14927 add_loc_descr (&mem_loc_result, deref);
14928 }
14929 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14930 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
14931 else
14932 add_loc_descr (&mem_loc_result,
14933 new_loc_descr (DW_OP_deref_size,
14934 GET_MODE_SIZE (int_mode), 0));
14935 }
14936 break;
14937
14938 case LO_SUM:
14939 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
14940
14941 case LABEL_REF:
14942 /* Some ports can transform a symbol ref into a label ref, because
14943 the symbol ref is too far away and has to be dumped into a constant
14944 pool. */
14945 case CONST:
14946 case SYMBOL_REF:
14947 if (!is_a <scalar_int_mode> (mode, &int_mode)
14948 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14949 #ifdef POINTERS_EXTEND_UNSIGNED
14950 && (int_mode != Pmode || mem_mode == VOIDmode)
14951 #endif
14952 ))
14953 break;
14954 if (GET_CODE (rtl) == SYMBOL_REF
14955 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14956 {
14957 dw_loc_descr_ref temp;
14958
14959 /* If this is not defined, we have no way to emit the data. */
14960 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
14961 break;
14962
14963 temp = new_addr_loc_descr (rtl, dtprel_true);
14964
14965 /* We check for DWARF 5 here because gdb did not implement
14966 DW_OP_form_tls_address until after 7.12. */
14967 mem_loc_result = new_loc_descr ((dwarf_version >= 5
14968 ? DW_OP_form_tls_address
14969 : DW_OP_GNU_push_tls_address),
14970 0, 0);
14971 add_loc_descr (&mem_loc_result, temp);
14972
14973 break;
14974 }
14975
14976 if (!const_ok_for_output (rtl))
14977 {
14978 if (GET_CODE (rtl) == CONST)
14979 switch (GET_CODE (XEXP (rtl, 0)))
14980 {
14981 case NOT:
14982 op = DW_OP_not;
14983 goto try_const_unop;
14984 case NEG:
14985 op = DW_OP_neg;
14986 goto try_const_unop;
14987 try_const_unop:
14988 rtx arg;
14989 arg = XEXP (XEXP (rtl, 0), 0);
14990 if (!CONSTANT_P (arg))
14991 arg = gen_rtx_CONST (int_mode, arg);
14992 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
14993 initialized);
14994 if (op0)
14995 {
14996 mem_loc_result = op0;
14997 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14998 }
14999 break;
15000 default:
15001 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15002 mem_mode, initialized);
15003 break;
15004 }
15005 break;
15006 }
15007
15008 symref:
15009 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15010 vec_safe_push (used_rtx_array, rtl);
15011 break;
15012
15013 case CONCAT:
15014 case CONCATN:
15015 case VAR_LOCATION:
15016 case DEBUG_IMPLICIT_PTR:
15017 expansion_failed (NULL_TREE, rtl,
15018 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15019 return 0;
15020
15021 case ENTRY_VALUE:
15022 if (dwarf_strict && dwarf_version < 5)
15023 return NULL;
15024 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15025 {
15026 if (!is_a <scalar_int_mode> (mode, &int_mode)
15027 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15028 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15029 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15030 else
15031 {
15032 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15033 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15034 return NULL;
15035 op0 = one_reg_loc_descriptor (dbx_regnum,
15036 VAR_INIT_STATUS_INITIALIZED);
15037 }
15038 }
15039 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15040 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15041 {
15042 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15043 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15044 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15045 return NULL;
15046 }
15047 else
15048 gcc_unreachable ();
15049 if (op0 == NULL)
15050 return NULL;
15051 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15052 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15053 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15054 break;
15055
15056 case DEBUG_PARAMETER_REF:
15057 mem_loc_result = parameter_ref_descriptor (rtl);
15058 break;
15059
15060 case PRE_MODIFY:
15061 /* Extract the PLUS expression nested inside and fall into
15062 PLUS code below. */
15063 rtl = XEXP (rtl, 1);
15064 goto plus;
15065
15066 case PRE_INC:
15067 case PRE_DEC:
15068 /* Turn these into a PLUS expression and fall into the PLUS code
15069 below. */
15070 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15071 gen_int_mode (GET_CODE (rtl) == PRE_INC
15072 ? GET_MODE_UNIT_SIZE (mem_mode)
15073 : -GET_MODE_UNIT_SIZE (mem_mode),
15074 mode));
15075
15076 /* fall through */
15077
15078 case PLUS:
15079 plus:
15080 if (is_based_loc (rtl)
15081 && is_a <scalar_int_mode> (mode, &int_mode)
15082 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15083 || XEXP (rtl, 0) == arg_pointer_rtx
15084 || XEXP (rtl, 0) == frame_pointer_rtx))
15085 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15086 INTVAL (XEXP (rtl, 1)),
15087 VAR_INIT_STATUS_INITIALIZED);
15088 else
15089 {
15090 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15091 VAR_INIT_STATUS_INITIALIZED);
15092 if (mem_loc_result == 0)
15093 break;
15094
15095 if (CONST_INT_P (XEXP (rtl, 1))
15096 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15097 <= DWARF2_ADDR_SIZE))
15098 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15099 else
15100 {
15101 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15102 VAR_INIT_STATUS_INITIALIZED);
15103 if (op1 == 0)
15104 return NULL;
15105 add_loc_descr (&mem_loc_result, op1);
15106 add_loc_descr (&mem_loc_result,
15107 new_loc_descr (DW_OP_plus, 0, 0));
15108 }
15109 }
15110 break;
15111
15112 /* If a pseudo-reg is optimized away, it is possible for it to
15113 be replaced with a MEM containing a multiply or shift. */
15114 case MINUS:
15115 op = DW_OP_minus;
15116 goto do_binop;
15117
15118 case MULT:
15119 op = DW_OP_mul;
15120 goto do_binop;
15121
15122 case DIV:
15123 if ((!dwarf_strict || dwarf_version >= 5)
15124 && is_a <scalar_int_mode> (mode, &int_mode)
15125 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15126 {
15127 mem_loc_result = typed_binop (DW_OP_div, rtl,
15128 base_type_for_mode (mode, 0),
15129 int_mode, mem_mode);
15130 break;
15131 }
15132 op = DW_OP_div;
15133 goto do_binop;
15134
15135 case UMOD:
15136 op = DW_OP_mod;
15137 goto do_binop;
15138
15139 case ASHIFT:
15140 op = DW_OP_shl;
15141 goto do_shift;
15142
15143 case ASHIFTRT:
15144 op = DW_OP_shra;
15145 goto do_shift;
15146
15147 case LSHIFTRT:
15148 op = DW_OP_shr;
15149 goto do_shift;
15150
15151 do_shift:
15152 if (!is_a <scalar_int_mode> (mode, &int_mode))
15153 break;
15154 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15155 VAR_INIT_STATUS_INITIALIZED);
15156 {
15157 rtx rtlop1 = XEXP (rtl, 1);
15158 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15159 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15160 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15161 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15162 VAR_INIT_STATUS_INITIALIZED);
15163 }
15164
15165 if (op0 == 0 || op1 == 0)
15166 break;
15167
15168 mem_loc_result = op0;
15169 add_loc_descr (&mem_loc_result, op1);
15170 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15171 break;
15172
15173 case AND:
15174 op = DW_OP_and;
15175 goto do_binop;
15176
15177 case IOR:
15178 op = DW_OP_or;
15179 goto do_binop;
15180
15181 case XOR:
15182 op = DW_OP_xor;
15183 goto do_binop;
15184
15185 do_binop:
15186 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15187 VAR_INIT_STATUS_INITIALIZED);
15188 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15189 VAR_INIT_STATUS_INITIALIZED);
15190
15191 if (op0 == 0 || op1 == 0)
15192 break;
15193
15194 mem_loc_result = op0;
15195 add_loc_descr (&mem_loc_result, op1);
15196 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15197 break;
15198
15199 case MOD:
15200 if ((!dwarf_strict || dwarf_version >= 5)
15201 && is_a <scalar_int_mode> (mode, &int_mode)
15202 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15203 {
15204 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15205 base_type_for_mode (mode, 0),
15206 int_mode, mem_mode);
15207 break;
15208 }
15209
15210 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15211 VAR_INIT_STATUS_INITIALIZED);
15212 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15213 VAR_INIT_STATUS_INITIALIZED);
15214
15215 if (op0 == 0 || op1 == 0)
15216 break;
15217
15218 mem_loc_result = op0;
15219 add_loc_descr (&mem_loc_result, op1);
15220 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15221 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15222 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15223 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15224 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15225 break;
15226
15227 case UDIV:
15228 if ((!dwarf_strict || dwarf_version >= 5)
15229 && is_a <scalar_int_mode> (mode, &int_mode))
15230 {
15231 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15232 {
15233 op = DW_OP_div;
15234 goto do_binop;
15235 }
15236 mem_loc_result = typed_binop (DW_OP_div, rtl,
15237 base_type_for_mode (int_mode, 1),
15238 int_mode, mem_mode);
15239 }
15240 break;
15241
15242 case NOT:
15243 op = DW_OP_not;
15244 goto do_unop;
15245
15246 case ABS:
15247 op = DW_OP_abs;
15248 goto do_unop;
15249
15250 case NEG:
15251 op = DW_OP_neg;
15252 goto do_unop;
15253
15254 do_unop:
15255 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15256 VAR_INIT_STATUS_INITIALIZED);
15257
15258 if (op0 == 0)
15259 break;
15260
15261 mem_loc_result = op0;
15262 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15263 break;
15264
15265 case CONST_INT:
15266 if (!is_a <scalar_int_mode> (mode, &int_mode)
15267 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15268 #ifdef POINTERS_EXTEND_UNSIGNED
15269 || (int_mode == Pmode
15270 && mem_mode != VOIDmode
15271 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15272 #endif
15273 )
15274 {
15275 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15276 break;
15277 }
15278 if ((!dwarf_strict || dwarf_version >= 5)
15279 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15280 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15281 {
15282 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15283 scalar_int_mode amode;
15284 if (type_die == NULL)
15285 return NULL;
15286 if (INTVAL (rtl) >= 0
15287 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15288 .exists (&amode))
15289 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15290 /* const DW_OP_convert <XXX> vs.
15291 DW_OP_const_type <XXX, 1, const>. */
15292 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15293 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15294 {
15295 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15296 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15297 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15298 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15299 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15300 add_loc_descr (&mem_loc_result, op0);
15301 return mem_loc_result;
15302 }
15303 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15304 INTVAL (rtl));
15305 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15306 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15307 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15308 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15309 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15310 else
15311 {
15312 mem_loc_result->dw_loc_oprnd2.val_class
15313 = dw_val_class_const_double;
15314 mem_loc_result->dw_loc_oprnd2.v.val_double
15315 = double_int::from_shwi (INTVAL (rtl));
15316 }
15317 }
15318 break;
15319
15320 case CONST_DOUBLE:
15321 if (!dwarf_strict || dwarf_version >= 5)
15322 {
15323 dw_die_ref type_die;
15324
15325 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15326 CONST_DOUBLE rtx could represent either a large integer
15327 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15328 the value is always a floating point constant.
15329
15330 When it is an integer, a CONST_DOUBLE is used whenever
15331 the constant requires 2 HWIs to be adequately represented.
15332 We output CONST_DOUBLEs as blocks. */
15333 if (mode == VOIDmode
15334 || (GET_MODE (rtl) == VOIDmode
15335 && GET_MODE_BITSIZE (mode) != HOST_BITS_PER_DOUBLE_INT))
15336 break;
15337 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15338 if (type_die == NULL)
15339 return NULL;
15340 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15341 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15342 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15343 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15344 #if TARGET_SUPPORTS_WIDE_INT == 0
15345 if (!SCALAR_FLOAT_MODE_P (mode))
15346 {
15347 mem_loc_result->dw_loc_oprnd2.val_class
15348 = dw_val_class_const_double;
15349 mem_loc_result->dw_loc_oprnd2.v.val_double
15350 = rtx_to_double_int (rtl);
15351 }
15352 else
15353 #endif
15354 {
15355 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15356 unsigned int length = GET_MODE_SIZE (float_mode);
15357 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15358
15359 insert_float (rtl, array);
15360 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15361 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15362 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15363 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15364 }
15365 }
15366 break;
15367
15368 case CONST_WIDE_INT:
15369 if (!dwarf_strict || dwarf_version >= 5)
15370 {
15371 dw_die_ref type_die;
15372
15373 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15374 if (type_die == NULL)
15375 return NULL;
15376 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15377 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15378 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15379 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15380 mem_loc_result->dw_loc_oprnd2.val_class
15381 = dw_val_class_wide_int;
15382 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15383 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15384 }
15385 break;
15386
15387 case EQ:
15388 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15389 break;
15390
15391 case GE:
15392 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15393 break;
15394
15395 case GT:
15396 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15397 break;
15398
15399 case LE:
15400 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15401 break;
15402
15403 case LT:
15404 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15405 break;
15406
15407 case NE:
15408 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15409 break;
15410
15411 case GEU:
15412 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15413 break;
15414
15415 case GTU:
15416 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15417 break;
15418
15419 case LEU:
15420 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15421 break;
15422
15423 case LTU:
15424 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15425 break;
15426
15427 case UMIN:
15428 case UMAX:
15429 if (!SCALAR_INT_MODE_P (mode))
15430 break;
15431 /* FALLTHRU */
15432 case SMIN:
15433 case SMAX:
15434 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15435 break;
15436
15437 case ZERO_EXTRACT:
15438 case SIGN_EXTRACT:
15439 if (CONST_INT_P (XEXP (rtl, 1))
15440 && CONST_INT_P (XEXP (rtl, 2))
15441 && is_a <scalar_int_mode> (mode, &int_mode)
15442 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
15443 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15444 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
15445 && ((unsigned) INTVAL (XEXP (rtl, 1))
15446 + (unsigned) INTVAL (XEXP (rtl, 2))
15447 <= GET_MODE_BITSIZE (int_mode)))
15448 {
15449 int shift, size;
15450 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15451 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15452 if (op0 == 0)
15453 break;
15454 if (GET_CODE (rtl) == SIGN_EXTRACT)
15455 op = DW_OP_shra;
15456 else
15457 op = DW_OP_shr;
15458 mem_loc_result = op0;
15459 size = INTVAL (XEXP (rtl, 1));
15460 shift = INTVAL (XEXP (rtl, 2));
15461 if (BITS_BIG_ENDIAN)
15462 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
15463 if (shift + size != (int) DWARF2_ADDR_SIZE)
15464 {
15465 add_loc_descr (&mem_loc_result,
15466 int_loc_descriptor (DWARF2_ADDR_SIZE
15467 - shift - size));
15468 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15469 }
15470 if (size != (int) DWARF2_ADDR_SIZE)
15471 {
15472 add_loc_descr (&mem_loc_result,
15473 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
15474 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15475 }
15476 }
15477 break;
15478
15479 case IF_THEN_ELSE:
15480 {
15481 dw_loc_descr_ref op2, bra_node, drop_node;
15482 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15483 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15484 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15485 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15486 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15487 VAR_INIT_STATUS_INITIALIZED);
15488 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15489 VAR_INIT_STATUS_INITIALIZED);
15490 if (op0 == NULL || op1 == NULL || op2 == NULL)
15491 break;
15492
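	/* Push the "then" value, the "else" value and then the condition;
	   DW_OP_bra skips the DW_OP_swap when the condition is non-zero, so
	   the final DW_OP_drop leaves the selected arm on the stack.  */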
15493 mem_loc_result = op1;
15494 add_loc_descr (&mem_loc_result, op2);
15495 add_loc_descr (&mem_loc_result, op0);
15496 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15497 add_loc_descr (&mem_loc_result, bra_node);
15498 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15499 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15500 add_loc_descr (&mem_loc_result, drop_node);
15501 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15502 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15503 }
15504 break;
15505
15506 case FLOAT_EXTEND:
15507 case FLOAT_TRUNCATE:
15508 case FLOAT:
15509 case UNSIGNED_FLOAT:
15510 case FIX:
15511 case UNSIGNED_FIX:
15512 if (!dwarf_strict || dwarf_version >= 5)
15513 {
15514 dw_die_ref type_die;
15515 dw_loc_descr_ref cvt;
15516
15517 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15518 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15519 if (op0 == NULL)
15520 break;
15521 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
15522 && (GET_CODE (rtl) == FLOAT
15523 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
15524 {
15525 type_die = base_type_for_mode (int_mode,
15526 GET_CODE (rtl) == UNSIGNED_FLOAT);
15527 if (type_die == NULL)
15528 break;
15529 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15530 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15531 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15532 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15533 add_loc_descr (&op0, cvt);
15534 }
15535 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15536 if (type_die == NULL)
15537 break;
15538 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15539 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15540 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15541 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15542 add_loc_descr (&op0, cvt);
15543 if (is_a <scalar_int_mode> (mode, &int_mode)
15544 && (GET_CODE (rtl) == FIX
15545 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
15546 {
15547 op0 = convert_descriptor_to_mode (int_mode, op0);
15548 if (op0 == NULL)
15549 break;
15550 }
15551 mem_loc_result = op0;
15552 }
15553 break;
15554
15555 case CLZ:
15556 case CTZ:
15557 case FFS:
15558 if (is_a <scalar_int_mode> (mode, &int_mode))
15559 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
15560 break;
15561
15562 case POPCOUNT:
15563 case PARITY:
15564 if (is_a <scalar_int_mode> (mode, &int_mode))
15565 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
15566 break;
15567
15568 case BSWAP:
15569 if (is_a <scalar_int_mode> (mode, &int_mode))
15570 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
15571 break;
15572
15573 case ROTATE:
15574 case ROTATERT:
15575 if (is_a <scalar_int_mode> (mode, &int_mode))
15576 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
15577 break;
15578
15579 case COMPARE:
15580 /* In theory, we could implement the above. */
15581 /* DWARF cannot represent the unsigned compare operations
15582 natively. */
15583 case SS_MULT:
15584 case US_MULT:
15585 case SS_DIV:
15586 case US_DIV:
15587 case SS_PLUS:
15588 case US_PLUS:
15589 case SS_MINUS:
15590 case US_MINUS:
15591 case SS_NEG:
15592 case US_NEG:
15593 case SS_ABS:
15594 case SS_ASHIFT:
15595 case US_ASHIFT:
15596 case SS_TRUNCATE:
15597 case US_TRUNCATE:
15598 case UNORDERED:
15599 case ORDERED:
15600 case UNEQ:
15601 case UNGE:
15602 case UNGT:
15603 case UNLE:
15604 case UNLT:
15605 case LTGT:
15606 case FRACT_CONVERT:
15607 case UNSIGNED_FRACT_CONVERT:
15608 case SAT_FRACT:
15609 case UNSIGNED_SAT_FRACT:
15610 case SQRT:
15611 case ASM_OPERANDS:
15612 case VEC_MERGE:
15613 case VEC_SELECT:
15614 case VEC_CONCAT:
15615 case VEC_DUPLICATE:
15616 case VEC_SERIES:
15617 case UNSPEC:
15618 case HIGH:
15619 case FMA:
15620 case STRICT_LOW_PART:
15621 case CONST_VECTOR:
15622 case CONST_FIXED:
15623 case CLRSB:
15624 case CLOBBER:
15625 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15626 can't express it in the debug info. This can happen e.g. with some
15627 TLS UNSPECs. */
15628 break;
15629
15630 case CONST_STRING:
15631 resolve_one_addr (&rtl);
15632 goto symref;
15633
15634     /* RTL sequences inside a PARALLEL record a series of DWARF operations for
15635        the expression.  An UNSPEC rtx represents a raw DWARF operation;
15636        new_loc_descr is called for it to build the operation directly.
15637        Otherwise mem_loc_descriptor is called recursively.  */
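    /* Concretely, each such UNSPEC element has the form
       (unspec [(const_int <oprnd1>) (const_int <oprnd2>)] <DW_OP_* code>):
       its vector holds the two operands and its UNSPEC number is the raw
       DWARF opcode, as decoded in the loop below.  */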
15638 case PARALLEL:
15639 {
15640 int index = 0;
15641 dw_loc_descr_ref exp_result = NULL;
15642
15643 for (; index < XVECLEN (rtl, 0); index++)
15644 {
15645 rtx elem = XVECEXP (rtl, 0, index);
15646 if (GET_CODE (elem) == UNSPEC)
15647 {
15648 		  /* Each DWARF operation UNSPEC contains two operands; if
15649 		     an operand is not used for the operation, const0_rtx is
15650 		     passed in its place.  */
15651 gcc_assert (XVECLEN (elem, 0) == 2);
15652
15653 HOST_WIDE_INT dw_op = XINT (elem, 1);
15654 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15655 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15656 exp_result
15657 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15658 oprnd2);
15659 }
15660 else
15661 exp_result
15662 = mem_loc_descriptor (elem, mode, mem_mode,
15663 VAR_INIT_STATUS_INITIALIZED);
15664
15665 if (!mem_loc_result)
15666 mem_loc_result = exp_result;
15667 else
15668 add_loc_descr (&mem_loc_result, exp_result);
15669 }
15670
15671 break;
15672 }
15673
15674 default:
15675 if (flag_checking)
15676 {
15677 print_rtl (stderr, rtl);
15678 gcc_unreachable ();
15679 }
15680 break;
15681 }
15682
15683 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15684 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15685
15686 return mem_loc_result;
15687 }
15688
15689 /* Return a descriptor that describes the concatenation of two locations.
15690 This is typically a complex variable. */
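/* For example (illustrative), a _Complex double whose real and imaginary
   parts live in two 8-byte registers is described by the composite
   expression DW_OP_reg<X> DW_OP_piece 8 DW_OP_reg<Y> DW_OP_piece 8.  */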
15691
15692 static dw_loc_descr_ref
15693 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15694 {
15695 dw_loc_descr_ref cc_loc_result = NULL;
15696 dw_loc_descr_ref x0_ref
15697 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15698 dw_loc_descr_ref x1_ref
15699 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15700
15701 if (x0_ref == 0 || x1_ref == 0)
15702 return 0;
15703
15704 cc_loc_result = x0_ref;
15705 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
15706
15707 add_loc_descr (&cc_loc_result, x1_ref);
15708 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
15709
15710 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15711 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15712
15713 return cc_loc_result;
15714 }
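/* A hedged sketch of the result: assuming DWARF register numbers 0 and 1 and
   8-byte doubles, a complex double whose real part lives in register 0 and
   imaginary part in register 1 would be described as
     DW_OP_reg0  DW_OP_piece 8  DW_OP_reg1  DW_OP_piece 8
   i.e. each half is described and then sized by the add_loc_descr_op_piece
   calls above.  */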
15715
15716 /* Return a descriptor that describes the concatenation of N
15717 locations. */
15718
15719 static dw_loc_descr_ref
15720 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15721 {
15722 unsigned int i;
15723 dw_loc_descr_ref cc_loc_result = NULL;
15724 unsigned int n = XVECLEN (concatn, 0);
15725
15726 for (i = 0; i < n; ++i)
15727 {
15728 dw_loc_descr_ref ref;
15729 rtx x = XVECEXP (concatn, 0, i);
15730
15731 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15732 if (ref == NULL)
15733 return NULL;
15734
15735 add_loc_descr (&cc_loc_result, ref);
15736 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
15737 }
15738
15739 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15740 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15741
15742 return cc_loc_result;
15743 }
15744
15745 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15746 for DEBUG_IMPLICIT_PTR RTL. */
15747
15748 static dw_loc_descr_ref
15749 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15750 {
15751 dw_loc_descr_ref ret;
15752 dw_die_ref ref;
15753
15754 if (dwarf_strict && dwarf_version < 5)
15755 return NULL;
15756 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15757 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15758 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15759 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15760 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15761 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15762 if (ref)
15763 {
15764 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15765 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15766 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15767 }
15768 else
15769 {
15770 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15771 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15772 }
15773 return ret;
15774 }
15775
15776 /* Output a proper Dwarf location descriptor for a variable or parameter
15777 which is either allocated in a register or in a memory location. For a
15778 register, we just generate an OP_REG and the register number. For a
15779 memory location we provide a Dwarf postfix expression describing how to
15780 generate the (dynamic) address of the object onto the address stack.
15781
15782 MODE is the mode of the decl if this loc_descriptor is going to be used in
15783 the .debug_loc section, where DW_OP_stack_value and DW_OP_implicit_value are
15784 allowed; VOIDmode otherwise.
15785
15786 If we don't know how to describe it, return 0. */
15787
15788 static dw_loc_descr_ref
15789 loc_descriptor (rtx rtl, machine_mode mode,
15790 enum var_init_status initialized)
15791 {
15792 dw_loc_descr_ref loc_result = NULL;
15793 scalar_int_mode int_mode;
15794
15795 switch (GET_CODE (rtl))
15796 {
15797 case SUBREG:
15798 /* The case of a subreg may arise when we have a local (register)
15799 variable or a formal (register) parameter which doesn't quite fill
15800 up an entire register. For now, just assume that it is
15801 legitimate to make the Dwarf info refer to the whole register which
15802 contains the given subreg. */
15803 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15804 loc_result = loc_descriptor (SUBREG_REG (rtl),
15805 GET_MODE (SUBREG_REG (rtl)), initialized);
15806 else
15807 goto do_default;
15808 break;
15809
15810 case REG:
15811 loc_result = reg_loc_descriptor (rtl, initialized);
15812 break;
15813
15814 case MEM:
15815 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15816 GET_MODE (rtl), initialized);
15817 if (loc_result == NULL)
15818 loc_result = tls_mem_loc_descriptor (rtl);
15819 if (loc_result == NULL)
15820 {
15821 rtx new_rtl = avoid_constant_pool_reference (rtl);
15822 if (new_rtl != rtl)
15823 loc_result = loc_descriptor (new_rtl, mode, initialized);
15824 }
15825 break;
15826
15827 case CONCAT:
15828 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15829 initialized);
15830 break;
15831
15832 case CONCATN:
15833 loc_result = concatn_loc_descriptor (rtl, initialized);
15834 break;
15835
15836 case VAR_LOCATION:
15837 /* Single part. */
15838 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15839 {
15840 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15841 if (GET_CODE (loc) == EXPR_LIST)
15842 loc = XEXP (loc, 0);
15843 loc_result = loc_descriptor (loc, mode, initialized);
15844 break;
15845 }
15846
15847 rtl = XEXP (rtl, 1);
15848 /* FALLTHRU */
15849
15850 case PARALLEL:
15851 {
15852 rtvec par_elems = XVEC (rtl, 0);
15853 int num_elem = GET_NUM_ELEM (par_elems);
15854 machine_mode mode;
15855 int i;
15856
15857 /* Create the first one, so we have something to add to. */
15858 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15859 VOIDmode, initialized);
15860 if (loc_result == NULL)
15861 return NULL;
15862 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15863 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15864 for (i = 1; i < num_elem; i++)
15865 {
15866 dw_loc_descr_ref temp;
15867
15868 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15869 VOIDmode, initialized);
15870 if (temp == NULL)
15871 return NULL;
15872 add_loc_descr (&loc_result, temp);
15873 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15874 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15875 }
15876 }
15877 break;
15878
15879 case CONST_INT:
15880 if (mode != VOIDmode && mode != BLKmode)
15881 {
15882 int_mode = as_a <scalar_int_mode> (mode);
15883 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
15884 INTVAL (rtl));
15885 }
15886 break;
15887
15888 case CONST_DOUBLE:
15889 if (mode == VOIDmode)
15890 mode = GET_MODE (rtl);
15891
15892 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15893 {
15894 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15895
15896 /* Note that a CONST_DOUBLE rtx could represent either an integer
15897 or a floating-point constant. A CONST_DOUBLE is used whenever
15898 the constant requires more than one word in order to be
15899 adequately represented. We output CONST_DOUBLEs as blocks. */
15900 scalar_mode smode = as_a <scalar_mode> (mode);
15901 loc_result = new_loc_descr (DW_OP_implicit_value,
15902 GET_MODE_SIZE (smode), 0);
15903 #if TARGET_SUPPORTS_WIDE_INT == 0
15904 if (!SCALAR_FLOAT_MODE_P (smode))
15905 {
15906 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15907 loc_result->dw_loc_oprnd2.v.val_double
15908 = rtx_to_double_int (rtl);
15909 }
15910 else
15911 #endif
15912 {
15913 unsigned int length = GET_MODE_SIZE (smode);
15914 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15915
15916 insert_float (rtl, array);
15917 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15918 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15919 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15920 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15921 }
15922 }
15923 break;
15924
15925 case CONST_WIDE_INT:
15926 if (mode == VOIDmode)
15927 mode = GET_MODE (rtl);
15928
15929 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15930 {
15931 int_mode = as_a <scalar_int_mode> (mode);
15932 loc_result = new_loc_descr (DW_OP_implicit_value,
15933 GET_MODE_SIZE (int_mode), 0);
15934 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
15935 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15936 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
15937 }
15938 break;
15939
15940 case CONST_VECTOR:
15941 if (mode == VOIDmode)
15942 mode = GET_MODE (rtl);
15943
15944 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15945 {
15946 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
15947 unsigned int length = CONST_VECTOR_NUNITS (rtl);
15948 unsigned char *array
15949 = ggc_vec_alloc<unsigned char> (length * elt_size);
15950 unsigned int i;
15951 unsigned char *p;
15952 machine_mode imode = GET_MODE_INNER (mode);
15953
15954 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15955 switch (GET_MODE_CLASS (mode))
15956 {
15957 case MODE_VECTOR_INT:
15958 for (i = 0, p = array; i < length; i++, p += elt_size)
15959 {
15960 rtx elt = CONST_VECTOR_ELT (rtl, i);
15961 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
15962 }
15963 break;
15964
15965 case MODE_VECTOR_FLOAT:
15966 for (i = 0, p = array; i < length; i++, p += elt_size)
15967 {
15968 rtx elt = CONST_VECTOR_ELT (rtl, i);
15969 insert_float (elt, p);
15970 }
15971 break;
15972
15973 default:
15974 gcc_unreachable ();
15975 }
15976
15977 loc_result = new_loc_descr (DW_OP_implicit_value,
15978 length * elt_size, 0);
15979 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15980 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
15981 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
15982 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15983 }
15984 break;
15985
15986 case CONST:
15987 if (mode == VOIDmode
15988 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
15989 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
15990 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
15991 {
15992 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
15993 break;
15994 }
15995 /* FALLTHROUGH */
15996 case SYMBOL_REF:
15997 if (!const_ok_for_output (rtl))
15998 break;
15999 /* FALLTHROUGH */
16000 case LABEL_REF:
16001 if (is_a <scalar_int_mode> (mode, &int_mode)
16002 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16003 && (dwarf_version >= 4 || !dwarf_strict))
16004 {
16005 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16006 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16007 vec_safe_push (used_rtx_array, rtl);
16008 }
16009 break;
16010
16011 case DEBUG_IMPLICIT_PTR:
16012 loc_result = implicit_ptr_descriptor (rtl, 0);
16013 break;
16014
16015 case PLUS:
16016 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16017 && CONST_INT_P (XEXP (rtl, 1)))
16018 {
16019 loc_result
16020 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16021 break;
16022 }
16023 /* FALLTHRU */
16024 do_default:
16025 default:
16026 if ((is_a <scalar_int_mode> (mode, &int_mode)
16027 && GET_MODE (rtl) == int_mode
16028 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16029 && dwarf_version >= 4)
16030 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16031 {
16032 /* Value expression. */
16033 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16034 if (loc_result)
16035 add_loc_descr (&loc_result,
16036 new_loc_descr (DW_OP_stack_value, 0, 0));
16037 }
16038 break;
16039 }
16040
16041 return loc_result;
16042 }
16043
16044 /* We need to figure out what section we should use as the base for the
16045 address ranges where a given location is valid.
16046 1. If this particular DECL has a section associated with it, use that.
16047 2. If this function has a section associated with it, use that.
16048 3. Otherwise, use the text section.
16049 XXX: If you split a variable across multiple sections, we won't notice. */
16050
16051 static const char *
16052 secname_for_decl (const_tree decl)
16053 {
16054 const char *secname;
16055
16056 if (VAR_OR_FUNCTION_DECL_P (decl)
16057 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16058 && DECL_SECTION_NAME (decl))
16059 secname = DECL_SECTION_NAME (decl);
16060 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16061 secname = DECL_SECTION_NAME (current_function_decl);
16062 else if (cfun && in_cold_section_p)
16063 secname = crtl->subsections.cold_section_label;
16064 else
16065 secname = text_section_label;
16066
16067 return secname;
16068 }
16069
16070 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16071
16072 static bool
16073 decl_by_reference_p (tree decl)
16074 {
16075 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16076 || VAR_P (decl))
16077 && DECL_BY_REFERENCE (decl));
16078 }
16079
16080 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16081 for VARLOC. */
16082
16083 static dw_loc_descr_ref
16084 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16085 enum var_init_status initialized)
16086 {
16087 int have_address = 0;
16088 dw_loc_descr_ref descr;
16089 machine_mode mode;
16090
16091 if (want_address != 2)
16092 {
16093 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16094 /* Single part. */
16095 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16096 {
16097 varloc = PAT_VAR_LOCATION_LOC (varloc);
16098 if (GET_CODE (varloc) == EXPR_LIST)
16099 varloc = XEXP (varloc, 0);
16100 mode = GET_MODE (varloc);
16101 if (MEM_P (varloc))
16102 {
16103 rtx addr = XEXP (varloc, 0);
16104 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16105 mode, initialized);
16106 if (descr)
16107 have_address = 1;
16108 else
16109 {
16110 rtx x = avoid_constant_pool_reference (varloc);
16111 if (x != varloc)
16112 descr = mem_loc_descriptor (x, mode, VOIDmode,
16113 initialized);
16114 }
16115 }
16116 else
16117 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16118 }
16119 else
16120 return 0;
16121 }
16122 else
16123 {
16124 if (GET_CODE (varloc) == VAR_LOCATION)
16125 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16126 else
16127 mode = DECL_MODE (loc);
16128 descr = loc_descriptor (varloc, mode, initialized);
16129 have_address = 1;
16130 }
16131
16132 if (!descr)
16133 return 0;
16134
16135 if (want_address == 2 && !have_address
16136 && (dwarf_version >= 4 || !dwarf_strict))
16137 {
16138 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16139 {
16140 expansion_failed (loc, NULL_RTX,
16141 "DWARF address size mismatch");
16142 return 0;
16143 }
16144 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16145 have_address = 1;
16146 }
16147 /* Show if we can't fill the request for an address. */
16148 if (want_address && !have_address)
16149 {
16150 expansion_failed (loc, NULL_RTX,
16151 "Want address and only have value");
16152 return 0;
16153 }
16154
16155 /* If we've got an address and don't want one, dereference. */
16156 if (!want_address && have_address)
16157 {
16158 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16159 enum dwarf_location_atom op;
16160
16161 if (size > DWARF2_ADDR_SIZE || size == -1)
16162 {
16163 expansion_failed (loc, NULL_RTX,
16164 "DWARF address size mismatch");
16165 return 0;
16166 }
16167 else if (size == DWARF2_ADDR_SIZE)
16168 op = DW_OP_deref;
16169 else
16170 op = DW_OP_deref_size;
16171
16172 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16173 }
16174
16175 return descr;
16176 }
16177
16178 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16179 if it is not possible. */
16180
16181 static dw_loc_descr_ref
16182 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16183 {
16184 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16185 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16186 else if (dwarf_version >= 3 || !dwarf_strict)
16187 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16188 else
16189 return NULL;
16190 }
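/* A quick sketch of the choice above: a byte-aligned piece, e.g. 32 bits at
   bit offset 0, becomes DW_OP_piece 4, whereas 12 bits at bit offset 4 needs
   DW_OP_bit_piece 12, 4, which is only emitted when targeting DWARF 3 or
   later or when strict DWARF is not requested.  */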
16191
16192 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16193 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16194
16195 static dw_loc_descr_ref
16196 dw_sra_loc_expr (tree decl, rtx loc)
16197 {
16198 rtx p;
16199 unsigned HOST_WIDE_INT padsize = 0;
16200 dw_loc_descr_ref descr, *descr_tail;
16201 unsigned HOST_WIDE_INT decl_size;
16202 rtx varloc;
16203 enum var_init_status initialized;
16204
16205 if (DECL_SIZE (decl) == NULL
16206 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16207 return NULL;
16208
16209 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16210 descr = NULL;
16211 descr_tail = &descr;
16212
16213 for (p = loc; p; p = XEXP (p, 1))
16214 {
16215 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16216 rtx loc_note = *decl_piece_varloc_ptr (p);
16217 dw_loc_descr_ref cur_descr;
16218 dw_loc_descr_ref *tail, last = NULL;
16219 unsigned HOST_WIDE_INT opsize = 0;
16220
16221 if (loc_note == NULL_RTX
16222 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16223 {
16224 padsize += bitsize;
16225 continue;
16226 }
16227 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16228 varloc = NOTE_VAR_LOCATION (loc_note);
16229 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16230 if (cur_descr == NULL)
16231 {
16232 padsize += bitsize;
16233 continue;
16234 }
16235
16236 /* Check that cur_descr either doesn't use
16237 DW_OP_*piece operations, or their sum is equal
16238 to bitsize. Otherwise we can't embed it. */
16239 for (tail = &cur_descr; *tail != NULL;
16240 tail = &(*tail)->dw_loc_next)
16241 if ((*tail)->dw_loc_opc == DW_OP_piece)
16242 {
16243 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16244 * BITS_PER_UNIT;
16245 last = *tail;
16246 }
16247 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16248 {
16249 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16250 last = *tail;
16251 }
16252
16253 if (last != NULL && opsize != bitsize)
16254 {
16255 padsize += bitsize;
16256 /* Discard the current piece of the descriptor and release any
16257 addr_table entries it uses. */
16258 remove_loc_list_addr_table_entries (cur_descr);
16259 continue;
16260 }
16261
16262 /* If there is a hole, add DW_OP_*piece after empty DWARF
16263 expression, which means that those bits are optimized out. */
16264 if (padsize)
16265 {
16266 if (padsize > decl_size)
16267 {
16268 remove_loc_list_addr_table_entries (cur_descr);
16269 goto discard_descr;
16270 }
16271 decl_size -= padsize;
16272 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16273 if (*descr_tail == NULL)
16274 {
16275 remove_loc_list_addr_table_entries (cur_descr);
16276 goto discard_descr;
16277 }
16278 descr_tail = &(*descr_tail)->dw_loc_next;
16279 padsize = 0;
16280 }
16281 *descr_tail = cur_descr;
16282 descr_tail = tail;
16283 if (bitsize > decl_size)
16284 goto discard_descr;
16285 decl_size -= bitsize;
16286 if (last == NULL)
16287 {
16288 HOST_WIDE_INT offset = 0;
16289 if (GET_CODE (varloc) == VAR_LOCATION
16290 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16291 {
16292 varloc = PAT_VAR_LOCATION_LOC (varloc);
16293 if (GET_CODE (varloc) == EXPR_LIST)
16294 varloc = XEXP (varloc, 0);
16295 }
16296 do
16297 {
16298 if (GET_CODE (varloc) == CONST
16299 || GET_CODE (varloc) == SIGN_EXTEND
16300 || GET_CODE (varloc) == ZERO_EXTEND)
16301 varloc = XEXP (varloc, 0);
16302 else if (GET_CODE (varloc) == SUBREG)
16303 varloc = SUBREG_REG (varloc);
16304 else
16305 break;
16306 }
16307 while (1);
16308 /* The DW_OP_bit_piece offset should be zero for register
16309 or implicit location descriptions and empty location
16310 descriptions, but for memory addresses it needs big-endian
16311 adjustment. */
16312 if (MEM_P (varloc))
16313 {
16314 unsigned HOST_WIDE_INT memsize
16315 = MEM_SIZE (varloc) * BITS_PER_UNIT;
16316 if (memsize != bitsize)
16317 {
16318 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16319 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16320 goto discard_descr;
16321 if (memsize < bitsize)
16322 goto discard_descr;
16323 if (BITS_BIG_ENDIAN)
16324 offset = memsize - bitsize;
16325 }
16326 }
16327
16328 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16329 if (*descr_tail == NULL)
16330 goto discard_descr;
16331 descr_tail = &(*descr_tail)->dw_loc_next;
16332 }
16333 }
16334
16335 /* If there were any non-empty expressions, add padding till the end of
16336 the decl. */
16337 if (descr != NULL && decl_size != 0)
16338 {
16339 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16340 if (*descr_tail == NULL)
16341 goto discard_descr;
16342 }
16343 return descr;
16344
16345 discard_descr:
16346 /* Discard the descriptor and release any addr_table entries it uses. */
16347 remove_loc_list_addr_table_entries (descr);
16348 return NULL;
16349 }
16350
16351 /* Return the dwarf representation of the location list LOC_LIST of
16352 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16353 function. */
16354
16355 static dw_loc_list_ref
16356 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16357 {
16358 const char *endname, *secname;
16359 rtx varloc;
16360 enum var_init_status initialized;
16361 struct var_loc_node *node;
16362 dw_loc_descr_ref descr;
16363 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16364 dw_loc_list_ref list = NULL;
16365 dw_loc_list_ref *listp = &list;
16366
16367 /* Now that we know what section we are using for a base,
16368 actually construct the list of locations.
16369 The first location information is what is passed to the
16370 function that creates the location list, and the remaining
16371 locations just get added on to that list.
16372 Note that we only know the start address for a location
16373 (i.e. location changes), so to build the range, we use
16374 the range [current location start, next location start].
16375 This means we have to special case the last node, and generate
16376 a range of [last location start, end of function label]. */
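  /* A hedged example of the resulting list: if a variable sits in register 3
     from label A to label B and in a frame slot from label B to the end of
     the function, the loop below builds roughly
       [A, B)        DW_OP_reg3
       [B, FUNC_END) DW_OP_fbreg <offset>
     where A, B and FUNC_END stand for the real assembler labels.  */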
16377
16378 if (cfun && crtl->has_bb_partition)
16379 {
16380 bool save_in_cold_section_p = in_cold_section_p;
16381 in_cold_section_p = first_function_block_is_cold;
16382 if (loc_list->last_before_switch == NULL)
16383 in_cold_section_p = !in_cold_section_p;
16384 secname = secname_for_decl (decl);
16385 in_cold_section_p = save_in_cold_section_p;
16386 }
16387 else
16388 secname = secname_for_decl (decl);
16389
16390 for (node = loc_list->first; node; node = node->next)
16391 {
16392 bool range_across_switch = false;
16393 if (GET_CODE (node->loc) == EXPR_LIST
16394 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16395 {
16396 if (GET_CODE (node->loc) == EXPR_LIST)
16397 {
16398 descr = NULL;
16399 /* This requires DW_OP_{,bit_}piece, which is not usable
16400 inside DWARF expressions. */
16401 if (want_address == 2)
16402 descr = dw_sra_loc_expr (decl, node->loc);
16403 }
16404 else
16405 {
16406 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16407 varloc = NOTE_VAR_LOCATION (node->loc);
16408 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16409 }
16410 if (descr)
16411 {
16412 /* If a section switch happens in between node->label
16413 and node->next->label (or the end of the function) and
16414 we can't emit it as a single entry list,
16415 emit two ranges: the first one ending at the end
16416 of the first partition and the second one starting at the
16417 beginning of the second partition. */
16418 if (node == loc_list->last_before_switch
16419 && (node != loc_list->first || loc_list->first->next)
16420 && current_function_decl)
16421 {
16422 endname = cfun->fde->dw_fde_end;
16423 range_across_switch = true;
16424 }
16425 /* The variable has a location between NODE->LABEL and
16426 NODE->NEXT->LABEL. */
16427 else if (node->next)
16428 endname = node->next->label;
16429 /* If the variable has a location at the last label
16430 it keeps its location until the end of function. */
16431 else if (!current_function_decl)
16432 endname = text_end_label;
16433 else
16434 {
16435 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16436 current_function_funcdef_no);
16437 endname = ggc_strdup (label_id);
16438 }
16439
16440 *listp = new_loc_list (descr, node->label, endname, secname);
16441 if (TREE_CODE (decl) == PARM_DECL
16442 && node == loc_list->first
16443 && NOTE_P (node->loc)
16444 && strcmp (node->label, endname) == 0)
16445 (*listp)->force = true;
16446 listp = &(*listp)->dw_loc_next;
16447 }
16448 }
16449
16450 if (cfun
16451 && crtl->has_bb_partition
16452 && node == loc_list->last_before_switch)
16453 {
16454 bool save_in_cold_section_p = in_cold_section_p;
16455 in_cold_section_p = !first_function_block_is_cold;
16456 secname = secname_for_decl (decl);
16457 in_cold_section_p = save_in_cold_section_p;
16458 }
16459
16460 if (range_across_switch)
16461 {
16462 if (GET_CODE (node->loc) == EXPR_LIST)
16463 descr = dw_sra_loc_expr (decl, node->loc);
16464 else
16465 {
16466 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16467 varloc = NOTE_VAR_LOCATION (node->loc);
16468 descr = dw_loc_list_1 (decl, varloc, want_address,
16469 initialized);
16470 }
16471 gcc_assert (descr);
16472 /* The variable has a location between NODE->LABEL and
16473 NODE->NEXT->LABEL. */
16474 if (node->next)
16475 endname = node->next->label;
16476 else
16477 endname = cfun->fde->dw_fde_second_end;
16478 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin,
16479 endname, secname);
16480 listp = &(*listp)->dw_loc_next;
16481 }
16482 }
16483
16484 /* Try to avoid the overhead of a location list by emitting a location
16485 expression instead, but only if we didn't have more than one
16486 location entry in the first place. If some entries were not
16487 representable, we don't want to pretend that a single entry that was
16488 representable applies to the entire scope in which the variable is
16489 available. */
16490 if (list && loc_list->first->next)
16491 gen_llsym (list);
16492
16493 return list;
16494 }
16495
16496 /* Return true if the loc_list has only a single element and thus can be
16497 represented as a location description. */
16498
16499 static bool
16500 single_element_loc_list_p (dw_loc_list_ref list)
16501 {
16502 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16503 return !list->ll_symbol;
16504 }
16505
16506 /* Duplicate a single element of location list. */
16507
16508 static inline dw_loc_descr_ref
16509 copy_loc_descr (dw_loc_descr_ref ref)
16510 {
16511 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
16512 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16513 return copy;
16514 }
16515
16516 /* To each location in list LIST append loc descr REF. */
16517
16518 static void
16519 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16520 {
16521 dw_loc_descr_ref copy;
16522 add_loc_descr (&list->expr, ref);
16523 list = list->dw_loc_next;
16524 while (list)
16525 {
16526 copy = copy_loc_descr (ref);
16527 add_loc_descr (&list->expr, copy);
16528 while (copy->dw_loc_next)
16529 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16530 list = list->dw_loc_next;
16531 }
16532 }
16533
16534 /* To each location in list LIST prepend loc descr REF. */
16535
16536 static void
16537 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16538 {
16539 dw_loc_descr_ref copy;
16540 dw_loc_descr_ref ref_end = list->expr;
16541 add_loc_descr (&ref, list->expr);
16542 list->expr = ref;
16543 list = list->dw_loc_next;
16544 while (list)
16545 {
16546 dw_loc_descr_ref end = list->expr;
16547 list->expr = copy = copy_loc_descr (ref);
16548 while (copy->dw_loc_next != ref_end)
16549 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16550 copy->dw_loc_next = end;
16551 list = list->dw_loc_next;
16552 }
16553 }
16554
16555 /* Given two lists RET and LIST,
16556 produce a location list that is the result of adding the expression in LIST
16557 to the expression in RET at each position in the program.
16558 Might be destructive on both RET and LIST.
16559
16560 TODO: We handle only the simple cases of RET or LIST having at most one
16561 element. The general case would involve sorting the lists in program order
16562 and merging them, which will need some additional work.
16563 Adding that will improve the quality of debug info, especially for SRA-ed
16564 structures. */
16565
16566 static void
16567 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16568 {
16569 if (!list)
16570 return;
16571 if (!*ret)
16572 {
16573 *ret = list;
16574 return;
16575 }
16576 if (!list->dw_loc_next)
16577 {
16578 add_loc_descr_to_each (*ret, list->expr);
16579 return;
16580 }
16581 if (!(*ret)->dw_loc_next)
16582 {
16583 prepend_loc_descr_to_each (list, (*ret)->expr);
16584 *ret = list;
16585 return;
16586 }
16587 expansion_failed (NULL_TREE, NULL_RTX,
16588 "Don't know how to merge two non-trivial"
16589 " location lists.\n");
16590 *ret = NULL;
16591 return;
16592 }
16593
16594 /* LOC is a constant expression. Try our luck: look it up in the constant
16595 pool and return the loc_descr of its address. */
16596
16597 static dw_loc_descr_ref
16598 cst_pool_loc_descr (tree loc)
16599 {
16600 /* Get an RTL for this, if something has been emitted. */
16601 rtx rtl = lookup_constant_def (loc);
16602
16603 if (!rtl || !MEM_P (rtl))
16604 {
16605 gcc_assert (!rtl);
16606 return 0;
16607 }
16608 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16609
16610 /* TODO: We might get more coverage if we were actually delaying expansion
16611 of all expressions until the end of compilation, when constant pools are
16612 fully populated. */
16613 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16614 {
16615 expansion_failed (loc, NULL_RTX,
16616 "CST value in constant pool but not marked.");
16617 return 0;
16618 }
16619 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16620 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16621 }
16622
16623 /* Return a dw_loc_list representing the address of the addr_expr LOC
16624 by looking for an inner INDIRECT_REF expression and turning
16625 it into simple arithmetic.
16626
16627 See loc_list_from_tree for the meaning of CONTEXT. */
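/* A sketch of the typical rewrite, assuming P is a pointer whose location is
   known: the address of P->field at byte offset 8, i.e. &((*P).field), is
   expressed as "value of P, plus 8":
     <location list for P>  DW_OP_plus_uconst 8  DW_OP_stack_value
   which is the DW_OP_plus_uconst / DW_OP_stack_value sequence appended
   below.  */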
16628
16629 static dw_loc_list_ref
16630 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16631 loc_descr_context *context)
16632 {
16633 tree obj, offset;
16634 HOST_WIDE_INT bitsize, bitpos, bytepos;
16635 machine_mode mode;
16636 int unsignedp, reversep, volatilep = 0;
16637 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16638
16639 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16640 &bitsize, &bitpos, &offset, &mode,
16641 &unsignedp, &reversep, &volatilep);
16642 STRIP_NOPS (obj);
16643 if (bitpos % BITS_PER_UNIT)
16644 {
16645 expansion_failed (loc, NULL_RTX, "bitfield access");
16646 return 0;
16647 }
16648 if (!INDIRECT_REF_P (obj))
16649 {
16650 expansion_failed (obj,
16651 NULL_RTX, "no indirect ref in inner reference");
16652 return 0;
16653 }
16654 if (!offset && !bitpos)
16655 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16656 context);
16657 else if (toplev
16658 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16659 && (dwarf_version >= 4 || !dwarf_strict))
16660 {
16661 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16662 if (!list_ret)
16663 return 0;
16664 if (offset)
16665 {
16666 /* Variable offset. */
16667 list_ret1 = loc_list_from_tree (offset, 0, context);
16668 if (list_ret1 == 0)
16669 return 0;
16670 add_loc_list (&list_ret, list_ret1);
16671 if (!list_ret)
16672 return 0;
16673 add_loc_descr_to_each (list_ret,
16674 new_loc_descr (DW_OP_plus, 0, 0));
16675 }
16676 bytepos = bitpos / BITS_PER_UNIT;
16677 if (bytepos > 0)
16678 add_loc_descr_to_each (list_ret,
16679 new_loc_descr (DW_OP_plus_uconst,
16680 bytepos, 0));
16681 else if (bytepos < 0)
16682 loc_list_plus_const (list_ret, bytepos);
16683 add_loc_descr_to_each (list_ret,
16684 new_loc_descr (DW_OP_stack_value, 0, 0));
16685 }
16686 return list_ret;
16687 }
16688
16689 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
16690 all operations from LOC are nops, move to the last one. Insert in NOPS all
16691 operations that are skipped. */
16692
16693 static void
16694 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16695 hash_set<dw_loc_descr_ref> &nops)
16696 {
16697 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16698 {
16699 nops.add (loc);
16700 loc = loc->dw_loc_next;
16701 }
16702 }
16703
16704 /* Helper for loc_descr_without_nops: free the location description operation
16705 LOC. */
16706
16707 bool
16708 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16709 {
16710 ggc_free (loc);
16711 return true;
16712 }
16713
16714 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16715 finishes LOC. */
16716
16717 static void
16718 loc_descr_without_nops (dw_loc_descr_ref &loc)
16719 {
16720 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16721 return;
16722
16723 /* Set of all DW_OP_nop operations we remove. */
16724 hash_set<dw_loc_descr_ref> nops;
16725
16726 /* First, strip all prefix NOP operations in order to keep the head of the
16727 operations list. */
16728 loc_descr_to_next_no_nop (loc, nops);
16729
16730 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16731 {
16732 /* For control flow operations: strip "prefix" nops in destination
16733 labels. */
16734 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16735 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16736 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16737 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16738
16739 /* Do the same for the operations that follow, then move to the next
16740 iteration. */
16741 if (cur->dw_loc_next != NULL)
16742 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16743 cur = cur->dw_loc_next;
16744 }
16745
16746 nops.traverse<void *, free_loc_descr> (NULL);
16747 }
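/* For example, a list such as
     DW_OP_nop  DW_OP_lit1  DW_OP_nop  DW_OP_plus
   is rewritten above into
     DW_OP_lit1  DW_OP_plus
   with branch targets redirected past the removed nops; only a DW_OP_nop
   that terminates the whole expression is kept.  */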
16748
16749
16750 struct dwarf_procedure_info;
16751
16752 /* Helper structure for location descriptions generation. */
16753 struct loc_descr_context
16754 {
16755 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16756 NULL_TREE if DW_OP_push_object_address is invalid for this location
16757 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16758 tree context_type;
16759 /* The ..._DECL node that should be translated as a
16760 DW_OP_push_object_address operation. */
16761 tree base_decl;
16762 /* Information about the DWARF procedure we are currently generating. NULL if
16763 we are not generating a DWARF procedure. */
16764 struct dwarf_procedure_info *dpi;
16765 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
16766 by the consumer. Used for DW_TAG_generic_subrange attributes. */
16767 bool placeholder_arg;
16768 /* True if PLACEHOLDER_EXPR has been seen. */
16769 bool placeholder_seen;
16770 };
16771
16772 /* DWARF procedures generation
16773
16774 DWARF expressions (aka. location descriptions) are used to encode variable
16775 things such as sizes or offsets. Such computations can have redundant parts
16776 that can be factorized in order to reduce the size of the output debug
16777 information. This is the whole point of DWARF procedures.
16778
16779 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16780 already factorized into functions ("size functions") in order to handle very
16781 big and complex types. Such functions are quite simple: they have integral
16782 arguments, they return an integral result and their body contains only a
16783 return statement with arithmetic expressions. This is the only kind of
16784 function we are interested in translating into DWARF procedures here.
16785
16786 DWARF expressions and DWARF procedures are executed using a stack, so we have
16787 to define some calling convention for them to interact. Let's say that:
16788
16789 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16790 all arguments in reverse order (right-to-left) so that when the DWARF
16791 procedure execution starts, the first argument is the top of the stack.
16792
16793 - Then, when returning, the DWARF procedure must have consumed all arguments
16794 on the stack, must have pushed the result and touched nothing else.
16795
16796 - Each integral argument and the result are of integral types that can be
16797 held in a single stack slot.
16798
16799 - We call "frame offset" the number of stack slots that are "under DWARF
16800 procedure control": it includes the argument slots, the temporaries and
16801 the result slot. Thus, it is equal to the number of arguments when the
16802 procedure execution starts and must be equal to one (the result) when it
16803 returns. */
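/* A small worked example of this convention (for a hypothetical size
   function computing ARG0 + ARG1): the caller pushes ARG1 then ARG0, so
   execution starts with a frame offset of 2 and ARG0 on top.  The body could
   then be
     DW_OP_over  DW_OP_over  DW_OP_plus   (frame offset: 2 -> 3 -> 4 -> 3)
   followed by the epilogue built in function_to_dwarf_procedure, one
   DW_OP_swap / DW_OP_drop pair per argument, which leaves only the result
   and a final frame offset of 1.  */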
16804
16805 /* Helper structure used when generating operations for a DWARF procedure. */
16806 struct dwarf_procedure_info
16807 {
16808 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16809 currently translated. */
16810 tree fndecl;
16811 /* The number of arguments FNDECL takes. */
16812 unsigned args_count;
16813 };
16814
16815 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16816 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16817 equate it to this DIE. */
16818
16819 static dw_die_ref
16820 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16821 dw_die_ref parent_die)
16822 {
16823 dw_die_ref dwarf_proc_die;
16824
16825 if ((dwarf_version < 3 && dwarf_strict)
16826 || location == NULL)
16827 return NULL;
16828
16829 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16830 if (fndecl)
16831 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16832 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16833 return dwarf_proc_die;
16834 }
16835
16836 /* Return whether TYPE is a supported type as a DWARF procedure argument
16837 type or return type (we handle only scalar types and pointer types that
16838 aren't wider than the DWARF expression evaluation stack). */
16839
16840 static bool
16841 is_handled_procedure_type (tree type)
16842 {
16843 return ((INTEGRAL_TYPE_P (type)
16844 || TREE_CODE (type) == OFFSET_TYPE
16845 || TREE_CODE (type) == POINTER_TYPE)
16846 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16847 }
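/* Informally: integer, boolean, enumeration, offset and pointer types no
   wider than the DWARF address size qualify, whereas aggregates, floating
   point types and (on a 64-bit target, say) 128-bit integers do not.  */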
16848
16849 /* Helper for resolve_args_picking: do the same but stop when coming across
16850 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16851 offset *before* evaluating the corresponding operation. */
16852
16853 static bool
16854 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16855 struct dwarf_procedure_info *dpi,
16856 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16857 {
16858 /* The "frame_offset" identifier is already used to name a macro... */
16859 unsigned frame_offset_ = initial_frame_offset;
16860 dw_loc_descr_ref l;
16861
16862 for (l = loc; l != NULL;)
16863 {
16864 bool existed;
16865 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16866
16867 /* If we already met this node, there is nothing to compute anymore. */
16868 if (existed)
16869 {
16870 /* Make sure that the stack size is consistent wherever the execution
16871 flow comes from. */
16872 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16873 break;
16874 }
16875 l_frame_offset = frame_offset_;
16876
16877 /* If needed, relocate the picking offset with respect to the frame
16878 offset. */
16879 if (l->frame_offset_rel)
16880 {
16881 unsigned HOST_WIDE_INT off;
16882 switch (l->dw_loc_opc)
16883 {
16884 case DW_OP_pick:
16885 off = l->dw_loc_oprnd1.v.val_unsigned;
16886 break;
16887 case DW_OP_dup:
16888 off = 0;
16889 break;
16890 case DW_OP_over:
16891 off = 1;
16892 break;
16893 default:
16894 gcc_unreachable ();
16895 }
16896 /* frame_offset_ is the size of the current stack frame, including
16897 incoming arguments. Besides, the arguments are pushed
16898 right-to-left. Thus, in order to access the Nth argument from
16899 this operation node, the picking has to skip temporaries *plus*
16900 one stack slot per argument (0 for the first one, 1 for the second
16901 one, etc.).
16902
16903 The targetted argument number (N) is already set as the operand,
16904 and the number of temporaries can be computed with:
16905 frame_offsets_ - dpi->args_count */
16906 off += frame_offset_ - dpi->args_count;
16907
16908 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
16909 if (off > 255)
16910 return false;
16911
16912 if (off == 0)
16913 {
16914 l->dw_loc_opc = DW_OP_dup;
16915 l->dw_loc_oprnd1.v.val_unsigned = 0;
16916 }
16917 else if (off == 1)
16918 {
16919 l->dw_loc_opc = DW_OP_over;
16920 l->dw_loc_oprnd1.v.val_unsigned = 0;
16921 }
16922 else
16923 {
16924 l->dw_loc_opc = DW_OP_pick;
16925 l->dw_loc_oprnd1.v.val_unsigned = off;
16926 }
16927 }
16928
16929 /* Update frame_offset according to the effect the current operation has
16930 on the stack. */
16931 switch (l->dw_loc_opc)
16932 {
16933 case DW_OP_deref:
16934 case DW_OP_swap:
16935 case DW_OP_rot:
16936 case DW_OP_abs:
16937 case DW_OP_neg:
16938 case DW_OP_not:
16939 case DW_OP_plus_uconst:
16940 case DW_OP_skip:
16941 case DW_OP_reg0:
16942 case DW_OP_reg1:
16943 case DW_OP_reg2:
16944 case DW_OP_reg3:
16945 case DW_OP_reg4:
16946 case DW_OP_reg5:
16947 case DW_OP_reg6:
16948 case DW_OP_reg7:
16949 case DW_OP_reg8:
16950 case DW_OP_reg9:
16951 case DW_OP_reg10:
16952 case DW_OP_reg11:
16953 case DW_OP_reg12:
16954 case DW_OP_reg13:
16955 case DW_OP_reg14:
16956 case DW_OP_reg15:
16957 case DW_OP_reg16:
16958 case DW_OP_reg17:
16959 case DW_OP_reg18:
16960 case DW_OP_reg19:
16961 case DW_OP_reg20:
16962 case DW_OP_reg21:
16963 case DW_OP_reg22:
16964 case DW_OP_reg23:
16965 case DW_OP_reg24:
16966 case DW_OP_reg25:
16967 case DW_OP_reg26:
16968 case DW_OP_reg27:
16969 case DW_OP_reg28:
16970 case DW_OP_reg29:
16971 case DW_OP_reg30:
16972 case DW_OP_reg31:
16973 case DW_OP_bregx:
16974 case DW_OP_piece:
16975 case DW_OP_deref_size:
16976 case DW_OP_nop:
16977 case DW_OP_bit_piece:
16978 case DW_OP_implicit_value:
16979 case DW_OP_stack_value:
16980 break;
16981
16982 case DW_OP_addr:
16983 case DW_OP_const1u:
16984 case DW_OP_const1s:
16985 case DW_OP_const2u:
16986 case DW_OP_const2s:
16987 case DW_OP_const4u:
16988 case DW_OP_const4s:
16989 case DW_OP_const8u:
16990 case DW_OP_const8s:
16991 case DW_OP_constu:
16992 case DW_OP_consts:
16993 case DW_OP_dup:
16994 case DW_OP_over:
16995 case DW_OP_pick:
16996 case DW_OP_lit0:
16997 case DW_OP_lit1:
16998 case DW_OP_lit2:
16999 case DW_OP_lit3:
17000 case DW_OP_lit4:
17001 case DW_OP_lit5:
17002 case DW_OP_lit6:
17003 case DW_OP_lit7:
17004 case DW_OP_lit8:
17005 case DW_OP_lit9:
17006 case DW_OP_lit10:
17007 case DW_OP_lit11:
17008 case DW_OP_lit12:
17009 case DW_OP_lit13:
17010 case DW_OP_lit14:
17011 case DW_OP_lit15:
17012 case DW_OP_lit16:
17013 case DW_OP_lit17:
17014 case DW_OP_lit18:
17015 case DW_OP_lit19:
17016 case DW_OP_lit20:
17017 case DW_OP_lit21:
17018 case DW_OP_lit22:
17019 case DW_OP_lit23:
17020 case DW_OP_lit24:
17021 case DW_OP_lit25:
17022 case DW_OP_lit26:
17023 case DW_OP_lit27:
17024 case DW_OP_lit28:
17025 case DW_OP_lit29:
17026 case DW_OP_lit30:
17027 case DW_OP_lit31:
17028 case DW_OP_breg0:
17029 case DW_OP_breg1:
17030 case DW_OP_breg2:
17031 case DW_OP_breg3:
17032 case DW_OP_breg4:
17033 case DW_OP_breg5:
17034 case DW_OP_breg6:
17035 case DW_OP_breg7:
17036 case DW_OP_breg8:
17037 case DW_OP_breg9:
17038 case DW_OP_breg10:
17039 case DW_OP_breg11:
17040 case DW_OP_breg12:
17041 case DW_OP_breg13:
17042 case DW_OP_breg14:
17043 case DW_OP_breg15:
17044 case DW_OP_breg16:
17045 case DW_OP_breg17:
17046 case DW_OP_breg18:
17047 case DW_OP_breg19:
17048 case DW_OP_breg20:
17049 case DW_OP_breg21:
17050 case DW_OP_breg22:
17051 case DW_OP_breg23:
17052 case DW_OP_breg24:
17053 case DW_OP_breg25:
17054 case DW_OP_breg26:
17055 case DW_OP_breg27:
17056 case DW_OP_breg28:
17057 case DW_OP_breg29:
17058 case DW_OP_breg30:
17059 case DW_OP_breg31:
17060 case DW_OP_fbreg:
17061 case DW_OP_push_object_address:
17062 case DW_OP_call_frame_cfa:
17063 case DW_OP_GNU_variable_value:
17064 ++frame_offset_;
17065 break;
17066
17067 case DW_OP_drop:
17068 case DW_OP_xderef:
17069 case DW_OP_and:
17070 case DW_OP_div:
17071 case DW_OP_minus:
17072 case DW_OP_mod:
17073 case DW_OP_mul:
17074 case DW_OP_or:
17075 case DW_OP_plus:
17076 case DW_OP_shl:
17077 case DW_OP_shr:
17078 case DW_OP_shra:
17079 case DW_OP_xor:
17080 case DW_OP_bra:
17081 case DW_OP_eq:
17082 case DW_OP_ge:
17083 case DW_OP_gt:
17084 case DW_OP_le:
17085 case DW_OP_lt:
17086 case DW_OP_ne:
17087 case DW_OP_regx:
17088 case DW_OP_xderef_size:
17089 --frame_offset_;
17090 break;
17091
17092 case DW_OP_call2:
17093 case DW_OP_call4:
17094 case DW_OP_call_ref:
17095 {
17096 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17097 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17098
17099 if (stack_usage == NULL)
17100 return false;
17101 frame_offset_ += *stack_usage;
17102 break;
17103 }
17104
17105 case DW_OP_implicit_pointer:
17106 case DW_OP_entry_value:
17107 case DW_OP_const_type:
17108 case DW_OP_regval_type:
17109 case DW_OP_deref_type:
17110 case DW_OP_convert:
17111 case DW_OP_reinterpret:
17112 case DW_OP_form_tls_address:
17113 case DW_OP_GNU_push_tls_address:
17114 case DW_OP_GNU_uninit:
17115 case DW_OP_GNU_encoded_addr:
17116 case DW_OP_GNU_implicit_pointer:
17117 case DW_OP_GNU_entry_value:
17118 case DW_OP_GNU_const_type:
17119 case DW_OP_GNU_regval_type:
17120 case DW_OP_GNU_deref_type:
17121 case DW_OP_GNU_convert:
17122 case DW_OP_GNU_reinterpret:
17123 case DW_OP_GNU_parameter_ref:
17124 /* loc_list_from_tree will probably not output these operations for
17125 size functions, so assume they will not appear here. */
17126 /* Fall through... */
17127
17128 default:
17129 gcc_unreachable ();
17130 }
17131
17132 /* Now, follow the control flow (except subroutine calls). */
17133 switch (l->dw_loc_opc)
17134 {
17135 case DW_OP_bra:
17136 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17137 frame_offsets))
17138 return false;
17139 /* Fall through. */
17140
17141 case DW_OP_skip:
17142 l = l->dw_loc_oprnd1.v.val_loc;
17143 break;
17144
17145 case DW_OP_stack_value:
17146 return true;
17147
17148 default:
17149 l = l->dw_loc_next;
17150 break;
17151 }
17152 }
17153
17154 return true;
17155 }
17156
17157 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17158 operations) in order to resolve the operand of DW_OP_pick operations that
17159 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17160 offset *before* LOC is executed. Return whether all relocations were
17161 successful. */
17162
17163 static bool
17164 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17165 struct dwarf_procedure_info *dpi)
17166 {
17167 /* Associate to all visited operations the frame offset *before* evaluating
17168 this operation. */
17169 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17170
17171 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17172 frame_offsets);
17173 }
17174
17175 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17176 Return NULL if it is not possible. */
17177
17178 static dw_die_ref
17179 function_to_dwarf_procedure (tree fndecl)
17180 {
17181 struct loc_descr_context ctx;
17182 struct dwarf_procedure_info dpi;
17183 dw_die_ref dwarf_proc_die;
17184 tree tree_body = DECL_SAVED_TREE (fndecl);
17185 dw_loc_descr_ref loc_body, epilogue;
17186
17187 tree cursor;
17188 unsigned i;
17189
17190 /* Do not generate multiple DWARF procedures for the same function
17191 declaration. */
17192 dwarf_proc_die = lookup_decl_die (fndecl);
17193 if (dwarf_proc_die != NULL)
17194 return dwarf_proc_die;
17195
17196 /* DWARF procedures are available starting with the DWARFv3 standard. */
17197 if (dwarf_version < 3 && dwarf_strict)
17198 return NULL;
17199
17200 /* We handle only functions for which we still have a body, that return a
17201 supported type and that take arguments with supported types. Note that
17202 there is no point translating functions that return nothing. */
17203 if (tree_body == NULL_TREE
17204 || DECL_RESULT (fndecl) == NULL_TREE
17205 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17206 return NULL;
17207
17208 for (cursor = DECL_ARGUMENTS (fndecl);
17209 cursor != NULL_TREE;
17210 cursor = TREE_CHAIN (cursor))
17211 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17212 return NULL;
17213
17214 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17215 if (TREE_CODE (tree_body) != RETURN_EXPR)
17216 return NULL;
17217 tree_body = TREE_OPERAND (tree_body, 0);
17218 if (TREE_CODE (tree_body) != MODIFY_EXPR
17219 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17220 return NULL;
17221 tree_body = TREE_OPERAND (tree_body, 1);
17222
17223 /* Try to translate the body expression itself. Note that this will probably
17224 cause an infinite recursion if its call graph has a cycle. This is very
17225 unlikely for size functions, however, so don't bother with such things at
17226 the moment. */
17227 ctx.context_type = NULL_TREE;
17228 ctx.base_decl = NULL_TREE;
17229 ctx.dpi = &dpi;
17230 ctx.placeholder_arg = false;
17231 ctx.placeholder_seen = false;
17232 dpi.fndecl = fndecl;
17233 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17234 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17235 if (!loc_body)
17236 return NULL;
17237
17238 /* After evaluating all operands in "loc_body", we should still have on the
17239 stack all arguments plus the desired function result (top of the stack).
17240 Generate code in order to keep only the result in our stack frame. */
17241 epilogue = NULL;
17242 for (i = 0; i < dpi.args_count; ++i)
17243 {
17244 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17245 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17246 op_couple->dw_loc_next->dw_loc_next = epilogue;
17247 epilogue = op_couple;
17248 }
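  /* E.g. for a two-argument procedure, the epilogue built above is
     DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop: each pair discards the
     stack slot just below the result.  */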
17249 add_loc_descr (&loc_body, epilogue);
17250 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17251 return NULL;
17252
17253 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17254 because they are considered useful. Now that there is an epilogue, they
17255 are not anymore, so give it another try. */
17256 loc_descr_without_nops (loc_body);
17257
17258 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17259 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17260 though, given that size functions do not come from source, so they should
17261 not have a dedicated DW_TAG_subprogram DIE. */
17262 dwarf_proc_die
17263 = new_dwarf_proc_die (loc_body, fndecl,
17264 get_context_die (DECL_CONTEXT (fndecl)));
17265
17266 /* The called DWARF procedure consumes one stack slot per argument and
17267 returns one stack slot. */
17268 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17269
17270 return dwarf_proc_die;
17271 }
17272
17273
17274 /* Generate Dwarf location list representing LOC.
17275 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
17276 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
17277 If WANT_ADDRESS is 2, an expression computing an address usable in a
17278 location description is returned (i.e. DW_OP_reg can be used
17279 to refer to register values).
17280
17281 CONTEXT provides information to customize the location descriptions
17282 generation. Its context_type field specifies what type is implicitly
17283 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17284 will not be generated.
17285
17286 Its DPI field determines whether we are generating a DWARF expression for a
17287 DWARF procedure, in which case PARM_DECL references are processed specially.
17288
17289 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17290 and dpi fields were null. */
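/* A hedged illustration of WANT_ADDRESS: for a variable stored in memory at
   frame offset -16, WANT_ADDRESS of 1 or 2 would typically yield
     DW_OP_fbreg -16
   while WANT_ADDRESS of 0 asks for the value, so the caller ends up with
     DW_OP_fbreg -16  DW_OP_deref
   (the exact operations depend on where the variable actually lives).  */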
17291
17292 static dw_loc_list_ref
17293 loc_list_from_tree_1 (tree loc, int want_address,
17294 struct loc_descr_context *context)
17295 {
17296 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17297 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17298 int have_address = 0;
17299 enum dwarf_location_atom op;
17300
17301 /* ??? Most of the time we do not take proper care of sign/zero
17302 extending the values. Hopefully this won't be a real
17303 problem... */
17304
17305 if (context != NULL
17306 && context->base_decl == loc
17307 && want_address == 0)
17308 {
17309 if (dwarf_version >= 3 || !dwarf_strict)
17310 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17311 NULL, NULL, NULL);
17312 else
17313 return NULL;
17314 }
17315
17316 switch (TREE_CODE (loc))
17317 {
17318 case ERROR_MARK:
17319 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17320 return 0;
17321
17322 case PLACEHOLDER_EXPR:
17323 /* This case involves extracting fields from an object to determine the
17324 position of other fields. It is supposed to appear only as the first
17325 operand of COMPONENT_REF nodes and to reference precisely the type
17326 that the context allows. */
17327 if (context != NULL
17328 && TREE_TYPE (loc) == context->context_type
17329 && want_address >= 1)
17330 {
17331 if (dwarf_version >= 3 || !dwarf_strict)
17332 {
17333 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17334 have_address = 1;
17335 break;
17336 }
17337 else
17338 return NULL;
17339 }
17340 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17341 the single argument passed by the consumer. */
17342 else if (context != NULL
17343 && context->placeholder_arg
17344 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17345 && want_address == 0)
17346 {
17347 ret = new_loc_descr (DW_OP_pick, 0, 0);
17348 ret->frame_offset_rel = 1;
17349 context->placeholder_seen = true;
17350 break;
17351 }
17352 else
17353 expansion_failed (loc, NULL_RTX,
17354 "PLACEHOLDER_EXPR for an unexpected type");
17355 break;
17356
17357 case CALL_EXPR:
17358 {
17359 const int nargs = call_expr_nargs (loc);
17360 tree callee = get_callee_fndecl (loc);
17361 int i;
17362 dw_die_ref dwarf_proc;
17363
17364 if (callee == NULL_TREE)
17365 goto call_expansion_failed;
17366
17367 /* We handle only functions that return an integer. */
17368 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17369 goto call_expansion_failed;
17370
17371 dwarf_proc = function_to_dwarf_procedure (callee);
17372 if (dwarf_proc == NULL)
17373 goto call_expansion_failed;
17374
17375 /* Evaluate arguments right-to-left so that the first argument will
17376 be the top-most one on the stack. */
17377 for (i = nargs - 1; i >= 0; --i)
17378 {
17379 dw_loc_descr_ref loc_descr
17380 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17381 context);
17382
17383 if (loc_descr == NULL)
17384 goto call_expansion_failed;
17385
17386 add_loc_descr (&ret, loc_descr);
17387 }
17388
17389 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17390 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17391 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17392 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17393 add_loc_descr (&ret, ret1);
17394 break;
17395
17396 call_expansion_failed:
17397 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17398 /* There are no opcodes for these operations. */
17399 return 0;
17400 }
17401
17402 case PREINCREMENT_EXPR:
17403 case PREDECREMENT_EXPR:
17404 case POSTINCREMENT_EXPR:
17405 case POSTDECREMENT_EXPR:
17406 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17407 /* There are no opcodes for these operations. */
17408 return 0;
17409
17410 case ADDR_EXPR:
17411 /* If we already want an address, see if there is an INDIRECT_REF inside,
17412 e.g. for &this->field. */
17413 if (want_address)
17414 {
17415 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17416 (loc, want_address == 2, context);
17417 if (list_ret)
17418 have_address = 1;
17419 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17420 && (ret = cst_pool_loc_descr (loc)))
17421 have_address = 1;
17422 }
17423 /* Otherwise, process the argument and look for the address. */
17424 if (!list_ret && !ret)
17425 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17426 else
17427 {
17428 if (want_address)
17429 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17430 return NULL;
17431 }
17432 break;
17433
17434 case VAR_DECL:
17435 if (DECL_THREAD_LOCAL_P (loc))
17436 {
17437 rtx rtl;
17438 enum dwarf_location_atom tls_op;
17439 enum dtprel_bool dtprel = dtprel_false;
17440
17441 if (targetm.have_tls)
17442 {
17443 /* If this is not defined, we have no way to emit the
17444 data. */
17445 if (!targetm.asm_out.output_dwarf_dtprel)
17446 return 0;
17447
17448 /* The way DW_OP_GNU_push_tls_address is specified, we
17449 can only look up addresses of objects in the current
17450 module. We used DW_OP_addr as first op, but that's
17451 wrong, because DW_OP_addr is relocated by the debug
17452 info consumer, while DW_OP_GNU_push_tls_address
17453 operand shouldn't be. */
17454 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17455 return 0;
17456 dtprel = dtprel_true;
17457 /* We check for DWARF 5 here because gdb did not implement
17458 DW_OP_form_tls_address until after 7.12. */
17459 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17460 : DW_OP_GNU_push_tls_address);
17461 }
17462 else
17463 {
17464 if (!targetm.emutls.debug_form_tls_address
17465 || !(dwarf_version >= 3 || !dwarf_strict))
17466 return 0;
17467 /* We stuffed the control variable into the DECL_VALUE_EXPR
17468 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17469 no longer appear in gimple code.  We used the control
17470 variable specifically so that we could pick it up here. */
17471 loc = DECL_VALUE_EXPR (loc);
17472 tls_op = DW_OP_form_tls_address;
17473 }
17474
17475 rtl = rtl_for_decl_location (loc);
17476 if (rtl == NULL_RTX)
17477 return 0;
17478
17479 if (!MEM_P (rtl))
17480 return 0;
17481 rtl = XEXP (rtl, 0);
17482 if (! CONSTANT_P (rtl))
17483 return 0;
17484
17485 ret = new_addr_loc_descr (rtl, dtprel);
17486 ret1 = new_loc_descr (tls_op, 0, 0);
17487 add_loc_descr (&ret, ret1);
17488
17489 have_address = 1;
17490 break;
17491 }
17492 /* FALLTHRU */
17493
17494 case PARM_DECL:
17495 if (context != NULL && context->dpi != NULL
17496 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17497 {
17498 /* We are generating code for a DWARF procedure and we want to access
17499 one of its arguments: find the appropriate argument offset and let
17500 the resolve_args_picking pass compute the offset that complies
17501 with the stack frame size. */
17502 unsigned i = 0;
17503 tree cursor;
17504
17505 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17506 cursor != NULL_TREE && cursor != loc;
17507 cursor = TREE_CHAIN (cursor), ++i)
17508 ;
17509 /* If we are translating a DWARF procedure, all referenced parameters
17510 must belong to the current function. */
17511 gcc_assert (cursor != NULL_TREE);
17512
17513 ret = new_loc_descr (DW_OP_pick, i, 0);
17514 ret->frame_offset_rel = 1;
17515 break;
17516 }
17517 /* FALLTHRU */
17518
17519 case RESULT_DECL:
17520 if (DECL_HAS_VALUE_EXPR_P (loc))
17521 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17522 want_address, context);
17523 /* FALLTHRU */
17524
17525 case FUNCTION_DECL:
17526 {
17527 rtx rtl;
17528 var_loc_list *loc_list = lookup_decl_loc (loc);
17529
17530 if (loc_list && loc_list->first)
17531 {
17532 list_ret = dw_loc_list (loc_list, loc, want_address);
17533 have_address = want_address != 0;
17534 break;
17535 }
17536 rtl = rtl_for_decl_location (loc);
17537 if (rtl == NULL_RTX)
17538 {
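/* No RTL: during early DWARF generation we can still refer to the value
of a small integral or pointer local variable with
DW_OP_GNU_variable_value, pointing at its DIE when it already exists,
or at the decl itself so the reference can be resolved later.  */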
17539 if (TREE_CODE (loc) != FUNCTION_DECL
17540 && early_dwarf
17541 && current_function_decl
17542 && want_address != 1
17543 && ! DECL_IGNORED_P (loc)
17544 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
17545 || POINTER_TYPE_P (TREE_TYPE (loc)))
17546 && DECL_CONTEXT (loc) == current_function_decl
17547 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
17548 <= DWARF2_ADDR_SIZE))
17549 {
17550 dw_die_ref ref = lookup_decl_die (loc);
17551 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
17552 if (ref)
17553 {
17554 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17555 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
17556 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
17557 }
17558 else
17559 {
17560 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
17561 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
17562 }
17563 break;
17564 }
17565 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17566 return 0;
17567 }
17568 else if (CONST_INT_P (rtl))
17569 {
17570 HOST_WIDE_INT val = INTVAL (rtl);
17571 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17572 val &= GET_MODE_MASK (DECL_MODE (loc));
17573 ret = int_loc_descriptor (val);
17574 }
17575 else if (GET_CODE (rtl) == CONST_STRING)
17576 {
17577 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17578 return 0;
17579 }
17580 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17581 ret = new_addr_loc_descr (rtl, dtprel_false);
17582 else
17583 {
17584 machine_mode mode, mem_mode;
17585
17586 /* Certain constructs can only be represented at top-level. */
17587 if (want_address == 2)
17588 {
17589 ret = loc_descriptor (rtl, VOIDmode,
17590 VAR_INIT_STATUS_INITIALIZED);
17591 have_address = 1;
17592 }
17593 else
17594 {
17595 mode = GET_MODE (rtl);
17596 mem_mode = VOIDmode;
17597 if (MEM_P (rtl))
17598 {
17599 mem_mode = mode;
17600 mode = get_address_mode (rtl);
17601 rtl = XEXP (rtl, 0);
17602 have_address = 1;
17603 }
17604 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17605 VAR_INIT_STATUS_INITIALIZED);
17606 }
17607 if (!ret)
17608 expansion_failed (loc, rtl,
17609 "failed to produce loc descriptor for rtl");
17610 }
17611 }
17612 break;
17613
17614 case MEM_REF:
17615 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17616 {
17617 have_address = 1;
17618 goto do_plus;
17619 }
17620 /* Fallthru. */
17621 case INDIRECT_REF:
17622 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17623 have_address = 1;
17624 break;
17625
17626 case TARGET_MEM_REF:
17627 case SSA_NAME:
17628 case DEBUG_EXPR_DECL:
17629 return NULL;
17630
17631 case COMPOUND_EXPR:
17632 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17633 context);
17634
17635 CASE_CONVERT:
17636 case VIEW_CONVERT_EXPR:
17637 case SAVE_EXPR:
17638 case MODIFY_EXPR:
17639 case NON_LVALUE_EXPR:
17640 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17641 context);
17642
17643 case COMPONENT_REF:
17644 case BIT_FIELD_REF:
17645 case ARRAY_REF:
17646 case ARRAY_RANGE_REF:
17647 case REALPART_EXPR:
17648 case IMAGPART_EXPR:
17649 {
17650 tree obj, offset;
17651 HOST_WIDE_INT bitsize, bitpos, bytepos;
17652 machine_mode mode;
17653 int unsignedp, reversep, volatilep = 0;
17654
17655 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17656 &unsignedp, &reversep, &volatilep);
17657
17658 gcc_assert (obj != loc);
17659
17660 list_ret = loc_list_from_tree_1 (obj,
17661 want_address == 2
17662 && !bitpos && !offset ? 2 : 1,
17663 context);
17664 /* TODO: We can extract the value of a small expression via shifting even
17665 for nonzero bitpos. */
17666 if (list_ret == 0)
17667 return 0;
17668 if (bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
17669 {
17670 expansion_failed (loc, NULL_RTX,
17671 "bitfield access");
17672 return 0;
17673 }
17674
17675 if (offset != NULL_TREE)
17676 {
17677 /* Variable offset. */
17678 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17679 if (list_ret1 == 0)
17680 return 0;
17681 add_loc_list (&list_ret, list_ret1);
17682 if (!list_ret)
17683 return 0;
17684 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17685 }
17686
17687 bytepos = bitpos / BITS_PER_UNIT;
17688 if (bytepos > 0)
17689 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
17690 else if (bytepos < 0)
17691 loc_list_plus_const (list_ret, bytepos);
17692
17693 have_address = 1;
17694 break;
17695 }
17696
17697 case INTEGER_CST:
17698 if ((want_address || !tree_fits_shwi_p (loc))
17699 && (ret = cst_pool_loc_descr (loc)))
17700 have_address = 1;
17701 else if (want_address == 2
17702 && tree_fits_shwi_p (loc)
17703 && (ret = address_of_int_loc_descriptor
17704 (int_size_in_bytes (TREE_TYPE (loc)),
17705 tree_to_shwi (loc))))
17706 have_address = 1;
17707 else if (tree_fits_shwi_p (loc))
17708 ret = int_loc_descriptor (tree_to_shwi (loc));
17709 else if (tree_fits_uhwi_p (loc))
17710 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17711 else
17712 {
17713 expansion_failed (loc, NULL_RTX,
17714 "Integer operand is not host integer");
17715 return 0;
17716 }
17717 break;
17718
17719 case CONSTRUCTOR:
17720 case REAL_CST:
17721 case STRING_CST:
17722 case COMPLEX_CST:
17723 if ((ret = cst_pool_loc_descr (loc)))
17724 have_address = 1;
17725 else if (TREE_CODE (loc) == CONSTRUCTOR)
17726 {
17727 tree type = TREE_TYPE (loc);
17728 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17729 unsigned HOST_WIDE_INT offset = 0;
17730 unsigned HOST_WIDE_INT cnt;
17731 constructor_elt *ce;
17732
17733 if (TREE_CODE (type) == RECORD_TYPE)
17734 {
17735 /* This is very limited, but it's enough to output
17736 pointers to member functions, as long as the
17737 referenced function is defined in the current
17738 translation unit. */
17739 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17740 {
17741 tree val = ce->value;
17742
17743 tree field = ce->index;
17744
17745 if (val)
17746 STRIP_NOPS (val);
17747
17748 if (!field || DECL_BIT_FIELD (field))
17749 {
17750 expansion_failed (loc, NULL_RTX,
17751 "bitfield in record type constructor");
17752 size = offset = (unsigned HOST_WIDE_INT)-1;
17753 ret = NULL;
17754 break;
17755 }
17756
17757 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17758 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17759 gcc_assert (pos + fieldsize <= size);
17760 if (pos < offset)
17761 {
17762 expansion_failed (loc, NULL_RTX,
17763 "out-of-order fields in record constructor");
17764 size = offset = (unsigned HOST_WIDE_INT)-1;
17765 ret = NULL;
17766 break;
17767 }
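/* If there is a gap before this field, pad it with a DW_OP_piece that
has no location attached, which leaves those bytes undefined.  */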
17768 if (pos > offset)
17769 {
17770 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17771 add_loc_descr (&ret, ret1);
17772 offset = pos;
17773 }
17774 if (val && fieldsize != 0)
17775 {
17776 ret1 = loc_descriptor_from_tree (val, want_address, context);
17777 if (!ret1)
17778 {
17779 expansion_failed (loc, NULL_RTX,
17780 "unsupported expression in field");
17781 size = offset = (unsigned HOST_WIDE_INT)-1;
17782 ret = NULL;
17783 break;
17784 }
17785 add_loc_descr (&ret, ret1);
17786 }
17787 if (fieldsize)
17788 {
17789 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17790 add_loc_descr (&ret, ret1);
17791 offset = pos + fieldsize;
17792 }
17793 }
17794
17795 if (offset != size)
17796 {
17797 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17798 add_loc_descr (&ret, ret1);
17799 offset = size;
17800 }
17801
17802 have_address = !!want_address;
17803 }
17804 else
17805 expansion_failed (loc, NULL_RTX,
17806 "constructor of non-record type");
17807 }
17808 else
17809 /* We can construct small constants here using int_loc_descriptor. */
17810 expansion_failed (loc, NULL_RTX,
17811 "constructor or constant not in constant pool");
17812 break;
17813
17814 case TRUTH_AND_EXPR:
17815 case TRUTH_ANDIF_EXPR:
17816 case BIT_AND_EXPR:
17817 op = DW_OP_and;
17818 goto do_binop;
17819
17820 case TRUTH_XOR_EXPR:
17821 case BIT_XOR_EXPR:
17822 op = DW_OP_xor;
17823 goto do_binop;
17824
17825 case TRUTH_OR_EXPR:
17826 case TRUTH_ORIF_EXPR:
17827 case BIT_IOR_EXPR:
17828 op = DW_OP_or;
17829 goto do_binop;
17830
17831 case FLOOR_DIV_EXPR:
17832 case CEIL_DIV_EXPR:
17833 case ROUND_DIV_EXPR:
17834 case TRUNC_DIV_EXPR:
17835 case EXACT_DIV_EXPR:
17836 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17837 return 0;
17838 op = DW_OP_div;
17839 goto do_binop;
17840
17841 case MINUS_EXPR:
17842 op = DW_OP_minus;
17843 goto do_binop;
17844
17845 case FLOOR_MOD_EXPR:
17846 case CEIL_MOD_EXPR:
17847 case ROUND_MOD_EXPR:
17848 case TRUNC_MOD_EXPR:
17849 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17850 {
17851 op = DW_OP_mod;
17852 goto do_binop;
17853 }
17854 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17855 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17856 if (list_ret == 0 || list_ret1 == 0)
17857 return 0;
17858
17859 add_loc_list (&list_ret, list_ret1);
17860 if (list_ret == 0)
17861 return 0;
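/* The dividend A and the divisor B are now on the stack, B on top.
Compute A mod B as A - (A / B) * B using the signed DW_OP_div:
over; over; div; mul; minus.  */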
17862 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17863 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17864 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17865 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17866 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17867 break;
17868
17869 case MULT_EXPR:
17870 op = DW_OP_mul;
17871 goto do_binop;
17872
17873 case LSHIFT_EXPR:
17874 op = DW_OP_shl;
17875 goto do_binop;
17876
17877 case RSHIFT_EXPR:
17878 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17879 goto do_binop;
17880
17881 case POINTER_PLUS_EXPR:
17882 case PLUS_EXPR:
17883 do_plus:
17884 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17885 {
17886 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17887 smarter to encode their negation.  The DW_OP_plus_uconst operation
17888 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17889 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17890 bytes, Y being the size of the operation that pushes the opposite
17891 of the addend. So let's choose the smallest representation. */
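/* For instance, assuming DWARF2_ADDR_SIZE is 8, an addend of -8 would need
a 10-byte ULEB128 operand for DW_OP_plus_uconst (11 bytes in total),
whereas the DW_OP_lit8; DW_OP_minus sequence takes only 2 bytes.  */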
17892 const tree tree_addend = TREE_OPERAND (loc, 1);
17893 offset_int wi_addend;
17894 HOST_WIDE_INT shwi_addend;
17895 dw_loc_descr_ref loc_naddend;
17896
17897 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17898 if (list_ret == 0)
17899 return 0;
17900
17901 /* Try to get the literal to push.  It is the negation of the addend;
17902 since we rely on wrapping during DWARF evaluation, first decode
17903 the literal as a "DWARF-sized" signed number. */
17904 wi_addend = wi::to_offset (tree_addend);
17905 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
17906 shwi_addend = wi_addend.to_shwi ();
17907 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
17908 ? int_loc_descriptor (-shwi_addend)
17909 : NULL;
17910
17911 if (loc_naddend != NULL
17912 && ((unsigned) size_of_uleb128 (shwi_addend)
17913 > size_of_loc_descr (loc_naddend)))
17914 {
17915 add_loc_descr_to_each (list_ret, loc_naddend);
17916 add_loc_descr_to_each (list_ret,
17917 new_loc_descr (DW_OP_minus, 0, 0));
17918 }
17919 else
17920 {
17921 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
17922 {
17923 loc_naddend = loc_cur;
17924 loc_cur = loc_cur->dw_loc_next;
17925 ggc_free (loc_naddend);
17926 }
17927 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
17928 }
17929 break;
17930 }
17931
17932 op = DW_OP_plus;
17933 goto do_binop;
17934
17935 case LE_EXPR:
17936 op = DW_OP_le;
17937 goto do_comp_binop;
17938
17939 case GE_EXPR:
17940 op = DW_OP_ge;
17941 goto do_comp_binop;
17942
17943 case LT_EXPR:
17944 op = DW_OP_lt;
17945 goto do_comp_binop;
17946
17947 case GT_EXPR:
17948 op = DW_OP_gt;
17949 goto do_comp_binop;
17950
17951 do_comp_binop:
17952 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
17953 {
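/* The DWARF comparison operators compare signed values, so unsigned
operands need the special handling in loc_list_from_uint_comparison.  */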
17954 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
17955 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
17956 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
17957 TREE_CODE (loc));
17958 break;
17959 }
17960 else
17961 goto do_binop;
17962
17963 case EQ_EXPR:
17964 op = DW_OP_eq;
17965 goto do_binop;
17966
17967 case NE_EXPR:
17968 op = DW_OP_ne;
17969 goto do_binop;
17970
17971 do_binop:
17972 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17973 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17974 if (list_ret == 0 || list_ret1 == 0)
17975 return 0;
17976
17977 add_loc_list (&list_ret, list_ret1);
17978 if (list_ret == 0)
17979 return 0;
17980 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
17981 break;
17982
17983 case TRUTH_NOT_EXPR:
17984 case BIT_NOT_EXPR:
17985 op = DW_OP_not;
17986 goto do_unop;
17987
17988 case ABS_EXPR:
17989 op = DW_OP_abs;
17990 goto do_unop;
17991
17992 case NEGATE_EXPR:
17993 op = DW_OP_neg;
17994 goto do_unop;
17995
17996 do_unop:
17997 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17998 if (list_ret == 0)
17999 return 0;
18000
18001 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18002 break;
18003
18004 case MIN_EXPR:
18005 case MAX_EXPR:
18006 {
18007 const enum tree_code code =
18008 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18009
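/* Rewrite MIN_EXPR (a, b) as the equivalent a > b ? b : a and
MAX_EXPR (a, b) as a < b ? b : a, then let the COND_EXPR handling
below emit the conditional expression.  */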
18010 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18011 build2 (code, integer_type_node,
18012 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18013 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18014 }
18015
18016 /* fall through */
18017
18018 case COND_EXPR:
18019 {
18020 dw_loc_descr_ref lhs
18021 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18022 dw_loc_list_ref rhs
18023 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18024 dw_loc_descr_ref bra_node, jump_node, tmp;
18025
18026 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18027 if (list_ret == 0 || lhs == 0 || rhs == 0)
18028 return 0;
18029
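/* Lay the expression out as: <condition>; DW_OP_bra L1; <else>;
DW_OP_skip L2; L1: <then>; L2: DW_OP_nop.  DW_OP_bra branches when
the popped condition is non-zero, so L1 receives operand 1.  */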
18030 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18031 add_loc_descr_to_each (list_ret, bra_node);
18032
18033 add_loc_list (&list_ret, rhs);
18034 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18035 add_loc_descr_to_each (list_ret, jump_node);
18036
18037 add_loc_descr_to_each (list_ret, lhs);
18038 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18039 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18040
18041 /* ??? Need a node to point the skip at. Use a nop. */
18042 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18043 add_loc_descr_to_each (list_ret, tmp);
18044 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18045 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18046 }
18047 break;
18048
18049 case FIX_TRUNC_EXPR:
18050 return 0;
18051
18052 default:
18053 /* Leave front-end specific codes as simply unknown. This comes
18054 up, for instance, with the C STMT_EXPR. */
18055 if ((unsigned int) TREE_CODE (loc)
18056 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18057 {
18058 expansion_failed (loc, NULL_RTX,
18059 "language specific tree node");
18060 return 0;
18061 }
18062
18063 /* Otherwise this is a generic code; we should just list all of
18064 these explicitly. We forgot one. */
18065 if (flag_checking)
18066 gcc_unreachable ();
18067
18068 /* In a release build, we want to degrade gracefully: better to
18069 generate incomplete debugging information than to crash. */
18070 return NULL;
18071 }
18072
18073 if (!ret && !list_ret)
18074 return 0;
18075
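/* The caller wants an address but we only computed a value: if the DWARF
version allows it and the value fits in an address, append
DW_OP_stack_value, which says the top of the stack is the object's
value rather than its location.  */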
18076 if (want_address == 2 && !have_address
18077 && (dwarf_version >= 4 || !dwarf_strict))
18078 {
18079 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18080 {
18081 expansion_failed (loc, NULL_RTX,
18082 "DWARF address size mismatch");
18083 return 0;
18084 }
18085 if (ret)
18086 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18087 else
18088 add_loc_descr_to_each (list_ret,
18089 new_loc_descr (DW_OP_stack_value, 0, 0));
18090 have_address = 1;
18091 }
18092 /* Show if we can't fill the request for an address. */
18093 if (want_address && !have_address)
18094 {
18095 expansion_failed (loc, NULL_RTX,
18096 "Want address and only have value");
18097 return 0;
18098 }
18099
18100 gcc_assert (!ret || !list_ret);
18101
18102 /* If we've got an address and don't want one, dereference. */
18103 if (!want_address && have_address)
18104 {
18105 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18106
18107 if (size > DWARF2_ADDR_SIZE || size == -1)
18108 {
18109 expansion_failed (loc, NULL_RTX,
18110 "DWARF address size mismatch");
18111 return 0;
18112 }
18113 else if (size == DWARF2_ADDR_SIZE)
18114 op = DW_OP_deref;
18115 else
18116 op = DW_OP_deref_size;
18117
18118 if (ret)
18119 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18120 else
18121 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18122 }
18123 if (ret)
18124 list_ret = new_loc_list (ret, NULL, NULL, NULL);
18125
18126 return list_ret;
18127 }
18128
18129 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18130 expressions. */
18131
18132 static dw_loc_list_ref
18133 loc_list_from_tree (tree loc, int want_address,
18134 struct loc_descr_context *context)
18135 {
18136 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18137
18138 for (dw_loc_list_ref loc_cur = result;
18139 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18140 loc_descr_without_nops (loc_cur->expr);
18141 return result;
18142 }
18143
18144 /* Same as above but return only a single location expression. */
18145 static dw_loc_descr_ref
18146 loc_descriptor_from_tree (tree loc, int want_address,
18147 struct loc_descr_context *context)
18148 {
18149 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18150 if (!ret)
18151 return NULL;
18152 if (ret->dw_loc_next)
18153 {
18154 expansion_failed (loc, NULL_RTX,
18155 "Location list where only loc descriptor needed");
18156 return NULL;
18157 }
18158 return ret->expr;
18159 }
18160
18161 /* Given a value, round it up to the lowest multiple of `boundary'
18162 which is not less than the value itself. */
18163
18164 static inline HOST_WIDE_INT
18165 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18166 {
18167 return (((value + boundary - 1) / boundary) * boundary);
18168 }
18169
18170 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18171 pointer to the declared type for the relevant field variable, or return
18172 `integer_type_node' if the given node turns out to be an
18173 ERROR_MARK node. */
18174
18175 static inline tree
18176 field_type (const_tree decl)
18177 {
18178 tree type;
18179
18180 if (TREE_CODE (decl) == ERROR_MARK)
18181 return integer_type_node;
18182
18183 type = DECL_BIT_FIELD_TYPE (decl);
18184 if (type == NULL_TREE)
18185 type = TREE_TYPE (decl);
18186
18187 return type;
18188 }
18189
18190 /* Given a pointer to a tree node, return the alignment in bits for
18191 it, or else return BITS_PER_WORD if the node actually turns out to
18192 be an ERROR_MARK node. */
18193
18194 static inline unsigned
18195 simple_type_align_in_bits (const_tree type)
18196 {
18197 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18198 }
18199
18200 static inline unsigned
18201 simple_decl_align_in_bits (const_tree decl)
18202 {
18203 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18204 }
18205
18206 /* Return the result of rounding T up to ALIGN. */
18207
18208 static inline offset_int
18209 round_up_to_align (const offset_int &t, unsigned int align)
18210 {
18211 return wi::udiv_trunc (t + align - 1, align) * align;
18212 }
18213
18214 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18215 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18216 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18217 if we fail to return the size in one of these two forms. */
18218
18219 static dw_loc_descr_ref
18220 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18221 {
18222 tree tree_size;
18223 struct loc_descr_context ctx;
18224
18225 /* Prefer to return a constant integer, if possible. */
18226 *cst_size = int_size_in_bytes (type);
18227 if (*cst_size != -1)
18228 return NULL;
18229
18230 ctx.context_type = const_cast<tree> (type);
18231 ctx.base_decl = NULL_TREE;
18232 ctx.dpi = NULL;
18233 ctx.placeholder_arg = false;
18234 ctx.placeholder_seen = false;
18235
18236 type = TYPE_MAIN_VARIANT (type);
18237 tree_size = TYPE_SIZE_UNIT (type);
18238 return ((tree_size != NULL_TREE)
18239 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18240 : NULL);
18241 }
18242
18243 /* Helper structure for RECORD_TYPE processing. */
18244 struct vlr_context
18245 {
18246 /* Root RECORD_TYPE. It is needed to generate data member location
18247 descriptions in variable-length records (VLR), but also to cope with
18248 variants, which are composed of nested structures multiplexed with
18249 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18250 function processing a FIELD_DECL, it is required to be non-null. */
18251 tree struct_type;
18252 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18253 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18254 this variant part as part of the root record (in storage units). For
18255 regular records, it must be NULL_TREE. */
18256 tree variant_part_offset;
18257 };
18258
18259 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18260 addressed byte of the "containing object" for the given FIELD_DECL. If
18261 possible, return a native constant through CST_OFFSET (in which case NULL is
18262 returned); otherwise return a DWARF expression that computes the offset.
18263
18264 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18265 that offset is, either because the argument turns out to be a pointer to an
18266 ERROR_MARK node, or because the offset expression is too complex for us.
18267
18268 CTX is required: see the comment for VLR_CONTEXT. */
18269
18270 static dw_loc_descr_ref
18271 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18272 HOST_WIDE_INT *cst_offset)
18273 {
18274 tree tree_result;
18275 dw_loc_list_ref loc_result;
18276
18277 *cst_offset = 0;
18278
18279 if (TREE_CODE (decl) == ERROR_MARK)
18280 return NULL;
18281 else
18282 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18283
18284 /* We cannot handle variable bit offsets at the moment, so abort if that
18285 is the case. */
18286 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18287 return NULL;
18288
18289 #ifdef PCC_BITFIELD_TYPE_MATTERS
18290 /* We used to handle only constant offsets in all cases.  Now, we properly
18291 handle dynamic byte offsets only when PCC bitfield type doesn't
18292 matter. */
18293 if (PCC_BITFIELD_TYPE_MATTERS
18294 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18295 {
18296 offset_int object_offset_in_bits;
18297 offset_int object_offset_in_bytes;
18298 offset_int bitpos_int;
18299 tree type;
18300 tree field_size_tree;
18301 offset_int deepest_bitpos;
18302 offset_int field_size_in_bits;
18303 unsigned int type_align_in_bits;
18304 unsigned int decl_align_in_bits;
18305 offset_int type_size_in_bits;
18306
18307 bitpos_int = wi::to_offset (bit_position (decl));
18308 type = field_type (decl);
18309 type_size_in_bits = offset_int_type_size_in_bits (type);
18310 type_align_in_bits = simple_type_align_in_bits (type);
18311
18312 field_size_tree = DECL_SIZE (decl);
18313
18314 /* The size could be unspecified if there was an error, or for
18315 a flexible array member. */
18316 if (!field_size_tree)
18317 field_size_tree = bitsize_zero_node;
18318
18319 /* If the size of the field is not constant, use the type size. */
18320 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18321 field_size_in_bits = wi::to_offset (field_size_tree);
18322 else
18323 field_size_in_bits = type_size_in_bits;
18324
18325 decl_align_in_bits = simple_decl_align_in_bits (decl);
18326
18327 /* The GCC front-end doesn't make any attempt to keep track of the
18328 starting bit offset (relative to the start of the containing
18329 structure type) of the hypothetical "containing object" for a
18330 bit-field. Thus, when computing the byte offset value for the
18331 start of the "containing object" of a bit-field, we must deduce
18332 this information on our own. This can be rather tricky to do in
18333 some cases. For example, handling the following structure type
18334 definition when compiling for an i386/i486 target (which only
18335 aligns long long's to 32-bit boundaries) can be very tricky:
18336
18337 struct S { int field1; long long field2:31; };
18338
18339 Fortunately, there is a simple rule-of-thumb which can be used
18340 in such cases. When compiling for an i386/i486, GCC will
18341 allocate 8 bytes for the structure shown above. It decides to
18342 do this based upon one simple rule for bit-field allocation.
18343 GCC allocates each "containing object" for each bit-field at
18344 the first (i.e. lowest addressed) legitimate alignment boundary
18345 (based upon the required minimum alignment for the declared
18346 type of the field) which it can possibly use, subject to the
18347 condition that there is still enough available space remaining
18348 in the containing object (when allocated at the selected point)
18349 to fully accommodate all of the bits of the bit-field itself.
18350
18351 This simple rule makes it obvious why GCC allocates 8 bytes for
18352 each object of the structure type shown above. When looking
18353 for a place to allocate the "containing object" for `field2',
18354 the compiler simply tries to allocate a 64-bit "containing
18355 object" at each successive 32-bit boundary (starting at zero)
18356 until it finds a place to allocate that 64-bit field such that
18357 at least 31 contiguous (and previously unallocated) bits remain
18358 within that selected 64 bit field. (As it turns out, for the
18359 example above, the compiler finds it is OK to allocate the
18360 "containing object" 64-bit field at bit-offset zero within the
18361 structure type.)
18362
18363 Here we attempt to work backwards from the limited set of facts
18364 we're given, and we try to deduce from those facts, where GCC
18365 must have believed that the containing object started (within
18366 the structure type). The value we deduce is then used (by the
18367 callers of this routine) to generate DW_AT_location and
18368 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18369 the case of DW_AT_location, regular fields as well). */
18370
18371 /* Figure out the bit-distance from the start of the structure to
18372 the "deepest" bit of the bit-field. */
18373 deepest_bitpos = bitpos_int + field_size_in_bits;
18374
18375 /* This is the tricky part. Use some fancy footwork to deduce
18376 where the lowest addressed bit of the containing object must
18377 be. */
18378 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18379
18380 /* Round up to type_align by default. This works best for
18381 bitfields. */
18382 object_offset_in_bits
18383 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
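/* Continuing the struct S example above (and its assumption that long long
is only 32-bit aligned): bitpos_int is 32 and field_size_in_bits is 31,
so deepest_bitpos is 63; subtracting the 64-bit type size gives -1,
which rounds up to 0, i.e. the containing object starts at byte 0 of
the structure.  */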
18384
18385 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18386 {
18387 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18388
18389 /* Round up to decl_align instead. */
18390 object_offset_in_bits
18391 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18392 }
18393
18394 object_offset_in_bytes
18395 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18396 if (ctx->variant_part_offset == NULL_TREE)
18397 {
18398 *cst_offset = object_offset_in_bytes.to_shwi ();
18399 return NULL;
18400 }
18401 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18402 }
18403 else
18404 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18405 tree_result = byte_position (decl);
18406
18407 if (ctx->variant_part_offset != NULL_TREE)
18408 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18409 ctx->variant_part_offset, tree_result);
18410
18411 /* If the byte offset is a constant, it's simpler to handle a native
18412 constant rather than a DWARF expression. */
18413 if (TREE_CODE (tree_result) == INTEGER_CST)
18414 {
18415 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18416 return NULL;
18417 }
18418 struct loc_descr_context loc_ctx = {
18419 ctx->struct_type, /* context_type */
18420 NULL_TREE, /* base_decl */
18421 NULL, /* dpi */
18422 false, /* placeholder_arg */
18423 false /* placeholder_seen */
18424 };
18425 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18426
18427 /* We want a DWARF expression: abort if we only have a location list with
18428 multiple elements. */
18429 if (!loc_result || !single_element_loc_list_p (loc_result))
18430 return NULL;
18431 else
18432 return loc_result->expr;
18433 }
18434 \f
18435 /* The following routines define various Dwarf attributes and any data
18436 associated with them. */
18437
18438 /* Add a location description attribute value to a DIE.
18439
18440 This emits location attributes suitable for whole variables and
18441 whole parameters. Note that the location attributes for struct fields are
18442 generated by the routine `data_member_location_attribute' below. */
18443
18444 static inline void
18445 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18446 dw_loc_list_ref descr)
18447 {
18448 if (descr == 0)
18449 return;
18450 if (single_element_loc_list_p (descr))
18451 add_AT_loc (die, attr_kind, descr->expr);
18452 else
18453 add_AT_loc_list (die, attr_kind, descr);
18454 }
18455
18456 /* Add DW_AT_accessibility attribute to DIE if needed. */
18457
18458 static void
18459 add_accessibility_attribute (dw_die_ref die, tree decl)
18460 {
18461 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18462 children, otherwise the default is DW_ACCESS_public. In DWARF2
18463 the default has always been DW_ACCESS_public. */
18464 if (TREE_PROTECTED (decl))
18465 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18466 else if (TREE_PRIVATE (decl))
18467 {
18468 if (dwarf_version == 2
18469 || die->die_parent == NULL
18470 || die->die_parent->die_tag != DW_TAG_class_type)
18471 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18472 }
18473 else if (dwarf_version > 2
18474 && die->die_parent
18475 && die->die_parent->die_tag == DW_TAG_class_type)
18476 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18477 }
18478
18479 /* Attach the specialized form of location attribute used for data members of
18480 struct and union types. In the special case of a FIELD_DECL node which
18481 represents a bit-field, the "offset" part of this special location
18482 descriptor must indicate the distance in bytes from the lowest-addressed
18483 byte of the containing struct or union type to the lowest-addressed byte of
18484 the "containing object" for the bit-field. (See the `field_byte_offset'
18485 function above).
18486
18487 For any given bit-field, the "containing object" is a hypothetical object
18488 (of some integral or enum type) within which the given bit-field lives. The
18489 type of this hypothetical "containing object" is always the same as the
18490 declared type of the individual bit-field itself (for GCC anyway... the
18491 DWARF spec doesn't actually mandate this). Note that it is the size (in
18492 bytes) of the hypothetical "containing object" which will be given in the
18493 DW_AT_byte_size attribute for this bit-field. (See the
18494 `byte_size_attribute' function below.) It is also used when calculating the
18495 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18496 function below.)
18497
18498 CTX is required: see the comment for VLR_CONTEXT. */
18499
18500 static void
18501 add_data_member_location_attribute (dw_die_ref die,
18502 tree decl,
18503 struct vlr_context *ctx)
18504 {
18505 HOST_WIDE_INT offset;
18506 dw_loc_descr_ref loc_descr = 0;
18507
18508 if (TREE_CODE (decl) == TREE_BINFO)
18509 {
18510 /* We're working on the TAG_inheritance for a base class. */
18511 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18512 {
18513 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18514 aren't at a fixed offset from all (sub)objects of the same
18515 type. We need to extract the appropriate offset from our
18516 vtable. The following dwarf expression means
18517
18518 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18519
18520 This is specific to the V3 ABI, of course. */
18521
18522 dw_loc_descr_ref tmp;
18523
18524 /* Make a copy of the object address. */
18525 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18526 add_loc_descr (&loc_descr, tmp);
18527
18528 /* Extract the vtable address. */
18529 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18530 add_loc_descr (&loc_descr, tmp);
18531
18532 /* Calculate the address of the offset. */
18533 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18534 gcc_assert (offset < 0);
18535
18536 tmp = int_loc_descriptor (-offset);
18537 add_loc_descr (&loc_descr, tmp);
18538 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18539 add_loc_descr (&loc_descr, tmp);
18540
18541 /* Extract the offset. */
18542 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18543 add_loc_descr (&loc_descr, tmp);
18544
18545 /* Add it to the object address. */
18546 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18547 add_loc_descr (&loc_descr, tmp);
18548 }
18549 else
18550 offset = tree_to_shwi (BINFO_OFFSET (decl));
18551 }
18552 else
18553 {
18554 loc_descr = field_byte_offset (decl, ctx, &offset);
18555
18556 /* If loc_descr is available then we know the field offset is dynamic.
18557 However, GDB does not handle dynamic field offsets very well at the
18558 moment. */
18559 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18560 {
18561 loc_descr = NULL;
18562 offset = 0;
18563 }
18564
18565 /* Data member location evaluation starts with the base address on the
18566 stack. Compute the field offset and add it to this base address. */
18567 else if (loc_descr != NULL)
18568 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18569 }
18570
18571 if (! loc_descr)
18572 {
18573 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
18574 example, only added support for it in November 2016.  For DWARF5
18575 we need newer debug info consumers anyway.  We might change this
18576 to dwarf_version >= 4 once most consumers have caught up. */
18577 if (dwarf_version >= 5
18578 && TREE_CODE (decl) == FIELD_DECL
18579 && DECL_BIT_FIELD_TYPE (decl))
18580 {
18581 tree off = bit_position (decl);
18582 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18583 {
18584 remove_AT (die, DW_AT_byte_size);
18585 remove_AT (die, DW_AT_bit_offset);
18586 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18587 return;
18588 }
18589 }
18590 if (dwarf_version > 2)
18591 {
18592 /* Don't need to output a location expression, just the constant. */
18593 if (offset < 0)
18594 add_AT_int (die, DW_AT_data_member_location, offset);
18595 else
18596 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18597 return;
18598 }
18599 else
18600 {
18601 enum dwarf_location_atom op;
18602
18603 /* The DWARF2 standard says that we should assume that the structure
18604 address is already on the stack, so we can specify a structure
18605 field address by using DW_OP_plus_uconst. */
18606 op = DW_OP_plus_uconst;
18607 loc_descr = new_loc_descr (op, offset, 0);
18608 }
18609 }
18610
18611 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18612 }
18613
18614 /* Writes integer values to dw_vec_const array. */
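/* For instance, insert_int (0x1234, 2, dest) stores the bytes 0x34 and
0x12, least significant byte first.  */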
18615
18616 static void
18617 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18618 {
18619 while (size != 0)
18620 {
18621 *dest++ = val & 0xff;
18622 val >>= 8;
18623 --size;
18624 }
18625 }
18626
18627 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18628
18629 static HOST_WIDE_INT
18630 extract_int (const unsigned char *src, unsigned int size)
18631 {
18632 HOST_WIDE_INT val = 0;
18633
18634 src += size;
18635 while (size != 0)
18636 {
18637 val <<= 8;
18638 val |= *--src & 0xff;
18639 --size;
18640 }
18641 return val;
18642 }
18643
18644 /* Writes wide_int values to dw_vec_const array. */
18645
18646 static void
18647 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18648 {
18649 int i;
18650
18651 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18652 {
18653 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18654 return;
18655 }
18656
18657 /* We'd have to extend this code to support odd sizes. */
18658 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18659
18660 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18661
18662 if (WORDS_BIG_ENDIAN)
18663 for (i = n - 1; i >= 0; i--)
18664 {
18665 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18666 dest += sizeof (HOST_WIDE_INT);
18667 }
18668 else
18669 for (i = 0; i < n; i++)
18670 {
18671 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18672 dest += sizeof (HOST_WIDE_INT);
18673 }
18674 }
18675
18676 /* Writes floating point values to dw_vec_const array. */
18677
18678 static void
18679 insert_float (const_rtx rtl, unsigned char *array)
18680 {
18681 long val[4];
18682 int i;
18683 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18684
18685 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
18686
18687 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18688 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
18689 {
18690 insert_int (val[i], 4, array);
18691 array += 4;
18692 }
18693 }
18694
18695 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18696 does not have a "location" either in memory or in a register. These
18697 things can arise in GNU C when a constant is passed as an actual parameter
18698 to an inlined function. They can also arise in C++ where declared
18699 constants do not necessarily get memory "homes". */
18700
18701 static bool
18702 add_const_value_attribute (dw_die_ref die, rtx rtl)
18703 {
18704 switch (GET_CODE (rtl))
18705 {
18706 case CONST_INT:
18707 {
18708 HOST_WIDE_INT val = INTVAL (rtl);
18709
18710 if (val < 0)
18711 add_AT_int (die, DW_AT_const_value, val);
18712 else
18713 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18714 }
18715 return true;
18716
18717 case CONST_WIDE_INT:
18718 {
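/* Use the smallest unsigned precision that still represents the value,
capped at the full width of the CONST_WIDE_INT, so the constant is not
emitted wider than necessary.  */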
18719 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18720 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18721 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18722 wide_int w = wi::zext (w1, prec);
18723 add_AT_wide (die, DW_AT_const_value, w);
18724 }
18725 return true;
18726
18727 case CONST_DOUBLE:
18728 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18729 floating-point constant. A CONST_DOUBLE is used whenever the
18730 constant requires more than one word in order to be adequately
18731 represented. */
18732 if (TARGET_SUPPORTS_WIDE_INT == 0
18733 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
18734 add_AT_double (die, DW_AT_const_value,
18735 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18736 else
18737 {
18738 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18739 unsigned int length = GET_MODE_SIZE (mode);
18740 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18741
18742 insert_float (rtl, array);
18743 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18744 }
18745 return true;
18746
18747 case CONST_VECTOR:
18748 {
18749 machine_mode mode = GET_MODE (rtl);
18750 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18751 unsigned int length = CONST_VECTOR_NUNITS (rtl);
18752 unsigned char *array
18753 = ggc_vec_alloc<unsigned char> (length * elt_size);
18754 unsigned int i;
18755 unsigned char *p;
18756 machine_mode imode = GET_MODE_INNER (mode);
18757
18758 switch (GET_MODE_CLASS (mode))
18759 {
18760 case MODE_VECTOR_INT:
18761 for (i = 0, p = array; i < length; i++, p += elt_size)
18762 {
18763 rtx elt = CONST_VECTOR_ELT (rtl, i);
18764 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18765 }
18766 break;
18767
18768 case MODE_VECTOR_FLOAT:
18769 for (i = 0, p = array; i < length; i++, p += elt_size)
18770 {
18771 rtx elt = CONST_VECTOR_ELT (rtl, i);
18772 insert_float (elt, p);
18773 }
18774 break;
18775
18776 default:
18777 gcc_unreachable ();
18778 }
18779
18780 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18781 }
18782 return true;
18783
18784 case CONST_STRING:
18785 if (dwarf_version >= 4 || !dwarf_strict)
18786 {
18787 dw_loc_descr_ref loc_result;
18788 resolve_one_addr (&rtl);
18789 rtl_addr:
18790 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18791 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18792 add_AT_loc (die, DW_AT_location, loc_result);
18793 vec_safe_push (used_rtx_array, rtl);
18794 return true;
18795 }
18796 return false;
18797
18798 case CONST:
18799 if (CONSTANT_P (XEXP (rtl, 0)))
18800 return add_const_value_attribute (die, XEXP (rtl, 0));
18801 /* FALLTHROUGH */
18802 case SYMBOL_REF:
18803 if (!const_ok_for_output (rtl))
18804 return false;
18805 /* FALLTHROUGH */
18806 case LABEL_REF:
18807 if (dwarf_version >= 4 || !dwarf_strict)
18808 goto rtl_addr;
18809 return false;
18810
18811 case PLUS:
18812 /* In cases where an inlined instance of an inline function is passed
18813 the address of an `auto' variable (which is local to the caller) we
18814 can get a situation where the DECL_RTL of the artificial local
18815 variable (for the inlining) which acts as a stand-in for the
18816 corresponding formal parameter (of the inline function) will look
18817 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18818 exactly a compile-time constant expression, but it isn't the address
18819 of the (artificial) local variable either. Rather, it represents the
18820 *value* which the artificial local variable always has during its
18821 lifetime. We currently have no way to represent such quasi-constant
18822 values in Dwarf, so for now we just punt and generate nothing. */
18823 return false;
18824
18825 case HIGH:
18826 case CONST_FIXED:
18827 return false;
18828
18829 case MEM:
18830 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18831 && MEM_READONLY_P (rtl)
18832 && GET_MODE (rtl) == BLKmode)
18833 {
18834 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18835 return true;
18836 }
18837 return false;
18838
18839 default:
18840 /* No other kinds of rtx should be possible here. */
18841 gcc_unreachable ();
18842 }
18843 return false;
18844 }
18845
18846 /* Determine whether the evaluation of EXPR references any variables
18847 or functions which aren't otherwise used (and therefore may not be
18848 output). */
18849 static tree
18850 reference_to_unused (tree * tp, int * walk_subtrees,
18851 void * data ATTRIBUTE_UNUSED)
18852 {
18853 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18854 *walk_subtrees = 0;
18855
18856 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18857 && ! TREE_ASM_WRITTEN (*tp))
18858 return *tp;
18859 /* ??? The C++ FE emits debug information for using decls, so
18860 putting gcc_unreachable here falls over. See PR31899. For now
18861 be conservative. */
18862 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18863 return *tp;
18864 else if (VAR_P (*tp))
18865 {
18866 varpool_node *node = varpool_node::get (*tp);
18867 if (!node || !node->definition)
18868 return *tp;
18869 }
18870 else if (TREE_CODE (*tp) == FUNCTION_DECL
18871 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18872 {
18873 /* The call graph machinery must have finished analyzing,
18874 optimizing and gimplifying the CU by now.
18875 So if *TP has no call graph node associated
18876 to it, it means *TP will not be emitted. */
18877 if (!cgraph_node::get (*tp))
18878 return *tp;
18879 }
18880 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18881 return *tp;
18882
18883 return NULL_TREE;
18884 }
18885
18886 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18887 for use in a later add_const_value_attribute call. */
18888
18889 static rtx
18890 rtl_for_decl_init (tree init, tree type)
18891 {
18892 rtx rtl = NULL_RTX;
18893
18894 STRIP_NOPS (init);
18895
18896 /* If a variable is initialized with a string constant without embedded
18897 zeros, build CONST_STRING. */
18898 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
18899 {
18900 tree enttype = TREE_TYPE (type);
18901 tree domain = TYPE_DOMAIN (type);
18902 scalar_int_mode mode;
18903
18904 if (is_int_mode (TYPE_MODE (enttype), &mode)
18905 && GET_MODE_SIZE (mode) == 1
18906 && domain
18907 && integer_zerop (TYPE_MIN_VALUE (domain))
18908 && compare_tree_int (TYPE_MAX_VALUE (domain),
18909 TREE_STRING_LENGTH (init) - 1) == 0
18910 && ((size_t) TREE_STRING_LENGTH (init)
18911 == strlen (TREE_STRING_POINTER (init)) + 1))
18912 {
18913 rtl = gen_rtx_CONST_STRING (VOIDmode,
18914 ggc_strdup (TREE_STRING_POINTER (init)));
18915 rtl = gen_rtx_MEM (BLKmode, rtl);
18916 MEM_READONLY_P (rtl) = 1;
18917 }
18918 }
18919 /* Other aggregates, and complex values, could be represented using
18920 CONCAT: FIXME! */
18921 else if (AGGREGATE_TYPE_P (type)
18922 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
18923 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
18924 || TREE_CODE (type) == COMPLEX_TYPE)
18925 ;
18926 /* Vectors only work if their mode is supported by the target.
18927 FIXME: generic vectors ought to work too. */
18928 else if (TREE_CODE (type) == VECTOR_TYPE
18929 && !VECTOR_MODE_P (TYPE_MODE (type)))
18930 ;
18931 /* If the initializer is something that we know will expand into an
18932 immediate RTL constant, expand it now. We must be careful not to
18933 reference variables which won't be output. */
18934 else if (initializer_constant_valid_p (init, type)
18935 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
18936 {
18937 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
18938 possible. */
18939 if (TREE_CODE (type) == VECTOR_TYPE)
18940 switch (TREE_CODE (init))
18941 {
18942 case VECTOR_CST:
18943 break;
18944 case CONSTRUCTOR:
18945 if (TREE_CONSTANT (init))
18946 {
18947 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
18948 bool constant_p = true;
18949 tree value;
18950 unsigned HOST_WIDE_INT ix;
18951
18952 /* Even when ctor is constant, it might contain non-*_CST
18953 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
18954 belong in VECTOR_CST nodes. */
18955 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
18956 if (!CONSTANT_CLASS_P (value))
18957 {
18958 constant_p = false;
18959 break;
18960 }
18961
18962 if (constant_p)
18963 {
18964 init = build_vector_from_ctor (type, elts);
18965 break;
18966 }
18967 }
18968 /* FALLTHRU */
18969
18970 default:
18971 return NULL;
18972 }
18973
18974 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
18975
18976 /* If expand_expr returns a MEM, it wasn't immediate. */
18977 gcc_assert (!rtl || !MEM_P (rtl));
18978 }
18979
18980 return rtl;
18981 }
18982
18983 /* Generate RTL for the variable DECL to represent its location. */
18984
18985 static rtx
18986 rtl_for_decl_location (tree decl)
18987 {
18988 rtx rtl;
18989
18990 /* Here we have to decide where we are going to say the parameter "lives"
18991 (as far as the debugger is concerned). We only have a couple of
18992 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
18993
18994 DECL_RTL normally indicates where the parameter lives during most of the
18995 activation of the function. If optimization is enabled however, this
18996 could be either NULL or else a pseudo-reg. Both of those cases indicate
18997 that the parameter doesn't really live anywhere (as far as the code
18998 generation parts of GCC are concerned) during most of the function's
18999 activation. That will happen (for example) if the parameter is never
19000 referenced within the function.
19001
19002 We could just generate a location descriptor here for all non-NULL
19003 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19004 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19005 where DECL_RTL is NULL or is a pseudo-reg.
19006
19007 Note however that we can only get away with using DECL_INCOMING_RTL as
19008 a backup substitute for DECL_RTL in certain limited cases. In cases
19009 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19010 we can be sure that the parameter was passed using the same type as it is
19011 declared to have within the function, and that its DECL_INCOMING_RTL
19012 points us to a place where a value of that type is passed.
19013
19014 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19015 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19016 because in these cases DECL_INCOMING_RTL points us to a value of some
19017 type which is *different* from the type of the parameter itself. Thus,
19018 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19019 such cases, the debugger would end up (for example) trying to fetch a
19020 `float' from a place which actually contains the first part of a
19021 `double'. That would lead to really incorrect and confusing
19022 output at debug-time.
19023
19024 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19025 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19026 are a couple of exceptions however. On little-endian machines we can
19027 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19028 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19029 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19030 when (on a little-endian machine) a non-prototyped function has a
19031 parameter declared to be of type `short' or `char'. In such cases,
19032 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19033 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19034 passed `int' value. If the debugger then uses that address to fetch
19035 a `short' or a `char' (on a little-endian machine) the result will be
19036 the correct data, so we allow for such exceptional cases below.
19037
19038 Note that our goal here is to describe the place where the given formal
19039 parameter lives during most of the function's activation (i.e. between the
19040 end of the prologue and the start of the epilogue). We'll do that as best
19041 as we can. Note however that if the given formal parameter is modified
19042 sometime during the execution of the function, then a stack backtrace (at
19043 debug-time) will show the function as having been called with the *new*
19044 value rather than the value which was originally passed in. This happens
19045 rarely enough that it is not a major problem, but it *is* a problem, and
19046 I'd like to fix it.
19047
19048 A future version of dwarf2out.c may generate two additional attributes for
19049 any given DW_TAG_formal_parameter DIE which will describe the "passed
19050 type" and the "passed location" for the given formal parameter in addition
19051 to the attributes we now generate to indicate the "declared type" and the
19052 "active location" for each parameter. This additional set of attributes
19053 could be used by debuggers for stack backtraces. Separately, note that
19054 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19055 This happens (for example) for inlined instances of inline function formal
19056 parameters which are never referenced. This really shouldn't be
19057 happening. All PARM_DECL nodes should get valid non-NULL
19058 DECL_INCOMING_RTL values. FIXME. */
19059
19060 /* Use DECL_RTL as the "location" unless we find something better. */
19061 rtl = DECL_RTL_IF_SET (decl);
19062
19063 /* When generating abstract instances, ignore everything except
19064 constants, symbols living in memory, and symbols living in
19065 fixed registers. */
19066 if (! reload_completed)
19067 {
19068 if (rtl
19069 && (CONSTANT_P (rtl)
19070 || (MEM_P (rtl)
19071 && CONSTANT_P (XEXP (rtl, 0)))
19072 || (REG_P (rtl)
19073 && VAR_P (decl)
19074 && TREE_STATIC (decl))))
19075 {
19076 rtl = targetm.delegitimize_address (rtl);
19077 return rtl;
19078 }
19079 rtl = NULL_RTX;
19080 }
19081 else if (TREE_CODE (decl) == PARM_DECL)
19082 {
19083 if (rtl == NULL_RTX
19084 || is_pseudo_reg (rtl)
19085 || (MEM_P (rtl)
19086 && is_pseudo_reg (XEXP (rtl, 0))
19087 && DECL_INCOMING_RTL (decl)
19088 && MEM_P (DECL_INCOMING_RTL (decl))
19089 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19090 {
19091 tree declared_type = TREE_TYPE (decl);
19092 tree passed_type = DECL_ARG_TYPE (decl);
19093 machine_mode dmode = TYPE_MODE (declared_type);
19094 machine_mode pmode = TYPE_MODE (passed_type);
19095
19096 /* This decl represents a formal parameter which was optimized out.
19097 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19098 all cases where (rtl == NULL_RTX) just below. */
19099 if (dmode == pmode)
19100 rtl = DECL_INCOMING_RTL (decl);
19101 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19102 && SCALAR_INT_MODE_P (dmode)
19103 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
19104 && DECL_INCOMING_RTL (decl))
19105 {
19106 rtx inc = DECL_INCOMING_RTL (decl);
19107 if (REG_P (inc))
19108 rtl = inc;
19109 else if (MEM_P (inc))
19110 {
19111 if (BYTES_BIG_ENDIAN)
19112 rtl = adjust_address_nv (inc, dmode,
19113 GET_MODE_SIZE (pmode)
19114 - GET_MODE_SIZE (dmode));
19115 else
19116 rtl = inc;
19117 }
19118 }
19119 }
19120
19121 /* If the parm was passed in registers, but lives on the stack, then
19122 make a big endian correction if the mode of the type of the
19123 parameter is not the same as the mode of the rtl. */
19124 /* ??? This is the same series of checks that are made in dbxout.c before
19125 we reach the big endian correction code there. It isn't clear if all
19126 of these checks are necessary here, but keeping them all is the safe
19127 thing to do. */
19128 else if (MEM_P (rtl)
19129 && XEXP (rtl, 0) != const0_rtx
19130 && ! CONSTANT_P (XEXP (rtl, 0))
19131 /* Not passed in memory. */
19132 && !MEM_P (DECL_INCOMING_RTL (decl))
19133 /* Not passed by invisible reference. */
19134 && (!REG_P (XEXP (rtl, 0))
19135 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19136 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19137 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19138 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19139 #endif
19140 )
19141 /* Big endian correction check. */
19142 && BYTES_BIG_ENDIAN
19143 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19144 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
19145 < UNITS_PER_WORD))
19146 {
19147 machine_mode addr_mode = get_address_mode (rtl);
19148 int offset = (UNITS_PER_WORD
19149 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19150
19151 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19152 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19153 }
19154 }
19155 else if (VAR_P (decl)
19156 && rtl
19157 && MEM_P (rtl)
19158 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19159 {
19160 machine_mode addr_mode = get_address_mode (rtl);
19161 HOST_WIDE_INT offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19162 GET_MODE (rtl));
19163
19164 /* If a variable is declared "register" yet is smaller than
19165 a register, then if we store the variable to memory, it
19166 looks like we're storing a register-sized value, when in
19167 fact we are not. We need to adjust the offset of the
19168 storage location to reflect the actual value's bytes,
19169 else gdb will not be able to display it. */
19170 if (offset != 0)
19171 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19172 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19173 }
19174
19175 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19176 and will have been substituted directly into all expressions that use it.
19177 C does not have such a concept, but C++ and other languages do. */
19178 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19179 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19180
19181 if (rtl)
19182 rtl = targetm.delegitimize_address (rtl);
19183
19184 /* If we don't look past the constant pool, we risk emitting a
19185 reference to a constant pool entry that isn't referenced from
19186 code, and thus is not emitted. */
19187 if (rtl)
19188 rtl = avoid_constant_pool_reference (rtl);
19189
19190 /* Try harder to get a rtl. If this symbol ends up not being emitted
19191 in the current CU, resolve_addr will remove the expression referencing
19192 it. */
19193 if (rtl == NULL_RTX
19194 && VAR_P (decl)
19195 && !DECL_EXTERNAL (decl)
19196 && TREE_STATIC (decl)
19197 && DECL_NAME (decl)
19198 && !DECL_HARD_REGISTER (decl)
19199 && DECL_MODE (decl) != VOIDmode)
19200 {
19201 rtl = make_decl_rtl_for_debug (decl);
19202 if (!MEM_P (rtl)
19203 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19204 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19205 rtl = NULL_RTX;
19206 }
19207
19208 return rtl;
19209 }
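
/* Illustrative sketch (hypothetical RTL; real registers and offsets are
   target-specific): assume a big-endian target on which a `short'
   argument (2-byte HImode) is promoted and passed in a 4-byte SImode
   stack slot.  For

     short f (short x) { return x; }

   the MEM case above offsets the incoming location by
   GET_MODE_SIZE (pmode) - GET_MODE_SIZE (dmode) = 4 - 2 = 2 bytes, so
   that the emitted DW_AT_location points at the two significant bytes
   of the slot rather than at the padding.  */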
19210
19211 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19212 returned. If so, the decl for the COMMON block is returned, and the
19213 value is the offset into the common block for the symbol. */
19214
19215 static tree
19216 fortran_common (tree decl, HOST_WIDE_INT *value)
19217 {
19218 tree val_expr, cvar;
19219 machine_mode mode;
19220 HOST_WIDE_INT bitsize, bitpos;
19221 tree offset;
19222 int unsignedp, reversep, volatilep = 0;
19223
19224 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it
19225 does not have a value (the offset into the common area), or if the
19226 language isn't Fortran, then it isn't a COMMON symbol and shouldn't
19227 be handled as such. */
19228 if (!VAR_P (decl)
19229 || !TREE_STATIC (decl)
19230 || !DECL_HAS_VALUE_EXPR_P (decl)
19231 || !is_fortran ())
19232 return NULL_TREE;
19233
19234 val_expr = DECL_VALUE_EXPR (decl);
19235 if (TREE_CODE (val_expr) != COMPONENT_REF)
19236 return NULL_TREE;
19237
19238 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19239 &unsignedp, &reversep, &volatilep);
19240
19241 if (cvar == NULL_TREE
19242 || !VAR_P (cvar)
19243 || DECL_ARTIFICIAL (cvar)
19244 || !TREE_PUBLIC (cvar))
19245 return NULL_TREE;
19246
19247 *value = 0;
19248 if (offset != NULL)
19249 {
19250 if (!tree_fits_shwi_p (offset))
19251 return NULL_TREE;
19252 *value = tree_to_shwi (offset);
19253 }
19254 if (bitpos != 0)
19255 *value += bitpos / BITS_PER_UNIT;
19256
19257 return cvar;
19258 }
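
/* Illustrative sketch (hypothetical offsets): for Fortran source along
   the lines of

     INTEGER I, J
     COMMON /BLK/ I, J

   the decl for J carries a DECL_VALUE_EXPR that is a COMPONENT_REF into
   the variable representing the /BLK/ block; fortran_common returns
   that block's decl and sets *value to J's byte offset within it
   (4 with 4-byte INTEGERs).  */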
19259
19260 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19261 data attribute for a variable or a parameter. We generate the
19262 DW_AT_const_value attribute only in those cases where the given variable
19263 or parameter does not have a true "location" either in memory or in a
19264 register. This can happen (for example) when a constant is passed as an
19265 actual argument in a call to an inline function. (It's possible that
19266 these things can crop up in other ways also.) Note that one type of
19267 constant value which can be passed into an inlined function is a constant
19268 pointer. This can happen for example if an actual argument in an inlined
19269 function call evaluates to a compile-time constant address.
19270
19271 CACHE_P is true if it is worth caching the location list for DECL,
19272 so that future calls can reuse it rather than regenerate it from scratch.
19273 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19274 since we will need to refer to them each time the function is inlined. */
19275
19276 static bool
19277 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19278 {
19279 rtx rtl;
19280 dw_loc_list_ref list;
19281 var_loc_list *loc_list;
19282 cached_dw_loc_list *cache;
19283
19284 if (early_dwarf)
19285 return false;
19286
19287 if (TREE_CODE (decl) == ERROR_MARK)
19288 return false;
19289
19290 if (get_AT (die, DW_AT_location)
19291 || get_AT (die, DW_AT_const_value))
19292 return true;
19293
19294 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19295 || TREE_CODE (decl) == RESULT_DECL);
19296
19297 /* Try to get some constant RTL for this decl, and use that as the value of
19298 the location. */
19299
19300 rtl = rtl_for_decl_location (decl);
19301 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19302 && add_const_value_attribute (die, rtl))
19303 return true;
19304
19305 /* See if we have a single-element location list that is equivalent to
19306 a constant value. In that case it is better to use add_const_value_attribute
19307 rather than expanding the constant-value equivalent. */
19308 loc_list = lookup_decl_loc (decl);
19309 if (loc_list
19310 && loc_list->first
19311 && loc_list->first->next == NULL
19312 && NOTE_P (loc_list->first->loc)
19313 && NOTE_VAR_LOCATION (loc_list->first->loc)
19314 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19315 {
19316 struct var_loc_node *node;
19317
19318 node = loc_list->first;
19319 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19320 if (GET_CODE (rtl) == EXPR_LIST)
19321 rtl = XEXP (rtl, 0);
19322 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19323 && add_const_value_attribute (die, rtl))
19324 return true;
19325 }
19326 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19327 list several times. See if we've already cached the contents. */
19328 list = NULL;
19329 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19330 cache_p = false;
19331 if (cache_p)
19332 {
19333 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19334 if (cache)
19335 list = cache->loc_list;
19336 }
19337 if (list == NULL)
19338 {
19339 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19340 NULL);
19341 /* It is usually worth caching this result if the decl is from
19342 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19343 if (cache_p && list && list->dw_loc_next)
19344 {
19345 cached_dw_loc_list **slot
19346 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19347 DECL_UID (decl),
19348 INSERT);
19349 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19350 cache->decl_id = DECL_UID (decl);
19351 cache->loc_list = list;
19352 *slot = cache;
19353 }
19354 }
19355 if (list)
19356 {
19357 add_AT_location_description (die, DW_AT_location, list);
19358 return true;
19359 }
19360 /* None of that worked, so it must not really have a location;
19361 try adding a constant value attribute from the DECL_INITIAL. */
19362 return tree_add_const_value_attribute_for_decl (die, decl);
19363 }
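
/* Illustrative sketch (hypothetical source): after a call such as

     static inline int twice (int n) { return n + n; }
     int g (void) { return twice (21); }

   is inlined, the inlined copy of the formal `n' may have no run-time
   location at all; the routine above then tries the constant paths
   first and can end up attaching DW_AT_const_value 21 instead of a
   DW_AT_location to n's DIE.  */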
19364
19365 /* Helper function for tree_add_const_value_attribute. Natively encode
19366 initializer INIT into an array. Return true if successful. */
19367
19368 static bool
19369 native_encode_initializer (tree init, unsigned char *array, int size)
19370 {
19371 tree type;
19372
19373 if (init == NULL_TREE)
19374 return false;
19375
19376 STRIP_NOPS (init);
19377 switch (TREE_CODE (init))
19378 {
19379 case STRING_CST:
19380 type = TREE_TYPE (init);
19381 if (TREE_CODE (type) == ARRAY_TYPE)
19382 {
19383 tree enttype = TREE_TYPE (type);
19384 scalar_int_mode mode;
19385
19386 if (!is_int_mode (TYPE_MODE (enttype), &mode)
19387 || GET_MODE_SIZE (mode) != 1)
19388 return false;
19389 if (int_size_in_bytes (type) != size)
19390 return false;
19391 if (size > TREE_STRING_LENGTH (init))
19392 {
19393 memcpy (array, TREE_STRING_POINTER (init),
19394 TREE_STRING_LENGTH (init));
19395 memset (array + TREE_STRING_LENGTH (init),
19396 '\0', size - TREE_STRING_LENGTH (init));
19397 }
19398 else
19399 memcpy (array, TREE_STRING_POINTER (init), size);
19400 return true;
19401 }
19402 return false;
19403 case CONSTRUCTOR:
19404 type = TREE_TYPE (init);
19405 if (int_size_in_bytes (type) != size)
19406 return false;
19407 if (TREE_CODE (type) == ARRAY_TYPE)
19408 {
19409 HOST_WIDE_INT min_index;
19410 unsigned HOST_WIDE_INT cnt;
19411 int curpos = 0, fieldsize;
19412 constructor_elt *ce;
19413
19414 if (TYPE_DOMAIN (type) == NULL_TREE
19415 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19416 return false;
19417
19418 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19419 if (fieldsize <= 0)
19420 return false;
19421
19422 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19423 memset (array, '\0', size);
19424 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19425 {
19426 tree val = ce->value;
19427 tree index = ce->index;
19428 int pos = curpos;
19429 if (index && TREE_CODE (index) == RANGE_EXPR)
19430 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19431 * fieldsize;
19432 else if (index)
19433 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19434
19435 if (val)
19436 {
19437 STRIP_NOPS (val);
19438 if (!native_encode_initializer (val, array + pos, fieldsize))
19439 return false;
19440 }
19441 curpos = pos + fieldsize;
19442 if (index && TREE_CODE (index) == RANGE_EXPR)
19443 {
19444 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19445 - tree_to_shwi (TREE_OPERAND (index, 0));
19446 while (count-- > 0)
19447 {
19448 if (val)
19449 memcpy (array + curpos, array + pos, fieldsize);
19450 curpos += fieldsize;
19451 }
19452 }
19453 gcc_assert (curpos <= size);
19454 }
19455 return true;
19456 }
19457 else if (TREE_CODE (type) == RECORD_TYPE
19458 || TREE_CODE (type) == UNION_TYPE)
19459 {
19460 tree field = NULL_TREE;
19461 unsigned HOST_WIDE_INT cnt;
19462 constructor_elt *ce;
19463
19464 if (int_size_in_bytes (type) != size)
19465 return false;
19466
19467 if (TREE_CODE (type) == RECORD_TYPE)
19468 field = TYPE_FIELDS (type);
19469
19470 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19471 {
19472 tree val = ce->value;
19473 int pos, fieldsize;
19474
19475 if (ce->index != 0)
19476 field = ce->index;
19477
19478 if (val)
19479 STRIP_NOPS (val);
19480
19481 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19482 return false;
19483
19484 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19485 && TYPE_DOMAIN (TREE_TYPE (field))
19486 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19487 return false;
19488 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19489 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19490 return false;
19491 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19492 pos = int_byte_position (field);
19493 gcc_assert (pos + fieldsize <= size);
19494 if (val && fieldsize != 0
19495 && !native_encode_initializer (val, array + pos, fieldsize))
19496 return false;
19497 }
19498 return true;
19499 }
19500 return false;
19501 case VIEW_CONVERT_EXPR:
19502 case NON_LVALUE_EXPR:
19503 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19504 default:
19505 return native_encode_expr (init, array, size) == size;
19506 }
19507 }
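
/* Illustrative sketch (GNU C designated initializers): for

     static const unsigned char tab[4] = { [0 ... 2] = 7, [3] = 9 };

   native_encode_initializer fills the caller-supplied 4-byte buffer
   with { 7, 7, 7, 9 }: the RANGE_EXPR index [0 ... 2] encodes element
   0 once and then the loop above copies that byte forward for the two
   remaining positions in the range.  */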
19508
19509 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19510 attribute is the const value T. */
19511
19512 static bool
19513 tree_add_const_value_attribute (dw_die_ref die, tree t)
19514 {
19515 tree init;
19516 tree type = TREE_TYPE (t);
19517 rtx rtl;
19518
19519 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19520 return false;
19521
19522 init = t;
19523 gcc_assert (!DECL_P (init));
19524
19525 if (TREE_CODE (init) == INTEGER_CST)
19526 {
19527 if (tree_fits_uhwi_p (init))
19528 {
19529 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
19530 return true;
19531 }
19532 if (tree_fits_shwi_p (init))
19533 {
19534 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
19535 return true;
19536 }
19537 }
19538 if (! early_dwarf)
19539 {
19540 rtl = rtl_for_decl_init (init, type);
19541 if (rtl)
19542 return add_const_value_attribute (die, rtl);
19543 }
19544 /* If the host and target are sane, try harder. */
19545 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19546 && initializer_constant_valid_p (init, type))
19547 {
19548 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19549 if (size > 0 && (int) size == size)
19550 {
19551 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19552
19553 if (native_encode_initializer (init, array, size))
19554 {
19555 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19556 return true;
19557 }
19558 ggc_free (array);
19559 }
19560 }
19561 return false;
19562 }
19563
19564 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19565 attribute is the const value of T, where T is an integral constant
19566 variable with static storage duration
19567 (so it can't be a PARM_DECL or a RESULT_DECL). */
19568
19569 static bool
19570 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19571 {
19572
19573 if (!decl
19574 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19575 || (VAR_P (decl) && !TREE_STATIC (decl)))
19576 return false;
19577
19578 if (TREE_READONLY (decl)
19579 && ! TREE_THIS_VOLATILE (decl)
19580 && DECL_INITIAL (decl))
19581 /* OK */;
19582 else
19583 return false;
19584
19585 /* Don't add DW_AT_const_value if abstract origin already has one. */
19586 if (get_AT (var_die, DW_AT_const_value))
19587 return false;
19588
19589 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19590 }
19591
19592 /* Convert the CFI instructions for the current function into a
19593 location list. This is used for DW_AT_frame_base when we are targeting
19594 a dwarf2 consumer that does not support the dwarf3
19595 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19596 expressions. */
19597
19598 static dw_loc_list_ref
19599 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19600 {
19601 int ix;
19602 dw_fde_ref fde;
19603 dw_loc_list_ref list, *list_tail;
19604 dw_cfi_ref cfi;
19605 dw_cfa_location last_cfa, next_cfa;
19606 const char *start_label, *last_label, *section;
19607 dw_cfa_location remember;
19608
19609 fde = cfun->fde;
19610 gcc_assert (fde != NULL);
19611
19612 section = secname_for_decl (current_function_decl);
19613 list_tail = &list;
19614 list = NULL;
19615
19616 memset (&next_cfa, 0, sizeof (next_cfa));
19617 next_cfa.reg = INVALID_REGNUM;
19618 remember = next_cfa;
19619
19620 start_label = fde->dw_fde_begin;
19621
19622 /* ??? Bald assumption that the CIE opcode list does not contain
19623 advance opcodes. */
19624 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19625 lookup_cfa_1 (cfi, &next_cfa, &remember);
19626
19627 last_cfa = next_cfa;
19628 last_label = start_label;
19629
19630 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19631 {
19632 /* If the first partition contained no CFI adjustments, the
19633 CIE opcodes apply to the whole first partition. */
19634 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19635 fde->dw_fde_begin, fde->dw_fde_end, section);
19636 list_tail = &(*list_tail)->dw_loc_next;
19637 start_label = last_label = fde->dw_fde_second_begin;
19638 }
19639
19640 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19641 {
19642 switch (cfi->dw_cfi_opc)
19643 {
19644 case DW_CFA_set_loc:
19645 case DW_CFA_advance_loc1:
19646 case DW_CFA_advance_loc2:
19647 case DW_CFA_advance_loc4:
19648 if (!cfa_equal_p (&last_cfa, &next_cfa))
19649 {
19650 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19651 start_label, last_label, section);
19652
19653 list_tail = &(*list_tail)->dw_loc_next;
19654 last_cfa = next_cfa;
19655 start_label = last_label;
19656 }
19657 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19658 break;
19659
19660 case DW_CFA_advance_loc:
19661 /* The encoding is complex enough that we should never emit this. */
19662 gcc_unreachable ();
19663
19664 default:
19665 lookup_cfa_1 (cfi, &next_cfa, &remember);
19666 break;
19667 }
19668 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19669 {
19670 if (!cfa_equal_p (&last_cfa, &next_cfa))
19671 {
19672 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19673 start_label, last_label, section);
19674
19675 list_tail = &(*list_tail)->dw_loc_next;
19676 last_cfa = next_cfa;
19677 start_label = last_label;
19678 }
19679 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19680 start_label, fde->dw_fde_end, section);
19681 list_tail = &(*list_tail)->dw_loc_next;
19682 start_label = last_label = fde->dw_fde_second_begin;
19683 }
19684 }
19685
19686 if (!cfa_equal_p (&last_cfa, &next_cfa))
19687 {
19688 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19689 start_label, last_label, section);
19690 list_tail = &(*list_tail)->dw_loc_next;
19691 start_label = last_label;
19692 }
19693
19694 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19695 start_label,
19696 fde->dw_fde_second_begin
19697 ? fde->dw_fde_second_end : fde->dw_fde_end,
19698 section);
19699
19700 if (list && list->dw_loc_next)
19701 gen_llsym (list);
19702
19703 return list;
19704 }
19705
19706 /* Compute a displacement from the "steady-state frame pointer" to the
19707 frame base (often the same as the CFA), and store it in
19708 frame_pointer_fb_offset. OFFSET is added to the displacement
19709 before the latter is negated. */
19710
19711 static void
19712 compute_frame_pointer_to_fb_displacement (HOST_WIDE_INT offset)
19713 {
19714 rtx reg, elim;
19715
19716 #ifdef FRAME_POINTER_CFA_OFFSET
19717 reg = frame_pointer_rtx;
19718 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19719 #else
19720 reg = arg_pointer_rtx;
19721 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19722 #endif
19723
19724 elim = (ira_use_lra_p
19725 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19726 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19727 if (GET_CODE (elim) == PLUS)
19728 {
19729 offset += INTVAL (XEXP (elim, 1));
19730 elim = XEXP (elim, 0);
19731 }
19732
19733 frame_pointer_fb_offset = -offset;
19734
19735 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19736 in which to eliminate. This is because its stack pointer isn't
19737 directly accessible as a register within the ISA. To work around
19738 this, assume that while we cannot provide a proper value for
19739 frame_pointer_fb_offset, we won't need one either. */
19740 frame_pointer_fb_offset_valid
19741 = ((SUPPORTS_STACK_ALIGNMENT
19742 && (elim == hard_frame_pointer_rtx
19743 || elim == stack_pointer_rtx))
19744 || elim == (frame_pointer_needed
19745 ? hard_frame_pointer_rtx
19746 : stack_pointer_rtx));
19747 }
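
/* Illustrative sketch (hypothetical numbers): with OFFSET == 0 and an
   ARG_POINTER_CFA_OFFSET of 0, if the argument pointer eliminates to
   (plus (reg hard_frame_pointer) (const_int 16)) then the code above
   accumulates offset == 16 and records frame_pointer_fb_offset == -16
   (and, if the function uses a frame pointer, marks the result as
   valid).  */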
19748
19749 /* Generate a DW_AT_name attribute given some string value to be included as
19750 the value of the attribute. */
19751
19752 static void
19753 add_name_attribute (dw_die_ref die, const char *name_string)
19754 {
19755 if (name_string != NULL && *name_string != 0)
19756 {
19757 if (demangle_name_func)
19758 name_string = (*demangle_name_func) (name_string);
19759
19760 add_AT_string (die, DW_AT_name, name_string);
19761 }
19762 }
19763
19764 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
19765 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19766 of TYPE accordingly.
19767
19768 ??? This is a temporary measure until after we're able to generate
19769 regular DWARF for the complex Ada type system. */
19770
19771 static void
19772 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19773 dw_die_ref context_die)
19774 {
19775 tree dtype;
19776 dw_die_ref dtype_die;
19777
19778 if (!lang_hooks.types.descriptive_type)
19779 return;
19780
19781 dtype = lang_hooks.types.descriptive_type (type);
19782 if (!dtype)
19783 return;
19784
19785 dtype_die = lookup_type_die (dtype);
19786 if (!dtype_die)
19787 {
19788 gen_type_die (dtype, context_die);
19789 dtype_die = lookup_type_die (dtype);
19790 gcc_assert (dtype_die);
19791 }
19792
19793 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19794 }
19795
19796 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19797
19798 static const char *
19799 comp_dir_string (void)
19800 {
19801 const char *wd;
19802 char *wd1;
19803 static const char *cached_wd = NULL;
19804
19805 if (cached_wd != NULL)
19806 return cached_wd;
19807
19808 wd = get_src_pwd ();
19809 if (wd == NULL)
19810 return NULL;
19811
19812 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19813 {
19814 int wdlen;
19815
19816 wdlen = strlen (wd);
19817 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19818 strcpy (wd1, wd);
19819 wd1 [wdlen] = DIR_SEPARATOR;
19820 wd1 [wdlen + 1] = 0;
19821 wd = wd1;
19822 }
19823
19824 cached_wd = remap_debug_filename (wd);
19825 return cached_wd;
19826 }
19827
19828 /* Generate a DW_AT_comp_dir attribute for DIE. */
19829
19830 static void
19831 add_comp_dir_attribute (dw_die_ref die)
19832 {
19833 const char * wd = comp_dir_string ();
19834 if (wd != NULL)
19835 add_AT_string (die, DW_AT_comp_dir, wd);
19836 }
19837
19838 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19839 pointer computation, ...), output a representation for that bound according
19840 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19841 loc_list_from_tree for the meaning of CONTEXT. */
19842
19843 static void
19844 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19845 int forms, struct loc_descr_context *context)
19846 {
19847 dw_die_ref context_die, decl_die;
19848 dw_loc_list_ref list;
19849 bool strip_conversions = true;
19850 bool placeholder_seen = false;
19851
19852 while (strip_conversions)
19853 switch (TREE_CODE (value))
19854 {
19855 case ERROR_MARK:
19856 case SAVE_EXPR:
19857 return;
19858
19859 CASE_CONVERT:
19860 case VIEW_CONVERT_EXPR:
19861 value = TREE_OPERAND (value, 0);
19862 break;
19863
19864 default:
19865 strip_conversions = false;
19866 break;
19867 }
19868
19869 /* If possible and permitted, output the attribute as a constant. */
19870 if ((forms & dw_scalar_form_constant) != 0
19871 && TREE_CODE (value) == INTEGER_CST)
19872 {
19873 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19874
19875 /* If HOST_WIDE_INT is big enough then represent the bound as
19876 a constant value. We need to choose a form based on
19877 whether the type is signed or unsigned. We cannot just
19878 call add_AT_unsigned if the value itself is positive
19879 (add_AT_unsigned might add the unsigned value encoded as
19880 DW_FORM_data[1248]). Some DWARF consumers will look up the
19881 bounds type and then sign extend any unsigned values found
19882 for signed types. This is needed only for
19883 DW_AT_{lower,upper}_bound, since for most other attributes,
19884 consumers will treat DW_FORM_data[1248] as unsigned values,
19885 regardless of the underlying type. */
19886 if (prec <= HOST_BITS_PER_WIDE_INT
19887 || tree_fits_uhwi_p (value))
19888 {
19889 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19890 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19891 else
19892 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19893 }
19894 else
19895 /* Otherwise represent the bound as an unsigned value with
19896 the precision of its type. The precision and signedness
19897 of the type will be necessary to re-interpret it
19898 unambiguously. */
19899 add_AT_wide (die, attr, wi::to_wide (value));
19900 return;
19901 }
19902
19903 /* Otherwise, if it's possible and also permitted, output a reference to
19904 another DIE. */
19905 if ((forms & dw_scalar_form_reference) != 0)
19906 {
19907 tree decl = NULL_TREE;
19908
19909 /* Some type attributes reference an outer type. For instance, the upper
19910 bound of an array may reference an embedding record (this happens in
19911 Ada). */
19912 if (TREE_CODE (value) == COMPONENT_REF
19913 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
19914 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
19915 decl = TREE_OPERAND (value, 1);
19916
19917 else if (VAR_P (value)
19918 || TREE_CODE (value) == PARM_DECL
19919 || TREE_CODE (value) == RESULT_DECL)
19920 decl = value;
19921
19922 if (decl != NULL_TREE)
19923 {
19924 dw_die_ref decl_die = lookup_decl_die (decl);
19925
19926 /* ??? Can this happen, or should the variable have been bound
19927 first? Probably it can, since I imagine that we try to create
19928 the types of parameters in the order in which they exist in
19929 the list, and won't have created a forward reference to a
19930 later parameter. */
19931 if (decl_die != NULL)
19932 {
19933 add_AT_die_ref (die, attr, decl_die);
19934 return;
19935 }
19936 }
19937 }
19938
19939 /* Last chance: try to create a stack operation procedure to evaluate the
19940 value. Do nothing if even that is not possible or permitted. */
19941 if ((forms & dw_scalar_form_exprloc) == 0)
19942 return;
19943
19944 list = loc_list_from_tree (value, 2, context);
19945 if (context && context->placeholder_arg)
19946 {
19947 placeholder_seen = context->placeholder_seen;
19948 context->placeholder_seen = false;
19949 }
19950 if (list == NULL || single_element_loc_list_p (list))
19951 {
19952 /* If this attribute is neither a reference nor a constant, it is
19953 a DWARF expression rather than a location description. For that,
19954 loc_list_from_tree (value, 0, &context) is needed. */
19955 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
19956 if (list2 && single_element_loc_list_p (list2))
19957 {
19958 if (placeholder_seen)
19959 {
19960 struct dwarf_procedure_info dpi;
19961 dpi.fndecl = NULL_TREE;
19962 dpi.args_count = 1;
19963 if (!resolve_args_picking (list2->expr, 1, &dpi))
19964 return;
19965 }
19966 add_AT_loc (die, attr, list2->expr);
19967 return;
19968 }
19969 }
19970
19971 /* If that failed to give a single-element location list, fall back to
19972 outputting this as a reference, if that is still permitted. */
19973 if (list == NULL
19974 || (forms & dw_scalar_form_reference) == 0
19975 || placeholder_seen)
19976 return;
19977
19978 if (current_function_decl == 0)
19979 context_die = comp_unit_die ();
19980 else
19981 context_die = lookup_decl_die (current_function_decl);
19982
19983 decl_die = new_die (DW_TAG_variable, context_die, value);
19984 add_AT_flag (decl_die, DW_AT_artificial, 1);
19985 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
19986 context_die);
19987 add_AT_location_description (decl_die, DW_AT_location, list);
19988 add_AT_die_ref (die, attr, decl_die);
19989 }
19990
19991 /* Return the default for DW_AT_lower_bound, or -1 if there is no
19992 default. */
19993
19994 static int
19995 lower_bound_default (void)
19996 {
19997 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
19998 {
19999 case DW_LANG_C:
20000 case DW_LANG_C89:
20001 case DW_LANG_C99:
20002 case DW_LANG_C11:
20003 case DW_LANG_C_plus_plus:
20004 case DW_LANG_C_plus_plus_11:
20005 case DW_LANG_C_plus_plus_14:
20006 case DW_LANG_ObjC:
20007 case DW_LANG_ObjC_plus_plus:
20008 return 0;
20009 case DW_LANG_Fortran77:
20010 case DW_LANG_Fortran90:
20011 case DW_LANG_Fortran95:
20012 case DW_LANG_Fortran03:
20013 case DW_LANG_Fortran08:
20014 return 1;
20015 case DW_LANG_UPC:
20016 case DW_LANG_D:
20017 case DW_LANG_Python:
20018 return dwarf_version >= 4 ? 0 : -1;
20019 case DW_LANG_Ada95:
20020 case DW_LANG_Ada83:
20021 case DW_LANG_Cobol74:
20022 case DW_LANG_Cobol85:
20023 case DW_LANG_Modula2:
20024 case DW_LANG_PLI:
20025 return dwarf_version >= 4 ? 1 : -1;
20026 default:
20027 return -1;
20028 }
20029 }
20030
20031 /* Given a tree node describing an array bound (either lower or upper) output
20032 a representation for that bound. */
20033
20034 static void
20035 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20036 tree bound, struct loc_descr_context *context)
20037 {
20038 int dflt;
20039
20040 while (1)
20041 switch (TREE_CODE (bound))
20042 {
20043 /* Strip all conversions. */
20044 CASE_CONVERT:
20045 case VIEW_CONVERT_EXPR:
20046 bound = TREE_OPERAND (bound, 0);
20047 break;
20048
20049 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20050 are even omitted when they are the default. */
20051 case INTEGER_CST:
20052 /* If the value for this bound is the default one, we can even omit the
20053 attribute. */
20054 if (bound_attr == DW_AT_lower_bound
20055 && tree_fits_shwi_p (bound)
20056 && (dflt = lower_bound_default ()) != -1
20057 && tree_to_shwi (bound) == dflt)
20058 return;
20059
20060 /* FALLTHRU */
20061
20062 default:
20063 /* Because of the complex interactions there can be with other GNAT
20064 encodings, GDB isn't ready yet to handle a proper DWARF description
20065 for self-referential subrange bounds: let GNAT encodings do the
20066 magic in such a case. */
20067 if (is_ada ()
20068 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20069 && contains_placeholder_p (bound))
20070 return;
20071
20072 add_scalar_info (subrange_die, bound_attr, bound,
20073 dw_scalar_form_constant
20074 | dw_scalar_form_exprloc
20075 | dw_scalar_form_reference,
20076 context);
20077 return;
20078 }
20079 }
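
/* Illustrative sketch: for a C declaration `int a[4]' the subrange
   bounds are the INTEGER_CSTs 0 and 3.  Since 0 is the default lower
   bound for the C family (see lower_bound_default above), only
   DW_AT_upper_bound 3 needs to be emitted on the
   DW_TAG_subrange_type.  */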
20080
20081 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20082 possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
20083 Note that the block of subscript information for an array type also
20084 includes information about the element type of the given array type.
20085
20086 This function reuses previously set type and bound information if
20087 available. */
20088
20089 static void
20090 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20091 {
20092 unsigned dimension_number;
20093 tree lower, upper;
20094 dw_die_ref child = type_die->die_child;
20095
20096 for (dimension_number = 0;
20097 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20098 type = TREE_TYPE (type), dimension_number++)
20099 {
20100 tree domain = TYPE_DOMAIN (type);
20101
20102 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20103 break;
20104
20105 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20106 and (in GNU C only) variable bounds. Handle all three forms
20107 here. */
20108
20109 /* Find and reuse a previously generated DW_TAG_subrange_type if
20110 available.
20111
20112 For multi-dimensional arrays, as we iterate through the
20113 various dimensions in the enclosing for loop above, we also
20114 iterate through the DIE children and pick at each
20115 DW_TAG_subrange_type previously generated (if available).
20116 Each child DW_TAG_subrange_type DIE describes the range of
20117 the current dimension. At this point we should have as many
20118 DW_TAG_subrange_type's as we have dimensions in the
20119 array. */
20120 dw_die_ref subrange_die = NULL;
20121 if (child)
20122 while (1)
20123 {
20124 child = child->die_sib;
20125 if (child->die_tag == DW_TAG_subrange_type)
20126 subrange_die = child;
20127 if (child == type_die->die_child)
20128 {
20129 /* If we wrapped around, stop looking next time. */
20130 child = NULL;
20131 break;
20132 }
20133 if (child->die_tag == DW_TAG_subrange_type)
20134 break;
20135 }
20136 if (!subrange_die)
20137 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20138
20139 if (domain)
20140 {
20141 /* We have an array type with specified bounds. */
20142 lower = TYPE_MIN_VALUE (domain);
20143 upper = TYPE_MAX_VALUE (domain);
20144
20145 /* Define the index type. */
20146 if (TREE_TYPE (domain)
20147 && !get_AT (subrange_die, DW_AT_type))
20148 {
20149 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20150 TREE_TYPE field. We can't emit debug info for this
20151 because it is an unnamed integral type. */
20152 if (TREE_CODE (domain) == INTEGER_TYPE
20153 && TYPE_NAME (domain) == NULL_TREE
20154 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20155 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20156 ;
20157 else
20158 add_type_attribute (subrange_die, TREE_TYPE (domain),
20159 TYPE_UNQUALIFIED, false, type_die);
20160 }
20161
20162 /* ??? If upper is NULL, the array has unspecified length,
20163 but it does have a lower bound. This happens with Fortran
20164 dimension arr(N:*)
20165 Since the debugger is definitely going to need to know N
20166 to produce useful results, go ahead and output the lower
20167 bound solo, and hope the debugger can cope. */
20168
20169 if (!get_AT (subrange_die, DW_AT_lower_bound))
20170 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20171 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20172 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20173 }
20174
20175 /* Otherwise we have an array type with an unspecified length. The
20176 DWARF-2 spec does not say how to handle this; let's just leave out the
20177 bounds. */
20178 }
20179 }
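
/* Illustrative sketch: with COLLAPSE_P true, a C declaration such as

     int m[2][3];

   yields one DW_TAG_array_type DIE with two DW_TAG_subrange_type
   children whose upper bounds are 1 and 2 respectively; the 0 lower
   bounds are the C default and are omitted (see add_bound_info
   above).  */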
20180
20181 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20182
20183 static void
20184 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20185 {
20186 dw_die_ref decl_die;
20187 HOST_WIDE_INT size;
20188 dw_loc_descr_ref size_expr = NULL;
20189
20190 switch (TREE_CODE (tree_node))
20191 {
20192 case ERROR_MARK:
20193 size = 0;
20194 break;
20195 case ENUMERAL_TYPE:
20196 case RECORD_TYPE:
20197 case UNION_TYPE:
20198 case QUAL_UNION_TYPE:
20199 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20200 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20201 {
20202 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20203 return;
20204 }
20205 size_expr = type_byte_size (tree_node, &size);
20206 break;
20207 case FIELD_DECL:
20208 /* For a data member of a struct or union, the DW_AT_byte_size is
20209 generally given as the number of bytes normally allocated for an
20210 object of the *declared* type of the member itself. This is true
20211 even for bit-fields. */
20212 size = int_size_in_bytes (field_type (tree_node));
20213 break;
20214 default:
20215 gcc_unreachable ();
20216 }
20217
20218 /* Support for dynamically-sized objects was introduced by DWARFv3.
20219 At the moment, GDB does not handle variable byte sizes very well,
20220 though. */
20221 if ((dwarf_version >= 3 || !dwarf_strict)
20222 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20223 && size_expr != NULL)
20224 add_AT_loc (die, DW_AT_byte_size, size_expr);
20225
20226 /* Note that `size' might be -1 when we get to this point. If it is, that
20227 indicates that the byte size of the entity in question is variable and
20228 that we could not generate a DWARF expression that computes it. */
20229 if (size >= 0)
20230 add_AT_unsigned (die, DW_AT_byte_size, size);
20231 }
20232
20233 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20234 alignment. */
20235
20236 static void
20237 add_alignment_attribute (dw_die_ref die, tree tree_node)
20238 {
20239 if (dwarf_version < 5 && dwarf_strict)
20240 return;
20241
20242 unsigned align;
20243
20244 if (DECL_P (tree_node))
20245 {
20246 if (!DECL_USER_ALIGN (tree_node))
20247 return;
20248
20249 align = DECL_ALIGN_UNIT (tree_node);
20250 }
20251 else if (TYPE_P (tree_node))
20252 {
20253 if (!TYPE_USER_ALIGN (tree_node))
20254 return;
20255
20256 align = TYPE_ALIGN_UNIT (tree_node);
20257 }
20258 else
20259 gcc_unreachable ();
20260
20261 add_AT_unsigned (die, DW_AT_alignment, align);
20262 }
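
/* Illustrative sketch: for a type that carries an explicit alignment
   request, e.g.

     struct pkt { char buf[64]; } __attribute__ ((aligned (16)));

   TYPE_USER_ALIGN is set and the routine above adds DW_AT_alignment 16;
   declarations and types that only have their default ABI alignment
   get no attribute at all.  */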
20263
20264 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20265 which specifies the distance in bits from the highest order bit of the
20266 "containing object" for the bit-field to the highest order bit of the
20267 bit-field itself.
20268
20269 For any given bit-field, the "containing object" is a hypothetical object
20270 (of some integral or enum type) within which the given bit-field lives. The
20271 type of this hypothetical "containing object" is always the same as the
20272 declared type of the individual bit-field itself. The determination of the
20273 exact location of the "containing object" for a bit-field is rather
20274 complicated. It's handled by the `field_byte_offset' function (above).
20275
20276 CTX is required: see the comment for VLR_CONTEXT.
20277
20278 Note that it is the size (in bytes) of the hypothetical "containing object"
20279 which will be given in the DW_AT_byte_size attribute for this bit-field.
20280 (See `byte_size_attribute' above). */
20281
20282 static inline void
20283 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20284 {
20285 HOST_WIDE_INT object_offset_in_bytes;
20286 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20287 HOST_WIDE_INT bitpos_int;
20288 HOST_WIDE_INT highest_order_object_bit_offset;
20289 HOST_WIDE_INT highest_order_field_bit_offset;
20290 HOST_WIDE_INT bit_offset;
20291
20292 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20293
20294 /* Must be a field and a bit field. */
20295 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20296
20297 /* We can't yet handle bit-fields whose offsets are variable, so if we
20298 encounter such things, just return without generating any attribute
20299 whatsoever. Likewise for variable or too large size. */
20300 if (! tree_fits_shwi_p (bit_position (decl))
20301 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20302 return;
20303
20304 bitpos_int = int_bit_position (decl);
20305
20306 /* Note that the bit offset is always the distance (in bits) from the
20307 highest-order bit of the "containing object" to the highest-order bit of
20308 the bit-field itself. Since the "high-order end" of any object or field
20309 is different on big-endian and little-endian machines, the computation
20310 below must take account of these differences. */
20311 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20312 highest_order_field_bit_offset = bitpos_int;
20313
20314 if (! BYTES_BIG_ENDIAN)
20315 {
20316 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20317 highest_order_object_bit_offset +=
20318 simple_type_size_in_bits (original_type);
20319 }
20320
20321 bit_offset
20322 = (! BYTES_BIG_ENDIAN
20323 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20324 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20325
20326 if (bit_offset < 0)
20327 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20328 else
20329 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20330 }
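
/* Illustrative worked example (hypothetical layout; the exact bit
   placement is target-defined): on a little-endian target with 32-bit
   int, given

     struct s { unsigned int a : 3; unsigned int b : 5; };

   the containing object of `b' starts at byte 0, so
   highest_order_object_bit_offset becomes 0 + 32 = 32, while `b'
   occupies bits 3..7, giving highest_order_field_bit_offset 3 + 5 = 8
   and therefore DW_AT_bit_offset 32 - 8 = 24.  The big-endian branch
   above simply subtracts in the opposite direction.  */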
20331
20332 /* For a FIELD_DECL node which represents a bit field, output an attribute
20333 which specifies the length in bits of the given field. */
20334
20335 static inline void
20336 add_bit_size_attribute (dw_die_ref die, tree decl)
20337 {
20338 /* Must be a field and a bit field. */
20339 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20340 && DECL_BIT_FIELD_TYPE (decl));
20341
20342 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20343 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20344 }
20345
20346 /* If the compiled language is ANSI C, then add a 'prototyped'
20347 attribute if argument types are given for the parameters of a function. */
20348
20349 static inline void
20350 add_prototyped_attribute (dw_die_ref die, tree func_type)
20351 {
20352 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20353 {
20354 case DW_LANG_C:
20355 case DW_LANG_C89:
20356 case DW_LANG_C99:
20357 case DW_LANG_C11:
20358 case DW_LANG_ObjC:
20359 if (prototype_p (func_type))
20360 add_AT_flag (die, DW_AT_prototyped, 1);
20361 break;
20362 default:
20363 break;
20364 }
20365 }
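
/* Illustrative sketch: in C, the declaration `int f (void);' is a
   prototype, so its DIE gets DW_AT_prototyped, whereas the old-style
   `int f ();' supplies no parameter types, prototype_p returns false,
   and the attribute is omitted.  */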
20366
20367 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20368 by looking in the type declaration, the object declaration equate table or
20369 the block mapping. */
20370
20371 static inline dw_die_ref
20372 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20373 {
20374 dw_die_ref origin_die = NULL;
20375
20376 if (DECL_P (origin))
20377 {
20378 dw_die_ref c;
20379 origin_die = lookup_decl_die (origin);
20380 /* "Unwrap" the decls DIE which we put in the imported unit context.
20381 We are looking for the abstract copy here. */
20382 if (in_lto_p
20383 && origin_die
20384 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20385 /* ??? Identify this better. */
20386 && c->with_offset)
20387 origin_die = c;
20388 }
20389 else if (TYPE_P (origin))
20390 origin_die = lookup_type_die (origin);
20391 else if (TREE_CODE (origin) == BLOCK)
20392 origin_die = BLOCK_DIE (origin);
20393
20394 /* XXX: Functions that are never lowered don't always have correct block
20395 trees (in the case of Java, and of some other languages, they simply
20396 have no block tree). For these functions, there is nothing we can really do to
20397 output correct debug info for inlined functions in all cases. Rather
20398 than die, we'll just produce deficient debug info now, in that we will
20399 have variables without a proper abstract origin. In the future, when all
20400 functions are lowered, we should re-add a gcc_assert (origin_die)
20401 here. */
20402
20403 if (origin_die)
20404 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20405 return origin_die;
20406 }
20407
20408 /* We do not currently support the pure_virtual attribute. */
20409
20410 static inline void
20411 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20412 {
20413 if (DECL_VINDEX (func_decl))
20414 {
20415 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20416
20417 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20418 add_AT_loc (die, DW_AT_vtable_elem_location,
20419 new_loc_descr (DW_OP_constu,
20420 tree_to_shwi (DECL_VINDEX (func_decl)),
20421 0));
20422
20423 /* GNU extension: Record what type this method came from originally. */
20424 if (debug_info_level > DINFO_LEVEL_TERSE
20425 && DECL_CONTEXT (func_decl))
20426 add_AT_die_ref (die, DW_AT_containing_type,
20427 lookup_type_die (DECL_CONTEXT (func_decl)));
20428 }
20429 }
20430 \f
20431 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20432 given decl. This used to be a vendor extension until DWARF 4
20433 standardized it. */
20434
20435 static void
20436 add_linkage_attr (dw_die_ref die, tree decl)
20437 {
20438 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20439
20440 /* Mimic what assemble_name_raw does with a leading '*'. */
20441 if (name[0] == '*')
20442 name = &name[1];
20443
20444 if (dwarf_version >= 4)
20445 add_AT_string (die, DW_AT_linkage_name, name);
20446 else
20447 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20448 }
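
/* Illustrative sketch (the mangled form shown follows the Itanium C++
   ABI): for a C++ function declared as

     namespace n { void f (int); }

   DECL_NAME is "f" while DECL_ASSEMBLER_NAME is "_ZN1n1fEi"; it is the
   latter that ends up in DW_AT_linkage_name (or, for pre-DWARF-4
   output, DW_AT_MIPS_linkage_name).  */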
20449
20450 /* Add source coordinate attributes for the given decl. */
20451
20452 static void
20453 add_src_coords_attributes (dw_die_ref die, tree decl)
20454 {
20455 expanded_location s;
20456
20457 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20458 return;
20459 s = expand_location (DECL_SOURCE_LOCATION (decl));
20460 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20461 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20462 if (debug_column_info && s.column)
20463 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20464 }
20465
20466 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20467
20468 static void
20469 add_linkage_name_raw (dw_die_ref die, tree decl)
20470 {
20471 /* Defer until we have an assembler name set. */
20472 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20473 {
20474 limbo_die_node *asm_name;
20475
20476 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20477 asm_name->die = die;
20478 asm_name->created_for = decl;
20479 asm_name->next = deferred_asm_name;
20480 deferred_asm_name = asm_name;
20481 }
20482 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20483 add_linkage_attr (die, decl);
20484 }
20485
20486 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20487
20488 static void
20489 add_linkage_name (dw_die_ref die, tree decl)
20490 {
20491 if (debug_info_level > DINFO_LEVEL_NONE
20492 && VAR_OR_FUNCTION_DECL_P (decl)
20493 && TREE_PUBLIC (decl)
20494 && !(VAR_P (decl) && DECL_REGISTER (decl))
20495 && die->die_tag != DW_TAG_member)
20496 add_linkage_name_raw (die, decl);
20497 }
20498
20499 /* Add a DW_AT_name attribute and source coordinate attribute for the
20500 given decl, but only if it actually has a name. */
20501
20502 static void
20503 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20504 bool no_linkage_name)
20505 {
20506 tree decl_name;
20507
20508 decl_name = DECL_NAME (decl);
20509 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20510 {
20511 const char *name = dwarf2_name (decl, 0);
20512 if (name)
20513 add_name_attribute (die, name);
20514 if (! DECL_ARTIFICIAL (decl))
20515 add_src_coords_attributes (die, decl);
20516
20517 if (!no_linkage_name)
20518 add_linkage_name (die, decl);
20519 }
20520
20521 #ifdef VMS_DEBUGGING_INFO
20522 /* Get the function's name, as described by its RTL. This may be different
20523 from the DECL_NAME name used in the source file. */
20524 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20525 {
20526 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20527 XEXP (DECL_RTL (decl), 0), false);
20528 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20529 }
20530 #endif /* VMS_DEBUGGING_INFO */
20531 }
20532
20533 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20534
20535 static void
20536 add_discr_value (dw_die_ref die, dw_discr_value *value)
20537 {
20538 dw_attr_node attr;
20539
20540 attr.dw_attr = DW_AT_discr_value;
20541 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20542 attr.dw_attr_val.val_entry = NULL;
20543 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20544 if (value->pos)
20545 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20546 else
20547 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20548 add_dwarf_attr (die, &attr);
20549 }
20550
20551 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20552
20553 static void
20554 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20555 {
20556 dw_attr_node attr;
20557
20558 attr.dw_attr = DW_AT_discr_list;
20559 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20560 attr.dw_attr_val.val_entry = NULL;
20561 attr.dw_attr_val.v.val_discr_list = discr_list;
20562 add_dwarf_attr (die, &attr);
20563 }
20564
20565 static inline dw_discr_list_ref
20566 AT_discr_list (dw_attr_node *attr)
20567 {
20568 return attr->dw_attr_val.v.val_discr_list;
20569 }
20570
20571 #ifdef VMS_DEBUGGING_INFO
20572 /* Output the debug main pointer die for VMS. */
20573
20574 void
20575 dwarf2out_vms_debug_main_pointer (void)
20576 {
20577 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20578 dw_die_ref die;
20579
20580 /* Allocate the VMS debug main subprogram die. */
20581 die = new_die_raw (DW_TAG_subprogram);
20582 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20583 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20584 current_function_funcdef_no);
20585 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20586
20587 /* Make it the first child of comp_unit_die (). */
20588 die->die_parent = comp_unit_die ();
20589 if (comp_unit_die ()->die_child)
20590 {
20591 die->die_sib = comp_unit_die ()->die_child->die_sib;
20592 comp_unit_die ()->die_child->die_sib = die;
20593 }
20594 else
20595 {
20596 die->die_sib = die;
20597 comp_unit_die ()->die_child = die;
20598 }
20599 }
20600 #endif /* VMS_DEBUGGING_INFO */
20601
20602 /* Push a new declaration scope. */
20603
20604 static void
20605 push_decl_scope (tree scope)
20606 {
20607 vec_safe_push (decl_scope_table, scope);
20608 }
20609
20610 /* Pop a declaration scope. */
20611
20612 static inline void
20613 pop_decl_scope (void)
20614 {
20615 decl_scope_table->pop ();
20616 }
20617
20618 /* walk_tree helper function for uses_local_type, below. */
20619
20620 static tree
20621 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20622 {
20623 if (!TYPE_P (*tp))
20624 *walk_subtrees = 0;
20625 else
20626 {
20627 tree name = TYPE_NAME (*tp);
20628 if (name && DECL_P (name) && decl_function_context (name))
20629 return *tp;
20630 }
20631 return NULL_TREE;
20632 }
20633
20634 /* If TYPE involves a function-local type (including a local typedef to a
20635 non-local type), returns that type; otherwise returns NULL_TREE. */
20636
20637 static tree
20638 uses_local_type (tree type)
20639 {
20640 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20641 return used;
20642 }
20643
20644 /* Return the DIE for the scope that immediately contains this type.
20645 Non-named types that do not involve a function-local type get global
20646 scope. Named types nested in namespaces or other types get their
20647 containing scope. All other types (i.e. function-local named types) get
20648 the current active scope. */
20649
20650 static dw_die_ref
20651 scope_die_for (tree t, dw_die_ref context_die)
20652 {
20653 dw_die_ref scope_die = NULL;
20654 tree containing_scope;
20655
20656 /* Non-types always go in the current scope. */
20657 gcc_assert (TYPE_P (t));
20658
20659 /* Use the scope of the typedef, rather than the scope of the type
20660 it refers to. */
20661 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20662 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20663 else
20664 containing_scope = TYPE_CONTEXT (t);
20665
20666 /* Use the containing namespace if there is one. */
20667 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20668 {
20669 if (context_die == lookup_decl_die (containing_scope))
20670 /* OK */;
20671 else if (debug_info_level > DINFO_LEVEL_TERSE)
20672 context_die = get_context_die (containing_scope);
20673 else
20674 containing_scope = NULL_TREE;
20675 }
20676
20677 /* Ignore function type "scopes" from the C frontend. They mean that
20678 a tagged type is local to a parmlist of a function declarator, but
20679 that isn't useful to DWARF. */
20680 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20681 containing_scope = NULL_TREE;
20682
20683 if (SCOPE_FILE_SCOPE_P (containing_scope))
20684 {
20685 /* If T uses a local type keep it local as well, to avoid references
20686 to function-local DIEs from outside the function. */
20687 if (current_function_decl && uses_local_type (t))
20688 scope_die = context_die;
20689 else
20690 scope_die = comp_unit_die ();
20691 }
20692 else if (TYPE_P (containing_scope))
20693 {
20694 /* For types, we can just look up the appropriate DIE. */
20695 if (debug_info_level > DINFO_LEVEL_TERSE)
20696 scope_die = get_context_die (containing_scope);
20697 else
20698 {
20699 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20700 if (scope_die == NULL)
20701 scope_die = comp_unit_die ();
20702 }
20703 }
20704 else
20705 scope_die = context_die;
20706
20707 return scope_die;
20708 }
20709
20710 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20711
20712 static inline int
20713 local_scope_p (dw_die_ref context_die)
20714 {
20715 for (; context_die; context_die = context_die->die_parent)
20716 if (context_die->die_tag == DW_TAG_inlined_subroutine
20717 || context_die->die_tag == DW_TAG_subprogram)
20718 return 1;
20719
20720 return 0;
20721 }
20722
20723 /* Returns nonzero if CONTEXT_DIE is a class. */
20724
20725 static inline int
20726 class_scope_p (dw_die_ref context_die)
20727 {
20728 return (context_die
20729 && (context_die->die_tag == DW_TAG_structure_type
20730 || context_die->die_tag == DW_TAG_class_type
20731 || context_die->die_tag == DW_TAG_interface_type
20732 || context_die->die_tag == DW_TAG_union_type));
20733 }
20734
20735 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20736 whether or not to treat a DIE in this context as a declaration. */
20737
20738 static inline int
20739 class_or_namespace_scope_p (dw_die_ref context_die)
20740 {
20741 return (class_scope_p (context_die)
20742 || (context_die && context_die->die_tag == DW_TAG_namespace));
20743 }
20744
20745 /* Many forms of DIEs require a "type description" attribute. This
20746 routine locates the proper "type descriptor" die for the type given
20747 by 'type' plus any additional qualifiers given by 'cv_quals', and
20748 adds a DW_AT_type attribute below the given die. */
20749
20750 static void
20751 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20752 bool reverse, dw_die_ref context_die)
20753 {
20754 enum tree_code code = TREE_CODE (type);
20755 dw_die_ref type_die = NULL;
20756
20757 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20758 or fixed-point type, use the inner type. This is because we have no
20759 support for unnamed types in base_type_die. This can happen if this is
20760 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
20761 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20762 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20763 type = TREE_TYPE (type), code = TREE_CODE (type);
20764
20765 if (code == ERROR_MARK
20766 /* Handle a special case. For functions whose return type is void, we
20767 generate *no* type attribute. (Note that no object may have type
20768 `void', so this only applies to function return types). */
20769 || code == VOID_TYPE)
20770 return;
20771
20772 type_die = modified_type_die (type,
20773 cv_quals | TYPE_QUALS (type),
20774 reverse,
20775 context_die);
20776
20777 if (type_die != NULL)
20778 add_AT_die_ref (object_die, DW_AT_type, type_die);
20779 }
20780
20781 /* Given an object die, add the calling convention attribute for the
20782 function call type. */
20783 static void
20784 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20785 {
20786 enum dwarf_calling_convention value = DW_CC_normal;
20787
20788 value = ((enum dwarf_calling_convention)
20789 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20790
20791 if (is_fortran ()
20792 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
20793 {
20794 /* DWARF 2 doesn't provide a way to identify a program's source-level
20795 entry point. DW_AT_calling_convention attributes are only meant
20796 to describe functions' calling conventions. However, lacking a
20797 better way to signal the Fortran main program, we used this for
20798 a long time, following existing custom. Now, DWARF 4 has
20799 DW_AT_main_subprogram, which we add below, but some tools still
20800 rely on the old way, which we thus keep. */
20801 value = DW_CC_program;
20802
20803 if (dwarf_version >= 4 || !dwarf_strict)
20804 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20805 }
20806
20807 /* Only add the attribute if the backend requests it, and the value
20808 is not DW_CC_normal. */
20809 if (value && (value != DW_CC_normal))
20810 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20811 }
20812
20813 /* Given a tree pointer to a struct, class, union, or enum type node, return
20814 a pointer to the (string) tag name for the given type, or zero if the type
20815 was declared without a tag. */
20816
20817 static const char *
20818 type_tag (const_tree type)
20819 {
20820 const char *name = 0;
20821
20822 if (TYPE_NAME (type) != 0)
20823 {
20824 tree t = 0;
20825
20826 /* Find the IDENTIFIER_NODE for the type name. */
20827 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20828 && !TYPE_NAMELESS (type))
20829 t = TYPE_NAME (type);
20830
20831 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20832 a TYPE_DECL node, regardless of whether or not a `typedef' was
20833 involved. */
20834 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20835 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20836 {
20837 /* We want to be extra verbose. Don't call dwarf_name if
20838 DECL_NAME isn't set. The default hook for decl_printable_name
20839 doesn't like that, and in this context it's correct to return
20840 0, instead of "<anonymous>" or the like. */
20841 if (DECL_NAME (TYPE_NAME (type))
20842 && !DECL_NAMELESS (TYPE_NAME (type)))
20843 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20844 }
20845
20846 /* Now get the name as a string, or invent one. */
20847 if (!name && t != 0)
20848 name = IDENTIFIER_POINTER (t);
20849 }
20850
20851 return (name == 0 || *name == '\0') ? 0 : name;
20852 }
20853
20854 /* Return the type associated with a data member, making a special check
20855 for bit field types. */
20856
20857 static inline tree
20858 member_declared_type (const_tree member)
20859 {
20860 return (DECL_BIT_FIELD_TYPE (member)
20861 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20862 }
20863
20864 /* Get the decl's label, as described by its RTL. This may be different
20865 from the DECL_NAME name used in the source file. */
20866
20867 #if 0
20868 static const char *
20869 decl_start_label (tree decl)
20870 {
20871 rtx x;
20872 const char *fnname;
20873
20874 x = DECL_RTL (decl);
20875 gcc_assert (MEM_P (x));
20876
20877 x = XEXP (x, 0);
20878 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20879
20880 fnname = XSTR (x, 0);
20881 return fnname;
20882 }
20883 #endif
20884 \f
20885 /* For variable-length arrays that have been previously generated, but
20886 may be incomplete due to missing subscript info, fill the subscript
20887 info. Return TRUE if this is one of those cases. */
20888 static bool
20889 fill_variable_array_bounds (tree type)
20890 {
20891 if (TREE_ASM_WRITTEN (type)
20892 && TREE_CODE (type) == ARRAY_TYPE
20893 && variably_modified_type_p (type, NULL))
20894 {
20895 dw_die_ref array_die = lookup_type_die (type);
20896 if (!array_die)
20897 return false;
20898 add_subscript_info (array_die, type, !is_ada ());
20899 return true;
20900 }
20901 return false;
20902 }
20903
20904 /* These routines generate the internal representation of the DIEs for
20905 the compilation unit. Debugging information is collected by walking
20906 the declaration trees passed in from dwarf2out_decl(). */
20907
20908 static void
20909 gen_array_type_die (tree type, dw_die_ref context_die)
20910 {
20911 dw_die_ref array_die;
20912
20913 /* GNU compilers represent multidimensional array types as sequences of one
20914 dimensional array types whose element types are themselves array types.
20915 We sometimes squish that down to a single array_type DIE with multiple
20916 subscripts in the Dwarf debugging info. The draft Dwarf specification
20917 says that we are allowed to do this kind of compression in C, because
20918 there is no difference between an array of arrays and a multidimensional
20919 array. We don't do this for Ada to remain as close as possible to the
20920 actual representation, which is especially important given the language's
20921 flexibility with respect to arrays of variable size. */
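/* An illustrative sketch of the collapsed form (not actual compiler output):
   for the C declaration

     int a[5][10];

   the nested array-of-array representation is emitted as a single

     DW_TAG_array_type
       DW_AT_type: reference to the DIE for "int"
       DW_TAG_subrange_type   (DW_AT_upper_bound: 4)
       DW_TAG_subrange_type   (DW_AT_upper_bound: 9)

   whereas for Ada each dimension keeps its own DW_TAG_array_type DIE.  */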
20922
20923 bool collapse_nested_arrays = !is_ada ();
20924
20925 if (fill_variable_array_bounds (type))
20926 return;
20927
20928 dw_die_ref scope_die = scope_die_for (type, context_die);
20929 tree element_type;
20930
20931 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
20932 DW_TAG_string_type doesn't have DW_AT_type attribute). */
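/* Illustrative sketch (not actual compiler output): for a Fortran
   declaration such as

     character(len=10) :: s

   the block below emits a DW_TAG_string_type DIE with DW_AT_byte_size 10,
   while a deferred- or assumed-length string gets its length described by
   a DW_AT_string_length location expression instead.  */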
20933 if (TYPE_STRING_FLAG (type)
20934 && TREE_CODE (type) == ARRAY_TYPE
20935 && is_fortran ()
20936 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
20937 {
20938 HOST_WIDE_INT size;
20939
20940 array_die = new_die (DW_TAG_string_type, scope_die, type);
20941 add_name_attribute (array_die, type_tag (type));
20942 equate_type_number_to_die (type, array_die);
20943 size = int_size_in_bytes (type);
20944 if (size >= 0)
20945 add_AT_unsigned (array_die, DW_AT_byte_size, size);
20946 /* ??? We can't annotate types late, but for LTO we may not
20947 generate a location early either (gfortran.dg/save_6.f90). */
20948 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
20949 && TYPE_DOMAIN (type) != NULL_TREE
20950 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
20951 {
20952 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
20953 tree rszdecl = szdecl;
20954
20955 size = int_size_in_bytes (TREE_TYPE (szdecl));
20956 if (!DECL_P (szdecl))
20957 {
20958 if (TREE_CODE (szdecl) == INDIRECT_REF
20959 && DECL_P (TREE_OPERAND (szdecl, 0)))
20960 {
20961 rszdecl = TREE_OPERAND (szdecl, 0);
20962 if (int_size_in_bytes (TREE_TYPE (rszdecl))
20963 != DWARF2_ADDR_SIZE)
20964 size = 0;
20965 }
20966 else
20967 size = 0;
20968 }
20969 if (size > 0)
20970 {
20971 dw_loc_list_ref loc
20972 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
20973 NULL);
20974 if (loc)
20975 {
20976 add_AT_location_description (array_die, DW_AT_string_length,
20977 loc);
20978 if (size != DWARF2_ADDR_SIZE)
20979 add_AT_unsigned (array_die, dwarf_version >= 5
20980 ? DW_AT_string_length_byte_size
20981 : DW_AT_byte_size, size);
20982 }
20983 }
20984 }
20985 return;
20986 }
20987
20988 array_die = new_die (DW_TAG_array_type, scope_die, type);
20989 add_name_attribute (array_die, type_tag (type));
20990 equate_type_number_to_die (type, array_die);
20991
20992 if (TREE_CODE (type) == VECTOR_TYPE)
20993 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
20994
20995 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
20996 if (is_fortran ()
20997 && TREE_CODE (type) == ARRAY_TYPE
20998 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
20999 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21000 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21001
21002 #if 0
21003 /* We default the array ordering. Debuggers will probably do the right
21004 things even if DW_AT_ordering is not present. It's not even an issue
21005 until we start to get into multidimensional arrays anyway. If a debugger
21006 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21007 then we'll have to put the DW_AT_ordering attribute back in. (But if
21008 and when we find out that we need to put these in, we will only do so
21009 for multidimensional arrays.) */
21010 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21011 #endif
21012
21013 if (TREE_CODE (type) == VECTOR_TYPE)
21014 {
21015 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21016 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21017 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21018 add_bound_info (subrange_die, DW_AT_upper_bound,
21019 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21020 }
21021 else
21022 add_subscript_info (array_die, type, collapse_nested_arrays);
21023
21024 /* Add representation of the type of the elements of this array type and
21025 emit the corresponding DIE if we haven't done it already. */
21026 element_type = TREE_TYPE (type);
21027 if (collapse_nested_arrays)
21028 while (TREE_CODE (element_type) == ARRAY_TYPE)
21029 {
21030 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21031 break;
21032 element_type = TREE_TYPE (element_type);
21033 }
21034
21035 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21036 TREE_CODE (type) == ARRAY_TYPE
21037 && TYPE_REVERSE_STORAGE_ORDER (type),
21038 context_die);
21039
21040 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21041 if (TYPE_ARTIFICIAL (type))
21042 add_AT_flag (array_die, DW_AT_artificial, 1);
21043
21044 if (get_AT (array_die, DW_AT_name))
21045 add_pubtype (type, array_die);
21046
21047 add_alignment_attribute (array_die, type);
21048 }
21049
21050 /* This routine generates a DIE for an array with a hidden descriptor; details
21051 are filled into *info by a langhook. */
21052
21053 static void
21054 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21055 dw_die_ref context_die)
21056 {
21057 const dw_die_ref scope_die = scope_die_for (type, context_die);
21058 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21059 struct loc_descr_context context = { type, info->base_decl, NULL,
21060 false, false };
21061 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21062 int dim;
21063
21064 add_name_attribute (array_die, type_tag (type));
21065 equate_type_number_to_die (type, array_die);
21066
21067 if (info->ndimensions > 1)
21068 switch (info->ordering)
21069 {
21070 case array_descr_ordering_row_major:
21071 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21072 break;
21073 case array_descr_ordering_column_major:
21074 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21075 break;
21076 default:
21077 break;
21078 }
21079
21080 if (dwarf_version >= 3 || !dwarf_strict)
21081 {
21082 if (info->data_location)
21083 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21084 dw_scalar_form_exprloc, &context);
21085 if (info->associated)
21086 add_scalar_info (array_die, DW_AT_associated, info->associated,
21087 dw_scalar_form_constant
21088 | dw_scalar_form_exprloc
21089 | dw_scalar_form_reference, &context);
21090 if (info->allocated)
21091 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21092 dw_scalar_form_constant
21093 | dw_scalar_form_exprloc
21094 | dw_scalar_form_reference, &context);
21095 if (info->stride)
21096 {
21097 const enum dwarf_attribute attr
21098 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21099 const int forms
21100 = (info->stride_in_bits)
21101 ? dw_scalar_form_constant
21102 : (dw_scalar_form_constant
21103 | dw_scalar_form_exprloc
21104 | dw_scalar_form_reference);
21105
21106 add_scalar_info (array_die, attr, info->stride, forms, &context);
21107 }
21108 }
21109 if (dwarf_version >= 5)
21110 {
21111 if (info->rank)
21112 {
21113 add_scalar_info (array_die, DW_AT_rank, info->rank,
21114 dw_scalar_form_constant
21115 | dw_scalar_form_exprloc, &context);
21116 subrange_tag = DW_TAG_generic_subrange;
21117 context.placeholder_arg = true;
21118 }
21119 }
21120
21121 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21122
21123 for (dim = 0; dim < info->ndimensions; dim++)
21124 {
21125 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21126
21127 if (info->dimen[dim].bounds_type)
21128 add_type_attribute (subrange_die,
21129 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21130 false, context_die);
21131 if (info->dimen[dim].lower_bound)
21132 add_bound_info (subrange_die, DW_AT_lower_bound,
21133 info->dimen[dim].lower_bound, &context);
21134 if (info->dimen[dim].upper_bound)
21135 add_bound_info (subrange_die, DW_AT_upper_bound,
21136 info->dimen[dim].upper_bound, &context);
21137 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21138 add_scalar_info (subrange_die, DW_AT_byte_stride,
21139 info->dimen[dim].stride,
21140 dw_scalar_form_constant
21141 | dw_scalar_form_exprloc
21142 | dw_scalar_form_reference,
21143 &context);
21144 }
21145
21146 gen_type_die (info->element_type, context_die);
21147 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21148 TREE_CODE (type) == ARRAY_TYPE
21149 && TYPE_REVERSE_STORAGE_ORDER (type),
21150 context_die);
21151
21152 if (get_AT (array_die, DW_AT_name))
21153 add_pubtype (type, array_die);
21154
21155 add_alignment_attribute (array_die, type);
21156 }
21157
21158 #if 0
21159 static void
21160 gen_entry_point_die (tree decl, dw_die_ref context_die)
21161 {
21162 tree origin = decl_ultimate_origin (decl);
21163 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21164
21165 if (origin != NULL)
21166 add_abstract_origin_attribute (decl_die, origin);
21167 else
21168 {
21169 add_name_and_src_coords_attributes (decl_die, decl);
21170 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21171 TYPE_UNQUALIFIED, false, context_die);
21172 }
21173
21174 if (DECL_ABSTRACT_P (decl))
21175 equate_decl_number_to_die (decl, decl_die);
21176 else
21177 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21178 }
21179 #endif
21180
21181 /* Walk through the list of incomplete types again, trying once more to
21182 emit full debugging info for them. */
21183
21184 static void
21185 retry_incomplete_types (void)
21186 {
21187 set_early_dwarf s;
21188 int i;
21189
21190 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21191 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21192 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21193 vec_safe_truncate (incomplete_types, 0);
21194 }
21195
21196 /* Determine what tag to use for a record type. */
21197
21198 static enum dwarf_tag
21199 record_type_tag (tree type)
21200 {
21201 if (! lang_hooks.types.classify_record)
21202 return DW_TAG_structure_type;
21203
21204 switch (lang_hooks.types.classify_record (type))
21205 {
21206 case RECORD_IS_STRUCT:
21207 return DW_TAG_structure_type;
21208
21209 case RECORD_IS_CLASS:
21210 return DW_TAG_class_type;
21211
21212 case RECORD_IS_INTERFACE:
21213 if (dwarf_version >= 3 || !dwarf_strict)
21214 return DW_TAG_interface_type;
21215 return DW_TAG_structure_type;
21216
21217 default:
21218 gcc_unreachable ();
21219 }
21220 }
21221
21222 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21223 include all of the information about the enumeration values also. Each
21224 enumerated type name/value is listed as a child of the enumerated type
21225 DIE. */
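/* An illustrative sketch (not actual compiler output): for the C declaration

     enum color { RED, GREEN = 5, BLUE };

   this routine produces roughly

     DW_TAG_enumeration_type  (DW_AT_name: "color", DW_AT_byte_size: ...)
       DW_TAG_enumerator  DW_AT_name: "RED"    DW_AT_const_value: 0
       DW_TAG_enumerator  DW_AT_name: "GREEN"  DW_AT_const_value: 5
       DW_TAG_enumerator  DW_AT_name: "BLUE"   DW_AT_const_value: 6  */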
21226
21227 static dw_die_ref
21228 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21229 {
21230 dw_die_ref type_die = lookup_type_die (type);
21231
21232 if (type_die == NULL)
21233 {
21234 type_die = new_die (DW_TAG_enumeration_type,
21235 scope_die_for (type, context_die), type);
21236 equate_type_number_to_die (type, type_die);
21237 add_name_attribute (type_die, type_tag (type));
21238 if (dwarf_version >= 4 || !dwarf_strict)
21239 {
21240 if (ENUM_IS_SCOPED (type))
21241 add_AT_flag (type_die, DW_AT_enum_class, 1);
21242 if (ENUM_IS_OPAQUE (type))
21243 add_AT_flag (type_die, DW_AT_declaration, 1);
21244 }
21245 if (!dwarf_strict)
21246 add_AT_unsigned (type_die, DW_AT_encoding,
21247 TYPE_UNSIGNED (type)
21248 ? DW_ATE_unsigned
21249 : DW_ATE_signed);
21250 }
21251 else if (! TYPE_SIZE (type))
21252 return type_die;
21253 else
21254 remove_AT (type_die, DW_AT_declaration);
21255
21256 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21257 given enum type is incomplete, do not generate the DW_AT_byte_size
21258 attribute or the DW_AT_element_list attribute. */
21259 if (TYPE_SIZE (type))
21260 {
21261 tree link;
21262
21263 TREE_ASM_WRITTEN (type) = 1;
21264 add_byte_size_attribute (type_die, type);
21265 add_alignment_attribute (type_die, type);
21266 if (dwarf_version >= 3 || !dwarf_strict)
21267 {
21268 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21269 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21270 context_die);
21271 }
21272 if (TYPE_STUB_DECL (type) != NULL_TREE)
21273 {
21274 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21275 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21276 }
21277
21278 /* If the first reference to this type was as the return type of an
21279 inline function, then it may not have a parent. Fix this now. */
21280 if (type_die->die_parent == NULL)
21281 add_child_die (scope_die_for (type, context_die), type_die);
21282
21283 for (link = TYPE_VALUES (type);
21284 link != NULL; link = TREE_CHAIN (link))
21285 {
21286 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21287 tree value = TREE_VALUE (link);
21288
21289 add_name_attribute (enum_die,
21290 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21291
21292 if (TREE_CODE (value) == CONST_DECL)
21293 value = DECL_INITIAL (value);
21294
21295 if (simple_type_size_in_bits (TREE_TYPE (value))
21296 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21297 {
21298 /* For constant forms created by add_AT_unsigned, DWARF
21299 consumers (GDB, elfutils, etc.) always zero extend
21300 the value. Only when the actual value is negative
21301 do we need to use add_AT_int to generate a constant
21302 form that can represent negative values. */
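              /* For example (a sketch): given "enum e { M = -1 }", val is -1
                 and TREE_TYPE (value) is signed, so add_AT_int is used;
                 emitting -1 via add_AT_unsigned would make consumers read
                 back a huge positive number instead.  */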
21303 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21304 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21305 add_AT_unsigned (enum_die, DW_AT_const_value,
21306 (unsigned HOST_WIDE_INT) val);
21307 else
21308 add_AT_int (enum_die, DW_AT_const_value, val);
21309 }
21310 else
21311 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21312 that here. TODO: This should be re-worked to use correct
21313 signed/unsigned double tags for all cases. */
21314 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21315 }
21316
21317 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21318 if (TYPE_ARTIFICIAL (type))
21319 add_AT_flag (type_die, DW_AT_artificial, 1);
21320 }
21321 else
21322 add_AT_flag (type_die, DW_AT_declaration, 1);
21323
21324 add_pubtype (type, type_die);
21325
21326 return type_die;
21327 }
21328
21329 /* Generate a DIE to represent either a real live formal parameter decl or to
21330 represent just the type of some formal parameter position in some function
21331 type.
21332
21333 Note that this routine is a bit unusual because its argument may be a
21334 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21335 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21336 node. If it's the former then this function is being called to output a
21337 DIE to represent a formal parameter object (or some inlining thereof). If
21338 it's the latter, then this function is only being called to output a
21339 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21340 argument type of some subprogram type.
21341 If EMIT_NAME_P is true, name and source coordinate attributes
21342 are emitted. */
21343
21344 static dw_die_ref
21345 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21346 dw_die_ref context_die)
21347 {
21348 tree node_or_origin = node ? node : origin;
21349 tree ultimate_origin;
21350 dw_die_ref parm_die = NULL;
21351
21352 if (DECL_P (node_or_origin))
21353 {
21354 parm_die = lookup_decl_die (node);
21355
21356 /* If the contexts differ, we may not be talking about the same
21357 thing.
21358 ??? When in LTO the DIE parent is the "abstract" copy and the
21359 context_die is the specification "copy". But this whole block
21360 should eventually be no longer needed. */
21361 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21362 {
21363 if (!DECL_ABSTRACT_P (node))
21364 {
21365 /* This can happen when creating an inlined instance, in
21366 which case we need to create a new DIE that will get
21367 annotated with DW_AT_abstract_origin. */
21368 parm_die = NULL;
21369 }
21370 else
21371 gcc_unreachable ();
21372 }
21373
21374 if (parm_die && parm_die->die_parent == NULL)
21375 {
21376 /* Check that parm_die already has the right attributes that
21377 we would have added below. If any attributes are
21378 missing, fall through to add them. */
21379 if (! DECL_ABSTRACT_P (node_or_origin)
21380 && !get_AT (parm_die, DW_AT_location)
21381 && !get_AT (parm_die, DW_AT_const_value))
21382 /* We are missing location info, and are about to add it. */
21383 ;
21384 else
21385 {
21386 add_child_die (context_die, parm_die);
21387 return parm_die;
21388 }
21389 }
21390 }
21391
21392 /* If we have a previously generated DIE, use it, unless this is a
21393 concrete instance (origin != NULL), in which case we need a new
21394 DIE with a corresponding DW_AT_abstract_origin. */
21395 bool reusing_die;
21396 if (parm_die && origin == NULL)
21397 reusing_die = true;
21398 else
21399 {
21400 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21401 reusing_die = false;
21402 }
21403
21404 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21405 {
21406 case tcc_declaration:
21407 ultimate_origin = decl_ultimate_origin (node_or_origin);
21408 if (node || ultimate_origin)
21409 origin = ultimate_origin;
21410
21411 if (reusing_die)
21412 goto add_location;
21413
21414 if (origin != NULL)
21415 add_abstract_origin_attribute (parm_die, origin);
21416 else if (emit_name_p)
21417 add_name_and_src_coords_attributes (parm_die, node);
21418 if (origin == NULL
21419 || (! DECL_ABSTRACT_P (node_or_origin)
21420 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21421 decl_function_context
21422 (node_or_origin))))
21423 {
21424 tree type = TREE_TYPE (node_or_origin);
21425 if (decl_by_reference_p (node_or_origin))
21426 add_type_attribute (parm_die, TREE_TYPE (type),
21427 TYPE_UNQUALIFIED,
21428 false, context_die);
21429 else
21430 add_type_attribute (parm_die, type,
21431 decl_quals (node_or_origin),
21432 false, context_die);
21433 }
21434 if (origin == NULL && DECL_ARTIFICIAL (node))
21435 add_AT_flag (parm_die, DW_AT_artificial, 1);
21436 add_location:
21437 if (node && node != origin)
21438 equate_decl_number_to_die (node, parm_die);
21439 if (! DECL_ABSTRACT_P (node_or_origin))
21440 add_location_or_const_value_attribute (parm_die, node_or_origin,
21441 node == NULL);
21442
21443 break;
21444
21445 case tcc_type:
21446 /* We were called with some kind of a ..._TYPE node. */
21447 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21448 context_die);
21449 break;
21450
21451 default:
21452 gcc_unreachable ();
21453 }
21454
21455 return parm_die;
21456 }
21457
21458 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21459 child DW_TAG_formal_parameter DIEs representing the arguments of the
21460 parameter pack.
21461
21462 PARM_PACK must be a function parameter pack.
21463 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21464 must point to the subsequent arguments of the function PACK_ARG belongs to.
21465 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21466 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
21467 following the last one for which a DIE was generated. */
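/* An illustrative sketch (assuming a C++ variadic template; not actual
   compiler output): for

     template <typename... T> void f (T... args);

   an instantiation such as f<int, long> is described as

     DW_TAG_subprogram ("f")
       DW_TAG_GNU_formal_parameter_pack
         DW_TAG_formal_parameter   (for the "int" argument)
         DW_TAG_formal_parameter   (for the "long" argument)

   and an empty pack yields a DW_TAG_GNU_formal_parameter_pack with no
   children.  */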
21468
21469 static dw_die_ref
21470 gen_formal_parameter_pack_die (tree parm_pack,
21471 tree pack_arg,
21472 dw_die_ref subr_die,
21473 tree *next_arg)
21474 {
21475 tree arg;
21476 dw_die_ref parm_pack_die;
21477
21478 gcc_assert (parm_pack
21479 && lang_hooks.function_parameter_pack_p (parm_pack)
21480 && subr_die);
21481
21482 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21483 add_src_coords_attributes (parm_pack_die, parm_pack);
21484
21485 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21486 {
21487 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21488 parm_pack))
21489 break;
21490 gen_formal_parameter_die (arg, NULL,
21491 false /* Don't emit name attribute. */,
21492 parm_pack_die);
21493 }
21494 if (next_arg)
21495 *next_arg = arg;
21496 return parm_pack_die;
21497 }
21498
21499 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21500 at the end of an (ANSI prototyped) formal parameter list. */
21501
21502 static void
21503 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21504 {
21505 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21506 }
21507
21508 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21509 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21510 parameters as specified in some function type specification (except for
21511 those which appear as part of a function *definition*). */
21512
21513 static void
21514 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21515 {
21516 tree link;
21517 tree formal_type = NULL;
21518 tree first_parm_type;
21519 tree arg;
21520
21521 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21522 {
21523 arg = DECL_ARGUMENTS (function_or_method_type);
21524 function_or_method_type = TREE_TYPE (function_or_method_type);
21525 }
21526 else
21527 arg = NULL_TREE;
21528
21529 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21530
21531 /* Make our first pass over the list of formal parameter types and output a
21532 DW_TAG_formal_parameter DIE for each one. */
21533 for (link = first_parm_type; link; )
21534 {
21535 dw_die_ref parm_die;
21536
21537 formal_type = TREE_VALUE (link);
21538 if (formal_type == void_type_node)
21539 break;
21540
21541 /* Output a (nameless) DIE to represent the formal parameter itself. */
21542 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21543 {
21544 parm_die = gen_formal_parameter_die (formal_type, NULL,
21545 true /* Emit name attribute. */,
21546 context_die);
21547 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21548 && link == first_parm_type)
21549 {
21550 add_AT_flag (parm_die, DW_AT_artificial, 1);
21551 if (dwarf_version >= 3 || !dwarf_strict)
21552 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21553 }
21554 else if (arg && DECL_ARTIFICIAL (arg))
21555 add_AT_flag (parm_die, DW_AT_artificial, 1);
21556 }
21557
21558 link = TREE_CHAIN (link);
21559 if (arg)
21560 arg = DECL_CHAIN (arg);
21561 }
21562
21563 /* If this function type has an ellipsis, add a
21564 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21565 if (formal_type != void_type_node)
21566 gen_unspecified_parameters_die (function_or_method_type, context_die);
21567
21568 /* Make our second (and final) pass over the list of formal parameter types
21569 and output DIEs to represent those types (as necessary). */
21570 for (link = TYPE_ARG_TYPES (function_or_method_type);
21571 link && TREE_VALUE (link);
21572 link = TREE_CHAIN (link))
21573 gen_type_die (TREE_VALUE (link), context_die);
21574 }
21575
21576 /* We want to generate the DIE for TYPE so that we can generate the
21577 die for MEMBER, which has been defined; we will need to refer back
21578 to the member declaration nested within TYPE. If we're trying to
21579 generate minimal debug info for TYPE, processing TYPE won't do the
21580 trick; we need to attach the member declaration by hand. */
21581
21582 static void
21583 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21584 {
21585 gen_type_die (type, context_die);
21586
21587 /* If we're trying to avoid duplicate debug info, we may not have
21588 emitted the member decl for this function. Emit it now. */
21589 if (TYPE_STUB_DECL (type)
21590 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21591 && ! lookup_decl_die (member))
21592 {
21593 dw_die_ref type_die;
21594 gcc_assert (!decl_ultimate_origin (member));
21595
21596 push_decl_scope (type);
21597 type_die = lookup_type_die_strip_naming_typedef (type);
21598 if (TREE_CODE (member) == FUNCTION_DECL)
21599 gen_subprogram_die (member, type_die);
21600 else if (TREE_CODE (member) == FIELD_DECL)
21601 {
21602 /* Ignore the nameless fields that are used to skip bits but handle
21603 C++ anonymous unions and structs. */
21604 if (DECL_NAME (member) != NULL_TREE
21605 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21606 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21607 {
21608 struct vlr_context vlr_ctx = {
21609 DECL_CONTEXT (member), /* struct_type */
21610 NULL_TREE /* variant_part_offset */
21611 };
21612 gen_type_die (member_declared_type (member), type_die);
21613 gen_field_die (member, &vlr_ctx, type_die);
21614 }
21615 }
21616 else
21617 gen_variable_die (member, NULL_TREE, type_die);
21618
21619 pop_decl_scope ();
21620 }
21621 }
21622 \f
21623 /* Forward declare this function, because it is mutually recursive
21624 with set_block_origin_self below. */
21625 static void set_decl_origin_self (tree);
21626
21627 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21628 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21629 that it points to the node itself, thus indicating that the node is its
21630 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21631 the given node is NULL, recursively descend the decl/block tree which
21632 it is the root of, and for each other ..._DECL or BLOCK node contained
21633 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21634 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21635 values to point to themselves. */
21636
21637 static void
21638 set_block_origin_self (tree stmt)
21639 {
21640 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21641 {
21642 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21643
21644 {
21645 tree local_decl;
21646
21647 for (local_decl = BLOCK_VARS (stmt);
21648 local_decl != NULL_TREE;
21649 local_decl = DECL_CHAIN (local_decl))
21650 /* Do not recurse on nested functions since the inlining status
21651 of parent and child can be different as per the DWARF spec. */
21652 if (TREE_CODE (local_decl) != FUNCTION_DECL
21653 && !DECL_EXTERNAL (local_decl))
21654 set_decl_origin_self (local_decl);
21655 }
21656
21657 {
21658 tree subblock;
21659
21660 for (subblock = BLOCK_SUBBLOCKS (stmt);
21661 subblock != NULL_TREE;
21662 subblock = BLOCK_CHAIN (subblock))
21663 set_block_origin_self (subblock); /* Recurse. */
21664 }
21665 }
21666 }
21667
21668 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21669 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21670 node so that it points to the node itself, thus indicating that the
21671 node represents its own (abstract) origin. Additionally, if the
21672 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21673 the decl/block tree of which the given node is the root of, and for
21674 each other ..._DECL or BLOCK node contained therein whose
21675 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21676 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21677 point to themselves. */
21678
21679 static void
21680 set_decl_origin_self (tree decl)
21681 {
21682 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21683 {
21684 DECL_ABSTRACT_ORIGIN (decl) = decl;
21685 if (TREE_CODE (decl) == FUNCTION_DECL)
21686 {
21687 tree arg;
21688
21689 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21690 DECL_ABSTRACT_ORIGIN (arg) = arg;
21691 if (DECL_INITIAL (decl) != NULL_TREE
21692 && DECL_INITIAL (decl) != error_mark_node)
21693 set_block_origin_self (DECL_INITIAL (decl));
21694 }
21695 }
21696 }
21697 \f
21698 /* Mark the early DIE for DECL as the abstract instance. */
21699
21700 static void
21701 dwarf2out_abstract_function (tree decl)
21702 {
21703 dw_die_ref old_die;
21704
21705 /* Make sure we have the actual abstract inline, not a clone. */
21706 decl = DECL_ORIGIN (decl);
21707
21708 if (DECL_IGNORED_P (decl))
21709 return;
21710
21711 old_die = lookup_decl_die (decl);
21712 /* With early debug we always have an old DIE unless we are in LTO
21713 and the user did not compile but only link with debug. */
21714 if (in_lto_p && ! old_die)
21715 return;
21716 gcc_assert (old_die != NULL);
21717 if (get_AT (old_die, DW_AT_inline)
21718 || get_AT (old_die, DW_AT_abstract_origin))
21719 /* We've already generated the abstract instance. */
21720 return;
21721
21722 /* Go ahead and put DW_AT_inline on the DIE. */
21723 if (DECL_DECLARED_INLINE_P (decl))
21724 {
21725 if (cgraph_function_possibly_inlined_p (decl))
21726 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
21727 else
21728 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
21729 }
21730 else
21731 {
21732 if (cgraph_function_possibly_inlined_p (decl))
21733 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
21734 else
21735 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
21736 }
21737
21738 if (DECL_DECLARED_INLINE_P (decl)
21739 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
21740 add_AT_flag (old_die, DW_AT_artificial, 1);
21741
21742 set_decl_origin_self (decl);
21743 }
21744
21745 /* Helper function of premark_used_types() which gets called through
21746 htab_traverse.
21747
21748 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21749 marked as unused by prune_unused_types. */
21750
21751 bool
21752 premark_used_types_helper (tree const &type, void *)
21753 {
21754 dw_die_ref die;
21755
21756 die = lookup_type_die (type);
21757 if (die != NULL)
21758 die->die_perennial_p = 1;
21759 return true;
21760 }
21761
21762 /* Helper function of premark_types_used_by_global_vars which gets called
21763 through htab_traverse.
21764
21765 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21766 marked as unused by prune_unused_types. The DIE of the type is marked
21767 only if the global variable using the type will actually be emitted. */
21768
21769 int
21770 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21771 void *)
21772 {
21773 struct types_used_by_vars_entry *entry;
21774 dw_die_ref die;
21775
21776 entry = (struct types_used_by_vars_entry *) *slot;
21777 gcc_assert (entry->type != NULL
21778 && entry->var_decl != NULL);
21779 die = lookup_type_die (entry->type);
21780 if (die)
21781 {
21782 /* Ask cgraph if the global variable really is to be emitted.
21783 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21784 varpool_node *node = varpool_node::get (entry->var_decl);
21785 if (node && node->definition)
21786 {
21787 die->die_perennial_p = 1;
21788 /* Keep the parent DIEs as well. */
21789 while ((die = die->die_parent) && die->die_perennial_p == 0)
21790 die->die_perennial_p = 1;
21791 }
21792 }
21793 return 1;
21794 }
21795
21796 /* Mark all members of used_types_hash as perennial. */
21797
21798 static void
21799 premark_used_types (struct function *fun)
21800 {
21801 if (fun && fun->used_types_hash)
21802 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21803 }
21804
21805 /* Mark all members of types_used_by_vars_entry as perennial. */
21806
21807 static void
21808 premark_types_used_by_global_vars (void)
21809 {
21810 if (types_used_by_vars_hash)
21811 types_used_by_vars_hash
21812 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21813 }
21814
21815 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21816 for CA_LOC call arg loc node. */
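/* An illustrative sketch of the DIE emitted for one call site (attribute
   set only; the actual output depends on the call):

     DW_TAG_call_site           (the GNU extension tag when strict pre-DWARF 5
       DW_AT_call_return_pc      is requested) -- label just after the call
       DW_AT_call_tail_call      present for tail calls
       DW_AT_call_origin         reference to the callee's DIE, or its
                                 address when no DIE is available  */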
21817
21818 static dw_die_ref
21819 gen_call_site_die (tree decl, dw_die_ref subr_die,
21820 struct call_arg_loc_node *ca_loc)
21821 {
21822 dw_die_ref stmt_die = NULL, die;
21823 tree block = ca_loc->block;
21824
21825 while (block
21826 && block != DECL_INITIAL (decl)
21827 && TREE_CODE (block) == BLOCK)
21828 {
21829 stmt_die = BLOCK_DIE (block);
21830 if (stmt_die)
21831 break;
21832 block = BLOCK_SUPERCONTEXT (block);
21833 }
21834 if (stmt_die == NULL)
21835 stmt_die = subr_die;
21836 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21837 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21838 if (ca_loc->tail_call_p)
21839 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21840 if (ca_loc->symbol_ref)
21841 {
21842 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21843 if (tdie)
21844 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21845 else
21846 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21847 false);
21848 }
21849 return die;
21850 }
21851
21852 /* Generate a DIE to represent a declared function (either file-scope or
21853 block-local). */
21854
21855 static void
21856 gen_subprogram_die (tree decl, dw_die_ref context_die)
21857 {
21858 tree origin = decl_ultimate_origin (decl);
21859 dw_die_ref subr_die;
21860 dw_die_ref old_die = lookup_decl_die (decl);
21861
21862 /* This function gets called multiple times for different stages of
21863 the debug process. For example, for func() in this code:
21864
21865 namespace S
21866 {
21867 void func() { ... }
21868 }
21869
21870 ...we get called 4 times. Twice in early debug and twice in
21871 late debug:
21872
21873 Early debug
21874 -----------
21875
21876 1. Once while generating func() within the namespace. This is
21877 the declaration. The declaration bit below is set, as the
21878 context is the namespace.
21879
21880 A new DIE will be generated with DW_AT_declaration set.
21881
21882 2. Once for func() itself. This is the specification. The
21883 declaration bit below is clear as the context is the CU.
21884
21885 We will use the cached DIE from (1) to create a new DIE with
21886 DW_AT_specification pointing to the declaration in (1).
21887
21888 Late debug via rest_of_handle_final()
21889 -------------------------------------
21890
21891 3. Once generating func() within the namespace. This is also the
21892 declaration, as in (1), but this time we will early exit below
21893 as we have a cached DIE and a declaration needs no additional
21894 annotations (no locations), as the source declaration line
21895 info is enough.
21896
21897 4. Once for func() itself. As in (2), this is the specification,
21898 but this time we will re-use the cached DIE, and just annotate
21899 it with the location information that should now be available.
21900
21901 For something without namespaces, but with abstract instances, we
21902 are also called multiple times:
21903
21904 class Base
21905 {
21906 public:
21907 Base (); // constructor declaration (1)
21908 };
21909
21910 Base::Base () { } // constructor specification (2)
21911
21912 Early debug
21913 -----------
21914
21915 1. Once for the Base() constructor by virtue of it being a
21916 member of the Base class. This is done via
21917 rest_of_type_compilation.
21918
21919 This is a declaration, so a new DIE will be created with
21920 DW_AT_declaration.
21921
21922 2. Once for the Base() constructor definition, but this time
21923 while generating the abstract instance of the base
21924 constructor (__base_ctor) which is being generated via early
21925 debug of reachable functions.
21926
21927 Even though we have a cached version of the declaration (1),
21928 we will create a DW_AT_specification of the declaration DIE
21929 in (1).
21930
21931 3. Once for the __base_ctor itself, but this time, we generate
21932 a DW_AT_abstract_origin version of the DW_AT_specification in
21933 (2).
21934
21935 Late debug via rest_of_handle_final
21936 -----------------------------------
21937
21938 4. One final time for the __base_ctor (which will have a cached
21939 DIE with DW_AT_abstract_origin created in (3)). This time,
21940 we will just annotate the location information now
21941 available.
21942 */
21943 int declaration = (current_function_decl != decl
21944 || class_or_namespace_scope_p (context_die));
21945
21946 /* Now that the C++ front end lazily declares artificial member fns, we
21947 might need to retrofit the declaration into its class. */
21948 if (!declaration && !origin && !old_die
21949 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
21950 && !class_or_namespace_scope_p (context_die)
21951 && debug_info_level > DINFO_LEVEL_TERSE)
21952 old_die = force_decl_die (decl);
21953
21954 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
21955 if (origin != NULL)
21956 {
21957 gcc_assert (!declaration || local_scope_p (context_die));
21958
21959 /* Fixup die_parent for the abstract instance of a nested
21960 inline function. */
21961 if (old_die && old_die->die_parent == NULL)
21962 add_child_die (context_die, old_die);
21963
21964 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
21965 {
21966 /* If we have a DW_AT_abstract_origin we have a working
21967 cached version. */
21968 subr_die = old_die;
21969 }
21970 else
21971 {
21972 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
21973 add_abstract_origin_attribute (subr_die, origin);
21974 /* This is where the actual code for a cloned function is.
21975 Let's emit the linkage name attribute for it. This helps
21976 debuggers to, e.g., set breakpoints in
21977 constructors/destructors when the user asks "break
21978 K::K". */
21979 add_linkage_name (subr_die, decl);
21980 }
21981 }
21982 /* A cached copy, possibly from early dwarf generation. Reuse as
21983 much as possible. */
21984 else if (old_die)
21985 {
21986 /* A declaration that has been previously dumped needs no
21987 additional information. */
21988 if (declaration)
21989 return;
21990
21991 if (!get_AT_flag (old_die, DW_AT_declaration)
21992 /* We can have a normal definition following an inline one in the
21993 case of redefinition of GNU C extern inlines.
21994 It seems reasonable to use AT_specification in this case. */
21995 && !get_AT (old_die, DW_AT_inline))
21996 {
21997 /* Detect and ignore this case, where we are trying to output
21998 something we have already output. */
21999 if (get_AT (old_die, DW_AT_low_pc)
22000 || get_AT (old_die, DW_AT_ranges))
22001 return;
22002
22003 /* If we have no location information, this must be a
22004 partially generated DIE from early dwarf generation.
22005 Fall through and generate it. */
22006 }
22007
22008 /* If the definition comes from the same place as the declaration,
22009 maybe use the old DIE. We always want the DIE for this function
22010 that has the *_pc attributes to be under comp_unit_die so the
22011 debugger can find it. We also need to do this for abstract
22012 instances of inlines, since the spec requires the out-of-line copy
22013 to have the same parent. For local class methods, this doesn't
22014 apply; we just use the old DIE. */
22015 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22016 struct dwarf_file_data * file_index = lookup_filename (s.file);
22017 if ((is_cu_die (old_die->die_parent)
22018 /* This condition fixes the inconsistency/ICE with the
22019 following Fortran test (or some derivative thereof) while
22020 building libgfortran:
22021
22022 module some_m
22023 contains
22024 logical function funky (FLAG)
22025 funky = .true.
22026 end function
22027 end module
22028 */
22029 || (old_die->die_parent
22030 && old_die->die_parent->die_tag == DW_TAG_module)
22031 || context_die == NULL)
22032 && (DECL_ARTIFICIAL (decl)
22033 /* The location attributes may be in the abstract origin
22034 which in the case of LTO might be not available to
22035 look at. */
22036 || get_AT (old_die, DW_AT_abstract_origin)
22037 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22038 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22039 == (unsigned) s.line)
22040 && (!debug_column_info
22041 || s.column == 0
22042 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22043 == (unsigned) s.column)))))
22044 {
22045 subr_die = old_die;
22046
22047 /* Clear out the declaration attribute, but leave the
22048 parameters so they can be augmented with location
22049 information later. Unless this was a declaration, in
22050 which case, wipe out the nameless parameters and recreate
22051 them further down. */
22052 if (remove_AT (subr_die, DW_AT_declaration))
22053 {
22054
22055 remove_AT (subr_die, DW_AT_object_pointer);
22056 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22057 }
22058 }
22059 /* Make a specification pointing to the previously built
22060 declaration. */
22061 else
22062 {
22063 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22064 add_AT_specification (subr_die, old_die);
22065 add_pubname (decl, subr_die);
22066 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22067 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22068 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22069 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22070 if (debug_column_info
22071 && s.column
22072 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22073 != (unsigned) s.column))
22074 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22075
22076 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22077 emit the real type on the definition die. */
22078 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22079 {
22080 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22081 if (die == auto_die || die == decltype_auto_die)
22082 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22083 TYPE_UNQUALIFIED, false, context_die);
22084 }
22085
22086 /* When we process the method declaration, we haven't seen
22087 the out-of-class defaulted definition yet, so we have to
22088 recheck now. */
22089 if ((dwarf_version >= 5 || ! dwarf_strict)
22090 && !get_AT (subr_die, DW_AT_defaulted))
22091 {
22092 int defaulted
22093 = lang_hooks.decls.decl_dwarf_attribute (decl,
22094 DW_AT_defaulted);
22095 if (defaulted != -1)
22096 {
22097 /* Other values must have been handled before. */
22098 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22099 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22100 }
22101 }
22102 }
22103 }
22104 /* Create a fresh DIE for anything else. */
22105 else
22106 {
22107 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22108
22109 if (TREE_PUBLIC (decl))
22110 add_AT_flag (subr_die, DW_AT_external, 1);
22111
22112 add_name_and_src_coords_attributes (subr_die, decl);
22113 add_pubname (decl, subr_die);
22114 if (debug_info_level > DINFO_LEVEL_TERSE)
22115 {
22116 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22117 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22118 TYPE_UNQUALIFIED, false, context_die);
22119 }
22120
22121 add_pure_or_virtual_attribute (subr_die, decl);
22122 if (DECL_ARTIFICIAL (decl))
22123 add_AT_flag (subr_die, DW_AT_artificial, 1);
22124
22125 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22126 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22127
22128 add_alignment_attribute (subr_die, decl);
22129
22130 add_accessibility_attribute (subr_die, decl);
22131 }
22132
22133 /* Unless we have an existing non-declaration DIE, equate the new
22134 DIE. */
22135 if (!old_die || is_declaration_die (old_die))
22136 equate_decl_number_to_die (decl, subr_die);
22137
22138 if (declaration)
22139 {
22140 if (!old_die || !get_AT (old_die, DW_AT_inline))
22141 {
22142 add_AT_flag (subr_die, DW_AT_declaration, 1);
22143
22144 /* If this is an explicit function declaration then generate
22145 a DW_AT_explicit attribute. */
22146 if ((dwarf_version >= 3 || !dwarf_strict)
22147 && lang_hooks.decls.decl_dwarf_attribute (decl,
22148 DW_AT_explicit) == 1)
22149 add_AT_flag (subr_die, DW_AT_explicit, 1);
22150
22151 /* If this is a C++11 deleted special function member then generate
22152 a DW_AT_deleted attribute. */
22153 if ((dwarf_version >= 5 || !dwarf_strict)
22154 && lang_hooks.decls.decl_dwarf_attribute (decl,
22155 DW_AT_deleted) == 1)
22156 add_AT_flag (subr_die, DW_AT_deleted, 1);
22157
22158 /* If this is a C++11 defaulted special function member then
22159 generate a DW_AT_defaulted attribute. */
22160 if (dwarf_version >= 5 || !dwarf_strict)
22161 {
22162 int defaulted
22163 = lang_hooks.decls.decl_dwarf_attribute (decl,
22164 DW_AT_defaulted);
22165 if (defaulted != -1)
22166 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22167 }
22168
22169 /* If this is a C++11 non-static member function with & ref-qualifier
22170 then generate a DW_AT_reference attribute. */
22171 if ((dwarf_version >= 5 || !dwarf_strict)
22172 && lang_hooks.decls.decl_dwarf_attribute (decl,
22173 DW_AT_reference) == 1)
22174 add_AT_flag (subr_die, DW_AT_reference, 1);
22175
22176 /* If this is a C++11 non-static member function with &&
22177 ref-qualifier then generate a DW_AT_rvalue_reference attribute. */
22178 if ((dwarf_version >= 5 || !dwarf_strict)
22179 && lang_hooks.decls.decl_dwarf_attribute (decl,
22180 DW_AT_rvalue_reference)
22181 == 1)
22182 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22183 }
22184 }
22185 /* For non-DECL_EXTERNAL decls, if range information is available, fill
22186 the DIE with it. */
22187 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22188 {
22189 HOST_WIDE_INT cfa_fb_offset;
22190
22191 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22192
22193 if (!crtl->has_bb_partition)
22194 {
22195 dw_fde_ref fde = fun->fde;
22196 if (fde->dw_fde_begin)
22197 {
22198 /* We have already generated the labels. */
22199 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22200 fde->dw_fde_end, false);
22201 }
22202 else
22203 {
22204 /* Create start/end labels and add the range. */
22205 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22206 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22207 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22208 current_function_funcdef_no);
22209 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22210 current_function_funcdef_no);
22211 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22212 false);
22213 }
22214
22215 #if VMS_DEBUGGING_INFO
22216 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22217 Section 2.3 Prologue and Epilogue Attributes:
22218 When a breakpoint is set on entry to a function, it is generally
22219 desirable for execution to be suspended, not on the very first
22220 instruction of the function, but rather at a point after the
22221 function's frame has been set up, after any language defined local
22222 declaration processing has been completed, and before execution of
22223 the first statement of the function begins. Debuggers generally
22224 cannot properly determine where this point is. Similarly for a
22225 breakpoint set on exit from a function. The prologue and epilogue
22226 attributes allow a compiler to communicate the location(s) to use. */
22227
22228 {
22229 if (fde->dw_fde_vms_end_prologue)
22230 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22231 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22232
22233 if (fde->dw_fde_vms_begin_epilogue)
22234 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22235 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22236 }
22237 #endif
22238
22239 }
22240 else
22241 {
22242 /* Generate pubnames entries for the split function code ranges. */
22243 dw_fde_ref fde = fun->fde;
22244
22245 if (fde->dw_fde_second_begin)
22246 {
22247 if (dwarf_version >= 3 || !dwarf_strict)
22248 {
22249 /* We should use ranges for non-contiguous code section
22250 addresses. Use the actual code range for the initial
22251 section, since the HOT/COLD labels might precede an
22252 alignment offset. */
22253 bool range_list_added = false;
22254 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22255 fde->dw_fde_end, &range_list_added,
22256 false);
22257 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22258 fde->dw_fde_second_end,
22259 &range_list_added, false);
22260 if (range_list_added)
22261 add_ranges (NULL);
22262 }
22263 else
22264 {
22265 /* There is no real support in DW2 for this, so we make
22266 a work-around. First, emit the pub name for the segment
22267 containing the function label. Then make and emit a
22268 simplified subprogram DIE for the second segment with the
22269 name prefixed by __second_sect_of_. We use the same
22270 linkage name for the second die so that gdb will find both
22271 sections when given "b foo". */
22272 const char *name = NULL;
22273 tree decl_name = DECL_NAME (decl);
22274 dw_die_ref seg_die;
22275
22276 /* Do the 'primary' section. */
22277 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22278 fde->dw_fde_end, false);
22279
22280 /* Build a minimal DIE for the secondary section. */
22281 seg_die = new_die (DW_TAG_subprogram,
22282 subr_die->die_parent, decl);
22283
22284 if (TREE_PUBLIC (decl))
22285 add_AT_flag (seg_die, DW_AT_external, 1);
22286
22287 if (decl_name != NULL
22288 && IDENTIFIER_POINTER (decl_name) != NULL)
22289 {
22290 name = dwarf2_name (decl, 1);
22291 if (! DECL_ARTIFICIAL (decl))
22292 add_src_coords_attributes (seg_die, decl);
22293
22294 add_linkage_name (seg_die, decl);
22295 }
22296 gcc_assert (name != NULL);
22297 add_pure_or_virtual_attribute (seg_die, decl);
22298 if (DECL_ARTIFICIAL (decl))
22299 add_AT_flag (seg_die, DW_AT_artificial, 1);
22300
22301 name = concat ("__second_sect_of_", name, NULL);
22302 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22303 fde->dw_fde_second_end, false);
22304 add_name_attribute (seg_die, name);
22305 if (want_pubnames ())
22306 add_pubname_string (name, seg_die);
22307 }
22308 }
22309 else
22310 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22311 false);
22312 }
22313
22314 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22315
22316 /* We define the "frame base" as the function's CFA. This is more
22317 convenient for several reasons: (1) It's stable across the prologue
22318 and epilogue, which makes it better than just a frame pointer,
22319 (2) With dwarf3, there exists a one-byte encoding that allows us
22320 to reference the .debug_frame data by proxy, but failing that,
22321 (3) We can at least reuse the code inspection and interpretation
22322 code that determines the CFA position at various points in the
22323 function. */
22324 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22325 {
22326 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22327 add_AT_loc (subr_die, DW_AT_frame_base, op);
22328 }
22329 else
22330 {
22331 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22332 if (list->dw_loc_next)
22333 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22334 else
22335 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22336 }
22337
22338 /* Compute a displacement from the "steady-state frame pointer" to
22339 the CFA. The former is what all stack slots and argument slots
22340 will reference in the rtl; the latter is what we've told the
22341 debugger about. We'll need to adjust all frame_base references
22342 by this displacement. */
22343 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22344
22345 if (fun->static_chain_decl)
22346 {
22347 /* DWARF requires a location expression here that computes the
22348 address of the enclosing subprogram's frame base. The machinery
22349 in tree-nested.c is supposed to store this specific address in the
22350 last field of the FRAME record. */
22351 const tree frame_type
22352 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22353 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22354
22355 tree fb_expr
22356 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22357 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22358 fb_expr, fb_decl, NULL_TREE);
22359
22360 add_AT_location_description (subr_die, DW_AT_static_link,
22361 loc_list_from_tree (fb_expr, 0, NULL));
22362 }
22363
22364 resolve_variable_values ();
22365 }
22366
22367 /* Generate child DIEs for template parameters. */
22368 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22369 gen_generic_params_dies (decl);
22370
22371 /* Now output descriptions of the arguments for this function. This gets
22372 (unnecessarily?) complex because the DECL_ARGUMENTS list
22373 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22374 `...' at the end of the formal parameter list. In order to find out if
22375 there was a trailing ellipsis or not, we must instead look at the type
22376 associated with the FUNCTION_DECL. This will be a node of type
22377 FUNCTION_TYPE. If the chain of type nodes hanging off of this
22378 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22379 an ellipsis at the end. */
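/* A short illustration of that check (a sketch using the usual GCC
   accessors, not code from this file):

     int f (int, ...);   TYPE_ARG_TYPES: int                 (no trailing void)
     int g (int);        TYPE_ARG_TYPES: int, void_type_node
     int h ();           TYPE_ARG_TYPES: NULL                (old-style C, unprototyped)

   so prototype_p and stdarg_p on TREE_TYPE (decl), used further down,
   distinguish the three cases.  */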
22380
22381 /* In the case where we are describing a mere function declaration, all we
22382 need to do here (and all we *can* do here) is to describe the *types* of
22383 its formal parameters. */
22384 if (debug_info_level <= DINFO_LEVEL_TERSE)
22385 ;
22386 else if (declaration)
22387 gen_formal_types_die (decl, subr_die);
22388 else
22389 {
22390 /* Generate DIEs to represent all known formal parameters. */
22391 tree parm = DECL_ARGUMENTS (decl);
22392 tree generic_decl = early_dwarf
22393 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22394 tree generic_decl_parm = generic_decl
22395 ? DECL_ARGUMENTS (generic_decl)
22396 : NULL;
22397
22398 /* Now we want to walk the list of parameters of the function and
22399 emit their relevant DIEs.
22400
22401 We consider the case of DECL being an instance of a generic function
22402 as well as it being a normal function.
22403
22404 If DECL is an instance of a generic function we walk the
22405 parameters of the generic function declaration _and_ the parameters of
22406 DECL itself. This is useful because we want to emit specific DIEs for
22407 function parameter packs and those are declared as part of the
22408 generic function declaration. In that particular case,
22409 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22410 That DIE has children DIEs representing the set of arguments
22411 of the pack. Note that the set of pack arguments can be empty.
22412 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22413 child DIEs.
22414
22415 Otherwise, we just consider the parameters of DECL. */
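	/* Editor's sketch (illustrative only): for an instance of a C++
	   function template such as

	     template <typename... Args> void f (int first, Args... rest);

	   the walk below is expected to produce, roughly,

	     DW_TAG_formal_parameter            <- "first"
	     DW_TAG_GNU_formal_parameter_pack   <- "rest"
	       DW_TAG_formal_parameter          <- one per pack element,
						  none if the pack is empty  */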
22416 while (generic_decl_parm || parm)
22417 {
22418 if (generic_decl_parm
22419 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22420 gen_formal_parameter_pack_die (generic_decl_parm,
22421 parm, subr_die,
22422 &parm);
22423 else if (parm && !POINTER_BOUNDS_P (parm))
22424 {
22425 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22426
22427 if (early_dwarf
22428 && parm == DECL_ARGUMENTS (decl)
22429 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22430 && parm_die
22431 && (dwarf_version >= 3 || !dwarf_strict))
22432 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22433
22434 parm = DECL_CHAIN (parm);
22435 }
22436 else if (parm)
22437 parm = DECL_CHAIN (parm);
22438
22439 if (generic_decl_parm)
22440 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22441 }
22442
22443 /* Decide whether we need an unspecified_parameters DIE at the end.
22444 There are 2 more cases to do this for: 1) the ANSI `...' declaration -
22445 this is detectable when the end of the arg list is not a
22446 void_type_node; 2) an unprototyped function declaration (not a
22447 definition). This just means that we have no info about the
22448 parameters at all. */
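	  /* Editor's sketch (illustrative only): both of

	       int f (int a, ...);     -- prototyped, trailing ellipsis
	       int g ();               -- unprototyped declaration

	     are expected to get a DW_TAG_unspecified_parameters child,
	     matching the two cases handled just below.  */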
22449 if (early_dwarf)
22450 {
22451 if (prototype_p (TREE_TYPE (decl)))
22452 {
22453 /* This is the prototyped case; check for a trailing `...'. */
22454 if (stdarg_p (TREE_TYPE (decl)))
22455 gen_unspecified_parameters_die (decl, subr_die);
22456 }
22457 else if (DECL_INITIAL (decl) == NULL_TREE)
22458 gen_unspecified_parameters_die (decl, subr_die);
22459 }
22460 }
22461
22462 if (subr_die != old_die)
22463 /* Add the calling convention attribute if requested. */
22464 add_calling_convention_attribute (subr_die, decl);
22465
22466 /* Output Dwarf info for all of the stuff within the body of the function
22467 (if it has one - it may be just a declaration).
22468
22469 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22470 a function. This BLOCK actually represents the outermost binding contour
22471 for the function, i.e. the contour in which the function's formal
22472 parameters and labels get declared. Curiously, it appears that the front
22473 end doesn't actually put the PARM_DECL nodes for the current function onto
22474 the BLOCK_VARS list for this outer scope; they are strung off of the
22475 DECL_ARGUMENTS list for the function instead.
22476
22477 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
22478 the LABEL_DECL nodes for the function however, and we output DWARF info
22479 for those in decls_for_scope. Just within the `outer_scope' there will be
22480 a BLOCK node representing the function's outermost pair of curly braces,
22481 and any blocks used for the base and member initializers of a C++
22482 constructor function. */
22483 tree outer_scope = DECL_INITIAL (decl);
22484 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22485 {
22486 int call_site_note_count = 0;
22487 int tail_call_site_note_count = 0;
22488
22489 /* Emit a DW_TAG_variable DIE for a named return value. */
22490 if (DECL_NAME (DECL_RESULT (decl)))
22491 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22492
22493 /* The first time through decls_for_scope we will generate the
22494 DIEs for the locals. The second time, we fill in the
22495 location info. */
22496 decls_for_scope (outer_scope, subr_die);
22497
22498 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22499 {
22500 struct call_arg_loc_node *ca_loc;
22501 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22502 {
22503 dw_die_ref die = NULL;
22504 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22505 rtx arg, next_arg;
22506
22507 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22508 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22509 : NULL_RTX);
22510 arg; arg = next_arg)
22511 {
22512 dw_loc_descr_ref reg, val;
22513 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22514 dw_die_ref cdie, tdie = NULL;
22515
22516 next_arg = XEXP (arg, 1);
22517 if (REG_P (XEXP (XEXP (arg, 0), 0))
22518 && next_arg
22519 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22520 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22521 && REGNO (XEXP (XEXP (arg, 0), 0))
22522 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22523 next_arg = XEXP (next_arg, 1);
22524 if (mode == VOIDmode)
22525 {
22526 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22527 if (mode == VOIDmode)
22528 mode = GET_MODE (XEXP (arg, 0));
22529 }
22530 if (mode == VOIDmode || mode == BLKmode)
22531 continue;
22532 /* Get dynamic information about call target only if we
22533 have no static information: we cannot generate both
22534 DW_AT_call_origin and DW_AT_call_target
22535 attributes. */
22536 if (ca_loc->symbol_ref == NULL_RTX)
22537 {
22538 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22539 {
22540 tloc = XEXP (XEXP (arg, 0), 1);
22541 continue;
22542 }
22543 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22544 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22545 {
22546 tlocc = XEXP (XEXP (arg, 0), 1);
22547 continue;
22548 }
22549 }
22550 reg = NULL;
22551 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22552 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22553 VAR_INIT_STATUS_INITIALIZED);
22554 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22555 {
22556 rtx mem = XEXP (XEXP (arg, 0), 0);
22557 reg = mem_loc_descriptor (XEXP (mem, 0),
22558 get_address_mode (mem),
22559 GET_MODE (mem),
22560 VAR_INIT_STATUS_INITIALIZED);
22561 }
22562 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22563 == DEBUG_PARAMETER_REF)
22564 {
22565 tree tdecl
22566 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22567 tdie = lookup_decl_die (tdecl);
22568 if (tdie == NULL)
22569 continue;
22570 }
22571 else
22572 continue;
22573 if (reg == NULL
22574 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22575 != DEBUG_PARAMETER_REF)
22576 continue;
22577 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22578 VOIDmode,
22579 VAR_INIT_STATUS_INITIALIZED);
22580 if (val == NULL)
22581 continue;
22582 if (die == NULL)
22583 die = gen_call_site_die (decl, subr_die, ca_loc);
22584 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22585 NULL_TREE);
22586 if (reg != NULL)
22587 add_AT_loc (cdie, DW_AT_location, reg);
22588 else if (tdie != NULL)
22589 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22590 tdie);
22591 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22592 if (next_arg != XEXP (arg, 1))
22593 {
22594 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22595 if (mode == VOIDmode)
22596 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22597 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22598 0), 1),
22599 mode, VOIDmode,
22600 VAR_INIT_STATUS_INITIALIZED);
22601 if (val != NULL)
22602 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22603 val);
22604 }
22605 }
22606 if (die == NULL
22607 && (ca_loc->symbol_ref || tloc))
22608 die = gen_call_site_die (decl, subr_die, ca_loc);
22609 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22610 {
22611 dw_loc_descr_ref tval = NULL;
22612
22613 if (tloc != NULL_RTX)
22614 tval = mem_loc_descriptor (tloc,
22615 GET_MODE (tloc) == VOIDmode
22616 ? Pmode : GET_MODE (tloc),
22617 VOIDmode,
22618 VAR_INIT_STATUS_INITIALIZED);
22619 if (tval)
22620 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22621 else if (tlocc != NULL_RTX)
22622 {
22623 tval = mem_loc_descriptor (tlocc,
22624 GET_MODE (tlocc) == VOIDmode
22625 ? Pmode : GET_MODE (tlocc),
22626 VOIDmode,
22627 VAR_INIT_STATUS_INITIALIZED);
22628 if (tval)
22629 add_AT_loc (die,
22630 dwarf_AT (DW_AT_call_target_clobbered),
22631 tval);
22632 }
22633 }
22634 if (die != NULL)
22635 {
22636 call_site_note_count++;
22637 if (ca_loc->tail_call_p)
22638 tail_call_site_note_count++;
22639 }
22640 }
22641 }
22642 call_arg_locations = NULL;
22643 call_arg_loc_last = NULL;
22644 if (tail_call_site_count >= 0
22645 && tail_call_site_count == tail_call_site_note_count
22646 && (!dwarf_strict || dwarf_version >= 5))
22647 {
22648 if (call_site_count >= 0
22649 && call_site_count == call_site_note_count)
22650 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22651 else
22652 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22653 }
22654 call_site_count = -1;
22655 tail_call_site_count = -1;
22656 }
22657
22658 /* Mark used types after we have created DIEs for the functions scopes. */
22659 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22660 }
22661
22662 /* Returns a hash value for X (which really is a die_struct). */
22663
22664 hashval_t
22665 block_die_hasher::hash (die_struct *d)
22666 {
22667 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22668 }
22669
22670 /* Return true if the decl_id and die_parent of die_struct X are the same
22671 as the decl_id and die_parent of die_struct Y. */
22672
22673 bool
22674 block_die_hasher::equal (die_struct *x, die_struct *y)
22675 {
22676 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22677 }
22678
22679 /* Return TRUE if DECL, which may have been previously generated as
22680 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22681 true if decl (or its origin) is either an extern declaration or a
22682 class/namespace scoped declaration.
22683
22684 The declare_in_namespace support causes us to get two DIEs for one
22685 variable, both of which are declarations. We want to avoid
22686 considering one to be a specification, so we must test for
22687 DECLARATION and DW_AT_declaration. */
22688 static inline bool
22689 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22690 {
22691 return (old_die && TREE_STATIC (decl) && !declaration
22692 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22693 }
22694
22695 /* Return true if DECL is a local static. */
22696
22697 static inline bool
22698 local_function_static (tree decl)
22699 {
22700 gcc_assert (VAR_P (decl));
22701 return TREE_STATIC (decl)
22702 && DECL_CONTEXT (decl)
22703 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22704 }
22705
22706 /* Generate a DIE to represent a declared data object.
22707 Either DECL or ORIGIN must be non-null. */
22708
22709 static void
22710 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22711 {
22712 HOST_WIDE_INT off = 0;
22713 tree com_decl;
22714 tree decl_or_origin = decl ? decl : origin;
22715 tree ultimate_origin;
22716 dw_die_ref var_die;
22717 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22718 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22719 || class_or_namespace_scope_p (context_die));
22720 bool specialization_p = false;
22721 bool no_linkage_name = false;
22722
22723 /* While C++ inline static data members have definitions inside the
22724 class, force the first DIE to be a declaration, then let gen_member_die
22725 reparent it to the class context and call gen_variable_die again
22726 to create the outside-of-class DIE for the definition. */
22727 if (!declaration
22728 && old_die == NULL
22729 && decl
22730 && DECL_CONTEXT (decl)
22731 && TYPE_P (DECL_CONTEXT (decl))
22732 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22733 {
22734 declaration = true;
22735 if (dwarf_version < 5)
22736 no_linkage_name = true;
22737 }
22738
22739 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22740 if (decl || ultimate_origin)
22741 origin = ultimate_origin;
22742 com_decl = fortran_common (decl_or_origin, &off);
22743
22744 /* A symbol in a common block gets emitted as a child of the common block
22745 DIE, in the form of a data member. */
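  /* Editor's sketch (illustrative only): for Fortran code like

       COMMON /BLK/ X, Y

     the expected shape is a DW_TAG_common_block DIE for BLK with one
     DW_TAG_variable child per member, each child's DW_AT_location being
     the block's address plus the member's offset when that offset is
     nonzero (the OFF adjustment applied below).  */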
22746 if (com_decl)
22747 {
22748 dw_die_ref com_die;
22749 dw_loc_list_ref loc = NULL;
22750 die_node com_die_arg;
22751
22752 var_die = lookup_decl_die (decl_or_origin);
22753 if (var_die)
22754 {
22755 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22756 {
22757 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22758 if (loc)
22759 {
22760 if (off)
22761 {
22762 /* Optimize the common case. */
22763 if (single_element_loc_list_p (loc)
22764 && loc->expr->dw_loc_opc == DW_OP_addr
22765 && loc->expr->dw_loc_next == NULL
22766 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22767 == SYMBOL_REF)
22768 {
22769 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22770 loc->expr->dw_loc_oprnd1.v.val_addr
22771 = plus_constant (GET_MODE (x), x, off);
22772 }
22773 else
22774 loc_list_plus_const (loc, off);
22775 }
22776 add_AT_location_description (var_die, DW_AT_location, loc);
22777 remove_AT (var_die, DW_AT_declaration);
22778 }
22779 }
22780 return;
22781 }
22782
22783 if (common_block_die_table == NULL)
22784 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22785
22786 com_die_arg.decl_id = DECL_UID (com_decl);
22787 com_die_arg.die_parent = context_die;
22788 com_die = common_block_die_table->find (&com_die_arg);
22789 if (! early_dwarf)
22790 loc = loc_list_from_tree (com_decl, 2, NULL);
22791 if (com_die == NULL)
22792 {
22793 const char *cnam
22794 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22795 die_node **slot;
22796
22797 com_die = new_die (DW_TAG_common_block, context_die, decl);
22798 add_name_and_src_coords_attributes (com_die, com_decl);
22799 if (loc)
22800 {
22801 add_AT_location_description (com_die, DW_AT_location, loc);
22802 /* Avoid sharing the same loc descriptor between
22803 DW_TAG_common_block and DW_TAG_variable. */
22804 loc = loc_list_from_tree (com_decl, 2, NULL);
22805 }
22806 else if (DECL_EXTERNAL (decl_or_origin))
22807 add_AT_flag (com_die, DW_AT_declaration, 1);
22808 if (want_pubnames ())
22809 add_pubname_string (cnam, com_die); /* ??? needed? */
22810 com_die->decl_id = DECL_UID (com_decl);
22811 slot = common_block_die_table->find_slot (com_die, INSERT);
22812 *slot = com_die;
22813 }
22814 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22815 {
22816 add_AT_location_description (com_die, DW_AT_location, loc);
22817 loc = loc_list_from_tree (com_decl, 2, NULL);
22818 remove_AT (com_die, DW_AT_declaration);
22819 }
22820 var_die = new_die (DW_TAG_variable, com_die, decl);
22821 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22822 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22823 decl_quals (decl_or_origin), false,
22824 context_die);
22825 add_alignment_attribute (var_die, decl);
22826 add_AT_flag (var_die, DW_AT_external, 1);
22827 if (loc)
22828 {
22829 if (off)
22830 {
22831 /* Optimize the common case. */
22832 if (single_element_loc_list_p (loc)
22833 && loc->expr->dw_loc_opc == DW_OP_addr
22834 && loc->expr->dw_loc_next == NULL
22835 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22836 {
22837 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22838 loc->expr->dw_loc_oprnd1.v.val_addr
22839 = plus_constant (GET_MODE (x), x, off);
22840 }
22841 else
22842 loc_list_plus_const (loc, off);
22843 }
22844 add_AT_location_description (var_die, DW_AT_location, loc);
22845 }
22846 else if (DECL_EXTERNAL (decl_or_origin))
22847 add_AT_flag (var_die, DW_AT_declaration, 1);
22848 if (decl)
22849 equate_decl_number_to_die (decl, var_die);
22850 return;
22851 }
22852
22853 if (old_die)
22854 {
22855 if (declaration)
22856 {
22857 /* A declaration that has been previously dumped needs no
22858 further annotations, since it doesn't need location info on
22859 the second pass. */
22860 return;
22861 }
22862 else if (decl_will_get_specification_p (old_die, decl, declaration)
22863 && !get_AT (old_die, DW_AT_specification))
22864 {
22865 /* Fall-thru so we can make a new variable die along with a
22866 DW_AT_specification. */
22867 }
22868 else if (origin && old_die->die_parent != context_die)
22869 {
22870 /* If we will be creating an inlined instance, we need a
22871 new DIE that will get annotated with
22872 DW_AT_abstract_origin. Clear things so we can get a
22873 new DIE. */
22874 gcc_assert (!DECL_ABSTRACT_P (decl));
22875 old_die = NULL;
22876 }
22877 else
22878 {
22879 /* If a DIE was dumped early, it still needs location info.
22880 Skip to where we fill the location bits. */
22881 var_die = old_die;
22882
22883 /* ??? In LTRANS we cannot annotate early-created variably
22884 modified type DIEs without copying them and adjusting all
22885 references to them. Thus we dumped them again; also add a
22886 reference to them here. */
22887 tree type = TREE_TYPE (decl_or_origin);
22888 if (in_lto_p
22889 && variably_modified_type_p
22890 (type, decl_function_context (decl_or_origin)))
22891 {
22892 if (decl_by_reference_p (decl_or_origin))
22893 add_type_attribute (var_die, TREE_TYPE (type),
22894 TYPE_UNQUALIFIED, false, context_die);
22895 else
22896 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
22897 false, context_die);
22898 }
22899
22900 goto gen_variable_die_location;
22901 }
22902 }
22903
22904 /* For static data members, the declaration in the class is supposed
22905 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
22906 also in DWARF2; the specification should still be DW_TAG_variable
22907 referencing the DW_TAG_member DIE. */
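  /* Editor's sketch (illustrative only): for

       struct S { static int m; };
       int S::m;

     the in-class declaration becomes a DW_TAG_member (DW_TAG_variable from
     DWARF 5 on) marked DW_AT_declaration, and the definition is a separate
     DW_TAG_variable whose DW_AT_specification refers back to that
     declaration DIE; see decl_will_get_specification_p above.  */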
22908 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
22909 var_die = new_die (DW_TAG_member, context_die, decl);
22910 else
22911 var_die = new_die (DW_TAG_variable, context_die, decl);
22912
22913 if (origin != NULL)
22914 add_abstract_origin_attribute (var_die, origin);
22915
22916 /* Loop unrolling can create multiple blocks that refer to the same
22917 static variable, so we must test for the DW_AT_declaration flag.
22918
22919 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
22920 copy decls and set the DECL_ABSTRACT_P flag on them instead of
22921 sharing them.
22922
22923 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
22924 else if (decl_will_get_specification_p (old_die, decl, declaration))
22925 {
22926 /* This is a definition of a C++ class level static. */
22927 add_AT_specification (var_die, old_die);
22928 specialization_p = true;
22929 if (DECL_NAME (decl))
22930 {
22931 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22932 struct dwarf_file_data * file_index = lookup_filename (s.file);
22933
22934 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22935 add_AT_file (var_die, DW_AT_decl_file, file_index);
22936
22937 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22938 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
22939
22940 if (debug_column_info
22941 && s.column
22942 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22943 != (unsigned) s.column))
22944 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
22945
22946 if (old_die->die_tag == DW_TAG_member)
22947 add_linkage_name (var_die, decl);
22948 }
22949 }
22950 else
22951 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
22952
22953 if ((origin == NULL && !specialization_p)
22954 || (origin != NULL
22955 && !DECL_ABSTRACT_P (decl_or_origin)
22956 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
22957 decl_function_context
22958 (decl_or_origin))))
22959 {
22960 tree type = TREE_TYPE (decl_or_origin);
22961
22962 if (decl_by_reference_p (decl_or_origin))
22963 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
22964 context_die);
22965 else
22966 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
22967 context_die);
22968 }
22969
22970 if (origin == NULL && !specialization_p)
22971 {
22972 if (TREE_PUBLIC (decl))
22973 add_AT_flag (var_die, DW_AT_external, 1);
22974
22975 if (DECL_ARTIFICIAL (decl))
22976 add_AT_flag (var_die, DW_AT_artificial, 1);
22977
22978 add_alignment_attribute (var_die, decl);
22979
22980 add_accessibility_attribute (var_die, decl);
22981 }
22982
22983 if (declaration)
22984 add_AT_flag (var_die, DW_AT_declaration, 1);
22985
22986 if (decl && (DECL_ABSTRACT_P (decl)
22987 || !old_die || is_declaration_die (old_die)))
22988 equate_decl_number_to_die (decl, var_die);
22989
22990 gen_variable_die_location:
22991 if (! declaration
22992 && (! DECL_ABSTRACT_P (decl_or_origin)
22993 /* Local static vars are shared between all clones/inlines,
22994 so emit DW_AT_location on the abstract DIE if DECL_RTL is
22995 already set. */
22996 || (VAR_P (decl_or_origin)
22997 && TREE_STATIC (decl_or_origin)
22998 && DECL_RTL_SET_P (decl_or_origin))))
22999 {
23000 if (early_dwarf)
23001 add_pubname (decl_or_origin, var_die);
23002 else
23003 add_location_or_const_value_attribute (var_die, decl_or_origin,
23004 decl == NULL);
23005 }
23006 else
23007 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23008
23009 if ((dwarf_version >= 4 || !dwarf_strict)
23010 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23011 DW_AT_const_expr) == 1
23012 && !get_AT (var_die, DW_AT_const_expr)
23013 && !specialization_p)
23014 add_AT_flag (var_die, DW_AT_const_expr, 1);
23015
23016 if (!dwarf_strict)
23017 {
23018 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23019 DW_AT_inline);
23020 if (inl != -1
23021 && !get_AT (var_die, DW_AT_inline)
23022 && !specialization_p)
23023 add_AT_unsigned (var_die, DW_AT_inline, inl);
23024 }
23025 }
23026
23027 /* Generate a DIE to represent a named constant. */
23028
23029 static void
23030 gen_const_die (tree decl, dw_die_ref context_die)
23031 {
23032 dw_die_ref const_die;
23033 tree type = TREE_TYPE (decl);
23034
23035 const_die = lookup_decl_die (decl);
23036 if (const_die)
23037 return;
23038
23039 const_die = new_die (DW_TAG_constant, context_die, decl);
23040 equate_decl_number_to_die (decl, const_die);
23041 add_name_and_src_coords_attributes (const_die, decl);
23042 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23043 if (TREE_PUBLIC (decl))
23044 add_AT_flag (const_die, DW_AT_external, 1);
23045 if (DECL_ARTIFICIAL (decl))
23046 add_AT_flag (const_die, DW_AT_artificial, 1);
23047 tree_add_const_value_attribute_for_decl (const_die, decl);
23048 }
23049
23050 /* Generate a DIE to represent a label identifier. */
23051
23052 static void
23053 gen_label_die (tree decl, dw_die_ref context_die)
23054 {
23055 tree origin = decl_ultimate_origin (decl);
23056 dw_die_ref lbl_die = lookup_decl_die (decl);
23057 rtx insn;
23058 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23059
23060 if (!lbl_die)
23061 {
23062 lbl_die = new_die (DW_TAG_label, context_die, decl);
23063 equate_decl_number_to_die (decl, lbl_die);
23064
23065 if (origin != NULL)
23066 add_abstract_origin_attribute (lbl_die, origin);
23067 else
23068 add_name_and_src_coords_attributes (lbl_die, decl);
23069 }
23070
23071 if (DECL_ABSTRACT_P (decl))
23072 equate_decl_number_to_die (decl, lbl_die);
23073 else if (! early_dwarf)
23074 {
23075 insn = DECL_RTL_IF_SET (decl);
23076
23077 /* Deleted labels are programmer-specified labels which have been
23078 eliminated because of various optimizations. We still emit them
23079 here so that it is possible to put breakpoints on them. */
23080 if (insn
23081 && (LABEL_P (insn)
23082 || ((NOTE_P (insn)
23083 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23084 {
23085 /* When optimization is enabled (via -O) some parts of the compiler
23086 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23087 represent source-level labels which were explicitly declared by
23088 the user. This really shouldn't be happening though, so catch
23089 it if it ever does happen. */
23090 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23091
23092 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23093 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23094 }
23095 else if (insn
23096 && NOTE_P (insn)
23097 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23098 && CODE_LABEL_NUMBER (insn) != -1)
23099 {
23100 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23101 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23102 }
23103 }
23104 }
23105
23106 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23107 attributes to the DIE for a block STMT, to describe where the inlined
23108 function was called from. This is similar to add_src_coords_attributes. */
23109
23110 static inline void
23111 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23112 {
23113 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23114
23115 if (dwarf_version >= 3 || !dwarf_strict)
23116 {
23117 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23118 add_AT_unsigned (die, DW_AT_call_line, s.line);
23119 if (debug_column_info && s.column)
23120 add_AT_unsigned (die, DW_AT_call_column, s.column);
23121 }
23122 }
23123
23124
23125 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23126 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23127
23128 static inline void
23129 add_high_low_attributes (tree stmt, dw_die_ref die)
23130 {
23131 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23132
23133 if (BLOCK_FRAGMENT_CHAIN (stmt)
23134 && (dwarf_version >= 3 || !dwarf_strict))
23135 {
23136 tree chain, superblock = NULL_TREE;
23137 dw_die_ref pdie;
23138 dw_attr_node *attr = NULL;
23139
23140 if (inlined_function_outer_scope_p (stmt))
23141 {
23142 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23143 BLOCK_NUMBER (stmt));
23144 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23145 }
23146
23147 /* Optimize duplicate .debug_ranges lists or even tails of
23148 lists. If this BLOCK has the same ranges as its supercontext,
23149 look up the DW_AT_ranges attribute in the supercontext (and
23150 recursively so), verify that the ranges_table contains the
23151 right values and use it instead of adding a new .debug_ranges entry. */
23152 for (chain = stmt, pdie = die;
23153 BLOCK_SAME_RANGE (chain);
23154 chain = BLOCK_SUPERCONTEXT (chain))
23155 {
23156 dw_attr_node *new_attr;
23157
23158 pdie = pdie->die_parent;
23159 if (pdie == NULL)
23160 break;
23161 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23162 break;
23163 new_attr = get_AT (pdie, DW_AT_ranges);
23164 if (new_attr == NULL
23165 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23166 break;
23167 attr = new_attr;
23168 superblock = BLOCK_SUPERCONTEXT (chain);
23169 }
23170 if (attr != NULL
23171 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23172 == BLOCK_NUMBER (superblock))
23173 && BLOCK_FRAGMENT_CHAIN (superblock))
23174 {
23175 unsigned long off = attr->dw_attr_val.v.val_offset;
23176 unsigned long supercnt = 0, thiscnt = 0;
23177 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23178 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23179 {
23180 ++supercnt;
23181 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23182 == BLOCK_NUMBER (chain));
23183 }
23184 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23185 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23186 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23187 ++thiscnt;
23188 gcc_assert (supercnt >= thiscnt);
23189 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23190 false);
23191 note_rnglist_head (off + supercnt - thiscnt);
23192 return;
23193 }
23194
23195 unsigned int offset = add_ranges (stmt, true);
23196 add_AT_range_list (die, DW_AT_ranges, offset, false);
23197 note_rnglist_head (offset);
23198
23199 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23200 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23201 do
23202 {
23203 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23204 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23205 chain = BLOCK_FRAGMENT_CHAIN (chain);
23206 }
23207 while (chain);
23208 add_ranges (NULL);
23209 }
23210 else
23211 {
23212 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23213 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23214 BLOCK_NUMBER (stmt));
23215 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23216 BLOCK_NUMBER (stmt));
23217 add_AT_low_high_pc (die, label, label_high, false);
23218 }
23219 }
23220
23221 /* Generate a DIE for a lexical block. */
23222
23223 static void
23224 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23225 {
23226 dw_die_ref old_die = BLOCK_DIE (stmt);
23227 dw_die_ref stmt_die = NULL;
23228 if (!old_die)
23229 {
23230 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23231 BLOCK_DIE (stmt) = stmt_die;
23232 }
23233
23234 if (BLOCK_ABSTRACT (stmt))
23235 {
23236 if (old_die)
23237 {
23238 /* This must have been generated early and it won't even
23239 need location information since it's a DW_AT_inline
23240 function. */
23241 if (flag_checking)
23242 for (dw_die_ref c = context_die; c; c = c->die_parent)
23243 if (c->die_tag == DW_TAG_inlined_subroutine
23244 || c->die_tag == DW_TAG_subprogram)
23245 {
23246 gcc_assert (get_AT (c, DW_AT_inline));
23247 break;
23248 }
23249 return;
23250 }
23251 }
23252 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23253 {
23254 /* If this is an inlined instance, create a new lexical die for
23255 anything below to attach DW_AT_abstract_origin to. */
23256 if (old_die)
23257 {
23258 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23259 BLOCK_DIE (stmt) = stmt_die;
23260 old_die = NULL;
23261 }
23262
23263 tree origin = block_ultimate_origin (stmt);
23264 if (origin != NULL_TREE && origin != stmt)
23265 add_abstract_origin_attribute (stmt_die, origin);
23266 }
23267
23268 if (old_die)
23269 stmt_die = old_die;
23270
23271 /* A non-abstract block whose blocks have already been reordered
23272 should have the instruction range for this block. If so, set the
23273 high/low attributes. */
23274 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23275 {
23276 gcc_assert (stmt_die);
23277 add_high_low_attributes (stmt, stmt_die);
23278 }
23279
23280 decls_for_scope (stmt, stmt_die);
23281 }
23282
23283 /* Generate a DIE for an inlined subprogram. */
23284
23285 static void
23286 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23287 {
23288 tree decl;
23289
23290 /* The instance of the function that is effectively being inlined shall
23291 not be abstract. */
23292 gcc_assert (! BLOCK_ABSTRACT (stmt));
23293
23294 decl = block_ultimate_origin (stmt);
23295
23296 /* Make sure any inlined functions are known to be inlineable. */
23297 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23298 || cgraph_function_possibly_inlined_p (decl));
23299
23300 if (! BLOCK_ABSTRACT (stmt))
23301 {
23302 dw_die_ref subr_die
23303 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23304
23305 if (call_arg_locations)
23306 BLOCK_DIE (stmt) = subr_die;
23307 add_abstract_origin_attribute (subr_die, decl);
23308 if (TREE_ASM_WRITTEN (stmt))
23309 add_high_low_attributes (stmt, subr_die);
23310 add_call_src_coords_attributes (stmt, subr_die);
23311
23312 decls_for_scope (stmt, subr_die);
23313 }
23314 }
23315
23316 /* Generate a DIE for a field in a record or structure. CTX is required: see
23317 the comment for VLR_CONTEXT. */
23318
23319 static void
23320 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23321 {
23322 dw_die_ref decl_die;
23323
23324 if (TREE_TYPE (decl) == error_mark_node)
23325 return;
23326
23327 decl_die = new_die (DW_TAG_member, context_die, decl);
23328 add_name_and_src_coords_attributes (decl_die, decl);
23329 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23330 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23331 context_die);
23332
23333 if (DECL_BIT_FIELD_TYPE (decl))
23334 {
23335 add_byte_size_attribute (decl_die, decl);
23336 add_bit_size_attribute (decl_die, decl);
23337 add_bit_offset_attribute (decl_die, decl, ctx);
23338 }
23339
23340 add_alignment_attribute (decl_die, decl);
23341
23342 /* If we have a variant part offset, then we are supposed to process a member
23343 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23344 trees. */
23345 gcc_assert (ctx->variant_part_offset == NULL_TREE
23346 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23347 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23348 add_data_member_location_attribute (decl_die, decl, ctx);
23349
23350 if (DECL_ARTIFICIAL (decl))
23351 add_AT_flag (decl_die, DW_AT_artificial, 1);
23352
23353 add_accessibility_attribute (decl_die, decl);
23354
23355 /* Equate decl number to die, so that we can look up this decl later on. */
23356 equate_decl_number_to_die (decl, decl_die);
23357 }
23358
23359 /* Generate a DIE for a pointer to a member type. TYPE can be an
23360 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23361 pointer to member function. */
23362
23363 static void
23364 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23365 {
23366 if (lookup_type_die (type))
23367 return;
23368
23369 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23370 scope_die_for (type, context_die), type);
23371
23372 equate_type_number_to_die (type, ptr_die);
23373 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23374 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23375 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23376 context_die);
23377 add_alignment_attribute (ptr_die, type);
23378
23379 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23380 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23381 {
23382 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23383 add_AT_loc (ptr_die, DW_AT_use_location, op);
23384 }
23385 }
23386
23387 static char *producer_string;
23388
23389 /* Return a heap-allocated producer string, including the command-line
23390 options if -grecord-gcc-switches is in effect. */
23391
23392 static char *
23393 gen_producer_string (void)
23394 {
23395 size_t j;
23396 auto_vec<const char *> switches;
23397 const char *language_string = lang_hooks.name;
23398 char *producer, *tail;
23399 const char *p;
23400 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23401 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23402
23403 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23404 switch (save_decoded_options[j].opt_index)
23405 {
23406 case OPT_o:
23407 case OPT_d:
23408 case OPT_dumpbase:
23409 case OPT_dumpdir:
23410 case OPT_auxbase:
23411 case OPT_auxbase_strip:
23412 case OPT_quiet:
23413 case OPT_version:
23414 case OPT_v:
23415 case OPT_w:
23416 case OPT_L:
23417 case OPT_D:
23418 case OPT_I:
23419 case OPT_U:
23420 case OPT_SPECIAL_unknown:
23421 case OPT_SPECIAL_ignore:
23422 case OPT_SPECIAL_program_name:
23423 case OPT_SPECIAL_input_file:
23424 case OPT_grecord_gcc_switches:
23425 case OPT__output_pch_:
23426 case OPT_fdiagnostics_show_location_:
23427 case OPT_fdiagnostics_show_option:
23428 case OPT_fdiagnostics_show_caret:
23429 case OPT_fdiagnostics_color_:
23430 case OPT_fverbose_asm:
23431 case OPT____:
23432 case OPT__sysroot_:
23433 case OPT_nostdinc:
23434 case OPT_nostdinc__:
23435 case OPT_fpreprocessed:
23436 case OPT_fltrans_output_list_:
23437 case OPT_fresolution_:
23438 case OPT_fdebug_prefix_map_:
23439 case OPT_fcompare_debug:
23440 /* Ignore these. */
23441 continue;
23442 default:
23443 if (cl_options[save_decoded_options[j].opt_index].flags
23444 & CL_NO_DWARF_RECORD)
23445 continue;
23446 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23447 == '-');
23448 switch (save_decoded_options[j].canonical_option[0][1])
23449 {
23450 case 'M':
23451 case 'i':
23452 case 'W':
23453 continue;
23454 case 'f':
23455 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23456 "dump", 4) == 0)
23457 continue;
23458 break;
23459 default:
23460 break;
23461 }
23462 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23463 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23464 break;
23465 }
23466
23467 producer = XNEWVEC (char, plen + 1 + len + 1);
23468 tail = producer;
23469 sprintf (tail, "%s %s", language_string, version_string);
23470 tail += plen;
23471
23472 FOR_EACH_VEC_ELT (switches, j, p)
23473 {
23474 len = strlen (p);
23475 *tail = ' ';
23476 memcpy (tail + 1, p, len);
23477 tail += len + 1;
23478 }
23479
23480 *tail = '\0';
23481 return producer;
23482 }
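/* Editor's note (illustrative only): with -grecord-gcc-switches the
   resulting producer string has the shape

     "<language> <version string> <recorded options...>"

   e.g. roughly "GNU C11 <version> -O2 -g", with the options filtered out
   above (-o, -I, -W..., dump flags, etc.) omitted; without the flag only
   the language and version strings are recorded.  */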
23483
23484 /* Given a C and/or C++ language/version string, return the "highest".
23485 C++ is assumed to be "higher" than C in this case. Used for merging
23486 LTO translation unit languages. */
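/* For instance (editor's note), highest_c_language ("GNU C11", "GNU C++14")
   is expected to return "GNU C++14", since every C++ variant is checked
   before any C variant below.  */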
23487 static const char *
23488 highest_c_language (const char *lang1, const char *lang2)
23489 {
23490 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
23491 return "GNU C++17";
23492 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23493 return "GNU C++14";
23494 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23495 return "GNU C++11";
23496 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23497 return "GNU C++98";
23498
23499 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
23500 return "GNU C17";
23501 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23502 return "GNU C11";
23503 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23504 return "GNU C99";
23505 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23506 return "GNU C89";
23507
23508 gcc_unreachable ();
23509 }
23510
23511
23512 /* Generate the DIE for the compilation unit. */
23513
23514 static dw_die_ref
23515 gen_compile_unit_die (const char *filename)
23516 {
23517 dw_die_ref die;
23518 const char *language_string = lang_hooks.name;
23519 int language;
23520
23521 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23522
23523 if (filename)
23524 {
23525 add_name_attribute (die, filename);
23526 /* Don't add cwd for <built-in>. */
23527 if (filename[0] != '<')
23528 add_comp_dir_attribute (die);
23529 }
23530
23531 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23532
23533 /* If our producer is LTO, try to figure out a common language to use
23534 from the global list of translation units. */
23535 if (strcmp (language_string, "GNU GIMPLE") == 0)
23536 {
23537 unsigned i;
23538 tree t;
23539 const char *common_lang = NULL;
23540
23541 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23542 {
23543 if (!TRANSLATION_UNIT_LANGUAGE (t))
23544 continue;
23545 if (!common_lang)
23546 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23547 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23548 ;
23549 else if (strncmp (common_lang, "GNU C", 5) == 0
23550 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23551 /* Mixing C and C++ is ok, use C++ in that case. */
23552 common_lang = highest_c_language (common_lang,
23553 TRANSLATION_UNIT_LANGUAGE (t));
23554 else
23555 {
23556 /* Fall back to C. */
23557 common_lang = NULL;
23558 break;
23559 }
23560 }
23561
23562 if (common_lang)
23563 language_string = common_lang;
23564 }
23565
23566 language = DW_LANG_C;
23567 if (strncmp (language_string, "GNU C", 5) == 0
23568 && ISDIGIT (language_string[5]))
23569 {
23570 language = DW_LANG_C89;
23571 if (dwarf_version >= 3 || !dwarf_strict)
23572 {
23573 if (strcmp (language_string, "GNU C89") != 0)
23574 language = DW_LANG_C99;
23575
23576 if (dwarf_version >= 5 /* || !dwarf_strict */)
23577 if (strcmp (language_string, "GNU C11") == 0
23578 || strcmp (language_string, "GNU C17") == 0)
23579 language = DW_LANG_C11;
23580 }
23581 }
23582 else if (strncmp (language_string, "GNU C++", 7) == 0)
23583 {
23584 language = DW_LANG_C_plus_plus;
23585 if (dwarf_version >= 5 /* || !dwarf_strict */)
23586 {
23587 if (strcmp (language_string, "GNU C++11") == 0)
23588 language = DW_LANG_C_plus_plus_11;
23589 else if (strcmp (language_string, "GNU C++14") == 0)
23590 language = DW_LANG_C_plus_plus_14;
23591 else if (strcmp (language_string, "GNU C++17") == 0)
23592 /* For now. */
23593 language = DW_LANG_C_plus_plus_14;
23594 }
23595 }
23596 else if (strcmp (language_string, "GNU F77") == 0)
23597 language = DW_LANG_Fortran77;
23598 else if (dwarf_version >= 3 || !dwarf_strict)
23599 {
23600 if (strcmp (language_string, "GNU Ada") == 0)
23601 language = DW_LANG_Ada95;
23602 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23603 {
23604 language = DW_LANG_Fortran95;
23605 if (dwarf_version >= 5 /* || !dwarf_strict */)
23606 {
23607 if (strcmp (language_string, "GNU Fortran2003") == 0)
23608 language = DW_LANG_Fortran03;
23609 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23610 language = DW_LANG_Fortran08;
23611 }
23612 }
23613 else if (strcmp (language_string, "GNU Objective-C") == 0)
23614 language = DW_LANG_ObjC;
23615 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23616 language = DW_LANG_ObjC_plus_plus;
23617 else if (dwarf_version >= 5 || !dwarf_strict)
23618 {
23619 if (strcmp (language_string, "GNU Go") == 0)
23620 language = DW_LANG_Go;
23621 }
23622 }
23623 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23624 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23625 language = DW_LANG_Fortran90;
23626
23627 add_AT_unsigned (die, DW_AT_language, language);
23628
23629 switch (language)
23630 {
23631 case DW_LANG_Fortran77:
23632 case DW_LANG_Fortran90:
23633 case DW_LANG_Fortran95:
23634 case DW_LANG_Fortran03:
23635 case DW_LANG_Fortran08:
23636 /* Fortran has case-insensitive identifiers and the front end
23637 lowercases everything. */
23638 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23639 break;
23640 default:
23641 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23642 break;
23643 }
23644 return die;
23645 }
23646
23647 /* Generate the DIE for a base class. */
23648
23649 static void
23650 gen_inheritance_die (tree binfo, tree access, tree type,
23651 dw_die_ref context_die)
23652 {
23653 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23654 struct vlr_context ctx = { type, NULL };
23655
23656 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23657 context_die);
23658 add_data_member_location_attribute (die, binfo, &ctx);
23659
23660 if (BINFO_VIRTUAL_P (binfo))
23661 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23662
23663 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23664 children; otherwise the default is DW_ACCESS_public. In DWARF2
23665 the default has always been DW_ACCESS_private. */
23666 if (access == access_public_node)
23667 {
23668 if (dwarf_version == 2
23669 || context_die->die_tag == DW_TAG_class_type)
23670 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23671 }
23672 else if (access == access_protected_node)
23673 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23674 else if (dwarf_version > 2
23675 && context_die->die_tag != DW_TAG_class_type)
23676 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23677 }
23678
23679 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23680 structure. */
23681 static bool
23682 is_variant_part (tree decl)
23683 {
23684 return (TREE_CODE (decl) == FIELD_DECL
23685 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23686 }
23687
23688 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23689 return the FIELD_DECL. Return NULL_TREE otherwise. */
23690
23691 static tree
23692 analyze_discr_in_predicate (tree operand, tree struct_type)
23693 {
23694 bool continue_stripping = true;
23695 while (continue_stripping)
23696 switch (TREE_CODE (operand))
23697 {
23698 CASE_CONVERT:
23699 operand = TREE_OPERAND (operand, 0);
23700 break;
23701 default:
23702 continue_stripping = false;
23703 break;
23704 }
23705
23706 /* Match field access to members of struct_type only. */
23707 if (TREE_CODE (operand) == COMPONENT_REF
23708 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23709 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23710 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23711 return TREE_OPERAND (operand, 1);
23712 else
23713 return NULL_TREE;
23714 }
23715
23716 /* Check that SRC is a constant integer that can be represented as a native
23717 integer constant (either signed or unsigned). If so, store it into DEST and
23718 return true. Return false otherwise. */
23719
23720 static bool
23721 get_discr_value (tree src, dw_discr_value *dest)
23722 {
23723 tree discr_type = TREE_TYPE (src);
23724
23725 if (lang_hooks.types.get_debug_type)
23726 {
23727 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
23728 if (debug_type != NULL)
23729 discr_type = debug_type;
23730 }
23731
23732 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
23733 return false;
23734
23735 /* Signedness can vary between the original type and the debug type. This
23736 can happen for character types in Ada for instance: the character type
23737 used for code generation can be signed, to be compatible with the C one,
23738 but from a debugger point of view, it must be unsigned. */
23739 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23740 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
23741
23742 if (is_orig_unsigned != is_debug_unsigned)
23743 src = fold_convert (discr_type, src);
23744
23745 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23746 return false;
23747
23748 dest->pos = is_debug_unsigned;
23749 if (is_debug_unsigned)
23750 dest->v.uval = tree_to_uhwi (src);
23751 else
23752 dest->v.sval = tree_to_shwi (src);
23753
23754 return true;
23755 }
23756
23757 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23758 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23759 store NULL_TREE in *DISCR_DECL. Otherwise:
23760
23761 - store the discriminant field in STRUCT_TYPE that controls the variant
23762 part to *DISCR_DECL
23763
23764 - put in *DISCR_LISTS_P an array where for each variant, the item
23765 represents the corresponding matching list of discriminant values.
23766
23767 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23768 the above array.
23769
23770 Note that when the array is allocated (i.e. when the analysis is
23771 successful), it is up to the caller to free the array. */
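/* Editor's sketch (illustrative only): a variant's DECL_QUALIFIER predicate
   handled below typically looks like

     discr == 1
     || (discr >= 5 && discr <= 9)

   i.e. nested TRUTH_ORIF_EXPRs whose operands are either EQ_EXPR single
   values or TRUTH_ANDIF_EXPRs of GE/GT and LE/LT bounds; anything else
   makes the analysis bail out conservatively.  */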
23772
23773 static void
23774 analyze_variants_discr (tree variant_part_decl,
23775 tree struct_type,
23776 tree *discr_decl,
23777 dw_discr_list_ref **discr_lists_p,
23778 unsigned *discr_lists_length)
23779 {
23780 tree variant_part_type = TREE_TYPE (variant_part_decl);
23781 tree variant;
23782 dw_discr_list_ref *discr_lists;
23783 unsigned i;
23784
23785 /* Compute how many variants there are in this variant part. */
23786 *discr_lists_length = 0;
23787 for (variant = TYPE_FIELDS (variant_part_type);
23788 variant != NULL_TREE;
23789 variant = DECL_CHAIN (variant))
23790 ++*discr_lists_length;
23791
23792 *discr_decl = NULL_TREE;
23793 *discr_lists_p
23794 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23795 sizeof (**discr_lists_p));
23796 discr_lists = *discr_lists_p;
23797
23798 /* And then analyze all variants to extract discriminant information for all
23799 of them. This analysis is conservative: as soon as we detect something we
23800 do not support, abort everything and pretend we found nothing. */
23801 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23802 variant != NULL_TREE;
23803 variant = DECL_CHAIN (variant), ++i)
23804 {
23805 tree match_expr = DECL_QUALIFIER (variant);
23806
23807 /* Now, try to analyze the predicate and deduce a discriminant for
23808 it. */
23809 if (match_expr == boolean_true_node)
23810 /* Typically happens for the default variant: it matches all cases that
23811 previous variants rejected. Don't output any matching value for
23812 this one. */
23813 continue;
23814
23815 /* The following loop tries to iterate over each discriminant
23816 possibility: single values or ranges. */
23817 while (match_expr != NULL_TREE)
23818 {
23819 tree next_round_match_expr;
23820 tree candidate_discr = NULL_TREE;
23821 dw_discr_list_ref new_node = NULL;
23822
23823 /* Possibilities are matched one after the other by nested
23824 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23825 continue with the rest at next iteration. */
23826 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23827 {
23828 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23829 match_expr = TREE_OPERAND (match_expr, 1);
23830 }
23831 else
23832 next_round_match_expr = NULL_TREE;
23833
23834 if (match_expr == boolean_false_node)
23835 /* This sub-expression matches nothing: just wait for the next
23836 one. */
23837 ;
23838
23839 else if (TREE_CODE (match_expr) == EQ_EXPR)
23840 {
23841 /* We are matching: <discr_field> == <integer_cst>
23842 This sub-expression matches a single value. */
23843 tree integer_cst = TREE_OPERAND (match_expr, 1);
23844
23845 candidate_discr
23846 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23847 struct_type);
23848
23849 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23850 if (!get_discr_value (integer_cst,
23851 &new_node->dw_discr_lower_bound))
23852 goto abort;
23853 new_node->dw_discr_range = false;
23854 }
23855
23856 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23857 {
23858 /* We are matching:
23859 <discr_field> > <integer_cst>
23860 && <discr_field> < <integer_cst>.
23861 This sub-expression matches the range of values between the
23862 two matched integer constants. Note that comparisons can be
23863 inclusive or exclusive. */
23864 tree candidate_discr_1, candidate_discr_2;
23865 tree lower_cst, upper_cst;
23866 bool lower_cst_included, upper_cst_included;
23867 tree lower_op = TREE_OPERAND (match_expr, 0);
23868 tree upper_op = TREE_OPERAND (match_expr, 1);
23869
23870 /* When the comparison is exclusive, the integer constant is not
23871 the discriminant range bound we are looking for: we will have
23872 to increment or decrement it. */
23873 if (TREE_CODE (lower_op) == GE_EXPR)
23874 lower_cst_included = true;
23875 else if (TREE_CODE (lower_op) == GT_EXPR)
23876 lower_cst_included = false;
23877 else
23878 goto abort;
23879
23880 if (TREE_CODE (upper_op) == LE_EXPR)
23881 upper_cst_included = true;
23882 else if (TREE_CODE (upper_op) == LT_EXPR)
23883 upper_cst_included = false;
23884 else
23885 goto abort;
23886
23887 /* Extract the discriminant from the first operand and check it
23888 is consistent with the same analysis in the second
23889 operand. */
23890 candidate_discr_1
23891 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23892 struct_type);
23893 candidate_discr_2
23894 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23895 struct_type);
23896 if (candidate_discr_1 == candidate_discr_2)
23897 candidate_discr = candidate_discr_1;
23898 else
23899 goto abort;
23900
23901 /* Extract bounds from both. */
23902 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23903 lower_cst = TREE_OPERAND (lower_op, 1);
23904 upper_cst = TREE_OPERAND (upper_op, 1);
23905
23906 if (!lower_cst_included)
23907 lower_cst
23908 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
23909 build_int_cst (TREE_TYPE (lower_cst), 1));
23910 if (!upper_cst_included)
23911 upper_cst
23912 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
23913 build_int_cst (TREE_TYPE (upper_cst), 1));
23914
23915 if (!get_discr_value (lower_cst,
23916 &new_node->dw_discr_lower_bound)
23917 || !get_discr_value (upper_cst,
23918 &new_node->dw_discr_upper_bound))
23919 goto abort;
23920
23921 new_node->dw_discr_range = true;
23922 }
23923
23924 else
23925 /* Unsupported sub-expression: we cannot determine the set of
23926 matching discriminant values. Abort everything. */
23927 goto abort;
23928
23929 /* If the discriminant info is not consistent with what we saw so
23930 far, consider the analysis failed and abort everything. */
23931 if (candidate_discr == NULL_TREE
23932 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
23933 goto abort;
23934 else
23935 *discr_decl = candidate_discr;
23936
23937 if (new_node != NULL)
23938 {
23939 new_node->dw_discr_next = discr_lists[i];
23940 discr_lists[i] = new_node;
23941 }
23942 match_expr = next_round_match_expr;
23943 }
23944 }
23945
23946 /* If we reach this point, we could match everything we were interested
23947 in. */
23948 return;
23949
23950 abort:
23951 /* Clean up all data structures and return no result. */
23952 free (*discr_lists_p);
23953 *discr_lists_p = NULL;
23954 *discr_decl = NULL_TREE;
23955 }
23956
23957 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
23958 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
23959 under CONTEXT_DIE.
23960
23961 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
23962 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
23963 this type, which are record types, represent the available variants and each
23964 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
23965 values are inferred from these attributes.
23966
23967 In trees, the offsets for the fields inside these sub-records are relative
23968 to the variant part itself, whereas the corresponding DIEs should have
23969 offset attributes that are relative to the embedding record base address.
23970 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
23971 must be an expression that computes the offset of the variant part to
23972 describe in DWARF. */
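/* Editor's sketch (illustrative only): for a record with a variant part
   controlled by a discriminant D, the tree shape described above

     RECORD_TYPE
       FIELD_DECL D                       <- the discriminant
       FIELD_DECL <variant part>          <- type is a QUAL_UNION_TYPE
	 RECORD_TYPE  (DECL_QUALIFIER: D == 0)        fields...
	 RECORD_TYPE  (DECL_QUALIFIER: boolean true)  fields...

   is expected to become

     DW_TAG_variant_part  (DW_AT_discr -> DIE for D)
       DW_TAG_variant  (DW_AT_discr_value 0)   members...
       DW_TAG_variant  (no discr attribute)    members...  */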
23973
23974 static void
23975 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
23976 dw_die_ref context_die)
23977 {
23978 const tree variant_part_type = TREE_TYPE (variant_part_decl);
23979 tree variant_part_offset = vlr_ctx->variant_part_offset;
23980 struct loc_descr_context ctx = {
23981 vlr_ctx->struct_type, /* context_type */
23982 NULL_TREE, /* base_decl */
23983 NULL, /* dpi */
23984 false, /* placeholder_arg */
23985 false /* placeholder_seen */
23986 };
23987
23988 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
23989 NULL_TREE if there is no such field. */
23990 tree discr_decl = NULL_TREE;
23991 dw_discr_list_ref *discr_lists;
23992 unsigned discr_lists_length = 0;
23993 unsigned i;
23994
23995 dw_die_ref dwarf_proc_die = NULL;
23996 dw_die_ref variant_part_die
23997 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
23998
23999 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24000
24001 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24002 &discr_decl, &discr_lists, &discr_lists_length);
24003
24004 if (discr_decl != NULL_TREE)
24005 {
24006 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24007
24008 if (discr_die)
24009 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24010 else
24011 /* We have no DIE for the discriminant, so just discard all
24012 discriminant information in the output. */
24013 discr_decl = NULL_TREE;
24014 }
24015
24016 /* If the offset for this variant part is more complex than a constant,
24017 create a DWARF procedure for it so that we will not have to generate DWARF
24018 expressions for it for each member. */
24019 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24020 && (dwarf_version >= 3 || !dwarf_strict))
24021 {
24022 const tree dwarf_proc_fndecl
24023 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24024 build_function_type (TREE_TYPE (variant_part_offset),
24025 NULL_TREE));
24026 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24027 const dw_loc_descr_ref dwarf_proc_body
24028 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24029
24030 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24031 dwarf_proc_fndecl, context_die);
24032 if (dwarf_proc_die != NULL)
24033 variant_part_offset = dwarf_proc_call;
24034 }
24035
24036 /* Output DIEs for all variants. */
24037 i = 0;
24038 for (tree variant = TYPE_FIELDS (variant_part_type);
24039 variant != NULL_TREE;
24040 variant = DECL_CHAIN (variant), ++i)
24041 {
24042 tree variant_type = TREE_TYPE (variant);
24043 dw_die_ref variant_die;
24044
24045 /* All variants (i.e. members of a variant part) are supposed to be
24046 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24047 under these records. */
24048 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24049
24050 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24051 equate_decl_number_to_die (variant, variant_die);
24052
24053 /* Output discriminant values this variant matches, if any. */
24054 if (discr_decl == NULL || discr_lists[i] == NULL)
24055 /* Either we have no discriminant information at all, or this is
24056 probably the default variant: in both cases, as the standard says,
24057 don't output any discriminant value/list attribute. */
24058 ;
24059 else if (discr_lists[i]->dw_discr_next == NULL
24060 && !discr_lists[i]->dw_discr_range)
24061 /* If there is only one accepted value, don't bother outputting a
24062 list. */
24063 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24064 else
24065 add_discr_list (variant_die, discr_lists[i]);
24066
24067 for (tree member = TYPE_FIELDS (variant_type);
24068 member != NULL_TREE;
24069 member = DECL_CHAIN (member))
24070 {
24071 struct vlr_context vlr_sub_ctx = {
24072 vlr_ctx->struct_type, /* struct_type */
24073 NULL /* variant_part_offset */
24074 };
24075 if (is_variant_part (member))
24076 {
24077 /* All offsets for fields inside variant parts are relative to
24078 the top-level embedding RECORD_TYPE's base address. On the
24079 other hand, offsets in GCC's types are relative to the
24080 innermost variant part. So we have to sum offsets each time
24081 we recurse. */
24082
24083 vlr_sub_ctx.variant_part_offset
24084 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24085 variant_part_offset, byte_position (member));
24086 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24087 }
24088 else
24089 {
24090 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24091 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24092 }
24093 }
24094 }
24095
24096 free (discr_lists);
24097 }
24098
24099 /* Generate a DIE for a class member. */
24100
24101 static void
24102 gen_member_die (tree type, dw_die_ref context_die)
24103 {
24104 tree member;
24105 tree binfo = TYPE_BINFO (type);
24106
24107 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24108
24109 /* If this is not an incomplete type, output descriptions of each of its
24110 members. Note that as we output the DIEs necessary to represent the
24111 members of this record or union type, we will also be trying to output
24112 DIEs to represent the *types* of those members. However the `type'
24113 function (above) will specifically avoid generating type DIEs for member
24114 types *within* the list of member DIEs for this (containing) type except
24115 for those types (of members) which are explicitly marked as also being
24116 members of this (containing) type themselves. The g++ front end can
24117 force any given type to be treated as a member of some other (containing)
24118 type by setting the TYPE_CONTEXT of the given (member) type to point to
24119 the TREE node representing the appropriate (containing) type. */
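  /* Illustrative example only (hypothetical names): given

       struct Outer { struct Inner { int i; }; };

     the C++ front end sets TYPE_CONTEXT (Inner) to Outer, so the DIE for
     Inner is emitted below among Outer's member DIEs rather than at an
     outer scope.  */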
24120
24121 /* First output info about the base classes. */
24122 if (binfo)
24123 {
24124 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24125 int i;
24126 tree base;
24127
24128 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24129 gen_inheritance_die (base,
24130 (accesses ? (*accesses)[i] : access_public_node),
24131 type,
24132 context_die);
24133 }
24134
24135 /* Now output info about the data members and type members. */
24136 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24137 {
24138 struct vlr_context vlr_ctx = { type, NULL_TREE };
24139 bool static_inline_p
24140 = (TREE_STATIC (member)
24141 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24142 != -1));
24143
24144 /* Ignore clones. */
24145 if (DECL_ABSTRACT_ORIGIN (member))
24146 continue;
24147
24148 /* If we thought we were generating minimal debug info for TYPE
24149 and then changed our minds, some of the member declarations
24150 may have already been defined. Don't define them again, but
24151 do put them in the right order. */
24152
24153 if (dw_die_ref child = lookup_decl_die (member))
24154 {
24155 /* Handle inline static data members, which only have in-class
24156 declarations. */
24157 dw_die_ref ref = NULL;
24158 if (child->die_tag == DW_TAG_variable
24159 && child->die_parent == comp_unit_die ())
24160 {
24161 ref = get_AT_ref (child, DW_AT_specification);
24162 /* For C++17 inline static data members followed by redundant
24163 out of class redeclaration, we might get here with
24164 child being the DIE created for the out of class
24165 redeclaration and with its DW_AT_specification being
24166 the DIE created for in-class definition. We want to
24167 reparent the latter, and don't want to create another
24168 DIE with DW_AT_specification in that case, because
24169 we already have one. */
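  /* Illustrative example only (hypothetical names):

       struct S { static inline int i = 0; };  // in-class definition
       int S::i;  // redundant out-of-class redeclaration

     Here CHILD can be the DIE created for the redeclaration and REF the
     DIE created for the in-class definition.  */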
24170 if (ref
24171 && static_inline_p
24172 && ref->die_tag == DW_TAG_variable
24173 && ref->die_parent == comp_unit_die ()
24174 && get_AT (ref, DW_AT_specification) == NULL)
24175 {
24176 child = ref;
24177 ref = NULL;
24178 static_inline_p = false;
24179 }
24180 }
24181
24182 if (child->die_tag == DW_TAG_variable
24183 && child->die_parent == comp_unit_die ()
24184 && ref == NULL)
24185 {
24186 reparent_child (child, context_die);
24187 if (dwarf_version < 5)
24188 child->die_tag = DW_TAG_member;
24189 }
24190 else
24191 splice_child_die (context_die, child);
24192 }
24193
24194 /* Do not generate standard DWARF for variant parts if we are generating
24195 the corresponding GNAT encodings: DIEs generated for both would
24196 conflict in our mappings. */
24197 else if (is_variant_part (member)
24198 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24199 {
24200 vlr_ctx.variant_part_offset = byte_position (member);
24201 gen_variant_part (member, &vlr_ctx, context_die);
24202 }
24203 else
24204 {
24205 vlr_ctx.variant_part_offset = NULL_TREE;
24206 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24207 }
24208
24209 /* For C++ inline static data members emit immediately a DW_TAG_variable
24210 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24211 DW_AT_specification. */
24212 if (static_inline_p)
24213 {
24214 int old_extern = DECL_EXTERNAL (member);
24215 DECL_EXTERNAL (member) = 0;
24216 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24217 DECL_EXTERNAL (member) = old_extern;
24218 }
24219 }
24220 }
24221
24222 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24223 is set, we pretend that the type was never defined, so we only get the
24224 member DIEs needed by later specification DIEs. */
24225
24226 static void
24227 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24228 enum debug_info_usage usage)
24229 {
24230 if (TREE_ASM_WRITTEN (type))
24231 {
24232 /* Fill in the bound of variable-length fields in late dwarf if
24233 still incomplete. */
24234 if (!early_dwarf && variably_modified_type_p (type, NULL))
24235 for (tree member = TYPE_FIELDS (type);
24236 member;
24237 member = DECL_CHAIN (member))
24238 fill_variable_array_bounds (TREE_TYPE (member));
24239 return;
24240 }
24241
24242 dw_die_ref type_die = lookup_type_die (type);
24243 dw_die_ref scope_die = 0;
24244 int nested = 0;
24245 int complete = (TYPE_SIZE (type)
24246 && (! TYPE_STUB_DECL (type)
24247 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24248 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24249 complete = complete && should_emit_struct_debug (type, usage);
24250
24251 if (type_die && ! complete)
24252 return;
24253
24254 if (TYPE_CONTEXT (type) != NULL_TREE
24255 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24256 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24257 nested = 1;
24258
24259 scope_die = scope_die_for (type, context_die);
24260
24261 /* Generate child DIEs for template parameters. */
24262 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24263 schedule_generic_params_dies_gen (type);
24264
24265 if (! type_die || (nested && is_cu_die (scope_die)))
24266 /* First occurrence of type or toplevel definition of nested class. */
24267 {
24268 dw_die_ref old_die = type_die;
24269
24270 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24271 ? record_type_tag (type) : DW_TAG_union_type,
24272 scope_die, type);
24273 equate_type_number_to_die (type, type_die);
24274 if (old_die)
24275 add_AT_specification (type_die, old_die);
24276 else
24277 add_name_attribute (type_die, type_tag (type));
24278 }
24279 else
24280 remove_AT (type_die, DW_AT_declaration);
24281
24282 /* If this type has been completed, then give it a byte_size attribute and
24283 then give a list of members. */
24284 if (complete && !ns_decl)
24285 {
24286 /* Prevent infinite recursion in cases where the type of some member of
24287 this type is expressed in terms of this type itself. */
24288 TREE_ASM_WRITTEN (type) = 1;
24289 add_byte_size_attribute (type_die, type);
24290 add_alignment_attribute (type_die, type);
24291 if (TYPE_STUB_DECL (type) != NULL_TREE)
24292 {
24293 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24294 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24295 }
24296
24297 /* If the first reference to this type was as the return type of an
24298 inline function, then it may not have a parent. Fix this now. */
24299 if (type_die->die_parent == NULL)
24300 add_child_die (scope_die, type_die);
24301
24302 push_decl_scope (type);
24303 gen_member_die (type, type_die);
24304 pop_decl_scope ();
24305
24306 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24307 if (TYPE_ARTIFICIAL (type))
24308 add_AT_flag (type_die, DW_AT_artificial, 1);
24309
24310 /* GNU extension: Record what type our vtable lives in. */
24311 if (TYPE_VFIELD (type))
24312 {
24313 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24314
24315 gen_type_die (vtype, context_die);
24316 add_AT_die_ref (type_die, DW_AT_containing_type,
24317 lookup_type_die (vtype));
24318 }
24319 }
24320 else
24321 {
24322 add_AT_flag (type_die, DW_AT_declaration, 1);
24323
24324 /* We don't need to do this for function-local types. */
24325 if (TYPE_STUB_DECL (type)
24326 && ! decl_function_context (TYPE_STUB_DECL (type)))
24327 vec_safe_push (incomplete_types, type);
24328 }
24329
24330 if (get_AT (type_die, DW_AT_name))
24331 add_pubtype (type, type_die);
24332 }
24333
24334 /* Generate a DIE for a subroutine _type_. */
24335
24336 static void
24337 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24338 {
24339 tree return_type = TREE_TYPE (type);
24340 dw_die_ref subr_die
24341 = new_die (DW_TAG_subroutine_type,
24342 scope_die_for (type, context_die), type);
24343
24344 equate_type_number_to_die (type, subr_die);
24345 add_prototyped_attribute (subr_die, type);
24346 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24347 context_die);
24348 add_alignment_attribute (subr_die, type);
24349 gen_formal_types_die (type, subr_die);
24350
24351 if (get_AT (subr_die, DW_AT_name))
24352 add_pubtype (type, subr_die);
24353 if ((dwarf_version >= 5 || !dwarf_strict)
24354 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24355 add_AT_flag (subr_die, DW_AT_reference, 1);
24356 if ((dwarf_version >= 5 || !dwarf_strict)
24357 && lang_hooks.types.type_dwarf_attribute (type,
24358 DW_AT_rvalue_reference) != -1)
24359 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24360 }
24361
24362 /* Generate a DIE for a type definition. */
24363
24364 static void
24365 gen_typedef_die (tree decl, dw_die_ref context_die)
24366 {
24367 dw_die_ref type_die;
24368 tree type;
24369
24370 if (TREE_ASM_WRITTEN (decl))
24371 {
24372 if (DECL_ORIGINAL_TYPE (decl))
24373 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24374 return;
24375 }
24376
24377 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
24378 checks in process_scope_var and modified_type_die), this should be called
24379 only for original types. */
24380 gcc_assert (decl_ultimate_origin (decl) == NULL
24381 || decl_ultimate_origin (decl) == decl);
24382
24383 TREE_ASM_WRITTEN (decl) = 1;
24384 type_die = new_die (DW_TAG_typedef, context_die, decl);
24385
24386 add_name_and_src_coords_attributes (type_die, decl);
24387 if (DECL_ORIGINAL_TYPE (decl))
24388 {
24389 type = DECL_ORIGINAL_TYPE (decl);
24390 if (type == error_mark_node)
24391 return;
24392
24393 gcc_assert (type != TREE_TYPE (decl));
24394 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24395 }
24396 else
24397 {
24398 type = TREE_TYPE (decl);
24399 if (type == error_mark_node)
24400 return;
24401
24402 if (is_naming_typedef_decl (TYPE_NAME (type)))
24403 {
24404 /* Here, we are in the case of decl being a typedef naming
24405 an anonymous type, e.g.:
24406 typedef struct {...} foo;
24407 In that case TREE_TYPE (decl) is not a typedef variant
24408 type and TYPE_NAME of the anonymous type is set to the
24409 TYPE_DECL of the typedef. This construct is emitted by
24410 the C++ FE.
24411
24412 TYPE is the anonymous struct named by the typedef
24413 DECL. As we need the DW_AT_type attribute of the
24414 DW_TAG_typedef to point to the DIE of TYPE, let's
24415 generate that DIE right away. add_type_attribute
24416 called below will then pick (via lookup_type_die) that
24417 anonymous struct DIE. */
24418 if (!TREE_ASM_WRITTEN (type))
24419 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24420
24421 /* This is a GNU Extension. We are adding a
24422 DW_AT_linkage_name attribute to the DIE of the
24423 anonymous struct TYPE. The value of that attribute
24424 is the name of the typedef decl naming the anonymous
24425 struct. This greatly eases the work of consumers of
24426 this debug info. */
24427 add_linkage_name_raw (lookup_type_die (type), decl);
24428 }
24429 }
24430
24431 add_type_attribute (type_die, type, decl_quals (decl), false,
24432 context_die);
24433
24434 if (is_naming_typedef_decl (decl))
24435 /* We want all subsequent calls to lookup_type_die with TYPE
24436 as argument to yield the DW_TAG_typedef we have just
24437 created. */
24438 equate_type_number_to_die (type, type_die);
24439
24440 add_alignment_attribute (type_die, TREE_TYPE (decl));
24441
24442 add_accessibility_attribute (type_die, decl);
24443
24444 if (DECL_ABSTRACT_P (decl))
24445 equate_decl_number_to_die (decl, type_die);
24446
24447 if (get_AT (type_die, DW_AT_name))
24448 add_pubtype (decl, type_die);
24449 }
24450
24451 /* Generate a DIE for a struct, class, enum or union type. */
24452
24453 static void
24454 gen_tagged_type_die (tree type,
24455 dw_die_ref context_die,
24456 enum debug_info_usage usage)
24457 {
24458 int need_pop;
24459
24460 if (type == NULL_TREE
24461 || !is_tagged_type (type))
24462 return;
24463
24464 if (TREE_ASM_WRITTEN (type))
24465 need_pop = 0;
24466 /* If this is a nested type whose containing class hasn't been written
24467 out yet, writing it out will cover this one, too. This does not apply
24468 to instantiations of member class templates; they need to be added to
24469 the containing class as they are generated. FIXME: This hurts the
24470 idea of combining type decls from multiple TUs, since we can't predict
24471 what set of template instantiations we'll get. */
24472 else if (TYPE_CONTEXT (type)
24473 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24474 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24475 {
24476 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24477
24478 if (TREE_ASM_WRITTEN (type))
24479 return;
24480
24481 /* If that failed, attach ourselves to the stub. */
24482 push_decl_scope (TYPE_CONTEXT (type));
24483 context_die = lookup_type_die (TYPE_CONTEXT (type));
24484 need_pop = 1;
24485 }
24486 else if (TYPE_CONTEXT (type) != NULL_TREE
24487 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24488 {
24489 /* If this type is local to a function that hasn't been written
24490 out yet, use a NULL context for now; it will be fixed up in
24491 decls_for_scope. */
24492 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24493 /* A declaration DIE doesn't count; nested types need to go in the
24494 specification. */
24495 if (context_die && is_declaration_die (context_die))
24496 context_die = NULL;
24497 need_pop = 0;
24498 }
24499 else
24500 {
24501 context_die = declare_in_namespace (type, context_die);
24502 need_pop = 0;
24503 }
24504
24505 if (TREE_CODE (type) == ENUMERAL_TYPE)
24506 {
24507 /* This might have been written out by the call to
24508 declare_in_namespace. */
24509 if (!TREE_ASM_WRITTEN (type))
24510 gen_enumeration_type_die (type, context_die);
24511 }
24512 else
24513 gen_struct_or_union_type_die (type, context_die, usage);
24514
24515 if (need_pop)
24516 pop_decl_scope ();
24517
24518 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24519 it up if it is ever completed. gen_*_type_die will set it for us
24520 when appropriate. */
24521 }
24522
24523 /* Generate a type description DIE. */
24524
24525 static void
24526 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24527 enum debug_info_usage usage)
24528 {
24529 struct array_descr_info info;
24530
24531 if (type == NULL_TREE || type == error_mark_node)
24532 return;
24533
24534 if (flag_checking && type)
24535 verify_type (type);
24536
24537 if (TYPE_NAME (type) != NULL_TREE
24538 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24539 && is_redundant_typedef (TYPE_NAME (type))
24540 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24541 /* The DECL of this type is a typedef we don't want to emit debug
24542 info for but we want debug info for its underlying typedef.
24543 This can happen, e.g., for the injected-class-name of a C++
24544 type. */
24545 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
24546
24547 /* If TYPE is a typedef type variant, let's generate debug info
24548 for the parent typedef which TYPE is a type of. */
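  /* Illustrative example only (hypothetical name): given
     "typedef unsigned int uint;", the type of a "uint" object is a variant
     of unsigned int whose TYPE_NAME is the TYPE_DECL for "uint".  Rather
     than describing that variant directly, we hand off to gen_decl_die on
     the TYPE_DECL below, which emits the DW_TAG_typedef.  */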
24549 if (typedef_variant_p (type))
24550 {
24551 if (TREE_ASM_WRITTEN (type))
24552 return;
24553
24554 tree name = TYPE_NAME (type);
24555 tree origin = decl_ultimate_origin (name);
24556 if (origin != NULL && origin != name)
24557 {
24558 gen_decl_die (origin, NULL, NULL, context_die);
24559 return;
24560 }
24561
24562 /* Prevent broken recursion; we can't hand off to the same type. */
24563 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
24564
24565 /* Give typedefs the right scope. */
24566 context_die = scope_die_for (type, context_die);
24567
24568 TREE_ASM_WRITTEN (type) = 1;
24569
24570 gen_decl_die (name, NULL, NULL, context_die);
24571 return;
24572 }
24573
24574 /* If type is an anonymous tagged type named by a typedef, let's
24575 generate debug info for the typedef. */
24576 if (is_naming_typedef_decl (TYPE_NAME (type)))
24577 {
24578 /* Use the DIE of the containing namespace as the parent DIE of
24579 the type description DIE we want to generate. */
24580 if (DECL_CONTEXT (TYPE_NAME (type))
24581 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24582 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24583
24584 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24585 return;
24586 }
24587
24588 if (lang_hooks.types.get_debug_type)
24589 {
24590 tree debug_type = lang_hooks.types.get_debug_type (type);
24591
24592 if (debug_type != NULL_TREE && debug_type != type)
24593 {
24594 gen_type_die_with_usage (debug_type, context_die, usage);
24595 return;
24596 }
24597 }
24598
24599 /* We are going to output a DIE to represent the unqualified version
24600 of this type (i.e. without any const or volatile qualifiers) so
24601 get the main variant (i.e. the unqualified version) of this type
24602 now. (Vectors and arrays are special because the debugging info is in the
24603 cloned type itself. Similarly function/method types can contain extra
24604 ref-qualification). */
24605 if (TREE_CODE (type) == FUNCTION_TYPE
24606 || TREE_CODE (type) == METHOD_TYPE)
24607 {
24608 /* For function/method types, can't use type_main_variant here,
24609 because that can have different ref-qualifiers for C++,
24610 but try to canonicalize. */
24611 tree main = TYPE_MAIN_VARIANT (type);
24612 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24613 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
24614 && check_base_type (t, main)
24615 && check_lang_type (t, type))
24616 {
24617 type = t;
24618 break;
24619 }
24620 }
24621 else if (TREE_CODE (type) != VECTOR_TYPE
24622 && TREE_CODE (type) != ARRAY_TYPE)
24623 type = type_main_variant (type);
24624
24625 /* If this is an array type with hidden descriptor, handle it first. */
24626 if (!TREE_ASM_WRITTEN (type)
24627 && lang_hooks.types.get_array_descr_info)
24628 {
24629 memset (&info, 0, sizeof (info));
24630 if (lang_hooks.types.get_array_descr_info (type, &info))
24631 {
24632 /* Fortran sometimes emits array types with no dimension. */
24633 gcc_assert (info.ndimensions >= 0
24634 && (info.ndimensions
24635 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24636 gen_descr_array_type_die (type, &info, context_die);
24637 TREE_ASM_WRITTEN (type) = 1;
24638 return;
24639 }
24640 }
24641
24642 if (TREE_ASM_WRITTEN (type))
24643 {
24644 /* Variable-length types may be incomplete even if
24645 TREE_ASM_WRITTEN. For such types, fall through to
24646 gen_array_type_die() and possibly fill in
24647 DW_AT_{upper,lower}_bound attributes. */
24648 if ((TREE_CODE (type) != ARRAY_TYPE
24649 && TREE_CODE (type) != RECORD_TYPE
24650 && TREE_CODE (type) != UNION_TYPE
24651 && TREE_CODE (type) != QUAL_UNION_TYPE)
24652 || !variably_modified_type_p (type, NULL))
24653 return;
24654 }
24655
24656 switch (TREE_CODE (type))
24657 {
24658 case ERROR_MARK:
24659 break;
24660
24661 case POINTER_TYPE:
24662 case REFERENCE_TYPE:
24663 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24664 ensures that the gen_type_die recursion will terminate even if the
24665 type is recursive. Recursive types are possible in Ada. */
24666 /* ??? We could perhaps do this for all types before the switch
24667 statement. */
24668 TREE_ASM_WRITTEN (type) = 1;
24669
24670 /* For these types, all that is required is that we output a DIE (or a
24671 set of DIEs) to represent the "basis" type. */
24672 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24673 DINFO_USAGE_IND_USE);
24674 break;
24675
24676 case OFFSET_TYPE:
24677 /* This code is used for C++ pointer-to-data-member types.
24678 Output a description of the relevant class type. */
24679 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24680 DINFO_USAGE_IND_USE);
24681
24682 /* Output a description of the type of the object pointed to. */
24683 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24684 DINFO_USAGE_IND_USE);
24685
24686 /* Now output a DIE to represent this pointer-to-data-member type
24687 itself. */
24688 gen_ptr_to_mbr_type_die (type, context_die);
24689 break;
24690
24691 case FUNCTION_TYPE:
24692 /* Force out return type (in case it wasn't forced out already). */
24693 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24694 DINFO_USAGE_DIR_USE);
24695 gen_subroutine_type_die (type, context_die);
24696 break;
24697
24698 case METHOD_TYPE:
24699 /* Force out return type (in case it wasn't forced out already). */
24700 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24701 DINFO_USAGE_DIR_USE);
24702 gen_subroutine_type_die (type, context_die);
24703 break;
24704
24705 case ARRAY_TYPE:
24706 case VECTOR_TYPE:
24707 gen_array_type_die (type, context_die);
24708 break;
24709
24710 case ENUMERAL_TYPE:
24711 case RECORD_TYPE:
24712 case UNION_TYPE:
24713 case QUAL_UNION_TYPE:
24714 gen_tagged_type_die (type, context_die, usage);
24715 return;
24716
24717 case VOID_TYPE:
24718 case INTEGER_TYPE:
24719 case REAL_TYPE:
24720 case FIXED_POINT_TYPE:
24721 case COMPLEX_TYPE:
24722 case BOOLEAN_TYPE:
24723 case POINTER_BOUNDS_TYPE:
24724 /* No DIEs needed for fundamental types. */
24725 break;
24726
24727 case NULLPTR_TYPE:
24728 case LANG_TYPE:
24729 /* Just use DW_TAG_unspecified_type. */
24730 {
24731 dw_die_ref type_die = lookup_type_die (type);
24732 if (type_die == NULL)
24733 {
24734 tree name = TYPE_IDENTIFIER (type);
24735 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24736 type);
24737 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24738 equate_type_number_to_die (type, type_die);
24739 }
24740 }
24741 break;
24742
24743 default:
24744 if (is_cxx_auto (type))
24745 {
24746 tree name = TYPE_IDENTIFIER (type);
24747 dw_die_ref *die = (name == get_identifier ("auto")
24748 ? &auto_die : &decltype_auto_die);
24749 if (!*die)
24750 {
24751 *die = new_die (DW_TAG_unspecified_type,
24752 comp_unit_die (), NULL_TREE);
24753 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24754 }
24755 equate_type_number_to_die (type, *die);
24756 break;
24757 }
24758 gcc_unreachable ();
24759 }
24760
24761 TREE_ASM_WRITTEN (type) = 1;
24762 }
24763
24764 static void
24765 gen_type_die (tree type, dw_die_ref context_die)
24766 {
24767 if (type != error_mark_node)
24768 {
24769 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24770 if (flag_checking)
24771 {
24772 dw_die_ref die = lookup_type_die (type);
24773 if (die)
24774 check_die (die);
24775 }
24776 }
24777 }
24778
24779 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24780 things which are local to the given block. */
24781
24782 static void
24783 gen_block_die (tree stmt, dw_die_ref context_die)
24784 {
24785 int must_output_die = 0;
24786 bool inlined_func;
24787
24788 /* Ignore blocks that are NULL. */
24789 if (stmt == NULL_TREE)
24790 return;
24791
24792 inlined_func = inlined_function_outer_scope_p (stmt);
24793
24794 /* If the block is one fragment of a non-contiguous block, do not
24795 process the variables, since they will have been done by the
24796 origin block. Do process subblocks. */
24797 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24798 {
24799 tree sub;
24800
24801 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24802 gen_block_die (sub, context_die);
24803
24804 return;
24805 }
24806
24807 /* Determine if we need to output any Dwarf DIEs at all to represent this
24808 block. */
24809 if (inlined_func)
24810 /* The outer scopes for inlinings *must* always be represented. We
24811 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24812 must_output_die = 1;
24813 else
24814 {
24815 /* Determine if this block directly contains any "significant"
24816 local declarations which we will need to output DIEs for. */
24817 if (debug_info_level > DINFO_LEVEL_TERSE)
24818 /* We are not in terse mode so *any* local declaration counts
24819 as being a "significant" one. */
24820 must_output_die = ((BLOCK_VARS (stmt) != NULL
24821 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24822 && (TREE_USED (stmt)
24823 || TREE_ASM_WRITTEN (stmt)
24824 || BLOCK_ABSTRACT (stmt)));
24825 else if ((TREE_USED (stmt)
24826 || TREE_ASM_WRITTEN (stmt)
24827 || BLOCK_ABSTRACT (stmt))
24828 && !dwarf2out_ignore_block (stmt))
24829 must_output_die = 1;
24830 }
24831
24832 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24833 DIE for any block which contains no significant local declarations at
24834 all. Rather, in such cases we just call `decls_for_scope' so that any
24835 needed Dwarf info for any sub-blocks will get properly generated. Note
24836 that in terse mode, our definition of what constitutes a "significant"
24837 local declaration gets restricted to include only inlined function
24838 instances and local (nested) function definitions. */
24839 if (must_output_die)
24840 {
24841 if (inlined_func)
24842 {
24843 /* If STMT block is abstract, that means we have been called
24844 indirectly from dwarf2out_abstract_function.
24845 That function rightfully marks the descendant blocks (of
24846 the abstract function it is dealing with) as being abstract,
24847 precisely to prevent us from emitting any
24848 DW_TAG_inlined_subroutine DIE as a descendant
24849 of an abstract function instance. So in that case, we should
24850 not call gen_inlined_subroutine_die.
24851
24852 Later though, when cgraph asks dwarf2out to emit info
24853 for the concrete instance of the function decl into which
24854 the concrete instance of STMT got inlined, the latter will lead
24855 to the generation of a DW_TAG_inlined_subroutine DIE. */
24856 if (! BLOCK_ABSTRACT (stmt))
24857 gen_inlined_subroutine_die (stmt, context_die);
24858 }
24859 else
24860 gen_lexical_block_die (stmt, context_die);
24861 }
24862 else
24863 decls_for_scope (stmt, context_die);
24864 }
24865
24866 /* Process variable DECL (or variable with origin ORIGIN) within
24867 block STMT and add it to CONTEXT_DIE. */
24868 static void
24869 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24870 {
24871 dw_die_ref die;
24872 tree decl_or_origin = decl ? decl : origin;
24873
24874 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24875 die = lookup_decl_die (decl_or_origin);
24876 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24877 {
24878 if (TYPE_DECL_IS_STUB (decl_or_origin))
24879 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24880 else
24881 die = lookup_decl_die (decl_or_origin);
24882 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24883 if (! die && ! early_dwarf)
24884 return;
24885 }
24886 else
24887 die = NULL;
24888
24889 /* Avoid creating DIEs for local typedefs and concrete static variables that
24890 will only be pruned later. */
24891 if ((origin || decl_ultimate_origin (decl))
24892 && (TREE_CODE (decl_or_origin) == TYPE_DECL
24893 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
24894 {
24895 origin = decl_ultimate_origin (decl_or_origin);
24896 if (decl && VAR_P (decl) && die != NULL)
24897 {
24898 die = lookup_decl_die (origin);
24899 if (die != NULL)
24900 equate_decl_number_to_die (decl, die);
24901 }
24902 return;
24903 }
24904
24905 if (die != NULL && die->die_parent == NULL)
24906 add_child_die (context_die, die);
24907 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
24908 {
24909 if (early_dwarf)
24910 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
24911 stmt, context_die);
24912 }
24913 else
24914 {
24915 if (decl && DECL_P (decl))
24916 {
24917 die = lookup_decl_die (decl);
24918
24919 /* Early created DIEs do not have a parent as the decls refer
24920 to the function as DECL_CONTEXT rather than the BLOCK. */
24921 if (die && die->die_parent == NULL)
24922 {
24923 gcc_assert (in_lto_p);
24924 add_child_die (context_die, die);
24925 }
24926 }
24927
24928 gen_decl_die (decl, origin, NULL, context_die);
24929 }
24930 }
24931
24932 /* Generate all of the decls declared within a given scope and (recursively)
24933 all of its sub-blocks. */
24934
24935 static void
24936 decls_for_scope (tree stmt, dw_die_ref context_die)
24937 {
24938 tree decl;
24939 unsigned int i;
24940 tree subblocks;
24941
24942 /* Ignore NULL blocks. */
24943 if (stmt == NULL_TREE)
24944 return;
24945
24946 /* Output the DIEs to represent all of the data objects and typedefs
24947 declared directly within this block but not within any nested
24948 sub-blocks. Also, nested function and tag DIEs have been
24949 generated with a parent of NULL; fix that up now. We don't
24950 have to do this if we're at -g1. */
24951 if (debug_info_level > DINFO_LEVEL_TERSE)
24952 {
24953 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
24954 process_scope_var (stmt, decl, NULL_TREE, context_die);
24955 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
24956 origin - avoid doing this twice as we have no good way to see
24957 if we've done it once already. */
24958 if (! early_dwarf)
24959 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
24960 {
24961 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
24962 if (decl == current_function_decl)
24963 /* Ignore declarations of the current function: although they
24964 are declarations, gen_subprogram_die would treat them as
24965 definitions again because they are equal to
24966 current_function_decl, and would recurse endlessly. */;
24967 else if (TREE_CODE (decl) == FUNCTION_DECL)
24968 process_scope_var (stmt, decl, NULL_TREE, context_die);
24969 else
24970 process_scope_var (stmt, NULL_TREE, decl, context_die);
24971 }
24972 }
24973
24974 /* Even if we're at -g1, we need to process the subblocks in order to get
24975 inlined call information. */
24976
24977 /* Output the DIEs to represent all sub-blocks (and the items declared
24978 therein) of this block. */
24979 for (subblocks = BLOCK_SUBBLOCKS (stmt);
24980 subblocks != NULL;
24981 subblocks = BLOCK_CHAIN (subblocks))
24982 gen_block_die (subblocks, context_die);
24983 }
24984
24985 /* Is this a typedef we can avoid emitting? */
24986
24987 bool
24988 is_redundant_typedef (const_tree decl)
24989 {
24990 if (TYPE_DECL_IS_STUB (decl))
24991 return true;
24992
24993 if (DECL_ARTIFICIAL (decl)
24994 && DECL_CONTEXT (decl)
24995 && is_tagged_type (DECL_CONTEXT (decl))
24996 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
24997 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
24998 /* Also ignore the artificial member typedef for the class name. */
24999 return true;
25000
25001 return false;
25002 }
25003
25004 /* Return TRUE if TYPE is a typedef that names a type for linkage
25005 purposes. This kind of typedef is produced by the C++ FE for
25006 constructs like:
25007
25008 typedef struct {...} foo;
25009
25010 In that case, there is no typedef variant type produced for foo.
25011 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25012 struct type. */
25013
25014 static bool
25015 is_naming_typedef_decl (const_tree decl)
25016 {
25017 if (decl == NULL_TREE
25018 || TREE_CODE (decl) != TYPE_DECL
25019 || DECL_NAMELESS (decl)
25020 || !is_tagged_type (TREE_TYPE (decl))
25021 || DECL_IS_BUILTIN (decl)
25022 || is_redundant_typedef (decl)
25023 /* It looks like Ada produces TYPE_DECLs that are very similar
25024 to C++ naming typedefs but that have different
25025 semantics. Let's be specific to C++ for now. */
25026 || !is_cxx (decl))
25027 return FALSE;
25028
25029 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25030 && TYPE_NAME (TREE_TYPE (decl)) == decl
25031 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25032 != TYPE_NAME (TREE_TYPE (decl))));
25033 }
25034
25035 /* Looks up the DIE for a context. */
25036
25037 static inline dw_die_ref
25038 lookup_context_die (tree context)
25039 {
25040 if (context)
25041 {
25042 /* Find die that represents this context. */
25043 if (TYPE_P (context))
25044 {
25045 context = TYPE_MAIN_VARIANT (context);
25046 dw_die_ref ctx = lookup_type_die (context);
25047 if (!ctx)
25048 return NULL;
25049 return strip_naming_typedef (context, ctx);
25050 }
25051 else
25052 return lookup_decl_die (context);
25053 }
25054 return comp_unit_die ();
25055 }
25056
25057 /* Returns the DIE for a context. */
25058
25059 static inline dw_die_ref
25060 get_context_die (tree context)
25061 {
25062 if (context)
25063 {
25064 /* Find die that represents this context. */
25065 if (TYPE_P (context))
25066 {
25067 context = TYPE_MAIN_VARIANT (context);
25068 return strip_naming_typedef (context, force_type_die (context));
25069 }
25070 else
25071 return force_decl_die (context);
25072 }
25073 return comp_unit_die ();
25074 }
25075
25076 /* Returns the DIE for decl. A DIE will always be returned. */
25077
25078 static dw_die_ref
25079 force_decl_die (tree decl)
25080 {
25081 dw_die_ref decl_die;
25082 unsigned saved_external_flag;
25083 tree save_fn = NULL_TREE;
25084 decl_die = lookup_decl_die (decl);
25085 if (!decl_die)
25086 {
25087 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25088
25089 decl_die = lookup_decl_die (decl);
25090 if (decl_die)
25091 return decl_die;
25092
25093 switch (TREE_CODE (decl))
25094 {
25095 case FUNCTION_DECL:
25096 /* Clear current_function_decl, so that gen_subprogram_die thinks
25097 that this is a declaration. At this point, we just want to force
25098 declaration die. */
25099 save_fn = current_function_decl;
25100 current_function_decl = NULL_TREE;
25101 gen_subprogram_die (decl, context_die);
25102 current_function_decl = save_fn;
25103 break;
25104
25105 case VAR_DECL:
25106 /* Set the external flag to force a declaration DIE. Restore it after
25107 gen_decl_die() call. */
25108 saved_external_flag = DECL_EXTERNAL (decl);
25109 DECL_EXTERNAL (decl) = 1;
25110 gen_decl_die (decl, NULL, NULL, context_die);
25111 DECL_EXTERNAL (decl) = saved_external_flag;
25112 break;
25113
25114 case NAMESPACE_DECL:
25115 if (dwarf_version >= 3 || !dwarf_strict)
25116 dwarf2out_decl (decl);
25117 else
25118 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25119 decl_die = comp_unit_die ();
25120 break;
25121
25122 case TRANSLATION_UNIT_DECL:
25123 decl_die = comp_unit_die ();
25124 break;
25125
25126 default:
25127 gcc_unreachable ();
25128 }
25129
25130 /* We should be able to find the DIE now. */
25131 if (!decl_die)
25132 decl_die = lookup_decl_die (decl);
25133 gcc_assert (decl_die);
25134 }
25135
25136 return decl_die;
25137 }
25138
25139 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25140 always returned. */
25141
25142 static dw_die_ref
25143 force_type_die (tree type)
25144 {
25145 dw_die_ref type_die;
25146
25147 type_die = lookup_type_die (type);
25148 if (!type_die)
25149 {
25150 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25151
25152 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25153 false, context_die);
25154 gcc_assert (type_die);
25155 }
25156 return type_die;
25157 }
25158
25159 /* Force out any required namespaces to be able to output DECL,
25160 and return the new context_die for it, if it's changed. */
25161
25162 static dw_die_ref
25163 setup_namespace_context (tree thing, dw_die_ref context_die)
25164 {
25165 tree context = (DECL_P (thing)
25166 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25167 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25168 /* Force out the namespace. */
25169 context_die = force_decl_die (context);
25170
25171 return context_die;
25172 }
25173
25174 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25175 type) within its namespace, if appropriate.
25176
25177 For compatibility with older debuggers, namespace DIEs only contain
25178 declarations; all definitions are emitted at CU scope, with
25179 DW_AT_specification pointing to the declaration (like with class
25180 members). */
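/* Illustrative example only (hypothetical names): for
   "namespace N { int v = 0; }", the DW_TAG_namespace DIE for N contains a
   declaration DIE for "v", while the defining DW_TAG_variable DIE is
   emitted at CU scope and refers back to that declaration through
   DW_AT_specification.  */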
25181
25182 static dw_die_ref
25183 declare_in_namespace (tree thing, dw_die_ref context_die)
25184 {
25185 dw_die_ref ns_context;
25186
25187 if (debug_info_level <= DINFO_LEVEL_TERSE)
25188 return context_die;
25189
25190 /* External declarations in the local scope only need to be emitted
25191 once, not once in the namespace and once in the scope.
25192
25193 This avoids declaring the `extern' below in the
25194 namespace DIE as well as in the innermost scope:
25195
25196 namespace S
25197 {
25198 int i=5;
25199 int foo()
25200 {
25201 int i=8;
25202 extern int i;
25203 return i;
25204 }
25205 }
25206 */
25207 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25208 return context_die;
25209
25210 /* If this decl is from an inlined function, then don't try to emit it in its
25211 namespace, as we will get confused. It would have already been emitted
25212 when the abstract instance of the inline function was emitted anyway. */
25213 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25214 return context_die;
25215
25216 ns_context = setup_namespace_context (thing, context_die);
25217
25218 if (ns_context != context_die)
25219 {
25220 if (is_fortran ())
25221 return ns_context;
25222 if (DECL_P (thing))
25223 gen_decl_die (thing, NULL, NULL, ns_context);
25224 else
25225 gen_type_die (thing, ns_context);
25226 }
25227 return context_die;
25228 }
25229
25230 /* Generate a DIE for a namespace or namespace alias. */
25231
25232 static void
25233 gen_namespace_die (tree decl, dw_die_ref context_die)
25234 {
25235 dw_die_ref namespace_die;
25236
25237 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25238 they are an alias of. */
25239 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25240 {
25241 /* Output a real namespace or module. */
25242 context_die = setup_namespace_context (decl, comp_unit_die ());
25243 namespace_die = new_die (is_fortran ()
25244 ? DW_TAG_module : DW_TAG_namespace,
25245 context_die, decl);
25246 /* For Fortran modules defined in a different CU, don't add src coords. */
25247 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25248 {
25249 const char *name = dwarf2_name (decl, 0);
25250 if (name)
25251 add_name_attribute (namespace_die, name);
25252 }
25253 else
25254 add_name_and_src_coords_attributes (namespace_die, decl);
25255 if (DECL_EXTERNAL (decl))
25256 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25257 equate_decl_number_to_die (decl, namespace_die);
25258 }
25259 else
25260 {
25261 /* Output a namespace alias. */
25262
25263 /* Force out the namespace we are an alias of, if necessary. */
25264 dw_die_ref origin_die
25265 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25266
25267 if (DECL_FILE_SCOPE_P (decl)
25268 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25269 context_die = setup_namespace_context (decl, comp_unit_die ());
25270 /* Now create the namespace alias DIE. */
25271 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25272 add_name_and_src_coords_attributes (namespace_die, decl);
25273 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25274 equate_decl_number_to_die (decl, namespace_die);
25275 }
25276 if ((dwarf_version >= 5 || !dwarf_strict)
25277 && lang_hooks.decls.decl_dwarf_attribute (decl,
25278 DW_AT_export_symbols) == 1)
25279 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25280
25281 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25282 if (want_pubnames ())
25283 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25284 }
25285
25286 /* Generate Dwarf debug information for a decl described by DECL.
25287 The return value is currently only meaningful for PARM_DECLs,
25288 for all other decls it returns NULL.
25289
25290 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25291 It can be NULL otherwise. */
25292
25293 static dw_die_ref
25294 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25295 dw_die_ref context_die)
25296 {
25297 tree decl_or_origin = decl ? decl : origin;
25298 tree class_origin = NULL, ultimate_origin;
25299
25300 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25301 return NULL;
25302
25303 /* Ignore pointer bounds decls. */
25304 if (DECL_P (decl_or_origin)
25305 && TREE_TYPE (decl_or_origin)
25306 && POINTER_BOUNDS_P (decl_or_origin))
25307 return NULL;
25308
25309 switch (TREE_CODE (decl_or_origin))
25310 {
25311 case ERROR_MARK:
25312 break;
25313
25314 case CONST_DECL:
25315 if (!is_fortran () && !is_ada ())
25316 {
25317 /* The individual enumerators of an enum type get output when we output
25318 the Dwarf representation of the relevant enum type itself. */
25319 break;
25320 }
25321
25322 /* Emit its type. */
25323 gen_type_die (TREE_TYPE (decl), context_die);
25324
25325 /* And its containing namespace. */
25326 context_die = declare_in_namespace (decl, context_die);
25327
25328 gen_const_die (decl, context_die);
25329 break;
25330
25331 case FUNCTION_DECL:
25332 #if 0
25333 /* FIXME */
25334 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25335 on local redeclarations of global functions. That seems broken. */
25336 if (current_function_decl != decl)
25337 /* This is only a declaration. */;
25338 #endif
25339
25340 /* We should have abstract copies already and should not generate
25341 stray type DIEs in late LTO dumping. */
25342 if (! early_dwarf)
25343 ;
25344
25345 /* If we're emitting a clone, emit info for the abstract instance. */
25346 else if (origin || DECL_ORIGIN (decl) != decl)
25347 dwarf2out_abstract_function (origin
25348 ? DECL_ORIGIN (origin)
25349 : DECL_ABSTRACT_ORIGIN (decl));
25350
25351 /* If we're emitting a possibly inlined function emit it as
25352 abstract instance. */
25353 else if (cgraph_function_possibly_inlined_p (decl)
25354 && ! DECL_ABSTRACT_P (decl)
25355 && ! class_or_namespace_scope_p (context_die)
25356 /* dwarf2out_abstract_function won't emit a die if this is just
25357 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25358 that case, because that works only if we have a die. */
25359 && DECL_INITIAL (decl) != NULL_TREE)
25360 dwarf2out_abstract_function (decl);
25361
25362 /* Otherwise we're emitting the primary DIE for this decl. */
25363 else if (debug_info_level > DINFO_LEVEL_TERSE)
25364 {
25365 /* Before we describe the FUNCTION_DECL itself, make sure that we
25366 have its containing type. */
25367 if (!origin)
25368 origin = decl_class_context (decl);
25369 if (origin != NULL_TREE)
25370 gen_type_die (origin, context_die);
25371
25372 /* And its return type. */
25373 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25374
25375 /* And its virtual context. */
25376 if (DECL_VINDEX (decl) != NULL_TREE)
25377 gen_type_die (DECL_CONTEXT (decl), context_die);
25378
25379 /* Make sure we have a member DIE for decl. */
25380 if (origin != NULL_TREE)
25381 gen_type_die_for_member (origin, decl, context_die);
25382
25383 /* And its containing namespace. */
25384 context_die = declare_in_namespace (decl, context_die);
25385 }
25386
25387 /* Now output a DIE to represent the function itself. */
25388 if (decl)
25389 gen_subprogram_die (decl, context_die);
25390 break;
25391
25392 case TYPE_DECL:
25393 /* If we are in terse mode, don't generate any DIEs to represent any
25394 actual typedefs. */
25395 if (debug_info_level <= DINFO_LEVEL_TERSE)
25396 break;
25397
25398 /* In the special case of a TYPE_DECL node representing the declaration
25399 of some type tag, if the given TYPE_DECL is marked as having been
25400 instantiated from some other (original) TYPE_DECL node (e.g. one which
25401 was generated within the original definition of an inline function) we
25402 used to generate a special (abbreviated) DW_TAG_structure_type,
25403 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25404 should actually be referencing those DIEs, as variable DIEs with that
25405 type would already be emitted in the abstract origin, so such a DIE was
25406 always removed during unused type pruning. Don't add anything in this
25407 case. */
25408 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25409 break;
25410
25411 if (is_redundant_typedef (decl))
25412 gen_type_die (TREE_TYPE (decl), context_die);
25413 else
25414 /* Output a DIE to represent the typedef itself. */
25415 gen_typedef_die (decl, context_die);
25416 break;
25417
25418 case LABEL_DECL:
25419 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25420 gen_label_die (decl, context_die);
25421 break;
25422
25423 case VAR_DECL:
25424 case RESULT_DECL:
25425 /* If we are in terse mode, don't generate any DIEs to represent any
25426 variable declarations or definitions. */
25427 if (debug_info_level <= DINFO_LEVEL_TERSE)
25428 break;
25429
25430 /* Avoid generating stray type DIEs during late dwarf dumping.
25431 All types have been dumped early. */
25432 if (early_dwarf
25433 /* ??? But in LTRANS we cannot annotate early created variably
25434 modified type DIEs without copying them and adjusting all
25435 references to them. Dump them again as happens for inlining
25436 which copies both the decl and the types. */
25437 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25438 in VLA bound information for example. */
25439 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25440 current_function_decl)))
25441 {
25442 /* Output any DIEs that are needed to specify the type of this data
25443 object. */
25444 if (decl_by_reference_p (decl_or_origin))
25445 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25446 else
25447 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25448 }
25449
25450 if (early_dwarf)
25451 {
25452 /* And its containing type. */
25453 class_origin = decl_class_context (decl_or_origin);
25454 if (class_origin != NULL_TREE)
25455 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25456
25457 /* And its containing namespace. */
25458 context_die = declare_in_namespace (decl_or_origin, context_die);
25459 }
25460
25461 /* Now output the DIE to represent the data object itself. This gets
25462 complicated because of the possibility that the VAR_DECL really
25463 represents an inlined instance of a formal parameter for an inline
25464 function. */
25465 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25466 if (ultimate_origin != NULL_TREE
25467 && TREE_CODE (ultimate_origin) == PARM_DECL)
25468 gen_formal_parameter_die (decl, origin,
25469 true /* Emit name attribute. */,
25470 context_die);
25471 else
25472 gen_variable_die (decl, origin, context_die);
25473 break;
25474
25475 case FIELD_DECL:
25476 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
25477 /* Ignore the nameless fields that are used to skip bits but handle C++
25478 anonymous unions and structs. */
25479 if (DECL_NAME (decl) != NULL_TREE
25480 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25481 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25482 {
25483 gen_type_die (member_declared_type (decl), context_die);
25484 gen_field_die (decl, ctx, context_die);
25485 }
25486 break;
25487
25488 case PARM_DECL:
25489 /* Avoid generating stray type DIEs during late dwarf dumping.
25490 All types have been dumped early. */
25491 if (early_dwarf
25492 /* ??? But in LTRANS we cannot annotate early created variably
25493 modified type DIEs without copying them and adjusting all
25494 references to them. Dump them again as happens for inlining
25495 which copies both the decl and the types. */
25496 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25497 in VLA bound information for example. */
25498 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25499 current_function_decl)))
25500 {
25501 if (DECL_BY_REFERENCE (decl_or_origin))
25502 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25503 else
25504 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25505 }
25506 return gen_formal_parameter_die (decl, origin,
25507 true /* Emit name attribute. */,
25508 context_die);
25509
25510 case NAMESPACE_DECL:
25511 if (dwarf_version >= 3 || !dwarf_strict)
25512 gen_namespace_die (decl, context_die);
25513 break;
25514
25515 case IMPORTED_DECL:
25516 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25517 DECL_CONTEXT (decl), context_die);
25518 break;
25519
25520 case NAMELIST_DECL:
25521 gen_namelist_decl (DECL_NAME (decl), context_die,
25522 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25523 break;
25524
25525 default:
25526 /* Probably some frontend-internal decl. Assume we don't care. */
25527 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25528 break;
25529 }
25530
25531 return NULL;
25532 }
25533 \f
25534 /* Output initial debug information for global DECL. Called at the
25535 end of the parsing process.
25536
25537 This is the initial debug generation process. As such, the DIEs
25538 generated may be incomplete. A later debug generation pass
25539 (dwarf2out_late_global_decl) will augment the information generated
25540 in this pass (e.g., with complete location info). */
25541
25542 static void
25543 dwarf2out_early_global_decl (tree decl)
25544 {
25545 set_early_dwarf s;
25546
25547 /* gen_decl_die() will set DECL_ABSTRACT because
25548 cgraph_function_possibly_inlined_p() returns true. This in
25549 turn will cause DW_AT_inline attributes to be set.
25550
25551 This happens because at early dwarf generation, there is no
25552 cgraph information, causing cgraph_function_possibly_inlined_p()
25553 to return true. Trick cgraph_function_possibly_inlined_p()
25554 while we generate dwarf early. */
25555 bool save = symtab->global_info_ready;
25556 symtab->global_info_ready = true;
25557
25558 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25559 other DECLs and they can point to template types or other things
25560 that dwarf2out can't handle when done via dwarf2out_decl. */
25561 if (TREE_CODE (decl) != TYPE_DECL
25562 && TREE_CODE (decl) != PARM_DECL)
25563 {
25564 if (TREE_CODE (decl) == FUNCTION_DECL)
25565 {
25566 tree save_fndecl = current_function_decl;
25567
25568 /* For nested functions, make sure we have DIEs for the parents first
25569 so that all nested DIEs are generated at the proper scope in the
25570 first shot. */
25571 tree context = decl_function_context (decl);
25572 if (context != NULL)
25573 {
25574 dw_die_ref context_die = lookup_decl_die (context);
25575 current_function_decl = context;
25576
25577 /* Avoid emitting DIEs multiple times, but still process CONTEXT
25578 enough so that it lands in its own context. This avoids type
25579 pruning issues later on. */
25580 if (context_die == NULL || is_declaration_die (context_die))
25581 dwarf2out_decl (context);
25582 }
25583
25584 /* Emit an abstract origin of a function first. This happens
25585 with C++ constructor clones for example and makes
25586 dwarf2out_abstract_function happy which requires the early
25587 DIE of the abstract instance to be present. */
25588 tree origin = DECL_ABSTRACT_ORIGIN (decl);
25589 dw_die_ref origin_die;
25590 if (origin != NULL
25591 /* Do not emit the DIE multiple times but make sure to
25592 process it fully here in case we just saw a declaration. */
25593 && ((origin_die = lookup_decl_die (origin)) == NULL
25594 || is_declaration_die (origin_die)))
25595 {
25596 current_function_decl = origin;
25597 dwarf2out_decl (origin);
25598 }
25599
25600 /* Emit the DIE for decl but avoid doing that multiple times. */
25601 dw_die_ref old_die;
25602 if ((old_die = lookup_decl_die (decl)) == NULL
25603 || is_declaration_die (old_die))
25604 {
25605 current_function_decl = decl;
25606 dwarf2out_decl (decl);
25607 }
25608
25609 current_function_decl = save_fndecl;
25610 }
25611 else
25612 dwarf2out_decl (decl);
25613 }
25614 symtab->global_info_ready = save;
25615 }
25616
25617 /* Output debug information for global decl DECL. Called from
25618 toplev.c after compilation proper has finished. */
25619
25620 static void
25621 dwarf2out_late_global_decl (tree decl)
25622 {
25623 /* Fill-in any location information we were unable to determine
25624 on the first pass. */
25625 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25626 {
25627 dw_die_ref die = lookup_decl_die (decl);
25628
25629 /* We may have to generate early debug late for LTO in case debug
25630 was not enabled at compile-time or the target doesn't support
25631 the LTO early debug scheme. */
25632 if (! die && in_lto_p)
25633 {
25634 dwarf2out_decl (decl);
25635 die = lookup_decl_die (decl);
25636 }
25637
25638 if (die)
25639 {
25640 /* We get called via the symtab code invoking late_global_decl
25641 for symbols that are optimized out. Do not add locations
25642 for those, except if they have a DECL_VALUE_EXPR, in which case
25643 they are relevant for debuggers. */
25644 varpool_node *node = varpool_node::get (decl);
25645 if ((! node || ! node->definition) && ! DECL_HAS_VALUE_EXPR_P (decl))
25646 tree_add_const_value_attribute_for_decl (die, decl);
25647 else
25648 add_location_or_const_value_attribute (die, decl, false);
25649 }
25650 }
25651 }
25652
25653 /* Output debug information for type decl DECL. Called from toplev.c
25654 and from language front ends (to record built-in types). */
25655 static void
25656 dwarf2out_type_decl (tree decl, int local)
25657 {
25658 if (!local)
25659 {
25660 set_early_dwarf s;
25661 dwarf2out_decl (decl);
25662 }
25663 }
25664
25665 /* Output debug information for imported module or decl DECL.
25666 NAME is non-NULL name in the lexical block if the decl has been renamed.
25667 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
25668 that DECL belongs to.
25669 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
25670 static void
25671 dwarf2out_imported_module_or_decl_1 (tree decl,
25672 tree name,
25673 tree lexical_block,
25674 dw_die_ref lexical_block_die)
25675 {
25676 expanded_location xloc;
25677 dw_die_ref imported_die = NULL;
25678 dw_die_ref at_import_die;
25679
25680 if (TREE_CODE (decl) == IMPORTED_DECL)
25681 {
25682 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25683 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25684 gcc_assert (decl);
25685 }
25686 else
25687 xloc = expand_location (input_location);
25688
25689 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25690 {
25691 at_import_die = force_type_die (TREE_TYPE (decl));
25692 /* For namespace N { typedef void T; } using N::T; base_type_die
25693 returns NULL, but DW_TAG_imported_declaration requires
25694 a DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
25695 if (!at_import_die)
25696 {
25697 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25698 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25699 at_import_die = lookup_type_die (TREE_TYPE (decl));
25700 gcc_assert (at_import_die);
25701 }
25702 }
25703 else
25704 {
25705 at_import_die = lookup_decl_die (decl);
25706 if (!at_import_die)
25707 {
25708 /* If we're trying to avoid duplicate debug info, we may not have
25709 emitted the member decl for this field. Emit it now. */
25710 if (TREE_CODE (decl) == FIELD_DECL)
25711 {
25712 tree type = DECL_CONTEXT (decl);
25713
25714 if (TYPE_CONTEXT (type)
25715 && TYPE_P (TYPE_CONTEXT (type))
25716 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25717 DINFO_USAGE_DIR_USE))
25718 return;
25719 gen_type_die_for_member (type, decl,
25720 get_context_die (TYPE_CONTEXT (type)));
25721 }
25722 if (TREE_CODE (decl) == NAMELIST_DECL)
25723 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25724 get_context_die (DECL_CONTEXT (decl)),
25725 NULL_TREE);
25726 else
25727 at_import_die = force_decl_die (decl);
25728 }
25729 }
25730
25731 if (TREE_CODE (decl) == NAMESPACE_DECL)
25732 {
25733 if (dwarf_version >= 3 || !dwarf_strict)
25734 imported_die = new_die (DW_TAG_imported_module,
25735 lexical_block_die,
25736 lexical_block);
25737 else
25738 return;
25739 }
25740 else
25741 imported_die = new_die (DW_TAG_imported_declaration,
25742 lexical_block_die,
25743 lexical_block);
25744
25745 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25746 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25747 if (debug_column_info && xloc.column)
25748 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25749 if (name)
25750 add_AT_string (imported_die, DW_AT_name,
25751 IDENTIFIER_POINTER (name));
25752 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25753 }
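/* As an illustration, a C++ translation unit containing

	namespace N { int v; void f (); }
	using N::f;
	using namespace N;

   is expected to get roughly the following DIEs from this routine:

	DW_TAG_imported_declaration		(for "using N::f;")
	  DW_AT_decl_file / DW_AT_decl_line
	  DW_AT_import -> DW_TAG_subprogram for N::f
	DW_TAG_imported_module			(for "using namespace N;")
	  DW_AT_decl_file / DW_AT_decl_line
	  DW_AT_import -> DW_TAG_namespace for N

   with the exact shape depending on the front end and DWARF version
   (DW_TAG_imported_module needs DWARF 3 or -gno-strict-dwarf).  */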
25754
25755 /* Output debug information for an imported module or decl DECL.
25756 NAME is the non-NULL name in CONTEXT if the decl has been renamed.
25757 CHILD is true if DECL is one of the renamed decls imported as part of
25758 a whole-module import.
25759 IMPLICIT is set if this hook is called for an implicit import,
25760 such as an inline namespace. */
25761
25762 static void
25763 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25764 bool child, bool implicit)
25765 {
25766 /* dw_die_ref at_import_die; */
25767 dw_die_ref scope_die;
25768
25769 if (debug_info_level <= DINFO_LEVEL_TERSE)
25770 return;
25771
25772 gcc_assert (decl);
25773
25774 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
25775 should be enough; for DWARF4 and older, even if we emit
25776 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
25777 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
25778 if (implicit
25779 && dwarf_version >= 5
25780 && lang_hooks.decls.decl_dwarf_attribute (decl,
25781 DW_AT_export_symbols) == 1)
25782 return;
25783
25784 set_early_dwarf s;
25785
25786 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
25787 two DIEs: the DIE of the decl being imported and the scope DIE it is
25788 imported into. First, get the DIE for the decl itself. */
25789
25790 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
25791 module or decl. If no DIE is found for a non-global, force a new DIE. */
25792 if (context
25793 && TYPE_P (context)
25794 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25795 return;
25796
25797 scope_die = get_context_die (context);
25798
25799 if (child)
25800 {
25801 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25802 there is nothing we can do here. */
25803 if (dwarf_version < 3 && dwarf_strict)
25804 return;
25805
25806 gcc_assert (scope_die->die_child);
25807 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25808 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25809 scope_die = scope_die->die_child;
25810 }
25811
25812 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
25813 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25814 }
25815
25816 /* Output debug information for namelists. */
25817
25818 static dw_die_ref
25819 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25820 {
25821 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25822 tree value;
25823 unsigned i;
25824
25825 if (debug_info_level <= DINFO_LEVEL_TERSE)
25826 return NULL;
25827
25828 gcc_assert (scope_die != NULL);
25829 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25830 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25831
25832 /* If there are no item_decls, we have a nondefining namelist, e.g.
25833 with USE association; hence, set DW_AT_declaration. */
25834 if (item_decls == NULL_TREE)
25835 {
25836 add_AT_flag (nml_die, DW_AT_declaration, 1);
25837 return nml_die;
25838 }
25839
25840 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25841 {
25842 nml_item_ref_die = lookup_decl_die (value);
25843 if (!nml_item_ref_die)
25844 nml_item_ref_die = force_decl_die (value);
25845
25846 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25847 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25848 }
25849 return nml_die;
25850 }
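/* For a Fortran namelist such as

	integer :: i
	real    :: r
	namelist /nml/ i, r

   this is expected to produce, roughly,

	DW_TAG_namelist "nml"
	  DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of i
	  DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of r

   while a use-associated namelist (ITEM_DECLS == NULL_TREE) gets only a
   DW_TAG_namelist DIE with DW_AT_declaration set.  */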
25851
25852
25853 /* Write the debugging output for DECL. */
25854
25855 static void
25856 dwarf2out_decl (tree decl)
25857 {
25858 dw_die_ref context_die = comp_unit_die ();
25859
25860 switch (TREE_CODE (decl))
25861 {
25862 case ERROR_MARK:
25863 return;
25864
25865 case FUNCTION_DECL:
25866 /* If we're a nested function, initially use a parent of NULL; if we're
25867 a plain function, this will be fixed up in decls_for_scope. If
25868 we're a method, it will be ignored, since we already have a DIE. */
25869 if (decl_function_context (decl)
25870 /* But if we're in terse mode, we don't care about scope. */
25871 && debug_info_level > DINFO_LEVEL_TERSE)
25872 context_die = NULL;
25873 break;
25874
25875 case VAR_DECL:
25876 /* For local statics lookup proper context die. */
25877 if (local_function_static (decl))
25878 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25879
25880 /* If we are in terse mode, don't generate any DIEs to represent any
25881 variable declarations or definitions. */
25882 if (debug_info_level <= DINFO_LEVEL_TERSE)
25883 return;
25884 break;
25885
25886 case CONST_DECL:
25887 if (debug_info_level <= DINFO_LEVEL_TERSE)
25888 return;
25889 if (!is_fortran () && !is_ada ())
25890 return;
25891 if (TREE_STATIC (decl) && decl_function_context (decl))
25892 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25893 break;
25894
25895 case NAMESPACE_DECL:
25896 case IMPORTED_DECL:
25897 if (debug_info_level <= DINFO_LEVEL_TERSE)
25898 return;
25899 if (lookup_decl_die (decl) != NULL)
25900 return;
25901 break;
25902
25903 case TYPE_DECL:
25904 /* Don't emit stubs for types unless they are needed by other DIEs. */
25905 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
25906 return;
25907
25908 /* Don't bother trying to generate any DIEs to represent any of the
25909 normal built-in types for the language we are compiling. */
25910 if (DECL_IS_BUILTIN (decl))
25911 return;
25912
25913 /* If we are in terse mode, don't generate any DIEs for types. */
25914 if (debug_info_level <= DINFO_LEVEL_TERSE)
25915 return;
25916
25917 /* If we're a function-scope tag, initially use a parent of NULL;
25918 this will be fixed up in decls_for_scope. */
25919 if (decl_function_context (decl))
25920 context_die = NULL;
25921
25922 break;
25923
25924 case NAMELIST_DECL:
25925 break;
25926
25927 default:
25928 return;
25929 }
25930
25931 gen_decl_die (decl, NULL, NULL, context_die);
25932
25933 if (flag_checking)
25934 {
25935 dw_die_ref die = lookup_decl_die (decl);
25936 if (die)
25937 check_die (die);
25938 }
25939 }
25940
25941 /* Write the debugging output for DECL. */
25942
25943 static void
25944 dwarf2out_function_decl (tree decl)
25945 {
25946 dwarf2out_decl (decl);
25947 call_arg_locations = NULL;
25948 call_arg_loc_last = NULL;
25949 call_site_count = -1;
25950 tail_call_site_count = -1;
25951 decl_loc_table->empty ();
25952 cached_dw_loc_list_table->empty ();
25953 }
25954
25955 /* Output a marker (i.e. a label) for the beginning of the generated code for
25956 a lexical block. */
25957
25958 static void
25959 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
25960 unsigned int blocknum)
25961 {
25962 switch_to_section (current_function_section ());
25963 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
25964 }
25965
25966 /* Output a marker (i.e. a label) for the end of the generated code for a
25967 lexical block. */
25968
25969 static void
25970 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
25971 {
25972 switch_to_section (current_function_section ());
25973 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
25974 }
25975
25976 /* Returns true if it is appropriate not to emit any debugging
25977 information for BLOCK, because it doesn't contain any instructions.
25978
25979 Don't allow this for blocks with nested functions or local classes
25980 as we would end up with orphans, and in the presence of scheduling
25981 we may end up calling them anyway. */
25982
25983 static bool
25984 dwarf2out_ignore_block (const_tree block)
25985 {
25986 tree decl;
25987 unsigned int i;
25988
25989 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
25990 if (TREE_CODE (decl) == FUNCTION_DECL
25991 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
25992 return false;
25993 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
25994 {
25995 decl = BLOCK_NONLOCALIZED_VAR (block, i);
25996 if (TREE_CODE (decl) == FUNCTION_DECL
25997 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
25998 return false;
25999 }
26000
26001 return true;
26002 }
26003
26004 /* Hash table routines for file_hash. */
26005
26006 bool
26007 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26008 {
26009 return filename_cmp (p1->filename, p2) == 0;
26010 }
26011
26012 hashval_t
26013 dwarf_file_hasher::hash (dwarf_file_data *p)
26014 {
26015 return htab_hash_string (p->filename);
26016 }
26017
26018 /* Look up FILE_NAME (in the list of filenames that we know about here in
26019 dwarf2out.c) and return its "index". The index of each (known) filename is
26020 just a unique number which is associated with only that one filename. We
26021 need such numbers for the sake of generating labels (in the .debug_sfnames
26022 section) and references to those file numbers (in the .debug_srcinfo
26023 and .debug_macinfo sections). If the filename given as an argument is not
26024 found in our current list, add it to the list and assign it the next
26025 available unique index number. */
26026
26027 static struct dwarf_file_data *
26028 lookup_filename (const char *file_name)
26029 {
26030 struct dwarf_file_data * created;
26031
26032 if (!file_name)
26033 return NULL;
26034
26035 dwarf_file_data **slot
26036 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26037 INSERT);
26038 if (*slot)
26039 return *slot;
26040
26041 created = ggc_alloc<dwarf_file_data> ();
26042 created->filename = file_name;
26043 created->emitted_number = 0;
26044 *slot = created;
26045 return created;
26046 }
26047
26048 /* If the assembler will construct the file table, then translate the compiler
26049 internal file table number into the assembler file table number, and emit
26050 a .file directive if we haven't already emitted one yet. The file table
26051 numbers are different because we prune debug info for unused variables and
26052 types, which may include filenames. */
26053
26054 static int
26055 maybe_emit_file (struct dwarf_file_data * fd)
26056 {
26057 if (! fd->emitted_number)
26058 {
26059 if (last_emitted_file)
26060 fd->emitted_number = last_emitted_file->emitted_number + 1;
26061 else
26062 fd->emitted_number = 1;
26063 last_emitted_file = fd;
26064
26065 if (DWARF2_ASM_LINE_DEBUG_INFO)
26066 {
26067 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26068 output_quoted_string (asm_out_file,
26069 remap_debug_filename (fd->filename));
26070 fputc ('\n', asm_out_file);
26071 }
26072 }
26073
26074 return fd->emitted_number;
26075 }
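/* For example, when the assembler builds the line table
   (DWARF2_ASM_LINE_DEBUG_INFO), the first two distinct filenames passed
   through here would typically yield

	.file 1 "foo.c"
	.file 2 "bar.h"

   and subsequent .loc directives then use these assembler file numbers;
   they differ from the compiler-internal file table indices because
   files belonging only to pruned debug info never get an emitted_number
   at all.  */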
26076
26077 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26078 That generation should happen after function debug info has been
26079 generated. The value of the attribute is the constant value of ARG. */
26080
26081 static void
26082 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26083 {
26084 die_arg_entry entry;
26085
26086 if (!die || !arg)
26087 return;
26088
26089 gcc_assert (early_dwarf);
26090
26091 if (!tmpl_value_parm_die_table)
26092 vec_alloc (tmpl_value_parm_die_table, 32);
26093
26094 entry.die = die;
26095 entry.arg = arg;
26096 vec_safe_push (tmpl_value_parm_die_table, entry);
26097 }
26098
26099 /* Return TRUE if T is an instance of a generic type, FALSE
26100 otherwise. */
26101
26102 static bool
26103 generic_type_p (tree t)
26104 {
26105 if (t == NULL_TREE || !TYPE_P (t))
26106 return false;
26107 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26108 }
26109
26110 /* Schedule the generation of the generic parameter dies for the
26111 instance of generic type T. The proper generation itself is later
26112 done by gen_scheduled_generic_parms_dies. */
26113
26114 static void
26115 schedule_generic_params_dies_gen (tree t)
26116 {
26117 if (!generic_type_p (t))
26118 return;
26119
26120 gcc_assert (early_dwarf);
26121
26122 if (!generic_type_instances)
26123 vec_alloc (generic_type_instances, 256);
26124
26125 vec_safe_push (generic_type_instances, t);
26126 }
26127
26128 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26129 by append_entry_to_tmpl_value_parm_die_table. This function must
26130 be called after function DIEs have been generated. */
26131
26132 static void
26133 gen_remaining_tmpl_value_param_die_attribute (void)
26134 {
26135 if (tmpl_value_parm_die_table)
26136 {
26137 unsigned i, j;
26138 die_arg_entry *e;
26139
26140 /* We do this in two phases - first get the cases we can
26141 handle during early-finish, preserving those we cannot
26142 (containing symbolic constants where we don't yet know
26143 whether we are going to output the referenced symbols).
26144 For those we try again at late-finish. */
26145 j = 0;
26146 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26147 {
26148 if (!e->die->removed
26149 && !tree_add_const_value_attribute (e->die, e->arg))
26150 {
26151 dw_loc_descr_ref loc = NULL;
26152 if (! early_dwarf
26153 && (dwarf_version >= 5 || !dwarf_strict))
26154 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26155 if (loc)
26156 add_AT_loc (e->die, DW_AT_location, loc);
26157 else
26158 (*tmpl_value_parm_die_table)[j++] = *e;
26159 }
26160 }
26161 tmpl_value_parm_die_table->truncate (j);
26162 }
26163 }
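/* For instance, for a C++ value parameter such as

	template <int N> struct A { };
	A<3> a;

   the DW_TAG_template_value_parameter DIE for N scheduled earlier is
   expected to end up with DW_AT_const_value 3, or, when the argument is
   only known symbolically and the DWARF version permits it, with a
   DW_AT_location computed by loc_descriptor_from_tree.  */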
26164
26165 /* Generate generic parameters DIEs for instances of generic types
26166 that have been previously scheduled by
26167 schedule_generic_params_dies_gen. This function must be called
26168 after all the types of the CU have been laid out. */
26169
26170 static void
26171 gen_scheduled_generic_parms_dies (void)
26172 {
26173 unsigned i;
26174 tree t;
26175
26176 if (!generic_type_instances)
26177 return;
26178
26179 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26180 if (COMPLETE_TYPE_P (t))
26181 gen_generic_params_dies (t);
26182
26183 generic_type_instances = NULL;
26184 }
26185
26186
26187 /* Replace the DW_AT_name attribute of DECL with NAME. */
26188
26189 static void
26190 dwarf2out_set_name (tree decl, tree name)
26191 {
26192 dw_die_ref die;
26193 dw_attr_node *attr;
26194 const char *dname;
26195
26196 die = TYPE_SYMTAB_DIE (decl);
26197 if (!die)
26198 return;
26199
26200 dname = dwarf2_name (name, 0);
26201 if (!dname)
26202 return;
26203
26204 attr = get_AT (die, DW_AT_name);
26205 if (attr)
26206 {
26207 struct indirect_string_node *node;
26208
26209 node = find_AT_string (dname);
26210 /* Replace the string. */
26211 attr->dw_attr_val.v.val_str = node;
26212 }
26213
26214 else
26215 add_name_attribute (die, dname);
26216 }
26217
26218 /* True if before or during processing of the first function being emitted. */
26219 static bool in_first_function_p = true;
26220 /* True if loc_note during dwarf2out_var_location call might still be
26221 before first real instruction at address equal to .Ltext0. */
26222 static bool maybe_at_text_label_p = true;
26223 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26224 static unsigned int first_loclabel_num_not_at_text_label;
26225
26226 /* Look ahead for a real insn, or for a begin stmt marker. */
26227
26228 static rtx_insn *
26229 dwarf2out_next_real_insn (rtx_insn *loc_note)
26230 {
26231 rtx_insn *next_real = NEXT_INSN (loc_note);
26232
26233 while (next_real)
26234 if (INSN_P (next_real))
26235 break;
26236 else
26237 next_real = NEXT_INSN (next_real);
26238
26239 return next_real;
26240 }
26241
26242 /* Called by the final INSN scan whenever we see a var location. We
26243 use it to drop labels in the right places, and throw the location in
26244 our lookup table. */
26245
26246 static void
26247 dwarf2out_var_location (rtx_insn *loc_note)
26248 {
26249 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26250 struct var_loc_node *newloc;
26251 rtx_insn *next_real, *next_note;
26252 rtx_insn *call_insn = NULL;
26253 static const char *last_label;
26254 static const char *last_postcall_label;
26255 static bool last_in_cold_section_p;
26256 static rtx_insn *expected_next_loc_note;
26257 tree decl;
26258 bool var_loc_p;
26259
26260 if (!NOTE_P (loc_note))
26261 {
26262 if (CALL_P (loc_note))
26263 {
26264 call_site_count++;
26265 if (SIBLING_CALL_P (loc_note))
26266 tail_call_site_count++;
26267 if (optimize == 0 && !flag_var_tracking)
26268 {
26269 /* When the var-tracking pass is not running, there is no note
26270 for indirect calls whose target is compile-time known. In this
26271 case, process such calls specifically so that we generate call
26272 sites for them anyway. */
26273 rtx x = PATTERN (loc_note);
26274 if (GET_CODE (x) == PARALLEL)
26275 x = XVECEXP (x, 0, 0);
26276 if (GET_CODE (x) == SET)
26277 x = SET_SRC (x);
26278 if (GET_CODE (x) == CALL)
26279 x = XEXP (x, 0);
26280 if (!MEM_P (x)
26281 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26282 || !SYMBOL_REF_DECL (XEXP (x, 0))
26283 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26284 != FUNCTION_DECL))
26285 {
26286 call_insn = loc_note;
26287 loc_note = NULL;
26288 var_loc_p = false;
26289
26290 next_real = dwarf2out_next_real_insn (call_insn);
26291 next_note = NULL;
26292 cached_next_real_insn = NULL;
26293 goto create_label;
26294 }
26295 }
26296 }
26297 return;
26298 }
26299
26300 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26301 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26302 return;
26303
26304 /* Optimize processing a large consecutive sequence of location
26305 notes so we don't spend too much time in next_real_insn. If the
26306 next insn is another location note, remember the next_real_insn
26307 calculation for next time. */
26308 next_real = cached_next_real_insn;
26309 if (next_real)
26310 {
26311 if (expected_next_loc_note != loc_note)
26312 next_real = NULL;
26313 }
26314
26315 next_note = NEXT_INSN (loc_note);
26316 if (! next_note
26317 || next_note->deleted ()
26318 || ! NOTE_P (next_note)
26319 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26320 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
26321 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26322 next_note = NULL;
26323
26324 if (! next_real)
26325 next_real = dwarf2out_next_real_insn (loc_note);
26326
26327 if (next_note)
26328 {
26329 expected_next_loc_note = next_note;
26330 cached_next_real_insn = next_real;
26331 }
26332 else
26333 cached_next_real_insn = NULL;
26334
26335 /* If there are no instructions which would be affected by this note,
26336 don't do anything. */
26337 if (var_loc_p
26338 && next_real == NULL_RTX
26339 && !NOTE_DURING_CALL_P (loc_note))
26340 return;
26341
26342 create_label:
26343
26344 if (next_real == NULL_RTX)
26345 next_real = get_last_insn ();
26346
26347 /* If there were any real insns between the note we processed last time
26348 and this note (or if this is the first note), clear
26349 last_{,postcall_}label so that they are not reused this time. */
26350 if (last_var_location_insn == NULL_RTX
26351 || last_var_location_insn != next_real
26352 || last_in_cold_section_p != in_cold_section_p)
26353 {
26354 last_label = NULL;
26355 last_postcall_label = NULL;
26356 }
26357
26358 if (var_loc_p)
26359 {
26360 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26361 newloc = add_var_loc_to_decl (decl, loc_note,
26362 NOTE_DURING_CALL_P (loc_note)
26363 ? last_postcall_label : last_label);
26364 if (newloc == NULL)
26365 return;
26366 }
26367 else
26368 {
26369 decl = NULL_TREE;
26370 newloc = NULL;
26371 }
26372
26373 /* If there were no real insns between the note we processed last time
26374 and this note, use the label we emitted last time. Otherwise
26375 create a new label and emit it. */
26376 if (last_label == NULL)
26377 {
26378 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26379 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26380 loclabel_num++;
26381 last_label = ggc_strdup (loclabel);
26382 /* See if loclabel might be equal to .Ltext0. If yes,
26383 bump first_loclabel_num_not_at_text_label. */
26384 if (!have_multiple_function_sections
26385 && in_first_function_p
26386 && maybe_at_text_label_p)
26387 {
26388 static rtx_insn *last_start;
26389 rtx_insn *insn;
26390 for (insn = loc_note; insn; insn = previous_insn (insn))
26391 if (insn == last_start)
26392 break;
26393 else if (!NONDEBUG_INSN_P (insn))
26394 continue;
26395 else
26396 {
26397 rtx body = PATTERN (insn);
26398 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26399 continue;
26400 /* Inline asm could occupy zero bytes. */
26401 else if (GET_CODE (body) == ASM_INPUT
26402 || asm_noperands (body) >= 0)
26403 continue;
26404 #ifdef HAVE_attr_length
26405 else if (get_attr_min_length (insn) == 0)
26406 continue;
26407 #endif
26408 else
26409 {
26410 /* Assume insn has non-zero length. */
26411 maybe_at_text_label_p = false;
26412 break;
26413 }
26414 }
26415 if (maybe_at_text_label_p)
26416 {
26417 last_start = loc_note;
26418 first_loclabel_num_not_at_text_label = loclabel_num;
26419 }
26420 }
26421 }
26422
26423 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26424 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26425
26426 if (!var_loc_p)
26427 {
26428 struct call_arg_loc_node *ca_loc
26429 = ggc_cleared_alloc<call_arg_loc_node> ();
26430 rtx_insn *prev
26431 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26432
26433 ca_loc->call_arg_loc_note = loc_note;
26434 ca_loc->next = NULL;
26435 ca_loc->label = last_label;
26436 gcc_assert (prev
26437 && (CALL_P (prev)
26438 || (NONJUMP_INSN_P (prev)
26439 && GET_CODE (PATTERN (prev)) == SEQUENCE
26440 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26441 if (!CALL_P (prev))
26442 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26443 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26444
26445 /* Look for a SYMBOL_REF in the "prev" instruction. */
26446 rtx x = get_call_rtx_from (PATTERN (prev));
26447 if (x)
26448 {
26449 /* Try to get the call symbol, if any. */
26450 if (MEM_P (XEXP (x, 0)))
26451 x = XEXP (x, 0);
26452 /* First, look for a memory access to a symbol_ref. */
26453 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26454 && SYMBOL_REF_DECL (XEXP (x, 0))
26455 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26456 ca_loc->symbol_ref = XEXP (x, 0);
26457 /* Otherwise, look at a compile-time known user-level function
26458 declaration. */
26459 else if (MEM_P (x)
26460 && MEM_EXPR (x)
26461 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26462 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26463 }
26464
26465 ca_loc->block = insn_scope (prev);
26466 if (call_arg_locations)
26467 call_arg_loc_last->next = ca_loc;
26468 else
26469 call_arg_locations = ca_loc;
26470 call_arg_loc_last = ca_loc;
26471 }
26472 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26473 newloc->label = last_label;
26474 else
26475 {
26476 if (!last_postcall_label)
26477 {
26478 sprintf (loclabel, "%s-1", last_label);
26479 last_postcall_label = ggc_strdup (loclabel);
26480 }
26481 newloc->label = last_postcall_label;
26482 }
26483
26484 if (var_loc_p && flag_debug_asm)
26485 {
26486 const char *name = NULL, *sep = " => ", *patstr = NULL;
26487 if (decl && DECL_NAME (decl))
26488 name = IDENTIFIER_POINTER (DECL_NAME (decl));
26489 if (NOTE_VAR_LOCATION_LOC (loc_note))
26490 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
26491 else
26492 {
26493 sep = " ";
26494 patstr = "RESET";
26495 }
26496 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
26497 name, sep, patstr);
26498 }
26499
26500 last_var_location_insn = next_real;
26501 last_in_cold_section_p = in_cold_section_p;
26502 }
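/* A sketch of the label scheme this implements: all location notes that
   share the same following real insn reuse a single label, e.g. on a
   typical ELF target

	.LVL3:
		movl	%eax, %ebx	# next real insn

   while locations recorded during a call (NOTE_DURING_CALL_P) use the
   derived assembler expression ".LVL3-1" as their post-call label.
   With -dA (flag_debug_asm) a comment of roughly the form

		# DEBUG x => (reg:SI 0 ax)

   is also printed for each variable location note; the register name
   and RTL text of course depend on the target.  */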
26503
26504 /* Called from finalize_size_functions for size functions so that their body
26505 can be encoded in the debug info to describe the layout of variable-length
26506 structures. */
26507
26508 static void
26509 dwarf2out_size_function (tree decl)
26510 {
26511 function_to_dwarf_procedure (decl);
26512 }
26513
26514 /* Note in one location list that text section has changed. */
26515
26516 int
26517 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26518 {
26519 var_loc_list *list = *slot;
26520 if (list->first)
26521 list->last_before_switch
26522 = list->last->next ? list->last->next : list->last;
26523 return 1;
26524 }
26525
26526 /* Note in all location lists that text section has changed. */
26527
26528 static void
26529 var_location_switch_text_section (void)
26530 {
26531 if (decl_loc_table == NULL)
26532 return;
26533
26534 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26535 }
26536
26537 /* Create a new line number table. */
26538
26539 static dw_line_info_table *
26540 new_line_info_table (void)
26541 {
26542 dw_line_info_table *table;
26543
26544 table = ggc_cleared_alloc<dw_line_info_table> ();
26545 table->file_num = 1;
26546 table->line_num = 1;
26547 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26548
26549 return table;
26550 }
26551
26552 /* Look up the "current" table into which we emit line info, so
26553 that we don't have to do it for every source line. */
26554
26555 static void
26556 set_cur_line_info_table (section *sec)
26557 {
26558 dw_line_info_table *table;
26559
26560 if (sec == text_section)
26561 table = text_section_line_info;
26562 else if (sec == cold_text_section)
26563 {
26564 table = cold_text_section_line_info;
26565 if (!table)
26566 {
26567 cold_text_section_line_info = table = new_line_info_table ();
26568 table->end_label = cold_end_label;
26569 }
26570 }
26571 else
26572 {
26573 const char *end_label;
26574
26575 if (crtl->has_bb_partition)
26576 {
26577 if (in_cold_section_p)
26578 end_label = crtl->subsections.cold_section_end_label;
26579 else
26580 end_label = crtl->subsections.hot_section_end_label;
26581 }
26582 else
26583 {
26584 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26585 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26586 current_function_funcdef_no);
26587 end_label = ggc_strdup (label);
26588 }
26589
26590 table = new_line_info_table ();
26591 table->end_label = end_label;
26592
26593 vec_safe_push (separate_line_info, table);
26594 }
26595
26596 if (DWARF2_ASM_LINE_DEBUG_INFO)
26597 table->is_stmt = (cur_line_info_table
26598 ? cur_line_info_table->is_stmt
26599 : DWARF_LINE_DEFAULT_IS_STMT_START);
26600 cur_line_info_table = table;
26601 }
26602
26603
26604 /* We need to reset the locations at the beginning of each
26605 function. We can't do this in the end_function hook, because the
26606 declarations that use the locations won't have been output when
26607 that hook is called. Also compute have_multiple_function_sections here. */
26608
26609 static void
26610 dwarf2out_begin_function (tree fun)
26611 {
26612 section *sec = function_section (fun);
26613
26614 if (sec != text_section)
26615 have_multiple_function_sections = true;
26616
26617 if (crtl->has_bb_partition && !cold_text_section)
26618 {
26619 gcc_assert (current_function_decl == fun);
26620 cold_text_section = unlikely_text_section ();
26621 switch_to_section (cold_text_section);
26622 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26623 switch_to_section (sec);
26624 }
26625
26626 dwarf2out_note_section_used ();
26627 call_site_count = 0;
26628 tail_call_site_count = 0;
26629
26630 set_cur_line_info_table (sec);
26631 }
26632
26633 /* Helper function of dwarf2out_end_function, called only after emitting
26634 the very first function into assembly. Check if some .debug_loc range
26635 might end with a .LVL* label that could be equal to .Ltext0.
26636 In that case we must force using absolute addresses in .debug_loc ranges,
26637 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26638 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26639 list terminator.
26640 Set have_multiple_function_sections to true in that case and
26641 terminate htab traversal. */
26642
26643 int
26644 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26645 {
26646 var_loc_list *entry = *slot;
26647 struct var_loc_node *node;
26648
26649 node = entry->first;
26650 if (node && node->next && node->next->label)
26651 {
26652 unsigned int i;
26653 const char *label = node->next->label;
26654 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26655
26656 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26657 {
26658 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26659 if (strcmp (label, loclabel) == 0)
26660 {
26661 have_multiple_function_sections = true;
26662 return 0;
26663 }
26664 }
26665 }
26666 return 1;
26667 }
26668
26669 /* Hook called after emitting a function into assembly.
26670 This does something only for the very first function emitted. */
26671
26672 static void
26673 dwarf2out_end_function (unsigned int)
26674 {
26675 if (in_first_function_p
26676 && !have_multiple_function_sections
26677 && first_loclabel_num_not_at_text_label
26678 && decl_loc_table)
26679 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26680 in_first_function_p = false;
26681 maybe_at_text_label_p = false;
26682 }
26683
26684 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26685 front-ends register a translation unit even before dwarf2out_init is
26686 called. */
26687 static tree main_translation_unit = NULL_TREE;
26688
26689 /* Hook called by front-ends after they built their main translation unit.
26690 Associate comp_unit_die to UNIT. */
26691
26692 static void
26693 dwarf2out_register_main_translation_unit (tree unit)
26694 {
26695 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26696 && main_translation_unit == NULL_TREE);
26697 main_translation_unit = unit;
26698 /* If dwarf2out_init has not been called yet, it will perform the association
26699 itself looking at main_translation_unit. */
26700 if (decl_die_table != NULL)
26701 equate_decl_number_to_die (unit, comp_unit_die ());
26702 }
26703
26704 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26705
26706 static void
26707 push_dw_line_info_entry (dw_line_info_table *table,
26708 enum dw_line_info_opcode opcode, unsigned int val)
26709 {
26710 dw_line_info_entry e;
26711 e.opcode = opcode;
26712 e.val = val;
26713 vec_safe_push (table->entries, e);
26714 }
26715
26716 /* Output a label to mark the beginning of a source code line entry
26717 and record information relating to this source line, in
26718 'line_info_table' for later output of the .debug_line section. */
26719 /* ??? The discriminator parameter ought to be unsigned. */
26720
26721 static void
26722 dwarf2out_source_line (unsigned int line, unsigned int column,
26723 const char *filename,
26724 int discriminator, bool is_stmt)
26725 {
26726 unsigned int file_num;
26727 dw_line_info_table *table;
26728
26729 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26730 return;
26731
26732 /* The discriminator column was added in DWARF 4. Simplify the code
26733 below by removing the discriminator if we're not supposed to output it. */
26734 if (dwarf_version < 4 && dwarf_strict)
26735 discriminator = 0;
26736
26737 if (!debug_column_info)
26738 column = 0;
26739
26740 table = cur_line_info_table;
26741 file_num = maybe_emit_file (lookup_filename (filename));
26742
26743 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26744 the debugger has used the second (possibly duplicate) line number
26745 at the beginning of the function to mark the end of the prologue.
26746 We could eliminate any other duplicates within the function. For
26747 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26748 that second line number entry. */
26749 /* Recall that this end-of-prologue indication is *not* the same thing
26750 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26751 to which the hook corresponds, follows the last insn that was
26752 emitted by gen_prologue. What we need is to precede the first insn
26753 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26754 insn that corresponds to something the user wrote. These may be
26755 very different locations once scheduling is enabled. */
26756
26757 if (0 && file_num == table->file_num
26758 && line == table->line_num
26759 && column == table->column_num
26760 && discriminator == table->discrim_num
26761 && is_stmt == table->is_stmt)
26762 return;
26763
26764 switch_to_section (current_function_section ());
26765
26766 /* If requested, emit something human-readable. */
26767 if (flag_debug_asm)
26768 {
26769 if (debug_column_info)
26770 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
26771 filename, line, column);
26772 else
26773 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
26774 filename, line);
26775 }
26776
26777 if (DWARF2_ASM_LINE_DEBUG_INFO)
26778 {
26779 /* Emit the .loc directive understood by GNU as. */
26780 /* "\t.loc %u %u %u is_stmt %u discriminator %u",
26781 file_num, line, column, is_stmt, discriminator */
26782 fputs ("\t.loc ", asm_out_file);
26783 fprint_ul (asm_out_file, file_num);
26784 putc (' ', asm_out_file);
26785 fprint_ul (asm_out_file, line);
26786 putc (' ', asm_out_file);
26787 fprint_ul (asm_out_file, column);
26788
26789 if (is_stmt != table->is_stmt)
26790 {
26791 fputs (" is_stmt ", asm_out_file);
26792 putc (is_stmt ? '1' : '0', asm_out_file);
26793 }
26794 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26795 {
26796 gcc_assert (discriminator > 0);
26797 fputs (" discriminator ", asm_out_file);
26798 fprint_ul (asm_out_file, (unsigned long) discriminator);
26799 }
26800 putc ('\n', asm_out_file);
26801 }
26802 else
26803 {
26804 unsigned int label_num = ++line_info_label_num;
26805
26806 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26807
26808 push_dw_line_info_entry (table, LI_set_address, label_num);
26809 if (file_num != table->file_num)
26810 push_dw_line_info_entry (table, LI_set_file, file_num);
26811 if (discriminator != table->discrim_num)
26812 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26813 if (is_stmt != table->is_stmt)
26814 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26815 push_dw_line_info_entry (table, LI_set_line, line);
26816 if (debug_column_info)
26817 push_dw_line_info_entry (table, LI_set_column, column);
26818 }
26819
26820 table->file_num = file_num;
26821 table->line_num = line;
26822 table->column_num = column;
26823 table->discrim_num = discriminator;
26824 table->is_stmt = is_stmt;
26825 table->in_use = true;
26826 }
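/* For example, under DWARF2_ASM_LINE_DEBUG_INFO a call for line 42,
   column 7, !is_stmt and discriminator 3 produces a GNU as directive of
   roughly the form

	.loc 1 42 7 is_stmt 0 discriminator 3

   where the is_stmt operand is printed only when it changes from the
   table's current value and the discriminator only when nonzero.
   Without assembler line support we instead emit a LINE_CODE_LABEL
   (".LM<n>") in the code section and push the equivalent LI_* ops into
   cur_line_info_table for later output of .debug_line.  */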
26827
26828 /* Record the beginning of a new source file. */
26829
26830 static void
26831 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26832 {
26833 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26834 {
26835 macinfo_entry e;
26836 e.code = DW_MACINFO_start_file;
26837 e.lineno = lineno;
26838 e.info = ggc_strdup (filename);
26839 vec_safe_push (macinfo_table, e);
26840 }
26841 }
26842
26843 /* Record the end of a source file. */
26844
26845 static void
26846 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26847 {
26848 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26849 {
26850 macinfo_entry e;
26851 e.code = DW_MACINFO_end_file;
26852 e.lineno = lineno;
26853 e.info = NULL;
26854 vec_safe_push (macinfo_table, e);
26855 }
26856 }
26857
26858 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26859 the tail part of the directive line, i.e. the part which is past the
26860 initial whitespace, '#', whitespace, directive-name and whitespace. */
26861
26862 static void
26863 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26864 const char *buffer ATTRIBUTE_UNUSED)
26865 {
26866 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26867 {
26868 macinfo_entry e;
26869 /* Insert a dummy first entry to be able to optimize the whole
26870 predefined macro block using DW_MACRO_import. */
26871 if (macinfo_table->is_empty () && lineno <= 1)
26872 {
26873 e.code = 0;
26874 e.lineno = 0;
26875 e.info = NULL;
26876 vec_safe_push (macinfo_table, e);
26877 }
26878 e.code = DW_MACINFO_define;
26879 e.lineno = lineno;
26880 e.info = ggc_strdup (buffer);
26881 vec_safe_push (macinfo_table, e);
26882 }
26883 }
26884
26885 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26886 the tail part of the directive line, i.e. the part which is past the
26887 initial whitespace, '#', whitespace, directive-name and whitespace. */
26888
26889 static void
26890 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26891 const char *buffer ATTRIBUTE_UNUSED)
26892 {
26893 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26894 {
26895 macinfo_entry e;
26896 /* Insert a dummy first entry to be able to optimize the whole
26897 predefined macro block using DW_MACRO_import. */
26898 if (macinfo_table->is_empty () && lineno <= 1)
26899 {
26900 e.code = 0;
26901 e.lineno = 0;
26902 e.info = NULL;
26903 vec_safe_push (macinfo_table, e);
26904 }
26905 e.code = DW_MACINFO_undef;
26906 e.lineno = lineno;
26907 e.info = ggc_strdup (buffer);
26908 vec_safe_push (macinfo_table, e);
26909 }
26910 }
26911
26912 /* Helpers to manipulate the hash table of macinfo entries. */
26913
26914 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
26915 {
26916 static inline hashval_t hash (const macinfo_entry *);
26917 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
26918 };
26919
26920 inline hashval_t
26921 macinfo_entry_hasher::hash (const macinfo_entry *entry)
26922 {
26923 return htab_hash_string (entry->info);
26924 }
26925
26926 inline bool
26927 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
26928 const macinfo_entry *entry2)
26929 {
26930 return !strcmp (entry1->info, entry2->info);
26931 }
26932
26933 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
26934
26935 /* Output a single .debug_macinfo entry. */
26936
26937 static void
26938 output_macinfo_op (macinfo_entry *ref)
26939 {
26940 int file_num;
26941 size_t len;
26942 struct indirect_string_node *node;
26943 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26944 struct dwarf_file_data *fd;
26945
26946 switch (ref->code)
26947 {
26948 case DW_MACINFO_start_file:
26949 fd = lookup_filename (ref->info);
26950 file_num = maybe_emit_file (fd);
26951 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
26952 dw2_asm_output_data_uleb128 (ref->lineno,
26953 "Included from line number %lu",
26954 (unsigned long) ref->lineno);
26955 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
26956 break;
26957 case DW_MACINFO_end_file:
26958 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
26959 break;
26960 case DW_MACINFO_define:
26961 case DW_MACINFO_undef:
26962 len = strlen (ref->info) + 1;
26963 if (!dwarf_strict
26964 && len > DWARF_OFFSET_SIZE
26965 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
26966 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
26967 {
26968 ref->code = ref->code == DW_MACINFO_define
26969 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
26970 output_macinfo_op (ref);
26971 return;
26972 }
26973 dw2_asm_output_data (1, ref->code,
26974 ref->code == DW_MACINFO_define
26975 ? "Define macro" : "Undefine macro");
26976 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26977 (unsigned long) ref->lineno);
26978 dw2_asm_output_nstring (ref->info, -1, "The macro");
26979 break;
26980 case DW_MACRO_define_strp:
26981 case DW_MACRO_undef_strp:
26982 node = find_AT_string (ref->info);
26983 gcc_assert (node
26984 && (node->form == DW_FORM_strp
26985 || node->form == DW_FORM_GNU_str_index));
26986 dw2_asm_output_data (1, ref->code,
26987 ref->code == DW_MACRO_define_strp
26988 ? "Define macro strp"
26989 : "Undefine macro strp");
26990 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
26991 (unsigned long) ref->lineno);
26992 if (node->form == DW_FORM_strp)
26993 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
26994 debug_str_section, "The macro: \"%s\"",
26995 ref->info);
26996 else
26997 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
26998 ref->info);
26999 break;
27000 case DW_MACRO_import:
27001 dw2_asm_output_data (1, ref->code, "Import");
27002 ASM_GENERATE_INTERNAL_LABEL (label,
27003 DEBUG_MACRO_SECTION_LABEL,
27004 ref->lineno + macinfo_label_base);
27005 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
27006 break;
27007 default:
27008 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
27009 ASM_COMMENT_START, (unsigned long) ref->code);
27010 break;
27011 }
27012 }
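/* As an example of the encoding above, "#define FOO 1" seen at line 3
   would in the plain DW_MACINFO_define form come out roughly as

	.byte	0x1		# Define macro
	.uleb128 0x3		# At line number 3
	.asciz	"FOO 1"		# The macro

   whereas the _strp variants replace the inline string with a
   DWARF_OFFSET_SIZE offset into .debug_str (or a uleb128 string index
   when DW_FORM_GNU_str_index is in use); the exact directives depend on
   the target assembler.  */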
27013
27014 /* Attempt to make a sequence of define/undef macinfo ops shareable with
27015 other compilation units' .debug_macinfo sections. IDX is the index of
27016 the first define/undef op; return the number of ops that should be
27017 emitted in a comdat .debug_macinfo section and emit
27018 a DW_MACRO_import entry referencing it.
27019 If the define/undef entry should be emitted normally, return 0. */
27020
27021 static unsigned
27022 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
27023 macinfo_hash_type **macinfo_htab)
27024 {
27025 macinfo_entry *first, *second, *cur, *inc;
27026 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
27027 unsigned char checksum[16];
27028 struct md5_ctx ctx;
27029 char *grp_name, *tail;
27030 const char *base;
27031 unsigned int i, count, encoded_filename_len, linebuf_len;
27032 macinfo_entry **slot;
27033
27034 first = &(*macinfo_table)[idx];
27035 second = &(*macinfo_table)[idx + 1];
27036
27037 /* Optimize only if there are at least two consecutive define/undef ops,
27038 and either all of them are before first DW_MACINFO_start_file
27039 with lineno {0,1} (i.e. predefined macro block), or all of them are
27040 in some included header file. */
27041 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
27042 return 0;
27043 if (vec_safe_is_empty (files))
27044 {
27045 if (first->lineno > 1 || second->lineno > 1)
27046 return 0;
27047 }
27048 else if (first->lineno == 0)
27049 return 0;
27050
27051 /* Find the last define/undef entry that can be grouped together
27052 with FIRST and at the same time compute the MD5 checksum of their
27053 codes, line numbers and strings. */
27054 md5_init_ctx (&ctx);
27055 for (i = idx; macinfo_table->iterate (i, &cur); i++)
27056 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
27057 break;
27058 else if (vec_safe_is_empty (files) && cur->lineno > 1)
27059 break;
27060 else
27061 {
27062 unsigned char code = cur->code;
27063 md5_process_bytes (&code, 1, &ctx);
27064 checksum_uleb128 (cur->lineno, &ctx);
27065 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
27066 }
27067 md5_finish_ctx (&ctx, checksum);
27068 count = i - idx;
27069
27070 /* From the containing include filename (if any) pick up just
27071 usable characters from its basename. */
27072 if (vec_safe_is_empty (files))
27073 base = "";
27074 else
27075 base = lbasename (files->last ().info);
27076 for (encoded_filename_len = 0, i = 0; base[i]; i++)
27077 if (ISIDNUM (base[i]) || base[i] == '.')
27078 encoded_filename_len++;
27079 /* Count the trailing '.' separator. */
27080 if (encoded_filename_len)
27081 encoded_filename_len++;
27082
27083 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
27084 linebuf_len = strlen (linebuf);
27085
27086 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
27087 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
27088 + 16 * 2 + 1);
27089 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
27090 tail = grp_name + 4;
27091 if (encoded_filename_len)
27092 {
27093 for (i = 0; base[i]; i++)
27094 if (ISIDNUM (base[i]) || base[i] == '.')
27095 *tail++ = base[i];
27096 *tail++ = '.';
27097 }
27098 memcpy (tail, linebuf, linebuf_len);
27099 tail += linebuf_len;
27100 *tail++ = '.';
27101 for (i = 0; i < 16; i++)
27102 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
27103
27104 /* Construct a macinfo_entry for DW_MACRO_import
27105 in the empty vector entry before the first define/undef. */
27106 inc = &(*macinfo_table)[idx - 1];
27107 inc->code = DW_MACRO_import;
27108 inc->lineno = 0;
27109 inc->info = ggc_strdup (grp_name);
27110 if (!*macinfo_htab)
27111 *macinfo_htab = new macinfo_hash_type (10);
27112 /* Avoid emitting duplicates. */
27113 slot = (*macinfo_htab)->find_slot (inc, INSERT);
27114 if (*slot != NULL)
27115 {
27116 inc->code = 0;
27117 inc->info = NULL;
27118 /* If such an entry has been used before, just emit
27119 a DW_MACRO_import op. */
27120 inc = *slot;
27121 output_macinfo_op (inc);
27122 /* And clear all macinfo_entry in the range to avoid emitting them
27123 in the second pass. */
27124 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
27125 {
27126 cur->code = 0;
27127 cur->info = NULL;
27128 }
27129 }
27130 else
27131 {
27132 *slot = inc;
27133 inc->lineno = (*macinfo_htab)->elements ();
27134 output_macinfo_op (inc);
27135 }
27136 return count;
27137 }
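/* To make the group name scheme concrete: a run of define/undef ops
   coming from an include like <stddef.h>, with the first op at line 1
   of that header, would get a comdat key of the form

	wm4.stddef.h.1.<32-hex-digit md5 of the grouped ops>

   ("wm" + offset size, sanitized basename of the innermost include,
   starting line number, checksum).  Identical runs in other translation
   units produce the same key, so the linker keeps only one copy of the
   comdat .debug_macinfo group.  */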
27138
27139 /* Save any strings needed by the macinfo table in the debug str
27140 table. All strings must be collected into the table by the time
27141 index_string is called. */
27142
27143 static void
27144 save_macinfo_strings (void)
27145 {
27146 unsigned len;
27147 unsigned i;
27148 macinfo_entry *ref;
27149
27150 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
27151 {
27152 switch (ref->code)
27153 {
27154 /* Match the logic in output_macinfo_op to decide on
27155 indirect strings. */
27156 case DW_MACINFO_define:
27157 case DW_MACINFO_undef:
27158 len = strlen (ref->info) + 1;
27159 if (!dwarf_strict
27160 && len > DWARF_OFFSET_SIZE
27161 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27162 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27163 set_indirect_string (find_AT_string (ref->info));
27164 break;
27165 case DW_MACRO_define_strp:
27166 case DW_MACRO_undef_strp:
27167 set_indirect_string (find_AT_string (ref->info));
27168 break;
27169 default:
27170 break;
27171 }
27172 }
27173 }
27174
27175 /* Output macinfo section(s). */
27176
27177 static void
27178 output_macinfo (const char *debug_line_label, bool early_lto_debug)
27179 {
27180 unsigned i;
27181 unsigned long length = vec_safe_length (macinfo_table);
27182 macinfo_entry *ref;
27183 vec<macinfo_entry, va_gc> *files = NULL;
27184 macinfo_hash_type *macinfo_htab = NULL;
27185 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
27186
27187 if (! length)
27188 return;
27189
27190 /* output_macinfo* uses these interchangeably. */
27191 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27192 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27193 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27194 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27195
27196 /* AIX Assembler inserts the length, so adjust the reference to match the
27197 offset expected by debuggers. */
27198 strcpy (dl_section_ref, debug_line_label);
27199 if (XCOFF_DEBUGGING_INFO)
27200 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
27201
27202 /* For .debug_macro emit the section header. */
27203 if (!dwarf_strict || dwarf_version >= 5)
27204 {
27205 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27206 "DWARF macro version number");
27207 if (DWARF_OFFSET_SIZE == 8)
27208 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27209 else
27210 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27211 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
27212 debug_line_section, NULL);
27213 }
27214
27215 /* The first loop emits the primary .debug_macinfo section
27216 and clears each macinfo_entry after it has been emitted.
27217 If a longer range of define/undef ops can be optimized using
27218 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
27219 the vector entry before the first define/undef in the range, and the
27220 whole range of define/undef ops is not emitted here but kept for the second loop. */
27221 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27222 {
27223 switch (ref->code)
27224 {
27225 case DW_MACINFO_start_file:
27226 vec_safe_push (files, *ref);
27227 break;
27228 case DW_MACINFO_end_file:
27229 if (!vec_safe_is_empty (files))
27230 files->pop ();
27231 break;
27232 case DW_MACINFO_define:
27233 case DW_MACINFO_undef:
27234 if ((!dwarf_strict || dwarf_version >= 5)
27235 && HAVE_COMDAT_GROUP
27236 && vec_safe_length (files) != 1
27237 && i > 0
27238 && i + 1 < length
27239 && (*macinfo_table)[i - 1].code == 0)
27240 {
27241 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27242 if (count)
27243 {
27244 i += count - 1;
27245 continue;
27246 }
27247 }
27248 break;
27249 case 0:
27250 /* A dummy entry may be inserted at the beginning to be able
27251 to optimize the whole block of predefined macros. */
27252 if (i == 0)
27253 continue;
27254 default:
27255 break;
27256 }
27257 output_macinfo_op (ref);
27258 ref->info = NULL;
27259 ref->code = 0;
27260 }
27261
27262 if (!macinfo_htab)
27263 return;
27264
27265 /* Save the number of transparent includes so we can adjust the
27266 label number for the fat LTO object DWARF. */
27267 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
27268
27269 delete macinfo_htab;
27270 macinfo_htab = NULL;
27271
27272 /* If any DW_MACRO_import ops were used, then at each such entry
27273 terminate the current chain, switch to a new comdat .debug_macinfo
27274 section and emit the define/undef entries within it. */
27275 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27276 switch (ref->code)
27277 {
27278 case 0:
27279 continue;
27280 case DW_MACRO_import:
27281 {
27282 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27283 tree comdat_key = get_identifier (ref->info);
27284 /* Terminate the previous .debug_macinfo section. */
27285 dw2_asm_output_data (1, 0, "End compilation unit");
27286 targetm.asm_out.named_section (debug_macinfo_section_name,
27287 SECTION_DEBUG
27288 | SECTION_LINKONCE
27289 | (early_lto_debug
27290 ? SECTION_EXCLUDE : 0),
27291 comdat_key);
27292 ASM_GENERATE_INTERNAL_LABEL (label,
27293 DEBUG_MACRO_SECTION_LABEL,
27294 ref->lineno + macinfo_label_base);
27295 ASM_OUTPUT_LABEL (asm_out_file, label);
27296 ref->code = 0;
27297 ref->info = NULL;
27298 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27299 "DWARF macro version number");
27300 if (DWARF_OFFSET_SIZE == 8)
27301 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27302 else
27303 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27304 }
27305 break;
27306 case DW_MACINFO_define:
27307 case DW_MACINFO_undef:
27308 output_macinfo_op (ref);
27309 ref->code = 0;
27310 ref->info = NULL;
27311 break;
27312 default:
27313 gcc_unreachable ();
27314 }
27315
27316 macinfo_label_base += macinfo_label_base_adj;
27317 }
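/* Putting the two loops together, for -g3 with transparent includes the
   resulting output is expected to look roughly like

	.debug_macro (primary, referenced by the CU):
	   version, flags, offset into .debug_line
	   DW_MACINFO_start_file ...
	   DW_MACRO_import -> wm4.<group>	(replaces a define/undef run)
	   DW_MACINFO_end_file
	   terminating 0

	.debug_macro in comdat group wm4.<group>:
	   version, flags (no lineptr)
	   the DW_MACINFO_define / DW_MACINFO_undef run itself

   so macro blocks shared between translation units are deduplicated at
   link time.  */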
27318
27319 /* Initialize the various sections and labels for dwarf output, using the
27320 early LTO debug variants if EARLY_LTO_DEBUG is set. Returns the generation
27321 (zero-based number of times the function was called). */
27322
27323 static unsigned
27324 init_sections_and_labels (bool early_lto_debug)
27325 {
27326 /* As we may get called multiple times have a generation count for
27327 labels. */
27328 static unsigned generation = 0;
27329
27330 if (early_lto_debug)
27331 {
27332 if (!dwarf_split_debug_info)
27333 {
27334 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27335 SECTION_DEBUG | SECTION_EXCLUDE,
27336 NULL);
27337 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
27338 SECTION_DEBUG | SECTION_EXCLUDE,
27339 NULL);
27340 debug_macinfo_section_name
27341 = ((dwarf_strict && dwarf_version < 5)
27342 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
27343 debug_macinfo_section = get_section (debug_macinfo_section_name,
27344 SECTION_DEBUG
27345 | SECTION_EXCLUDE, NULL);
27346 /* For macro info we have to refer to a debug_line section, so,
27347 similarly to split DWARF, emit a skeleton one for early debug. */
27348 debug_skeleton_line_section
27349 = get_section (DEBUG_LTO_LINE_SECTION,
27350 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27351 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27352 DEBUG_SKELETON_LINE_SECTION_LABEL,
27353 generation);
27354 }
27355 else
27356 {
27357 /* ??? Which of the following do we need early? */
27358 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
27359 SECTION_DEBUG | SECTION_EXCLUDE,
27360 NULL);
27361 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
27362 SECTION_DEBUG | SECTION_EXCLUDE,
27363 NULL);
27364 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27365 SECTION_DEBUG
27366 | SECTION_EXCLUDE, NULL);
27367 debug_skeleton_abbrev_section
27368 = get_section (DEBUG_LTO_ABBREV_SECTION,
27369 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27370 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27371 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27372 generation);
27373
27374 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27375 stay in the main .o, but the skeleton_line goes into the split
27376 off dwo. */
27377 debug_skeleton_line_section
27378 = get_section (DEBUG_LTO_LINE_SECTION,
27379 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27380 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27381 DEBUG_SKELETON_LINE_SECTION_LABEL,
27382 generation);
27383 debug_str_offsets_section
27384 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
27385 SECTION_DEBUG | SECTION_EXCLUDE,
27386 NULL);
27387 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27388 DEBUG_SKELETON_INFO_SECTION_LABEL,
27389 generation);
27390 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
27391 DEBUG_STR_DWO_SECTION_FLAGS,
27392 NULL);
27393 debug_macinfo_section_name
27394 = ((dwarf_strict && dwarf_version < 5)
27395 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
27396 debug_macinfo_section = get_section (debug_macinfo_section_name,
27397 SECTION_DEBUG | SECTION_EXCLUDE,
27398 NULL);
27399 }
27400 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
27401 DEBUG_STR_SECTION_FLAGS
27402 | SECTION_EXCLUDE, NULL);
27403 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27404 debug_line_str_section
27405 = get_section (DEBUG_LTO_LINE_STR_SECTION,
27406 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
27407 }
27408 else
27409 {
27410 if (!dwarf_split_debug_info)
27411 {
27412 debug_info_section = get_section (DEBUG_INFO_SECTION,
27413 SECTION_DEBUG, NULL);
27414 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27415 SECTION_DEBUG, NULL);
27416 debug_loc_section = get_section (dwarf_version >= 5
27417 ? DEBUG_LOCLISTS_SECTION
27418 : DEBUG_LOC_SECTION,
27419 SECTION_DEBUG, NULL);
27420 debug_macinfo_section_name
27421 = ((dwarf_strict && dwarf_version < 5)
27422 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
27423 debug_macinfo_section = get_section (debug_macinfo_section_name,
27424 SECTION_DEBUG, NULL);
27425 }
27426 else
27427 {
27428 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27429 SECTION_DEBUG | SECTION_EXCLUDE,
27430 NULL);
27431 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27432 SECTION_DEBUG | SECTION_EXCLUDE,
27433 NULL);
27434 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27435 SECTION_DEBUG, NULL);
27436 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27437 SECTION_DEBUG, NULL);
27438 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27439 SECTION_DEBUG, NULL);
27440 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27441 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27442 generation);
27443
27444 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27445 stay in the main .o, but the skeleton_line goes into the
27446 split off dwo. */
27447 debug_skeleton_line_section
27448 = get_section (DEBUG_DWO_LINE_SECTION,
27449 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27450 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27451 DEBUG_SKELETON_LINE_SECTION_LABEL,
27452 generation);
27453 debug_str_offsets_section
27454 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27455 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27456 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27457 DEBUG_SKELETON_INFO_SECTION_LABEL,
27458 generation);
27459 debug_loc_section = get_section (dwarf_version >= 5
27460 ? DEBUG_DWO_LOCLISTS_SECTION
27461 : DEBUG_DWO_LOC_SECTION,
27462 SECTION_DEBUG | SECTION_EXCLUDE,
27463 NULL);
27464 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27465 DEBUG_STR_DWO_SECTION_FLAGS,
27466 NULL);
27467 debug_macinfo_section_name
27468 = ((dwarf_strict && dwarf_version < 5)
27469 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
27470 debug_macinfo_section = get_section (debug_macinfo_section_name,
27471 SECTION_DEBUG | SECTION_EXCLUDE,
27472 NULL);
27473 }
27474 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27475 SECTION_DEBUG, NULL);
27476 debug_line_section = get_section (DEBUG_LINE_SECTION,
27477 SECTION_DEBUG, NULL);
27478 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27479 SECTION_DEBUG, NULL);
27480 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27481 SECTION_DEBUG, NULL);
27482 debug_str_section = get_section (DEBUG_STR_SECTION,
27483 DEBUG_STR_SECTION_FLAGS, NULL);
27484 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27485 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27486 DEBUG_STR_SECTION_FLAGS, NULL);
27487 debug_ranges_section = get_section (dwarf_version >= 5
27488 ? DEBUG_RNGLISTS_SECTION
27489 : DEBUG_RANGES_SECTION,
27490 SECTION_DEBUG, NULL);
27491 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27492 SECTION_DEBUG, NULL);
27493 }
27494
27495 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27496 DEBUG_ABBREV_SECTION_LABEL, generation);
27497 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27498 DEBUG_INFO_SECTION_LABEL, generation);
27499 info_section_emitted = false;
27500 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27501 DEBUG_LINE_SECTION_LABEL, generation);
27502 /* There are up to 4 unique ranges labels per generation.
27503 See also output_rnglists. */
27504 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27505 DEBUG_RANGES_SECTION_LABEL, generation * 4);
27506 if (dwarf_version >= 5 && dwarf_split_debug_info)
27507 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27508 DEBUG_RANGES_SECTION_LABEL,
27509 1 + generation * 4);
27510 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27511 DEBUG_ADDR_SECTION_LABEL, generation);
27512 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27513 (dwarf_strict && dwarf_version < 5)
27514 ? DEBUG_MACINFO_SECTION_LABEL
27515 : DEBUG_MACRO_SECTION_LABEL, generation);
27516 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
27517 generation);
27518
27519 ++generation;
27520 return generation - 1;
27521 }
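/* For reference: with the default section label prefixes, generation 0
   produces internal labels that typically look like .Ldebug_info0,
   .Ldebug_abbrev0, etc. (the exact spelling is target-dependent, via
   ASM_GENERATE_INTERNAL_LABEL), while the ranges labels advance by 4 per
   generation as noted above.  The value returned is the generation used
   for the labels just created; the static counter has already been
   advanced for the next call.  */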
27522
27523 /* Set up for Dwarf output at the start of compilation. */
27524
27525 static void
27526 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27527 {
27528 /* Allocate the file_table. */
27529 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27530
27531 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27532 /* Allocate the decl_die_table. */
27533 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27534
27535 /* Allocate the decl_loc_table. */
27536 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27537
27538 /* Allocate the cached_dw_loc_list_table. */
27539 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27540
27541 /* Allocate the initial hunk of the decl_scope_table. */
27542 vec_alloc (decl_scope_table, 256);
27543
27544 /* Allocate the initial hunk of the abbrev_die_table. */
27545 vec_alloc (abbrev_die_table, 256);
27546 /* The zeroth entry is allocated but unused. */
27547 abbrev_die_table->quick_push (NULL);
27548
27549 /* Allocate the dwarf_proc_stack_usage_map. */
27550 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27551
27552 /* Allocate the pubtypes and pubnames vectors. */
27553 vec_alloc (pubname_table, 32);
27554 vec_alloc (pubtype_table, 32);
27555
27556 vec_alloc (incomplete_types, 64);
27557
27558 vec_alloc (used_rtx_array, 32);
27559
27560 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27561 vec_alloc (macinfo_table, 64);
27562 #endif
27563
27564 /* If front-ends already registered a main translation unit but we were not
27565 ready to perform the association, do this now. */
27566 if (main_translation_unit != NULL_TREE)
27567 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27568 }
27569
27570 /* Called before compile () starts outputting functions, variables
27571 and toplevel asms into assembly. */
27572
27573 static void
27574 dwarf2out_assembly_start (void)
27575 {
27576 if (text_section_line_info)
27577 return;
27578
27579 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27580 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27581 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27582 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27583 COLD_TEXT_SECTION_LABEL, 0);
27584 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27585
27586 switch_to_section (text_section);
27587 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27588 #endif
27589
27590 /* Make sure the line number table for .text always exists. */
27591 text_section_line_info = new_line_info_table ();
27592 text_section_line_info->end_label = text_end_label;
27593
27594 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27595 cur_line_info_table = text_section_line_info;
27596 #endif
27597
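/* If the assembler supports .cfi_sections, we emit CFI via asm
   directives and no .eh_frame is needed, direct the CFI output to
   .debug_frame only.  */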
27598 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27599 && dwarf2out_do_cfi_asm ()
27600 && !dwarf2out_do_eh_frame ())
27601 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27602 }
27603
27604 /* A helper function for dwarf2out_finish called through
27605 htab_traverse. Assign a string its index. All strings must be
27606 collected into the table by the time index_string is called,
27607 because the indexing code relies on htab_traverse to traverse nodes
27608 in the same order for each run. */
27609
27610 int
27611 index_string (indirect_string_node **h, unsigned int *index)
27612 {
27613 indirect_string_node *node = *h;
27614
27615 find_string_form (node);
27616 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27617 {
27618 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27619 node->index = *index;
27620 *index += 1;
27621 }
27622 return 1;
27623 }
27624
27625 /* A helper function for output_indirect_strings called through
27626 htab_traverse. Output the offset to a string and update the
27627 current offset. */
27628
27629 int
27630 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27631 {
27632 indirect_string_node *node = *h;
27633
27634 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27635 {
27636 /* Assert that this node has been assigned an index. */
27637 gcc_assert (node->index != NO_INDEX_ASSIGNED
27638 && node->index != NOT_INDEXED);
27639 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27640 "indexed string 0x%x: %s", node->index, node->str);
27641 *offset += strlen (node->str) + 1;
27642 }
27643 return 1;
27644 }
27645
27646 /* A helper function for dwarf2out_finish called through
27647 htab_traverse. Output the indexed string. */
27648
27649 int
27650 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27651 {
27652 struct indirect_string_node *node = *h;
27653
27654 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27655 {
27656 /* Assert that the strings are output in the same order as their
27657 indexes were assigned. */
27658 gcc_assert (*cur_idx == node->index);
27659 assemble_string (node->str, strlen (node->str) + 1);
27660 *cur_idx += 1;
27661 }
27662 return 1;
27663 }
27664
27665 /* A helper function for dwarf2out_finish called through
27666 htab_traverse. Emit one queued .debug_str string. */
27667
27668 int
27669 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27670 {
27671 struct indirect_string_node *node = *h;
27672
27673 node->form = find_string_form (node);
27674 if (node->form == form && node->refcount > 0)
27675 {
27676 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27677 assemble_string (node->str, strlen (node->str) + 1);
27678 }
27679
27680 return 1;
27681 }
27682
27683 /* Output the indexed string table. */
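/* For split DWARF this relies on three passes over debug_str_hash that
   must all visit the nodes in the same order: index_string (called from
   dwarf2out_finish) assigns the indexes, output_index_string_offset
   writes the offset table into debug_str_offsets_section, and
   output_index_string emits the strings into debug_str_dwo_section.  */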
27684
27685 static void
27686 output_indirect_strings (void)
27687 {
27688 switch_to_section (debug_str_section);
27689 if (!dwarf_split_debug_info)
27690 debug_str_hash->traverse<enum dwarf_form,
27691 output_indirect_string> (DW_FORM_strp);
27692 else
27693 {
27694 unsigned int offset = 0;
27695 unsigned int cur_idx = 0;
27696
27697 skeleton_debug_str_hash->traverse<enum dwarf_form,
27698 output_indirect_string> (DW_FORM_strp);
27699
27700 switch_to_section (debug_str_offsets_section);
27701 debug_str_hash->traverse_noresize
27702 <unsigned int *, output_index_string_offset> (&offset);
27703 switch_to_section (debug_str_dwo_section);
27704 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27705 (&cur_idx);
27706 }
27707 }
27708
27709 /* Callback for htab_traverse to assign an index to an entry in the
27710 table, and to write that entry to the .debug_addr section. */
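/* Each live entry becomes one address-sized slot in the address table:
   ate_kind_rtx and ate_kind_label entries are emitted with
   dw2_asm_output_addr_rtx / dw2_asm_output_addr, while ate_kind_rtx_dtprel
   entries go through the target's output_dwarf_dtprel hook.  Entries with
   a zero refcount are skipped.  */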
27711
27712 int
27713 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27714 {
27715 addr_table_entry *entry = *slot;
27716
27717 if (entry->refcount == 0)
27718 {
27719 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27720 || entry->index == NOT_INDEXED);
27721 return 1;
27722 }
27723
27724 gcc_assert (entry->index == *cur_index);
27725 (*cur_index)++;
27726
27727 switch (entry->kind)
27728 {
27729 case ate_kind_rtx:
27730 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27731 "0x%x", entry->index);
27732 break;
27733 case ate_kind_rtx_dtprel:
27734 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27735 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27736 DWARF2_ADDR_SIZE,
27737 entry->addr.rtl);
27738 fputc ('\n', asm_out_file);
27739 break;
27740 case ate_kind_label:
27741 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27742 "0x%x", entry->index);
27743 break;
27744 default:
27745 gcc_unreachable ();
27746 }
27747 return 1;
27748 }
27749
27750 /* Produce the .debug_addr section. */
27751
27752 static void
27753 output_addr_table (void)
27754 {
27755 unsigned int index = 0;
27756 if (addr_index_table == NULL || addr_index_table->size () == 0)
27757 return;
27758
27759 switch_to_section (debug_addr_section);
27760 addr_index_table
27761 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27762 }
27763
27764 #if ENABLE_ASSERT_CHECKING
27765 /* Verify that all marks are clear. */
27766
27767 static void
27768 verify_marks_clear (dw_die_ref die)
27769 {
27770 dw_die_ref c;
27771
27772 gcc_assert (! die->die_mark);
27773 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27774 }
27775 #endif /* ENABLE_ASSERT_CHECKING */
27776
27777 /* Clear the marks for a die and its children.
27778 Do nothing if the mark isn't set. */
27779
27780 static void
27781 prune_unmark_dies (dw_die_ref die)
27782 {
27783 dw_die_ref c;
27784
27785 if (die->die_mark)
27786 die->die_mark = 0;
27787 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27788 }
27789
27790 /* Given LOC that is referenced by a DIE we're marking as used, find all
27791 DWARF procedures it references and mark them as used as well. */
27792
27793 static void
27794 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27795 {
27796 for (; loc != NULL; loc = loc->dw_loc_next)
27797 switch (loc->dw_loc_opc)
27798 {
27799 case DW_OP_implicit_pointer:
27800 case DW_OP_convert:
27801 case DW_OP_reinterpret:
27802 case DW_OP_GNU_implicit_pointer:
27803 case DW_OP_GNU_convert:
27804 case DW_OP_GNU_reinterpret:
27805 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27806 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27807 break;
27808 case DW_OP_GNU_variable_value:
27809 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
27810 {
27811 dw_die_ref ref
27812 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
27813 if (ref == NULL)
27814 break;
27815 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
27816 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
27817 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
27818 }
27819 /* FALLTHRU */
27820 case DW_OP_call2:
27821 case DW_OP_call4:
27822 case DW_OP_call_ref:
27823 case DW_OP_const_type:
27824 case DW_OP_GNU_const_type:
27825 case DW_OP_GNU_parameter_ref:
27826 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27827 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27828 break;
27829 case DW_OP_regval_type:
27830 case DW_OP_deref_type:
27831 case DW_OP_GNU_regval_type:
27832 case DW_OP_GNU_deref_type:
27833 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27834 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27835 break;
27836 case DW_OP_entry_value:
27837 case DW_OP_GNU_entry_value:
27838 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27839 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27840 break;
27841 default:
27842 break;
27843 }
27844 }
27845
27846 /* Given DIE that we're marking as used, find any other dies
27847 it references as attributes and mark them as used. */
27848
27849 static void
27850 prune_unused_types_walk_attribs (dw_die_ref die)
27851 {
27852 dw_attr_node *a;
27853 unsigned ix;
27854
27855 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27856 {
27857 switch (AT_class (a))
27858 {
27859 /* Make sure DWARF procedures referenced by location descriptions will
27860 get emitted. */
27861 case dw_val_class_loc:
27862 prune_unused_types_walk_loc_descr (AT_loc (a));
27863 break;
27864 case dw_val_class_loc_list:
27865 for (dw_loc_list_ref list = AT_loc_list (a);
27866 list != NULL;
27867 list = list->dw_loc_next)
27868 prune_unused_types_walk_loc_descr (list->expr);
27869 break;
27870
27871 case dw_val_class_die_ref:
27872 /* A reference to another DIE.
27873 Make sure that it will get emitted.
27874 If it was broken out into a comdat group, don't follow it. */
27875 if (! AT_ref (a)->comdat_type_p
27876 || a->dw_attr == DW_AT_specification)
27877 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27878 break;
27879
27880 case dw_val_class_str:
27881 /* Set the string's refcount to 0 so that prune_unused_types_mark
27882 accounts properly for it. */
27883 a->dw_attr_val.v.val_str->refcount = 0;
27884 break;
27885
27886 default:
27887 break;
27888 }
27889 }
27890 }
27891
27892 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
27893
27894 static void
27895 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
27896 {
27897 dw_die_ref c;
27898
27899 if (die == NULL || die->die_child == NULL)
27900 return;
27901 c = die->die_child;
27902 do
27903 {
27904 if (is_template_parameter (c))
27905 prune_unused_types_mark (c, 1);
27906 c = c->die_sib;
27907 } while (c && c != die->die_child);
27908 }
27909
27910 /* Mark DIE as being used. If DOKIDS is true, then walk down
27911 to DIE's children. */
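/* die_mark acts as a small state machine during pruning: 0 means the DIE
   has not been visited yet, 1 means it has been marked as used, and 2
   means its children have been walked as well.  prune_unmark_dies resets
   the marks afterwards.  */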
27912
27913 static void
27914 prune_unused_types_mark (dw_die_ref die, int dokids)
27915 {
27916 dw_die_ref c;
27917
27918 if (die->die_mark == 0)
27919 {
27920 /* We haven't done this node yet. Mark it as used. */
27921 die->die_mark = 1;
27922 /* If this is the DIE of a generic type instantiation,
27923 mark the children DIEs that describe its generic parms and
27924 args. */
27925 prune_unused_types_mark_generic_parms_dies (die);
27926
27927 /* We also have to mark its parents as used.
27928 (But we don't want to mark our parent's kids due to this,
27929 unless it is a class.) */
27930 if (die->die_parent)
27931 prune_unused_types_mark (die->die_parent,
27932 class_scope_p (die->die_parent));
27933
27934 /* Mark any referenced nodes. */
27935 prune_unused_types_walk_attribs (die);
27936
27937 /* If this node is a specification,
27938 also mark the definition, if it exists. */
27939 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
27940 prune_unused_types_mark (die->die_definition, 1);
27941 }
27942
27943 if (dokids && die->die_mark != 2)
27944 {
27945 /* We need to walk the children, but haven't done so yet.
27946 Remember that we've walked the kids. */
27947 die->die_mark = 2;
27948
27949 /* If this is an array type, we need to make sure our
27950 kids get marked, even if they're types. If we're
27951 breaking out types into comdat sections, do this
27952 for all type definitions. */
27953 if (die->die_tag == DW_TAG_array_type
27954 || (use_debug_types
27955 && is_type_die (die) && ! is_declaration_die (die)))
27956 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
27957 else
27958 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
27959 }
27960 }
27961
27962 /* For local classes, check whether any static member functions were
27963 emitted and, if so, mark them. */
27964
27965 static void
27966 prune_unused_types_walk_local_classes (dw_die_ref die)
27967 {
27968 dw_die_ref c;
27969
27970 if (die->die_mark == 2)
27971 return;
27972
27973 switch (die->die_tag)
27974 {
27975 case DW_TAG_structure_type:
27976 case DW_TAG_union_type:
27977 case DW_TAG_class_type:
27978 break;
27979
27980 case DW_TAG_subprogram:
27981 if (!get_AT_flag (die, DW_AT_declaration)
27982 || die->die_definition != NULL)
27983 prune_unused_types_mark (die, 1);
27984 return;
27985
27986 default:
27987 return;
27988 }
27989
27990 /* Mark children. */
27991 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
27992 }
27993
27994 /* Walk the tree DIE and mark types that we actually use. */
27995
27996 static void
27997 prune_unused_types_walk (dw_die_ref die)
27998 {
27999 dw_die_ref c;
28000
28001 /* Don't do anything if this node is already marked and
28002 children have been marked as well. */
28003 if (die->die_mark == 2)
28004 return;
28005
28006 switch (die->die_tag)
28007 {
28008 case DW_TAG_structure_type:
28009 case DW_TAG_union_type:
28010 case DW_TAG_class_type:
28011 if (die->die_perennial_p)
28012 break;
28013
28014 for (c = die->die_parent; c; c = c->die_parent)
28015 if (c->die_tag == DW_TAG_subprogram)
28016 break;
28017
28018 /* Finding used static member functions inside classes
28019 is needed only for local classes, because for other classes
28020 static member function DIEs with DW_AT_specification
28021 are emitted outside of the DW_TAG_*_type. If we ever change
28022 that, we'd need to call this even for non-local classes. */
28023 if (c)
28024 prune_unused_types_walk_local_classes (die);
28025
28026 /* It's a type node --- don't mark it. */
28027 return;
28028
28029 case DW_TAG_const_type:
28030 case DW_TAG_packed_type:
28031 case DW_TAG_pointer_type:
28032 case DW_TAG_reference_type:
28033 case DW_TAG_rvalue_reference_type:
28034 case DW_TAG_volatile_type:
28035 case DW_TAG_typedef:
28036 case DW_TAG_array_type:
28037 case DW_TAG_interface_type:
28038 case DW_TAG_friend:
28039 case DW_TAG_enumeration_type:
28040 case DW_TAG_subroutine_type:
28041 case DW_TAG_string_type:
28042 case DW_TAG_set_type:
28043 case DW_TAG_subrange_type:
28044 case DW_TAG_ptr_to_member_type:
28045 case DW_TAG_file_type:
28046 /* Type nodes are useful only when other DIEs reference them --- don't
28047 mark them. */
28048 /* FALLTHROUGH */
28049
28050 case DW_TAG_dwarf_procedure:
28051 /* Likewise for DWARF procedures. */
28052
28053 if (die->die_perennial_p)
28054 break;
28055
28056 return;
28057
28058 default:
28059 /* Mark everything else. */
28060 break;
28061 }
28062
28063 if (die->die_mark == 0)
28064 {
28065 die->die_mark = 1;
28066
28067 /* Now, mark any dies referenced from here. */
28068 prune_unused_types_walk_attribs (die);
28069 }
28070
28071 die->die_mark = 2;
28072
28073 /* Mark children. */
28074 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28075 }
28076
28077 /* Increment the string counts on strings referred to from DIE's
28078 attributes. */
28079
28080 static void
28081 prune_unused_types_update_strings (dw_die_ref die)
28082 {
28083 dw_attr_node *a;
28084 unsigned ix;
28085
28086 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28087 if (AT_class (a) == dw_val_class_str)
28088 {
28089 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
28090 s->refcount++;
28091 /* Avoid unnecessarily putting strings that are used less than
28092 twice in the hash table. */
28093 if (s->refcount
28094 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
28095 {
28096 indirect_string_node **slot
28097 = debug_str_hash->find_slot_with_hash (s->str,
28098 htab_hash_string (s->str),
28099 INSERT);
28100 gcc_assert (*slot == NULL);
28101 *slot = s;
28102 }
28103 }
28104 }
28105
28106 /* Mark DIE and its children as removed. */
28107
28108 static void
28109 mark_removed (dw_die_ref die)
28110 {
28111 dw_die_ref c;
28112 die->removed = true;
28113 FOR_EACH_CHILD (die, c, mark_removed (c));
28114 }
28115
28116 /* Remove from the tree DIE any dies that aren't marked. */
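/* As elsewhere in this file, DIE's children form a circular singly-linked
   list through die_sib, with die->die_child pointing at the last child;
   the loop below splices unmarked children out of that ring while keeping
   die->die_child pointing at a marked (or null) child.  */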
28117
28118 static void
28119 prune_unused_types_prune (dw_die_ref die)
28120 {
28121 dw_die_ref c;
28122
28123 gcc_assert (die->die_mark);
28124 prune_unused_types_update_strings (die);
28125
28126 if (! die->die_child)
28127 return;
28128
28129 c = die->die_child;
28130 do {
28131 dw_die_ref prev = c, next;
28132 for (c = c->die_sib; ! c->die_mark; c = next)
28133 if (c == die->die_child)
28134 {
28135 /* No marked children between 'prev' and the end of the list. */
28136 if (prev == c)
28137 /* No marked children at all. */
28138 die->die_child = NULL;
28139 else
28140 {
28141 prev->die_sib = c->die_sib;
28142 die->die_child = prev;
28143 }
28144 c->die_sib = NULL;
28145 mark_removed (c);
28146 return;
28147 }
28148 else
28149 {
28150 next = c->die_sib;
28151 c->die_sib = NULL;
28152 mark_removed (c);
28153 }
28154
28155 if (c != prev->die_sib)
28156 prev->die_sib = c;
28157 prune_unused_types_prune (c);
28158 } while (c != die->die_child);
28159 }
28160
28161 /* Remove dies representing declarations that we never use. */
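/* Roughly, the pass proceeds as follows: premark types used by global
   variables; walk the CU, limbo and comdat type DIEs to set the marks;
   additionally mark pubnames, marked base types and the DIEs of functions
   directly called from code compiled with -fvar-tracking-assignments;
   then empty the string hash tables, prune every unmarked DIE and finally
   clear the marks again.  */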
28162
28163 static void
28164 prune_unused_types (void)
28165 {
28166 unsigned int i;
28167 limbo_die_node *node;
28168 comdat_type_node *ctnode;
28169 pubname_entry *pub;
28170 dw_die_ref base_type;
28171
28172 #if ENABLE_ASSERT_CHECKING
28173 /* All the marks should already be clear. */
28174 verify_marks_clear (comp_unit_die ());
28175 for (node = limbo_die_list; node; node = node->next)
28176 verify_marks_clear (node->die);
28177 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28178 verify_marks_clear (ctnode->root_die);
28179 #endif /* ENABLE_ASSERT_CHECKING */
28180
28181 /* Mark types that are used in global variables. */
28182 premark_types_used_by_global_vars ();
28183
28184 /* Set the mark on nodes that are actually used. */
28185 prune_unused_types_walk (comp_unit_die ());
28186 for (node = limbo_die_list; node; node = node->next)
28187 prune_unused_types_walk (node->die);
28188 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28189 {
28190 prune_unused_types_walk (ctnode->root_die);
28191 prune_unused_types_mark (ctnode->type_die, 1);
28192 }
28193
28194 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
28195 are unusual in that they are pubnames that are the children of pubtypes.
28196 They should only be marked via their parent DW_TAG_enumeration_type die,
28197 not as roots in themselves. */
28198 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
28199 if (pub->die->die_tag != DW_TAG_enumerator)
28200 prune_unused_types_mark (pub->die, 1);
28201 for (i = 0; base_types.iterate (i, &base_type); i++)
28202 prune_unused_types_mark (base_type, 1);
28203
28204 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
28205 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
28206 callees). */
28207 cgraph_node *cnode;
28208 FOR_EACH_FUNCTION (cnode)
28209 if (cnode->referred_to_p (false))
28210 {
28211 dw_die_ref die = lookup_decl_die (cnode->decl);
28212 if (die == NULL || die->die_mark)
28213 continue;
28214 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
28215 if (e->caller != cnode
28216 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
28217 {
28218 prune_unused_types_mark (die, 1);
28219 break;
28220 }
28221 }
28222
28223 if (debug_str_hash)
28224 debug_str_hash->empty ();
28225 if (skeleton_debug_str_hash)
28226 skeleton_debug_str_hash->empty ();
28227 prune_unused_types_prune (comp_unit_die ());
28228 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
28229 {
28230 node = *pnode;
28231 if (!node->die->die_mark)
28232 *pnode = node->next;
28233 else
28234 {
28235 prune_unused_types_prune (node->die);
28236 pnode = &node->next;
28237 }
28238 }
28239 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28240 prune_unused_types_prune (ctnode->root_die);
28241
28242 /* Leave the marks clear. */
28243 prune_unmark_dies (comp_unit_die ());
28244 for (node = limbo_die_list; node; node = node->next)
28245 prune_unmark_dies (node->die);
28246 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28247 prune_unmark_dies (ctnode->root_die);
28248 }
28249
28250 /* Helpers to manipulate hash table of comdat type units. */
28251
28252 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
28253 {
28254 static inline hashval_t hash (const comdat_type_node *);
28255 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
28256 };
28257
28258 inline hashval_t
28259 comdat_type_hasher::hash (const comdat_type_node *type_node)
28260 {
28261 hashval_t h;
28262 memcpy (&h, type_node->signature, sizeof (h));
28263 return h;
28264 }
28265
28266 inline bool
28267 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
28268 const comdat_type_node *type_node_2)
28269 {
28270 return (! memcmp (type_node_1->signature, type_node_2->signature,
28271 DWARF_TYPE_SIGNATURE_SIZE));
28272 }
28273
28274 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to DIE
28275 to the location where it would have been added had we known the
28276 DECL_ASSEMBLER_NAME when the other attributes were added. This will
28277 probably improve the compactness of the debug info by removing
28278 equivalent abbrevs, and it hides any differences caused by deferring
28279 the computation of the assembler name, triggered by e.g. PCH. */
28280
28281 static inline void
28282 move_linkage_attr (dw_die_ref die)
28283 {
28284 unsigned ix = vec_safe_length (die->die_attr);
28285 dw_attr_node linkage = (*die->die_attr)[ix - 1];
28286
28287 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
28288 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
28289
28290 while (--ix > 0)
28291 {
28292 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
28293
28294 if (prev->dw_attr == DW_AT_decl_line
28295 || prev->dw_attr == DW_AT_decl_column
28296 || prev->dw_attr == DW_AT_name)
28297 break;
28298 }
28299
28300 if (ix != vec_safe_length (die->die_attr) - 1)
28301 {
28302 die->die_attr->pop ();
28303 die->die_attr->quick_insert (ix, linkage);
28304 }
28305 }
28306
28307 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28308 referenced from typed stack ops and count how often they are used. */
28309
28310 static void
28311 mark_base_types (dw_loc_descr_ref loc)
28312 {
28313 dw_die_ref base_type = NULL;
28314
28315 for (; loc; loc = loc->dw_loc_next)
28316 {
28317 switch (loc->dw_loc_opc)
28318 {
28319 case DW_OP_regval_type:
28320 case DW_OP_deref_type:
28321 case DW_OP_GNU_regval_type:
28322 case DW_OP_GNU_deref_type:
28323 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28324 break;
28325 case DW_OP_convert:
28326 case DW_OP_reinterpret:
28327 case DW_OP_GNU_convert:
28328 case DW_OP_GNU_reinterpret:
28329 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28330 continue;
28331 /* FALLTHRU */
28332 case DW_OP_const_type:
28333 case DW_OP_GNU_const_type:
28334 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28335 break;
28336 case DW_OP_entry_value:
28337 case DW_OP_GNU_entry_value:
28338 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28339 continue;
28340 default:
28341 continue;
28342 }
28343 gcc_assert (base_type->die_parent == comp_unit_die ());
28344 if (base_type->die_mark)
28345 base_type->die_mark++;
28346 else
28347 {
28348 base_types.safe_push (base_type);
28349 base_type->die_mark = 1;
28350 }
28351 }
28352 }
28353
28354 /* Comparison function for sorting marked base types. */
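/* The ordering is: larger use count (die_mark) first, then larger
   DW_AT_byte_size, DW_AT_encoding and DW_AT_alignment first, so that the
   most frequently referenced base types end up earliest in the CU and
   identical-looking types end up adjacent.  */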
28355
28356 static int
28357 base_type_cmp (const void *x, const void *y)
28358 {
28359 dw_die_ref dx = *(const dw_die_ref *) x;
28360 dw_die_ref dy = *(const dw_die_ref *) y;
28361 unsigned int byte_size1, byte_size2;
28362 unsigned int encoding1, encoding2;
28363 unsigned int align1, align2;
28364 if (dx->die_mark > dy->die_mark)
28365 return -1;
28366 if (dx->die_mark < dy->die_mark)
28367 return 1;
28368 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28369 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28370 if (byte_size1 < byte_size2)
28371 return 1;
28372 if (byte_size1 > byte_size2)
28373 return -1;
28374 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28375 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28376 if (encoding1 < encoding2)
28377 return 1;
28378 if (encoding1 > encoding2)
28379 return -1;
28380 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28381 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28382 if (align1 < align2)
28383 return 1;
28384 if (align1 > align2)
28385 return -1;
28386 return 0;
28387 }
28388
28389 /* Move base types marked by mark_base_types as early as possible
28390 in the CU, sorted by decreasing usage count both to make the
28391 uleb128 references as small as possible and to make sure they
28392 will have die_offset already computed by calc_die_sizes when
28393 the sizes of typed stack loc ops are computed. */
28394
28395 static void
28396 move_marked_base_types (void)
28397 {
28398 unsigned int i;
28399 dw_die_ref base_type, die, c;
28400
28401 if (base_types.is_empty ())
28402 return;
28403
28404 /* Sort by decreasing usage count, they will be added again in that
28405 order later on. */
28406 base_types.qsort (base_type_cmp);
28407 die = comp_unit_die ();
28408 c = die->die_child;
28409 do
28410 {
28411 dw_die_ref prev = c;
28412 c = c->die_sib;
28413 while (c->die_mark)
28414 {
28415 remove_child_with_prev (c, prev);
28416 /* Since base types were marked, there must be at least
28417 one node other than DW_TAG_base_type. */
28418 gcc_assert (die->die_child != NULL);
28419 c = prev->die_sib;
28420 }
28421 }
28422 while (c != die->die_child);
28423 gcc_assert (die->die_child);
28424 c = die->die_child;
28425 for (i = 0; base_types.iterate (i, &base_type); i++)
28426 {
28427 base_type->die_mark = 0;
28428 base_type->die_sib = c->die_sib;
28429 c->die_sib = base_type;
28430 c = base_type;
28431 }
28432 }
28433
28434 /* Helper function for resolve_addr: attempt to resolve
28435 one CONST_STRING and return true if successful. Similarly, verify that
28436 SYMBOL_REFs refer to variables emitted in the current CU. */
28437
28438 static bool
28439 resolve_one_addr (rtx *addr)
28440 {
28441 rtx rtl = *addr;
28442
28443 if (GET_CODE (rtl) == CONST_STRING)
28444 {
28445 size_t len = strlen (XSTR (rtl, 0)) + 1;
28446 tree t = build_string (len, XSTR (rtl, 0));
28447 tree tlen = size_int (len - 1);
28448 TREE_TYPE (t)
28449 = build_array_type (char_type_node, build_index_type (tlen));
28450 rtl = lookup_constant_def (t);
28451 if (!rtl || !MEM_P (rtl))
28452 return false;
28453 rtl = XEXP (rtl, 0);
28454 if (GET_CODE (rtl) == SYMBOL_REF
28455 && SYMBOL_REF_DECL (rtl)
28456 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28457 return false;
28458 vec_safe_push (used_rtx_array, rtl);
28459 *addr = rtl;
28460 return true;
28461 }
28462
28463 if (GET_CODE (rtl) == SYMBOL_REF
28464 && SYMBOL_REF_DECL (rtl))
28465 {
28466 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28467 {
28468 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28469 return false;
28470 }
28471 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28472 return false;
28473 }
28474
28475 if (GET_CODE (rtl) == CONST)
28476 {
28477 subrtx_ptr_iterator::array_type array;
28478 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28479 if (!resolve_one_addr (*iter))
28480 return false;
28481 }
28482
28483 return true;
28484 }
28485
28486 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
28487 if possible, and create DW_TAG_dwarf_procedure that can be referenced
28488 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
28489
28490 static rtx
28491 string_cst_pool_decl (tree t)
28492 {
28493 rtx rtl = output_constant_def (t, 1);
28494 unsigned char *array;
28495 dw_loc_descr_ref l;
28496 tree decl;
28497 size_t len;
28498 dw_die_ref ref;
28499
28500 if (!rtl || !MEM_P (rtl))
28501 return NULL_RTX;
28502 rtl = XEXP (rtl, 0);
28503 if (GET_CODE (rtl) != SYMBOL_REF
28504 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28505 return NULL_RTX;
28506
28507 decl = SYMBOL_REF_DECL (rtl);
28508 if (!lookup_decl_die (decl))
28509 {
28510 len = TREE_STRING_LENGTH (t);
28511 vec_safe_push (used_rtx_array, rtl);
28512 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28513 array = ggc_vec_alloc<unsigned char> (len);
28514 memcpy (array, TREE_STRING_POINTER (t), len);
28515 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28516 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28517 l->dw_loc_oprnd2.v.val_vec.length = len;
28518 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28519 l->dw_loc_oprnd2.v.val_vec.array = array;
28520 add_AT_loc (ref, DW_AT_location, l);
28521 equate_decl_number_to_die (decl, ref);
28522 }
28523 return rtl;
28524 }
28525
28526 /* Helper function of resolve_addr_in_expr. LOC is
28527 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28528 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28529 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
28530 with DW_OP_implicit_pointer if possible
28531 and return true; if unsuccessful, return false. */
28532
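/* Schematically, the rewrite performed here (when the referenced decl has
   a DIE with DW_AT_location or DW_AT_const_value) is:

       DW_OP_addr <sym + offset> DW_OP_stack_value
   ==> DW_OP_implicit_pointer <DIE of sym's decl> <offset>

   with the DW_OP_stack_value dropped from the chain.  */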
28533 static bool
28534 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28535 {
28536 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28537 HOST_WIDE_INT offset = 0;
28538 dw_die_ref ref = NULL;
28539 tree decl;
28540
28541 if (GET_CODE (rtl) == CONST
28542 && GET_CODE (XEXP (rtl, 0)) == PLUS
28543 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28544 {
28545 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28546 rtl = XEXP (XEXP (rtl, 0), 0);
28547 }
28548 if (GET_CODE (rtl) == CONST_STRING)
28549 {
28550 size_t len = strlen (XSTR (rtl, 0)) + 1;
28551 tree t = build_string (len, XSTR (rtl, 0));
28552 tree tlen = size_int (len - 1);
28553
28554 TREE_TYPE (t)
28555 = build_array_type (char_type_node, build_index_type (tlen));
28556 rtl = string_cst_pool_decl (t);
28557 if (!rtl)
28558 return false;
28559 }
28560 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28561 {
28562 decl = SYMBOL_REF_DECL (rtl);
28563 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28564 {
28565 ref = lookup_decl_die (decl);
28566 if (ref && (get_AT (ref, DW_AT_location)
28567 || get_AT (ref, DW_AT_const_value)))
28568 {
28569 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28570 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28571 loc->dw_loc_oprnd1.val_entry = NULL;
28572 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28573 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28574 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28575 loc->dw_loc_oprnd2.v.val_int = offset;
28576 return true;
28577 }
28578 }
28579 }
28580 return false;
28581 }
28582
28583 /* Helper function for resolve_addr: handle one location
28584 expression and return false if at least one CONST_STRING or SYMBOL_REF
28585 in it couldn't be resolved. */
28586
28587 static bool
28588 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
28589 {
28590 dw_loc_descr_ref keep = NULL;
28591 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28592 switch (loc->dw_loc_opc)
28593 {
28594 case DW_OP_addr:
28595 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28596 {
28597 if ((prev == NULL
28598 || prev->dw_loc_opc == DW_OP_piece
28599 || prev->dw_loc_opc == DW_OP_bit_piece)
28600 && loc->dw_loc_next
28601 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28602 && (!dwarf_strict || dwarf_version >= 5)
28603 && optimize_one_addr_into_implicit_ptr (loc))
28604 break;
28605 return false;
28606 }
28607 break;
28608 case DW_OP_GNU_addr_index:
28609 case DW_OP_GNU_const_index:
28610 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28611 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28612 {
28613 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28614 if (!resolve_one_addr (&rtl))
28615 return false;
28616 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28617 loc->dw_loc_oprnd1.val_entry
28618 = add_addr_table_entry (rtl, ate_kind_rtx);
28619 }
28620 break;
28621 case DW_OP_const4u:
28622 case DW_OP_const8u:
28623 if (loc->dtprel
28624 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28625 return false;
28626 break;
28627 case DW_OP_plus_uconst:
28628 if (size_of_loc_descr (loc)
28629 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28630 + 1
28631 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28632 {
28633 dw_loc_descr_ref repl
28634 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28635 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28636 add_loc_descr (&repl, loc->dw_loc_next);
28637 *loc = *repl;
28638 }
28639 break;
28640 case DW_OP_implicit_value:
28641 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28642 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28643 return false;
28644 break;
28645 case DW_OP_implicit_pointer:
28646 case DW_OP_GNU_implicit_pointer:
28647 case DW_OP_GNU_parameter_ref:
28648 case DW_OP_GNU_variable_value:
28649 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28650 {
28651 dw_die_ref ref
28652 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28653 if (ref == NULL)
28654 return false;
28655 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28656 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28657 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28658 }
28659 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
28660 {
28661 if (prev == NULL
28662 && loc->dw_loc_next == NULL
28663 && AT_class (a) == dw_val_class_loc)
28664 switch (a->dw_attr)
28665 {
28666 /* The following attributes allow both exprloc and reference,
28667 so if the whole expression is DW_OP_GNU_variable_value
28668 alone we can transform it into a reference. */
28669 case DW_AT_byte_size:
28670 case DW_AT_bit_size:
28671 case DW_AT_lower_bound:
28672 case DW_AT_upper_bound:
28673 case DW_AT_bit_stride:
28674 case DW_AT_count:
28675 case DW_AT_allocated:
28676 case DW_AT_associated:
28677 case DW_AT_byte_stride:
28678 a->dw_attr_val.val_class = dw_val_class_die_ref;
28679 a->dw_attr_val.val_entry = NULL;
28680 a->dw_attr_val.v.val_die_ref.die
28681 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28682 a->dw_attr_val.v.val_die_ref.external = 0;
28683 return true;
28684 default:
28685 break;
28686 }
28687 if (dwarf_strict)
28688 return false;
28689 }
28690 break;
28691 case DW_OP_const_type:
28692 case DW_OP_regval_type:
28693 case DW_OP_deref_type:
28694 case DW_OP_convert:
28695 case DW_OP_reinterpret:
28696 case DW_OP_GNU_const_type:
28697 case DW_OP_GNU_regval_type:
28698 case DW_OP_GNU_deref_type:
28699 case DW_OP_GNU_convert:
28700 case DW_OP_GNU_reinterpret:
28701 while (loc->dw_loc_next
28702 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28703 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28704 {
28705 dw_die_ref base1, base2;
28706 unsigned enc1, enc2, size1, size2;
28707 if (loc->dw_loc_opc == DW_OP_regval_type
28708 || loc->dw_loc_opc == DW_OP_deref_type
28709 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28710 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28711 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28712 else if (loc->dw_loc_oprnd1.val_class
28713 == dw_val_class_unsigned_const)
28714 break;
28715 else
28716 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28717 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28718 == dw_val_class_unsigned_const)
28719 break;
28720 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28721 gcc_assert (base1->die_tag == DW_TAG_base_type
28722 && base2->die_tag == DW_TAG_base_type);
28723 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28724 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28725 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28726 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28727 if (size1 == size2
28728 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28729 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28730 && loc != keep)
28731 || enc1 == enc2))
28732 {
28733 /* Optimize away next DW_OP_convert after
28734 adjusting LOC's base type die reference. */
28735 if (loc->dw_loc_opc == DW_OP_regval_type
28736 || loc->dw_loc_opc == DW_OP_deref_type
28737 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28738 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28739 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28740 else
28741 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28742 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28743 continue;
28744 }
28745 /* Don't change integer DW_OP_convert after e.g. floating
28746 point typed stack entry. */
28747 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28748 keep = loc->dw_loc_next;
28749 break;
28750 }
28751 break;
28752 default:
28753 break;
28754 }
28755 return true;
28756 }
28757
28758 /* Helper function of resolve_addr. DIE had a DW_AT_location of a lone
28759 DW_OP_addr, whose operand referred to DECL, and that DW_OP_addr
28760 couldn't be resolved. resolve_addr has already
28761 removed the DW_AT_location attribute. This function attempts to
28762 add a new DW_AT_location attribute with DW_OP_implicit_pointer
28763 or a DW_AT_const_value attribute to it, if possible. */
28764
28765 static void
28766 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28767 {
28768 if (!VAR_P (decl)
28769 || lookup_decl_die (decl) != die
28770 || DECL_EXTERNAL (decl)
28771 || !TREE_STATIC (decl)
28772 || DECL_INITIAL (decl) == NULL_TREE
28773 || DECL_P (DECL_INITIAL (decl))
28774 || get_AT (die, DW_AT_const_value))
28775 return;
28776
28777 tree init = DECL_INITIAL (decl);
28778 HOST_WIDE_INT offset = 0;
28779 /* For variables that have been optimized away and thus
28780 don't have a memory location, see if we can emit
28781 DW_AT_const_value instead. */
28782 if (tree_add_const_value_attribute (die, init))
28783 return;
28784 if (dwarf_strict && dwarf_version < 5)
28785 return;
28786 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28787 and ADDR_EXPR refers to a decl that has DW_AT_location or
28788 DW_AT_const_value (but isn't addressable, otherwise
28789 resolving the original DW_OP_addr wouldn't fail), see if
28790 we can add DW_OP_implicit_pointer. */
28791 STRIP_NOPS (init);
28792 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28793 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28794 {
28795 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28796 init = TREE_OPERAND (init, 0);
28797 STRIP_NOPS (init);
28798 }
28799 if (TREE_CODE (init) != ADDR_EXPR)
28800 return;
28801 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28802 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28803 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28804 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28805 && TREE_OPERAND (init, 0) != decl))
28806 {
28807 dw_die_ref ref;
28808 dw_loc_descr_ref l;
28809
28810 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28811 {
28812 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28813 if (!rtl)
28814 return;
28815 decl = SYMBOL_REF_DECL (rtl);
28816 }
28817 else
28818 decl = TREE_OPERAND (init, 0);
28819 ref = lookup_decl_die (decl);
28820 if (ref == NULL
28821 || (!get_AT (ref, DW_AT_location)
28822 && !get_AT (ref, DW_AT_const_value)))
28823 return;
28824 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28825 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28826 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28827 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28828 add_AT_loc (die, DW_AT_location, l);
28829 }
28830 }
28831
28832 /* Return NULL if L is a valid DWARF expression, or the first op
28833 that is not a valid DWARF expression otherwise. */
28834
28835 static dw_loc_descr_ref
28836 non_dwarf_expression (dw_loc_descr_ref l)
28837 {
28838 while (l)
28839 {
28840 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28841 return l;
28842 switch (l->dw_loc_opc)
28843 {
28844 case DW_OP_regx:
28845 case DW_OP_implicit_value:
28846 case DW_OP_stack_value:
28847 case DW_OP_implicit_pointer:
28848 case DW_OP_GNU_implicit_pointer:
28849 case DW_OP_GNU_parameter_ref:
28850 case DW_OP_piece:
28851 case DW_OP_bit_piece:
28852 return l;
28853 default:
28854 break;
28855 }
28856 l = l->dw_loc_next;
28857 }
28858 return NULL;
28859 }
28860
28861 /* Return an adjusted copy of EXPR:
28862 If it is an empty DWARF expression, return it.
28863 If it is a valid non-empty DWARF expression,
28864 return a copy of EXPR with DW_OP_deref appended to it.
28865 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
28866 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
28867 If it is a DWARF expression followed by DW_OP_stack_value, return a
28868 copy of the DWARF expression without anything appended.
28869 Otherwise, return NULL. */
28870
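/* For example (informally):
     DW_OP_fbreg <n>              becomes  DW_OP_fbreg <n>, DW_OP_deref
     <expr>, DW_OP_reg3           becomes  <expr>, DW_OP_breg3 0
     <expr>, DW_OP_regx <r>       becomes  <expr>, DW_OP_bregx <r>, 0
     <expr>, DW_OP_stack_value    becomes  <expr>
   while any other non-expression op, or one that is not in the final
   position, yields NULL.  */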
28871 static dw_loc_descr_ref
28872 copy_deref_exprloc (dw_loc_descr_ref expr)
28873 {
28874 dw_loc_descr_ref tail = NULL;
28875
28876 if (expr == NULL)
28877 return NULL;
28878
28879 dw_loc_descr_ref l = non_dwarf_expression (expr);
28880 if (l && l->dw_loc_next)
28881 return NULL;
28882
28883 if (l)
28884 {
28885 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28886 tail = new_loc_descr ((enum dwarf_location_atom)
28887 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28888 0, 0);
28889 else
28890 switch (l->dw_loc_opc)
28891 {
28892 case DW_OP_regx:
28893 tail = new_loc_descr (DW_OP_bregx,
28894 l->dw_loc_oprnd1.v.val_unsigned, 0);
28895 break;
28896 case DW_OP_stack_value:
28897 break;
28898 default:
28899 return NULL;
28900 }
28901 }
28902 else
28903 tail = new_loc_descr (DW_OP_deref, 0, 0);
28904
28905 dw_loc_descr_ref ret = NULL, *p = &ret;
28906 while (expr != l)
28907 {
28908 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
28909 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
28910 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
28911 p = &(*p)->dw_loc_next;
28912 expr = expr->dw_loc_next;
28913 }
28914 *p = tail;
28915 return ret;
28916 }
28917
28918 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
28919 reference to a variable or argument, adjust it if needed and return:
28920 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
28921 attribute, if present, should be removed;
28922 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan);
28923 1 if the attribute has been successfully adjusted. */
28924
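/* The possible rewrites, assuming the referenced DIE has a usable
   DW_AT_location, are roughly:
     DW_OP_GNU_variable_value <ref> DW_OP_stack_value
       -> DW_OP_call4 <ref>  (or, for DWARF 5, a plain reference to <ref>,
          or a copy of <ref>'s DW_AT_location)
     DW_OP_GNU_variable_value <ref>
       -> DW_OP_call4 <ref> DW_OP_deref, or a copy of <ref>'s location
          description with a dereference appended (copy_deref_exprloc).  */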
28925 static int
28926 optimize_string_length (dw_attr_node *a)
28927 {
28928 dw_loc_descr_ref l = AT_loc (a), lv;
28929 dw_die_ref die;
28930 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28931 {
28932 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
28933 die = lookup_decl_die (decl);
28934 if (die)
28935 {
28936 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28937 l->dw_loc_oprnd1.v.val_die_ref.die = die;
28938 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28939 }
28940 else
28941 return -1;
28942 }
28943 else
28944 die = l->dw_loc_oprnd1.v.val_die_ref.die;
28945
28946 /* DWARF5 allows reference class, so we can then reference the DIE.
28947 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
28948 if (l->dw_loc_next != NULL && dwarf_version >= 5)
28949 {
28950 a->dw_attr_val.val_class = dw_val_class_die_ref;
28951 a->dw_attr_val.val_entry = NULL;
28952 a->dw_attr_val.v.val_die_ref.die = die;
28953 a->dw_attr_val.v.val_die_ref.external = 0;
28954 return 0;
28955 }
28956
28957 dw_attr_node *av = get_AT (die, DW_AT_location);
28958 dw_loc_list_ref d;
28959 bool non_dwarf_expr = false;
28960
28961 if (av == NULL)
28962 return dwarf_strict ? -1 : 0;
28963 switch (AT_class (av))
28964 {
28965 case dw_val_class_loc_list:
28966 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
28967 if (d->expr && non_dwarf_expression (d->expr))
28968 non_dwarf_expr = true;
28969 break;
28970 case dw_val_class_loc:
28971 lv = AT_loc (av);
28972 if (lv == NULL)
28973 return dwarf_strict ? -1 : 0;
28974 if (non_dwarf_expression (lv))
28975 non_dwarf_expr = true;
28976 break;
28977 default:
28978 return dwarf_strict ? -1 : 0;
28979 }
28980
28981 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
28982 into DW_OP_call4 or DW_OP_GNU_variable_value into
28983 DW_OP_call4 DW_OP_deref, do so. */
28984 if (!non_dwarf_expr
28985 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
28986 {
28987 l->dw_loc_opc = DW_OP_call4;
28988 if (l->dw_loc_next)
28989 l->dw_loc_next = NULL;
28990 else
28991 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
28992 return 0;
28993 }
28994
28995 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
28996 copy over the DW_AT_location attribute from die to a. */
28997 if (l->dw_loc_next != NULL)
28998 {
28999 a->dw_attr_val = av->dw_attr_val;
29000 return 1;
29001 }
29002
29003 dw_loc_list_ref list, *p;
29004 switch (AT_class (av))
29005 {
29006 case dw_val_class_loc_list:
29007 p = &list;
29008 list = NULL;
29009 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29010 {
29011 lv = copy_deref_exprloc (d->expr);
29012 if (lv)
29013 {
29014 *p = new_loc_list (lv, d->begin, d->end, d->section);
29015 p = &(*p)->dw_loc_next;
29016 }
29017 else if (!dwarf_strict && d->expr)
29018 return 0;
29019 }
29020 if (list == NULL)
29021 return dwarf_strict ? -1 : 0;
29022 a->dw_attr_val.val_class = dw_val_class_loc_list;
29023 gen_llsym (list);
29024 *AT_loc_list_ptr (a) = list;
29025 return 1;
29026 case dw_val_class_loc:
29027 lv = copy_deref_exprloc (AT_loc (av));
29028 if (lv == NULL)
29029 return dwarf_strict ? -1 : 0;
29030 a->dw_attr_val.v.val_loc = lv;
29031 return 1;
29032 default:
29033 gcc_unreachable ();
29034 }
29035 }
29036
29037 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
29038 an address in the .rodata section if the string literal is emitted there;
29039 if it isn't found in .rodata, remove the containing location list, or
29040 replace DW_AT_const_value with DW_AT_location and an empty location
29041 expression. Similarly for SYMBOL_REFs, keep only those that refer
29042 to something that has been emitted in the current CU. */
29043
29044 static void
29045 resolve_addr (dw_die_ref die)
29046 {
29047 dw_die_ref c;
29048 dw_attr_node *a;
29049 dw_loc_list_ref *curr, *start, loc;
29050 unsigned ix;
29051 bool remove_AT_byte_size = false;
29052
29053 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29054 switch (AT_class (a))
29055 {
29056 case dw_val_class_loc_list:
29057 start = curr = AT_loc_list_ptr (a);
29058 loc = *curr;
29059 gcc_assert (loc);
29060 /* The same list can be referenced more than once. See if we have
29061 already recorded the result from a previous pass. */
29062 if (loc->replaced)
29063 *curr = loc->dw_loc_next;
29064 else if (!loc->resolved_addr)
29065 {
29066 /* As things stand, we do not expect or allow one die to
29067 reference a suffix of another die's location list chain.
29068 References must be identical or completely separate.
29069 There is therefore no need to cache the result of this
29070 pass on any list other than the first; doing so
29071 would lead to unnecessary writes. */
29072 while (*curr)
29073 {
29074 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
29075 if (!resolve_addr_in_expr (a, (*curr)->expr))
29076 {
29077 dw_loc_list_ref next = (*curr)->dw_loc_next;
29078 dw_loc_descr_ref l = (*curr)->expr;
29079
29080 if (next && (*curr)->ll_symbol)
29081 {
29082 gcc_assert (!next->ll_symbol);
29083 next->ll_symbol = (*curr)->ll_symbol;
29084 }
29085 if (dwarf_split_debug_info)
29086 remove_loc_list_addr_table_entries (l);
29087 *curr = next;
29088 }
29089 else
29090 {
29091 mark_base_types ((*curr)->expr);
29092 curr = &(*curr)->dw_loc_next;
29093 }
29094 }
29095 if (loc == *start)
29096 loc->resolved_addr = 1;
29097 else
29098 {
29099 loc->replaced = 1;
29100 loc->dw_loc_next = *start;
29101 }
29102 }
29103 if (!*start)
29104 {
29105 remove_AT (die, a->dw_attr);
29106 ix--;
29107 }
29108 break;
29109 case dw_val_class_loc:
29110 {
29111 dw_loc_descr_ref l = AT_loc (a);
29112 /* DW_OP_GNU_variable_value DW_OP_stack_value or
29113 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
29114 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
29115 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
29116 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
29117 with DW_FORM_ref referencing the same DIE as
29118 DW_OP_GNU_variable_value used to reference. */
29119 if (a->dw_attr == DW_AT_string_length
29120 && l
29121 && l->dw_loc_opc == DW_OP_GNU_variable_value
29122 && (l->dw_loc_next == NULL
29123 || (l->dw_loc_next->dw_loc_next == NULL
29124 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
29125 {
29126 switch (optimize_string_length (a))
29127 {
29128 case -1:
29129 remove_AT (die, a->dw_attr);
29130 ix--;
29131 /* If we drop DW_AT_string_length, we need to drop also
29132 DW_AT_{string_length_,}byte_size. */
29133 remove_AT_byte_size = true;
29134 continue;
29135 default:
29136 break;
29137 case 1:
29138 /* Even if we keep the optimized DW_AT_string_length,
29139 it might have changed AT_class, so process it again. */
29140 ix--;
29141 continue;
29142 }
29143 }
29144 /* For -gdwarf-2 don't attempt to optimize
29145 DW_AT_data_member_location containing
29146 DW_OP_plus_uconst - older consumers might
29147 rely on it being that op instead of a more complex,
29148 but shorter, location description. */
29149 if ((dwarf_version > 2
29150 || a->dw_attr != DW_AT_data_member_location
29151 || l == NULL
29152 || l->dw_loc_opc != DW_OP_plus_uconst
29153 || l->dw_loc_next != NULL)
29154 && !resolve_addr_in_expr (a, l))
29155 {
29156 if (dwarf_split_debug_info)
29157 remove_loc_list_addr_table_entries (l);
29158 if (l != NULL
29159 && l->dw_loc_next == NULL
29160 && l->dw_loc_opc == DW_OP_addr
29161 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
29162 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
29163 && a->dw_attr == DW_AT_location)
29164 {
29165 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
29166 remove_AT (die, a->dw_attr);
29167 ix--;
29168 optimize_location_into_implicit_ptr (die, decl);
29169 break;
29170 }
29171 if (a->dw_attr == DW_AT_string_length)
29172 /* If we drop DW_AT_string_length, we need to drop also
29173 DW_AT_{string_length_,}byte_size. */
29174 remove_AT_byte_size = true;
29175 remove_AT (die, a->dw_attr);
29176 ix--;
29177 }
29178 else
29179 mark_base_types (l);
29180 }
29181 break;
29182 case dw_val_class_addr:
29183 if (a->dw_attr == DW_AT_const_value
29184 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
29185 {
29186 if (AT_index (a) != NOT_INDEXED)
29187 remove_addr_table_entry (a->dw_attr_val.val_entry);
29188 remove_AT (die, a->dw_attr);
29189 ix--;
29190 }
29191 if ((die->die_tag == DW_TAG_call_site
29192 && a->dw_attr == DW_AT_call_origin)
29193 || (die->die_tag == DW_TAG_GNU_call_site
29194 && a->dw_attr == DW_AT_abstract_origin))
29195 {
29196 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
29197 dw_die_ref tdie = lookup_decl_die (tdecl);
29198 dw_die_ref cdie;
29199 if (tdie == NULL
29200 && DECL_EXTERNAL (tdecl)
29201 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
29202 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
29203 {
29204 dw_die_ref pdie = cdie;
29205 /* Make sure we don't add these DIEs into type units.
29206 We could emit skeleton DIEs for context (namespaces,
29207 outer structs/classes) and a skeleton DIE for the
29208 innermost context with DW_AT_signature pointing to the
29209 type unit. See PR78835. */
29210 while (pdie && pdie->die_tag != DW_TAG_type_unit)
29211 pdie = pdie->die_parent;
29212 if (pdie == NULL)
29213 {
29214 /* Creating a full DIE for tdecl is overly expensive and,
29215 at this point, even wrong when in the LTO phase, since it
29216 can end up generating new type DIEs that we never
29217 output, which would make optimize_external_refs crash. */
29218 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
29219 add_AT_flag (tdie, DW_AT_external, 1);
29220 add_AT_flag (tdie, DW_AT_declaration, 1);
29221 add_linkage_attr (tdie, tdecl);
29222 add_name_and_src_coords_attributes (tdie, tdecl, true);
29223 equate_decl_number_to_die (tdecl, tdie);
29224 }
29225 }
29226 if (tdie)
29227 {
29228 a->dw_attr_val.val_class = dw_val_class_die_ref;
29229 a->dw_attr_val.v.val_die_ref.die = tdie;
29230 a->dw_attr_val.v.val_die_ref.external = 0;
29231 }
29232 else
29233 {
29234 if (AT_index (a) != NOT_INDEXED)
29235 remove_addr_table_entry (a->dw_attr_val.val_entry);
29236 remove_AT (die, a->dw_attr);
29237 ix--;
29238 }
29239 }
29240 break;
29241 default:
29242 break;
29243 }
29244
29245 if (remove_AT_byte_size)
29246 remove_AT (die, dwarf_version >= 5
29247 ? DW_AT_string_length_byte_size
29248 : DW_AT_byte_size);
29249
29250 FOR_EACH_CHILD (die, c, resolve_addr (c));
29251 }
29252 \f
29253 /* Helper routines for optimize_location_lists.
29254 This pass tries to share identical location lists in the
29255 .debug_loc section. */
29256
29257 /* Iteratively hash operands of LOC opcode into HSTATE. */
29258
29259 static void
29260 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
29261 {
29262 dw_val_ref val1 = &loc->dw_loc_oprnd1;
29263 dw_val_ref val2 = &loc->dw_loc_oprnd2;
29264
29265 switch (loc->dw_loc_opc)
29266 {
29267 case DW_OP_const4u:
29268 case DW_OP_const8u:
29269 if (loc->dtprel)
29270 goto hash_addr;
29271 /* FALLTHRU */
29272 case DW_OP_const1u:
29273 case DW_OP_const1s:
29274 case DW_OP_const2u:
29275 case DW_OP_const2s:
29276 case DW_OP_const4s:
29277 case DW_OP_const8s:
29278 case DW_OP_constu:
29279 case DW_OP_consts:
29280 case DW_OP_pick:
29281 case DW_OP_plus_uconst:
29282 case DW_OP_breg0:
29283 case DW_OP_breg1:
29284 case DW_OP_breg2:
29285 case DW_OP_breg3:
29286 case DW_OP_breg4:
29287 case DW_OP_breg5:
29288 case DW_OP_breg6:
29289 case DW_OP_breg7:
29290 case DW_OP_breg8:
29291 case DW_OP_breg9:
29292 case DW_OP_breg10:
29293 case DW_OP_breg11:
29294 case DW_OP_breg12:
29295 case DW_OP_breg13:
29296 case DW_OP_breg14:
29297 case DW_OP_breg15:
29298 case DW_OP_breg16:
29299 case DW_OP_breg17:
29300 case DW_OP_breg18:
29301 case DW_OP_breg19:
29302 case DW_OP_breg20:
29303 case DW_OP_breg21:
29304 case DW_OP_breg22:
29305 case DW_OP_breg23:
29306 case DW_OP_breg24:
29307 case DW_OP_breg25:
29308 case DW_OP_breg26:
29309 case DW_OP_breg27:
29310 case DW_OP_breg28:
29311 case DW_OP_breg29:
29312 case DW_OP_breg30:
29313 case DW_OP_breg31:
29314 case DW_OP_regx:
29315 case DW_OP_fbreg:
29316 case DW_OP_piece:
29317 case DW_OP_deref_size:
29318 case DW_OP_xderef_size:
29319 hstate.add_object (val1->v.val_int);
29320 break;
29321 case DW_OP_skip:
29322 case DW_OP_bra:
29323 {
29324 int offset;
29325
29326 gcc_assert (val1->val_class == dw_val_class_loc);
29327 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
29328 hstate.add_object (offset);
29329 }
29330 break;
29331 case DW_OP_implicit_value:
29332 hstate.add_object (val1->v.val_unsigned);
29333 switch (val2->val_class)
29334 {
29335 case dw_val_class_const:
29336 hstate.add_object (val2->v.val_int);
29337 break;
29338 case dw_val_class_vec:
29339 {
29340 unsigned int elt_size = val2->v.val_vec.elt_size;
29341 unsigned int len = val2->v.val_vec.length;
29342
29343 hstate.add_int (elt_size);
29344 hstate.add_int (len);
29345 hstate.add (val2->v.val_vec.array, len * elt_size);
29346 }
29347 break;
29348 case dw_val_class_const_double:
29349 hstate.add_object (val2->v.val_double.low);
29350 hstate.add_object (val2->v.val_double.high);
29351 break;
29352 case dw_val_class_wide_int:
29353 hstate.add (val2->v.val_wide->get_val (),
29354 get_full_len (*val2->v.val_wide)
29355 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29356 break;
29357 case dw_val_class_addr:
29358 inchash::add_rtx (val2->v.val_addr, hstate);
29359 break;
29360 default:
29361 gcc_unreachable ();
29362 }
29363 break;
29364 case DW_OP_bregx:
29365 case DW_OP_bit_piece:
29366 hstate.add_object (val1->v.val_int);
29367 hstate.add_object (val2->v.val_int);
29368 break;
29369 case DW_OP_addr:
29370 hash_addr:
29371 if (loc->dtprel)
29372 {
29373 unsigned char dtprel = 0xd1;
29374 hstate.add_object (dtprel);
29375 }
29376 inchash::add_rtx (val1->v.val_addr, hstate);
29377 break;
29378 case DW_OP_GNU_addr_index:
29379 case DW_OP_GNU_const_index:
29380 {
29381 if (loc->dtprel)
29382 {
29383 unsigned char dtprel = 0xd1;
29384 hstate.add_object (dtprel);
29385 }
29386 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29387 }
29388 break;
29389 case DW_OP_implicit_pointer:
29390 case DW_OP_GNU_implicit_pointer:
29391 hstate.add_int (val2->v.val_int);
29392 break;
29393 case DW_OP_entry_value:
29394 case DW_OP_GNU_entry_value:
29395 hstate.add_object (val1->v.val_loc);
29396 break;
29397 case DW_OP_regval_type:
29398 case DW_OP_deref_type:
29399 case DW_OP_GNU_regval_type:
29400 case DW_OP_GNU_deref_type:
29401 {
29402 unsigned int byte_size
29403 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29404 unsigned int encoding
29405 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29406 hstate.add_object (val1->v.val_int);
29407 hstate.add_object (byte_size);
29408 hstate.add_object (encoding);
29409 }
29410 break;
29411 case DW_OP_convert:
29412 case DW_OP_reinterpret:
29413 case DW_OP_GNU_convert:
29414 case DW_OP_GNU_reinterpret:
29415 if (val1->val_class == dw_val_class_unsigned_const)
29416 {
29417 hstate.add_object (val1->v.val_unsigned);
29418 break;
29419 }
29420 /* FALLTHRU */
29421 case DW_OP_const_type:
29422 case DW_OP_GNU_const_type:
29423 {
29424 unsigned int byte_size
29425 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29426 unsigned int encoding
29427 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29428 hstate.add_object (byte_size);
29429 hstate.add_object (encoding);
29430 if (loc->dw_loc_opc != DW_OP_const_type
29431 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29432 break;
29433 hstate.add_object (val2->val_class);
29434 switch (val2->val_class)
29435 {
29436 case dw_val_class_const:
29437 hstate.add_object (val2->v.val_int);
29438 break;
29439 case dw_val_class_vec:
29440 {
29441 unsigned int elt_size = val2->v.val_vec.elt_size;
29442 unsigned int len = val2->v.val_vec.length;
29443
29444 hstate.add_object (elt_size);
29445 hstate.add_object (len);
29446 hstate.add (val2->v.val_vec.array, len * elt_size);
29447 }
29448 break;
29449 case dw_val_class_const_double:
29450 hstate.add_object (val2->v.val_double.low);
29451 hstate.add_object (val2->v.val_double.high);
29452 break;
29453 case dw_val_class_wide_int:
29454 hstate.add (val2->v.val_wide->get_val (),
29455 get_full_len (*val2->v.val_wide)
29456 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29457 break;
29458 default:
29459 gcc_unreachable ();
29460 }
29461 }
29462 break;
29463
29464 default:
29465 /* Other codes have no operands. */
29466 break;
29467 }
29468 }
29469
29470 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29471
29472 static inline void
29473 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29474 {
29475 dw_loc_descr_ref l;
29476 bool sizes_computed = false;
29477 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
29478 size_of_locs (loc);
29479
29480 for (l = loc; l != NULL; l = l->dw_loc_next)
29481 {
29482 enum dwarf_location_atom opc = l->dw_loc_opc;
29483 hstate.add_object (opc);
29484 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29485 {
29486 size_of_locs (loc);
29487 sizes_computed = true;
29488 }
29489 hash_loc_operands (l, hstate);
29490 }
29491 }
29492
29493 /* Compute hash of the whole location list LIST_HEAD. */
29494
29495 static inline void
29496 hash_loc_list (dw_loc_list_ref list_head)
29497 {
29498 dw_loc_list_ref curr = list_head;
29499 inchash::hash hstate;
29500
29501 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29502 {
29503 hstate.add (curr->begin, strlen (curr->begin) + 1);
29504 hstate.add (curr->end, strlen (curr->end) + 1);
29505 if (curr->section)
29506 hstate.add (curr->section, strlen (curr->section) + 1);
29507 hash_locs (curr->expr, hstate);
29508 }
29509 list_head->hash = hstate.end ();
29510 }
29511
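/* Illustrative, compiled-out sketch (not part of dwarf2out.c): the hashing
   routines above fold every distinguishing piece of a location list -- the
   begin/end labels plus each expression node's opcode and operands -- into
   one running hash state, so that two lists hash equal exactly when the
   comparison routines below consider them equal.  The stand-alone sketch
   shows the same shape on a stripped-down node type, with a 64-bit FNV-1a
   accumulator standing in for inchash::hash; all sketch_* names are
   hypothetical.  */
#if 0
#include <stdint.h>
#include <string.h>

struct sketch_loc_node
{
  unsigned opcode;                      /* Stand-in for dw_loc_opc.  */
  long operand;                         /* Stand-in for dw_loc_oprnd1.  */
  struct sketch_loc_node *next;
};

struct sketch_loc_range
{
  const char *begin, *end;              /* Stand-ins for the range labels.  */
  struct sketch_loc_node *expr;
  struct sketch_loc_range *next;
};

/* Fold LEN bytes at DATA into the running state *H (FNV-1a).  */
static void
sketch_hash_bytes (uint64_t *h, const void *data, size_t len)
{
  const unsigned char *p = (const unsigned char *) data;
  for (size_t i = 0; i < len; i++)
    *h = (*h ^ p[i]) * 1099511628211ull;
}

/* Hash a whole list the way hash_loc_list does: the labels first, then
   every expression node's opcode and operand, in order.  */
static uint64_t
sketch_hash_loc_list (const struct sketch_loc_range *list)
{
  uint64_t h = 14695981039346656037ull;
  for (const struct sketch_loc_range *r = list; r; r = r->next)
    {
      sketch_hash_bytes (&h, r->begin, strlen (r->begin) + 1);
      sketch_hash_bytes (&h, r->end, strlen (r->end) + 1);
      for (const struct sketch_loc_node *n = r->expr; n; n = n->next)
        {
          sketch_hash_bytes (&h, &n->opcode, sizeof n->opcode);
          sketch_hash_bytes (&h, &n->operand, sizeof n->operand);
        }
    }
  return h;
}
#endif
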
29512 /* Return true if X and Y opcodes have the same operands. */
29513
29514 static inline bool
29515 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29516 {
29517 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29518 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29519 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29520 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29521
29522 switch (x->dw_loc_opc)
29523 {
29524 case DW_OP_const4u:
29525 case DW_OP_const8u:
29526 if (x->dtprel)
29527 goto hash_addr;
29528 /* FALLTHRU */
29529 case DW_OP_const1u:
29530 case DW_OP_const1s:
29531 case DW_OP_const2u:
29532 case DW_OP_const2s:
29533 case DW_OP_const4s:
29534 case DW_OP_const8s:
29535 case DW_OP_constu:
29536 case DW_OP_consts:
29537 case DW_OP_pick:
29538 case DW_OP_plus_uconst:
29539 case DW_OP_breg0:
29540 case DW_OP_breg1:
29541 case DW_OP_breg2:
29542 case DW_OP_breg3:
29543 case DW_OP_breg4:
29544 case DW_OP_breg5:
29545 case DW_OP_breg6:
29546 case DW_OP_breg7:
29547 case DW_OP_breg8:
29548 case DW_OP_breg9:
29549 case DW_OP_breg10:
29550 case DW_OP_breg11:
29551 case DW_OP_breg12:
29552 case DW_OP_breg13:
29553 case DW_OP_breg14:
29554 case DW_OP_breg15:
29555 case DW_OP_breg16:
29556 case DW_OP_breg17:
29557 case DW_OP_breg18:
29558 case DW_OP_breg19:
29559 case DW_OP_breg20:
29560 case DW_OP_breg21:
29561 case DW_OP_breg22:
29562 case DW_OP_breg23:
29563 case DW_OP_breg24:
29564 case DW_OP_breg25:
29565 case DW_OP_breg26:
29566 case DW_OP_breg27:
29567 case DW_OP_breg28:
29568 case DW_OP_breg29:
29569 case DW_OP_breg30:
29570 case DW_OP_breg31:
29571 case DW_OP_regx:
29572 case DW_OP_fbreg:
29573 case DW_OP_piece:
29574 case DW_OP_deref_size:
29575 case DW_OP_xderef_size:
29576 return valx1->v.val_int == valy1->v.val_int;
29577 case DW_OP_skip:
29578 case DW_OP_bra:
29579 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29580 can cause irrelevant differences in dw_loc_addr. */
29581 gcc_assert (valx1->val_class == dw_val_class_loc
29582 && valy1->val_class == dw_val_class_loc
29583 && (dwarf_split_debug_info
29584 || x->dw_loc_addr == y->dw_loc_addr));
29585 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29586 case DW_OP_implicit_value:
29587 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29588 || valx2->val_class != valy2->val_class)
29589 return false;
29590 switch (valx2->val_class)
29591 {
29592 case dw_val_class_const:
29593 return valx2->v.val_int == valy2->v.val_int;
29594 case dw_val_class_vec:
29595 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29596 && valx2->v.val_vec.length == valy2->v.val_vec.length
29597 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29598 valx2->v.val_vec.elt_size
29599 * valx2->v.val_vec.length) == 0;
29600 case dw_val_class_const_double:
29601 return valx2->v.val_double.low == valy2->v.val_double.low
29602 && valx2->v.val_double.high == valy2->v.val_double.high;
29603 case dw_val_class_wide_int:
29604 return *valx2->v.val_wide == *valy2->v.val_wide;
29605 case dw_val_class_addr:
29606 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29607 default:
29608 gcc_unreachable ();
29609 }
29610 case DW_OP_bregx:
29611 case DW_OP_bit_piece:
29612 return valx1->v.val_int == valy1->v.val_int
29613 && valx2->v.val_int == valy2->v.val_int;
29614 case DW_OP_addr:
29615 hash_addr:
29616 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29617 case DW_OP_GNU_addr_index:
29618 case DW_OP_GNU_const_index:
29619 {
29620 rtx ax1 = valx1->val_entry->addr.rtl;
29621 rtx ay1 = valy1->val_entry->addr.rtl;
29622 return rtx_equal_p (ax1, ay1);
29623 }
29624 case DW_OP_implicit_pointer:
29625 case DW_OP_GNU_implicit_pointer:
29626 return valx1->val_class == dw_val_class_die_ref
29627 && valx1->val_class == valy1->val_class
29628 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29629 && valx2->v.val_int == valy2->v.val_int;
29630 case DW_OP_entry_value:
29631 case DW_OP_GNU_entry_value:
29632 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29633 case DW_OP_const_type:
29634 case DW_OP_GNU_const_type:
29635 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29636 || valx2->val_class != valy2->val_class)
29637 return false;
29638 switch (valx2->val_class)
29639 {
29640 case dw_val_class_const:
29641 return valx2->v.val_int == valy2->v.val_int;
29642 case dw_val_class_vec:
29643 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29644 && valx2->v.val_vec.length == valy2->v.val_vec.length
29645 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29646 valx2->v.val_vec.elt_size
29647 * valx2->v.val_vec.length) == 0;
29648 case dw_val_class_const_double:
29649 return valx2->v.val_double.low == valy2->v.val_double.low
29650 && valx2->v.val_double.high == valy2->v.val_double.high;
29651 case dw_val_class_wide_int:
29652 return *valx2->v.val_wide == *valy2->v.val_wide;
29653 default:
29654 gcc_unreachable ();
29655 }
29656 case DW_OP_regval_type:
29657 case DW_OP_deref_type:
29658 case DW_OP_GNU_regval_type:
29659 case DW_OP_GNU_deref_type:
29660 return valx1->v.val_int == valy1->v.val_int
29661 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29662 case DW_OP_convert:
29663 case DW_OP_reinterpret:
29664 case DW_OP_GNU_convert:
29665 case DW_OP_GNU_reinterpret:
29666 if (valx1->val_class != valy1->val_class)
29667 return false;
29668 if (valx1->val_class == dw_val_class_unsigned_const)
29669 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29670 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29671 case DW_OP_GNU_parameter_ref:
29672 return valx1->val_class == dw_val_class_die_ref
29673 && valx1->val_class == valy1->val_class
29674 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29675 default:
29676 /* Other codes have no operands. */
29677 return true;
29678 }
29679 }
29680
29681 /* Return true if DWARF location expressions X and Y are the same. */
29682
29683 static inline bool
29684 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29685 {
29686 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29687 if (x->dw_loc_opc != y->dw_loc_opc
29688 || x->dtprel != y->dtprel
29689 || !compare_loc_operands (x, y))
29690 break;
29691 return x == NULL && y == NULL;
29692 }
29693
29694 /* Hashtable helpers. */
29695
29696 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29697 {
29698 static inline hashval_t hash (const dw_loc_list_struct *);
29699 static inline bool equal (const dw_loc_list_struct *,
29700 const dw_loc_list_struct *);
29701 };
29702
29703 /* Return precomputed hash of location list X. */
29704
29705 inline hashval_t
29706 loc_list_hasher::hash (const dw_loc_list_struct *x)
29707 {
29708 return x->hash;
29709 }
29710
29711 /* Return true if location lists A and B are the same. */
29712
29713 inline bool
29714 loc_list_hasher::equal (const dw_loc_list_struct *a,
29715 const dw_loc_list_struct *b)
29716 {
29717 if (a == b)
29718 return 1;
29719 if (a->hash != b->hash)
29720 return 0;
29721 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29722 if (strcmp (a->begin, b->begin) != 0
29723 || strcmp (a->end, b->end) != 0
29724 || (a->section == NULL) != (b->section == NULL)
29725 || (a->section && strcmp (a->section, b->section) != 0)
29726 || !compare_locs (a->expr, b->expr))
29727 break;
29728 return a == NULL && b == NULL;
29729 }
29730
29731 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29732
29733
29734 /* Recursively optimize location lists referenced from DIE
29735 children and share them whenever possible. */
29736
29737 static void
29738 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29739 {
29740 dw_die_ref c;
29741 dw_attr_node *a;
29742 unsigned ix;
29743 dw_loc_list_struct **slot;
29744
29745 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29746 if (AT_class (a) == dw_val_class_loc_list)
29747 {
29748 dw_loc_list_ref list = AT_loc_list (a);
29749 /* TODO: perform some optimizations here, before hashing
29750 it and storing it into the hash table. */
29751 hash_loc_list (list);
29752 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
29753 if (*slot == NULL)
29754 *slot = list;
29755 else
29756 a->dw_attr_val.v.val_loc_list = *slot;
29757 }
29758
29759 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29760 }
29761
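/* Illustrative, compiled-out sketch (not part of dwarf2out.c): the sharing
   pass above is plain hash-consing -- hash each list, probe a hash table
   using (hash, deep equality), and either claim the empty slot or redirect
   the attribute to the list already stored there, as
   optimize_location_lists_1 does with a->dw_attr_val.v.val_loc_list.  The
   sketch below shows that pattern with a tiny chained table whose "lists"
   are just strings; all sketch_* names are hypothetical.  */
#if 0
#include <string.h>

#define SKETCH_BUCKETS 127

struct sketch_entry
{
  unsigned long hash;
  const char *list;                     /* Stand-in for a dw_loc_list_ref.  */
  struct sketch_entry *next;
};

static struct sketch_entry *sketch_table[SKETCH_BUCKETS];

static unsigned long
sketch_hash_string (const char *s)
{
  unsigned long h = 5381;
  while (*s)
    h = h * 33 + (unsigned char) *s++;
  return h;
}

/* Return the canonical copy of LIST: the first equal list ever interned.
   STORAGE is used only when LIST claims a new slot.  Callers then point
   at the shared copy instead of their own.  */
static const char *
sketch_intern (struct sketch_entry *storage, const char *list)
{
  unsigned long h = sketch_hash_string (list);
  struct sketch_entry **slot = &sketch_table[h % SKETCH_BUCKETS];
  for (; *slot; slot = &(*slot)->next)
    if ((*slot)->hash == h && strcmp ((*slot)->list, list) == 0)
      return (*slot)->list;             /* Duplicate: share the stored one.  */
  storage->hash = h;                    /* First occurrence: claim the slot.  */
  storage->list = list;
  storage->next = 0;
  *slot = storage;
  return list;
}
#endif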
29762
29763 /* Recursively assign each location list a unique index into the debug_addr
29764 section. */
29765
29766 static void
29767 index_location_lists (dw_die_ref die)
29768 {
29769 dw_die_ref c;
29770 dw_attr_node *a;
29771 unsigned ix;
29772
29773 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29774 if (AT_class (a) == dw_val_class_loc_list)
29775 {
29776 dw_loc_list_ref list = AT_loc_list (a);
29777 dw_loc_list_ref curr;
29778 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29779 {
29780 /* Don't index an entry that has already been indexed
29781 or won't be output. */
29782 if (curr->begin_entry != NULL
29783 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29784 continue;
29785
29786 curr->begin_entry
29787 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29788 }
29789 }
29790
29791 FOR_EACH_CHILD (die, c, index_location_lists (c));
29792 }
29793
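/* Illustrative, compiled-out sketch (not part of dwarf2out.c): with
   -gsplit-dwarf each range start that will actually be emitted needs a slot
   in an address table, so the indexing pass above walks the entries, skips
   the ones that will never be output (empty, non-forced ranges) or are
   already indexed, and assigns the rest the next free slot.  The real code
   additionally shares slots between identical labels via
   add_addr_table_entry; this sketch keeps only the walk, and all sketch_*
   names are hypothetical.  */
#if 0
#include <string.h>

struct sketch_range
{
  const char *begin, *end;
  int force;                            /* Output even if begin == end.  */
  int addr_index;                       /* -1 until a slot is assigned.  */
  struct sketch_range *next;
};

/* Assign table slots starting at *NEXT_INDEX; return how many were used.  */
static int
sketch_index_ranges (struct sketch_range *list, int *next_index)
{
  int assigned = 0;
  for (struct sketch_range *r = list; r; r = r->next)
    {
      if (r->addr_index != -1
          || (strcmp (r->begin, r->end) == 0 && !r->force))
        continue;                       /* Already indexed or never output.  */
      r->addr_index = (*next_index)++;
      assigned++;
    }
  return assigned;
}
#endif
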
29794 /* Optimize location lists referenced from DIE
29795 children and share them whenever possible. */
29796
29797 static void
29798 optimize_location_lists (dw_die_ref die)
29799 {
29800 loc_list_hash_type htab (500);
29801 optimize_location_lists_1 (die, &htab);
29802 }
29803 \f
29804 /* Traverse the limbo die list, and add parent/child links. The only
29805 dies without parents that should be here are concrete instances of
29806 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29807 For concrete instances, we can get the parent die from the abstract
29808 instance. */
29809
29810 static void
29811 flush_limbo_die_list (void)
29812 {
29813 limbo_die_node *node;
29814
29815 /* get_context_die calls force_decl_die, which can put new DIEs on the
29816 limbo list in LTO mode when nested functions are put in a different
29817 partition than that of their parent function. */
29818 while ((node = limbo_die_list))
29819 {
29820 dw_die_ref die = node->die;
29821 limbo_die_list = node->next;
29822
29823 if (die->die_parent == NULL)
29824 {
29825 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29826
29827 if (origin && origin->die_parent)
29828 add_child_die (origin->die_parent, die);
29829 else if (is_cu_die (die))
29830 ;
29831 else if (seen_error ())
29832 /* It's OK to be confused by errors in the input. */
29833 add_child_die (comp_unit_die (), die);
29834 else
29835 {
29836 /* In certain situations, the lexical block containing a
29837 nested function can be optimized away, which results
29838 in the nested function die being orphaned. Likewise
29839 with the return type of that nested function. Force
29840 this to be a child of the containing function.
29841
29842 It may happen that even the containing function got fully
29843 inlined and optimized out. In that case we are lost and
29844 assign the empty child. This should not be a big issue, as
29845 the function is likely unreachable too. */
29846 gcc_assert (node->created_for);
29847
29848 if (DECL_P (node->created_for))
29849 origin = get_context_die (DECL_CONTEXT (node->created_for));
29850 else if (TYPE_P (node->created_for))
29851 origin = scope_die_for (node->created_for, comp_unit_die ());
29852 else
29853 origin = comp_unit_die ();
29854
29855 add_child_die (origin, die);
29856 }
29857 }
29858 }
29859 }
29860
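/* Illustrative, compiled-out sketch (not part of dwarf2out.c): the loop
   above is a worklist drain.  Because the handler itself may push fresh
   nodes onto the very list being drained (get_context_die can create new
   limbo DIEs), the head is re-read on every iteration instead of iterating
   over a snapshot of the list.  All sketch_* names are hypothetical.  */
#if 0
struct sketch_node
{
  struct sketch_node *next;
  int payload;
};

static struct sketch_node *sketch_worklist;
static struct sketch_node sketch_extra = { 0, 42 };
static int sketch_extra_pushed;

/* Handle one payload; returning a node models a handler that enqueues
   more work while the drain is running.  */
static struct sketch_node *
sketch_handle (int payload)
{
  (void) payload;
  if (!sketch_extra_pushed)
    {
      sketch_extra_pushed = 1;
      return &sketch_extra;
    }
  return 0;
}

static void
sketch_drain (void)
{
  struct sketch_node *node;
  while ((node = sketch_worklist))
    {
      sketch_worklist = node->next;     /* Detach before handling.  */
      struct sketch_node *extra = sketch_handle (node->payload);
      if (extra)
        {
          /* Work created during handling is seen on a later iteration.  */
          extra->next = sketch_worklist;
          sketch_worklist = extra;
        }
    }
}
#endif
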
29861 /* Reset DIEs so we can output them again. */
29862
29863 static void
29864 reset_dies (dw_die_ref die)
29865 {
29866 dw_die_ref c;
29867
29868 /* Remove stuff we re-generate. */
29869 die->die_mark = 0;
29870 die->die_offset = 0;
29871 die->die_abbrev = 0;
29872 remove_AT (die, DW_AT_sibling);
29873
29874 FOR_EACH_CHILD (die, c, reset_dies (c));
29875 }
29876
29877 /* Output stuff that dwarf requires at the end of every file,
29878 and generate the DWARF-2 debugging info. */
29879
29880 static void
29881 dwarf2out_finish (const char *)
29882 {
29883 comdat_type_node *ctnode;
29884 dw_die_ref main_comp_unit_die;
29885 unsigned char checksum[16];
29886 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
29887
29888 /* Flush out any latecomers to the limbo party. */
29889 flush_limbo_die_list ();
29890
29891 if (flag_checking)
29892 {
29893 verify_die (comp_unit_die ());
29894 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29895 verify_die (node->die);
29896 }
29897
29898 /* We shouldn't have any symbols with delayed asm names for
29899 DIEs generated after early finish. */
29900 gcc_assert (deferred_asm_name == NULL);
29901
29902 gen_remaining_tmpl_value_param_die_attribute ();
29903
29904 if (flag_generate_lto || flag_generate_offload)
29905 {
29906 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
29907
29908 /* Prune stuff so that dwarf2out_finish runs successfully
29909 for the fat part of the object. */
29910 reset_dies (comp_unit_die ());
29911 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29912 reset_dies (node->die);
29913
29914 hash_table<comdat_type_hasher> comdat_type_table (100);
29915 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29916 {
29917 comdat_type_node **slot
29918 = comdat_type_table.find_slot (ctnode, INSERT);
29919
29920 /* Don't reset types twice. */
29921 if (*slot != HTAB_EMPTY_ENTRY)
29922 continue;
29923
29924 /* Reset the root DIE of this comdat type unit so that it can
29925 be output again by dwarf2out_finish for the fat part of
29926 the object. */
29927 if (debug_info_level >= DINFO_LEVEL_TERSE)
29928 reset_dies (ctnode->root_die);
29929
29930 *slot = ctnode;
29931 }
29932
29933 /* Reset die CU symbol so we don't output it twice. */
29934 comp_unit_die ()->die_id.die_symbol = NULL;
29935
29936 /* Remove DW_AT_macro from the early output. */
29937 if (have_macinfo)
29938 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
29939
29940 /* Remove indirect string decisions. */
29941 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
29942 }
29943
29944 #if ENABLE_ASSERT_CHECKING
29945 {
29946 dw_die_ref die = comp_unit_die (), c;
29947 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
29948 }
29949 #endif
29950 resolve_addr (comp_unit_die ());
29951 move_marked_base_types ();
29952
29953 /* Initialize sections and labels used for actual assembler output. */
29954 unsigned generation = init_sections_and_labels (false);
29955
29956 /* Traverse the DIE's and add sibling attributes to those DIE's that
29957 have children. */
29958 add_sibling_attributes (comp_unit_die ());
29959 limbo_die_node *node;
29960 for (node = cu_die_list; node; node = node->next)
29961 add_sibling_attributes (node->die);
29962 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29963 add_sibling_attributes (ctnode->root_die);
29964
29965 /* When splitting DWARF info, we put some attributes in the
29966 skeleton compile_unit DIE that remains in the .o, while
29967 most attributes go in the DWO compile_unit_die. */
29968 if (dwarf_split_debug_info)
29969 {
29970 limbo_die_node *cu;
29971 main_comp_unit_die = gen_compile_unit_die (NULL);
29972 if (dwarf_version >= 5)
29973 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
29974 cu = limbo_die_list;
29975 gcc_assert (cu->die == main_comp_unit_die);
29976 limbo_die_list = limbo_die_list->next;
29977 cu->next = cu_die_list;
29978 cu_die_list = cu;
29979 }
29980 else
29981 main_comp_unit_die = comp_unit_die ();
29982
29983 /* Output a terminator label for the .text section. */
29984 switch_to_section (text_section);
29985 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
29986 if (cold_text_section)
29987 {
29988 switch_to_section (cold_text_section);
29989 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
29990 }
29991
29992 /* We can only use the low/high_pc attributes if all of the code was
29993 in .text. */
29994 if (!have_multiple_function_sections
29995 || (dwarf_version < 3 && dwarf_strict))
29996 {
29997 /* Don't add if the CU has no associated code. */
29998 if (text_section_used)
29999 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
30000 text_end_label, true);
30001 }
30002 else
30003 {
30004 unsigned fde_idx;
30005 dw_fde_ref fde;
30006 bool range_list_added = false;
30007
30008 if (text_section_used)
30009 add_ranges_by_labels (main_comp_unit_die, text_section_label,
30010 text_end_label, &range_list_added, true);
30011 if (cold_text_section_used)
30012 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
30013 cold_end_label, &range_list_added, true);
30014
30015 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
30016 {
30017 if (DECL_IGNORED_P (fde->decl))
30018 continue;
30019 if (!fde->in_std_section)
30020 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
30021 fde->dw_fde_end, &range_list_added,
30022 true);
30023 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
30024 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
30025 fde->dw_fde_second_end, &range_list_added,
30026 true);
30027 }
30028
30029 if (range_list_added)
30030 {
30031 /* We need to give .debug_loc and .debug_ranges an appropriate
30032 "base address". Use zero so that these addresses become
30033 absolute. Historically, we've emitted the unexpected
30034 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
30035 Emit both to give time for other tools to adapt. */
30036 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
30037 if (! dwarf_strict && dwarf_version < 4)
30038 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
30039
30040 add_ranges (NULL);
30041 }
30042 }
30043
30044 /* AIX Assembler inserts the length, so adjust the reference to match the
30045 offset expected by debuggers. */
30046 strcpy (dl_section_ref, debug_line_section_label);
30047 if (XCOFF_DEBUGGING_INFO)
30048 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
30049
30050 if (debug_info_level >= DINFO_LEVEL_TERSE)
30051 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
30052 dl_section_ref);
30053
30054 if (have_macinfo)
30055 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30056 macinfo_section_label);
30057
30058 if (dwarf_split_debug_info)
30059 {
30060 if (have_location_lists)
30061 {
30062 if (dwarf_version >= 5)
30063 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
30064 loc_section_label);
30065 /* optimize_location_lists calculates the size of the lists,
30066 so index them first, and assign indices to the entries.
30067 Although optimize_location_lists will remove entries from
30068 the table, it only does so for duplicates, and therefore
30069 only reduces ref_counts to 1. */
30070 index_location_lists (comp_unit_die ());
30071 }
30072
30073 if (addr_index_table != NULL)
30074 {
30075 unsigned int index = 0;
30076 addr_index_table
30077 ->traverse_noresize<unsigned int *, index_addr_table_entry>
30078 (&index);
30079 }
30080 }
30081
30082 loc_list_idx = 0;
30083 if (have_location_lists)
30084 {
30085 optimize_location_lists (comp_unit_die ());
30086 /* And finally assign indexes to the entries for -gsplit-dwarf. */
30087 if (dwarf_version >= 5 && dwarf_split_debug_info)
30088 assign_location_list_indexes (comp_unit_die ());
30089 }
30090
30091 save_macinfo_strings ();
30092
30093 if (dwarf_split_debug_info)
30094 {
30095 unsigned int index = 0;
30096
30097 /* Add attributes common to skeleton compile_units and
30098 type_units. Because these attributes include strings, it
30099 must be done before freezing the string table. Top-level
30100 skeleton die attrs are added when the skeleton type unit is
30101 created, so ensure it is created by this point. */
30102 add_top_level_skeleton_die_attrs (main_comp_unit_die);
30103 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30104 }
30105
30106 /* Output all of the compilation units. We put the main one last so that
30107 the offsets are available to output_pubnames. */
30108 for (node = cu_die_list; node; node = node->next)
30109 output_comp_unit (node->die, 0, NULL);
30110
30111 hash_table<comdat_type_hasher> comdat_type_table (100);
30112 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30113 {
30114 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30115
30116 /* Don't output duplicate types. */
30117 if (*slot != HTAB_EMPTY_ENTRY)
30118 continue;
30119
30120 /* Add a pointer to the line table for the main compilation unit
30121 so that the debugger can make sense of DW_AT_decl_file
30122 attributes. */
30123 if (debug_info_level >= DINFO_LEVEL_TERSE)
30124 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30125 (!dwarf_split_debug_info
30126 ? dl_section_ref
30127 : debug_skeleton_line_section_label));
30128
30129 output_comdat_type_unit (ctnode);
30130 *slot = ctnode;
30131 }
30132
30133 if (dwarf_split_debug_info)
30134 {
30135 int mark;
30136 struct md5_ctx ctx;
30137
30138 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
30139 index_rnglists ();
30140
30141 /* Compute a checksum of the comp_unit to use as the dwo_id. */
30142 md5_init_ctx (&ctx);
30143 mark = 0;
30144 die_checksum (comp_unit_die (), &ctx, &mark);
30145 unmark_all_dies (comp_unit_die ());
30146 md5_finish_ctx (&ctx, checksum);
30147
30148 if (dwarf_version < 5)
30149 {
30150 /* Use the first 8 bytes of the checksum as the dwo_id,
30151 and add it to both comp-unit DIEs. */
30152 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
30153 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
30154 }
30155
30156 /* Add the base offset of the ranges table to the skeleton
30157 comp-unit DIE. */
30158 if (!vec_safe_is_empty (ranges_table))
30159 {
30160 if (dwarf_version >= 5)
30161 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
30162 ranges_base_label);
30163 else
30164 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
30165 ranges_section_label);
30166 }
30167
30168 switch_to_section (debug_addr_section);
30169 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
30170 output_addr_table ();
30171 }
30172
30173 /* Output the main compilation unit if non-empty or if .debug_macinfo
30174 or .debug_macro will be emitted. */
30175 output_comp_unit (comp_unit_die (), have_macinfo,
30176 dwarf_split_debug_info ? checksum : NULL);
30177
30178 if (dwarf_split_debug_info && info_section_emitted)
30179 output_skeleton_debug_sections (main_comp_unit_die, checksum);
30180
30181 /* Output the abbreviation table. */
30182 if (vec_safe_length (abbrev_die_table) != 1)
30183 {
30184 switch_to_section (debug_abbrev_section);
30185 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30186 output_abbrev_section ();
30187 }
30188
30189 /* Output location list section if necessary. */
30190 if (have_location_lists)
30191 {
30192 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
30193 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
30194 /* Output the location lists info. */
30195 switch_to_section (debug_loc_section);
30196 if (dwarf_version >= 5)
30197 {
30198 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
30199 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
30200 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
30201 dw2_asm_output_data (4, 0xffffffff,
30202 "Initial length escape value indicating "
30203 "64-bit DWARF extension");
30204 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
30205 "Length of Location Lists");
30206 ASM_OUTPUT_LABEL (asm_out_file, l1);
30207 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
30208 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
30209 dw2_asm_output_data (1, 0, "Segment Size");
30210 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
30211 "Offset Entry Count");
30212 }
30213 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
30214 if (dwarf_version >= 5 && dwarf_split_debug_info)
30215 {
30216 unsigned int save_loc_list_idx = loc_list_idx;
30217 loc_list_idx = 0;
30218 output_loclists_offsets (comp_unit_die ());
30219 gcc_assert (save_loc_list_idx == loc_list_idx);
30220 }
30221 output_location_lists (comp_unit_die ());
30222 if (dwarf_version >= 5)
30223 ASM_OUTPUT_LABEL (asm_out_file, l2);
30224 }
30225
30226 output_pubtables ();
30227
30228 /* Output the address range information if a CU (.debug_info section)
30229 was emitted. We output an empty table even if we had no functions
30230 to put in it. This is because the consumer has no way to tell the
30231 difference between an empty table that we omitted and failure to
30232 generate a table that would have contained data. */
30233 if (info_section_emitted)
30234 {
30235 switch_to_section (debug_aranges_section);
30236 output_aranges ();
30237 }
30238
30239 /* Output ranges section if necessary. */
30240 if (!vec_safe_is_empty (ranges_table))
30241 {
30242 if (dwarf_version >= 5)
30243 output_rnglists (generation);
30244 else
30245 output_ranges ();
30246 }
30247
30248 /* Have to end the macro section. */
30249 if (have_macinfo)
30250 {
30251 switch_to_section (debug_macinfo_section);
30252 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30253 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
30254 : debug_skeleton_line_section_label, false);
30255 dw2_asm_output_data (1, 0, "End compilation unit");
30256 }
30257
30258 /* Output the source line correspondence table. We must do this
30259 even if there is no line information. Otherwise, on an empty
30260 translation unit, we will generate a present, but empty,
30261 .debug_info section. IRIX 6.5 `nm' will then complain when
30262 examining the file. This is done late so that any filenames
30263 used by the debug_info section are marked as 'used'. */
30264 switch_to_section (debug_line_section);
30265 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
30266 if (! DWARF2_ASM_LINE_DEBUG_INFO)
30267 output_line_info (false);
30268
30269 if (dwarf_split_debug_info && info_section_emitted)
30270 {
30271 switch_to_section (debug_skeleton_line_section);
30272 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30273 output_line_info (true);
30274 }
30275
30276 /* If we emitted any indirect strings, output the string table too. */
30277 if (debug_str_hash || skeleton_debug_str_hash)
30278 output_indirect_strings ();
30279 if (debug_line_str_hash)
30280 {
30281 switch_to_section (debug_line_str_section);
30282 const enum dwarf_form form = DW_FORM_line_strp;
30283 debug_line_str_hash->traverse<enum dwarf_form,
30284 output_indirect_string> (form);
30285 }
30286 }
30287
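/* Illustrative, compiled-out sketch (not part of dwarf2out.c): for
   -gsplit-dwarf, dwarf2out_finish above checksums the entire comp-unit DIE
   tree (die_checksum feeding libiberty's MD5) and, before DWARF 5, stores
   the first 8 bytes of the digest as DW_AT_GNU_dwo_id in both the skeleton
   CU and the .dwo CU so consumers can pair the two.  The sketch below keeps
   only the shape -- walk the tree, fold every node into one digest, keep
   8 bytes as the unit id -- using a 64-bit FNV-1a in place of MD5; all
   sketch_* names are hypothetical.  */
#if 0
#include <stdint.h>
#include <stddef.h>

struct sketch_die
{
  unsigned tag;
  struct sketch_die *child, *sibling;
};

/* Fold LEN bytes at DATA into the running digest *H (FNV-1a).  */
static void
sketch_fold (uint64_t *h, const void *data, size_t len)
{
  const unsigned char *p = (const unsigned char *) data;
  for (size_t i = 0; i < len; i++)
    *h = (*h ^ p[i]) * 1099511628211ull;
}

/* Preorder walk folding each DIE's tag into the digest.  The real checksum
   also covers attribute values and uses die marks to cope with reference
   cycles; only the traversal shape is kept here.  */
static void
sketch_checksum_die (const struct sketch_die *die, uint64_t *h)
{
  if (!die)
    return;
  sketch_fold (h, &die->tag, sizeof die->tag);
  sketch_checksum_die (die->child, h);
  sketch_checksum_die (die->sibling, h);
}

/* Return an 8-byte unit id for the tree rooted at ROOT.  */
static uint64_t
sketch_dwo_id (const struct sketch_die *root)
{
  uint64_t h = 14695981039346656037ull;
  sketch_checksum_die (root, &h);
  return h;
}
#endif
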
30288 /* Returns a hash value for X (which really is a variable_value_struct). */
30289
30290 inline hashval_t
30291 variable_value_hasher::hash (variable_value_struct *x)
30292 {
30293 return (hashval_t) x->decl_id;
30294 }
30295
30296 /* Return nonzero if decl_id of variable_value_struct X is the same as
30297 UID of decl Y. */
30298
30299 inline bool
30300 variable_value_hasher::equal (variable_value_struct *x, tree y)
30301 {
30302 return x->decl_id == DECL_UID (y);
30303 }
30304
30305 /* Helper function for resolve_variable_value, handle
30306 DW_OP_GNU_variable_value in one location expression.
30307 Return true if exprloc has been changed into loclist. */
30308
30309 static bool
30310 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30311 {
30312 dw_loc_descr_ref next;
30313 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
30314 {
30315 next = loc->dw_loc_next;
30316 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
30317 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
30318 continue;
30319
30320 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30321 if (DECL_CONTEXT (decl) != current_function_decl)
30322 continue;
30323
30324 dw_die_ref ref = lookup_decl_die (decl);
30325 if (ref)
30326 {
30327 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30328 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30329 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30330 continue;
30331 }
30332 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
30333 if (l == NULL)
30334 continue;
30335 if (l->dw_loc_next)
30336 {
30337 if (AT_class (a) != dw_val_class_loc)
30338 continue;
30339 switch (a->dw_attr)
30340 {
30341 /* The following attributes allow both exprloc and loclist
30342 classes, so we can change them into a loclist. */
30343 case DW_AT_location:
30344 case DW_AT_string_length:
30345 case DW_AT_return_addr:
30346 case DW_AT_data_member_location:
30347 case DW_AT_frame_base:
30348 case DW_AT_segment:
30349 case DW_AT_static_link:
30350 case DW_AT_use_location:
30351 case DW_AT_vtable_elem_location:
30352 if (prev)
30353 {
30354 prev->dw_loc_next = NULL;
30355 prepend_loc_descr_to_each (l, AT_loc (a));
30356 }
30357 if (next)
30358 add_loc_descr_to_each (l, next);
30359 a->dw_attr_val.val_class = dw_val_class_loc_list;
30360 a->dw_attr_val.val_entry = NULL;
30361 a->dw_attr_val.v.val_loc_list = l;
30362 have_location_lists = true;
30363 return true;
30364 /* The following attributes allow both exprloc and reference,
30365 so if the whole expression is DW_OP_GNU_variable_value alone
30366 we could transform it into a reference. */
30367 case DW_AT_byte_size:
30368 case DW_AT_bit_size:
30369 case DW_AT_lower_bound:
30370 case DW_AT_upper_bound:
30371 case DW_AT_bit_stride:
30372 case DW_AT_count:
30373 case DW_AT_allocated:
30374 case DW_AT_associated:
30375 case DW_AT_byte_stride:
30376 if (prev == NULL && next == NULL)
30377 break;
30378 /* FALLTHRU */
30379 default:
30380 if (dwarf_strict)
30381 continue;
30382 break;
30383 }
30384 /* Create DW_TAG_variable that we can refer to. */
30385 gen_decl_die (decl, NULL_TREE, NULL,
30386 lookup_decl_die (current_function_decl));
30387 ref = lookup_decl_die (decl);
30388 if (ref)
30389 {
30390 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30391 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30392 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30393 }
30394 continue;
30395 }
30396 if (prev)
30397 {
30398 prev->dw_loc_next = l->expr;
30399 add_loc_descr (&prev->dw_loc_next, next);
30400 free_loc_descr (loc, NULL);
30401 next = prev->dw_loc_next;
30402 }
30403 else
30404 {
30405 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
30406 add_loc_descr (&loc, next);
30407 next = loc;
30408 }
30409 loc = prev;
30410 }
30411 return false;
30412 }
30413
30414 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
30415
30416 static void
30417 resolve_variable_value (dw_die_ref die)
30418 {
30419 dw_attr_node *a;
30420 dw_loc_list_ref loc;
30421 unsigned ix;
30422
30423 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30424 switch (AT_class (a))
30425 {
30426 case dw_val_class_loc:
30427 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
30428 break;
30429 /* FALLTHRU */
30430 case dw_val_class_loc_list:
30431 loc = AT_loc_list (a);
30432 gcc_assert (loc);
30433 for (; loc; loc = loc->dw_loc_next)
30434 resolve_variable_value_in_expr (a, loc->expr);
30435 break;
30436 default:
30437 break;
30438 }
30439 }
30440
30441 /* Attempt to optimize DW_OP_GNU_variable_value referring to
30442 temporaries in the current function. */
30443
30444 static void
30445 resolve_variable_values (void)
30446 {
30447 if (!variable_value_hash || !current_function_decl)
30448 return;
30449
30450 struct variable_value_struct *node
30451 = variable_value_hash->find_with_hash (current_function_decl,
30452 DECL_UID (current_function_decl));
30453
30454 if (node == NULL)
30455 return;
30456
30457 unsigned int i;
30458 dw_die_ref die;
30459 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
30460 resolve_variable_value (die);
30461 }
30462
30463 /* Helper function for note_variable_value, handle one location
30464 expression. */
30465
30466 static void
30467 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
30468 {
30469 for (; loc; loc = loc->dw_loc_next)
30470 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
30471 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30472 {
30473 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30474 dw_die_ref ref = lookup_decl_die (decl);
30475 if (! ref && (flag_generate_lto || flag_generate_offload))
30476 {
30477 /* ??? This is somewhat of a hack, because we do not create
30478 DIEs for variables not in BLOCK trees early, yet when
30479 generating early LTO output we need the dw_val_class_decl_ref
30480 to be fully resolved. For fat LTO objects we would also like
30481 to undo this after the LTO dwarf output. */
30482 gcc_assert (DECL_CONTEXT (decl));
30483 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
30484 gcc_assert (ctx != NULL);
30485 gen_decl_die (decl, NULL_TREE, NULL, ctx);
30486 ref = lookup_decl_die (decl);
30487 gcc_assert (ref != NULL);
30488 }
30489 if (ref)
30490 {
30491 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30492 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30493 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30494 continue;
30495 }
30496 if (VAR_P (decl)
30497 && DECL_CONTEXT (decl)
30498 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
30499 && lookup_decl_die (DECL_CONTEXT (decl)))
30500 {
30501 if (!variable_value_hash)
30502 variable_value_hash
30503 = hash_table<variable_value_hasher>::create_ggc (10);
30504
30505 tree fndecl = DECL_CONTEXT (decl);
30506 struct variable_value_struct *node;
30507 struct variable_value_struct **slot
30508 = variable_value_hash->find_slot_with_hash (fndecl,
30509 DECL_UID (fndecl),
30510 INSERT);
30511 if (*slot == NULL)
30512 {
30513 node = ggc_cleared_alloc<variable_value_struct> ();
30514 node->decl_id = DECL_UID (fndecl);
30515 *slot = node;
30516 }
30517 else
30518 node = *slot;
30519
30520 vec_safe_push (node->dies, die);
30521 }
30522 }
30523 }
30524
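/* Illustrative, compiled-out sketch (not part of dwarf2out.c): the
   registration above is a group-by-key pattern.  Every DIE that still
   carries an unresolved DW_OP_GNU_variable_value is appended to a
   per-function bucket keyed by DECL_UID, creating the bucket on first use,
   so that resolve_variable_values can later revisit exactly those DIEs when
   the owning function is expanded.  The fixed-size arrays below stand in
   for the growing GC vectors of the real code, and all sketch_* names are
   hypothetical.  */
#if 0
#define SKETCH_FUNC_BUCKETS 64
#define SKETCH_DIES_PER_FUNC 32

struct sketch_bucket
{
  unsigned func_uid;                    /* Key: stand-in for DECL_UID.  */
  int n_dies;
  const void *dies[SKETCH_DIES_PER_FUNC];
  struct sketch_bucket *next;
};

static struct sketch_bucket *sketch_func_table[SKETCH_FUNC_BUCKETS];

/* Record DIE under FUNC_UID, creating the bucket on first use (STORAGE
   provides the memory for a new bucket).  Returns 0 only if the bucket's
   fixed array is already full.  */
static int
sketch_note_die (struct sketch_bucket *storage, unsigned func_uid,
                 const void *die)
{
  struct sketch_bucket **slot
    = &sketch_func_table[func_uid % SKETCH_FUNC_BUCKETS];
  for (; *slot; slot = &(*slot)->next)
    if ((*slot)->func_uid == func_uid)
      break;
  if (*slot == 0)
    {
      storage->func_uid = func_uid;     /* First DIE for this function.  */
      storage->n_dies = 0;
      storage->next = 0;
      *slot = storage;
    }
  if ((*slot)->n_dies == SKETCH_DIES_PER_FUNC)
    return 0;
  (*slot)->dies[(*slot)->n_dies++] = die;
  return 1;
}
#endif
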
30525 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
30526 with dw_val_class_decl_ref operand. */
30527
30528 static void
30529 note_variable_value (dw_die_ref die)
30530 {
30531 dw_die_ref c;
30532 dw_attr_node *a;
30533 dw_loc_list_ref loc;
30534 unsigned ix;
30535
30536 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30537 switch (AT_class (a))
30538 {
30539 case dw_val_class_loc_list:
30540 loc = AT_loc_list (a);
30541 gcc_assert (loc);
30542 if (!loc->noted_variable_value)
30543 {
30544 loc->noted_variable_value = 1;
30545 for (; loc; loc = loc->dw_loc_next)
30546 note_variable_value_in_expr (die, loc->expr);
30547 }
30548 break;
30549 case dw_val_class_loc:
30550 note_variable_value_in_expr (die, AT_loc (a));
30551 break;
30552 default:
30553 break;
30554 }
30555
30556 /* Mark children. */
30557 FOR_EACH_CHILD (die, c, note_variable_value (c));
30558 }
30559
30560 /* Perform any cleanups needed after the early debug generation pass
30561 has run. */
30562
30563 static void
30564 dwarf2out_early_finish (const char *filename)
30565 {
30566 set_early_dwarf s;
30567
30568 /* PCH might result in the DW_AT_producer string being restored from
30569 the header compilation, so always fill it with an empty string
30570 initially and overwrite it only here. */
30571 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
30572 producer_string = gen_producer_string ();
30573 producer->dw_attr_val.v.val_str->refcount--;
30574 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
30575
30576 /* Add the name for the main input file now. We delayed this from
30577 dwarf2out_init to avoid complications with PCH. */
30578 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
30579 add_comp_dir_attribute (comp_unit_die ());
30580
30581 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
30582 DW_AT_comp_dir into .debug_line_str section. */
30583 if (!DWARF2_ASM_LINE_DEBUG_INFO
30584 && dwarf_version >= 5
30585 && DWARF5_USE_DEBUG_LINE_STR)
30586 {
30587 for (int i = 0; i < 2; i++)
30588 {
30589 dw_attr_node *a = get_AT (comp_unit_die (),
30590 i ? DW_AT_comp_dir : DW_AT_name);
30591 if (a == NULL
30592 || AT_class (a) != dw_val_class_str
30593 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
30594 continue;
30595
30596 if (! debug_line_str_hash)
30597 debug_line_str_hash
30598 = hash_table<indirect_string_hasher>::create_ggc (10);
30599
30600 struct indirect_string_node *node
30601 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
30602 set_indirect_string (node);
30603 node->form = DW_FORM_line_strp;
30604 a->dw_attr_val.v.val_str->refcount--;
30605 a->dw_attr_val.v.val_str = node;
30606 }
30607 }
30608
30609 /* With LTO, early dwarf was really finished at compile time, so make
30610 sure to adjust the phase after annotating the LTRANS CU DIE. */
30611 if (in_lto_p)
30612 {
30613 early_dwarf_finished = true;
30614 return;
30615 }
30616
30617 /* Walk through the list of incomplete types again, trying once more to
30618 emit full debugging info for them. */
30619 retry_incomplete_types ();
30620
30621 /* The point here is to flush out the limbo list so that it is empty
30622 and we don't need to stream it for LTO. */
30623 flush_limbo_die_list ();
30624
30625 gen_scheduled_generic_parms_dies ();
30626 gen_remaining_tmpl_value_param_die_attribute ();
30627
30628 /* Add DW_AT_linkage_name for all deferred DIEs. */
30629 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
30630 {
30631 tree decl = node->created_for;
30632 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
30633 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
30634 ended up in deferred_asm_name before we knew it was
30635 constant and never written to disk. */
30636 && DECL_ASSEMBLER_NAME (decl))
30637 {
30638 add_linkage_attr (node->die, decl);
30639 move_linkage_attr (node->die);
30640 }
30641 }
30642 deferred_asm_name = NULL;
30643
30644 if (flag_eliminate_unused_debug_types)
30645 prune_unused_types ();
30646
30647 /* Generate separate COMDAT sections for type DIEs. */
30648 if (use_debug_types)
30649 {
30650 break_out_comdat_types (comp_unit_die ());
30651
30652 /* Each new type_unit DIE was added to the limbo die list when created.
30653 Since these have all been added to comdat_type_list, clear the
30654 limbo die list. */
30655 limbo_die_list = NULL;
30656
30657 /* For each new comdat type unit, copy declarations for incomplete
30658 types to make the new unit self-contained (i.e., no direct
30659 references to the main compile unit). */
30660 for (comdat_type_node *ctnode = comdat_type_list;
30661 ctnode != NULL; ctnode = ctnode->next)
30662 copy_decls_for_unworthy_types (ctnode->root_die);
30663 copy_decls_for_unworthy_types (comp_unit_die ());
30664
30665 /* In the process of copying declarations from one unit to another,
30666 we may have left some declarations behind that are no longer
30667 referenced. Prune them. */
30668 prune_unused_types ();
30669 }
30670
30671 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
30672 with dw_val_class_decl_ref operand. */
30673 note_variable_value (comp_unit_die ());
30674 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30675 note_variable_value (node->die);
30676 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
30677 ctnode = ctnode->next)
30678 note_variable_value (ctnode->root_die);
30679 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30680 note_variable_value (node->die);
30681
30682 /* The AT_pubnames attribute needs to go in all skeleton dies, including
30683 both the main_cu and all skeleton TUs. Making this call unconditional
30684 would end up either adding a second copy of the AT_pubnames attribute, or
30685 requiring a special case in add_top_level_skeleton_die_attrs. */
30686 if (!dwarf_split_debug_info)
30687 add_AT_pubnames (comp_unit_die ());
30688
30689 /* The early debug phase is now finished. */
30690 early_dwarf_finished = true;
30691
30692 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
30693 if (!flag_generate_lto && !flag_generate_offload)
30694 return;
30695
30696 /* Now that we are going to output for LTO, initialize sections and
30697 labels to the LTO variants. We don't need a random-seed postfix
30698 like other LTO sections do, as linking the LTO debug sections into
30699 one in a partial link is fine. */
30700 init_sections_and_labels (true);
30701
30702 /* The output below is modeled after dwarf2out_finish with all
30703 location related output removed and some LTO specific changes.
30704 Some refactoring might make both smaller and easier to match up. */
30705
30706 /* Traverse the DIE's and add sibling attributes to those DIE's
30707 that have children. */
30708 add_sibling_attributes (comp_unit_die ());
30709 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30710 add_sibling_attributes (node->die);
30711 for (comdat_type_node *ctnode = comdat_type_list;
30712 ctnode != NULL; ctnode = ctnode->next)
30713 add_sibling_attributes (ctnode->root_die);
30714
30715 if (have_macinfo)
30716 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30717 macinfo_section_label);
30718
30719 save_macinfo_strings ();
30720
30721 /* Output all of the compilation units. We put the main one last so that
30722 the offsets are available to output_pubnames. */
30723 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30724 output_comp_unit (node->die, 0, NULL);
30725
30726 hash_table<comdat_type_hasher> comdat_type_table (100);
30727 for (comdat_type_node *ctnode = comdat_type_list;
30728 ctnode != NULL; ctnode = ctnode->next)
30729 {
30730 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30731
30732 /* Don't output duplicate types. */
30733 if (*slot != HTAB_EMPTY_ENTRY)
30734 continue;
30735
30736 /* Add a pointer to the line table for the main compilation unit
30737 so that the debugger can make sense of DW_AT_decl_file
30738 attributes. */
30739 if (debug_info_level >= DINFO_LEVEL_TERSE)
30740 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30741 (!dwarf_split_debug_info
30742 ? debug_line_section_label
30743 : debug_skeleton_line_section_label));
30744
30745 output_comdat_type_unit (ctnode);
30746 *slot = ctnode;
30747 }
30748
30749 /* Attach a unique symbol to the main debuginfo section. */
30750 compute_comp_unit_symbol (comp_unit_die ());
30751
30752 /* Output the main compilation unit. We always need it if only for
30753 the CU symbol. */
30754 output_comp_unit (comp_unit_die (), true, NULL);
30755
30756 /* Output the abbreviation table. */
30757 if (vec_safe_length (abbrev_die_table) != 1)
30758 {
30759 switch_to_section (debug_abbrev_section);
30760 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30761 output_abbrev_section ();
30762 }
30763
30764 /* Have to end the macro section. */
30765 if (have_macinfo)
30766 {
30767 /* We have to save macinfo state if we need to output it again
30768 for the FAT part of the object. */
30769 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
30770 if (flag_fat_lto_objects)
30771 macinfo_table = macinfo_table->copy ();
30772
30773 switch_to_section (debug_macinfo_section);
30774 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30775 output_macinfo (debug_skeleton_line_section_label, true);
30776 dw2_asm_output_data (1, 0, "End compilation unit");
30777
30778 /* Emit a skeleton debug_line section. */
30779 switch_to_section (debug_skeleton_line_section);
30780 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30781 output_line_info (true);
30782
30783 if (flag_fat_lto_objects)
30784 {
30785 vec_free (macinfo_table);
30786 macinfo_table = saved_macinfo_table;
30787 }
30788 }
30789
30790
30791 /* If we emitted any indirect strings, output the string table too. */
30792 if (debug_str_hash || skeleton_debug_str_hash)
30793 output_indirect_strings ();
30794
30795 /* Switch back to the text section. */
30796 switch_to_section (text_section);
30797 }
30798
30799 /* Reset all state within dwarf2out.c so that we can rerun the compiler
30800 within the same process. For use by toplev::finalize. */
30801
30802 void
30803 dwarf2out_c_finalize (void)
30804 {
30805 last_var_location_insn = NULL;
30806 cached_next_real_insn = NULL;
30807 used_rtx_array = NULL;
30808 incomplete_types = NULL;
30809 decl_scope_table = NULL;
30810 debug_info_section = NULL;
30811 debug_skeleton_info_section = NULL;
30812 debug_abbrev_section = NULL;
30813 debug_skeleton_abbrev_section = NULL;
30814 debug_aranges_section = NULL;
30815 debug_addr_section = NULL;
30816 debug_macinfo_section = NULL;
30817 debug_line_section = NULL;
30818 debug_skeleton_line_section = NULL;
30819 debug_loc_section = NULL;
30820 debug_pubnames_section = NULL;
30821 debug_pubtypes_section = NULL;
30822 debug_str_section = NULL;
30823 debug_line_str_section = NULL;
30824 debug_str_dwo_section = NULL;
30825 debug_str_offsets_section = NULL;
30826 debug_ranges_section = NULL;
30827 debug_frame_section = NULL;
30828 fde_vec = NULL;
30829 debug_str_hash = NULL;
30830 debug_line_str_hash = NULL;
30831 skeleton_debug_str_hash = NULL;
30832 dw2_string_counter = 0;
30833 have_multiple_function_sections = false;
30834 text_section_used = false;
30835 cold_text_section_used = false;
30836 cold_text_section = NULL;
30837 current_unit_personality = NULL;
30838
30839 early_dwarf = false;
30840 early_dwarf_finished = false;
30841
30842 next_die_offset = 0;
30843 single_comp_unit_die = NULL;
30844 comdat_type_list = NULL;
30845 limbo_die_list = NULL;
30846 file_table = NULL;
30847 decl_die_table = NULL;
30848 common_block_die_table = NULL;
30849 decl_loc_table = NULL;
30850 call_arg_locations = NULL;
30851 call_arg_loc_last = NULL;
30852 call_site_count = -1;
30853 tail_call_site_count = -1;
30854 cached_dw_loc_list_table = NULL;
30855 abbrev_die_table = NULL;
30856 delete dwarf_proc_stack_usage_map;
30857 dwarf_proc_stack_usage_map = NULL;
30858 line_info_label_num = 0;
30859 cur_line_info_table = NULL;
30860 text_section_line_info = NULL;
30861 cold_text_section_line_info = NULL;
30862 separate_line_info = NULL;
30863 info_section_emitted = false;
30864 pubname_table = NULL;
30865 pubtype_table = NULL;
30866 macinfo_table = NULL;
30867 ranges_table = NULL;
30868 ranges_by_label = NULL;
30869 rnglist_idx = 0;
30870 have_location_lists = false;
30871 loclabel_num = 0;
30872 poc_label_num = 0;
30873 last_emitted_file = NULL;
30874 label_num = 0;
30875 tmpl_value_parm_die_table = NULL;
30876 generic_type_instances = NULL;
30877 frame_pointer_fb_offset = 0;
30878 frame_pointer_fb_offset_valid = false;
30879 base_types.release ();
30880 XDELETEVEC (producer_string);
30881 producer_string = NULL;
30882 }
30883
30884 #include "gt-dwarf2out.h"