1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
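/* Editorial illustration (assumed target, not part of GCC): on x86_64,
   for a prologue of "push %rbp; mov %rsp, %rbp", the CFA rule evolves
   roughly as a readelf-style dump would show:

     DW_CFA_def_cfa: r7 (rsp) ofs 8       at entry, CFA = rsp + 8
     DW_CFA_def_cfa_offset: 16            after the push
     DW_CFA_def_cfa_register: r6 (rbp)    after the mov

   Only the (register, offset) pair changes; the CFA value itself is
   fixed for the lifetime of the frame.  */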
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98
99 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
100 int, bool);
101 static rtx_insn *last_var_location_insn;
102 static rtx_insn *cached_next_real_insn;
103 static void dwarf2out_decl (tree);
104
105 #ifndef XCOFF_DEBUGGING_INFO
106 #define XCOFF_DEBUGGING_INFO 0
107 #endif
108
109 #ifndef HAVE_XCOFF_DWARF_EXTRAS
110 #define HAVE_XCOFF_DWARF_EXTRAS 0
111 #endif
112
113 #ifdef VMS_DEBUGGING_INFO
114 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
115
116 /* Define this macro to be a nonzero value if the directory specifications
117 which are output in the debug info should end with a separator. */
118 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
119 /* Define this macro to evaluate to a nonzero value if GCC should refrain
120 from generating indirect strings in DWARF2 debug information, for instance
121 if your target is stuck with an old version of GDB that is unable to
122 process them properly or uses VMS Debug. */
123 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
124 #else
125 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
126 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
127 #endif
128
129 /* ??? Poison these here until it can be done generically. They've been
130 totally replaced in this file; make sure it stays that way. */
131 #undef DWARF2_UNWIND_INFO
132 #undef DWARF2_FRAME_INFO
133 #if (GCC_VERSION >= 3000)
134 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
135 #endif
136
137 /* The size of the target's pointer type. */
138 #ifndef PTR_SIZE
139 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
140 #endif
141
142 /* Array of RTXes referenced by the debugging information, which therefore
143 must be kept around forever. */
144 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
145
146 /* A pointer to the base of a list of incomplete types which might be
147 completed at some later time. incomplete_types needs to be a
148 vec<tree, va_gc> * because we want to tell the garbage collector about
149 it. */
150 static GTY(()) vec<tree, va_gc> *incomplete_types;
151
152 /* A pointer to the base of a table of references to declaration
153 scopes. This table is a display which tracks the nesting
154 of declaration scopes at the current scope and containing
155 scopes. This table is used to find the proper place to
156 define type declaration DIEs. */
157 static GTY(()) vec<tree, va_gc> *decl_scope_table;
158
159 /* Pointers to various DWARF2 sections. */
160 static GTY(()) section *debug_info_section;
161 static GTY(()) section *debug_skeleton_info_section;
162 static GTY(()) section *debug_abbrev_section;
163 static GTY(()) section *debug_skeleton_abbrev_section;
164 static GTY(()) section *debug_aranges_section;
165 static GTY(()) section *debug_addr_section;
166 static GTY(()) section *debug_macinfo_section;
167 static const char *debug_macinfo_section_name;
168 static unsigned macinfo_label_base = 1;
169 static GTY(()) section *debug_line_section;
170 static GTY(()) section *debug_skeleton_line_section;
171 static GTY(()) section *debug_loc_section;
172 static GTY(()) section *debug_pubnames_section;
173 static GTY(()) section *debug_pubtypes_section;
174 static GTY(()) section *debug_str_section;
175 static GTY(()) section *debug_line_str_section;
176 static GTY(()) section *debug_str_dwo_section;
177 static GTY(()) section *debug_str_offsets_section;
178 static GTY(()) section *debug_ranges_section;
179 static GTY(()) section *debug_frame_section;
180
181 /* Maximum size (in bytes) of an artificially generated label. */
182 #define MAX_ARTIFICIAL_LABEL_BYTES 40
183
184 /* According to the (draft) DWARF 3 specification, the initial length
185 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
186 bytes are 0xffffffff, followed by the length stored in the next 8
187 bytes.
188
189 However, the SGI/MIPS ABI uses an initial length which is equal to
190 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
191
192 #ifndef DWARF_INITIAL_LENGTH_SIZE
193 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
194 #endif
195
196 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
197 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
198 #endif
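/* Editorial illustration (not part of GCC): for a contribution whose body
   is 0x42 bytes long, the initial length field is emitted either as

     32-bit DWARF:  .4byte 0x42                     (4 bytes)
     64-bit DWARF:  .4byte 0xffffffff; .8byte 0x42  (12 bytes)

   which is where the 4-versus-12 choice above comes from.  */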
199
200 /* Round SIZE up to the nearest BOUNDARY. */
201 #define DWARF_ROUND(SIZE,BOUNDARY) \
202 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
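/* Editorial illustration (not part of GCC): DWARF_ROUND (10, 4) == 12 and
   DWARF_ROUND (12, 4) == 12, i.e. SIZE is rounded up to the next multiple
   of BOUNDARY and values already on the boundary are left alone.  */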
203
204 /* CIE identifier. */
205 #if HOST_BITS_PER_WIDE_INT >= 64
206 #define DWARF_CIE_ID \
207 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
208 #else
209 #define DWARF_CIE_ID DW_CIE_ID
210 #endif
211
212
213 /* A vector for a table that contains frame description
214 information for each routine. */
215 #define NOT_INDEXED (-1U)
216 #define NO_INDEX_ASSIGNED (-2U)
217
218 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
219
220 struct GTY((for_user)) indirect_string_node {
221 const char *str;
222 unsigned int refcount;
223 enum dwarf_form form;
224 char *label;
225 unsigned int index;
226 };
227
228 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
229 {
230 typedef const char *compare_type;
231
232 static hashval_t hash (indirect_string_node *);
233 static bool equal (indirect_string_node *, const char *);
234 };
235
236 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
237
238 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
239
240 /* With split_debug_info, both the comp_dir and dwo_name go in the
241 main object file, rather than the dwo, similar to the force_direct
242 parameter elsewhere but with additional complications:
243
244 1) The string is needed in both the main object file and the dwo.
245 That is, the comp_dir and dwo_name will appear in both places.
246
247 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
248 DW_FORM_line_strp or DW_FORM_GNU_str_index.
249
250 3) GCC chooses the form to use late, depending on the size and
251 reference count.
252
253 Rather than forcing all the debug string handling functions and
254 callers to deal with these complications, simply use a separate,
255 special-cased string table for any attribute that should go in the
256 main object file. This limits the complexity to just the places
257 that need it. */
258
259 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
260
261 static GTY(()) int dw2_string_counter;
262
263 /* True if the compilation unit places functions in more than one section. */
264 static GTY(()) bool have_multiple_function_sections = false;
265
266 /* Whether the default text and cold text sections have been used at all. */
267 static GTY(()) bool text_section_used = false;
268 static GTY(()) bool cold_text_section_used = false;
269
270 /* The default cold text section. */
271 static GTY(()) section *cold_text_section;
272
273 /* The DIE for C++14 'auto' in a function return type. */
274 static GTY(()) dw_die_ref auto_die;
275
276 /* The DIE for C++14 'decltype(auto)' in a function return type. */
277 static GTY(()) dw_die_ref decltype_auto_die;
278
279 /* Forward declarations for functions defined in this file. */
280
281 static void output_call_frame_info (int);
282 static void dwarf2out_note_section_used (void);
283
284 /* Personality decl of current unit. Used only when the assembler does
285 not support personality CFI. */
286 static GTY(()) rtx current_unit_personality;
287
288 /* Whether an eh_frame section is required. */
289 static GTY(()) bool do_eh_frame = false;
290
291 /* .debug_rnglists next index. */
292 static unsigned int rnglist_idx;
293
294 /* Data and reference forms for relocatable data. */
295 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
296 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
297
298 #ifndef DEBUG_FRAME_SECTION
299 #define DEBUG_FRAME_SECTION ".debug_frame"
300 #endif
301
302 #ifndef FUNC_BEGIN_LABEL
303 #define FUNC_BEGIN_LABEL "LFB"
304 #endif
305
306 #ifndef FUNC_END_LABEL
307 #define FUNC_END_LABEL "LFE"
308 #endif
309
310 #ifndef PROLOGUE_END_LABEL
311 #define PROLOGUE_END_LABEL "LPE"
312 #endif
313
314 #ifndef EPILOGUE_BEGIN_LABEL
315 #define EPILOGUE_BEGIN_LABEL "LEB"
316 #endif
317
318 #ifndef FRAME_BEGIN_LABEL
319 #define FRAME_BEGIN_LABEL "Lframe"
320 #endif
321 #define CIE_AFTER_SIZE_LABEL "LSCIE"
322 #define CIE_END_LABEL "LECIE"
323 #define FDE_LABEL "LSFDE"
324 #define FDE_AFTER_SIZE_LABEL "LASFDE"
325 #define FDE_END_LABEL "LEFDE"
326 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
327 #define LINE_NUMBER_END_LABEL "LELT"
328 #define LN_PROLOG_AS_LABEL "LASLTP"
329 #define LN_PROLOG_END_LABEL "LELTP"
330 #define DIE_LABEL_PREFIX "DW"
331 \f
332 /* Match the base name of a file to the base name of a compilation unit. */
333
334 static int
335 matches_main_base (const char *path)
336 {
337 /* Cache the last query. */
338 static const char *last_path = NULL;
339 static int last_match = 0;
340 if (path != last_path)
341 {
342 const char *base;
343 int length = base_of_path (path, &base);
344 last_path = path;
345 last_match = (length == main_input_baselength
346 && memcmp (base, main_input_basename, length) == 0);
347 }
348 return last_match;
349 }
350
351 #ifdef DEBUG_DEBUG_STRUCT
352
353 static int
354 dump_struct_debug (tree type, enum debug_info_usage usage,
355 enum debug_struct_file criterion, int generic,
356 int matches, int result)
357 {
358 /* Find the type name. */
359 tree type_decl = TYPE_STUB_DECL (type);
360 tree t = type_decl;
361 const char *name = 0;
362 if (TREE_CODE (t) == TYPE_DECL)
363 t = DECL_NAME (t);
364 if (t)
365 name = IDENTIFIER_POINTER (t);
366
367 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
368 criterion,
369 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
370 matches ? "bas" : "hdr",
371 generic ? "gen" : "ord",
372 usage == DINFO_USAGE_DFN ? ";" :
373 usage == DINFO_USAGE_DIR_USE ? "." : "*",
374 result,
375 (void*) type_decl, name);
376 return result;
377 }
378 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
379 dump_struct_debug (type, usage, criterion, generic, matches, result)
380
381 #else
382
383 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
384 (result)
385
386 #endif
387
388 /* Get the number of HOST_WIDE_INTs needed to represent the precision
389 of the number. Some constants have a large uniform precision, so
390 we get the precision needed for the actual value of the number. */
391
392 static unsigned int
393 get_full_len (const wide_int &op)
394 {
395 int prec = wi::min_precision (op, UNSIGNED);
396 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
397 / HOST_BITS_PER_WIDE_INT);
398 }
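/* Editorial illustration (not part of GCC): with 64-bit HOST_WIDE_INTs,
   any value needing at most 64 bits of precision yields 1, a 65..128-bit
   value yields 2, and so on; e.g. for prec == 70, (70 + 63) / 64 == 2.  */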
399
400 static bool
401 should_emit_struct_debug (tree type, enum debug_info_usage usage)
402 {
403 enum debug_struct_file criterion;
404 tree type_decl;
405 bool generic = lang_hooks.types.generic_p (type);
406
407 if (generic)
408 criterion = debug_struct_generic[usage];
409 else
410 criterion = debug_struct_ordinary[usage];
411
412 if (criterion == DINFO_STRUCT_FILE_NONE)
413 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
414 if (criterion == DINFO_STRUCT_FILE_ANY)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
416
417 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
418
419 if (type_decl != NULL)
420 {
421 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
423
424 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
425 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
426 }
427
428 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
429 }
430 \f
431 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
432 switch to the data section instead, and write out a synthetic start label
433 for collect2 the first time around. */
434
435 static void
436 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
437 {
438 if (eh_frame_section == 0)
439 {
440 int flags;
441
442 if (EH_TABLES_CAN_BE_READ_ONLY)
443 {
444 int fde_encoding;
445 int per_encoding;
446 int lsda_encoding;
447
448 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
449 /*global=*/0);
450 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
451 /*global=*/1);
452 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
453 /*global=*/0);
454 flags = ((! flag_pic
455 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
456 && (fde_encoding & 0x70) != DW_EH_PE_aligned
457 && (per_encoding & 0x70) != DW_EH_PE_absptr
458 && (per_encoding & 0x70) != DW_EH_PE_aligned
459 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
460 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
461 ? 0 : SECTION_WRITE);
462 }
463 else
464 flags = SECTION_WRITE;
465
466 #ifdef EH_FRAME_SECTION_NAME
467 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
468 #else
469 eh_frame_section = ((flags == SECTION_WRITE)
470 ? data_section : readonly_data_section);
471 #endif /* EH_FRAME_SECTION_NAME */
472 }
473
474 switch_to_section (eh_frame_section);
475
476 #ifdef EH_FRAME_THROUGH_COLLECT2
477 /* We have no special eh_frame section. Emit special labels to guide
478 collect2. */
479 if (!back)
480 {
481 tree label = get_file_function_name ("F");
482 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
483 targetm.asm_out.globalize_label (asm_out_file,
484 IDENTIFIER_POINTER (label));
485 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
486 }
487 #endif
488 }
489
490 /* Switch [BACK] to the eh or debug frame table section, depending on
491 FOR_EH. */
492
493 static void
494 switch_to_frame_table_section (int for_eh, bool back)
495 {
496 if (for_eh)
497 switch_to_eh_frame_section (back);
498 else
499 {
500 if (!debug_frame_section)
501 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
502 SECTION_DEBUG, NULL);
503 switch_to_section (debug_frame_section);
504 }
505 }
506
507 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
508
509 enum dw_cfi_oprnd_type
510 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
511 {
512 switch (cfi)
513 {
514 case DW_CFA_nop:
515 case DW_CFA_GNU_window_save:
516 case DW_CFA_remember_state:
517 case DW_CFA_restore_state:
518 return dw_cfi_oprnd_unused;
519
520 case DW_CFA_set_loc:
521 case DW_CFA_advance_loc1:
522 case DW_CFA_advance_loc2:
523 case DW_CFA_advance_loc4:
524 case DW_CFA_MIPS_advance_loc8:
525 return dw_cfi_oprnd_addr;
526
527 case DW_CFA_offset:
528 case DW_CFA_offset_extended:
529 case DW_CFA_def_cfa:
530 case DW_CFA_offset_extended_sf:
531 case DW_CFA_def_cfa_sf:
532 case DW_CFA_restore:
533 case DW_CFA_restore_extended:
534 case DW_CFA_undefined:
535 case DW_CFA_same_value:
536 case DW_CFA_def_cfa_register:
537 case DW_CFA_register:
538 case DW_CFA_expression:
539 case DW_CFA_val_expression:
540 return dw_cfi_oprnd_reg_num;
541
542 case DW_CFA_def_cfa_offset:
543 case DW_CFA_GNU_args_size:
544 case DW_CFA_def_cfa_offset_sf:
545 return dw_cfi_oprnd_offset;
546
547 case DW_CFA_def_cfa_expression:
548 return dw_cfi_oprnd_loc;
549
550 default:
551 gcc_unreachable ();
552 }
553 }
554
555 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
556
557 enum dw_cfi_oprnd_type
558 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
559 {
560 switch (cfi)
561 {
562 case DW_CFA_def_cfa:
563 case DW_CFA_def_cfa_sf:
564 case DW_CFA_offset:
565 case DW_CFA_offset_extended_sf:
566 case DW_CFA_offset_extended:
567 return dw_cfi_oprnd_offset;
568
569 case DW_CFA_register:
570 return dw_cfi_oprnd_reg_num;
571
572 case DW_CFA_expression:
573 case DW_CFA_val_expression:
574 return dw_cfi_oprnd_loc;
575
576 case DW_CFA_def_cfa_expression:
577 return dw_cfi_oprnd_cfa_loc;
578
579 default:
580 return dw_cfi_oprnd_unused;
581 }
582 }
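/* Editorial illustration (not part of GCC): taken together, the two
   functions above spell out the operand layout of each CFI, e.g.

     DW_CFA_offset    oprnd1 = reg_num, oprnd2 = offset
     DW_CFA_register  oprnd1 = reg_num, oprnd2 = reg_num
     DW_CFA_set_loc   oprnd1 = addr,    oprnd2 = unused

   which is exactly what the GTY machinery needs to walk a dw_cfi_node.  */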
583
584 /* Output one FDE. */
585
586 static void
587 output_fde (dw_fde_ref fde, bool for_eh, bool second,
588 char *section_start_label, int fde_encoding, char *augmentation,
589 bool any_lsda_needed, int lsda_encoding)
590 {
591 const char *begin, *end;
592 static unsigned int j;
593 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
594
595 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
596 /* empty */ 0);
597 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
598 for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
601 if (!XCOFF_DEBUGGING_INFO || for_eh)
602 {
603 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
604 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
605 " indicating 64-bit DWARF extension");
606 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
607 "FDE Length");
608 }
609 ASM_OUTPUT_LABEL (asm_out_file, l1);
610
611 if (for_eh)
612 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
613 else
614 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
615 debug_frame_section, "FDE CIE offset");
616
617 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
618 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
619
620 if (for_eh)
621 {
622 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
623 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
624 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
625 "FDE initial location");
626 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
627 end, begin, "FDE address range");
628 }
629 else
630 {
631 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
632 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
633 }
634
635 if (augmentation[0])
636 {
637 if (any_lsda_needed)
638 {
639 int size = size_of_encoded_value (lsda_encoding);
640
641 if (lsda_encoding == DW_EH_PE_aligned)
642 {
643 int offset = ( 4 /* Length */
644 + 4 /* CIE offset */
645 + 2 * size_of_encoded_value (fde_encoding)
646 + 1 /* Augmentation size */ );
647 int pad = -offset & (PTR_SIZE - 1);
648
649 size += pad;
650 gcc_assert (size_of_uleb128 (size) == 1);
651 }
652
653 dw2_asm_output_data_uleb128 (size, "Augmentation size");
654
655 if (fde->uses_eh_lsda)
656 {
657 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
658 fde->funcdef_number);
659 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
660 gen_rtx_SYMBOL_REF (Pmode, l1),
661 false,
662 "Language Specific Data Area");
663 }
664 else
665 {
666 if (lsda_encoding == DW_EH_PE_aligned)
667 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
668 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
669 "Language Specific Data Area (none)");
670 }
671 }
672 else
673 dw2_asm_output_data_uleb128 (0, "Augmentation size");
674 }
675
676 /* Loop through the Call Frame Instructions associated with this FDE. */
677 fde->dw_fde_current_label = begin;
678 {
679 size_t from, until, i;
680
681 from = 0;
682 until = vec_safe_length (fde->dw_fde_cfi);
683
684 if (fde->dw_fde_second_begin == NULL)
685 ;
686 else if (!second)
687 until = fde->dw_fde_switch_cfi_index;
688 else
689 from = fde->dw_fde_switch_cfi_index;
690
691 for (i = from; i < until; i++)
692 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
693 }
694
695 /* If we are to emit a ref/link from function bodies to their frame tables,
696 do it now. This is typically performed to make sure that tables
697 associated with functions are dragged with them and not discarded by
698 link-time garbage collection. We need to do this on a per-function basis to
699 cope with -ffunction-sections. */
700
701 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
702 /* Switch to the function section, emit the ref to the tables, and
703 switch *back* into the table section. */
704 switch_to_section (function_section (fde->decl));
705 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
706 switch_to_frame_table_section (for_eh, true);
707 #endif
708
709 /* Pad the FDE out to an address sized boundary. */
710 ASM_OUTPUT_ALIGN (asm_out_file,
711 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
712 ASM_OUTPUT_LABEL (asm_out_file, l2);
713
714 j += 2;
715 }
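/* Editorial illustration (not part of GCC): for the !for_eh case the
   function above emits roughly this .debug_frame skeleton (label names
   follow the *_LABEL macros earlier in this file; sizes assume
   DWARF_OFFSET_SIZE == 4 and DWARF2_ADDR_SIZE == 8):

     LSFDE<j>:   .4byte LEFDE<j>-LASFDE<j>    FDE Length
     LASFDE<j>:  .4byte Lframe0               FDE CIE offset
                 .8byte LFB<n>                FDE initial location
                 .8byte LFE<n>-LFB<n>         FDE address range
                 ... call frame instructions ...
                 .align 3
     LEFDE<j>:  */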
716
717 /* Return true if frame description entry FDE is needed for EH. */
718
719 static bool
720 fde_needed_for_eh_p (dw_fde_ref fde)
721 {
722 if (flag_asynchronous_unwind_tables)
723 return true;
724
725 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
726 return true;
727
728 if (fde->uses_eh_lsda)
729 return true;
730
731 /* If exceptions are enabled, we have collected nothrow info. */
732 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
733 return false;
734
735 return true;
736 }
737
738 /* Output the call frame information, which records how to
739 calculate the frame pointer and where the saved registers
740 are located. */
741
742 static void
743 output_call_frame_info (int for_eh)
744 {
745 unsigned int i;
746 dw_fde_ref fde;
747 dw_cfi_ref cfi;
748 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
749 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
750 bool any_lsda_needed = false;
751 char augmentation[6];
752 int augmentation_size;
753 int fde_encoding = DW_EH_PE_absptr;
754 int per_encoding = DW_EH_PE_absptr;
755 int lsda_encoding = DW_EH_PE_absptr;
756 int return_reg;
757 rtx personality = NULL;
758 int dw_cie_version;
759
760 /* Don't emit a CIE if there won't be any FDEs. */
761 if (!fde_vec)
762 return;
763
764 /* Nothing to do if the assembler's doing it all. */
765 if (dwarf2out_do_cfi_asm ())
766 return;
767
768 /* If we don't have any functions we'll want to unwind out of, don't emit
769 any EH unwind information. If we make FDEs linkonce, we may have to
770 emit an empty label for an FDE that wouldn't otherwise be emitted. We
771 want to avoid having an FDE kept around when the function it refers to
772 is discarded. Example where this matters: a primary function template
773 in C++ requires EH information, an explicit specialization doesn't. */
774 if (for_eh)
775 {
776 bool any_eh_needed = false;
777
778 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
779 {
780 if (fde->uses_eh_lsda)
781 any_eh_needed = any_lsda_needed = true;
782 else if (fde_needed_for_eh_p (fde))
783 any_eh_needed = true;
784 else if (TARGET_USES_WEAK_UNWIND_INFO)
785 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
786 }
787
788 if (!any_eh_needed)
789 return;
790 }
791
792 /* We're going to be generating comments, so turn on app. */
793 if (flag_debug_asm)
794 app_enable ();
795
796 /* Switch to the proper frame section, first time. */
797 switch_to_frame_table_section (for_eh, false);
798
799 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
800 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
801
802 /* Output the CIE. */
803 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
804 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
805 if (!XCOFF_DEBUGGING_INFO || for_eh)
806 {
807 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
808 dw2_asm_output_data (4, 0xffffffff,
809 "Initial length escape value indicating 64-bit DWARF extension");
810 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
811 "Length of Common Information Entry");
812 }
813 ASM_OUTPUT_LABEL (asm_out_file, l1);
814
815 /* Now that the CIE pointer is PC-relative for EH,
816 use 0 to identify the CIE. */
817 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
818 (for_eh ? 0 : DWARF_CIE_ID),
819 "CIE Identifier Tag");
820
821 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
822 use CIE version 1, unless that would produce incorrect results
823 due to overflowing the return register column. */
824 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
825 dw_cie_version = 1;
826 if (return_reg >= 256 || dwarf_version > 2)
827 dw_cie_version = 3;
828 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
829
830 augmentation[0] = 0;
831 augmentation_size = 0;
832
833 personality = current_unit_personality;
834 if (for_eh)
835 {
836 char *p;
837
838 /* Augmentation:
839 z Indicates that a uleb128 is present to size the
840 augmentation section.
841 L Indicates the encoding (and thus presence) of
842 an LSDA pointer in the FDE augmentation.
843 R Indicates a non-default pointer encoding for
844 FDE code pointers.
845 P Indicates the presence of an encoding + language
846 personality routine in the CIE augmentation. */
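/* Editorial illustration (not part of GCC): a typical EH CIE built below
   ends up with the augmentation string "zPLR"; the augmentation data then
   carries the personality encoding and pointer (P), the LSDA encoding
   byte (L) and the FDE pointer encoding byte (R), in the same order the
   letters were added.  */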
847
848 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
849 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
850 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
851
852 p = augmentation + 1;
853 if (personality)
854 {
855 *p++ = 'P';
856 augmentation_size += 1 + size_of_encoded_value (per_encoding);
857 assemble_external_libcall (personality);
858 }
859 if (any_lsda_needed)
860 {
861 *p++ = 'L';
862 augmentation_size += 1;
863 }
864 if (fde_encoding != DW_EH_PE_absptr)
865 {
866 *p++ = 'R';
867 augmentation_size += 1;
868 }
869 if (p > augmentation + 1)
870 {
871 augmentation[0] = 'z';
872 *p = '\0';
873 }
874
875 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
876 if (personality && per_encoding == DW_EH_PE_aligned)
877 {
878 int offset = ( 4 /* Length */
879 + 4 /* CIE Id */
880 + 1 /* CIE version */
881 + strlen (augmentation) + 1 /* Augmentation */
882 + size_of_uleb128 (1) /* Code alignment */
883 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
884 + 1 /* RA column */
885 + 1 /* Augmentation size */
886 + 1 /* Personality encoding */ );
887 int pad = -offset & (PTR_SIZE - 1);
888
889 augmentation_size += pad;
890
891 /* Augmentations should be small, so there's scarce need to
892 iterate for a solution. Die if we exceed one uleb128 byte. */
893 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
894 }
895 }
896
897 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
898 if (dw_cie_version >= 4)
899 {
900 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
901 dw2_asm_output_data (1, 0, "CIE Segment Size");
902 }
903 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
904 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
905 "CIE Data Alignment Factor");
906
907 if (dw_cie_version == 1)
908 dw2_asm_output_data (1, return_reg, "CIE RA Column");
909 else
910 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
911
912 if (augmentation[0])
913 {
914 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
915 if (personality)
916 {
917 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
918 eh_data_format_name (per_encoding));
919 dw2_asm_output_encoded_addr_rtx (per_encoding,
920 personality,
921 true, NULL);
922 }
923
924 if (any_lsda_needed)
925 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
926 eh_data_format_name (lsda_encoding));
927
928 if (fde_encoding != DW_EH_PE_absptr)
929 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
930 eh_data_format_name (fde_encoding));
931 }
932
933 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
934 output_cfi (cfi, NULL, for_eh);
935
936 /* Pad the CIE out to an address sized boundary. */
937 ASM_OUTPUT_ALIGN (asm_out_file,
938 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
939 ASM_OUTPUT_LABEL (asm_out_file, l2);
940
941 /* Loop through all of the FDE's. */
942 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
943 {
944 unsigned int k;
945
946 /* Don't emit EH unwind info for leaf functions that don't need it. */
947 if (for_eh && !fde_needed_for_eh_p (fde))
948 continue;
949
950 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
951 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
952 augmentation, any_lsda_needed, lsda_encoding);
953 }
954
955 if (for_eh && targetm.terminate_dw2_eh_frame_info)
956 dw2_asm_output_data (4, 0, "End of Table");
957
958 /* Turn off app to make assembly quicker. */
959 if (flag_debug_asm)
960 app_disable ();
961 }
962
963 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
964
965 static void
966 dwarf2out_do_cfi_startproc (bool second)
967 {
968 int enc;
969 rtx ref;
970
971 fprintf (asm_out_file, "\t.cfi_startproc\n");
972
973 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
974 eh unwinders. */
975 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
976 return;
977
978 rtx personality = get_personality_function (current_function_decl);
979
980 if (personality)
981 {
982 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
983 ref = personality;
984
985 /* ??? The GAS support isn't entirely consistent. We have to
986 handle indirect support ourselves, but PC-relative is done
987 in the assembler. Further, the assembler can't handle any
988 of the weirder relocation types. */
989 if (enc & DW_EH_PE_indirect)
990 ref = dw2_force_const_mem (ref, true);
991
992 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
993 output_addr_const (asm_out_file, ref);
994 fputc ('\n', asm_out_file);
995 }
996
997 if (crtl->uses_eh_lsda)
998 {
999 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1000
1001 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1002 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1003 current_function_funcdef_no);
1004 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1005 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1006
1007 if (enc & DW_EH_PE_indirect)
1008 ref = dw2_force_const_mem (ref, true);
1009
1010 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1011 output_addr_const (asm_out_file, ref);
1012 fputc ('\n', asm_out_file);
1013 }
1014 }
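/* Editorial illustration (assumed encodings, not part of GCC): on a typical
   ELF target with a C++ personality and indirect pc-relative sdata4
   encodings, the function above emits something like

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA<n>

   where the encoding bytes come from ASM_PREFERRED_EH_DATA_FORMAT.  */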
1015
1016 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1017 this allocation may be done before pass_final. */
1018
1019 dw_fde_ref
1020 dwarf2out_alloc_current_fde (void)
1021 {
1022 dw_fde_ref fde;
1023
1024 fde = ggc_cleared_alloc<dw_fde_node> ();
1025 fde->decl = current_function_decl;
1026 fde->funcdef_number = current_function_funcdef_no;
1027 fde->fde_index = vec_safe_length (fde_vec);
1028 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1029 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1030 fde->nothrow = crtl->nothrow;
1031 fde->drap_reg = INVALID_REGNUM;
1032 fde->vdrap_reg = INVALID_REGNUM;
1033
1034 /* Record the FDE associated with this function. */
1035 cfun->fde = fde;
1036 vec_safe_push (fde_vec, fde);
1037
1038 return fde;
1039 }
1040
1041 /* Output a marker (i.e. a label) for the beginning of a function, before
1042 the prologue. */
1043
1044 void
1045 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1046 unsigned int column ATTRIBUTE_UNUSED,
1047 const char *file ATTRIBUTE_UNUSED)
1048 {
1049 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1050 char * dup_label;
1051 dw_fde_ref fde;
1052 section *fnsec;
1053 bool do_frame;
1054
1055 current_function_func_begin_label = NULL;
1056
1057 do_frame = dwarf2out_do_frame ();
1058
1059 /* ??? current_function_func_begin_label is also used by except.c for
1060 call-site information. We must emit this label if it might be used. */
1061 if (!do_frame
1062 && (!flag_exceptions
1063 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1064 return;
1065
1066 fnsec = function_section (current_function_decl);
1067 switch_to_section (fnsec);
1068 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1069 current_function_funcdef_no);
1070 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1071 current_function_funcdef_no);
1072 dup_label = xstrdup (label);
1073 current_function_func_begin_label = dup_label;
1074
1075 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1076 if (!do_frame)
1077 return;
1078
1079 /* Unlike the debug version, the EH version of frame unwind info is a per-
1080 function setting so we need to record whether we need it for the unit. */
1081 do_eh_frame |= dwarf2out_do_eh_frame ();
1082
1083 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1084 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1085 would include pass_dwarf2_frame. If we've not created the FDE yet,
1086 do so now. */
1087 fde = cfun->fde;
1088 if (fde == NULL)
1089 fde = dwarf2out_alloc_current_fde ();
1090
1091 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1092 fde->dw_fde_begin = dup_label;
1093 fde->dw_fde_current_label = dup_label;
1094 fde->in_std_section = (fnsec == text_section
1095 || (cold_text_section && fnsec == cold_text_section));
1096
1097 /* We only want to output line number information for the genuine dwarf2
1098 prologue case, not the eh frame case. */
1099 #ifdef DWARF2_DEBUGGING_INFO
1100 if (file)
1101 dwarf2out_source_line (line, column, file, 0, true);
1102 #endif
1103
1104 if (dwarf2out_do_cfi_asm ())
1105 dwarf2out_do_cfi_startproc (false);
1106 else
1107 {
1108 rtx personality = get_personality_function (current_function_decl);
1109 if (!current_unit_personality)
1110 current_unit_personality = personality;
1111
1112 /* We cannot keep a current personality per function as without CFI
1113 asm, at the point where we emit the CFI data, there is no current
1114 function anymore. */
1115 if (personality && current_unit_personality != personality)
1116 sorry ("multiple EH personalities are supported only with assemblers "
1117 "supporting .cfi_personality directive");
1118 }
1119 }
1120
1121 /* Output a marker (i.e. a label) for the end of the generated code
1122 for a function prologue. This gets called *after* the prologue code has
1123 been generated. */
1124
1125 void
1126 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1127 const char *file ATTRIBUTE_UNUSED)
1128 {
1129 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1130
1131 /* Output a label to mark the end of the prologue code generated for
1132 this function. */
1133 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1134 current_function_funcdef_no);
1135 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1136 current_function_funcdef_no);
1137 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1138 }
1139
1140 /* Output a marker (i.e. a label) for the beginning of the generated code
1141 for a function epilogue. This gets called *before* the epilogue code has
1142 been generated. */
1143
1144 void
1145 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1146 const char *file ATTRIBUTE_UNUSED)
1147 {
1148 dw_fde_ref fde = cfun->fde;
1149 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1150
1151 if (fde->dw_fde_vms_begin_epilogue)
1152 return;
1153
1154 /* Output a label to mark the beginning of the epilogue code generated
1155 for this function. */
1156 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1157 current_function_funcdef_no);
1158 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1159 current_function_funcdef_no);
1160 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1161 }
1162
1163 /* Output a marker (i.e. a label) for the absolute end of the generated code
1164 for a function definition. This gets called *after* the epilogue code has
1165 been generated. */
1166
1167 void
1168 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1169 const char *file ATTRIBUTE_UNUSED)
1170 {
1171 dw_fde_ref fde;
1172 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1173
1174 last_var_location_insn = NULL;
1175 cached_next_real_insn = NULL;
1176
1177 if (dwarf2out_do_cfi_asm ())
1178 fprintf (asm_out_file, "\t.cfi_endproc\n");
1179
1180 /* Output a label to mark the endpoint of the code generated for this
1181 function. */
1182 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1183 current_function_funcdef_no);
1184 ASM_OUTPUT_LABEL (asm_out_file, label);
1185 fde = cfun->fde;
1186 gcc_assert (fde != NULL);
1187 if (fde->dw_fde_second_begin == NULL)
1188 fde->dw_fde_end = xstrdup (label);
1189 }
1190
1191 void
1192 dwarf2out_frame_finish (void)
1193 {
1194 /* Output call frame information. */
1195 if (targetm.debug_unwind_info () == UI_DWARF2)
1196 output_call_frame_info (0);
1197
1198 /* Output another copy for the unwinder. */
1199 if (do_eh_frame)
1200 output_call_frame_info (1);
1201 }
1202
1203 /* Note that the current function section is being used for code. */
1204
1205 static void
1206 dwarf2out_note_section_used (void)
1207 {
1208 section *sec = current_function_section ();
1209 if (sec == text_section)
1210 text_section_used = true;
1211 else if (sec == cold_text_section)
1212 cold_text_section_used = true;
1213 }
1214
1215 static void var_location_switch_text_section (void);
1216 static void set_cur_line_info_table (section *);
1217
1218 void
1219 dwarf2out_switch_text_section (void)
1220 {
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 if (!in_cold_section_p)
1227 {
1228 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1231 }
1232 else
1233 {
1234 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1235 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1236 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1237 }
1238 have_multiple_function_sections = true;
1239
1240 /* There is no need to mark used sections when not debugging. */
1241 if (cold_text_section != NULL)
1242 dwarf2out_note_section_used ();
1243
1244 if (dwarf2out_do_cfi_asm ())
1245 fprintf (asm_out_file, "\t.cfi_endproc\n");
1246
1247 /* Now do the real section switch. */
1248 sect = current_function_section ();
1249 switch_to_section (sect);
1250
1251 fde->second_in_std_section
1252 = (sect == text_section
1253 || (cold_text_section && sect == cold_text_section));
1254
1255 if (dwarf2out_do_cfi_asm ())
1256 dwarf2out_do_cfi_startproc (true);
1257
1258 var_location_switch_text_section ();
1259
1260 if (cold_text_section != NULL)
1261 set_cur_line_info_table (sect);
1262 }
1263 \f
1264 /* And now, the subset of the debugging information support code necessary
1265 for emitting location expressions. */
1266
1267 /* Data about a single source file. */
1268 struct GTY((for_user)) dwarf_file_data {
1269 const char * filename;
1270 int emitted_number;
1271 };
1272
1273 /* Describe an entry into the .debug_addr section. */
1274
1275 enum ate_kind {
1276 ate_kind_rtx,
1277 ate_kind_rtx_dtprel,
1278 ate_kind_label
1279 };
1280
1281 struct GTY((for_user)) addr_table_entry {
1282 enum ate_kind kind;
1283 unsigned int refcount;
1284 unsigned int index;
1285 union addr_table_entry_struct_union
1286 {
1287 rtx GTY ((tag ("0"))) rtl;
1288 char * GTY ((tag ("1"))) label;
1289 }
1290 GTY ((desc ("%1.kind"))) addr;
1291 };
1292
1293 /* Location lists are ranges + location descriptions for that range,
1294 so you can track variables that are in different places over
1295 their entire life. */
1296 typedef struct GTY(()) dw_loc_list_struct {
1297 dw_loc_list_ref dw_loc_next;
1298 const char *begin; /* Label and addr_entry for start of range */
1299 addr_table_entry *begin_entry;
1300 const char *end; /* Label for end of range */
1301 char *ll_symbol; /* Label for beginning of location list.
1302 Only on head of list */
1303 const char *section; /* Section this loclist is relative to */
1304 dw_loc_descr_ref expr;
1305 hashval_t hash;
1306 /* True if all addresses in this and subsequent lists are known to be
1307 resolved. */
1308 bool resolved_addr;
1309 /* True if this list has been replaced by dw_loc_next. */
1310 bool replaced;
1311 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1312 section. */
1313 unsigned char emitted : 1;
1314 /* True if hash field is index rather than hash value. */
1315 unsigned char num_assigned : 1;
1316 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1317 unsigned char offset_emitted : 1;
1318 /* True if note_variable_value_in_expr has been called on it. */
1319 unsigned char noted_variable_value : 1;
1320 /* True if the range should be emitted even if begin and end
1321 are the same. */
1322 bool force;
1323 } dw_loc_list_node;
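/* Editorial illustration (not part of GCC): a variable living in a register
   for part of a function and in a stack slot for the rest might be described
   by a two-node list, conceptually

     [begin .LVL0, end .LVL1)  expr = DW_OP_reg3
     [begin .LVL1, end .LVL2)  expr = DW_OP_fbreg -24

   with ll_symbol on the head node naming the whole list in the
   .debug_loc* / .debug_loclists* section.  */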
1324
1325 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1326 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1327
1328 /* Convert a DWARF stack opcode into its string name. */
1329
1330 static const char *
1331 dwarf_stack_op_name (unsigned int op)
1332 {
1333 const char *name = get_DW_OP_name (op);
1334
1335 if (name != NULL)
1336 return name;
1337
1338 return "OP_<unknown>";
1339 }
1340
1341 /* Return a pointer to a newly allocated location description. Location
1342 descriptions are simple expression terms that can be strung
1343 together to form more complicated location (address) descriptions. */
1344
1345 static inline dw_loc_descr_ref
1346 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1347 unsigned HOST_WIDE_INT oprnd2)
1348 {
1349 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1350
1351 descr->dw_loc_opc = op;
1352 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1353 descr->dw_loc_oprnd1.val_entry = NULL;
1354 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1355 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1356 descr->dw_loc_oprnd2.val_entry = NULL;
1357 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1358
1359 return descr;
1360 }
1361
1362 /* Add a location description term to a location description expression. */
1363
1364 static inline void
1365 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1366 {
1367 dw_loc_descr_ref *d;
1368
1369 /* Find the end of the chain. */
1370 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1371 ;
1372
1373 *d = descr;
1374 }
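/* Editorial sketch (not part of GCC, deliberately left unbuilt): how a small
   expression meaning "contents of memory at [reg6 + 16]" could be strung
   together from the two primitives above.  */
#if 0
static dw_loc_descr_ref
example_breg6_plus_16_deref (void)
{
  /* Start with "register 6 plus 16" ...  */
  dw_loc_descr_ref list = new_loc_descr (DW_OP_breg6, 16, 0);
  /* ... then append a dereference of that address.  */
  add_loc_descr (&list, new_loc_descr (DW_OP_deref, 0, 0));
  return list;
}
#endif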
1375
1376 /* Compare two location operands for exact equality. */
1377
1378 static bool
1379 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1380 {
1381 if (a->val_class != b->val_class)
1382 return false;
1383 switch (a->val_class)
1384 {
1385 case dw_val_class_none:
1386 return true;
1387 case dw_val_class_addr:
1388 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1389
1390 case dw_val_class_offset:
1391 case dw_val_class_unsigned_const:
1392 case dw_val_class_const:
1393 case dw_val_class_unsigned_const_implicit:
1394 case dw_val_class_const_implicit:
1395 case dw_val_class_range_list:
1396 /* These are all HOST_WIDE_INT, signed or unsigned. */
1397 return a->v.val_unsigned == b->v.val_unsigned;
1398
1399 case dw_val_class_loc:
1400 return a->v.val_loc == b->v.val_loc;
1401 case dw_val_class_loc_list:
1402 return a->v.val_loc_list == b->v.val_loc_list;
1403 case dw_val_class_die_ref:
1404 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1405 case dw_val_class_fde_ref:
1406 return a->v.val_fde_index == b->v.val_fde_index;
1407 case dw_val_class_lbl_id:
1408 case dw_val_class_lineptr:
1409 case dw_val_class_macptr:
1410 case dw_val_class_loclistsptr:
1411 case dw_val_class_high_pc:
1412 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1413 case dw_val_class_str:
1414 return a->v.val_str == b->v.val_str;
1415 case dw_val_class_flag:
1416 return a->v.val_flag == b->v.val_flag;
1417 case dw_val_class_file:
1418 case dw_val_class_file_implicit:
1419 return a->v.val_file == b->v.val_file;
1420 case dw_val_class_decl_ref:
1421 return a->v.val_decl_ref == b->v.val_decl_ref;
1422
1423 case dw_val_class_const_double:
1424 return (a->v.val_double.high == b->v.val_double.high
1425 && a->v.val_double.low == b->v.val_double.low);
1426
1427 case dw_val_class_wide_int:
1428 return *a->v.val_wide == *b->v.val_wide;
1429
1430 case dw_val_class_vec:
1431 {
1432 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1433 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1434
1435 return (a_len == b_len
1436 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1437 }
1438
1439 case dw_val_class_data8:
1440 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1441
1442 case dw_val_class_vms_delta:
1443 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1444 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1445
1446 case dw_val_class_discr_value:
1447 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1448 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1449 case dw_val_class_discr_list:
1450 /* It makes no sense comparing two discriminant value lists. */
1451 return false;
1452 }
1453 gcc_unreachable ();
1454 }
1455
1456 /* Compare two location atoms for exact equality. */
1457
1458 static bool
1459 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1460 {
1461 if (a->dw_loc_opc != b->dw_loc_opc)
1462 return false;
1463
1464 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1465 address size, but since we always allocate cleared storage it
1466 should be zero for other types of locations. */
1467 if (a->dtprel != b->dtprel)
1468 return false;
1469
1470 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1471 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1472 }
1473
1474 /* Compare two complete location expressions for exact equality. */
1475
1476 bool
1477 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1478 {
1479 while (1)
1480 {
1481 if (a == b)
1482 return true;
1483 if (a == NULL || b == NULL)
1484 return false;
1485 if (!loc_descr_equal_p_1 (a, b))
1486 return false;
1487
1488 a = a->dw_loc_next;
1489 b = b->dw_loc_next;
1490 }
1491 }
1492
1493
1494 /* Add a constant POLY_OFFSET to a location expression. */
1495
1496 static void
1497 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1498 {
1499 dw_loc_descr_ref loc;
1500 HOST_WIDE_INT *p;
1501
1502 gcc_assert (*list_head != NULL);
1503
1504 if (known_eq (poly_offset, 0))
1505 return;
1506
1507 /* Find the end of the chain. */
1508 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1509 ;
1510
1511 HOST_WIDE_INT offset;
1512 if (!poly_offset.is_constant (&offset))
1513 {
1514 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1515 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1516 return;
1517 }
1518
1519 p = NULL;
1520 if (loc->dw_loc_opc == DW_OP_fbreg
1521 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1522 p = &loc->dw_loc_oprnd1.v.val_int;
1523 else if (loc->dw_loc_opc == DW_OP_bregx)
1524 p = &loc->dw_loc_oprnd2.v.val_int;
1525
1526 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1527 offset. Don't optimize if a signed integer overflow would happen. */
1528 if (p != NULL
1529 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1530 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1531 *p += offset;
1532
1533 else if (offset > 0)
1534 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1535
1536 else
1537 {
1538 loc->dw_loc_next
1539 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1541 }
1542 }
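/* Editorial illustration (not part of GCC): if the expression currently ends
   in "DW_OP_fbreg -8", loc_descr_plus_const with offset 24 simply rewrites it
   to "DW_OP_fbreg 16"; if it ends in an op with no foldable offset, it
   appends "DW_OP_plus_uconst 24" instead (or a constant plus DW_OP_minus for
   a negative offset).  */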
1543
1544 /* Return a pointer to a newly allocated location description for
1545 REG and OFFSET. */
1546
1547 static inline dw_loc_descr_ref
1548 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1549 {
1550 HOST_WIDE_INT const_offset;
1551 if (offset.is_constant (&const_offset))
1552 {
1553 if (reg <= 31)
1554 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1555 const_offset, 0);
1556 else
1557 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1558 }
1559 else
1560 {
1561 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1562 loc_descr_plus_const (&ret, offset);
1563 return ret;
1564 }
1565 }
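/* Editorial illustration (not part of GCC): new_reg_loc_descr (6, 16) yields
   the compact "DW_OP_breg6 16" form, while a register number above 31, say
   new_reg_loc_descr (40, 16), has to use "DW_OP_bregx 40, 16".  */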
1566
1567 /* Add a constant OFFSET to a location list. */
1568
1569 static void
1570 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1571 {
1572 dw_loc_list_ref d;
1573 for (d = list_head; d != NULL; d = d->dw_loc_next)
1574 loc_descr_plus_const (&d->expr, offset);
1575 }
1576
1577 #define DWARF_REF_SIZE \
1578 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1579
1580 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1581 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1582 DW_FORM_data16 with 128 bits. */
1583 #define DWARF_LARGEST_DATA_FORM_BITS \
1584 (dwarf_version >= 5 ? 128 : 64)
1585
1586 /* Utility inline function for construction of ops that were GNU extension
1587 before DWARF 5. */
1588 static inline enum dwarf_location_atom
1589 dwarf_OP (enum dwarf_location_atom op)
1590 {
1591 switch (op)
1592 {
1593 case DW_OP_implicit_pointer:
1594 if (dwarf_version < 5)
1595 return DW_OP_GNU_implicit_pointer;
1596 break;
1597
1598 case DW_OP_entry_value:
1599 if (dwarf_version < 5)
1600 return DW_OP_GNU_entry_value;
1601 break;
1602
1603 case DW_OP_const_type:
1604 if (dwarf_version < 5)
1605 return DW_OP_GNU_const_type;
1606 break;
1607
1608 case DW_OP_regval_type:
1609 if (dwarf_version < 5)
1610 return DW_OP_GNU_regval_type;
1611 break;
1612
1613 case DW_OP_deref_type:
1614 if (dwarf_version < 5)
1615 return DW_OP_GNU_deref_type;
1616 break;
1617
1618 case DW_OP_convert:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_convert;
1621 break;
1622
1623 case DW_OP_reinterpret:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_reinterpret;
1626 break;
1627
1628 default:
1629 break;
1630 }
1631 return op;
1632 }
1633
1634 /* Similarly for attributes. */
1635 static inline enum dwarf_attribute
1636 dwarf_AT (enum dwarf_attribute at)
1637 {
1638 switch (at)
1639 {
1640 case DW_AT_call_return_pc:
1641 if (dwarf_version < 5)
1642 return DW_AT_low_pc;
1643 break;
1644
1645 case DW_AT_call_tail_call:
1646 if (dwarf_version < 5)
1647 return DW_AT_GNU_tail_call;
1648 break;
1649
1650 case DW_AT_call_origin:
1651 if (dwarf_version < 5)
1652 return DW_AT_abstract_origin;
1653 break;
1654
1655 case DW_AT_call_target:
1656 if (dwarf_version < 5)
1657 return DW_AT_GNU_call_site_target;
1658 break;
1659
1660 case DW_AT_call_target_clobbered:
1661 if (dwarf_version < 5)
1662 return DW_AT_GNU_call_site_target_clobbered;
1663 break;
1664
1665 case DW_AT_call_parameter:
1666 if (dwarf_version < 5)
1667 return DW_AT_abstract_origin;
1668 break;
1669
1670 case DW_AT_call_value:
1671 if (dwarf_version < 5)
1672 return DW_AT_GNU_call_site_value;
1673 break;
1674
1675 case DW_AT_call_data_value:
1676 if (dwarf_version < 5)
1677 return DW_AT_GNU_call_site_data_value;
1678 break;
1679
1680 case DW_AT_call_all_calls:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_all_call_sites;
1683 break;
1684
1685 case DW_AT_call_all_tail_calls:
1686 if (dwarf_version < 5)
1687 return DW_AT_GNU_all_tail_call_sites;
1688 break;
1689
1690 case DW_AT_dwo_name:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_dwo_name;
1693 break;
1694
1695 default:
1696 break;
1697 }
1698 return at;
1699 }
1700
1701 /* And similarly for tags. */
1702 static inline enum dwarf_tag
1703 dwarf_TAG (enum dwarf_tag tag)
1704 {
1705 switch (tag)
1706 {
1707 case DW_TAG_call_site:
1708 if (dwarf_version < 5)
1709 return DW_TAG_GNU_call_site;
1710 break;
1711
1712 case DW_TAG_call_site_parameter:
1713 if (dwarf_version < 5)
1714 return DW_TAG_GNU_call_site_parameter;
1715 break;
1716
1717 default:
1718 break;
1719 }
1720 return tag;
1721 }
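/* Editorial illustration (not part of GCC): with -gdwarf-4 these helpers
   fall back to the GNU extensions, e.g. dwarf_OP (DW_OP_entry_value) is
   DW_OP_GNU_entry_value and dwarf_TAG (DW_TAG_call_site) is
   DW_TAG_GNU_call_site; with -gdwarf-5 the standard codes pass through
   unchanged.  */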
1722
1723 static unsigned long int get_base_type_offset (dw_die_ref);
1724
1725 /* Return the size of a location descriptor. */
1726
1727 static unsigned long
1728 size_of_loc_descr (dw_loc_descr_ref loc)
1729 {
1730 unsigned long size = 1;
1731
1732 switch (loc->dw_loc_opc)
1733 {
1734 case DW_OP_addr:
1735 size += DWARF2_ADDR_SIZE;
1736 break;
1737 case DW_OP_GNU_addr_index:
1738 case DW_OP_GNU_const_index:
1739 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1740 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1741 break;
1742 case DW_OP_const1u:
1743 case DW_OP_const1s:
1744 size += 1;
1745 break;
1746 case DW_OP_const2u:
1747 case DW_OP_const2s:
1748 size += 2;
1749 break;
1750 case DW_OP_const4u:
1751 case DW_OP_const4s:
1752 size += 4;
1753 break;
1754 case DW_OP_const8u:
1755 case DW_OP_const8s:
1756 size += 8;
1757 break;
1758 case DW_OP_constu:
1759 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1760 break;
1761 case DW_OP_consts:
1762 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1763 break;
1764 case DW_OP_pick:
1765 size += 1;
1766 break;
1767 case DW_OP_plus_uconst:
1768 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1769 break;
1770 case DW_OP_skip:
1771 case DW_OP_bra:
1772 size += 2;
1773 break;
1774 case DW_OP_breg0:
1775 case DW_OP_breg1:
1776 case DW_OP_breg2:
1777 case DW_OP_breg3:
1778 case DW_OP_breg4:
1779 case DW_OP_breg5:
1780 case DW_OP_breg6:
1781 case DW_OP_breg7:
1782 case DW_OP_breg8:
1783 case DW_OP_breg9:
1784 case DW_OP_breg10:
1785 case DW_OP_breg11:
1786 case DW_OP_breg12:
1787 case DW_OP_breg13:
1788 case DW_OP_breg14:
1789 case DW_OP_breg15:
1790 case DW_OP_breg16:
1791 case DW_OP_breg17:
1792 case DW_OP_breg18:
1793 case DW_OP_breg19:
1794 case DW_OP_breg20:
1795 case DW_OP_breg21:
1796 case DW_OP_breg22:
1797 case DW_OP_breg23:
1798 case DW_OP_breg24:
1799 case DW_OP_breg25:
1800 case DW_OP_breg26:
1801 case DW_OP_breg27:
1802 case DW_OP_breg28:
1803 case DW_OP_breg29:
1804 case DW_OP_breg30:
1805 case DW_OP_breg31:
1806 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1807 break;
1808 case DW_OP_regx:
1809 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1810 break;
1811 case DW_OP_fbreg:
1812 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1813 break;
1814 case DW_OP_bregx:
1815 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1816 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1817 break;
1818 case DW_OP_piece:
1819 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1820 break;
1821 case DW_OP_bit_piece:
1822 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1823 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1824 break;
1825 case DW_OP_deref_size:
1826 case DW_OP_xderef_size:
1827 size += 1;
1828 break;
1829 case DW_OP_call2:
1830 size += 2;
1831 break;
1832 case DW_OP_call4:
1833 size += 4;
1834 break;
1835 case DW_OP_call_ref:
1836 case DW_OP_GNU_variable_value:
1837 size += DWARF_REF_SIZE;
1838 break;
1839 case DW_OP_implicit_value:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1841 + loc->dw_loc_oprnd1.v.val_unsigned;
1842 break;
1843 case DW_OP_implicit_pointer:
1844 case DW_OP_GNU_implicit_pointer:
1845 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1846 break;
1847 case DW_OP_entry_value:
1848 case DW_OP_GNU_entry_value:
1849 {
1850 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1851 size += size_of_uleb128 (op_size) + op_size;
1852 break;
1853 }
1854 case DW_OP_const_type:
1855 case DW_OP_GNU_const_type:
1856 {
1857 unsigned long o
1858 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1859 size += size_of_uleb128 (o) + 1;
1860 switch (loc->dw_loc_oprnd2.val_class)
1861 {
1862 case dw_val_class_vec:
1863 size += loc->dw_loc_oprnd2.v.val_vec.length
1864 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1865 break;
1866 case dw_val_class_const:
1867 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1868 break;
1869 case dw_val_class_const_double:
1870 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1871 break;
1872 case dw_val_class_wide_int:
1873 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1874 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1875 break;
1876 default:
1877 gcc_unreachable ();
1878 }
1879 break;
1880 }
1881 case DW_OP_regval_type:
1882 case DW_OP_GNU_regval_type:
1883 {
1884 unsigned long o
1885 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1887 + size_of_uleb128 (o);
1888 }
1889 break;
1890 case DW_OP_deref_type:
1891 case DW_OP_GNU_deref_type:
1892 {
1893 unsigned long o
1894 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1895 size += 1 + size_of_uleb128 (o);
1896 }
1897 break;
1898 case DW_OP_convert:
1899 case DW_OP_reinterpret:
1900 case DW_OP_GNU_convert:
1901 case DW_OP_GNU_reinterpret:
1902 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1903 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1904 else
1905 {
1906 unsigned long o
1907 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1908 size += size_of_uleb128 (o);
1909 }
1910 break;
1911 case DW_OP_GNU_parameter_ref:
1912 size += 4;
1913 break;
1914 default:
1915 break;
1916 }
1917
1918 return size;
1919 }
1920
1921 /* Return the size of a series of location descriptors. */
1922
1923 unsigned long
1924 size_of_locs (dw_loc_descr_ref loc)
1925 {
1926 dw_loc_descr_ref l;
1927 unsigned long size;
1928
1929 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1930 field, to avoid writing to a PCH file. */
1931 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1932 {
1933 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1934 break;
1935 size += size_of_loc_descr (l);
1936 }
1937 if (! l)
1938 return size;
1939
1940 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1941 {
1942 l->dw_loc_addr = size;
1943 size += size_of_loc_descr (l);
1944 }
1945
1946 return size;
1947 }
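
/* A worked example, for illustration only: the single-element expression
   DW_OP_fbreg -16 takes 1 byte for the opcode plus size_of_sleb128 (-16)
   == 1 byte for its operand, so size_of_locs returns 2.  */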
1948
1949 /* Return the size of the value in a DW_AT_discr_value attribute. */
1950
1951 static int
1952 size_of_discr_value (dw_discr_value *discr_value)
1953 {
1954 if (discr_value->pos)
1955 return size_of_uleb128 (discr_value->v.uval);
1956 else
1957 return size_of_sleb128 (discr_value->v.sval);
1958 }
1959
1960 /* Return the size of the value in a DW_AT_discr_list attribute. */
1961
1962 static int
1963 size_of_discr_list (dw_discr_list_ref discr_list)
1964 {
1965 int size = 0;
1966
1967 for (dw_discr_list_ref list = discr_list;
1968 list != NULL;
1969 list = list->dw_discr_next)
1970 {
1971 /* One byte for the discriminant value descriptor, and then one or two
1972 LEB128 numbers, depending on whether it's a single case label or a
1973 range label. */
1974 size += 1;
1975 size += size_of_discr_value (&list->dw_discr_lower_bound);
1976 if (list->dw_discr_range != 0)
1977 size += size_of_discr_value (&list->dw_discr_upper_bound);
1978 }
1979 return size;
1980 }
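
/* Worked example, for illustration only: a list containing the single
   label 3 followed by the range 5..7 (all unsigned) needs
   1 + size_of_uleb128 (3) = 2 bytes for the first entry and
   1 + size_of_uleb128 (5) + size_of_uleb128 (7) = 3 bytes for the
   second, i.e. 5 bytes in total.  */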
1981
1982 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
1983 static void get_ref_die_offset_label (char *, dw_die_ref);
1984 static unsigned long int get_ref_die_offset (dw_die_ref);
1985
1986 /* Output location description stack opcode's operands (if any).
1987 The for_eh_or_skip parameter controls whether register numbers are
1988 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
1989 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
1990 info). This should be suppressed for the cases that have not been converted
1991 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
1992
1993 static void
1994 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
1995 {
1996 dw_val_ref val1 = &loc->dw_loc_oprnd1;
1997 dw_val_ref val2 = &loc->dw_loc_oprnd2;
1998
1999 switch (loc->dw_loc_opc)
2000 {
2001 #ifdef DWARF2_DEBUGGING_INFO
2002 case DW_OP_const2u:
2003 case DW_OP_const2s:
2004 dw2_asm_output_data (2, val1->v.val_int, NULL);
2005 break;
2006 case DW_OP_const4u:
2007 if (loc->dtprel)
2008 {
2009 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2010 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2011 val1->v.val_addr);
2012 fputc ('\n', asm_out_file);
2013 break;
2014 }
2015 /* FALLTHRU */
2016 case DW_OP_const4s:
2017 dw2_asm_output_data (4, val1->v.val_int, NULL);
2018 break;
2019 case DW_OP_const8u:
2020 if (loc->dtprel)
2021 {
2022 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2023 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2024 val1->v.val_addr);
2025 fputc ('\n', asm_out_file);
2026 break;
2027 }
2028 /* FALLTHRU */
2029 case DW_OP_const8s:
2030 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2031 dw2_asm_output_data (8, val1->v.val_int, NULL);
2032 break;
2033 case DW_OP_skip:
2034 case DW_OP_bra:
2035 {
2036 int offset;
2037
2038 gcc_assert (val1->val_class == dw_val_class_loc);
2039 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2040
2041 dw2_asm_output_data (2, offset, NULL);
2042 }
2043 break;
2044 case DW_OP_implicit_value:
2045 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2046 switch (val2->val_class)
2047 {
2048 case dw_val_class_const:
2049 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2050 break;
2051 case dw_val_class_vec:
2052 {
2053 unsigned int elt_size = val2->v.val_vec.elt_size;
2054 unsigned int len = val2->v.val_vec.length;
2055 unsigned int i;
2056 unsigned char *p;
2057
2058 if (elt_size > sizeof (HOST_WIDE_INT))
2059 {
2060 elt_size /= 2;
2061 len *= 2;
2062 }
2063 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2064 i < len;
2065 i++, p += elt_size)
2066 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2067 "fp or vector constant word %u", i);
2068 }
2069 break;
2070 case dw_val_class_const_double:
2071 {
2072 unsigned HOST_WIDE_INT first, second;
2073
2074 if (WORDS_BIG_ENDIAN)
2075 {
2076 first = val2->v.val_double.high;
2077 second = val2->v.val_double.low;
2078 }
2079 else
2080 {
2081 first = val2->v.val_double.low;
2082 second = val2->v.val_double.high;
2083 }
2084 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2085 first, NULL);
2086 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2087 second, NULL);
2088 }
2089 break;
2090 case dw_val_class_wide_int:
2091 {
2092 int i;
2093 int len = get_full_len (*val2->v.val_wide);
2094 if (WORDS_BIG_ENDIAN)
2095 for (i = len - 1; i >= 0; --i)
2096 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2097 val2->v.val_wide->elt (i), NULL);
2098 else
2099 for (i = 0; i < len; ++i)
2100 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2101 val2->v.val_wide->elt (i), NULL);
2102 }
2103 break;
2104 case dw_val_class_addr:
2105 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2106 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2107 break;
2108 default:
2109 gcc_unreachable ();
2110 }
2111 break;
2112 #else
2113 case DW_OP_const2u:
2114 case DW_OP_const2s:
2115 case DW_OP_const4u:
2116 case DW_OP_const4s:
2117 case DW_OP_const8u:
2118 case DW_OP_const8s:
2119 case DW_OP_skip:
2120 case DW_OP_bra:
2121 case DW_OP_implicit_value:
2122 /* We currently don't make any attempt to make sure these are
2123 	 aligned properly like we do for the main unwind info, so we
2124 	 don't support emitting anything larger than a byte when we're
2125 	 only doing unwinding.  */
2126 gcc_unreachable ();
2127 #endif
2128 case DW_OP_const1u:
2129 case DW_OP_const1s:
2130 dw2_asm_output_data (1, val1->v.val_int, NULL);
2131 break;
2132 case DW_OP_constu:
2133 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2134 break;
2135 case DW_OP_consts:
2136 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2137 break;
2138 case DW_OP_pick:
2139 dw2_asm_output_data (1, val1->v.val_int, NULL);
2140 break;
2141 case DW_OP_plus_uconst:
2142 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2143 break;
2144 case DW_OP_breg0:
2145 case DW_OP_breg1:
2146 case DW_OP_breg2:
2147 case DW_OP_breg3:
2148 case DW_OP_breg4:
2149 case DW_OP_breg5:
2150 case DW_OP_breg6:
2151 case DW_OP_breg7:
2152 case DW_OP_breg8:
2153 case DW_OP_breg9:
2154 case DW_OP_breg10:
2155 case DW_OP_breg11:
2156 case DW_OP_breg12:
2157 case DW_OP_breg13:
2158 case DW_OP_breg14:
2159 case DW_OP_breg15:
2160 case DW_OP_breg16:
2161 case DW_OP_breg17:
2162 case DW_OP_breg18:
2163 case DW_OP_breg19:
2164 case DW_OP_breg20:
2165 case DW_OP_breg21:
2166 case DW_OP_breg22:
2167 case DW_OP_breg23:
2168 case DW_OP_breg24:
2169 case DW_OP_breg25:
2170 case DW_OP_breg26:
2171 case DW_OP_breg27:
2172 case DW_OP_breg28:
2173 case DW_OP_breg29:
2174 case DW_OP_breg30:
2175 case DW_OP_breg31:
2176 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2177 break;
2178 case DW_OP_regx:
2179 {
2180 unsigned r = val1->v.val_unsigned;
2181 if (for_eh_or_skip >= 0)
2182 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2183 gcc_assert (size_of_uleb128 (r)
2184 == size_of_uleb128 (val1->v.val_unsigned));
2185 dw2_asm_output_data_uleb128 (r, NULL);
2186 }
2187 break;
2188 case DW_OP_fbreg:
2189 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2190 break;
2191 case DW_OP_bregx:
2192 {
2193 unsigned r = val1->v.val_unsigned;
2194 if (for_eh_or_skip >= 0)
2195 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2196 gcc_assert (size_of_uleb128 (r)
2197 == size_of_uleb128 (val1->v.val_unsigned));
2198 dw2_asm_output_data_uleb128 (r, NULL);
2199 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2200 }
2201 break;
2202 case DW_OP_piece:
2203 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2204 break;
2205 case DW_OP_bit_piece:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_deref_size:
2210 case DW_OP_xderef_size:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213
2214 case DW_OP_addr:
2215 if (loc->dtprel)
2216 {
2217 if (targetm.asm_out.output_dwarf_dtprel)
2218 {
2219 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2220 DWARF2_ADDR_SIZE,
2221 val1->v.val_addr);
2222 fputc ('\n', asm_out_file);
2223 }
2224 else
2225 gcc_unreachable ();
2226 }
2227 else
2228 {
2229 #ifdef DWARF2_DEBUGGING_INFO
2230 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2231 #else
2232 gcc_unreachable ();
2233 #endif
2234 }
2235 break;
2236
2237 case DW_OP_GNU_addr_index:
2238 case DW_OP_GNU_const_index:
2239 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2240 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2241 "(index into .debug_addr)");
2242 break;
2243
2244 case DW_OP_call2:
2245 case DW_OP_call4:
2246 {
2247 unsigned long die_offset
2248 = get_ref_die_offset (val1->v.val_die_ref.die);
2249 /* Make sure the offset has been computed and that we can encode it as
2250 an operand. */
2251 gcc_assert (die_offset > 0
2252 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2253 ? 0xffff
2254 : 0xffffffff));
2255 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2256 die_offset, NULL);
2257 }
2258 break;
2259
2260 case DW_OP_call_ref:
2261 case DW_OP_GNU_variable_value:
2262 {
2263 char label[MAX_ARTIFICIAL_LABEL_BYTES
2264 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2265 gcc_assert (val1->val_class == dw_val_class_die_ref);
2266 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2267 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2268 }
2269 break;
2270
2271 case DW_OP_implicit_pointer:
2272 case DW_OP_GNU_implicit_pointer:
2273 {
2274 char label[MAX_ARTIFICIAL_LABEL_BYTES
2275 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2276 gcc_assert (val1->val_class == dw_val_class_die_ref);
2277 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2278 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2279 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2280 }
2281 break;
2282
2283 case DW_OP_entry_value:
2284 case DW_OP_GNU_entry_value:
2285 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2286 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2287 break;
2288
2289 case DW_OP_const_type:
2290 case DW_OP_GNU_const_type:
2291 {
2292 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2293 gcc_assert (o);
2294 dw2_asm_output_data_uleb128 (o, NULL);
2295 switch (val2->val_class)
2296 {
2297 case dw_val_class_const:
2298 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2299 dw2_asm_output_data (1, l, NULL);
2300 dw2_asm_output_data (l, val2->v.val_int, NULL);
2301 break;
2302 case dw_val_class_vec:
2303 {
2304 unsigned int elt_size = val2->v.val_vec.elt_size;
2305 unsigned int len = val2->v.val_vec.length;
2306 unsigned int i;
2307 unsigned char *p;
2308
2309 l = len * elt_size;
2310 dw2_asm_output_data (1, l, NULL);
2311 if (elt_size > sizeof (HOST_WIDE_INT))
2312 {
2313 elt_size /= 2;
2314 len *= 2;
2315 }
2316 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2317 i < len;
2318 i++, p += elt_size)
2319 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2320 "fp or vector constant word %u", i);
2321 }
2322 break;
2323 case dw_val_class_const_double:
2324 {
2325 unsigned HOST_WIDE_INT first, second;
2326 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2327
2328 dw2_asm_output_data (1, 2 * l, NULL);
2329 if (WORDS_BIG_ENDIAN)
2330 {
2331 first = val2->v.val_double.high;
2332 second = val2->v.val_double.low;
2333 }
2334 else
2335 {
2336 first = val2->v.val_double.low;
2337 second = val2->v.val_double.high;
2338 }
2339 dw2_asm_output_data (l, first, NULL);
2340 dw2_asm_output_data (l, second, NULL);
2341 }
2342 break;
2343 case dw_val_class_wide_int:
2344 {
2345 int i;
2346 int len = get_full_len (*val2->v.val_wide);
2347 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2348
2349 dw2_asm_output_data (1, len * l, NULL);
2350 if (WORDS_BIG_ENDIAN)
2351 for (i = len - 1; i >= 0; --i)
2352 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2353 else
2354 for (i = 0; i < len; ++i)
2355 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2356 }
2357 break;
2358 default:
2359 gcc_unreachable ();
2360 }
2361 }
2362 break;
2363 case DW_OP_regval_type:
2364 case DW_OP_GNU_regval_type:
2365 {
2366 unsigned r = val1->v.val_unsigned;
2367 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2368 gcc_assert (o);
2369 if (for_eh_or_skip >= 0)
2370 {
2371 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2372 gcc_assert (size_of_uleb128 (r)
2373 == size_of_uleb128 (val1->v.val_unsigned));
2374 }
2375 dw2_asm_output_data_uleb128 (r, NULL);
2376 dw2_asm_output_data_uleb128 (o, NULL);
2377 }
2378 break;
2379 case DW_OP_deref_type:
2380 case DW_OP_GNU_deref_type:
2381 {
2382 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2383 gcc_assert (o);
2384 dw2_asm_output_data (1, val1->v.val_int, NULL);
2385 dw2_asm_output_data_uleb128 (o, NULL);
2386 }
2387 break;
2388 case DW_OP_convert:
2389 case DW_OP_reinterpret:
2390 case DW_OP_GNU_convert:
2391 case DW_OP_GNU_reinterpret:
2392 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2393 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2394 else
2395 {
2396 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2397 gcc_assert (o);
2398 dw2_asm_output_data_uleb128 (o, NULL);
2399 }
2400 break;
2401
2402 case DW_OP_GNU_parameter_ref:
2403 {
2404 unsigned long o;
2405 gcc_assert (val1->val_class == dw_val_class_die_ref);
2406 o = get_ref_die_offset (val1->v.val_die_ref.die);
2407 dw2_asm_output_data (4, o, NULL);
2408 }
2409 break;
2410
2411 default:
2412 /* Other codes have no operands. */
2413 break;
2414 }
2415 }
2416
2417 /* Output a sequence of location operations.
2418 The for_eh_or_skip parameter controls whether register numbers are
2419 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2420 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2421 info). This should be suppressed for the cases that have not been converted
2422 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2423
2424 void
2425 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2426 {
2427 for (; loc != NULL; loc = loc->dw_loc_next)
2428 {
2429 enum dwarf_location_atom opc = loc->dw_loc_opc;
2430 /* Output the opcode. */
2431 if (for_eh_or_skip >= 0
2432 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2433 {
2434 unsigned r = (opc - DW_OP_breg0);
2435 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2436 gcc_assert (r <= 31);
2437 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2438 }
2439 else if (for_eh_or_skip >= 0
2440 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2441 {
2442 unsigned r = (opc - DW_OP_reg0);
2443 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2444 gcc_assert (r <= 31);
2445 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2446 }
2447
2448 dw2_asm_output_data (1, opc,
2449 "%s", dwarf_stack_op_name (opc));
2450
2451 /* Output the operand(s) (if any). */
2452 output_loc_operands (loc, for_eh_or_skip);
2453 }
2454 }
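
/* Example, purely illustrative: if DWARF2_FRAME_REG_OUT were to map
   register 6 to 7 for EH output, then with for_eh_or_skip >= 0 a
   DW_OP_breg6 descriptor is emitted as the DW_OP_breg7 opcode followed
   by its sleb128 offset, while with for_eh_or_skip < 0 the opcode is
   emitted unchanged.  */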
2455
2456 /* Output location description stack opcode's operands (if any).
2457 The output is single bytes on a line, suitable for .cfi_escape. */
2458
2459 static void
2460 output_loc_operands_raw (dw_loc_descr_ref loc)
2461 {
2462 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2463 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2464
2465 switch (loc->dw_loc_opc)
2466 {
2467 case DW_OP_addr:
2468 case DW_OP_GNU_addr_index:
2469 case DW_OP_GNU_const_index:
2470 case DW_OP_implicit_value:
2471 /* We cannot output addresses in .cfi_escape, only bytes. */
2472 gcc_unreachable ();
2473
2474 case DW_OP_const1u:
2475 case DW_OP_const1s:
2476 case DW_OP_pick:
2477 case DW_OP_deref_size:
2478 case DW_OP_xderef_size:
2479 fputc (',', asm_out_file);
2480 dw2_asm_output_data_raw (1, val1->v.val_int);
2481 break;
2482
2483 case DW_OP_const2u:
2484 case DW_OP_const2s:
2485 fputc (',', asm_out_file);
2486 dw2_asm_output_data_raw (2, val1->v.val_int);
2487 break;
2488
2489 case DW_OP_const4u:
2490 case DW_OP_const4s:
2491 fputc (',', asm_out_file);
2492 dw2_asm_output_data_raw (4, val1->v.val_int);
2493 break;
2494
2495 case DW_OP_const8u:
2496 case DW_OP_const8s:
2497 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2498 fputc (',', asm_out_file);
2499 dw2_asm_output_data_raw (8, val1->v.val_int);
2500 break;
2501
2502 case DW_OP_skip:
2503 case DW_OP_bra:
2504 {
2505 int offset;
2506
2507 gcc_assert (val1->val_class == dw_val_class_loc);
2508 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2509
2510 fputc (',', asm_out_file);
2511 dw2_asm_output_data_raw (2, offset);
2512 }
2513 break;
2514
2515 case DW_OP_regx:
2516 {
2517 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2518 gcc_assert (size_of_uleb128 (r)
2519 == size_of_uleb128 (val1->v.val_unsigned));
2520 fputc (',', asm_out_file);
2521 dw2_asm_output_data_uleb128_raw (r);
2522 }
2523 break;
2524
2525 case DW_OP_constu:
2526 case DW_OP_plus_uconst:
2527 case DW_OP_piece:
2528 fputc (',', asm_out_file);
2529 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2530 break;
2531
2532 case DW_OP_bit_piece:
2533 fputc (',', asm_out_file);
2534 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2535 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2536 break;
2537
2538 case DW_OP_consts:
2539 case DW_OP_breg0:
2540 case DW_OP_breg1:
2541 case DW_OP_breg2:
2542 case DW_OP_breg3:
2543 case DW_OP_breg4:
2544 case DW_OP_breg5:
2545 case DW_OP_breg6:
2546 case DW_OP_breg7:
2547 case DW_OP_breg8:
2548 case DW_OP_breg9:
2549 case DW_OP_breg10:
2550 case DW_OP_breg11:
2551 case DW_OP_breg12:
2552 case DW_OP_breg13:
2553 case DW_OP_breg14:
2554 case DW_OP_breg15:
2555 case DW_OP_breg16:
2556 case DW_OP_breg17:
2557 case DW_OP_breg18:
2558 case DW_OP_breg19:
2559 case DW_OP_breg20:
2560 case DW_OP_breg21:
2561 case DW_OP_breg22:
2562 case DW_OP_breg23:
2563 case DW_OP_breg24:
2564 case DW_OP_breg25:
2565 case DW_OP_breg26:
2566 case DW_OP_breg27:
2567 case DW_OP_breg28:
2568 case DW_OP_breg29:
2569 case DW_OP_breg30:
2570 case DW_OP_breg31:
2571 case DW_OP_fbreg:
2572 fputc (',', asm_out_file);
2573 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2574 break;
2575
2576 case DW_OP_bregx:
2577 {
2578 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2579 gcc_assert (size_of_uleb128 (r)
2580 == size_of_uleb128 (val1->v.val_unsigned));
2581 fputc (',', asm_out_file);
2582 dw2_asm_output_data_uleb128_raw (r);
2583 fputc (',', asm_out_file);
2584 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2585 }
2586 break;
2587
2588 case DW_OP_implicit_pointer:
2589 case DW_OP_entry_value:
2590 case DW_OP_const_type:
2591 case DW_OP_regval_type:
2592 case DW_OP_deref_type:
2593 case DW_OP_convert:
2594 case DW_OP_reinterpret:
2595 case DW_OP_GNU_implicit_pointer:
2596 case DW_OP_GNU_entry_value:
2597 case DW_OP_GNU_const_type:
2598 case DW_OP_GNU_regval_type:
2599 case DW_OP_GNU_deref_type:
2600 case DW_OP_GNU_convert:
2601 case DW_OP_GNU_reinterpret:
2602 case DW_OP_GNU_parameter_ref:
2603 gcc_unreachable ();
2604 break;
2605
2606 default:
2607 /* Other codes have no operands. */
2608 break;
2609 }
2610 }
2611
2612 void
2613 output_loc_sequence_raw (dw_loc_descr_ref loc)
2614 {
2615 while (1)
2616 {
2617 enum dwarf_location_atom opc = loc->dw_loc_opc;
2618 /* Output the opcode. */
2619 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2620 {
2621 unsigned r = (opc - DW_OP_breg0);
2622 r = DWARF2_FRAME_REG_OUT (r, 1);
2623 gcc_assert (r <= 31);
2624 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2625 }
2626 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2627 {
2628 unsigned r = (opc - DW_OP_reg0);
2629 r = DWARF2_FRAME_REG_OUT (r, 1);
2630 gcc_assert (r <= 31);
2631 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2632 }
2633 /* Output the opcode. */
2634 fprintf (asm_out_file, "%#x", opc);
2635 output_loc_operands_raw (loc);
2636
2637 if (!loc->dw_loc_next)
2638 break;
2639 loc = loc->dw_loc_next;
2640
2641 fputc (',', asm_out_file);
2642 }
2643 }
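
/* Illustrative output, assuming the usual raw byte syntax: for the single
   descriptor DW_OP_fbreg -8 (opcode 0x91) this emits something like
   "0x91,0x78" -- the opcode in hex followed by the sleb128 encoding of
   -8 -- which is suitable as the operand list of a .cfi_escape
   directive.  */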
2644
2645 /* This function builds a dwarf location descriptor sequence from a
2646 dw_cfa_location, adding the given OFFSET to the result of the
2647 expression. */
2648
2649 struct dw_loc_descr_node *
2650 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2651 {
2652 struct dw_loc_descr_node *head, *tmp;
2653
2654 offset += cfa->offset;
2655
2656 if (cfa->indirect)
2657 {
2658 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2659 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2660 head->dw_loc_oprnd1.val_entry = NULL;
2661 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2662 add_loc_descr (&head, tmp);
2663 loc_descr_plus_const (&head, offset);
2664 }
2665 else
2666 head = new_reg_loc_descr (cfa->reg, offset);
2667
2668 return head;
2669 }
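
/* Minimal illustration, assuming the register number fits
   DW_OP_breg0..DW_OP_breg31: for a non-indirect CFA of DWARF register 7
   plus 16 and an extra OFFSET of 8, the result is the single descriptor
   DW_OP_breg7 24.  In the indirect case the register/base_offset pair is
   dereferenced first and the combined offset is applied afterwards.  */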
2670
2671 /* This function builds a dwarf location descriptor sequence for
2672 the address at OFFSET from the CFA when the stack is aligned to
2673 ALIGNMENT bytes.  */
2674
2675 struct dw_loc_descr_node *
2676 build_cfa_aligned_loc (dw_cfa_location *cfa,
2677 poly_int64 offset, HOST_WIDE_INT alignment)
2678 {
2679 struct dw_loc_descr_node *head;
2680 unsigned int dwarf_fp
2681 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2682
2683 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2684 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2685 {
2686 head = new_reg_loc_descr (dwarf_fp, 0);
2687 add_loc_descr (&head, int_loc_descriptor (alignment));
2688 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2689 loc_descr_plus_const (&head, offset);
2690 }
2691 else
2692 head = new_reg_loc_descr (dwarf_fp, offset);
2693 return head;
2694 }
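
/* Rough sketch of the expression built above, for illustration: in the
   realigned case it is DW_OP_breg<FP> 0, the ALIGNMENT constant pushed
   as a literal, DW_OP_and, with OFFSET then folded in by
   loc_descr_plus_const; otherwise it is simply DW_OP_breg<FP> OFFSET.  */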
2695 \f
2696 /* And now, the support for symbolic debugging information. */
2697
2698 /* .debug_str support. */
2699
2700 static void dwarf2out_init (const char *);
2701 static void dwarf2out_finish (const char *);
2702 static void dwarf2out_early_finish (const char *);
2703 static void dwarf2out_assembly_start (void);
2704 static void dwarf2out_define (unsigned int, const char *);
2705 static void dwarf2out_undef (unsigned int, const char *);
2706 static void dwarf2out_start_source_file (unsigned, const char *);
2707 static void dwarf2out_end_source_file (unsigned);
2708 static void dwarf2out_function_decl (tree);
2709 static void dwarf2out_begin_block (unsigned, unsigned);
2710 static void dwarf2out_end_block (unsigned, unsigned);
2711 static bool dwarf2out_ignore_block (const_tree);
2712 static void dwarf2out_early_global_decl (tree);
2713 static void dwarf2out_late_global_decl (tree);
2714 static void dwarf2out_type_decl (tree, int);
2715 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2716 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2717 dw_die_ref);
2718 static void dwarf2out_abstract_function (tree);
2719 static void dwarf2out_var_location (rtx_insn *);
2720 static void dwarf2out_size_function (tree);
2721 static void dwarf2out_begin_function (tree);
2722 static void dwarf2out_end_function (unsigned int);
2723 static void dwarf2out_register_main_translation_unit (tree unit);
2724 static void dwarf2out_set_name (tree, tree);
2725 static void dwarf2out_register_external_die (tree decl, const char *sym,
2726 unsigned HOST_WIDE_INT off);
2727 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2728 unsigned HOST_WIDE_INT *off);
2729
2730 /* The debug hooks structure. */
2731
2732 const struct gcc_debug_hooks dwarf2_debug_hooks =
2733 {
2734 dwarf2out_init,
2735 dwarf2out_finish,
2736 dwarf2out_early_finish,
2737 dwarf2out_assembly_start,
2738 dwarf2out_define,
2739 dwarf2out_undef,
2740 dwarf2out_start_source_file,
2741 dwarf2out_end_source_file,
2742 dwarf2out_begin_block,
2743 dwarf2out_end_block,
2744 dwarf2out_ignore_block,
2745 dwarf2out_source_line,
2746 dwarf2out_begin_prologue,
2747 #if VMS_DEBUGGING_INFO
2748 dwarf2out_vms_end_prologue,
2749 dwarf2out_vms_begin_epilogue,
2750 #else
2751 debug_nothing_int_charstar,
2752 debug_nothing_int_charstar,
2753 #endif
2754 dwarf2out_end_epilogue,
2755 dwarf2out_begin_function,
2756 dwarf2out_end_function, /* end_function */
2757 dwarf2out_register_main_translation_unit,
2758 dwarf2out_function_decl, /* function_decl */
2759 dwarf2out_early_global_decl,
2760 dwarf2out_late_global_decl,
2761 dwarf2out_type_decl, /* type_decl */
2762 dwarf2out_imported_module_or_decl,
2763 dwarf2out_die_ref_for_decl,
2764 dwarf2out_register_external_die,
2765 debug_nothing_tree, /* deferred_inline_function */
2766 /* The DWARF 2 backend tries to reduce debugging bloat by not
2767 emitting the abstract description of inline functions until
2768 something tries to reference them. */
2769 dwarf2out_abstract_function, /* outlining_inline_function */
2770 debug_nothing_rtx_code_label, /* label */
2771 debug_nothing_int, /* handle_pch */
2772 dwarf2out_var_location,
2773 debug_nothing_tree, /* inline_entry */
2774 dwarf2out_size_function, /* size_function */
2775 dwarf2out_switch_text_section,
2776 dwarf2out_set_name,
2777 1, /* start_end_main_source_file */
2778 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2779 };
2780
2781 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2782 {
2783 dwarf2out_init,
2784 debug_nothing_charstar,
2785 debug_nothing_charstar,
2786 dwarf2out_assembly_start,
2787 debug_nothing_int_charstar,
2788 debug_nothing_int_charstar,
2789 debug_nothing_int_charstar,
2790 debug_nothing_int,
2791 debug_nothing_int_int, /* begin_block */
2792 debug_nothing_int_int, /* end_block */
2793 debug_true_const_tree, /* ignore_block */
2794 dwarf2out_source_line, /* source_line */
2795 debug_nothing_int_int_charstar, /* begin_prologue */
2796 debug_nothing_int_charstar, /* end_prologue */
2797 debug_nothing_int_charstar, /* begin_epilogue */
2798 debug_nothing_int_charstar, /* end_epilogue */
2799 debug_nothing_tree, /* begin_function */
2800 debug_nothing_int, /* end_function */
2801 debug_nothing_tree, /* register_main_translation_unit */
2802 debug_nothing_tree, /* function_decl */
2803 debug_nothing_tree, /* early_global_decl */
2804 debug_nothing_tree, /* late_global_decl */
2805 debug_nothing_tree_int, /* type_decl */
2806 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2807 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2808 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2809 debug_nothing_tree, /* deferred_inline_function */
2810 debug_nothing_tree, /* outlining_inline_function */
2811 debug_nothing_rtx_code_label, /* label */
2812 debug_nothing_int, /* handle_pch */
2813 debug_nothing_rtx_insn, /* var_location */
2814 debug_nothing_tree, /* inline_entry */
2815 debug_nothing_tree, /* size_function */
2816 debug_nothing_void, /* switch_text_section */
2817 debug_nothing_tree_tree, /* set_name */
2818 0, /* start_end_main_source_file */
2819 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2820 };
2821 \f
2822 /* NOTE: In the comments in this file, many references are made to
2823 "Debugging Information Entries". This term is abbreviated as `DIE'
2824 throughout the remainder of this file. */
2825
2826 /* An internal representation of the DWARF output is built, and then
2827 walked to generate the DWARF debugging info. The walk of the internal
2828 representation is done after the entire program has been compiled.
2829 The types below are used to describe the internal representation. */
2830
2831 /* Whether to put type DIEs into their own section .debug_types instead
2832 of making them part of the .debug_info section. Only supported for
2833 Dwarf V4 or higher, and only when the user hasn't disabled them
2834 through -fno-debug-types-section.  It is more efficient to put them
2835 in separate comdat sections since the linker will then be able to
2836 remove duplicates.  But not all tools support .debug_types sections
2837 yet.  For Dwarf V5 or higher .debug_types no longer exists; instead
2838 the DW_UT_type unit type is used in the .debug_info section.  */
2839
2840 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2841
2842 /* Various DIE's use offsets relative to the beginning of the
2843 .debug_info section to refer to each other. */
2844
2845 typedef long int dw_offset;
2846
2847 struct comdat_type_node;
2848
2849 /* The entries in the line_info table more-or-less mirror the opcodes
2850 that are used in the real dwarf line table. Arrays of these entries
2851 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2852 supported. */
2853
2854 enum dw_line_info_opcode {
2855 /* Emit DW_LNE_set_address; the operand is the label index. */
2856 LI_set_address,
2857
2858 /* Emit a row to the matrix with the given line. This may be done
2859 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2860 special opcodes. */
2861 LI_set_line,
2862
2863 /* Emit a DW_LNS_set_file. */
2864 LI_set_file,
2865
2866 /* Emit a DW_LNS_set_column. */
2867 LI_set_column,
2868
2869 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2870 LI_negate_stmt,
2871
2872 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2873 LI_set_prologue_end,
2874 LI_set_epilogue_begin,
2875
2876 /* Emit a DW_LNE_set_discriminator. */
2877 LI_set_discriminator
2878 };
2879
2880 typedef struct GTY(()) dw_line_info_struct {
2881 enum dw_line_info_opcode opcode;
2882 unsigned int val;
2883 } dw_line_info_entry;
2884
2885
2886 struct GTY(()) dw_line_info_table {
2887 /* The label that marks the end of this section. */
2888 const char *end_label;
2889
2890 /* The values for the last row of the matrix, as collected in the table.
2891 These are used to minimize the changes to the next row. */
2892 unsigned int file_num;
2893 unsigned int line_num;
2894 unsigned int column_num;
2895 int discrim_num;
2896 bool is_stmt;
2897 bool in_use;
2898
2899 vec<dw_line_info_entry, va_gc> *entries;
2900 };
2901
2902
2903 /* Each DIE attribute has a field specifying the attribute kind,
2904 a link to the next attribute in the chain, and an attribute value.
2905 Attributes are typically linked below the DIE they modify. */
2906
2907 typedef struct GTY(()) dw_attr_struct {
2908 enum dwarf_attribute dw_attr;
2909 dw_val_node dw_attr_val;
2910 }
2911 dw_attr_node;
2912
2913
2914 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
2915 The children of each node form a circular list linked by
2916 die_sib. die_child points to the node *before* the "first" child node. */
2917
2918 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
2919 union die_symbol_or_type_node
2920 {
2921 const char * GTY ((tag ("0"))) die_symbol;
2922 comdat_type_node *GTY ((tag ("1"))) die_type_node;
2923 }
2924 GTY ((desc ("%0.comdat_type_p"))) die_id;
2925 vec<dw_attr_node, va_gc> *die_attr;
2926 dw_die_ref die_parent;
2927 dw_die_ref die_child;
2928 dw_die_ref die_sib;
2929 dw_die_ref die_definition; /* ref from a specification to its definition */
2930 dw_offset die_offset;
2931 unsigned long die_abbrev;
2932 int die_mark;
2933 unsigned int decl_id;
2934 enum dwarf_tag die_tag;
2935 /* Die is used and must not be pruned as unused. */
2936 BOOL_BITFIELD die_perennial_p : 1;
2937 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
2938 /* For an external reference to die_symbol: set if die_offset contains
2939 an extra offset relative to that symbol.  */
2940 BOOL_BITFIELD with_offset : 1;
2941 /* Whether this DIE was removed from the DIE tree, for example via
2942 prune_unused_types. We don't consider those present from the
2943 DIE lookup routines. */
2944 BOOL_BITFIELD removed : 1;
2945 /* Lots of spare bits. */
2946 }
2947 die_node;
2948
2949 /* Set to TRUE while dwarf2out_early_global_decl is running. */
2950 static bool early_dwarf;
2951 static bool early_dwarf_finished;
2952 struct set_early_dwarf {
2953 bool saved;
2954 set_early_dwarf () : saved(early_dwarf)
2955 {
2956 gcc_assert (! early_dwarf_finished);
2957 early_dwarf = true;
2958 }
2959 ~set_early_dwarf () { early_dwarf = saved; }
2960 };
2961
2962 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
2963 #define FOR_EACH_CHILD(die, c, expr) do { \
2964 c = die->die_child; \
2965 if (c) do { \
2966 c = c->die_sib; \
2967 expr; \
2968 } while (c != die->die_child); \
2969 } while (0)
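
/* Hypothetical usage sketch of the macro above:

     dw_die_ref c;
     FOR_EACH_CHILD (die, c, print_die (c, stderr));

   visits every child of DIE exactly once, in sibling order: die_child
   points at the last child, so the first iteration steps to its die_sib
   (the first child) and the loop stops once it wraps back around.  */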
2970
2971 /* The pubname structure */
2972
2973 typedef struct GTY(()) pubname_struct {
2974 dw_die_ref die;
2975 const char *name;
2976 }
2977 pubname_entry;
2978
2979
2980 struct GTY(()) dw_ranges {
2981 const char *label;
2982 /* If this is positive, it's a block number, otherwise it's a
2983 bitwise-negated index into dw_ranges_by_label. */
2984 int num;
2985 /* Index for the range list for DW_FORM_rnglistx. */
2986 unsigned int idx : 31;
2987 /* True if this range might possibly be in a different section
2988 from the previous entry.  */
2989 unsigned int maybe_new_sec : 1;
2990 };
2991
2992 /* A structure to hold a macinfo entry. */
2993
2994 typedef struct GTY(()) macinfo_struct {
2995 unsigned char code;
2996 unsigned HOST_WIDE_INT lineno;
2997 const char *info;
2998 }
2999 macinfo_entry;
3000
3001
3002 struct GTY(()) dw_ranges_by_label {
3003 const char *begin;
3004 const char *end;
3005 };
3006
3007 /* The comdat type node structure. */
3008 struct GTY(()) comdat_type_node
3009 {
3010 dw_die_ref root_die;
3011 dw_die_ref type_die;
3012 dw_die_ref skeleton_die;
3013 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3014 comdat_type_node *next;
3015 };
3016
3017 /* A list of DIEs for which we can't determine ancestry (parent_die
3018 field) just yet. Later in dwarf2out_finish we will fill in the
3019 missing bits. */
3020 typedef struct GTY(()) limbo_die_struct {
3021 dw_die_ref die;
3022 /* The tree for which this DIE was created. We use this to
3023 determine ancestry later. */
3024 tree created_for;
3025 struct limbo_die_struct *next;
3026 }
3027 limbo_die_node;
3028
3029 typedef struct skeleton_chain_struct
3030 {
3031 dw_die_ref old_die;
3032 dw_die_ref new_die;
3033 struct skeleton_chain_struct *parent;
3034 }
3035 skeleton_chain_node;
3036
3037 /* Define a macro which returns nonzero for a TYPE_DECL which was
3038 implicitly generated for a type.
3039
3040 Note that, unlike the C front-end (which generates a NULL named
3041 TYPE_DECL node for each complete tagged type, each array type,
3042 and each function type node created) the C++ front-end generates
3043 a _named_ TYPE_DECL node for each tagged type node created.
3044 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3045 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3046 front-end, but for each type, tagged or not. */
3047
3048 #define TYPE_DECL_IS_STUB(decl) \
3049 (DECL_NAME (decl) == NULL_TREE \
3050 || (DECL_ARTIFICIAL (decl) \
3051 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3052 /* This is necessary for stub decls that \
3053 appear in nested inline functions. */ \
3054 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3055 && (decl_ultimate_origin (decl) \
3056 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3057
3058 /* Information concerning the compilation unit's programming
3059 language and compiler version.  */
3060
3061 /* Fixed size portion of the DWARF compilation unit header. */
3062 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3063 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3064 + (dwarf_version >= 5 ? 4 : 3))
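
/* Worked example, for illustration: with 32-bit DWARF this is
   4 (initial length) + 4 (abbrev offset) + 2 (version) + 1 (address size)
   = 11 bytes for DWARF 4, plus one more byte for the DWARF 5 unit type,
   i.e. 12 bytes.  */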
3065
3066 /* Fixed size portion of the DWARF comdat type unit header. */
3067 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3068 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3069 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3070
3071 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3072 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3073 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3074
3075 /* Fixed size portion of public names info. */
3076 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3077
3078 /* Fixed size portion of the address range info. */
3079 #define DWARF_ARANGES_HEADER_SIZE \
3080 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3081 DWARF2_ADDR_SIZE * 2) \
3082 - DWARF_INITIAL_LENGTH_SIZE)
3083
3084 /* Size of padding portion in the address range info. It must be
3085 aligned to twice the pointer size. */
3086 #define DWARF_ARANGES_PAD_SIZE \
3087 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3088 DWARF2_ADDR_SIZE * 2) \
3089 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
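
/* Worked example, for illustration: with 32-bit DWARF (4-byte initial
   length and offsets) and 8-byte addresses the fixed part is
   4 + 4 + 4 = 12 bytes, rounded up to 16, so DWARF_ARANGES_HEADER_SIZE
   is 16 - 4 = 12 and DWARF_ARANGES_PAD_SIZE is 16 - 12 = 4.  */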
3090
3091 /* Use assembler line directives if available. */
3092 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3093 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3094 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3095 #else
3096 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3097 #endif
3098 #endif
3099
3100 /* Minimum line offset in a special line info opcode.
3101 This value was chosen to give a reasonable range of values. */
3102 #define DWARF_LINE_BASE -10
3103
3104 /* First special line opcode - leave room for the standard opcodes. */
3105 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3106
3107 /* Range of line offsets in a special line info opcode.  */
3108 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
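
/* For reference, using the standard DWARF line-number encoding and the
   values above: a special opcode is
     (line_delta - DWARF_LINE_BASE)
     + (DWARF_LINE_RANGE * operation_advance) + DWARF_LINE_OPCODE_BASE,
   so with DW_LNS_set_isa == 12 the opcode base is 13, the line range is
   242, and "advance the line by 1, the address by 0" is encoded as
   opcode 24.  */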
3109
3110 /* Flag that indicates the initial value of the is_stmt_start flag.
3111 In the present implementation, we do not mark any lines as
3112 the beginning of a source statement, because that information
3113 is not made available by the GCC front-end. */
3114 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3115
3116 /* Maximum number of operations per instruction bundle. */
3117 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3118 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3119 #endif
3120
3121 /* This location is used by calc_die_sizes() to keep track of
3122 the offset of each DIE within the .debug_info section.  */
3123 static unsigned long next_die_offset;
3124
3125 /* Record the root of the DIE's built for the current compilation unit. */
3126 static GTY(()) dw_die_ref single_comp_unit_die;
3127
3128 /* A list of type DIEs that have been separated into comdat sections. */
3129 static GTY(()) comdat_type_node *comdat_type_list;
3130
3131 /* A list of CU DIEs that have been separated. */
3132 static GTY(()) limbo_die_node *cu_die_list;
3133
3134 /* A list of DIEs with a NULL parent waiting to be relocated. */
3135 static GTY(()) limbo_die_node *limbo_die_list;
3136
3137 /* A list of DIEs for which we may have to generate
3138 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3139 static GTY(()) limbo_die_node *deferred_asm_name;
3140
3141 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3142 {
3143 typedef const char *compare_type;
3144
3145 static hashval_t hash (dwarf_file_data *);
3146 static bool equal (dwarf_file_data *, const char *);
3147 };
3148
3149 /* Filenames referenced by this compilation unit. */
3150 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3151
3152 struct decl_die_hasher : ggc_ptr_hash<die_node>
3153 {
3154 typedef tree compare_type;
3155
3156 static hashval_t hash (die_node *);
3157 static bool equal (die_node *, tree);
3158 };
3159 /* A hash table of references to DIE's that describe declarations.
3160 The key is a DECL_UID() which is a unique number identifying each decl. */
3161 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3162
3163 struct GTY ((for_user)) variable_value_struct {
3164 unsigned int decl_id;
3165 vec<dw_die_ref, va_gc> *dies;
3166 };
3167
3168 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3169 {
3170 typedef tree compare_type;
3171
3172 static hashval_t hash (variable_value_struct *);
3173 static bool equal (variable_value_struct *, tree);
3174 };
3175 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3176 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3177 the DECL_CONTEXT of the referenced VAR_DECLs.  */
3178 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3179
3180 struct block_die_hasher : ggc_ptr_hash<die_struct>
3181 {
3182 static hashval_t hash (die_struct *);
3183 static bool equal (die_struct *, die_struct *);
3184 };
3185
3186 /* A hash table of references to DIE's that describe COMMON blocks.
3187 The key is DECL_UID() ^ die_parent. */
3188 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3189
3190 typedef struct GTY(()) die_arg_entry_struct {
3191 dw_die_ref die;
3192 tree arg;
3193 } die_arg_entry;
3194
3195
3196 /* Node of the variable location list. */
3197 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3198 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3199 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3200 in mode of the EXPR_LIST node and first EXPR_LIST operand
3201 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3202 location or NULL for padding. For larger bitsizes,
3203 mode is 0 and first operand is a CONCAT with bitsize
3204 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3205 NULL as second operand. */
3206 rtx GTY (()) loc;
3207 const char * GTY (()) label;
3208 struct var_loc_node * GTY (()) next;
3209 };
3210
3211 /* Variable location list. */
3212 struct GTY ((for_user)) var_loc_list_def {
3213 struct var_loc_node * GTY (()) first;
3214
3215 /* Pointer to the last, or last but one, element of the chained
3216 list.  If the list is empty, both first and last are NULL.  If
3217 the list contains just one node, or the last node is certainly
3218 not redundant, this points to the last node; otherwise it points
3219 to the last but one.
3220 Do not mark it for GC because it is marked through the chain.  */
3221 struct var_loc_node * GTY ((skip ("%h"))) last;
3222
3223 /* Pointer to the last element before a section switch;
3224 if NULL, either sections weren't switched or first
3225 is after the section switch.  */
3226 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3227
3228 /* DECL_UID of the variable decl. */
3229 unsigned int decl_id;
3230 };
3231 typedef struct var_loc_list_def var_loc_list;
3232
3233 /* Call argument location list. */
3234 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3235 rtx GTY (()) call_arg_loc_note;
3236 const char * GTY (()) label;
3237 tree GTY (()) block;
3238 bool tail_call_p;
3239 rtx GTY (()) symbol_ref;
3240 struct call_arg_loc_node * GTY (()) next;
3241 };
3242
3243
3244 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3245 {
3246 typedef const_tree compare_type;
3247
3248 static hashval_t hash (var_loc_list *);
3249 static bool equal (var_loc_list *, const_tree);
3250 };
3251
3252 /* Table of decl location linked lists. */
3253 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3254
3255 /* Head and tail of call_arg_loc chain. */
3256 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3257 static struct call_arg_loc_node *call_arg_loc_last;
3258
3259 /* Number of call sites in the current function. */
3260 static int call_site_count = -1;
3261 /* Number of tail call sites in the current function. */
3262 static int tail_call_site_count = -1;
3263
3264 /* A cached location list. */
3265 struct GTY ((for_user)) cached_dw_loc_list_def {
3266 /* The DECL_UID of the decl that this entry describes. */
3267 unsigned int decl_id;
3268
3269 /* The cached location list. */
3270 dw_loc_list_ref loc_list;
3271 };
3272 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3273
3274 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3275 {
3276
3277 typedef const_tree compare_type;
3278
3279 static hashval_t hash (cached_dw_loc_list *);
3280 static bool equal (cached_dw_loc_list *, const_tree);
3281 };
3282
3283 /* Table of cached location lists. */
3284 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3285
3286 /* A vector of references to DIE's that are uniquely identified by their tag,
3287 presence/absence of children DIE's, and list of attribute/value pairs. */
3288 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3289
3290 /* A hash map to remember the stack usage for DWARF procedures. The value
3291 stored is the stack size difference between before the DWARF procedure
3292 invocation and after it returned.  In other words, for a DWARF procedure
3293 that consumes N stack slots and pushes M, this stores M - N.  */
3294 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3295
3296 /* A global counter for generating labels for line number data. */
3297 static unsigned int line_info_label_num;
3298
3299 /* The current table to which we should emit line number information
3300 for the current function. This will be set up at the beginning of
3301 assembly for the function. */
3302 static GTY(()) dw_line_info_table *cur_line_info_table;
3303
3304 /* The two default tables of line number info. */
3305 static GTY(()) dw_line_info_table *text_section_line_info;
3306 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3307
3308 /* The set of all non-default tables of line number info. */
3309 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3310
3311 /* A flag to tell pubnames/types export if there is an info section to
3312 refer to. */
3313 static bool info_section_emitted;
3314
3315 /* A pointer to the base of a table that contains a list of publicly
3316 accessible names. */
3317 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3318
3319 /* A pointer to the base of a table that contains a list of publicly
3320 accessible types. */
3321 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3322
3323 /* A pointer to the base of a table that contains a list of macro
3324 defines/undefines (and file start/end markers). */
3325 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3326
3327 /* True if .debug_macinfo or .debug_macros section is going to be
3328 emitted. */
3329 #define have_macinfo \
3330 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3331 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3332 && !macinfo_table->is_empty ())
3333
3334 /* Vector of dies for which we should generate .debug_ranges info. */
3335 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3336
3337 /* Vector of pairs of labels referenced in ranges_table. */
3338 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3339
3340 /* Whether we have location lists that need outputting.  */
3341 static GTY(()) bool have_location_lists;
3342
3343 /* Unique label counter. */
3344 static GTY(()) unsigned int loclabel_num;
3345
3346 /* Unique label counter for point-of-call tables. */
3347 static GTY(()) unsigned int poc_label_num;
3348
3349 /* The last file entry emitted by maybe_emit_file(). */
3350 static GTY(()) struct dwarf_file_data * last_emitted_file;
3351
3352 /* Number of internal labels generated by gen_internal_sym(). */
3353 static GTY(()) int label_num;
3354
3355 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3356
3357 /* Instances of generic types for which we need to generate debug
3358 info that describes their generic parameters and arguments.  That
3359 generation needs to happen once all types are properly laid out, so
3360 we do it at the end of compilation.  */
3361 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3362
3363 /* Offset from the "steady-state frame pointer" to the frame base,
3364 within the current function. */
3365 static poly_int64 frame_pointer_fb_offset;
3366 static bool frame_pointer_fb_offset_valid;
3367
3368 static vec<dw_die_ref> base_types;
3369
3370 /* Flags to represent a set of attribute classes for attributes that represent
3371 a scalar value (bounds, pointers, ...). */
3372 enum dw_scalar_form
3373 {
3374 dw_scalar_form_constant = 0x01,
3375 dw_scalar_form_exprloc = 0x02,
3376 dw_scalar_form_reference = 0x04
3377 };
3378
3379 /* Forward declarations for functions defined in this file. */
3380
3381 static int is_pseudo_reg (const_rtx);
3382 static tree type_main_variant (tree);
3383 static int is_tagged_type (const_tree);
3384 static const char *dwarf_tag_name (unsigned);
3385 static const char *dwarf_attr_name (unsigned);
3386 static const char *dwarf_form_name (unsigned);
3387 static tree decl_ultimate_origin (const_tree);
3388 static tree decl_class_context (tree);
3389 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3390 static inline enum dw_val_class AT_class (dw_attr_node *);
3391 static inline unsigned int AT_index (dw_attr_node *);
3392 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3393 static inline unsigned AT_flag (dw_attr_node *);
3394 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3395 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3396 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3397 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3398 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3399 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3400 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3401 unsigned int, unsigned char *);
3402 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3403 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3404 static inline const char *AT_string (dw_attr_node *);
3405 static enum dwarf_form AT_string_form (dw_attr_node *);
3406 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3407 static void add_AT_specification (dw_die_ref, dw_die_ref);
3408 static inline dw_die_ref AT_ref (dw_attr_node *);
3409 static inline int AT_ref_external (dw_attr_node *);
3410 static inline void set_AT_ref_external (dw_attr_node *, int);
3411 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3412 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3413 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3414 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3415 dw_loc_list_ref);
3416 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3417 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3418 static void remove_addr_table_entry (addr_table_entry *);
3419 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3420 static inline rtx AT_addr (dw_attr_node *);
3421 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3422 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3423 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3424 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3425 const char *);
3426 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3427 unsigned HOST_WIDE_INT);
3428 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3429 unsigned long, bool);
3430 static inline const char *AT_lbl (dw_attr_node *);
3431 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3432 static const char *get_AT_low_pc (dw_die_ref);
3433 static const char *get_AT_hi_pc (dw_die_ref);
3434 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3435 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3436 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3437 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3438 static bool is_cxx (void);
3439 static bool is_cxx (const_tree);
3440 static bool is_fortran (void);
3441 static bool is_ada (void);
3442 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3443 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3444 static void add_child_die (dw_die_ref, dw_die_ref);
3445 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3446 static dw_die_ref lookup_type_die (tree);
3447 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3448 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3449 static void equate_type_number_to_die (tree, dw_die_ref);
3450 static dw_die_ref lookup_decl_die (tree);
3451 static var_loc_list *lookup_decl_loc (const_tree);
3452 static void equate_decl_number_to_die (tree, dw_die_ref);
3453 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *);
3454 static void print_spaces (FILE *);
3455 static void print_die (dw_die_ref, FILE *);
3456 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3457 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3458 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3459 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3460 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3461 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3462 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3463 struct md5_ctx *, int *);
3464 struct checksum_attributes;
3465 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3466 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3467 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3468 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3469 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3470 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3471 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3472 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3473 static int is_type_die (dw_die_ref);
3474 static int is_comdat_die (dw_die_ref);
3475 static inline bool is_template_instantiation (dw_die_ref);
3476 static int is_declaration_die (dw_die_ref);
3477 static int should_move_die_to_comdat (dw_die_ref);
3478 static dw_die_ref clone_as_declaration (dw_die_ref);
3479 static dw_die_ref clone_die (dw_die_ref);
3480 static dw_die_ref clone_tree (dw_die_ref);
3481 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3482 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3483 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3484 static dw_die_ref generate_skeleton (dw_die_ref);
3485 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3486 dw_die_ref,
3487 dw_die_ref);
3488 static void break_out_comdat_types (dw_die_ref);
3489 static void copy_decls_for_unworthy_types (dw_die_ref);
3490
3491 static void add_sibling_attributes (dw_die_ref);
3492 static void output_location_lists (dw_die_ref);
3493 static int constant_size (unsigned HOST_WIDE_INT);
3494 static unsigned long size_of_die (dw_die_ref);
3495 static void calc_die_sizes (dw_die_ref);
3496 static void calc_base_type_die_sizes (void);
3497 static void mark_dies (dw_die_ref);
3498 static void unmark_dies (dw_die_ref);
3499 static void unmark_all_dies (dw_die_ref);
3500 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3501 static unsigned long size_of_aranges (void);
3502 static enum dwarf_form value_format (dw_attr_node *);
3503 static void output_value_format (dw_attr_node *);
3504 static void output_abbrev_section (void);
3505 static void output_die_abbrevs (unsigned long, dw_die_ref);
3506 static void output_die (dw_die_ref);
3507 static void output_compilation_unit_header (enum dwarf_unit_type);
3508 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3509 static void output_comdat_type_unit (comdat_type_node *);
3510 static const char *dwarf2_name (tree, int);
3511 static void add_pubname (tree, dw_die_ref);
3512 static void add_enumerator_pubname (const char *, dw_die_ref);
3513 static void add_pubname_string (const char *, dw_die_ref);
3514 static void add_pubtype (tree, dw_die_ref);
3515 static void output_pubnames (vec<pubname_entry, va_gc> *);
3516 static void output_aranges (void);
3517 static unsigned int add_ranges (const_tree, bool = false);
3518 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3519 bool *, bool);
3520 static void output_ranges (void);
3521 static dw_line_info_table *new_line_info_table (void);
3522 static void output_line_info (bool);
3523 static void output_file_names (void);
3524 static dw_die_ref base_type_die (tree, bool);
3525 static int is_base_type (tree);
3526 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3527 static int decl_quals (const_tree);
3528 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3529 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3530 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3531 static int type_is_enum (const_tree);
3532 static unsigned int dbx_reg_number (const_rtx);
3533 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3534 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3535 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3536 enum var_init_status);
3537 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3538 enum var_init_status);
3539 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3540 enum var_init_status);
3541 static int is_based_loc (const_rtx);
3542 static bool resolve_one_addr (rtx *);
3543 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3544 enum var_init_status);
3545 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3546 enum var_init_status);
3547 struct loc_descr_context;
3548 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3549 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3550 static dw_loc_list_ref loc_list_from_tree (tree, int,
3551 struct loc_descr_context *);
3552 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3553 struct loc_descr_context *);
3554 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3555 static tree field_type (const_tree);
3556 static unsigned int simple_type_align_in_bits (const_tree);
3557 static unsigned int simple_decl_align_in_bits (const_tree);
3558 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3559 struct vlr_context;
3560 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3561 HOST_WIDE_INT *);
3562 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3563 dw_loc_list_ref);
3564 static void add_data_member_location_attribute (dw_die_ref, tree,
3565 struct vlr_context *);
3566 static bool add_const_value_attribute (dw_die_ref, rtx);
3567 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3568 static void insert_wide_int (const wide_int &, unsigned char *, int);
3569 static void insert_float (const_rtx, unsigned char *);
3570 static rtx rtl_for_decl_location (tree);
3571 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3572 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3573 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3574 static void add_name_attribute (dw_die_ref, const char *);
3575 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3576 static void add_comp_dir_attribute (dw_die_ref);
3577 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3578 struct loc_descr_context *);
3579 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3580 struct loc_descr_context *);
3581 static void add_subscript_info (dw_die_ref, tree, bool);
3582 static void add_byte_size_attribute (dw_die_ref, tree);
3583 static void add_alignment_attribute (dw_die_ref, tree);
3584 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3585 struct vlr_context *);
3586 static void add_bit_size_attribute (dw_die_ref, tree);
3587 static void add_prototyped_attribute (dw_die_ref, tree);
3588 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3589 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3590 static void add_src_coords_attributes (dw_die_ref, tree);
3591 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3592 static void add_discr_value (dw_die_ref, dw_discr_value *);
3593 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3594 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3595 static void push_decl_scope (tree);
3596 static void pop_decl_scope (void);
3597 static dw_die_ref scope_die_for (tree, dw_die_ref);
3598 static inline int local_scope_p (dw_die_ref);
3599 static inline int class_scope_p (dw_die_ref);
3600 static inline int class_or_namespace_scope_p (dw_die_ref);
3601 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3602 static void add_calling_convention_attribute (dw_die_ref, tree);
3603 static const char *type_tag (const_tree);
3604 static tree member_declared_type (const_tree);
3605 #if 0
3606 static const char *decl_start_label (tree);
3607 #endif
3608 static void gen_array_type_die (tree, dw_die_ref);
3609 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3610 #if 0
3611 static void gen_entry_point_die (tree, dw_die_ref);
3612 #endif
3613 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3614 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3615 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3616 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3617 static void gen_formal_types_die (tree, dw_die_ref);
3618 static void gen_subprogram_die (tree, dw_die_ref);
3619 static void gen_variable_die (tree, tree, dw_die_ref);
3620 static void gen_const_die (tree, dw_die_ref);
3621 static void gen_label_die (tree, dw_die_ref);
3622 static void gen_lexical_block_die (tree, dw_die_ref);
3623 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3624 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3625 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3626 static dw_die_ref gen_compile_unit_die (const char *);
3627 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3628 static void gen_member_die (tree, dw_die_ref);
3629 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3630 enum debug_info_usage);
3631 static void gen_subroutine_type_die (tree, dw_die_ref);
3632 static void gen_typedef_die (tree, dw_die_ref);
3633 static void gen_type_die (tree, dw_die_ref);
3634 static void gen_block_die (tree, dw_die_ref);
3635 static void decls_for_scope (tree, dw_die_ref);
3636 static bool is_naming_typedef_decl (const_tree);
3637 static inline dw_die_ref get_context_die (tree);
3638 static void gen_namespace_die (tree, dw_die_ref);
3639 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3640 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3641 static dw_die_ref force_decl_die (tree);
3642 static dw_die_ref force_type_die (tree);
3643 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3644 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3645 static struct dwarf_file_data * lookup_filename (const char *);
3646 static void retry_incomplete_types (void);
3647 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3648 static void gen_generic_params_dies (tree);
3649 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3650 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3651 static void splice_child_die (dw_die_ref, dw_die_ref);
3652 static int file_info_cmp (const void *, const void *);
3653 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *,
3654 const char *, const char *);
3655 static void output_loc_list (dw_loc_list_ref);
3656 static char *gen_internal_sym (const char *);
3657 static bool want_pubnames (void);
3658
3659 static void prune_unmark_dies (dw_die_ref);
3660 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3661 static void prune_unused_types_mark (dw_die_ref, int);
3662 static void prune_unused_types_walk (dw_die_ref);
3663 static void prune_unused_types_walk_attribs (dw_die_ref);
3664 static void prune_unused_types_prune (dw_die_ref);
3665 static void prune_unused_types (void);
3666 static int maybe_emit_file (struct dwarf_file_data *fd);
3667 static inline const char *AT_vms_delta1 (dw_attr_node *);
3668 static inline const char *AT_vms_delta2 (dw_attr_node *);
3669 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3670 const char *, const char *);
3671 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3672 static void gen_remaining_tmpl_value_param_die_attribute (void);
3673 static bool generic_type_p (tree);
3674 static void schedule_generic_params_dies_gen (tree t);
3675 static void gen_scheduled_generic_parms_dies (void);
3676 static void resolve_variable_values (void);
3677
3678 static const char *comp_dir_string (void);
3679
3680 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3681
3682 /* enum for tracking thread-local variables whose address is really an offset
3683 relative to the TLS pointer, which will need link-time relocation, but will
3684 not need relocation by the DWARF consumer. */
3685
3686 enum dtprel_bool
3687 {
3688 dtprel_false = 0,
3689 dtprel_true = 1
3690 };
3691
3692 /* Return the operator to use for an address of a variable. For dtprel_true, we
3693 use DW_OP_const*. For regular variables, which need both link-time
3694 relocation and consumer-level relocation (e.g., to account for shared objects
3695 loaded at a random address), we use DW_OP_addr*. */
3696
3697 static inline enum dwarf_location_atom
3698 dw_addr_op (enum dtprel_bool dtprel)
3699 {
3700 if (dtprel == dtprel_true)
3701 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3702 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3703 else
3704 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3705 }
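
/* Illustrative sketch (not used by the compiler): without -gsplit-dwarf an
   ordinary variable gets DW_OP_addr, e.g.

     dw_loc_descr_ref d = new_loc_descr (dw_addr_op (dtprel_false), 0, 0);

   whose operand is later filled with a relocatable address, while a TLS
   variable on a 64-bit target gets DW_OP_const8u holding a DTP-relative
   offset that the DWARF consumer does not need to relocate.  */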
3706
3707 /* Return a pointer to a newly allocated address location description. If
3708 dwarf_split_debug_info is true, then record the address with the appropriate
3709 relocation. */
3710 static inline dw_loc_descr_ref
3711 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3712 {
3713 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3714
3715 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3716 ref->dw_loc_oprnd1.v.val_addr = addr;
3717 ref->dtprel = dtprel;
3718 if (dwarf_split_debug_info)
3719 ref->dw_loc_oprnd1.val_entry
3720 = add_addr_table_entry (addr,
3721 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3722 else
3723 ref->dw_loc_oprnd1.val_entry = NULL;
3724
3725 return ref;
3726 }
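
/* A minimal usage sketch, assuming SYM is some valid SYMBOL_REF rtx:

     dw_loc_descr_ref descr = new_addr_loc_descr (SYM, dtprel_false);

   With -gsplit-dwarf this also records SYM in the address table so the
   operand can later be emitted as a .debug_addr index instead of a
   relocated address.  */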
3727
3728 /* Section names used to hold DWARF debugging information. */
3729
3730 #ifndef DEBUG_INFO_SECTION
3731 #define DEBUG_INFO_SECTION ".debug_info"
3732 #endif
3733 #ifndef DEBUG_DWO_INFO_SECTION
3734 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3735 #endif
3736 #ifndef DEBUG_LTO_INFO_SECTION
3737 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3738 #endif
3739 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3740 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3741 #endif
3742 #ifndef DEBUG_ABBREV_SECTION
3743 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3744 #endif
3745 #ifndef DEBUG_LTO_ABBREV_SECTION
3746 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3747 #endif
3748 #ifndef DEBUG_DWO_ABBREV_SECTION
3749 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3750 #endif
3751 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3752 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3753 #endif
3754 #ifndef DEBUG_ARANGES_SECTION
3755 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3756 #endif
3757 #ifndef DEBUG_ADDR_SECTION
3758 #define DEBUG_ADDR_SECTION ".debug_addr"
3759 #endif
3760 #ifndef DEBUG_MACINFO_SECTION
3761 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3762 #endif
3763 #ifndef DEBUG_LTO_MACINFO_SECTION
3764 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3765 #endif
3766 #ifndef DEBUG_DWO_MACINFO_SECTION
3767 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3768 #endif
3769 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3770 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3771 #endif
3772 #ifndef DEBUG_MACRO_SECTION
3773 #define DEBUG_MACRO_SECTION ".debug_macro"
3774 #endif
3775 #ifndef DEBUG_LTO_MACRO_SECTION
3776 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3777 #endif
3778 #ifndef DEBUG_DWO_MACRO_SECTION
3779 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3780 #endif
3781 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3782 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3783 #endif
3784 #ifndef DEBUG_LINE_SECTION
3785 #define DEBUG_LINE_SECTION ".debug_line"
3786 #endif
3787 #ifndef DEBUG_LTO_LINE_SECTION
3788 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3789 #endif
3790 #ifndef DEBUG_DWO_LINE_SECTION
3791 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3792 #endif
3793 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3794 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3795 #endif
3796 #ifndef DEBUG_LOC_SECTION
3797 #define DEBUG_LOC_SECTION ".debug_loc"
3798 #endif
3799 #ifndef DEBUG_DWO_LOC_SECTION
3800 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3801 #endif
3802 #ifndef DEBUG_LOCLISTS_SECTION
3803 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3804 #endif
3805 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3806 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3807 #endif
3808 #ifndef DEBUG_PUBNAMES_SECTION
3809 #define DEBUG_PUBNAMES_SECTION \
3810 ((debug_generate_pub_sections == 2) \
3811 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3812 #endif
3813 #ifndef DEBUG_PUBTYPES_SECTION
3814 #define DEBUG_PUBTYPES_SECTION \
3815 ((debug_generate_pub_sections == 2) \
3816 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
3817 #endif
3818 #ifndef DEBUG_STR_OFFSETS_SECTION
3819 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
3820 #endif
3821 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
3822 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
3823 #endif
3824 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
3825 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
3826 #endif
3827 #ifndef DEBUG_STR_SECTION
3828 #define DEBUG_STR_SECTION ".debug_str"
3829 #endif
3830 #ifndef DEBUG_LTO_STR_SECTION
3831 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
3832 #endif
3833 #ifndef DEBUG_STR_DWO_SECTION
3834 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
3835 #endif
3836 #ifndef DEBUG_LTO_STR_DWO_SECTION
3837 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
3838 #endif
3839 #ifndef DEBUG_RANGES_SECTION
3840 #define DEBUG_RANGES_SECTION ".debug_ranges"
3841 #endif
3842 #ifndef DEBUG_RNGLISTS_SECTION
3843 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
3844 #endif
3845 #ifndef DEBUG_LINE_STR_SECTION
3846 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
3847 #endif
3848 #ifndef DEBUG_LTO_LINE_STR_SECTION
3849 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
3850 #endif
3851
3852 /* Standard ELF section names for compiled code and data. */
3853 #ifndef TEXT_SECTION_NAME
3854 #define TEXT_SECTION_NAME ".text"
3855 #endif
3856
3857 /* Section flags for .debug_str section. */
3858 #define DEBUG_STR_SECTION_FLAGS \
3859 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
3860 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
3861 : SECTION_DEBUG)
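
/* Note: the trailing "| 1" above is the mergeable-string entity size (one
   byte), carried in the low bits of the section flags; it lets the
   assembler mark .debug_str as an SHF_MERGE|SHF_STRINGS section the linker
   can deduplicate.  */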
3862
3863 /* Section flags for .debug_str.dwo section. */
3864 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
3865
3866 /* Attribute used to refer to the macro section. */
3867 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
3868 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
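
/* For example, -gdwarf-5 selects DW_AT_macros, -gdwarf-4 -gstrict-dwarf
   selects DW_AT_macro_info, and plain -gdwarf-4 selects the GNU extension
   DW_AT_GNU_macros.  */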
3869
3870 /* Labels we insert at the beginning of sections so we can reference them
3871 instead of the section names themselves. */
3872
3873 #ifndef TEXT_SECTION_LABEL
3874 #define TEXT_SECTION_LABEL "Ltext"
3875 #endif
3876 #ifndef COLD_TEXT_SECTION_LABEL
3877 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
3878 #endif
3879 #ifndef DEBUG_LINE_SECTION_LABEL
3880 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
3881 #endif
3882 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
3883 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
3884 #endif
3885 #ifndef DEBUG_INFO_SECTION_LABEL
3886 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
3887 #endif
3888 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
3889 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
3890 #endif
3891 #ifndef DEBUG_ABBREV_SECTION_LABEL
3892 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
3893 #endif
3894 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
3895 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
3896 #endif
3897 #ifndef DEBUG_ADDR_SECTION_LABEL
3898 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
3899 #endif
3900 #ifndef DEBUG_LOC_SECTION_LABEL
3901 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
3902 #endif
3903 #ifndef DEBUG_RANGES_SECTION_LABEL
3904 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
3905 #endif
3906 #ifndef DEBUG_MACINFO_SECTION_LABEL
3907 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
3908 #endif
3909 #ifndef DEBUG_MACRO_SECTION_LABEL
3910 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
3911 #endif
3912 #define SKELETON_COMP_DIE_ABBREV 1
3913 #define SKELETON_TYPE_DIE_ABBREV 2
3914
3915 /* Definitions of defaults for formats and names of various special
3916 (artificial) labels which may be generated within this file (when the -g
3917 options is used and DWARF2_DEBUGGING_INFO is in effect.
3918 If necessary, these may be overridden from within the tm.h file, but
3919 typically, overriding these defaults is unnecessary. */
3920
3921 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3922 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3923 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3924 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
3925 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3926 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3927 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3928 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3929 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3930 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3931 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3932 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3933 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
3934 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3935 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
3936
3937 #ifndef TEXT_END_LABEL
3938 #define TEXT_END_LABEL "Letext"
3939 #endif
3940 #ifndef COLD_END_LABEL
3941 #define COLD_END_LABEL "Letext_cold"
3942 #endif
3943 #ifndef BLOCK_BEGIN_LABEL
3944 #define BLOCK_BEGIN_LABEL "LBB"
3945 #endif
3946 #ifndef BLOCK_END_LABEL
3947 #define BLOCK_END_LABEL "LBE"
3948 #endif
3949 #ifndef LINE_CODE_LABEL
3950 #define LINE_CODE_LABEL "LM"
3951 #endif
3952
3953 \f
3954 /* Return the root of the DIEs built for the current compilation unit. */
3955 static dw_die_ref
3956 comp_unit_die (void)
3957 {
3958 if (!single_comp_unit_die)
3959 single_comp_unit_die = gen_compile_unit_die (NULL);
3960 return single_comp_unit_die;
3961 }
3962
3963 /* We allow a language front-end to designate a function that is to be
3964 called to "demangle" any name before it is put into a DIE. */
3965
3966 static const char *(*demangle_name_func) (const char *);
3967
3968 void
3969 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
3970 {
3971 demangle_name_func = func;
3972 }
3973
3974 /* Test if rtl node points to a pseudo register. */
3975
3976 static inline int
3977 is_pseudo_reg (const_rtx rtl)
3978 {
3979 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
3980 || (GET_CODE (rtl) == SUBREG
3981 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
3982 }
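
/* For instance, (reg:SI 73) is a pseudo on a target whose hard register
   file ends below register 73, and so is (subreg:HI (reg:SI 73) 0), while
   (reg:SI 0) always names a hard register.  */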
3983
3984 /* Return a reference to a type, with its const and volatile qualifiers
3985 removed. */
3986
3987 static inline tree
3988 type_main_variant (tree type)
3989 {
3990 type = TYPE_MAIN_VARIANT (type);
3991
3992 /* ??? There really should be only one main variant among any group of
3993 variants of a given type (and all of the MAIN_VARIANT values for all
3994 members of the group should point to that one type) but sometimes the C
3995 front-end messes this up for array types, so we work around that bug
3996 here. */
3997 if (TREE_CODE (type) == ARRAY_TYPE)
3998 while (type != TYPE_MAIN_VARIANT (type))
3999 type = TYPE_MAIN_VARIANT (type);
4000
4001 return type;
4002 }
4003
4004 /* Return nonzero if the given type node represents a tagged type. */
4005
4006 static inline int
4007 is_tagged_type (const_tree type)
4008 {
4009 enum tree_code code = TREE_CODE (type);
4010
4011 return (code == RECORD_TYPE || code == UNION_TYPE
4012 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4013 }
4014
4015 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4016
4017 static void
4018 get_ref_die_offset_label (char *label, dw_die_ref ref)
4019 {
4020 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4021 }
4022
4023 /* Return die_offset of a DIE reference to a base type. */
4024
4025 static unsigned long int
4026 get_base_type_offset (dw_die_ref ref)
4027 {
4028 if (ref->die_offset)
4029 return ref->die_offset;
4030 if (comp_unit_die ()->die_abbrev)
4031 {
4032 calc_base_type_die_sizes ();
4033 gcc_assert (ref->die_offset);
4034 }
4035 return ref->die_offset;
4036 }
4037
4038 /* Return die_offset of a DIE reference other than base type. */
4039
4040 static unsigned long int
4041 get_ref_die_offset (dw_die_ref ref)
4042 {
4043 gcc_assert (ref->die_offset);
4044 return ref->die_offset;
4045 }
4046
4047 /* Convert a DIE tag into its string name. */
4048
4049 static const char *
4050 dwarf_tag_name (unsigned int tag)
4051 {
4052 const char *name = get_DW_TAG_name (tag);
4053
4054 if (name != NULL)
4055 return name;
4056
4057 return "DW_TAG_<unknown>";
4058 }
4059
4060 /* Convert a DWARF attribute code into its string name. */
4061
4062 static const char *
4063 dwarf_attr_name (unsigned int attr)
4064 {
4065 const char *name;
4066
4067 switch (attr)
4068 {
4069 #if VMS_DEBUGGING_INFO
4070 case DW_AT_HP_prologue:
4071 return "DW_AT_HP_prologue";
4072 #else
4073 case DW_AT_MIPS_loop_unroll_factor:
4074 return "DW_AT_MIPS_loop_unroll_factor";
4075 #endif
4076
4077 #if VMS_DEBUGGING_INFO
4078 case DW_AT_HP_epilogue:
4079 return "DW_AT_HP_epilogue";
4080 #else
4081 case DW_AT_MIPS_stride:
4082 return "DW_AT_MIPS_stride";
4083 #endif
4084 }
4085
4086 name = get_DW_AT_name (attr);
4087
4088 if (name != NULL)
4089 return name;
4090
4091 return "DW_AT_<unknown>";
4092 }
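
/* The special cases above are needed because the HP/VMS and MIPS vendor
   attributes reuse the same numeric codes: DW_AT_HP_prologue shares its
   code with DW_AT_MIPS_loop_unroll_factor, and DW_AT_HP_epilogue with
   DW_AT_MIPS_stride, so the name is picked by configuration.  */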
4093
4094 /* Convert a DWARF value form code into its string name. */
4095
4096 static const char *
4097 dwarf_form_name (unsigned int form)
4098 {
4099 const char *name = get_DW_FORM_name (form);
4100
4101 if (name != NULL)
4102 return name;
4103
4104 return "DW_FORM_<unknown>";
4105 }
4106 \f
4107 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4108 instance of an inlined instance of a decl which is local to an inline
4109 function, so we have to trace all of the way back through the origin chain
4110 to find out what sort of node actually served as the original seed for the
4111 given block. */
4112
4113 static tree
4114 decl_ultimate_origin (const_tree decl)
4115 {
4116 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4117 return NULL_TREE;
4118
4119 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4120 we're trying to output the abstract instance of this function. */
4121 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4122 return NULL_TREE;
4123
4124 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4125 most distant ancestor, this should never happen. */
4126 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4127
4128 return DECL_ABSTRACT_ORIGIN (decl);
4129 }
4130
4131 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4132 of a virtual function may refer to a base class, so we check the 'this'
4133 parameter. */
4134
4135 static tree
4136 decl_class_context (tree decl)
4137 {
4138 tree context = NULL_TREE;
4139
4140 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4141 context = DECL_CONTEXT (decl);
4142 else
4143 context = TYPE_MAIN_VARIANT
4144 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4145
4146 if (context && !TYPE_P (context))
4147 context = NULL_TREE;
4148
4149 return context;
4150 }
4151 \f
4152 /* Add an attribute/value pair to a DIE. */
4153
4154 static inline void
4155 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4156 {
4157 /* Maybe this should be an assert? */
4158 if (die == NULL)
4159 return;
4160
4161 if (flag_checking)
4162 {
4163 /* Check we do not add duplicate attrs. Can't use get_AT here
4164 because that recurses to the specification/abstract origin DIE. */
4165 dw_attr_node *a;
4166 unsigned ix;
4167 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4168 gcc_assert (a->dw_attr != attr->dw_attr);
4169 }
4170
4171 vec_safe_reserve (die->die_attr, 1);
4172 vec_safe_push (die->die_attr, *attr);
4173 }
4174
4175 static inline enum dw_val_class
4176 AT_class (dw_attr_node *a)
4177 {
4178 return a->dw_attr_val.val_class;
4179 }
4180
4181 /* Return the index for any attribute that will be referenced with a
4182 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4183 are stored in dw_attr_val.v.val_str so that their reference counts
4184 can be used for pruning. */
4185
4186 static inline unsigned int
4187 AT_index (dw_attr_node *a)
4188 {
4189 if (AT_class (a) == dw_val_class_str)
4190 return a->dw_attr_val.v.val_str->index;
4191 else if (a->dw_attr_val.val_entry != NULL)
4192 return a->dw_attr_val.val_entry->index;
4193 return NOT_INDEXED;
4194 }
4195
4196 /* Add a flag value attribute to a DIE. */
4197
4198 static inline void
4199 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4200 {
4201 dw_attr_node attr;
4202
4203 attr.dw_attr = attr_kind;
4204 attr.dw_attr_val.val_class = dw_val_class_flag;
4205 attr.dw_attr_val.val_entry = NULL;
4206 attr.dw_attr_val.v.val_flag = flag;
4207 add_dwarf_attr (die, &attr);
4208 }
4209
4210 static inline unsigned
4211 AT_flag (dw_attr_node *a)
4212 {
4213 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4214 return a->dw_attr_val.v.val_flag;
4215 }
4216
4217 /* Add a signed integer attribute value to a DIE. */
4218
4219 static inline void
4220 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4221 {
4222 dw_attr_node attr;
4223
4224 attr.dw_attr = attr_kind;
4225 attr.dw_attr_val.val_class = dw_val_class_const;
4226 attr.dw_attr_val.val_entry = NULL;
4227 attr.dw_attr_val.v.val_int = int_val;
4228 add_dwarf_attr (die, &attr);
4229 }
4230
4231 static inline HOST_WIDE_INT
4232 AT_int (dw_attr_node *a)
4233 {
4234 gcc_assert (a && (AT_class (a) == dw_val_class_const
4235 || AT_class (a) == dw_val_class_const_implicit));
4236 return a->dw_attr_val.v.val_int;
4237 }
4238
4239 /* Add an unsigned integer attribute value to a DIE. */
4240
4241 static inline void
4242 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4243 unsigned HOST_WIDE_INT unsigned_val)
4244 {
4245 dw_attr_node attr;
4246
4247 attr.dw_attr = attr_kind;
4248 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4249 attr.dw_attr_val.val_entry = NULL;
4250 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4251 add_dwarf_attr (die, &attr);
4252 }
4253
4254 static inline unsigned HOST_WIDE_INT
4255 AT_unsigned (dw_attr_node *a)
4256 {
4257 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4258 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4259 return a->dw_attr_val.v.val_unsigned;
4260 }
4261
4262 /* Add an unsigned wide integer attribute value to a DIE. */
4263
4264 static inline void
4265 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4266 const wide_int& w)
4267 {
4268 dw_attr_node attr;
4269
4270 attr.dw_attr = attr_kind;
4271 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4272 attr.dw_attr_val.val_entry = NULL;
4273 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4274 *attr.dw_attr_val.v.val_wide = w;
4275 add_dwarf_attr (die, &attr);
4276 }
4277
4278 /* Add an unsigned double integer attribute value to a DIE. */
4279
4280 static inline void
4281 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4282 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4283 {
4284 dw_attr_node attr;
4285
4286 attr.dw_attr = attr_kind;
4287 attr.dw_attr_val.val_class = dw_val_class_const_double;
4288 attr.dw_attr_val.val_entry = NULL;
4289 attr.dw_attr_val.v.val_double.high = high;
4290 attr.dw_attr_val.v.val_double.low = low;
4291 add_dwarf_attr (die, &attr);
4292 }
4293
4294 /* Add a byte-array (vector) constant attribute value to a DIE. */
4295
4296 static inline void
4297 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4298 unsigned int length, unsigned int elt_size, unsigned char *array)
4299 {
4300 dw_attr_node attr;
4301
4302 attr.dw_attr = attr_kind;
4303 attr.dw_attr_val.val_class = dw_val_class_vec;
4304 attr.dw_attr_val.val_entry = NULL;
4305 attr.dw_attr_val.v.val_vec.length = length;
4306 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4307 attr.dw_attr_val.v.val_vec.array = array;
4308 add_dwarf_attr (die, &attr);
4309 }
4310
4311 /* Add an 8-byte data attribute value to a DIE. */
4312
4313 static inline void
4314 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4315 unsigned char data8[8])
4316 {
4317 dw_attr_node attr;
4318
4319 attr.dw_attr = attr_kind;
4320 attr.dw_attr_val.val_class = dw_val_class_data8;
4321 attr.dw_attr_val.val_entry = NULL;
4322 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4323 add_dwarf_attr (die, &attr);
4324 }
4325
4326 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4327 dwarf_split_debug_info, address attributes in dies destined for the
4328 final executable have force_direct set to avoid using indexed
4329 references. */
4330
4331 static inline void
4332 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4333 bool force_direct)
4334 {
4335 dw_attr_node attr;
4336 char * lbl_id;
4337
4338 lbl_id = xstrdup (lbl_low);
4339 attr.dw_attr = DW_AT_low_pc;
4340 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4341 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4342 if (dwarf_split_debug_info && !force_direct)
4343 attr.dw_attr_val.val_entry
4344 = add_addr_table_entry (lbl_id, ate_kind_label);
4345 else
4346 attr.dw_attr_val.val_entry = NULL;
4347 add_dwarf_attr (die, &attr);
4348
4349 attr.dw_attr = DW_AT_high_pc;
4350 if (dwarf_version < 4)
4351 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4352 else
4353 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4354 lbl_id = xstrdup (lbl_high);
4355 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4356 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4357 && dwarf_split_debug_info && !force_direct)
4358 attr.dw_attr_val.val_entry
4359 = add_addr_table_entry (lbl_id, ate_kind_label);
4360 else
4361 attr.dw_attr_val.val_entry = NULL;
4362 add_dwarf_attr (die, &attr);
4363 }
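
/* A usage sketch (the label strings here are hypothetical; real callers
   pass labels generated elsewhere in this file):

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   For DWARF 4 and later the high-pc attribute is classed as
   dw_val_class_high_pc so it can be output as an offset from the low pc
   rather than as a second relocated address.  */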
4364
4365 /* Hash and equality functions for debug_str_hash. */
4366
4367 hashval_t
4368 indirect_string_hasher::hash (indirect_string_node *x)
4369 {
4370 return htab_hash_string (x->str);
4371 }
4372
4373 bool
4374 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4375 {
4376 return strcmp (x1->str, x2) == 0;
4377 }
4378
4379 /* Add STR to the given string hash table. */
4380
4381 static struct indirect_string_node *
4382 find_AT_string_in_table (const char *str,
4383 hash_table<indirect_string_hasher> *table)
4384 {
4385 struct indirect_string_node *node;
4386
4387 indirect_string_node **slot
4388 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4389 if (*slot == NULL)
4390 {
4391 node = ggc_cleared_alloc<indirect_string_node> ();
4392 node->str = ggc_strdup (str);
4393 *slot = node;
4394 }
4395 else
4396 node = *slot;
4397
4398 node->refcount++;
4399 return node;
4400 }
4401
4402 /* Add STR to the indirect string hash table. */
4403
4404 static struct indirect_string_node *
4405 find_AT_string (const char *str)
4406 {
4407 if (! debug_str_hash)
4408 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4409
4410 return find_AT_string_in_table (str, debug_str_hash);
4411 }
4412
4413 /* Add a string attribute value to a DIE. */
4414
4415 static inline void
4416 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4417 {
4418 dw_attr_node attr;
4419 struct indirect_string_node *node;
4420
4421 node = find_AT_string (str);
4422
4423 attr.dw_attr = attr_kind;
4424 attr.dw_attr_val.val_class = dw_val_class_str;
4425 attr.dw_attr_val.val_entry = NULL;
4426 attr.dw_attr_val.v.val_str = node;
4427 add_dwarf_attr (die, &attr);
4428 }
4429
4430 static inline const char *
4431 AT_string (dw_attr_node *a)
4432 {
4433 gcc_assert (a && AT_class (a) == dw_val_class_str);
4434 return a->dw_attr_val.v.val_str->str;
4435 }
4436
4437 /* Call this function directly to bypass AT_string_form's logic to put
4438 the string inline in the die. */
4439
4440 static void
4441 set_indirect_string (struct indirect_string_node *node)
4442 {
4443 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4444 /* If the string is already indirect, this is a no-op. */
4445 if (node->form == DW_FORM_strp
4446 || node->form == DW_FORM_line_strp
4447 || node->form == DW_FORM_GNU_str_index)
4448 {
4449 gcc_assert (node->label);
4450 return;
4451 }
4452 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4453 ++dw2_string_counter;
4454 node->label = xstrdup (label);
4455
4456 if (!dwarf_split_debug_info)
4457 {
4458 node->form = DW_FORM_strp;
4459 node->index = NOT_INDEXED;
4460 }
4461 else
4462 {
4463 node->form = DW_FORM_GNU_str_index;
4464 node->index = NO_INDEX_ASSIGNED;
4465 }
4466 }
4467
4468 /* A helper function for dwarf2out_finish, called to reset indirect
4469 string decisions done for early LTO dwarf output before fat object
4470 dwarf output. */
4471
4472 int
4473 reset_indirect_string (indirect_string_node **h, void *)
4474 {
4475 struct indirect_string_node *node = *h;
4476 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4477 {
4478 free (node->label);
4479 node->label = NULL;
4480 node->form = (dwarf_form) 0;
4481 node->index = 0;
4482 }
4483 return 1;
4484 }
4485
4486 /* Find out whether a string should be output inline in DIE
4487 or out-of-line in .debug_str section. */
4488
4489 static enum dwarf_form
4490 find_string_form (struct indirect_string_node *node)
4491 {
4492 unsigned int len;
4493
4494 if (node->form)
4495 return node->form;
4496
4497 len = strlen (node->str) + 1;
4498
4499 /* If the string is no longer than the size of a reference, it is
4500 always better to put it inline. */
4501 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4502 return node->form = DW_FORM_string;
4503
4504 /* If we cannot expect the linker to merge strings in the .debug_str
4505 section, only put the string into .debug_str if doing so pays off
4506 even within this single module. */
4507 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4508 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4509 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4510 return node->form = DW_FORM_string;
4511
4512 set_indirect_string (node);
4513
4514 return node->form;
4515 }
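
/* Worked example: with a 4-byte DWARF offset size, the string "ab" (three
   bytes including its terminator) is always emitted inline as
   DW_FORM_string, whereas a longer string such as "a_longer_identifier"
   is normally moved to .debug_str and referenced via DW_FORM_strp, or via
   DW_FORM_GNU_str_index under -gsplit-dwarf.  */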
4516
4517 /* Find out whether the string referenced from the attribute should be
4518 output inline in DIE or out-of-line in .debug_str section. */
4519
4520 static enum dwarf_form
4521 AT_string_form (dw_attr_node *a)
4522 {
4523 gcc_assert (a && AT_class (a) == dw_val_class_str);
4524 return find_string_form (a->dw_attr_val.v.val_str);
4525 }
4526
4527 /* Add a DIE reference attribute value to a DIE. */
4528
4529 static inline void
4530 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4531 {
4532 dw_attr_node attr;
4533 gcc_checking_assert (targ_die != NULL);
4534
4535 /* With LTO we can end up trying to reference something we didn't create
4536 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4537 if (targ_die == NULL)
4538 return;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4544 attr.dw_attr_val.v.val_die_ref.external = 0;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Change DIE reference REF to point to NEW_DIE instead. */
4549
4550 static inline void
4551 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4552 {
4553 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4554 ref->dw_attr_val.v.val_die_ref.die = new_die;
4555 ref->dw_attr_val.v.val_die_ref.external = 0;
4556 }
4557
4558 /* Add an AT_specification attribute to a DIE, and also make the back
4559 pointer from the specification to the definition. */
4560
4561 static inline void
4562 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4563 {
4564 add_AT_die_ref (die, DW_AT_specification, targ_die);
4565 gcc_assert (!targ_die->die_definition);
4566 targ_die->die_definition = die;
4567 }
4568
4569 static inline dw_die_ref
4570 AT_ref (dw_attr_node *a)
4571 {
4572 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4573 return a->dw_attr_val.v.val_die_ref.die;
4574 }
4575
4576 static inline int
4577 AT_ref_external (dw_attr_node *a)
4578 {
4579 if (a && AT_class (a) == dw_val_class_die_ref)
4580 return a->dw_attr_val.v.val_die_ref.external;
4581
4582 return 0;
4583 }
4584
4585 static inline void
4586 set_AT_ref_external (dw_attr_node *a, int i)
4587 {
4588 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4589 a->dw_attr_val.v.val_die_ref.external = i;
4590 }
4591
4592 /* Add an FDE reference attribute value to a DIE. */
4593
4594 static inline void
4595 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4596 {
4597 dw_attr_node attr;
4598
4599 attr.dw_attr = attr_kind;
4600 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4601 attr.dw_attr_val.val_entry = NULL;
4602 attr.dw_attr_val.v.val_fde_index = targ_fde;
4603 add_dwarf_attr (die, &attr);
4604 }
4605
4606 /* Add a location description attribute value to a DIE. */
4607
4608 static inline void
4609 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4610 {
4611 dw_attr_node attr;
4612
4613 attr.dw_attr = attr_kind;
4614 attr.dw_attr_val.val_class = dw_val_class_loc;
4615 attr.dw_attr_val.val_entry = NULL;
4616 attr.dw_attr_val.v.val_loc = loc;
4617 add_dwarf_attr (die, &attr);
4618 }
4619
4620 static inline dw_loc_descr_ref
4621 AT_loc (dw_attr_node *a)
4622 {
4623 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4624 return a->dw_attr_val.v.val_loc;
4625 }
4626
4627 static inline void
4628 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4629 {
4630 dw_attr_node attr;
4631
4632 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4633 return;
4634
4635 attr.dw_attr = attr_kind;
4636 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4637 attr.dw_attr_val.val_entry = NULL;
4638 attr.dw_attr_val.v.val_loc_list = loc_list;
4639 add_dwarf_attr (die, &attr);
4640 have_location_lists = true;
4641 }
4642
4643 static inline dw_loc_list_ref
4644 AT_loc_list (dw_attr_node *a)
4645 {
4646 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4647 return a->dw_attr_val.v.val_loc_list;
4648 }
4649
4650 static inline dw_loc_list_ref *
4651 AT_loc_list_ptr (dw_attr_node *a)
4652 {
4653 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4654 return &a->dw_attr_val.v.val_loc_list;
4655 }
4656
4657 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4658 {
4659 static hashval_t hash (addr_table_entry *);
4660 static bool equal (addr_table_entry *, addr_table_entry *);
4661 };
4662
4663 /* Table of entries into the .debug_addr section. */
4664
4665 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4666
4667 /* Hash an address_table_entry. */
4668
4669 hashval_t
4670 addr_hasher::hash (addr_table_entry *a)
4671 {
4672 inchash::hash hstate;
4673 switch (a->kind)
4674 {
4675 case ate_kind_rtx:
4676 hstate.add_int (0);
4677 break;
4678 case ate_kind_rtx_dtprel:
4679 hstate.add_int (1);
4680 break;
4681 case ate_kind_label:
4682 return htab_hash_string (a->addr.label);
4683 default:
4684 gcc_unreachable ();
4685 }
4686 inchash::add_rtx (a->addr.rtl, hstate);
4687 return hstate.end ();
4688 }
4689
4690 /* Determine equality for two address_table_entries. */
4691
4692 bool
4693 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4694 {
4695 if (a1->kind != a2->kind)
4696 return 0;
4697 switch (a1->kind)
4698 {
4699 case ate_kind_rtx:
4700 case ate_kind_rtx_dtprel:
4701 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4702 case ate_kind_label:
4703 return strcmp (a1->addr.label, a2->addr.label) == 0;
4704 default:
4705 gcc_unreachable ();
4706 }
4707 }
4708
4709 /* Initialize an addr_table_entry. */
4710
4711 void
4712 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4713 {
4714 e->kind = kind;
4715 switch (kind)
4716 {
4717 case ate_kind_rtx:
4718 case ate_kind_rtx_dtprel:
4719 e->addr.rtl = (rtx) addr;
4720 break;
4721 case ate_kind_label:
4722 e->addr.label = (char *) addr;
4723 break;
4724 }
4725 e->refcount = 0;
4726 e->index = NO_INDEX_ASSIGNED;
4727 }
4728
4729 /* Add an address table entry for ADDR of the given KIND to the table,
4730 creating it if necessary. Defer setting an index until output time. */
4731
4732 static addr_table_entry *
4733 add_addr_table_entry (void *addr, enum ate_kind kind)
4734 {
4735 addr_table_entry *node;
4736 addr_table_entry finder;
4737
4738 gcc_assert (dwarf_split_debug_info);
4739 if (! addr_index_table)
4740 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4741 init_addr_table_entry (&finder, kind, addr);
4742 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4743
4744 if (*slot == HTAB_EMPTY_ENTRY)
4745 {
4746 node = ggc_cleared_alloc<addr_table_entry> ();
4747 init_addr_table_entry (node, kind, addr);
4748 *slot = node;
4749 }
4750 else
4751 node = *slot;
4752
4753 node->refcount++;
4754 return node;
4755 }
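
/* Lifetime sketch: every add_addr_table_entry call for the same address
   bumps the shared node's refcount; entries whose refcount drops back to
   zero (see remove_addr_table_entry) are never indexed, and the remaining
   ones receive consecutive indices in index_addr_table_entry when the
   table is finalized for output.  */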
4756
4757 /* Remove an entry from the addr table by decrementing its refcount.
4758 Strictly, decrementing the refcount would be enough, but the
4759 assertion that the entry is actually in the table has found
4760 bugs. */
4761
4762 static void
4763 remove_addr_table_entry (addr_table_entry *entry)
4764 {
4765 gcc_assert (dwarf_split_debug_info && addr_index_table);
4766 /* After an index is assigned, the table is frozen. */
4767 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
4768 entry->refcount--;
4769 }
4770
4771 /* Given a location list, remove all addresses it refers to from the
4772 address_table. */
4773
4774 static void
4775 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
4776 {
4777 for (; descr; descr = descr->dw_loc_next)
4778 if (descr->dw_loc_oprnd1.val_entry != NULL)
4779 {
4780 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
4781 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
4782 }
4783 }
4784
4785 /* A helper function for dwarf2out_finish called through
4786 htab_traverse. Assign an addr_table_entry its index. All entries
4787 must be collected into the table when this function is called,
4788 because the indexing code relies on htab_traverse to traverse nodes
4789 in the same order for each run. */
4790
4791 int
4792 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
4793 {
4794 addr_table_entry *node = *h;
4795
4796 /* Don't index unreferenced nodes. */
4797 if (node->refcount == 0)
4798 return 1;
4799
4800 gcc_assert (node->index == NO_INDEX_ASSIGNED);
4801 node->index = *index;
4802 *index += 1;
4803
4804 return 1;
4805 }
4806
4807 /* Add an address constant attribute value to a DIE. When using
4808 dwarf_split_debug_info, address attributes in dies destined for the
4809 final executable should be direct references--setting the parameter
4810 force_direct ensures this behavior. */
4811
4812 static inline void
4813 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
4814 bool force_direct)
4815 {
4816 dw_attr_node attr;
4817
4818 attr.dw_attr = attr_kind;
4819 attr.dw_attr_val.val_class = dw_val_class_addr;
4820 attr.dw_attr_val.v.val_addr = addr;
4821 if (dwarf_split_debug_info && !force_direct)
4822 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
4823 else
4824 attr.dw_attr_val.val_entry = NULL;
4825 add_dwarf_attr (die, &attr);
4826 }
4827
4828 /* Get the RTX from an address DIE attribute. */
4829
4830 static inline rtx
4831 AT_addr (dw_attr_node *a)
4832 {
4833 gcc_assert (a && AT_class (a) == dw_val_class_addr);
4834 return a->dw_attr_val.v.val_addr;
4835 }
4836
4837 /* Add a file attribute value to a DIE. */
4838
4839 static inline void
4840 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
4841 struct dwarf_file_data *fd)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_file;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_file = fd;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Get the dwarf_file_data from a file DIE attribute. */
4853
4854 static inline struct dwarf_file_data *
4855 AT_file (dw_attr_node *a)
4856 {
4857 gcc_assert (a && (AT_class (a) == dw_val_class_file
4858 || AT_class (a) == dw_val_class_file_implicit));
4859 return a->dw_attr_val.v.val_file;
4860 }
4861
4862 /* Add a vms delta attribute value to a DIE. */
4863
4864 static inline void
4865 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
4866 const char *lbl1, const char *lbl2)
4867 {
4868 dw_attr_node attr;
4869
4870 attr.dw_attr = attr_kind;
4871 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
4872 attr.dw_attr_val.val_entry = NULL;
4873 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
4874 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
4875 add_dwarf_attr (die, &attr);
4876 }
4877
4878 /* Add a label identifier attribute value to a DIE. */
4879
4880 static inline void
4881 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
4882 const char *lbl_id)
4883 {
4884 dw_attr_node attr;
4885
4886 attr.dw_attr = attr_kind;
4887 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4888 attr.dw_attr_val.val_entry = NULL;
4889 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
4890 if (dwarf_split_debug_info)
4891 attr.dw_attr_val.val_entry
4892 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
4893 ate_kind_label);
4894 add_dwarf_attr (die, &attr);
4895 }
4896
4897 /* Add a section offset attribute value to a DIE, an offset into the
4898 debug_line section. */
4899
4900 static inline void
4901 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4902 const char *label)
4903 {
4904 dw_attr_node attr;
4905
4906 attr.dw_attr = attr_kind;
4907 attr.dw_attr_val.val_class = dw_val_class_lineptr;
4908 attr.dw_attr_val.val_entry = NULL;
4909 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4910 add_dwarf_attr (die, &attr);
4911 }
4912
4913 /* Add a section offset attribute value to a DIE, an offset into the
4914 debug_loclists section. */
4915
4916 static inline void
4917 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4918 const char *label)
4919 {
4920 dw_attr_node attr;
4921
4922 attr.dw_attr = attr_kind;
4923 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
4924 attr.dw_attr_val.val_entry = NULL;
4925 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4926 add_dwarf_attr (die, &attr);
4927 }
4928
4929 /* Add a section offset attribute value to a DIE, an offset into the
4930 debug_macinfo section. */
4931
4932 static inline void
4933 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
4934 const char *label)
4935 {
4936 dw_attr_node attr;
4937
4938 attr.dw_attr = attr_kind;
4939 attr.dw_attr_val.val_class = dw_val_class_macptr;
4940 attr.dw_attr_val.val_entry = NULL;
4941 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
4942 add_dwarf_attr (die, &attr);
4943 }
4944
4945 /* Add an offset attribute value to a DIE. */
4946
4947 static inline void
4948 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
4949 unsigned HOST_WIDE_INT offset)
4950 {
4951 dw_attr_node attr;
4952
4953 attr.dw_attr = attr_kind;
4954 attr.dw_attr_val.val_class = dw_val_class_offset;
4955 attr.dw_attr_val.val_entry = NULL;
4956 attr.dw_attr_val.v.val_offset = offset;
4957 add_dwarf_attr (die, &attr);
4958 }
4959
4960 /* Add a range_list attribute value to a DIE. When using
4961 dwarf_split_debug_info, address attributes in dies destined for the
4962 final executable should be direct references--setting the parameter
4963 force_direct ensures this behavior. */
4964
4965 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
4966 #define RELOCATED_OFFSET (NULL)
4967
4968 static void
4969 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
4970 long unsigned int offset, bool force_direct)
4971 {
4972 dw_attr_node attr;
4973
4974 attr.dw_attr = attr_kind;
4975 attr.dw_attr_val.val_class = dw_val_class_range_list;
4976 /* For the range_list attribute, use val_entry to store whether the
4977 offset should follow split-debug-info or normal semantics. This
4978 value is read in output_range_list_offset. */
4979 if (dwarf_split_debug_info && !force_direct)
4980 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
4981 else
4982 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
4983 attr.dw_attr_val.v.val_offset = offset;
4984 add_dwarf_attr (die, &attr);
4985 }
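
/* Note: UNRELOCATED_OFFSET and RELOCATED_OFFSET overload the val_entry
   pointer as a two-valued flag; no addr_table_entry is allocated for a
   range-list attribute, and output_range_list_offset only inspects which
   sentinel was stored to choose between split-DWARF and normal offset
   semantics.  */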
4986
4987 /* Return the start label of a delta attribute. */
4988
4989 static inline const char *
4990 AT_vms_delta1 (dw_attr_node *a)
4991 {
4992 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
4993 return a->dw_attr_val.v.val_vms_delta.lbl1;
4994 }
4995
4996 /* Return the end label of a delta attribute. */
4997
4998 static inline const char *
4999 AT_vms_delta2 (dw_attr_node *a)
5000 {
5001 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5002 return a->dw_attr_val.v.val_vms_delta.lbl2;
5003 }
5004
5005 static inline const char *
5006 AT_lbl (dw_attr_node *a)
5007 {
5008 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5009 || AT_class (a) == dw_val_class_lineptr
5010 || AT_class (a) == dw_val_class_macptr
5011 || AT_class (a) == dw_val_class_loclistsptr
5012 || AT_class (a) == dw_val_class_high_pc));
5013 return a->dw_attr_val.v.val_lbl_id;
5014 }
5015
5016 /* Get the attribute of type attr_kind. */
5017
5018 static dw_attr_node *
5019 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5020 {
5021 dw_attr_node *a;
5022 unsigned ix;
5023 dw_die_ref spec = NULL;
5024
5025 if (! die)
5026 return NULL;
5027
5028 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5029 if (a->dw_attr == attr_kind)
5030 return a;
5031 else if (a->dw_attr == DW_AT_specification
5032 || a->dw_attr == DW_AT_abstract_origin)
5033 spec = AT_ref (a);
5034
5035 if (spec)
5036 return get_AT (spec, attr_kind);
5037
5038 return NULL;
5039 }
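
/* Example: if a member function's definition DIE carries only a
   DW_AT_specification reference to its in-class declaration, then
   get_AT (def_die, DW_AT_name) still finds the name, because the lookup
   recurses through DW_AT_specification and DW_AT_abstract_origin.  */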
5040
5041 /* Returns the parent of the declaration of DIE. */
5042
5043 static dw_die_ref
5044 get_die_parent (dw_die_ref die)
5045 {
5046 dw_die_ref t;
5047
5048 if (!die)
5049 return NULL;
5050
5051 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5052 || (t = get_AT_ref (die, DW_AT_specification)))
5053 die = t;
5054
5055 return die->die_parent;
5056 }
5057
5058 /* Return the "low pc" attribute value, typically associated with a subprogram
5059 DIE. Return null if the "low pc" attribute is either not present, or if it
5060 cannot be represented as an assembler label identifier. */
5061
5062 static inline const char *
5063 get_AT_low_pc (dw_die_ref die)
5064 {
5065 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5066
5067 return a ? AT_lbl (a) : NULL;
5068 }
5069
5070 /* Return the "high pc" attribute value, typically associated with a subprogram
5071 DIE. Return null if the "high pc" attribute is either not present, or if it
5072 cannot be represented as an assembler label identifier. */
5073
5074 static inline const char *
5075 get_AT_hi_pc (dw_die_ref die)
5076 {
5077 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5078
5079 return a ? AT_lbl (a) : NULL;
5080 }
5081
5082 /* Return the value of the string attribute designated by ATTR_KIND, or
5083 NULL if it is not present. */
5084
5085 static inline const char *
5086 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5087 {
5088 dw_attr_node *a = get_AT (die, attr_kind);
5089
5090 return a ? AT_string (a) : NULL;
5091 }
5092
5093 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5094 if it is not present. */
5095
5096 static inline int
5097 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5098 {
5099 dw_attr_node *a = get_AT (die, attr_kind);
5100
5101 return a ? AT_flag (a) : 0;
5102 }
5103
5104 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5105 if it is not present. */
5106
5107 static inline unsigned
5108 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5109 {
5110 dw_attr_node *a = get_AT (die, attr_kind);
5111
5112 return a ? AT_unsigned (a) : 0;
5113 }
5114
5115 static inline dw_die_ref
5116 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5117 {
5118 dw_attr_node *a = get_AT (die, attr_kind);
5119
5120 return a ? AT_ref (a) : NULL;
5121 }
5122
5123 static inline struct dwarf_file_data *
5124 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5125 {
5126 dw_attr_node *a = get_AT (die, attr_kind);
5127
5128 return a ? AT_file (a) : NULL;
5129 }
5130
5131 /* Return TRUE if the language is C++. */
5132
5133 static inline bool
5134 is_cxx (void)
5135 {
5136 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5137
5138 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5139 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5140 }
5141
5142 /* Return TRUE if DECL was created by the C++ frontend. */
5143
5144 static bool
5145 is_cxx (const_tree decl)
5146 {
5147 if (in_lto_p)
5148 {
5149 const_tree context = get_ultimate_context (decl);
5150 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5151 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5152 }
5153 return is_cxx ();
5154 }
5155
5156 /* Return TRUE if the language is Fortran. */
5157
5158 static inline bool
5159 is_fortran (void)
5160 {
5161 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5162
5163 return (lang == DW_LANG_Fortran77
5164 || lang == DW_LANG_Fortran90
5165 || lang == DW_LANG_Fortran95
5166 || lang == DW_LANG_Fortran03
5167 || lang == DW_LANG_Fortran08);
5168 }
5169
5170 static inline bool
5171 is_fortran (const_tree decl)
5172 {
5173 if (in_lto_p)
5174 {
5175 const_tree context = get_ultimate_context (decl);
5176 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5177 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5178 "GNU Fortran", 11) == 0
5179 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5180 "GNU F77") == 0);
5181 }
5182 return is_fortran ();
5183 }
5184
5185 /* Return TRUE if the language is Ada. */
5186
5187 static inline bool
5188 is_ada (void)
5189 {
5190 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5191
5192 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5193 }
5194
5195 /* Remove the specified attribute if present. Return TRUE if removal
5196 was successful. */
5197
5198 static bool
5199 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5200 {
5201 dw_attr_node *a;
5202 unsigned ix;
5203
5204 if (! die)
5205 return false;
5206
5207 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5208 if (a->dw_attr == attr_kind)
5209 {
5210 if (AT_class (a) == dw_val_class_str)
5211 if (a->dw_attr_val.v.val_str->refcount)
5212 a->dw_attr_val.v.val_str->refcount--;
5213
5214 /* vec::ordered_remove should help reduce the number of abbrevs
5215 that are needed. */
5216 die->die_attr->ordered_remove (ix);
5217 return true;
5218 }
5219 return false;
5220 }
5221
5222 /* Remove CHILD from its parent. PREV must have the property that
5223 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5224
5225 static void
5226 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5227 {
5228 gcc_assert (child->die_parent == prev->die_parent);
5229 gcc_assert (prev->die_sib == child);
5230 if (prev == child)
5231 {
5232 gcc_assert (child->die_parent->die_child == child);
5233 prev = NULL;
5234 }
5235 else
5236 prev->die_sib = child->die_sib;
5237 if (child->die_parent->die_child == child)
5238 child->die_parent->die_child = prev;
5239 child->die_sib = NULL;
5240 }
5241
5242 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5243 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5244
5245 static void
5246 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5247 {
5248 dw_die_ref parent = old_child->die_parent;
5249
5250 gcc_assert (parent == prev->die_parent);
5251 gcc_assert (prev->die_sib == old_child);
5252
5253 new_child->die_parent = parent;
5254 if (prev == old_child)
5255 {
5256 gcc_assert (parent->die_child == old_child);
5257 new_child->die_sib = new_child;
5258 }
5259 else
5260 {
5261 prev->die_sib = new_child;
5262 new_child->die_sib = old_child->die_sib;
5263 }
5264 if (old_child->die_parent->die_child == old_child)
5265 old_child->die_parent->die_child = new_child;
5266 old_child->die_sib = NULL;
5267 }
5268
5269 /* Move all children from OLD_PARENT to NEW_PARENT. */
5270
5271 static void
5272 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5273 {
5274 dw_die_ref c;
5275 new_parent->die_child = old_parent->die_child;
5276 old_parent->die_child = NULL;
5277 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5278 }
5279
5280 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5281 matches TAG. */
5282
5283 static void
5284 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5285 {
5286 dw_die_ref c;
5287
5288 c = die->die_child;
5289 if (c) do {
5290 dw_die_ref prev = c;
5291 c = c->die_sib;
5292 while (c->die_tag == tag)
5293 {
5294 remove_child_with_prev (c, prev);
5295 c->die_parent = NULL;
5296 /* Might have removed every child. */
5297 if (die->die_child == NULL)
5298 return;
5299 c = prev->die_sib;
5300 }
5301 } while (c != die->die_child);
5302 }
5303
5304 /* Add a CHILD_DIE as the last child of DIE. */
5305
5306 static void
5307 add_child_die (dw_die_ref die, dw_die_ref child_die)
5308 {
5309 /* FIXME this should probably be an assert. */
5310 if (! die || ! child_die)
5311 return;
5312 gcc_assert (die != child_die);
5313
5314 child_die->die_parent = die;
5315 if (die->die_child)
5316 {
5317 child_die->die_sib = die->die_child->die_sib;
5318 die->die_child->die_sib = child_die;
5319 }
5320 else
5321 child_die->die_sib = child_die;
5322 die->die_child = child_die;
5323 }
5324
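/* Illustrative note (not part of the original sources): the children of a
   DIE form a circular singly-linked list. DIE->die_child points at the LAST
   child, and each child's die_sib points at the next child, with the last
   child pointing back at the first. After adding children A, B and C to a
   parent P with add_child_die, the links look like

     P->die_child == C,  A->die_sib == B,  B->die_sib == C,  C->die_sib == A

   which is why add_child_die above splices the new child in right after
   die_child->die_sib (the first child) and then makes it the new last
   child. */
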
5325 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5326
5327 static void
5328 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5329 dw_die_ref after_die)
5330 {
5331 gcc_assert (die
5332 && child_die
5333 && after_die
5334 && die->die_child
5335 && die != child_die);
5336
5337 child_die->die_parent = die;
5338 child_die->die_sib = after_die->die_sib;
5339 after_die->die_sib = child_die;
5340 if (die->die_child == after_die)
5341 die->die_child = child_die;
5342 }
5343
5344 /* Unassociate CHILD from its parent, and make its parent be
5345 NEW_PARENT. */
5346
5347 static void
5348 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5349 {
5350 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5351 if (p->die_sib == child)
5352 {
5353 remove_child_with_prev (child, p);
5354 break;
5355 }
5356 add_child_die (new_parent, child);
5357 }
5358
5359 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5360 is the specification, to the end of PARENT's list of children.
5361 This is done by removing and re-adding it. */
5362
5363 static void
5364 splice_child_die (dw_die_ref parent, dw_die_ref child)
5365 {
5366 /* We want the declaration DIE from inside the class, not the
5367 specification DIE at toplevel. */
5368 if (child->die_parent != parent)
5369 {
5370 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5371
5372 if (tmp)
5373 child = tmp;
5374 }
5375
5376 gcc_assert (child->die_parent == parent
5377 || (child->die_parent
5378 == get_AT_ref (parent, DW_AT_specification)));
5379
5380 reparent_child (child, parent);
5381 }
5382
5383 /* Create and return a new die with TAG_VALUE as tag. */
5384
5385 static inline dw_die_ref
5386 new_die_raw (enum dwarf_tag tag_value)
5387 {
5388 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5389 die->die_tag = tag_value;
5390 return die;
5391 }
5392
5393 /* Create and return a new die with a parent of PARENT_DIE. If
5394 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5395 associated tree T must be supplied to determine parenthood
5396 later. */
5397
5398 static inline dw_die_ref
5399 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5400 {
5401 dw_die_ref die = new_die_raw (tag_value);
5402
5403 if (parent_die != NULL)
5404 add_child_die (parent_die, die);
5405 else
5406 {
5407 limbo_die_node *limbo_node;
5408
5409 /* No DIEs created after early dwarf should end up in limbo,
5410 because the limbo list should not persist past LTO
5411 streaming. */
5412 if (tag_value != DW_TAG_compile_unit
5413 /* These are allowed because they're generated while
5414 breaking out COMDAT units late. */
5415 && tag_value != DW_TAG_type_unit
5416 && tag_value != DW_TAG_skeleton_unit
5417 && !early_dwarf
5418 /* Allow nested functions to live in limbo because they will
5419 only temporarily live there, as decls_for_scope will fix
5420 them up. */
5421 && (TREE_CODE (t) != FUNCTION_DECL
5422 || !decl_function_context (t))
5423 /* Same as nested functions above but for types. Types that
5424 are local to a function will be fixed in
5425 decls_for_scope. */
5426 && (!RECORD_OR_UNION_TYPE_P (t)
5427 || !TYPE_CONTEXT (t)
5428 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5429 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5430 especially in the ltrans stage, but once we implement LTO
5431 dwarf streaming, we should remove this exception. */
5432 && !in_lto_p)
5433 {
5434 fprintf (stderr, "symbol ended up in limbo too late:");
5435 debug_generic_stmt (t);
5436 gcc_unreachable ();
5437 }
5438
5439 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5440 limbo_node->die = die;
5441 limbo_node->created_for = t;
5442 limbo_node->next = limbo_die_list;
5443 limbo_die_list = limbo_node;
5444 }
5445
5446 return die;
5447 }
5448
5449 /* Return the DIE associated with the given type specifier. */
5450
5451 static inline dw_die_ref
5452 lookup_type_die (tree type)
5453 {
5454 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5455 if (die && die->removed)
5456 {
5457 TYPE_SYMTAB_DIE (type) = NULL;
5458 return NULL;
5459 }
5460 return die;
5461 }
5462
5463 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5464 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5465    anonymous type instead of the one of the naming typedef. */
5466
5467 static inline dw_die_ref
5468 strip_naming_typedef (tree type, dw_die_ref type_die)
5469 {
5470 if (type
5471 && TREE_CODE (type) == RECORD_TYPE
5472 && type_die
5473 && type_die->die_tag == DW_TAG_typedef
5474 && is_naming_typedef_decl (TYPE_NAME (type)))
5475 type_die = get_AT_ref (type_die, DW_AT_type);
5476 return type_die;
5477 }
5478
5479 /* Like lookup_type_die, but if type is an anonymous type named by a
5480    typedef[1], return the DIE of the anonymous type instead of the one of
5481 the naming typedef. This is because in gen_typedef_die, we did
5482 equate the anonymous struct named by the typedef with the DIE of
5483 the naming typedef. So by default, lookup_type_die on an anonymous
5484 struct yields the DIE of the naming typedef.
5485
5486 [1]: Read the comment of is_naming_typedef_decl to learn about what
5487 a naming typedef is. */
5488
5489 static inline dw_die_ref
5490 lookup_type_die_strip_naming_typedef (tree type)
5491 {
5492 dw_die_ref die = lookup_type_die (type);
5493 return strip_naming_typedef (type, die);
5494 }
5495
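/* Illustrative note (not part of the original sources): a "naming typedef"
   is the C/C++ idiom

     typedef struct { int i; } foo;

   where an otherwise anonymous struct takes its name from the typedef.
   Because gen_typedef_die equates such an anonymous struct with the
   DW_TAG_typedef DIE, the two helpers above are needed whenever the caller
   wants the DIE of the underlying anonymous type rather than the DIE of the
   naming typedef. */
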
5496 /* Equate a DIE to a given type specifier. */
5497
5498 static inline void
5499 equate_type_number_to_die (tree type, dw_die_ref type_die)
5500 {
5501 TYPE_SYMTAB_DIE (type) = type_die;
5502 }
5503
5504 /* Returns a hash value for X (which really is a die_struct). */
5505
5506 inline hashval_t
5507 decl_die_hasher::hash (die_node *x)
5508 {
5509 return (hashval_t) x->decl_id;
5510 }
5511
5512 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5513
5514 inline bool
5515 decl_die_hasher::equal (die_node *x, tree y)
5516 {
5517 return (x->decl_id == DECL_UID (y));
5518 }
5519
5520 /* Return the DIE associated with a given declaration. */
5521
5522 static inline dw_die_ref
5523 lookup_decl_die (tree decl)
5524 {
5525 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5526 NO_INSERT);
5527 if (!die)
5528 return NULL;
5529 if ((*die)->removed)
5530 {
5531 decl_die_table->clear_slot (die);
5532 return NULL;
5533 }
5534 return *die;
5535 }
5536
5537
5538 /* For DECL which might have early dwarf output query a SYMBOL + OFFSET
5539    style reference. Return true if we found one referring to a DIE for
5540 DECL, otherwise return false. */
5541
5542 static bool
5543 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5544 unsigned HOST_WIDE_INT *off)
5545 {
5546 dw_die_ref die;
5547
5548 if (flag_wpa && !decl_die_table)
5549 return false;
5550
5551 if (TREE_CODE (decl) == BLOCK)
5552 die = BLOCK_DIE (decl);
5553 else
5554 die = lookup_decl_die (decl);
5555 if (!die)
5556 return false;
5557
5558 /* During WPA stage we currently use DIEs to store the
5559 decl <-> label + offset map. That's quite inefficient but it
5560 works for now. */
5561 if (flag_wpa)
5562 {
5563 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5564 if (!ref)
5565 {
5566 gcc_assert (die == comp_unit_die ());
5567 return false;
5568 }
5569 *off = ref->die_offset;
5570 *sym = ref->die_id.die_symbol;
5571 return true;
5572 }
5573
5574 /* Similar to get_ref_die_offset_label, but using the "correct"
5575 label. */
5576 *off = die->die_offset;
5577 while (die->die_parent)
5578 die = die->die_parent;
5579 /* For the containing CU DIE we compute a die_symbol in
5580 compute_comp_unit_symbol. */
5581 gcc_assert (die->die_tag == DW_TAG_compile_unit
5582 && die->die_id.die_symbol != NULL);
5583 *sym = die->die_id.die_symbol;
5584 return true;
5585 }
5586
5587 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5588
5589 static void
5590 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5591 const char *symbol, HOST_WIDE_INT offset)
5592 {
5593 /* Create a fake DIE that contains the reference. Don't use
5594 new_die because we don't want to end up in the limbo list. */
5595 dw_die_ref ref = new_die_raw (die->die_tag);
5596 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5597 ref->die_offset = offset;
5598 ref->with_offset = 1;
5599 add_AT_die_ref (die, attr_kind, ref);
5600 }
5601
5602 /* Create a DIE for DECL if required and add a reference to a DIE
5603 at SYMBOL + OFFSET which contains attributes dumped early. */
5604
5605 static void
5606 dwarf2out_register_external_die (tree decl, const char *sym,
5607 unsigned HOST_WIDE_INT off)
5608 {
5609 if (debug_info_level == DINFO_LEVEL_NONE)
5610 return;
5611
5612 if (flag_wpa && !decl_die_table)
5613 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5614
5615 dw_die_ref die
5616 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5617 gcc_assert (!die);
5618
5619 tree ctx;
5620 dw_die_ref parent = NULL;
5621   /* Need to look up a DIE for the decl's context - the containing
5622      function or translation unit. */
5623 if (TREE_CODE (decl) == BLOCK)
5624 {
5625 ctx = BLOCK_SUPERCONTEXT (decl);
5626 /* ??? We do not output DIEs for all scopes thus skip as
5627 many DIEs as needed. */
5628 while (TREE_CODE (ctx) == BLOCK
5629 && !BLOCK_DIE (ctx))
5630 ctx = BLOCK_SUPERCONTEXT (ctx);
5631 }
5632 else
5633 ctx = DECL_CONTEXT (decl);
5634 while (ctx && TYPE_P (ctx))
5635 ctx = TYPE_CONTEXT (ctx);
5636 if (ctx)
5637 {
5638 if (TREE_CODE (ctx) == BLOCK)
5639 parent = BLOCK_DIE (ctx);
5640 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5641 /* Keep the 1:1 association during WPA. */
5642 && !flag_wpa)
5643 /* Otherwise all late annotations go to the main CU which
5644 imports the original CUs. */
5645 parent = comp_unit_die ();
5646 else if (TREE_CODE (ctx) == FUNCTION_DECL
5647 && TREE_CODE (decl) != PARM_DECL
5648 && TREE_CODE (decl) != BLOCK)
5649 	/* Leave the parent determination of function-local entities to
5650 	   when we process scope vars. */
5651 ;
5652 else
5653 parent = lookup_decl_die (ctx);
5654 }
5655 else
5656 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5657        Handle this case gracefully by parenting such DIEs in the CU DIE. */
5658 parent = comp_unit_die ();
5659 /* Create a DIE "stub". */
5660 switch (TREE_CODE (decl))
5661 {
5662 case TRANSLATION_UNIT_DECL:
5663 if (! flag_wpa)
5664 {
5665 die = comp_unit_die ();
5666 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5667 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5668 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5669 to create a DIE for the original CUs. */
5670 return;
5671 }
5672 /* Keep the 1:1 association during WPA. */
5673 die = new_die (DW_TAG_compile_unit, NULL, decl);
5674 break;
5675 case NAMESPACE_DECL:
5676 if (is_fortran (decl))
5677 die = new_die (DW_TAG_module, parent, decl);
5678 else
5679 die = new_die (DW_TAG_namespace, parent, decl);
5680 break;
5681 case FUNCTION_DECL:
5682 die = new_die (DW_TAG_subprogram, parent, decl);
5683 break;
5684 case VAR_DECL:
5685 die = new_die (DW_TAG_variable, parent, decl);
5686 break;
5687 case RESULT_DECL:
5688 die = new_die (DW_TAG_variable, parent, decl);
5689 break;
5690 case PARM_DECL:
5691 die = new_die (DW_TAG_formal_parameter, parent, decl);
5692 break;
5693 case CONST_DECL:
5694 die = new_die (DW_TAG_constant, parent, decl);
5695 break;
5696 case LABEL_DECL:
5697 die = new_die (DW_TAG_label, parent, decl);
5698 break;
5699 case BLOCK:
5700 die = new_die (DW_TAG_lexical_block, parent, decl);
5701 break;
5702 default:
5703 gcc_unreachable ();
5704 }
5705 if (TREE_CODE (decl) == BLOCK)
5706 BLOCK_DIE (decl) = die;
5707 else
5708 equate_decl_number_to_die (decl, die);
5709
5710 /* Add a reference to the DIE providing early debug at $sym + off. */
5711 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5712 }
5713
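/* Illustrative note (not part of the original sources):
   dwarf2out_die_ref_for_decl and dwarf2out_register_external_die together
   implement the LTO early-debug handoff. At compile time the full DIE for a
   decl is emitted into the object file and is identified by an assembler
   symbol plus an offset; at LTRANS time only the small stub DIE created
   above is emitted, wired to the early DIE through a DW_AT_abstract_origin
   reference of the form "symbol + offset", so the late DWARF augments the
   early output instead of duplicating it. */
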
5714 /* Returns a hash value for X (which really is a var_loc_list). */
5715
5716 inline hashval_t
5717 decl_loc_hasher::hash (var_loc_list *x)
5718 {
5719 return (hashval_t) x->decl_id;
5720 }
5721
5722 /* Return nonzero if decl_id of var_loc_list X is the same as
5723 UID of decl *Y. */
5724
5725 inline bool
5726 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5727 {
5728 return (x->decl_id == DECL_UID (y));
5729 }
5730
5731 /* Return the var_loc list associated with a given declaration. */
5732
5733 static inline var_loc_list *
5734 lookup_decl_loc (const_tree decl)
5735 {
5736 if (!decl_loc_table)
5737 return NULL;
5738 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5739 }
5740
5741 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
5742
5743 inline hashval_t
5744 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5745 {
5746 return (hashval_t) x->decl_id;
5747 }
5748
5749 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5750 UID of decl *Y. */
5751
5752 inline bool
5753 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5754 {
5755 return (x->decl_id == DECL_UID (y));
5756 }
5757
5758 /* Equate a DIE to a particular declaration. */
5759
5760 static void
5761 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
5762 {
5763 unsigned int decl_id = DECL_UID (decl);
5764
5765 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
5766 decl_die->decl_id = decl_id;
5767 }
5768
5769 /* Return how many bits the PIECE EXPR_LIST covers. */
5770
5771 static HOST_WIDE_INT
5772 decl_piece_bitsize (rtx piece)
5773 {
5774 int ret = (int) GET_MODE (piece);
5775 if (ret)
5776 return ret;
5777 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
5778 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
5779 return INTVAL (XEXP (XEXP (piece, 0), 0));
5780 }
5781
5782 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
5783
5784 static rtx *
5785 decl_piece_varloc_ptr (rtx piece)
5786 {
5787 if ((int) GET_MODE (piece))
5788 return &XEXP (piece, 0);
5789 else
5790 return &XEXP (XEXP (piece, 0), 1);
5791 }
5792
5793 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
5794    NEXT is the chain of following piece nodes. */
5795
5796 static rtx_expr_list *
5797 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
5798 {
5799 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
5800 return alloc_EXPR_LIST (bitsize, loc_note, next);
5801 else
5802 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
5803 GEN_INT (bitsize),
5804 loc_note), next);
5805 }
5806
5807 /* Return rtx that should be stored into loc field for
5808 LOC_NOTE and BITPOS/BITSIZE. */
5809
5810 static rtx
5811 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
5812 HOST_WIDE_INT bitsize)
5813 {
5814 if (bitsize != -1)
5815 {
5816 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
5817 if (bitpos != 0)
5818 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
5819 }
5820 return loc_note;
5821 }
5822
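/* Illustrative note (not part of the original sources): the piece list built
   by construct_piece_list is an EXPR_LIST chain in which each node's machine
   mode field is reused to store the piece's size in bits (see
   decl_piece_bitsize above); a bit size too large for that encoding is kept
   in a CONCAT of a CONST_INT and the location note instead. As an example,
   a location note describing bits 32..63 of a decl (bitpos 32, bitsize 32)
   is represented roughly as

     piece (32 bits, NULL padding) -> piece (32 bits, loc_note)

   i.e. a leading padding piece with a NULL location followed by the piece
   carrying the actual location note. */
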
5823 /* This function either modifies location piece list *DEST in place
5824    (if SRC and INNER are NULL), or copies location piece list *SRC
5825    to *DEST while modifying it. Location BITPOS is modified to
5826    contain LOC_NOTE; any pieces overlapping it are removed (or, when
5827    copying, not copied), and some padding around it is added if
5828    needed. When modifying in place, DEST should point to the
5829    EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
5830    copying, SRC points to the start of the whole list and INNER points
5831    to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
5832
5833 static void
5834 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
5835 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
5836 HOST_WIDE_INT bitsize, rtx loc_note)
5837 {
5838 HOST_WIDE_INT diff;
5839 bool copy = inner != NULL;
5840
5841 if (copy)
5842 {
5843 /* First copy all nodes preceding the current bitpos. */
5844 while (src != inner)
5845 {
5846 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5847 decl_piece_bitsize (*src), NULL_RTX);
5848 dest = &XEXP (*dest, 1);
5849 src = &XEXP (*src, 1);
5850 }
5851 }
5852 /* Add padding if needed. */
5853 if (bitpos != piece_bitpos)
5854 {
5855 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
5856 copy ? NULL_RTX : *dest);
5857 dest = &XEXP (*dest, 1);
5858 }
5859 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
5860 {
5861 gcc_assert (!copy);
5862       /* A piece with correct bitpos and bitsize already exists,
5863 just update the location for it and return. */
5864 *decl_piece_varloc_ptr (*dest) = loc_note;
5865 return;
5866 }
5867 /* Add the piece that changed. */
5868 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
5869 dest = &XEXP (*dest, 1);
5870 /* Skip over pieces that overlap it. */
5871 diff = bitpos - piece_bitpos + bitsize;
5872 if (!copy)
5873 src = dest;
5874 while (diff > 0 && *src)
5875 {
5876 rtx piece = *src;
5877 diff -= decl_piece_bitsize (piece);
5878 if (copy)
5879 src = &XEXP (piece, 1);
5880 else
5881 {
5882 *src = XEXP (piece, 1);
5883 free_EXPR_LIST_node (piece);
5884 }
5885 }
5886 /* Add padding if needed. */
5887 if (diff < 0 && *src)
5888 {
5889 if (!copy)
5890 dest = src;
5891 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
5892 dest = &XEXP (*dest, 1);
5893 }
5894 if (!copy)
5895 return;
5896 /* Finally copy all nodes following it. */
5897 while (*src)
5898 {
5899 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
5900 decl_piece_bitsize (*src), NULL_RTX);
5901 dest = &XEXP (*dest, 1);
5902 src = &XEXP (*src, 1);
5903 }
5904 }
5905
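/* Illustrative note (not part of the original sources): as an example of the
   in-place mode of adjust_piece_list (SRC and INNER both NULL), suppose the
   existing list describes an 8-byte variable as two 32-bit pieces

     [0..31] -> note_A,  [32..63] -> note_B

   and a new note note_C arrives for bits 32..63 (note_A/note_B/note_C are
   hypothetical names used only for illustration). The piece covering those
   bits is found, its size matches, and only its location is rewritten:

     [0..31] -> note_A,  [32..63] -> note_C

   Overlapping pieces of a different shape would instead be dropped and any
   resulting gaps filled with padding pieces that have a NULL location. */
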
5906 /* Add a variable location node to the linked list for DECL. */
5907
5908 static struct var_loc_node *
5909 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
5910 {
5911 unsigned int decl_id;
5912 var_loc_list *temp;
5913 struct var_loc_node *loc = NULL;
5914 HOST_WIDE_INT bitsize = -1, bitpos = -1;
5915
5916 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
5917 {
5918 tree realdecl = DECL_DEBUG_EXPR (decl);
5919 if (handled_component_p (realdecl)
5920 || (TREE_CODE (realdecl) == MEM_REF
5921 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5922 {
5923 bool reverse;
5924 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5925 &bitsize, &reverse);
5926 if (!innerdecl
5927 || !DECL_P (innerdecl)
5928 || DECL_IGNORED_P (innerdecl)
5929 || TREE_STATIC (innerdecl)
5930 || bitsize == 0
5931 || bitpos + bitsize > 256)
5932 return NULL;
5933 decl = innerdecl;
5934 }
5935 }
5936
5937 decl_id = DECL_UID (decl);
5938 var_loc_list **slot
5939 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
5940 if (*slot == NULL)
5941 {
5942 temp = ggc_cleared_alloc<var_loc_list> ();
5943 temp->decl_id = decl_id;
5944 *slot = temp;
5945 }
5946 else
5947 temp = *slot;
5948
5949 /* For PARM_DECLs try to keep around the original incoming value,
5950 even if that means we'll emit a zero-range .debug_loc entry. */
5951 if (temp->last
5952 && temp->first == temp->last
5953 && TREE_CODE (decl) == PARM_DECL
5954 && NOTE_P (temp->first->loc)
5955 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
5956 && DECL_INCOMING_RTL (decl)
5957 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
5958 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
5959 == GET_CODE (DECL_INCOMING_RTL (decl))
5960 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
5961 && (bitsize != -1
5962 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
5963 NOTE_VAR_LOCATION_LOC (loc_note))
5964 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
5965 != NOTE_VAR_LOCATION_STATUS (loc_note))))
5966 {
5967 loc = ggc_cleared_alloc<var_loc_node> ();
5968 temp->first->next = loc;
5969 temp->last = loc;
5970 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
5971 }
5972 else if (temp->last)
5973 {
5974 struct var_loc_node *last = temp->last, *unused = NULL;
5975 rtx *piece_loc = NULL, last_loc_note;
5976 HOST_WIDE_INT piece_bitpos = 0;
5977 if (last->next)
5978 {
5979 last = last->next;
5980 gcc_assert (last->next == NULL);
5981 }
5982 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
5983 {
5984 piece_loc = &last->loc;
5985 do
5986 {
5987 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
5988 if (piece_bitpos + cur_bitsize > bitpos)
5989 break;
5990 piece_bitpos += cur_bitsize;
5991 piece_loc = &XEXP (*piece_loc, 1);
5992 }
5993 while (*piece_loc);
5994 }
5995       /* TEMP->LAST here is a pointer either to the last-but-one or to
5996 	 the last element in the chained list; LAST is a pointer to the
5997 	 last element. */
5998 if (label && strcmp (last->label, label) == 0)
5999 {
6000 	  /* For SRA-optimized variables, if there weren't any real
6001 	     insns since the last note, just modify the last node. */
6002 if (piece_loc != NULL)
6003 {
6004 adjust_piece_list (piece_loc, NULL, NULL,
6005 bitpos, piece_bitpos, bitsize, loc_note);
6006 return NULL;
6007 }
6008 /* If the last note doesn't cover any instructions, remove it. */
6009 if (temp->last != last)
6010 {
6011 temp->last->next = NULL;
6012 unused = last;
6013 last = temp->last;
6014 gcc_assert (strcmp (last->label, label) != 0);
6015 }
6016 else
6017 {
6018 gcc_assert (temp->first == temp->last
6019 || (temp->first->next == temp->last
6020 && TREE_CODE (decl) == PARM_DECL));
6021 memset (temp->last, '\0', sizeof (*temp->last));
6022 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6023 return temp->last;
6024 }
6025 }
6026 if (bitsize == -1 && NOTE_P (last->loc))
6027 last_loc_note = last->loc;
6028 else if (piece_loc != NULL
6029 && *piece_loc != NULL_RTX
6030 && piece_bitpos == bitpos
6031 && decl_piece_bitsize (*piece_loc) == bitsize)
6032 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6033 else
6034 last_loc_note = NULL_RTX;
6035 /* If the current location is the same as the end of the list,
6036 and either both or neither of the locations is uninitialized,
6037 we have nothing to do. */
6038 if (last_loc_note == NULL_RTX
6039 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6040 NOTE_VAR_LOCATION_LOC (loc_note)))
6041 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6042 != NOTE_VAR_LOCATION_STATUS (loc_note))
6043 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6044 == VAR_INIT_STATUS_UNINITIALIZED)
6045 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6046 == VAR_INIT_STATUS_UNINITIALIZED))))
6047 {
6048 /* Add LOC to the end of list and update LAST. If the last
6049 element of the list has been removed above, reuse its
6050 memory for the new node, otherwise allocate a new one. */
6051 if (unused)
6052 {
6053 loc = unused;
6054 memset (loc, '\0', sizeof (*loc));
6055 }
6056 else
6057 loc = ggc_cleared_alloc<var_loc_node> ();
6058 if (bitsize == -1 || piece_loc == NULL)
6059 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6060 else
6061 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6062 bitpos, piece_bitpos, bitsize, loc_note);
6063 last->next = loc;
6064 /* Ensure TEMP->LAST will point either to the new last but one
6065 element of the chain, or to the last element in it. */
6066 if (last != temp->last)
6067 temp->last = last;
6068 }
6069 else if (unused)
6070 ggc_free (unused);
6071 }
6072 else
6073 {
6074 loc = ggc_cleared_alloc<var_loc_node> ();
6075 temp->first = loc;
6076 temp->last = loc;
6077 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6078 }
6079 return loc;
6080 }
6081 \f
6082 /* Keep track of the number of spaces used to indent the
6083 output of the debugging routines that print the structure of
6084 the DIE internal representation. */
6085 static int print_indent;
6086
6087 /* Indent the line the number of spaces given by print_indent. */
6088
6089 static inline void
6090 print_spaces (FILE *outfile)
6091 {
6092 fprintf (outfile, "%*s", print_indent, "");
6093 }
6094
6095 /* Print a type signature in hex. */
6096
6097 static inline void
6098 print_signature (FILE *outfile, char *sig)
6099 {
6100 int i;
6101
6102 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6103 fprintf (outfile, "%02x", sig[i] & 0xff);
6104 }
6105
6106 static inline void
6107 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6108 {
6109 if (discr_value->pos)
6110     fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6111   else
6112     fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6113 }
6114
6115 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6116
6117 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6118 RECURSE, output location descriptor operations. */
6119
6120 static void
6121 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6122 {
6123 switch (val->val_class)
6124 {
6125 case dw_val_class_addr:
6126 fprintf (outfile, "address");
6127 break;
6128 case dw_val_class_offset:
6129 fprintf (outfile, "offset");
6130 break;
6131 case dw_val_class_loc:
6132 fprintf (outfile, "location descriptor");
6133 if (val->v.val_loc == NULL)
6134 fprintf (outfile, " -> <null>\n");
6135 else if (recurse)
6136 {
6137 fprintf (outfile, ":\n");
6138 print_indent += 4;
6139 print_loc_descr (val->v.val_loc, outfile);
6140 print_indent -= 4;
6141 }
6142 else
6143 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6144 break;
6145 case dw_val_class_loc_list:
6146 fprintf (outfile, "location list -> label:%s",
6147 val->v.val_loc_list->ll_symbol);
6148 break;
6149 case dw_val_class_range_list:
6150 fprintf (outfile, "range list");
6151 break;
6152 case dw_val_class_const:
6153 case dw_val_class_const_implicit:
6154 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6155 break;
6156 case dw_val_class_unsigned_const:
6157 case dw_val_class_unsigned_const_implicit:
6158 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6159 break;
6160 case dw_val_class_const_double:
6161 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6162 HOST_WIDE_INT_PRINT_UNSIGNED")",
6163 val->v.val_double.high,
6164 val->v.val_double.low);
6165 break;
6166 case dw_val_class_wide_int:
6167 {
6168 int i = val->v.val_wide->get_len ();
6169 fprintf (outfile, "constant (");
6170 gcc_assert (i > 0);
6171 if (val->v.val_wide->elt (i - 1) == 0)
6172 fprintf (outfile, "0x");
6173 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6174 val->v.val_wide->elt (--i));
6175 while (--i >= 0)
6176 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6177 val->v.val_wide->elt (i));
6178 fprintf (outfile, ")");
6179 break;
6180 }
6181 case dw_val_class_vec:
6182 fprintf (outfile, "floating-point or vector constant");
6183 break;
6184 case dw_val_class_flag:
6185 fprintf (outfile, "%u", val->v.val_flag);
6186 break;
6187 case dw_val_class_die_ref:
6188 if (val->v.val_die_ref.die != NULL)
6189 {
6190 dw_die_ref die = val->v.val_die_ref.die;
6191
6192 if (die->comdat_type_p)
6193 {
6194 fprintf (outfile, "die -> signature: ");
6195 print_signature (outfile,
6196 die->die_id.die_type_node->signature);
6197 }
6198 else if (die->die_id.die_symbol)
6199 {
6200 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6201 if (die->with_offset)
6202 fprintf (outfile, " + %ld", die->die_offset);
6203 }
6204 else
6205 fprintf (outfile, "die -> %ld", die->die_offset);
6206 fprintf (outfile, " (%p)", (void *) die);
6207 }
6208 else
6209 fprintf (outfile, "die -> <null>");
6210 break;
6211 case dw_val_class_vms_delta:
6212 fprintf (outfile, "delta: @slotcount(%s-%s)",
6213 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6214 break;
6215 case dw_val_class_lbl_id:
6216 case dw_val_class_lineptr:
6217 case dw_val_class_macptr:
6218 case dw_val_class_loclistsptr:
6219 case dw_val_class_high_pc:
6220 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6221 break;
6222 case dw_val_class_str:
6223 if (val->v.val_str->str != NULL)
6224 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6225 else
6226 fprintf (outfile, "<null>");
6227 break;
6228 case dw_val_class_file:
6229 case dw_val_class_file_implicit:
6230 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6231 val->v.val_file->emitted_number);
6232 break;
6233 case dw_val_class_data8:
6234 {
6235 int i;
6236
6237 for (i = 0; i < 8; i++)
6238 fprintf (outfile, "%02x", val->v.val_data8[i]);
6239 break;
6240 }
6241 case dw_val_class_discr_value:
6242 print_discr_value (outfile, &val->v.val_discr_value);
6243 break;
6244 case dw_val_class_discr_list:
6245 for (dw_discr_list_ref node = val->v.val_discr_list;
6246 node != NULL;
6247 node = node->dw_discr_next)
6248 {
6249 if (node->dw_discr_range)
6250 {
6251 	      print_discr_value (outfile, &node->dw_discr_lower_bound);
6252 	      fprintf (outfile, " .. ");
6253 	      print_discr_value (outfile, &node->dw_discr_upper_bound);
6254 }
6255 else
6256 print_discr_value (outfile, &node->dw_discr_lower_bound);
6257
6258 if (node->dw_discr_next != NULL)
6259 fprintf (outfile, " | ");
6260 	    }
      break;
6261 default:
6262 break;
6263 }
6264 }
6265
6266 /* Likewise, for a DIE attribute. */
6267
6268 static void
6269 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6270 {
6271 print_dw_val (&a->dw_attr_val, recurse, outfile);
6272 }
6273
6274
6275 /* Print the list of operands in the LOC location description to OUTFILE. This
6276 routine is a debugging aid only. */
6277
6278 static void
6279 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6280 {
6281 dw_loc_descr_ref l = loc;
6282
6283 if (loc == NULL)
6284 {
6285 print_spaces (outfile);
6286 fprintf (outfile, "<null>\n");
6287 return;
6288 }
6289
6290 for (l = loc; l != NULL; l = l->dw_loc_next)
6291 {
6292 print_spaces (outfile);
6293 fprintf (outfile, "(%p) %s",
6294 (void *) l,
6295 dwarf_stack_op_name (l->dw_loc_opc));
6296 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6297 {
6298 fprintf (outfile, " ");
6299 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6300 }
6301 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6302 {
6303 fprintf (outfile, ", ");
6304 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6305 }
6306 fprintf (outfile, "\n");
6307 }
6308 }
6309
6310 /* Print the information associated with a given DIE, and its children.
6311 This routine is a debugging aid only. */
6312
6313 static void
6314 print_die (dw_die_ref die, FILE *outfile)
6315 {
6316 dw_attr_node *a;
6317 dw_die_ref c;
6318 unsigned ix;
6319
6320 print_spaces (outfile);
6321 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6322 die->die_offset, dwarf_tag_name (die->die_tag),
6323 (void*) die);
6324 print_spaces (outfile);
6325 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6326 fprintf (outfile, " offset: %ld", die->die_offset);
6327 fprintf (outfile, " mark: %d\n", die->die_mark);
6328
6329 if (die->comdat_type_p)
6330 {
6331 print_spaces (outfile);
6332 fprintf (outfile, " signature: ");
6333 print_signature (outfile, die->die_id.die_type_node->signature);
6334 fprintf (outfile, "\n");
6335 }
6336
6337 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6338 {
6339 print_spaces (outfile);
6340 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6341
6342 print_attribute (a, true, outfile);
6343 fprintf (outfile, "\n");
6344 }
6345
6346 if (die->die_child != NULL)
6347 {
6348 print_indent += 4;
6349 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6350 print_indent -= 4;
6351 }
6352 if (print_indent == 0)
6353 fprintf (outfile, "\n");
6354 }
6355
6356 /* Print the list of operations in the LOC location description. */
6357
6358 DEBUG_FUNCTION void
6359 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6360 {
6361 print_loc_descr (loc, stderr);
6362 }
6363
6364 /* Print the information collected for a given DIE. */
6365
6366 DEBUG_FUNCTION void
6367 debug_dwarf_die (dw_die_ref die)
6368 {
6369 print_die (die, stderr);
6370 }
6371
6372 DEBUG_FUNCTION void
6373 debug (die_struct &ref)
6374 {
6375 print_die (&ref, stderr);
6376 }
6377
6378 DEBUG_FUNCTION void
6379 debug (die_struct *ptr)
6380 {
6381 if (ptr)
6382 debug (*ptr);
6383 else
6384 fprintf (stderr, "<nil>\n");
6385 }
6386
6387
6388 /* Print all DWARF information collected for the compilation unit.
6389 This routine is a debugging aid only. */
6390
6391 DEBUG_FUNCTION void
6392 debug_dwarf (void)
6393 {
6394 print_indent = 0;
6395 print_die (comp_unit_die (), stderr);
6396 }
6397
6398 /* Verify the DIE tree structure. */
6399
6400 DEBUG_FUNCTION void
6401 verify_die (dw_die_ref die)
6402 {
6403 gcc_assert (!die->die_mark);
6404 if (die->die_parent == NULL
6405 && die->die_sib == NULL)
6406 return;
6407 /* Verify the die_sib list is cyclic. */
6408 dw_die_ref x = die;
6409 do
6410 {
6411 x->die_mark = 1;
6412 x = x->die_sib;
6413 }
6414 while (x && !x->die_mark);
6415 gcc_assert (x == die);
6416 x = die;
6417 do
6418 {
6419 /* Verify all dies have the same parent. */
6420 gcc_assert (x->die_parent == die->die_parent);
6421 if (x->die_child)
6422 {
6423 /* Verify the child has the proper parent and recurse. */
6424 gcc_assert (x->die_child->die_parent == x);
6425 verify_die (x->die_child);
6426 }
6427 x->die_mark = 0;
6428 x = x->die_sib;
6429 }
6430 while (x && x->die_mark);
6431 }
6432
6433 /* Sanity checks on DIEs. */
6434
6435 static void
6436 check_die (dw_die_ref die)
6437 {
6438 unsigned ix;
6439 dw_attr_node *a;
6440 bool inline_found = false;
6441 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6442 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6443 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6444 {
6445 switch (a->dw_attr)
6446 {
6447 case DW_AT_inline:
6448 if (a->dw_attr_val.v.val_unsigned)
6449 inline_found = true;
6450 break;
6451 case DW_AT_location:
6452 ++n_location;
6453 break;
6454 case DW_AT_low_pc:
6455 ++n_low_pc;
6456 break;
6457 case DW_AT_high_pc:
6458 ++n_high_pc;
6459 break;
6460 case DW_AT_artificial:
6461 ++n_artificial;
6462 break;
6463 case DW_AT_decl_column:
6464 ++n_decl_column;
6465 break;
6466 case DW_AT_decl_line:
6467 ++n_decl_line;
6468 break;
6469 case DW_AT_decl_file:
6470 ++n_decl_file;
6471 break;
6472 default:
6473 break;
6474 }
6475 }
6476 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6477 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6478 {
6479 fprintf (stderr, "Duplicate attributes in DIE:\n");
6480 debug_dwarf_die (die);
6481 gcc_unreachable ();
6482 }
6483 if (inline_found)
6484 {
6485 /* A debugging information entry that is a member of an abstract
6486 instance tree [that has DW_AT_inline] should not contain any
6487 attributes which describe aspects of the subroutine which vary
6488 between distinct inlined expansions or distinct out-of-line
6489 expansions. */
6490 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6491 gcc_assert (a->dw_attr != DW_AT_low_pc
6492 && a->dw_attr != DW_AT_high_pc
6493 && a->dw_attr != DW_AT_location
6494 && a->dw_attr != DW_AT_frame_base
6495 && a->dw_attr != DW_AT_call_all_calls
6496 && a->dw_attr != DW_AT_GNU_all_call_sites);
6497 }
6498 }
6499 \f
6500 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6501 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6502 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6503
6504 /* Calculate the checksum of a location expression. */
6505
6506 static inline void
6507 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6508 {
6509 int tem;
6510 inchash::hash hstate;
6511 hashval_t hash;
6512
6513 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6514 CHECKSUM (tem);
6515 hash_loc_operands (loc, hstate);
6516 hash = hstate.end();
6517 CHECKSUM (hash);
6518 }
6519
6520 /* Calculate the checksum of an attribute. */
6521
6522 static void
6523 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6524 {
6525 dw_loc_descr_ref loc;
6526 rtx r;
6527
6528 CHECKSUM (at->dw_attr);
6529
6530 /* We don't care that this was compiled with a different compiler
6531 snapshot; if the output is the same, that's what matters. */
6532 if (at->dw_attr == DW_AT_producer)
6533 return;
6534
6535 switch (AT_class (at))
6536 {
6537 case dw_val_class_const:
6538 case dw_val_class_const_implicit:
6539 CHECKSUM (at->dw_attr_val.v.val_int);
6540 break;
6541 case dw_val_class_unsigned_const:
6542 case dw_val_class_unsigned_const_implicit:
6543 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6544 break;
6545 case dw_val_class_const_double:
6546 CHECKSUM (at->dw_attr_val.v.val_double);
6547 break;
6548 case dw_val_class_wide_int:
6549 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6550 get_full_len (*at->dw_attr_val.v.val_wide)
6551 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6552 break;
6553 case dw_val_class_vec:
6554 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6555 (at->dw_attr_val.v.val_vec.length
6556 * at->dw_attr_val.v.val_vec.elt_size));
6557 break;
6558 case dw_val_class_flag:
6559 CHECKSUM (at->dw_attr_val.v.val_flag);
6560 break;
6561 case dw_val_class_str:
6562 CHECKSUM_STRING (AT_string (at));
6563 break;
6564
6565 case dw_val_class_addr:
6566 r = AT_addr (at);
6567 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6568 CHECKSUM_STRING (XSTR (r, 0));
6569 break;
6570
6571 case dw_val_class_offset:
6572 CHECKSUM (at->dw_attr_val.v.val_offset);
6573 break;
6574
6575 case dw_val_class_loc:
6576 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6577 loc_checksum (loc, ctx);
6578 break;
6579
6580 case dw_val_class_die_ref:
6581 die_checksum (AT_ref (at), ctx, mark);
6582 break;
6583
6584 case dw_val_class_fde_ref:
6585 case dw_val_class_vms_delta:
6586 case dw_val_class_lbl_id:
6587 case dw_val_class_lineptr:
6588 case dw_val_class_macptr:
6589 case dw_val_class_loclistsptr:
6590 case dw_val_class_high_pc:
6591 break;
6592
6593 case dw_val_class_file:
6594 case dw_val_class_file_implicit:
6595 CHECKSUM_STRING (AT_file (at)->filename);
6596 break;
6597
6598 case dw_val_class_data8:
6599 CHECKSUM (at->dw_attr_val.v.val_data8);
6600 break;
6601
6602 default:
6603 break;
6604 }
6605 }
6606
6607 /* Calculate the checksum of a DIE. */
6608
6609 static void
6610 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6611 {
6612 dw_die_ref c;
6613 dw_attr_node *a;
6614 unsigned ix;
6615
6616 /* To avoid infinite recursion. */
6617 if (die->die_mark)
6618 {
6619 CHECKSUM (die->die_mark);
6620 return;
6621 }
6622 die->die_mark = ++(*mark);
6623
6624 CHECKSUM (die->die_tag);
6625
6626 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6627 attr_checksum (a, ctx, mark);
6628
6629 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6630 }
6631
6632 #undef CHECKSUM
6633 #undef CHECKSUM_BLOCK
6634 #undef CHECKSUM_STRING
6635
6636 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6637 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6638 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6639 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6640 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6641 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6642 #define CHECKSUM_ATTR(FOO) \
6643 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6644
6645 /* Calculate the checksum of a number in signed LEB128 format. */
6646
6647 static void
6648 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6649 {
6650 unsigned char byte;
6651 bool more;
6652
6653 while (1)
6654 {
6655 byte = (value & 0x7f);
6656 value >>= 7;
6657 more = !((value == 0 && (byte & 0x40) == 0)
6658 || (value == -1 && (byte & 0x40) != 0));
6659 if (more)
6660 byte |= 0x80;
6661 CHECKSUM (byte);
6662 if (!more)
6663 break;
6664 }
6665 }
6666
6667 /* Calculate the checksum of a number in unsigned LEB128 format. */
6668
6669 static void
6670 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6671 {
6672 while (1)
6673 {
6674 unsigned char byte = (value & 0x7f);
6675 value >>= 7;
6676 if (value != 0)
6677 /* More bytes to follow. */
6678 byte |= 0x80;
6679 CHECKSUM (byte);
6680 if (value == 0)
6681 break;
6682 }
6683 }
6684
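/* Illustrative note (not part of the original sources): worked examples for
   the two LEB128 checksum helpers above. The unsigned value 624485 (0x98765)
   is emitted low 7 bits first as the bytes 0xe5 0x8e 0x26, the last byte
   having its continuation bit (0x80) clear. For the signed encoding, -2 is
   emitted as the single byte 0x7e: after the shift VALUE is -1 and the sign
   bit 0x40 of the byte is set, so no continuation byte is needed. These
   byte sequences are what CHECKSUM feeds into the MD5 context. */
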
6685 /* Checksum the context of the DIE. This adds the names of any
6686 surrounding namespaces or structures to the checksum. */
6687
6688 static void
6689 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6690 {
6691 const char *name;
6692 dw_die_ref spec;
6693 int tag = die->die_tag;
6694
6695 if (tag != DW_TAG_namespace
6696 && tag != DW_TAG_structure_type
6697 && tag != DW_TAG_class_type)
6698 return;
6699
6700 name = get_AT_string (die, DW_AT_name);
6701
6702 spec = get_AT_ref (die, DW_AT_specification);
6703 if (spec != NULL)
6704 die = spec;
6705
6706 if (die->die_parent != NULL)
6707 checksum_die_context (die->die_parent, ctx);
6708
6709 CHECKSUM_ULEB128 ('C');
6710 CHECKSUM_ULEB128 (tag);
6711 if (name != NULL)
6712 CHECKSUM_STRING (name);
6713 }
6714
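/* Illustrative note (not part of the original sources): for a member
   declared inside

     namespace N { struct S { int m; }; }

   checksum_die_context contributes, outermost context first, 'C'
   DW_TAG_namespace "N" followed by 'C' DW_TAG_structure_type "S", so two
   declarations hash equal only when their enclosing namespaces and record
   types match as well. */
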
6715 /* Calculate the checksum of a location expression. */
6716
6717 static inline void
6718 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6719 {
6720 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6721 were emitted as a DW_FORM_sdata instead of a location expression. */
6722 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6723 {
6724 CHECKSUM_ULEB128 (DW_FORM_sdata);
6725 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6726 return;
6727 }
6728
6729 /* Otherwise, just checksum the raw location expression. */
6730 while (loc != NULL)
6731 {
6732 inchash::hash hstate;
6733 hashval_t hash;
6734
6735 CHECKSUM_ULEB128 (loc->dtprel);
6736 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6737 hash_loc_operands (loc, hstate);
6738 hash = hstate.end ();
6739 CHECKSUM (hash);
6740 loc = loc->dw_loc_next;
6741 }
6742 }
6743
6744 /* Calculate the checksum of an attribute. */
6745
6746 static void
6747 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6748 struct md5_ctx *ctx, int *mark)
6749 {
6750 dw_loc_descr_ref loc;
6751 rtx r;
6752
6753 if (AT_class (at) == dw_val_class_die_ref)
6754 {
6755 dw_die_ref target_die = AT_ref (at);
6756
6757 /* For pointer and reference types, we checksum only the (qualified)
6758 name of the target type (if there is a name). For friend entries,
6759 we checksum only the (qualified) name of the target type or function.
6760 This allows the checksum to remain the same whether the target type
6761 is complete or not. */
6762 if ((at->dw_attr == DW_AT_type
6763 && (tag == DW_TAG_pointer_type
6764 || tag == DW_TAG_reference_type
6765 || tag == DW_TAG_rvalue_reference_type
6766 || tag == DW_TAG_ptr_to_member_type))
6767 || (at->dw_attr == DW_AT_friend
6768 && tag == DW_TAG_friend))
6769 {
6770 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
6771
6772 if (name_attr != NULL)
6773 {
6774 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6775
6776 if (decl == NULL)
6777 decl = target_die;
6778 CHECKSUM_ULEB128 ('N');
6779 CHECKSUM_ULEB128 (at->dw_attr);
6780 if (decl->die_parent != NULL)
6781 checksum_die_context (decl->die_parent, ctx);
6782 CHECKSUM_ULEB128 ('E');
6783 CHECKSUM_STRING (AT_string (name_attr));
6784 return;
6785 }
6786 }
6787
6788 /* For all other references to another DIE, we check to see if the
6789 target DIE has already been visited. If it has, we emit a
6790 backward reference; if not, we descend recursively. */
6791 if (target_die->die_mark > 0)
6792 {
6793 CHECKSUM_ULEB128 ('R');
6794 CHECKSUM_ULEB128 (at->dw_attr);
6795 CHECKSUM_ULEB128 (target_die->die_mark);
6796 }
6797 else
6798 {
6799 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
6800
6801 if (decl == NULL)
6802 decl = target_die;
6803 target_die->die_mark = ++(*mark);
6804 CHECKSUM_ULEB128 ('T');
6805 CHECKSUM_ULEB128 (at->dw_attr);
6806 if (decl->die_parent != NULL)
6807 checksum_die_context (decl->die_parent, ctx);
6808 die_checksum_ordered (target_die, ctx, mark);
6809 }
6810 return;
6811 }
6812
6813 CHECKSUM_ULEB128 ('A');
6814 CHECKSUM_ULEB128 (at->dw_attr);
6815
6816 switch (AT_class (at))
6817 {
6818 case dw_val_class_const:
6819 case dw_val_class_const_implicit:
6820 CHECKSUM_ULEB128 (DW_FORM_sdata);
6821 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
6822 break;
6823
6824 case dw_val_class_unsigned_const:
6825 case dw_val_class_unsigned_const_implicit:
6826 CHECKSUM_ULEB128 (DW_FORM_sdata);
6827 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
6828 break;
6829
6830 case dw_val_class_const_double:
6831 CHECKSUM_ULEB128 (DW_FORM_block);
6832 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
6833 CHECKSUM (at->dw_attr_val.v.val_double);
6834 break;
6835
6836 case dw_val_class_wide_int:
6837 CHECKSUM_ULEB128 (DW_FORM_block);
6838 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
6839 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
6840 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6841 get_full_len (*at->dw_attr_val.v.val_wide)
6842 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6843 break;
6844
6845 case dw_val_class_vec:
6846 CHECKSUM_ULEB128 (DW_FORM_block);
6847 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
6848 * at->dw_attr_val.v.val_vec.elt_size);
6849 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6850 (at->dw_attr_val.v.val_vec.length
6851 * at->dw_attr_val.v.val_vec.elt_size));
6852 break;
6853
6854 case dw_val_class_flag:
6855 CHECKSUM_ULEB128 (DW_FORM_flag);
6856 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
6857 break;
6858
6859 case dw_val_class_str:
6860 CHECKSUM_ULEB128 (DW_FORM_string);
6861 CHECKSUM_STRING (AT_string (at));
6862 break;
6863
6864 case dw_val_class_addr:
6865 r = AT_addr (at);
6866 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6867 CHECKSUM_ULEB128 (DW_FORM_string);
6868 CHECKSUM_STRING (XSTR (r, 0));
6869 break;
6870
6871 case dw_val_class_offset:
6872 CHECKSUM_ULEB128 (DW_FORM_sdata);
6873 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
6874 break;
6875
6876 case dw_val_class_loc:
6877 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6878 loc_checksum_ordered (loc, ctx);
6879 break;
6880
6881 case dw_val_class_fde_ref:
6882 case dw_val_class_lbl_id:
6883 case dw_val_class_lineptr:
6884 case dw_val_class_macptr:
6885 case dw_val_class_loclistsptr:
6886 case dw_val_class_high_pc:
6887 break;
6888
6889 case dw_val_class_file:
6890 case dw_val_class_file_implicit:
6891 CHECKSUM_ULEB128 (DW_FORM_string);
6892 CHECKSUM_STRING (AT_file (at)->filename);
6893 break;
6894
6895 case dw_val_class_data8:
6896 CHECKSUM (at->dw_attr_val.v.val_data8);
6897 break;
6898
6899 default:
6900 break;
6901 }
6902 }
6903
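/* Illustrative note (not part of the original sources): the single-letter
   codes used by these ordered checksum routines ('A' for an attribute, 'C'
   for a context entry, 'N' for a reference by name, 'R' for a back
   reference, 'T' for a recursive visit of a referenced type, and 'D' for a
   DIE in die_checksum_ordered below) follow the DWARF 4 type signature
   computation algorithm (see DWARF 4, section 7.27), so that identical type
   descriptions yield identical COMDAT type unit signatures across
   translation units. */
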
6904 struct checksum_attributes
6905 {
6906 dw_attr_node *at_name;
6907 dw_attr_node *at_type;
6908 dw_attr_node *at_friend;
6909 dw_attr_node *at_accessibility;
6910 dw_attr_node *at_address_class;
6911 dw_attr_node *at_alignment;
6912 dw_attr_node *at_allocated;
6913 dw_attr_node *at_artificial;
6914 dw_attr_node *at_associated;
6915 dw_attr_node *at_binary_scale;
6916 dw_attr_node *at_bit_offset;
6917 dw_attr_node *at_bit_size;
6918 dw_attr_node *at_bit_stride;
6919 dw_attr_node *at_byte_size;
6920 dw_attr_node *at_byte_stride;
6921 dw_attr_node *at_const_value;
6922 dw_attr_node *at_containing_type;
6923 dw_attr_node *at_count;
6924 dw_attr_node *at_data_location;
6925 dw_attr_node *at_data_member_location;
6926 dw_attr_node *at_decimal_scale;
6927 dw_attr_node *at_decimal_sign;
6928 dw_attr_node *at_default_value;
6929 dw_attr_node *at_digit_count;
6930 dw_attr_node *at_discr;
6931 dw_attr_node *at_discr_list;
6932 dw_attr_node *at_discr_value;
6933 dw_attr_node *at_encoding;
6934 dw_attr_node *at_endianity;
6935 dw_attr_node *at_explicit;
6936 dw_attr_node *at_is_optional;
6937 dw_attr_node *at_location;
6938 dw_attr_node *at_lower_bound;
6939 dw_attr_node *at_mutable;
6940 dw_attr_node *at_ordering;
6941 dw_attr_node *at_picture_string;
6942 dw_attr_node *at_prototyped;
6943 dw_attr_node *at_small;
6944 dw_attr_node *at_segment;
6945 dw_attr_node *at_string_length;
6946 dw_attr_node *at_string_length_bit_size;
6947 dw_attr_node *at_string_length_byte_size;
6948 dw_attr_node *at_threads_scaled;
6949 dw_attr_node *at_upper_bound;
6950 dw_attr_node *at_use_location;
6951 dw_attr_node *at_use_UTF8;
6952 dw_attr_node *at_variable_parameter;
6953 dw_attr_node *at_virtuality;
6954 dw_attr_node *at_visibility;
6955 dw_attr_node *at_vtable_elem_location;
6956 };
6957
6958 /* Collect the attributes that we will want to use for the checksum. */
6959
6960 static void
6961 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
6962 {
6963 dw_attr_node *a;
6964 unsigned ix;
6965
6966 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6967 {
6968 switch (a->dw_attr)
6969 {
6970 case DW_AT_name:
6971 attrs->at_name = a;
6972 break;
6973 case DW_AT_type:
6974 attrs->at_type = a;
6975 break;
6976 case DW_AT_friend:
6977 attrs->at_friend = a;
6978 break;
6979 case DW_AT_accessibility:
6980 attrs->at_accessibility = a;
6981 break;
6982 case DW_AT_address_class:
6983 attrs->at_address_class = a;
6984 break;
6985 case DW_AT_alignment:
6986 attrs->at_alignment = a;
6987 break;
6988 case DW_AT_allocated:
6989 attrs->at_allocated = a;
6990 break;
6991 case DW_AT_artificial:
6992 attrs->at_artificial = a;
6993 break;
6994 case DW_AT_associated:
6995 attrs->at_associated = a;
6996 break;
6997 case DW_AT_binary_scale:
6998 attrs->at_binary_scale = a;
6999 break;
7000 case DW_AT_bit_offset:
7001 attrs->at_bit_offset = a;
7002 break;
7003 case DW_AT_bit_size:
7004 attrs->at_bit_size = a;
7005 break;
7006 case DW_AT_bit_stride:
7007 attrs->at_bit_stride = a;
7008 break;
7009 case DW_AT_byte_size:
7010 attrs->at_byte_size = a;
7011 break;
7012 case DW_AT_byte_stride:
7013 attrs->at_byte_stride = a;
7014 break;
7015 case DW_AT_const_value:
7016 attrs->at_const_value = a;
7017 break;
7018 case DW_AT_containing_type:
7019 attrs->at_containing_type = a;
7020 break;
7021 case DW_AT_count:
7022 attrs->at_count = a;
7023 break;
7024 case DW_AT_data_location:
7025 attrs->at_data_location = a;
7026 break;
7027 case DW_AT_data_member_location:
7028 attrs->at_data_member_location = a;
7029 break;
7030 case DW_AT_decimal_scale:
7031 attrs->at_decimal_scale = a;
7032 break;
7033 case DW_AT_decimal_sign:
7034 attrs->at_decimal_sign = a;
7035 break;
7036 case DW_AT_default_value:
7037 attrs->at_default_value = a;
7038 break;
7039 case DW_AT_digit_count:
7040 attrs->at_digit_count = a;
7041 break;
7042 case DW_AT_discr:
7043 attrs->at_discr = a;
7044 break;
7045 case DW_AT_discr_list:
7046 attrs->at_discr_list = a;
7047 break;
7048 case DW_AT_discr_value:
7049 attrs->at_discr_value = a;
7050 break;
7051 case DW_AT_encoding:
7052 attrs->at_encoding = a;
7053 break;
7054 case DW_AT_endianity:
7055 attrs->at_endianity = a;
7056 break;
7057 case DW_AT_explicit:
7058 attrs->at_explicit = a;
7059 break;
7060 case DW_AT_is_optional:
7061 attrs->at_is_optional = a;
7062 break;
7063 case DW_AT_location:
7064 attrs->at_location = a;
7065 break;
7066 case DW_AT_lower_bound:
7067 attrs->at_lower_bound = a;
7068 break;
7069 case DW_AT_mutable:
7070 attrs->at_mutable = a;
7071 break;
7072 case DW_AT_ordering:
7073 attrs->at_ordering = a;
7074 break;
7075 case DW_AT_picture_string:
7076 attrs->at_picture_string = a;
7077 break;
7078 case DW_AT_prototyped:
7079 attrs->at_prototyped = a;
7080 break;
7081 case DW_AT_small:
7082 attrs->at_small = a;
7083 break;
7084 case DW_AT_segment:
7085 attrs->at_segment = a;
7086 break;
7087 case DW_AT_string_length:
7088 attrs->at_string_length = a;
7089 break;
7090 case DW_AT_string_length_bit_size:
7091 attrs->at_string_length_bit_size = a;
7092 break;
7093 case DW_AT_string_length_byte_size:
7094 attrs->at_string_length_byte_size = a;
7095 break;
7096 case DW_AT_threads_scaled:
7097 attrs->at_threads_scaled = a;
7098 break;
7099 case DW_AT_upper_bound:
7100 attrs->at_upper_bound = a;
7101 break;
7102 case DW_AT_use_location:
7103 attrs->at_use_location = a;
7104 break;
7105 case DW_AT_use_UTF8:
7106 attrs->at_use_UTF8 = a;
7107 break;
7108 case DW_AT_variable_parameter:
7109 attrs->at_variable_parameter = a;
7110 break;
7111 case DW_AT_virtuality:
7112 attrs->at_virtuality = a;
7113 break;
7114 case DW_AT_visibility:
7115 attrs->at_visibility = a;
7116 break;
7117 case DW_AT_vtable_elem_location:
7118 attrs->at_vtable_elem_location = a;
7119 break;
7120 default:
7121 break;
7122 }
7123 }
7124 }
7125
7126 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7127
7128 static void
7129 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7130 {
7131 dw_die_ref c;
7132 dw_die_ref decl;
7133 struct checksum_attributes attrs;
7134
7135 CHECKSUM_ULEB128 ('D');
7136 CHECKSUM_ULEB128 (die->die_tag);
7137
7138 memset (&attrs, 0, sizeof (attrs));
7139
7140 decl = get_AT_ref (die, DW_AT_specification);
7141 if (decl != NULL)
7142 collect_checksum_attributes (&attrs, decl);
7143 collect_checksum_attributes (&attrs, die);
7144
7145 CHECKSUM_ATTR (attrs.at_name);
7146 CHECKSUM_ATTR (attrs.at_accessibility);
7147 CHECKSUM_ATTR (attrs.at_address_class);
7148 CHECKSUM_ATTR (attrs.at_allocated);
7149 CHECKSUM_ATTR (attrs.at_artificial);
7150 CHECKSUM_ATTR (attrs.at_associated);
7151 CHECKSUM_ATTR (attrs.at_binary_scale);
7152 CHECKSUM_ATTR (attrs.at_bit_offset);
7153 CHECKSUM_ATTR (attrs.at_bit_size);
7154 CHECKSUM_ATTR (attrs.at_bit_stride);
7155 CHECKSUM_ATTR (attrs.at_byte_size);
7156 CHECKSUM_ATTR (attrs.at_byte_stride);
7157 CHECKSUM_ATTR (attrs.at_const_value);
7158 CHECKSUM_ATTR (attrs.at_containing_type);
7159 CHECKSUM_ATTR (attrs.at_count);
7160 CHECKSUM_ATTR (attrs.at_data_location);
7161 CHECKSUM_ATTR (attrs.at_data_member_location);
7162 CHECKSUM_ATTR (attrs.at_decimal_scale);
7163 CHECKSUM_ATTR (attrs.at_decimal_sign);
7164 CHECKSUM_ATTR (attrs.at_default_value);
7165 CHECKSUM_ATTR (attrs.at_digit_count);
7166 CHECKSUM_ATTR (attrs.at_discr);
7167 CHECKSUM_ATTR (attrs.at_discr_list);
7168 CHECKSUM_ATTR (attrs.at_discr_value);
7169 CHECKSUM_ATTR (attrs.at_encoding);
7170 CHECKSUM_ATTR (attrs.at_endianity);
7171 CHECKSUM_ATTR (attrs.at_explicit);
7172 CHECKSUM_ATTR (attrs.at_is_optional);
7173 CHECKSUM_ATTR (attrs.at_location);
7174 CHECKSUM_ATTR (attrs.at_lower_bound);
7175 CHECKSUM_ATTR (attrs.at_mutable);
7176 CHECKSUM_ATTR (attrs.at_ordering);
7177 CHECKSUM_ATTR (attrs.at_picture_string);
7178 CHECKSUM_ATTR (attrs.at_prototyped);
7179 CHECKSUM_ATTR (attrs.at_small);
7180 CHECKSUM_ATTR (attrs.at_segment);
7181 CHECKSUM_ATTR (attrs.at_string_length);
7182 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7183 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7184 CHECKSUM_ATTR (attrs.at_threads_scaled);
7185 CHECKSUM_ATTR (attrs.at_upper_bound);
7186 CHECKSUM_ATTR (attrs.at_use_location);
7187 CHECKSUM_ATTR (attrs.at_use_UTF8);
7188 CHECKSUM_ATTR (attrs.at_variable_parameter);
7189 CHECKSUM_ATTR (attrs.at_virtuality);
7190 CHECKSUM_ATTR (attrs.at_visibility);
7191 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7192 CHECKSUM_ATTR (attrs.at_type);
7193 CHECKSUM_ATTR (attrs.at_friend);
7194 CHECKSUM_ATTR (attrs.at_alignment);
7195
7196 /* Checksum the child DIEs. */
7197 c = die->die_child;
7198 if (c) do {
7199 dw_attr_node *name_attr;
7200
7201 c = c->die_sib;
7202 name_attr = get_AT (c, DW_AT_name);
7203 if (is_template_instantiation (c))
7204 {
7205 /* Ignore instantiations of member type and function templates. */
7206 }
7207 else if (name_attr != NULL
7208 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7209 {
7210 /* Use a shallow checksum for named nested types and member
7211 functions. */
7212 CHECKSUM_ULEB128 ('S');
7213 CHECKSUM_ULEB128 (c->die_tag);
7214 CHECKSUM_STRING (AT_string (name_attr));
7215 }
7216 else
7217 {
7218 /* Use a deep checksum for other children. */
7219 /* Mark this DIE so it gets processed when unmarking. */
7220 if (c->die_mark == 0)
7221 c->die_mark = -1;
7222 die_checksum_ordered (c, ctx, mark);
7223 }
7224 } while (c != die->die_child);
7225
7226 CHECKSUM_ULEB128 (0);
7227 }
7228
7229 /* Add a type name and tag to a hash. */
7230 static void
7231 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7232 {
7233 CHECKSUM_ULEB128 (tag);
7234 CHECKSUM_STRING (name);
7235 }
7236
7237 #undef CHECKSUM
7238 #undef CHECKSUM_STRING
7239 #undef CHECKSUM_ATTR
7240 #undef CHECKSUM_LEB128
7241 #undef CHECKSUM_ULEB128
7242
7243 /* Generate the type signature for DIE. This is computed by generating an
7244 MD5 checksum over the DIE's tag, its relevant attributes, and its
7245 children. Attributes that are references to other DIEs are processed
7246 by recursion, using the MARK field to prevent infinite recursion.
7247 If the DIE is nested inside a namespace or another type, we also
7248 need to include that context in the signature. The lower 64 bits
7249 of the resulting MD5 checksum comprise the signature. */
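
/* A worked sketch, assuming DWARF_TYPE_SIGNATURE_SIZE is the usual 8 bytes:
if the 16-byte MD5 digest is c[0..15], the type signature stored in the
type unit is c[8..15] (the memcpy below copies from
&checksum[16 - DWARF_TYPE_SIGNATURE_SIZE]), and the DW_AT_GNU_odr_signature
data8 emitted below likewise uses &checksum[8]. */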
7250
7251 static void
7252 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7253 {
7254 int mark;
7255 const char *name;
7256 unsigned char checksum[16];
7257 struct md5_ctx ctx;
7258 dw_die_ref decl;
7259 dw_die_ref parent;
7260
7261 name = get_AT_string (die, DW_AT_name);
7262 decl = get_AT_ref (die, DW_AT_specification);
7263 parent = get_die_parent (die);
7264
7265 /* First, compute a signature for just the type name (and its surrounding
7266 context, if any). This is stored in the type unit DIE for link-time
7267 ODR (one-definition rule) checking. */
7268
7269 if (is_cxx () && name != NULL)
7270 {
7271 md5_init_ctx (&ctx);
7272
7273 /* Checksum the names of surrounding namespaces and structures. */
7274 if (parent != NULL)
7275 checksum_die_context (parent, &ctx);
7276
7277 /* Checksum the current DIE. */
7278 die_odr_checksum (die->die_tag, name, &ctx);
7279 md5_finish_ctx (&ctx, checksum);
7280
7281 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7282 }
7283
7284 /* Next, compute the complete type signature. */
7285
7286 md5_init_ctx (&ctx);
7287 mark = 1;
7288 die->die_mark = mark;
7289
7290 /* Checksum the names of surrounding namespaces and structures. */
7291 if (parent != NULL)
7292 checksum_die_context (parent, &ctx);
7293
7294 /* Checksum the DIE and its children. */
7295 die_checksum_ordered (die, &ctx, &mark);
7296 unmark_all_dies (die);
7297 md5_finish_ctx (&ctx, checksum);
7298
7299 /* Store the signature in the type node and link the type DIE and the
7300 type node together. */
7301 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7302 DWARF_TYPE_SIGNATURE_SIZE);
7303 die->comdat_type_p = true;
7304 die->die_id.die_type_node = type_node;
7305 type_node->type_die = die;
7306
7307 /* If the DIE is a specification, link its declaration to the type node
7308 as well. */
7309 if (decl != NULL)
7310 {
7311 decl->comdat_type_p = true;
7312 decl->die_id.die_type_node = type_node;
7313 }
7314 }
7315
7316 /* Do the location expressions look the same? */
7317 static inline int
7318 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7319 {
7320 return loc1->dw_loc_opc == loc2->dw_loc_opc
7321 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7322 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7323 }
7324
7325 /* Do the values look the same? */
7326 static int
7327 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7328 {
7329 dw_loc_descr_ref loc1, loc2;
7330 rtx r1, r2;
7331
7332 if (v1->val_class != v2->val_class)
7333 return 0;
7334
7335 switch (v1->val_class)
7336 {
7337 case dw_val_class_const:
7338 case dw_val_class_const_implicit:
7339 return v1->v.val_int == v2->v.val_int;
7340 case dw_val_class_unsigned_const:
7341 case dw_val_class_unsigned_const_implicit:
7342 return v1->v.val_unsigned == v2->v.val_unsigned;
7343 case dw_val_class_const_double:
7344 return v1->v.val_double.high == v2->v.val_double.high
7345 && v1->v.val_double.low == v2->v.val_double.low;
7346 case dw_val_class_wide_int:
7347 return *v1->v.val_wide == *v2->v.val_wide;
7348 case dw_val_class_vec:
7349 if (v1->v.val_vec.length != v2->v.val_vec.length
7350 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7351 return 0;
7352 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7353 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7354 return 0;
7355 return 1;
7356 case dw_val_class_flag:
7357 return v1->v.val_flag == v2->v.val_flag;
7358 case dw_val_class_str:
7359 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7360
7361 case dw_val_class_addr:
7362 r1 = v1->v.val_addr;
7363 r2 = v2->v.val_addr;
7364 if (GET_CODE (r1) != GET_CODE (r2))
7365 return 0;
7366 return rtx_equal_p (r1, r2);
7367
7368 case dw_val_class_offset:
7369 return v1->v.val_offset == v2->v.val_offset;
7370
7371 case dw_val_class_loc:
7372 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7373 loc1 && loc2;
7374 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7375 if (!same_loc_p (loc1, loc2, mark))
7376 return 0;
7377 return !loc1 && !loc2;
7378
7379 case dw_val_class_die_ref:
7380 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7381
7382 case dw_val_class_fde_ref:
7383 case dw_val_class_vms_delta:
7384 case dw_val_class_lbl_id:
7385 case dw_val_class_lineptr:
7386 case dw_val_class_macptr:
7387 case dw_val_class_loclistsptr:
7388 case dw_val_class_high_pc:
7389 return 1;
7390
7391 case dw_val_class_file:
7392 case dw_val_class_file_implicit:
7393 return v1->v.val_file == v2->v.val_file;
7394
7395 case dw_val_class_data8:
7396 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7397
7398 default:
7399 return 1;
7400 }
7401 }
7402
7403 /* Do the attributes look the same? */
7404
7405 static int
7406 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7407 {
7408 if (at1->dw_attr != at2->dw_attr)
7409 return 0;
7410
7411 /* We don't care that this was compiled with a different compiler
7412 snapshot; if the output is the same, that's what matters. */
7413 if (at1->dw_attr == DW_AT_producer)
7414 return 1;
7415
7416 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7417 }
7418
7419 /* Do the dies look the same? */
7420
7421 static int
7422 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7423 {
7424 dw_die_ref c1, c2;
7425 dw_attr_node *a1;
7426 unsigned ix;
7427
7428 /* To avoid infinite recursion. */
7429 if (die1->die_mark)
7430 return die1->die_mark == die2->die_mark;
7431 die1->die_mark = die2->die_mark = ++(*mark);
7432
7433 if (die1->die_tag != die2->die_tag)
7434 return 0;
7435
7436 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7437 return 0;
7438
7439 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7440 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7441 return 0;
7442
7443 c1 = die1->die_child;
7444 c2 = die2->die_child;
7445 if (! c1)
7446 {
7447 if (c2)
7448 return 0;
7449 }
7450 else
7451 for (;;)
7452 {
7453 if (!same_die_p (c1, c2, mark))
7454 return 0;
7455 c1 = c1->die_sib;
7456 c2 = c2->die_sib;
7457 if (c1 == die1->die_child)
7458 {
7459 if (c2 == die2->die_child)
7460 break;
7461 else
7462 return 0;
7463 }
7464 }
7465
7466 return 1;
7467 }
7468
7469 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7470 children, and set die_symbol. */
7471
7472 static void
7473 compute_comp_unit_symbol (dw_die_ref unit_die)
7474 {
7475 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7476 const char *base = die_name ? lbasename (die_name) : "anonymous";
7477 char *name = XALLOCAVEC (char, strlen (base) + 64);
7478 char *p;
7479 int i, mark;
7480 unsigned char checksum[16];
7481 struct md5_ctx ctx;
7482
7483 /* Compute the checksum of the DIE, then append part of it as hex digits to
7484 the base filename of the unit. */
7485
7486 md5_init_ctx (&ctx);
7487 mark = 0;
7488 die_checksum (unit_die, &ctx, &mark);
7489 unmark_all_dies (unit_die);
7490 md5_finish_ctx (&ctx, checksum);
7491
7492 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7493 not start with a letter but with anything valid for filenames, and
7494 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7495 character is not a letter. */
7496 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7497 clean_symbol_name (name);
7498
7499 p = name + strlen (name);
7500 for (i = 0; i < 4; i++)
7501 {
7502 sprintf (p, "%.2x", checksum[i]);
7503 p += 2;
7504 }
7505
7506 unit_die->die_id.die_symbol = xstrdup (name);
7507 }
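
/* A hypothetical example (assuming clean_symbol_name maps characters such
as '.' to '_'): for a main file named foo.c whose DIE checksum starts with
the bytes 0xab 0xcd 0x12 0x34, the die_symbol computed above would look
like "foo_c_abcd1234" -- the cleaned base filename followed by the first
four checksum bytes printed as hex digits. */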
7508
7509 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7510
7511 static int
7512 is_type_die (dw_die_ref die)
7513 {
7514 switch (die->die_tag)
7515 {
7516 case DW_TAG_array_type:
7517 case DW_TAG_class_type:
7518 case DW_TAG_interface_type:
7519 case DW_TAG_enumeration_type:
7520 case DW_TAG_pointer_type:
7521 case DW_TAG_reference_type:
7522 case DW_TAG_rvalue_reference_type:
7523 case DW_TAG_string_type:
7524 case DW_TAG_structure_type:
7525 case DW_TAG_subroutine_type:
7526 case DW_TAG_union_type:
7527 case DW_TAG_ptr_to_member_type:
7528 case DW_TAG_set_type:
7529 case DW_TAG_subrange_type:
7530 case DW_TAG_base_type:
7531 case DW_TAG_const_type:
7532 case DW_TAG_file_type:
7533 case DW_TAG_packed_type:
7534 case DW_TAG_volatile_type:
7535 case DW_TAG_typedef:
7536 return 1;
7537 default:
7538 return 0;
7539 }
7540 }
7541
7542 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7543 Basically, we want to choose the bits that are likely to be shared between
7544 compilations (types) and leave out the bits that are specific to individual
7545 compilations (functions). */
7546
7547 static int
7548 is_comdat_die (dw_die_ref c)
7549 {
7550 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7551 we do for stabs. The advantage is a greater likelihood of sharing between
7552 objects that don't include headers in the same order (and therefore would
7553 put the base types in a different comdat). jason 8/28/00 */
7554
7555 if (c->die_tag == DW_TAG_base_type)
7556 return 0;
7557
7558 if (c->die_tag == DW_TAG_pointer_type
7559 || c->die_tag == DW_TAG_reference_type
7560 || c->die_tag == DW_TAG_rvalue_reference_type
7561 || c->die_tag == DW_TAG_const_type
7562 || c->die_tag == DW_TAG_volatile_type)
7563 {
7564 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7565
7566 return t ? is_comdat_die (t) : 0;
7567 }
7568
7569 return is_type_die (c);
7570 }
7571
7572 /* Returns true iff C is a compile-unit DIE. */
7573
7574 static inline bool
7575 is_cu_die (dw_die_ref c)
7576 {
7577 return c && (c->die_tag == DW_TAG_compile_unit
7578 || c->die_tag == DW_TAG_skeleton_unit);
7579 }
7580
7581 /* Returns true iff C is a unit DIE of some sort. */
7582
7583 static inline bool
7584 is_unit_die (dw_die_ref c)
7585 {
7586 return c && (c->die_tag == DW_TAG_compile_unit
7587 || c->die_tag == DW_TAG_partial_unit
7588 || c->die_tag == DW_TAG_type_unit
7589 || c->die_tag == DW_TAG_skeleton_unit);
7590 }
7591
7592 /* Returns true iff C is a namespace DIE. */
7593
7594 static inline bool
7595 is_namespace_die (dw_die_ref c)
7596 {
7597 return c && c->die_tag == DW_TAG_namespace;
7598 }
7599
7600 /* Returns true iff C is a class or structure DIE. */
7601
7602 static inline bool
7603 is_class_die (dw_die_ref c)
7604 {
7605 return c && (c->die_tag == DW_TAG_class_type
7606 || c->die_tag == DW_TAG_structure_type);
7607 }
7608
7609 /* Return non-zero if this DIE is a template parameter. */
7610
7611 static inline bool
7612 is_template_parameter (dw_die_ref die)
7613 {
7614 switch (die->die_tag)
7615 {
7616 case DW_TAG_template_type_param:
7617 case DW_TAG_template_value_param:
7618 case DW_TAG_GNU_template_template_param:
7619 case DW_TAG_GNU_template_parameter_pack:
7620 return true;
7621 default:
7622 return false;
7623 }
7624 }
7625
7626 /* Return non-zero if this DIE represents a template instantiation. */
7627
7628 static inline bool
7629 is_template_instantiation (dw_die_ref die)
7630 {
7631 dw_die_ref c;
7632
7633 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7634 return false;
7635 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7636 return false;
7637 }
7638
7639 static char *
7640 gen_internal_sym (const char *prefix)
7641 {
7642 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7643
7644 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7645 return xstrdup (buf);
7646 }
7647
7648 /* Return non-zero if this DIE is a declaration. */
7649
7650 static int
7651 is_declaration_die (dw_die_ref die)
7652 {
7653 dw_attr_node *a;
7654 unsigned ix;
7655
7656 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7657 if (a->dw_attr == DW_AT_declaration)
7658 return 1;
7659
7660 return 0;
7661 }
7662
7663 /* Return non-zero if this DIE is nested inside a subprogram. */
7664
7665 static int
7666 is_nested_in_subprogram (dw_die_ref die)
7667 {
7668 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7669
7670 if (decl == NULL)
7671 decl = die;
7672 return local_scope_p (decl);
7673 }
7674
7675 /* Return non-zero if this DIE contains a defining declaration of a
7676 subprogram. */
7677
7678 static int
7679 contains_subprogram_definition (dw_die_ref die)
7680 {
7681 dw_die_ref c;
7682
7683 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7684 return 1;
7685 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7686 return 0;
7687 }
7688
7689 /* Return non-zero if this is a type DIE that should be moved to a
7690 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7691 unit type. */
7692
7693 static int
7694 should_move_die_to_comdat (dw_die_ref die)
7695 {
7696 switch (die->die_tag)
7697 {
7698 case DW_TAG_class_type:
7699 case DW_TAG_structure_type:
7700 case DW_TAG_enumeration_type:
7701 case DW_TAG_union_type:
7702 /* Don't move declarations, inlined instances, types nested in a
7703 subprogram, or types that contain subprogram definitions. */
7704 if (is_declaration_die (die)
7705 || get_AT (die, DW_AT_abstract_origin)
7706 || is_nested_in_subprogram (die)
7707 || contains_subprogram_definition (die))
7708 return 0;
7709 return 1;
7710 case DW_TAG_array_type:
7711 case DW_TAG_interface_type:
7712 case DW_TAG_pointer_type:
7713 case DW_TAG_reference_type:
7714 case DW_TAG_rvalue_reference_type:
7715 case DW_TAG_string_type:
7716 case DW_TAG_subroutine_type:
7717 case DW_TAG_ptr_to_member_type:
7718 case DW_TAG_set_type:
7719 case DW_TAG_subrange_type:
7720 case DW_TAG_base_type:
7721 case DW_TAG_const_type:
7722 case DW_TAG_file_type:
7723 case DW_TAG_packed_type:
7724 case DW_TAG_volatile_type:
7725 case DW_TAG_typedef:
7726 default:
7727 return 0;
7728 }
7729 }
7730
7731 /* Make a clone of DIE. */
7732
7733 static dw_die_ref
7734 clone_die (dw_die_ref die)
7735 {
7736 dw_die_ref clone = new_die_raw (die->die_tag);
7737 dw_attr_node *a;
7738 unsigned ix;
7739
7740 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7741 add_dwarf_attr (clone, a);
7742
7743 return clone;
7744 }
7745
7746 /* Make a clone of the tree rooted at DIE. */
7747
7748 static dw_die_ref
7749 clone_tree (dw_die_ref die)
7750 {
7751 dw_die_ref c;
7752 dw_die_ref clone = clone_die (die);
7753
7754 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
7755
7756 return clone;
7757 }
7758
7759 /* Make a clone of DIE as a declaration. */
7760
7761 static dw_die_ref
7762 clone_as_declaration (dw_die_ref die)
7763 {
7764 dw_die_ref clone;
7765 dw_die_ref decl;
7766 dw_attr_node *a;
7767 unsigned ix;
7768
7769 /* If the DIE is already a declaration, just clone it. */
7770 if (is_declaration_die (die))
7771 return clone_die (die);
7772
7773 /* If the DIE is a specification, just clone its declaration DIE. */
7774 decl = get_AT_ref (die, DW_AT_specification);
7775 if (decl != NULL)
7776 {
7777 clone = clone_die (decl);
7778 if (die->comdat_type_p)
7779 add_AT_die_ref (clone, DW_AT_signature, die);
7780 return clone;
7781 }
7782
7783 clone = new_die_raw (die->die_tag);
7784
7785 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7786 {
7787 /* We don't want to copy over all attributes.
7788 For example, we don't want DW_AT_byte_size, because otherwise we would no
7789 longer have a declaration and GDB would treat it as a definition. */
7790
7791 switch (a->dw_attr)
7792 {
7793 case DW_AT_abstract_origin:
7794 case DW_AT_artificial:
7795 case DW_AT_containing_type:
7796 case DW_AT_external:
7797 case DW_AT_name:
7798 case DW_AT_type:
7799 case DW_AT_virtuality:
7800 case DW_AT_linkage_name:
7801 case DW_AT_MIPS_linkage_name:
7802 add_dwarf_attr (clone, a);
7803 break;
7804 case DW_AT_byte_size:
7805 case DW_AT_alignment:
7806 default:
7807 break;
7808 }
7809 }
7810
7811 if (die->comdat_type_p)
7812 add_AT_die_ref (clone, DW_AT_signature, die);
7813
7814 add_AT_flag (clone, DW_AT_declaration, 1);
7815 return clone;
7816 }
7817
7818
7819 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
7820
7821 struct decl_table_entry
7822 {
7823 dw_die_ref orig;
7824 dw_die_ref copy;
7825 };
7826
7827 /* Helpers to manipulate hash table of copied declarations. */
7828
7829 /* Hashtable helpers. */
7830
7831 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
7832 {
7833 typedef die_struct *compare_type;
7834 static inline hashval_t hash (const decl_table_entry *);
7835 static inline bool equal (const decl_table_entry *, const die_struct *);
7836 };
7837
7838 inline hashval_t
7839 decl_table_entry_hasher::hash (const decl_table_entry *entry)
7840 {
7841 return htab_hash_pointer (entry->orig);
7842 }
7843
7844 inline bool
7845 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
7846 const die_struct *entry2)
7847 {
7848 return entry1->orig == entry2;
7849 }
7850
7851 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
7852
7853 /* Copy DIE and its ancestors, up to, but not including, the compile unit
7854 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
7855 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
7856 to check if the ancestor has already been copied into UNIT. */
7857
7858 static dw_die_ref
7859 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
7860 decl_hash_type *decl_table)
7861 {
7862 dw_die_ref parent = die->die_parent;
7863 dw_die_ref new_parent = unit;
7864 dw_die_ref copy;
7865 decl_table_entry **slot = NULL;
7866 struct decl_table_entry *entry = NULL;
7867
7868 if (decl_table)
7869 {
7870 /* Check if the entry has already been copied to UNIT. */
7871 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
7872 INSERT);
7873 if (*slot != HTAB_EMPTY_ENTRY)
7874 {
7875 entry = *slot;
7876 return entry->copy;
7877 }
7878
7879 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
7880 entry = XCNEW (struct decl_table_entry);
7881 entry->orig = die;
7882 entry->copy = NULL;
7883 *slot = entry;
7884 }
7885
7886 if (parent != NULL)
7887 {
7888 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
7889 if (spec != NULL)
7890 parent = spec;
7891 if (!is_unit_die (parent))
7892 new_parent = copy_ancestor_tree (unit, parent, decl_table);
7893 }
7894
7895 copy = clone_as_declaration (die);
7896 add_child_die (new_parent, copy);
7897
7898 if (decl_table)
7899 {
7900 /* Record the pointer to the copy. */
7901 entry->copy = copy;
7902 }
7903
7904 return copy;
7905 }
7906 /* Copy the declaration context to the new type unit DIE. This includes
7907 any surrounding namespace or type declarations. If the DIE has an
7908 DW_AT_specification attribute, it also copies the attributes and children
7909 attached to the specification, and returns a pointer to the original
7910 parent of the declaration DIE. Returns NULL otherwise. */
7911
7912 static dw_die_ref
7913 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
7914 {
7915 dw_die_ref decl;
7916 dw_die_ref new_decl;
7917 dw_die_ref orig_parent = NULL;
7918
7919 decl = get_AT_ref (die, DW_AT_specification);
7920 if (decl == NULL)
7921 decl = die;
7922 else
7923 {
7924 unsigned ix;
7925 dw_die_ref c;
7926 dw_attr_node *a;
7927
7928 /* The original DIE will be changed to a declaration, and must
7929 be moved to be a child of the original declaration DIE. */
7930 orig_parent = decl->die_parent;
7931
7932 /* Copy the type node pointer from the new DIE to the original
7933 declaration DIE so we can forward references later. */
7934 decl->comdat_type_p = true;
7935 decl->die_id.die_type_node = die->die_id.die_type_node;
7936
7937 remove_AT (die, DW_AT_specification);
7938
7939 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
7940 {
7941 if (a->dw_attr != DW_AT_name
7942 && a->dw_attr != DW_AT_declaration
7943 && a->dw_attr != DW_AT_external)
7944 add_dwarf_attr (die, a);
7945 }
7946
7947 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
7948 }
7949
7950 if (decl->die_parent != NULL
7951 && !is_unit_die (decl->die_parent))
7952 {
7953 new_decl = copy_ancestor_tree (unit, decl, NULL);
7954 if (new_decl != NULL)
7955 {
7956 remove_AT (new_decl, DW_AT_signature);
7957 add_AT_specification (die, new_decl);
7958 }
7959 }
7960
7961 return orig_parent;
7962 }
7963
7964 /* Generate the skeleton ancestor tree for the given NODE, then clone
7965 the DIE and add the clone into the tree. */
7966
7967 static void
7968 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
7969 {
7970 if (node->new_die != NULL)
7971 return;
7972
7973 node->new_die = clone_as_declaration (node->old_die);
7974
7975 if (node->parent != NULL)
7976 {
7977 generate_skeleton_ancestor_tree (node->parent);
7978 add_child_die (node->parent->new_die, node->new_die);
7979 }
7980 }
7981
7982 /* Generate a skeleton tree of DIEs containing any declarations that are
7983 found in the original tree. We traverse the tree looking for declaration
7984 DIEs, and construct the skeleton from the bottom up whenever we find one. */
7985
7986 static void
7987 generate_skeleton_bottom_up (skeleton_chain_node *parent)
7988 {
7989 skeleton_chain_node node;
7990 dw_die_ref c;
7991 dw_die_ref first;
7992 dw_die_ref prev = NULL;
7993 dw_die_ref next = NULL;
7994
7995 node.parent = parent;
7996
7997 first = c = parent->old_die->die_child;
7998 if (c)
7999 next = c->die_sib;
8000 if (c) do {
8001 if (prev == NULL || prev->die_sib == c)
8002 prev = c;
8003 c = next;
8004 next = (c == first ? NULL : c->die_sib);
8005 node.old_die = c;
8006 node.new_die = NULL;
8007 if (is_declaration_die (c))
8008 {
8009 if (is_template_instantiation (c))
8010 {
8011 /* Instantiated templates do not need to be cloned into the
8012 type unit. Just move the DIE and its children back to
8013 the skeleton tree (in the main CU). */
8014 remove_child_with_prev (c, prev);
8015 add_child_die (parent->new_die, c);
8016 c = prev;
8017 }
8018 else if (c->comdat_type_p)
8019 {
8020 /* This is the skeleton of a type broken out earlier by
8021 break_out_comdat_types. Clone the existing DIE, but keep the children
8022 under the original (which is in the main CU). */
8023 dw_die_ref clone = clone_die (c);
8024
8025 replace_child (c, clone, prev);
8026 generate_skeleton_ancestor_tree (parent);
8027 add_child_die (parent->new_die, c);
8028 c = clone;
8029 continue;
8030 }
8031 else
8032 {
8033 /* Clone the existing DIE, move the original to the skeleton
8034 tree (which is in the main CU), and put the clone, with
8035 all the original's children, where the original came from
8036 (which is about to be moved to the type unit). */
8037 dw_die_ref clone = clone_die (c);
8038 move_all_children (c, clone);
8039
8040 /* If the original has a DW_AT_object_pointer attribute,
8041 it would now point to a child DIE just moved to the
8042 cloned tree, so we need to remove that attribute from
8043 the original. */
8044 remove_AT (c, DW_AT_object_pointer);
8045
8046 replace_child (c, clone, prev);
8047 generate_skeleton_ancestor_tree (parent);
8048 add_child_die (parent->new_die, c);
8049 node.old_die = clone;
8050 node.new_die = c;
8051 c = clone;
8052 }
8053 }
8054 generate_skeleton_bottom_up (&node);
8055 } while (next != NULL);
8056 }
8057
8058 /* Wrapper function for generate_skeleton_bottom_up. */
8059
8060 static dw_die_ref
8061 generate_skeleton (dw_die_ref die)
8062 {
8063 skeleton_chain_node node;
8064
8065 node.old_die = die;
8066 node.new_die = NULL;
8067 node.parent = NULL;
8068
8069 /* If this type definition is nested inside another type,
8070 and is not an instantiation of a template, always leave
8071 at least a declaration in its place. */
8072 if (die->die_parent != NULL
8073 && is_type_die (die->die_parent)
8074 && !is_template_instantiation (die))
8075 node.new_die = clone_as_declaration (die);
8076
8077 generate_skeleton_bottom_up (&node);
8078 return node.new_die;
8079 }
8080
8081 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8082 declaration. The original DIE is moved to a new compile unit so that
8083 existing references to it follow it to the new location. If any of the
8084 original DIE's descendants is a declaration, we need to replace the
8085 original DIE with a skeleton tree and move the declarations back into the
8086 skeleton tree. */
8087
8088 static dw_die_ref
8089 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8090 dw_die_ref prev)
8091 {
8092 dw_die_ref skeleton, orig_parent;
8093
8094 /* Copy the declaration context to the type unit DIE. If the returned
8095 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8096 that DIE. */
8097 orig_parent = copy_declaration_context (unit, child);
8098
8099 skeleton = generate_skeleton (child);
8100 if (skeleton == NULL)
8101 remove_child_with_prev (child, prev);
8102 else
8103 {
8104 skeleton->comdat_type_p = true;
8105 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8106
8107 /* If the original DIE was a specification, we need to put
8108 the skeleton under the parent DIE of the declaration.
8109 This leaves the original declaration in the tree, but
8110 it will be pruned later since there are no longer any
8111 references to it. */
8112 if (orig_parent != NULL)
8113 {
8114 remove_child_with_prev (child, prev);
8115 add_child_die (orig_parent, skeleton);
8116 }
8117 else
8118 replace_child (child, skeleton, prev);
8119 }
8120
8121 return skeleton;
8122 }
8123
8124 static void
8125 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8126 comdat_type_node *type_node,
8127 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8128
8129 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8130 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8131 DWARF procedure references in the DW_AT_location attribute. */
8132
8133 static dw_die_ref
8134 copy_dwarf_procedure (dw_die_ref die,
8135 comdat_type_node *type_node,
8136 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8137 {
8138 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8139
8140 /* DWARF procedures are not supposed to have children... */
8141 gcc_assert (die->die_child == NULL);
8142
8143 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8144 gcc_assert (vec_safe_length (die->die_attr) == 1
8145 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8146
8147 /* Do not copy DWARF procedures more than once. */
8148 bool existed;
8149 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8150 if (existed)
8151 return die_copy;
8152
8153 die_copy = clone_die (die);
8154 add_child_die (type_node->root_die, die_copy);
8155 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8156 return die_copy;
8157 }
8158
8159 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8160 procedures in DIE's attributes. */
8161
8162 static void
8163 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8164 comdat_type_node *type_node,
8165 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8166 {
8167 dw_attr_node *a;
8168 unsigned i;
8169
8170 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8171 {
8172 dw_loc_descr_ref loc;
8173
8174 if (a->dw_attr_val.val_class != dw_val_class_loc)
8175 continue;
8176
8177 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8178 {
8179 switch (loc->dw_loc_opc)
8180 {
8181 case DW_OP_call2:
8182 case DW_OP_call4:
8183 case DW_OP_call_ref:
8184 gcc_assert (loc->dw_loc_oprnd1.val_class
8185 == dw_val_class_die_ref);
8186 loc->dw_loc_oprnd1.v.val_die_ref.die
8187 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8188 type_node,
8189 copied_dwarf_procs);
8190
8191 default:
8192 break;
8193 }
8194 }
8195 }
8196 }
8197
8198 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8199 rewrite references to point to the copies.
8200
8201 References are looked for in DIE's attributes and recursively in all its
8202 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8203 mapping from old DWARF procedures to their copies. It is used to avoid copying
8204 the same DWARF procedure twice under TYPE_NODE. */
8205
8206 static void
8207 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8208 comdat_type_node *type_node,
8209 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8210 {
8211 dw_die_ref c;
8212
8213 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8214 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8215 type_node,
8216 copied_dwarf_procs));
8217 }
8218
8219 /* Traverse the DIE and set up additional .debug_types or .debug_info
8220 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8221 section. */
8222
8223 static void
8224 break_out_comdat_types (dw_die_ref die)
8225 {
8226 dw_die_ref c;
8227 dw_die_ref first;
8228 dw_die_ref prev = NULL;
8229 dw_die_ref next = NULL;
8230 dw_die_ref unit = NULL;
8231
8232 first = c = die->die_child;
8233 if (c)
8234 next = c->die_sib;
8235 if (c) do {
8236 if (prev == NULL || prev->die_sib == c)
8237 prev = c;
8238 c = next;
8239 next = (c == first ? NULL : c->die_sib);
8240 if (should_move_die_to_comdat (c))
8241 {
8242 dw_die_ref replacement;
8243 comdat_type_node *type_node;
8244
8245 /* Break out nested types into their own type units. */
8246 break_out_comdat_types (c);
8247
8248 /* Create a new type unit DIE as the root for the new tree, and
8249 add it to the list of comdat types. */
8250 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8251 add_AT_unsigned (unit, DW_AT_language,
8252 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8253 type_node = ggc_cleared_alloc<comdat_type_node> ();
8254 type_node->root_die = unit;
8255 type_node->next = comdat_type_list;
8256 comdat_type_list = type_node;
8257
8258 /* Generate the type signature. */
8259 generate_type_signature (c, type_node);
8260
8261 /* Copy the declaration context, attributes, and children of the
8262 declaration into the new type unit DIE, then remove this DIE
8263 from the main CU (or replace it with a skeleton if necessary). */
8264 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8265 type_node->skeleton_die = replacement;
8266
8267 /* Add the DIE to the new compunit. */
8268 add_child_die (unit, c);
8269
8270 /* Types can reference DWARF procedures for type size or data location
8271 expressions. Calls in DWARF expressions cannot target procedures
8272 that are not in the same section. So we must copy DWARF procedures
8273 along with this type and then rewrite references to them. */
8274 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8275 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8276
8277 if (replacement != NULL)
8278 c = replacement;
8279 }
8280 else if (c->die_tag == DW_TAG_namespace
8281 || c->die_tag == DW_TAG_class_type
8282 || c->die_tag == DW_TAG_structure_type
8283 || c->die_tag == DW_TAG_union_type)
8284 {
8285 /* Look for nested types that can be broken out. */
8286 break_out_comdat_types (c);
8287 }
8288 } while (next != NULL);
8289 }
8290
8291 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8292 Enter all the cloned children into the hash table decl_table. */
8293
8294 static dw_die_ref
8295 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8296 {
8297 dw_die_ref c;
8298 dw_die_ref clone;
8299 struct decl_table_entry *entry;
8300 decl_table_entry **slot;
8301
8302 if (die->die_tag == DW_TAG_subprogram)
8303 clone = clone_as_declaration (die);
8304 else
8305 clone = clone_die (die);
8306
8307 slot = decl_table->find_slot_with_hash (die,
8308 htab_hash_pointer (die), INSERT);
8309
8310 /* Assert that DIE isn't in the hash table yet. If it were already there,
8311 its ancestors would necessarily be there as well, and therefore
8312 clone_tree_partial wouldn't have been called. */
8313 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8314
8315 entry = XCNEW (struct decl_table_entry);
8316 entry->orig = die;
8317 entry->copy = clone;
8318 *slot = entry;
8319
8320 if (die->die_tag != DW_TAG_subprogram)
8321 FOR_EACH_CHILD (die, c,
8322 add_child_die (clone, clone_tree_partial (c, decl_table)));
8323
8324 return clone;
8325 }
8326
8327 /* Walk the DIE and its children, looking for references to incomplete
8328 or trivial types that are unmarked (i.e., that are not in the current
8329 type_unit). */
8330
8331 static void
8332 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8333 {
8334 dw_die_ref c;
8335 dw_attr_node *a;
8336 unsigned ix;
8337
8338 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8339 {
8340 if (AT_class (a) == dw_val_class_die_ref)
8341 {
8342 dw_die_ref targ = AT_ref (a);
8343 decl_table_entry **slot;
8344 struct decl_table_entry *entry;
8345
8346 if (targ->die_mark != 0 || targ->comdat_type_p)
8347 continue;
8348
8349 slot = decl_table->find_slot_with_hash (targ,
8350 htab_hash_pointer (targ),
8351 INSERT);
8352
8353 if (*slot != HTAB_EMPTY_ENTRY)
8354 {
8355 /* TARG has already been copied, so we just need to
8356 modify the reference to point to the copy. */
8357 entry = *slot;
8358 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8359 }
8360 else
8361 {
8362 dw_die_ref parent = unit;
8363 dw_die_ref copy = clone_die (targ);
8364
8365 /* Record in DECL_TABLE that TARG has been copied.
8366 Need to do this now, before the recursive call,
8367 because DECL_TABLE may be expanded and SLOT
8368 would no longer be a valid pointer. */
8369 entry = XCNEW (struct decl_table_entry);
8370 entry->orig = targ;
8371 entry->copy = copy;
8372 *slot = entry;
8373
8374 /* If TARG is not a declaration DIE, we need to copy its
8375 children. */
8376 if (!is_declaration_die (targ))
8377 {
8378 FOR_EACH_CHILD (
8379 targ, c,
8380 add_child_die (copy,
8381 clone_tree_partial (c, decl_table)));
8382 }
8383
8384 /* Make sure the cloned tree is marked as part of the
8385 type unit. */
8386 mark_dies (copy);
8387
8388 /* If TARG has surrounding context, copy its ancestor tree
8389 into the new type unit. */
8390 if (targ->die_parent != NULL
8391 && !is_unit_die (targ->die_parent))
8392 parent = copy_ancestor_tree (unit, targ->die_parent,
8393 decl_table);
8394
8395 add_child_die (parent, copy);
8396 a->dw_attr_val.v.val_die_ref.die = copy;
8397
8398 /* Make sure the newly-copied DIE is walked. If it was
8399 installed in a previously-added context, it won't
8400 get visited otherwise. */
8401 if (parent != unit)
8402 {
8403 /* Find the highest point of the newly-added tree,
8404 mark each node along the way, and walk from there. */
8405 parent->die_mark = 1;
8406 while (parent->die_parent
8407 && parent->die_parent->die_mark == 0)
8408 {
8409 parent = parent->die_parent;
8410 parent->die_mark = 1;
8411 }
8412 copy_decls_walk (unit, parent, decl_table);
8413 }
8414 }
8415 }
8416 }
8417
8418 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8419 }
8420
8421 /* Copy declarations for "unworthy" types into the new comdat section.
8422 Incomplete types, modified types, and certain other types aren't broken
8423 out into comdat sections of their own, so they don't have a signature,
8424 and we need to copy the declaration into the same section so that we
8425 don't have an external reference. */
8426
8427 static void
8428 copy_decls_for_unworthy_types (dw_die_ref unit)
8429 {
8430 mark_dies (unit);
8431 decl_hash_type decl_table (10);
8432 copy_decls_walk (unit, unit, &decl_table);
8433 unmark_dies (unit);
8434 }
8435
8436 /* Traverse the DIE and add a sibling attribute if it may have the
8437 effect of speeding up access to siblings. To save some space,
8438 avoid generating sibling attributes for DIEs without children. */
8439
8440 static void
8441 add_sibling_attributes (dw_die_ref die)
8442 {
8443 dw_die_ref c;
8444
8445 if (! die->die_child)
8446 return;
8447
8448 if (die->die_parent && die != die->die_parent->die_child)
8449 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8450
8451 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8452 }
8453
8454 /* Output all location lists for the DIE and its children. */
8455
8456 static void
8457 output_location_lists (dw_die_ref die)
8458 {
8459 dw_die_ref c;
8460 dw_attr_node *a;
8461 unsigned ix;
8462
8463 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8464 if (AT_class (a) == dw_val_class_loc_list)
8465 output_loc_list (AT_loc_list (a));
8466
8467 FOR_EACH_CHILD (die, c, output_location_lists (c));
8468 }
8469
8470 /* During assign_location_list_indexes and output_loclists_offsets this is
8471 the current index; afterwards it is the number of assigned indexes (i.e.
8472 how large the .debug_loclists* offset table should be). */
8473 static unsigned int loc_list_idx;
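
/* Sketch of how these indexes are used (DWARF 5 split DWARF, as handled in
size_of_die and value_format elsewhere in this file): a DW_AT_location
attribute is emitted as DW_FORM_loclistx holding the list's index, and
output_loclists_offsets writes the table that maps each index to the
list's offset within .debug_loclists. */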
8474
8475 /* Output all location list offsets for the DIE and its children. */
8476
8477 static void
8478 output_loclists_offsets (dw_die_ref die)
8479 {
8480 dw_die_ref c;
8481 dw_attr_node *a;
8482 unsigned ix;
8483
8484 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8485 if (AT_class (a) == dw_val_class_loc_list)
8486 {
8487 dw_loc_list_ref l = AT_loc_list (a);
8488 if (l->offset_emitted)
8489 continue;
8490 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8491 loc_section_label, NULL);
8492 gcc_assert (l->hash == loc_list_idx);
8493 loc_list_idx++;
8494 l->offset_emitted = true;
8495 }
8496
8497 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8498 }
8499
8500 /* Recursively set indexes of location lists. */
8501
8502 static void
8503 assign_location_list_indexes (dw_die_ref die)
8504 {
8505 dw_die_ref c;
8506 dw_attr_node *a;
8507 unsigned ix;
8508
8509 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8510 if (AT_class (a) == dw_val_class_loc_list)
8511 {
8512 dw_loc_list_ref list = AT_loc_list (a);
8513 if (!list->num_assigned)
8514 {
8515 list->num_assigned = true;
8516 list->hash = loc_list_idx++;
8517 }
8518 }
8519
8520 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8521 }
8522
8523 /* We want to limit the number of external references, because they are
8524 larger than local references: a relocation takes multiple words, and
8525 even a sig8 reference is always eight bytes, whereas a local reference
8526 can be as small as one byte (though GCC usually emits 4-byte DW_FORM_ref4).
8527 So if we encounter multiple external references to the same type DIE, we
8528 make a local typedef stub for it and redirect all references there.
8529
8530 This is the element of the hash table for keeping track of these
8531 references. */
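
/* A rough size sketch of why this pays off: 1000 DW_FORM_ref_sig8 references
to one external type cost 8 bytes each (8000 bytes total); with a local
stub they become 1000 intra-CU references (typically 4-byte DW_FORM_ref4,
4000 bytes) plus a single small stub DIE carrying the DW_AT_signature or
DW_AT_type reference (see dwarf2_build_local_stub below). */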
8532
8533 struct external_ref
8534 {
8535 dw_die_ref type;
8536 dw_die_ref stub;
8537 unsigned n_refs;
8538 };
8539
8540 /* Hashtable helpers. */
8541
8542 struct external_ref_hasher : free_ptr_hash <external_ref>
8543 {
8544 static inline hashval_t hash (const external_ref *);
8545 static inline bool equal (const external_ref *, const external_ref *);
8546 };
8547
8548 inline hashval_t
8549 external_ref_hasher::hash (const external_ref *r)
8550 {
8551 dw_die_ref die = r->type;
8552 hashval_t h = 0;
8553
8554 /* We can't use the address of the DIE for hashing, because
8555 that will make the order of the stub DIEs non-deterministic. */
8556 if (! die->comdat_type_p)
8557 /* We have a symbol; use it to compute a hash. */
8558 h = htab_hash_string (die->die_id.die_symbol);
8559 else
8560 {
8561 /* We have a type signature; use a subset of the bits as the hash.
8562 The 8-byte signature is at least as large as hashval_t. */
8563 comdat_type_node *type_node = die->die_id.die_type_node;
8564 memcpy (&h, type_node->signature, sizeof (h));
8565 }
8566 return h;
8567 }
8568
8569 inline bool
8570 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8571 {
8572 return r1->type == r2->type;
8573 }
8574
8575 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8576
8577 /* Return a pointer to the external_ref for references to DIE. */
8578
8579 static struct external_ref *
8580 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8581 {
8582 struct external_ref ref, *ref_p;
8583 external_ref **slot;
8584
8585 ref.type = die;
8586 slot = map->find_slot (&ref, INSERT);
8587 if (*slot != HTAB_EMPTY_ENTRY)
8588 return *slot;
8589
8590 ref_p = XCNEW (struct external_ref);
8591 ref_p->type = die;
8592 *slot = ref_p;
8593 return ref_p;
8594 }
8595
8596 /* Subroutine of optimize_external_refs, below.
8597
8598 If we see a type skeleton, record it as our stub. If we see external
8599 references, remember how many we've seen. */
8600
8601 static void
8602 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8603 {
8604 dw_die_ref c;
8605 dw_attr_node *a;
8606 unsigned ix;
8607 struct external_ref *ref_p;
8608
8609 if (is_type_die (die)
8610 && (c = get_AT_ref (die, DW_AT_signature)))
8611 {
8612 /* This is a local skeleton; use it for local references. */
8613 ref_p = lookup_external_ref (map, c);
8614 ref_p->stub = die;
8615 }
8616
8617 /* Scan the DIE references, and remember any that refer to DIEs from
8618 other CUs (i.e. those which are not marked). */
8619 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8620 if (AT_class (a) == dw_val_class_die_ref
8621 && (c = AT_ref (a))->die_mark == 0
8622 && is_type_die (c))
8623 {
8624 ref_p = lookup_external_ref (map, c);
8625 ref_p->n_refs++;
8626 }
8627
8628 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8629 }
8630
8631 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8632 points to an external_ref, DATA is the CU we're processing. If we don't
8633 already have a local stub, and we have multiple refs, build a stub. */
8634
8635 int
8636 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8637 {
8638 struct external_ref *ref_p = *slot;
8639
8640 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8641 {
8642 /* We have multiple references to this type, so build a small stub.
8643 Both of these forms are a bit dodgy from the perspective of the
8644 DWARF standard, since technically they should have names. */
8645 dw_die_ref cu = data;
8646 dw_die_ref type = ref_p->type;
8647 dw_die_ref stub = NULL;
8648
8649 if (type->comdat_type_p)
8650 {
8651 /* If we refer to this type via sig8, use AT_signature. */
8652 stub = new_die (type->die_tag, cu, NULL_TREE);
8653 add_AT_die_ref (stub, DW_AT_signature, type);
8654 }
8655 else
8656 {
8657 /* Otherwise, use a typedef with no name. */
8658 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8659 add_AT_die_ref (stub, DW_AT_type, type);
8660 }
8661
8662 stub->die_mark++;
8663 ref_p->stub = stub;
8664 }
8665 return 1;
8666 }
8667
8668 /* DIE is a unit; look through all the DIE references to see if there are
8669 any external references to types, and if so, create local stubs for
8670 them which will be applied in build_abbrev_table. This is useful because
8671 references to local DIEs are smaller. */
8672
8673 static external_ref_hash_type *
8674 optimize_external_refs (dw_die_ref die)
8675 {
8676 external_ref_hash_type *map = new external_ref_hash_type (10);
8677 optimize_external_refs_1 (die, map);
8678 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8679 return map;
8680 }
8681
8682 /* The following variables are temporaries that are computed only during the
8683 build_abbrev_table call and used and released during the following
8684 optimize_abbrev_table call. */
8685
8686 /* First abbrev_id that can be optimized based on usage. */
8687 static unsigned int abbrev_opt_start;
8688
8689 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8690 abbrev_id smaller than this, because they must be already sized
8691 during build_abbrev_table). */
8692 static unsigned int abbrev_opt_base_type_end;
8693
8694 /* Vector of usage counts during build_abbrev_table. Indexed by
8695 abbrev_id - abbrev_opt_start. */
8696 static vec<unsigned int> abbrev_usage_count;
8697
8698 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8699 static vec<dw_die_ref> sorted_abbrev_dies;
8700
8701 /* The format of each DIE (and its attribute value pairs) is encoded in an
8702 abbreviation table. This routine builds the abbreviation table and assigns
8703 a unique abbreviation id for each abbreviation entry. The children of each
8704 die are visited recursively. */
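
/* For illustration (not tied to any particular entry in this file): an
abbreviation describes the shape of a DIE once in .debug_abbrev, e.g.
"DW_TAG_variable, no children, DW_AT_name (DW_FORM_strp),
DW_AT_type (DW_FORM_ref4)". Every DIE of that exact shape in .debug_info
then consists only of the ULEB128 abbreviation id followed by the attribute
values, which is why reusing abbreviation entries and giving frequent ones
small ids (see optimize_abbrev_table below) saves space. */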
8705
8706 static void
8707 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8708 {
8709 unsigned int abbrev_id = 0;
8710 dw_die_ref c;
8711 dw_attr_node *a;
8712 unsigned ix;
8713 dw_die_ref abbrev;
8714
8715 /* Scan the DIE references, and replace any that refer to
8716 DIEs from other CUs (i.e. those which are not marked) with
8717 the local stubs we built in optimize_external_refs. */
8718 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8719 if (AT_class (a) == dw_val_class_die_ref
8720 && (c = AT_ref (a))->die_mark == 0)
8721 {
8722 struct external_ref *ref_p;
8723 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8724
8725 ref_p = lookup_external_ref (extern_map, c);
8726 if (ref_p->stub && ref_p->stub != die)
8727 change_AT_die_ref (a, ref_p->stub);
8728 else
8729 /* We aren't changing this reference, so mark it external. */
8730 set_AT_ref_external (a, 1);
8731 }
8732
8733 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8734 {
8735 dw_attr_node *die_a, *abbrev_a;
8736 unsigned ix;
8737 bool ok = true;
8738
8739 if (abbrev_id == 0)
8740 continue;
8741 if (abbrev->die_tag != die->die_tag)
8742 continue;
8743 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8744 continue;
8745
8746 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8747 continue;
8748
8749 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8750 {
8751 abbrev_a = &(*abbrev->die_attr)[ix];
8752 if ((abbrev_a->dw_attr != die_a->dw_attr)
8753 || (value_format (abbrev_a) != value_format (die_a)))
8754 {
8755 ok = false;
8756 break;
8757 }
8758 }
8759 if (ok)
8760 break;
8761 }
8762
8763 if (abbrev_id >= vec_safe_length (abbrev_die_table))
8764 {
8765 vec_safe_push (abbrev_die_table, die);
8766 if (abbrev_opt_start)
8767 abbrev_usage_count.safe_push (0);
8768 }
8769 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
8770 {
8771 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
8772 sorted_abbrev_dies.safe_push (die);
8773 }
8774
8775 die->die_abbrev = abbrev_id;
8776 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
8777 }
8778
8779 /* Callback function for sorted_abbrev_dies vector sorting. We sort
8780 by die_abbrev's usage count, from the most commonly used
8781 abbreviation to the least. */
8782
8783 static int
8784 die_abbrev_cmp (const void *p1, const void *p2)
8785 {
8786 dw_die_ref die1 = *(const dw_die_ref *) p1;
8787 dw_die_ref die2 = *(const dw_die_ref *) p2;
8788
8789 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
8790 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
8791
8792 if (die1->die_abbrev >= abbrev_opt_base_type_end
8793 && die2->die_abbrev >= abbrev_opt_base_type_end)
8794 {
8795 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8796 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8797 return -1;
8798 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
8799 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
8800 return 1;
8801 }
8802
8803 /* Stabilize the sort. */
8804 if (die1->die_abbrev < die2->die_abbrev)
8805 return -1;
8806 if (die1->die_abbrev > die2->die_abbrev)
8807 return 1;
8808
8809 return 0;
8810 }
8811
8812 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
8813 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
8814 into dw_val_class_const_implicit or
8815 dw_val_class_unsigned_const_implicit. */
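
/* A back-of-the-envelope example of the tradeoff tested below, assuming ten
DIEs share the abbreviation and the attribute value is 0x1000:
constant_size (0x1000) is 2, so keeping the value in the DIEs costs
2 * 10 = 20 bytes of .debug_info, while DW_FORM_implicit_const stores the
value once in .debug_abbrev as an SLEB128 (2 bytes here), so the
conversion is performed. */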
8816
8817 static void
8818 optimize_implicit_const (unsigned int first_id, unsigned int end,
8819 vec<bool> &implicit_consts)
8820 {
8821 /* It never makes sense to do this if there is just one DIE using the abbreviation. */
8822 if (end < first_id + 2)
8823 return;
8824
8825 dw_attr_node *a;
8826 unsigned ix, i;
8827 dw_die_ref die = sorted_abbrev_dies[first_id];
8828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8829 if (implicit_consts[ix])
8830 {
8831 enum dw_val_class new_class = dw_val_class_none;
8832 switch (AT_class (a))
8833 {
8834 case dw_val_class_unsigned_const:
8835 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
8836 continue;
8837
8838 /* The .debug_abbrev section will grow by
8839 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
8840 in all the DIEs using that abbreviation. */
8841 if (constant_size (AT_unsigned (a)) * (end - first_id)
8842 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
8843 continue;
8844
8845 new_class = dw_val_class_unsigned_const_implicit;
8846 break;
8847
8848 case dw_val_class_const:
8849 new_class = dw_val_class_const_implicit;
8850 break;
8851
8852 case dw_val_class_file:
8853 new_class = dw_val_class_file_implicit;
8854 break;
8855
8856 default:
8857 continue;
8858 }
8859 for (i = first_id; i < end; i++)
8860 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
8861 = new_class;
8862 }
8863 }
8864
8865 /* Attempt to optimize the abbreviation table from abbrev_opt_start onward,
8866 giving the most frequently used abbreviations the smallest (shortest) ids. */
8867
8868 static void
8869 optimize_abbrev_table (void)
8870 {
8871 if (abbrev_opt_start
8872 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
8873 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
8874 {
8875 auto_vec<bool, 32> implicit_consts;
8876 sorted_abbrev_dies.qsort (die_abbrev_cmp);
8877
8878 unsigned int abbrev_id = abbrev_opt_start - 1;
8879 unsigned int first_id = ~0U;
8880 unsigned int last_abbrev_id = 0;
8881 unsigned int i;
8882 dw_die_ref die;
8883 if (abbrev_opt_base_type_end > abbrev_opt_start)
8884 abbrev_id = abbrev_opt_base_type_end - 1;
8885 /* Reassign abbreviation ids from abbrev_opt_start above, so that
8886 the most commonly used abbreviations come first. */
8887 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
8888 {
8889 dw_attr_node *a;
8890 unsigned ix;
8891
8892 /* If calc_base_type_die_sizes has been called, the CU and
8893 base types after it can't be optimized, because we've already
8894 calculated their DIE offsets. We've sorted them first. */
8895 if (die->die_abbrev < abbrev_opt_base_type_end)
8896 continue;
8897 if (die->die_abbrev != last_abbrev_id)
8898 {
8899 last_abbrev_id = die->die_abbrev;
8900 if (dwarf_version >= 5 && first_id != ~0U)
8901 optimize_implicit_const (first_id, i, implicit_consts);
8902 abbrev_id++;
8903 (*abbrev_die_table)[abbrev_id] = die;
8904 if (dwarf_version >= 5)
8905 {
8906 first_id = i;
8907 implicit_consts.truncate (0);
8908
8909 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8910 switch (AT_class (a))
8911 {
8912 case dw_val_class_const:
8913 case dw_val_class_unsigned_const:
8914 case dw_val_class_file:
8915 implicit_consts.safe_push (true);
8916 break;
8917 default:
8918 implicit_consts.safe_push (false);
8919 break;
8920 }
8921 }
8922 }
8923 else if (dwarf_version >= 5)
8924 {
8925 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8926 if (!implicit_consts[ix])
8927 continue;
8928 else
8929 {
8930 dw_attr_node *other_a
8931 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
8932 if (!dw_val_equal_p (&a->dw_attr_val,
8933 &other_a->dw_attr_val))
8934 implicit_consts[ix] = false;
8935 }
8936 }
8937 die->die_abbrev = abbrev_id;
8938 }
8939 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
8940 if (dwarf_version >= 5 && first_id != ~0U)
8941 optimize_implicit_const (first_id, i, implicit_consts);
8942 }
8943
8944 abbrev_opt_start = 0;
8945 abbrev_opt_base_type_end = 0;
8946 abbrev_usage_count.release ();
8947 sorted_abbrev_dies.release ();
8948 }
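
/* Why the reordering above helps (illustrative note, not part of the
   original source): each DIE starts with its abbreviation code encoded as
   a uleb128, so codes 1..127 cost one byte while codes 128..16383 cost
   two.  If an abbreviation used by, say, 5000 DIEs ends up with code 200
   while a rarely used one holds code 3, swapping them saves close to 5000
   bytes of .debug_info.  This is also why the optimization is attempted
   only for DWARF 5 (where DW_FORM_implicit_const brings extra savings) or
   when there are more than 127 abbreviations.  */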
8949 \f
8950 /* Return the power-of-two number of bytes necessary to represent VALUE. */
8951
8952 static int
8953 constant_size (unsigned HOST_WIDE_INT value)
8954 {
8955 int log;
8956
8957 if (value == 0)
8958 log = 0;
8959 else
8960 log = floor_log2 (value);
8961
8962 log = log / 8;
8963 log = 1 << (floor_log2 (log) + 1);
8964
8965 return log;
8966 }
8967
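/* Illustrative values (not part of the original source):

     constant_size (0)          == 1
     constant_size (0xff)       == 1
     constant_size (0x100)      == 2
     constant_size (0xffff)     == 2
     constant_size (0x10000)    == 4
     constant_size (0xffffffff) == 4
     constant_size (HOST_WIDE_INT_1U << 32) == 8

   i.e. the result is always 1, 2, 4 or 8, matching the fixed-size
   DW_FORM_data1/2/4/8 forms that value_format chooses from it.  */
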
8968 /* Return the size of a DIE as it is represented in the
8969 .debug_info section. */
8970
8971 static unsigned long
8972 size_of_die (dw_die_ref die)
8973 {
8974 unsigned long size = 0;
8975 dw_attr_node *a;
8976 unsigned ix;
8977 enum dwarf_form form;
8978
8979 size += size_of_uleb128 (die->die_abbrev);
8980 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8981 {
8982 switch (AT_class (a))
8983 {
8984 case dw_val_class_addr:
8985 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
8986 {
8987 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
8988 size += size_of_uleb128 (AT_index (a));
8989 }
8990 else
8991 size += DWARF2_ADDR_SIZE;
8992 break;
8993 case dw_val_class_offset:
8994 size += DWARF_OFFSET_SIZE;
8995 break;
8996 case dw_val_class_loc:
8997 {
8998 unsigned long lsize = size_of_locs (AT_loc (a));
8999
9000 /* Block length. */
9001 if (dwarf_version >= 4)
9002 size += size_of_uleb128 (lsize);
9003 else
9004 size += constant_size (lsize);
9005 size += lsize;
9006 }
9007 break;
9008 case dw_val_class_loc_list:
9009 if (dwarf_split_debug_info && dwarf_version >= 5)
9010 {
9011 gcc_assert (AT_loc_list (a)->num_assigned);
9012 size += size_of_uleb128 (AT_loc_list (a)->hash);
9013 }
9014 else
9015 size += DWARF_OFFSET_SIZE;
9016 break;
9017 case dw_val_class_range_list:
9018 if (value_format (a) == DW_FORM_rnglistx)
9019 {
9020 gcc_assert (rnglist_idx);
9021 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9022 size += size_of_uleb128 (r->idx);
9023 }
9024 else
9025 size += DWARF_OFFSET_SIZE;
9026 break;
9027 case dw_val_class_const:
9028 size += size_of_sleb128 (AT_int (a));
9029 break;
9030 case dw_val_class_unsigned_const:
9031 {
9032 int csize = constant_size (AT_unsigned (a));
9033 if (dwarf_version == 3
9034 && a->dw_attr == DW_AT_data_member_location
9035 && csize >= 4)
9036 size += size_of_uleb128 (AT_unsigned (a));
9037 else
9038 size += csize;
9039 }
9040 break;
9041 case dw_val_class_const_implicit:
9042 case dw_val_class_unsigned_const_implicit:
9043 case dw_val_class_file_implicit:
9044 /* These occupy no size in the DIE, just an extra sleb128 in
9045 .debug_abbrev. */
9046 break;
9047 case dw_val_class_const_double:
9048 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9049 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9050 size++; /* block */
9051 break;
9052 case dw_val_class_wide_int:
9053 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9054 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9055 if (get_full_len (*a->dw_attr_val.v.val_wide)
9056 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9057 size++; /* block */
9058 break;
9059 case dw_val_class_vec:
9060 size += constant_size (a->dw_attr_val.v.val_vec.length
9061 * a->dw_attr_val.v.val_vec.elt_size)
9062 + a->dw_attr_val.v.val_vec.length
9063 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9064 break;
9065 case dw_val_class_flag:
9066 if (dwarf_version >= 4)
9067 /* Currently all add_AT_flag calls pass in 1 as last argument,
9068 so DW_FORM_flag_present can be used. If that ever changes,
9069 we'll need to use DW_FORM_flag and have some optimization
9070 in build_abbrev_table that will change those to
9071 DW_FORM_flag_present if it is set to 1 in all DIEs using
9072 the same abbrev entry. */
9073 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9074 else
9075 size += 1;
9076 break;
9077 case dw_val_class_die_ref:
9078 if (AT_ref_external (a))
9079 {
9080 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9081 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9082 is sized by target address length, whereas in DWARF3
9083 it's always sized as an offset. */
9084 if (use_debug_types)
9085 size += DWARF_TYPE_SIGNATURE_SIZE;
9086 else if (dwarf_version == 2)
9087 size += DWARF2_ADDR_SIZE;
9088 else
9089 size += DWARF_OFFSET_SIZE;
9090 }
9091 else
9092 size += DWARF_OFFSET_SIZE;
9093 break;
9094 case dw_val_class_fde_ref:
9095 size += DWARF_OFFSET_SIZE;
9096 break;
9097 case dw_val_class_lbl_id:
9098 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9099 {
9100 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9101 size += size_of_uleb128 (AT_index (a));
9102 }
9103 else
9104 size += DWARF2_ADDR_SIZE;
9105 break;
9106 case dw_val_class_lineptr:
9107 case dw_val_class_macptr:
9108 case dw_val_class_loclistsptr:
9109 size += DWARF_OFFSET_SIZE;
9110 break;
9111 case dw_val_class_str:
9112 form = AT_string_form (a);
9113 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9114 size += DWARF_OFFSET_SIZE;
9115 else if (form == DW_FORM_GNU_str_index)
9116 size += size_of_uleb128 (AT_index (a));
9117 else
9118 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9119 break;
9120 case dw_val_class_file:
9121 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9122 break;
9123 case dw_val_class_data8:
9124 size += 8;
9125 break;
9126 case dw_val_class_vms_delta:
9127 size += DWARF_OFFSET_SIZE;
9128 break;
9129 case dw_val_class_high_pc:
9130 size += DWARF2_ADDR_SIZE;
9131 break;
9132 case dw_val_class_discr_value:
9133 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9134 break;
9135 case dw_val_class_discr_list:
9136 {
9137 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9138
9139 /* This is a block, so we have the block length and then its
9140 data. */
9141 size += constant_size (block_size) + block_size;
9142 }
9143 break;
9144 default:
9145 gcc_unreachable ();
9146 }
9147 }
9148
9149 return size;
9150 }
9151
9152 /* Size the debugging information associated with a given DIE. Visits the
9153 DIE's children recursively. Updates the global variable next_die_offset
9154 each time through. Uses the current value of next_die_offset to update the
9155 die_offset field in each DIE. */
9156
9157 static void
9158 calc_die_sizes (dw_die_ref die)
9159 {
9160 dw_die_ref c;
9161
9162 gcc_assert (die->die_offset == 0
9163 || (unsigned long int) die->die_offset == next_die_offset);
9164 die->die_offset = next_die_offset;
9165 next_die_offset += size_of_die (die);
9166
9167 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9168
9169 if (die->die_child != NULL)
9170 /* Count the null byte used to terminate sibling lists. */
9171 next_die_offset += 1;
9172 }
9173
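/* Illustrative walk-through (not part of the original source): if the CU
   DIE itself is 11 bytes and has two childless children of 7 and 5 bytes,
   then starting from next_die_offset == <CU header size> the three DIEs
   get offsets <hdr>, <hdr> + 11 and <hdr> + 18, and next_die_offset ends
   up at <hdr> + 23 + 1, the extra byte being the null that terminates the
   CU's list of children.  */
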
9174 /* Size just the base type children at the start of the CU.
9175 This is needed because build_abbrev_table needs to size locs,
9176 and sizing of type-based stack ops needs to know die_offset
9177 values for the base types. */
9178
9179 static void
9180 calc_base_type_die_sizes (void)
9181 {
9182 unsigned long die_offset = (dwarf_split_debug_info
9183 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9184 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9185 unsigned int i;
9186 dw_die_ref base_type;
9187 #if ENABLE_ASSERT_CHECKING
9188 dw_die_ref prev = comp_unit_die ()->die_child;
9189 #endif
9190
9191 die_offset += size_of_die (comp_unit_die ());
9192 for (i = 0; base_types.iterate (i, &base_type); i++)
9193 {
9194 #if ENABLE_ASSERT_CHECKING
9195 gcc_assert (base_type->die_offset == 0
9196 && prev->die_sib == base_type
9197 && base_type->die_child == NULL
9198 && base_type->die_abbrev);
9199 prev = base_type;
9200 #endif
9201 if (abbrev_opt_start
9202 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9203 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9204 base_type->die_offset = die_offset;
9205 die_offset += size_of_die (base_type);
9206 }
9207 }
9208
9209 /* Set the marks for a die and its children. We do this so
9210 that we know whether or not a reference needs to use FORM_ref_addr; only
9211 DIEs in the same CU will be marked. We used to clear out the offset
9212 and use that as the flag, but ran into ordering problems. */
9213
9214 static void
9215 mark_dies (dw_die_ref die)
9216 {
9217 dw_die_ref c;
9218
9219 gcc_assert (!die->die_mark);
9220
9221 die->die_mark = 1;
9222 FOR_EACH_CHILD (die, c, mark_dies (c));
9223 }
9224
9225 /* Clear the marks for a die and its children. */
9226
9227 static void
9228 unmark_dies (dw_die_ref die)
9229 {
9230 dw_die_ref c;
9231
9232 if (! use_debug_types)
9233 gcc_assert (die->die_mark);
9234
9235 die->die_mark = 0;
9236 FOR_EACH_CHILD (die, c, unmark_dies (c));
9237 }
9238
9239 /* Clear the marks for a die, its children and referred dies. */
9240
9241 static void
9242 unmark_all_dies (dw_die_ref die)
9243 {
9244 dw_die_ref c;
9245 dw_attr_node *a;
9246 unsigned ix;
9247
9248 if (!die->die_mark)
9249 return;
9250 die->die_mark = 0;
9251
9252 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9253
9254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9255 if (AT_class (a) == dw_val_class_die_ref)
9256 unmark_all_dies (AT_ref (a));
9257 }
9258
9259 /* Calculate if the entry should appear in the final output file. It may be
9260 from a pruned type. */
9261
9262 static bool
9263 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9264 {
9265 /* By limiting gnu pubnames to definitions only, gold can generate a
9266 gdb index without entries for declarations, which don't include
9267 enough information to be useful. */
9268 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9269 return false;
9270
9271 if (table == pubname_table)
9272 {
9273 /* Enumerator names are part of the pubname table, but the
9274 parent DW_TAG_enumeration_type die may have been pruned.
9275 Don't output them if that is the case. */
9276 if (p->die->die_tag == DW_TAG_enumerator &&
9277 (p->die->die_parent == NULL
9278 || !p->die->die_parent->die_perennial_p))
9279 return false;
9280
9281 /* Everything else in the pubname table is included. */
9282 return true;
9283 }
9284
9285 /* The pubtypes table shouldn't include types that have been
9286 pruned. */
9287 return (p->die->die_offset != 0
9288 || !flag_eliminate_unused_debug_types);
9289 }
9290
9291 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9292 generated for the compilation unit. */
9293
9294 static unsigned long
9295 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9296 {
9297 unsigned long size;
9298 unsigned i;
9299 pubname_entry *p;
9300 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9301
9302 size = DWARF_PUBNAMES_HEADER_SIZE;
9303 FOR_EACH_VEC_ELT (*names, i, p)
9304 if (include_pubname_in_output (names, p))
9305 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9306
9307 size += DWARF_OFFSET_SIZE;
9308 return size;
9309 }
9310
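/* Illustrative example (not part of the original source): with 32-bit
   DWARF (DWARF_OFFSET_SIZE == 4) and plain pubnames, an entry for the name
   "foo" contributes 4 + 3 + 1 == 8 bytes - a 4-byte DIE offset followed by
   the NUL-terminated string; the GNU flavor (debug_generate_pub_sections
   == 2) adds one flags byte per entry, and the final DWARF_OFFSET_SIZE
   accounts for the terminating zero DIE offset of the table.  */
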
9311 /* Return the size of the information in the .debug_aranges section. */
9312
9313 static unsigned long
9314 size_of_aranges (void)
9315 {
9316 unsigned long size;
9317
9318 size = DWARF_ARANGES_HEADER_SIZE;
9319
9320 /* Count the address/length pair for this compilation unit. */
9321 if (text_section_used)
9322 size += 2 * DWARF2_ADDR_SIZE;
9323 if (cold_text_section_used)
9324 size += 2 * DWARF2_ADDR_SIZE;
9325 if (have_multiple_function_sections)
9326 {
9327 unsigned fde_idx;
9328 dw_fde_ref fde;
9329
9330 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9331 {
9332 if (DECL_IGNORED_P (fde->decl))
9333 continue;
9334 if (!fde->in_std_section)
9335 size += 2 * DWARF2_ADDR_SIZE;
9336 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9337 size += 2 * DWARF2_ADDR_SIZE;
9338 }
9339 }
9340
9341 /* Count the two zero words used to terminate the address range table. */
9342 size += 2 * DWARF2_ADDR_SIZE;
9343 return size;
9344 }
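
/* Illustrative example (not part of the original source): on a 64-bit
   target (DWARF2_ADDR_SIZE == 8) whose code all lives in the standard text
   section, the body of .debug_aranges is one address/length pair for the
   text range plus the terminating pair of zero words, i.e. 4 * 8 == 32
   bytes on top of DWARF_ARANGES_HEADER_SIZE.  */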
9345 \f
9346 /* Select the encoding of an attribute value. */
9347
9348 static enum dwarf_form
9349 value_format (dw_attr_node *a)
9350 {
9351 switch (AT_class (a))
9352 {
9353 case dw_val_class_addr:
9354 /* Only very few attributes allow DW_FORM_addr. */
9355 switch (a->dw_attr)
9356 {
9357 case DW_AT_low_pc:
9358 case DW_AT_high_pc:
9359 case DW_AT_entry_pc:
9360 case DW_AT_trampoline:
9361 return (AT_index (a) == NOT_INDEXED
9362 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9363 default:
9364 break;
9365 }
9366 switch (DWARF2_ADDR_SIZE)
9367 {
9368 case 1:
9369 return DW_FORM_data1;
9370 case 2:
9371 return DW_FORM_data2;
9372 case 4:
9373 return DW_FORM_data4;
9374 case 8:
9375 return DW_FORM_data8;
9376 default:
9377 gcc_unreachable ();
9378 }
9379 case dw_val_class_loc_list:
9380 if (dwarf_split_debug_info
9381 && dwarf_version >= 5
9382 && AT_loc_list (a)->num_assigned)
9383 return DW_FORM_loclistx;
9384 /* FALLTHRU */
9385 case dw_val_class_range_list:
9386 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9387 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9388 care about the sizes of .debug* sections in shared libraries and
9389 executables and don't take into account relocations that affect just
9390 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9391 table in the .debug_rnglists section. */
9392 if (dwarf_split_debug_info
9393 && dwarf_version >= 5
9394 && AT_class (a) == dw_val_class_range_list
9395 && rnglist_idx
9396 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9397 return DW_FORM_rnglistx;
9398 if (dwarf_version >= 4)
9399 return DW_FORM_sec_offset;
9400 /* FALLTHRU */
9401 case dw_val_class_vms_delta:
9402 case dw_val_class_offset:
9403 switch (DWARF_OFFSET_SIZE)
9404 {
9405 case 4:
9406 return DW_FORM_data4;
9407 case 8:
9408 return DW_FORM_data8;
9409 default:
9410 gcc_unreachable ();
9411 }
9412 case dw_val_class_loc:
9413 if (dwarf_version >= 4)
9414 return DW_FORM_exprloc;
9415 switch (constant_size (size_of_locs (AT_loc (a))))
9416 {
9417 case 1:
9418 return DW_FORM_block1;
9419 case 2:
9420 return DW_FORM_block2;
9421 case 4:
9422 return DW_FORM_block4;
9423 default:
9424 gcc_unreachable ();
9425 }
9426 case dw_val_class_const:
9427 return DW_FORM_sdata;
9428 case dw_val_class_unsigned_const:
9429 switch (constant_size (AT_unsigned (a)))
9430 {
9431 case 1:
9432 return DW_FORM_data1;
9433 case 2:
9434 return DW_FORM_data2;
9435 case 4:
9436 /* In DWARF3 DW_AT_data_member_location with
9437 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9438 constant, so we need to use DW_FORM_udata if we need
9439 a large constant. */
9440 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9441 return DW_FORM_udata;
9442 return DW_FORM_data4;
9443 case 8:
9444 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9445 return DW_FORM_udata;
9446 return DW_FORM_data8;
9447 default:
9448 gcc_unreachable ();
9449 }
9450 case dw_val_class_const_implicit:
9451 case dw_val_class_unsigned_const_implicit:
9452 case dw_val_class_file_implicit:
9453 return DW_FORM_implicit_const;
9454 case dw_val_class_const_double:
9455 switch (HOST_BITS_PER_WIDE_INT)
9456 {
9457 case 8:
9458 return DW_FORM_data2;
9459 case 16:
9460 return DW_FORM_data4;
9461 case 32:
9462 return DW_FORM_data8;
9463 case 64:
9464 if (dwarf_version >= 5)
9465 return DW_FORM_data16;
9466 /* FALLTHRU */
9467 default:
9468 return DW_FORM_block1;
9469 }
9470 case dw_val_class_wide_int:
9471 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9472 {
9473 case 8:
9474 return DW_FORM_data1;
9475 case 16:
9476 return DW_FORM_data2;
9477 case 32:
9478 return DW_FORM_data4;
9479 case 64:
9480 return DW_FORM_data8;
9481 case 128:
9482 if (dwarf_version >= 5)
9483 return DW_FORM_data16;
9484 /* FALLTHRU */
9485 default:
9486 return DW_FORM_block1;
9487 }
9488 case dw_val_class_vec:
9489 switch (constant_size (a->dw_attr_val.v.val_vec.length
9490 * a->dw_attr_val.v.val_vec.elt_size))
9491 {
9492 case 1:
9493 return DW_FORM_block1;
9494 case 2:
9495 return DW_FORM_block2;
9496 case 4:
9497 return DW_FORM_block4;
9498 default:
9499 gcc_unreachable ();
9500 }
9501 case dw_val_class_flag:
9502 if (dwarf_version >= 4)
9503 {
9504 /* Currently all add_AT_flag calls pass in 1 as last argument,
9505 so DW_FORM_flag_present can be used. If that ever changes,
9506 we'll need to use DW_FORM_flag and have some optimization
9507 in build_abbrev_table that will change those to
9508 DW_FORM_flag_present if it is set to 1 in all DIEs using
9509 the same abbrev entry. */
9510 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9511 return DW_FORM_flag_present;
9512 }
9513 return DW_FORM_flag;
9514 case dw_val_class_die_ref:
9515 if (AT_ref_external (a))
9516 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9517 else
9518 return DW_FORM_ref;
9519 case dw_val_class_fde_ref:
9520 return DW_FORM_data;
9521 case dw_val_class_lbl_id:
9522 return (AT_index (a) == NOT_INDEXED
9523 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9524 case dw_val_class_lineptr:
9525 case dw_val_class_macptr:
9526 case dw_val_class_loclistsptr:
9527 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9528 case dw_val_class_str:
9529 return AT_string_form (a);
9530 case dw_val_class_file:
9531 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9532 {
9533 case 1:
9534 return DW_FORM_data1;
9535 case 2:
9536 return DW_FORM_data2;
9537 case 4:
9538 return DW_FORM_data4;
9539 default:
9540 gcc_unreachable ();
9541 }
9542
9543 case dw_val_class_data8:
9544 return DW_FORM_data8;
9545
9546 case dw_val_class_high_pc:
9547 switch (DWARF2_ADDR_SIZE)
9548 {
9549 case 1:
9550 return DW_FORM_data1;
9551 case 2:
9552 return DW_FORM_data2;
9553 case 4:
9554 return DW_FORM_data4;
9555 case 8:
9556 return DW_FORM_data8;
9557 default:
9558 gcc_unreachable ();
9559 }
9560
9561 case dw_val_class_discr_value:
9562 return (a->dw_attr_val.v.val_discr_value.pos
9563 ? DW_FORM_udata
9564 : DW_FORM_sdata);
9565 case dw_val_class_discr_list:
9566 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9567 {
9568 case 1:
9569 return DW_FORM_block1;
9570 case 2:
9571 return DW_FORM_block2;
9572 case 4:
9573 return DW_FORM_block4;
9574 default:
9575 gcc_unreachable ();
9576 }
9577
9578 default:
9579 gcc_unreachable ();
9580 }
9581 }
9582
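/* Illustrative examples (not part of the original source): an unsigned
   constant attribute with value 0x12345 has constant_size == 4 and is
   normally given DW_FORM_data4, but if the attribute is
   DW_AT_data_member_location and we are emitting DWARF 3 the function
   falls back to DW_FORM_udata, since in DWARF 3 that attribute with
   DW_FORM_data4/data8 would be interpreted as a loclistptr rather than a
   constant.  */
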
9583 /* Output the encoding of an attribute value. */
9584
9585 static void
9586 output_value_format (dw_attr_node *a)
9587 {
9588 enum dwarf_form form = value_format (a);
9589
9590 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9591 }
9592
9593 /* Given a die and id, produce the appropriate abbreviations. */
9594
9595 static void
9596 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9597 {
9598 unsigned ix;
9599 dw_attr_node *a_attr;
9600
9601 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9602 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9603 dwarf_tag_name (abbrev->die_tag));
9604
9605 if (abbrev->die_child != NULL)
9606 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9607 else
9608 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9609
9610 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9611 {
9612 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9613 dwarf_attr_name (a_attr->dw_attr));
9614 output_value_format (a_attr);
9615 if (value_format (a_attr) == DW_FORM_implicit_const)
9616 {
9617 if (AT_class (a_attr) == dw_val_class_file_implicit)
9618 {
9619 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9620 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9621 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9622 }
9623 else
9624 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9625 }
9626 }
9627
9628 dw2_asm_output_data (1, 0, NULL);
9629 dw2_asm_output_data (1, 0, NULL);
9630 }
9631
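/* Illustrative .debug_abbrev entry as it might appear in the assembly
   output (a sketch only - the DWARF constants are standard, but the tag
   and attribute set are hypothetical and the directives depend on the
   target assembler):

        .uleb128 0x3    # (abbrev code)
        .uleb128 0x2e   # (TAG: DW_TAG_subprogram)
        .byte    0x1    # DW_children_yes
        .uleb128 0x3    # (DW_AT_name)
        .uleb128 0xe    # (DW_FORM_strp)
        .uleb128 0x3a   # (DW_AT_decl_file)
        .uleb128 0xb    # (DW_FORM_data1)
        .byte    0      # end of attribute list
        .byte    0      # end of this abbreviation  */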
9632
9633 /* Output the .debug_abbrev section which defines the DIE abbreviation
9634 table. */
9635
9636 static void
9637 output_abbrev_section (void)
9638 {
9639 unsigned int abbrev_id;
9640 dw_die_ref abbrev;
9641
9642 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9643 if (abbrev_id != 0)
9644 output_die_abbrevs (abbrev_id, abbrev);
9645
9646 /* Terminate the table. */
9647 dw2_asm_output_data (1, 0, NULL);
9648 }
9649
9650 /* Return a new location list, given the begin and end range, and the
9651 expression. */
9652
9653 static inline dw_loc_list_ref
9654 new_loc_list (dw_loc_descr_ref expr, const char *begin, const char *end,
9655 const char *section)
9656 {
9657 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9658
9659 retlist->begin = begin;
9660 retlist->begin_entry = NULL;
9661 retlist->end = end;
9662 retlist->expr = expr;
9663 retlist->section = section;
9664
9665 return retlist;
9666 }
9667
9668 /* Generate a new internal symbol for this location list node, if it
9669 hasn't got one yet. */
9670
9671 static inline void
9672 gen_llsym (dw_loc_list_ref list)
9673 {
9674 gcc_assert (!list->ll_symbol);
9675 list->ll_symbol = gen_internal_sym ("LLST");
9676 }
9677
9678 /* Output the location list given to us. */
9679
9680 static void
9681 output_loc_list (dw_loc_list_ref list_head)
9682 {
9683 if (list_head->emitted)
9684 return;
9685 list_head->emitted = true;
9686
9687 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
9688
9689 dw_loc_list_ref curr = list_head;
9690 const char *last_section = NULL;
9691 const char *base_label = NULL;
9692
9693 /* Walk the location list, and output each range + expression. */
9694 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
9695 {
9696 unsigned long size;
9697 /* Don't output an entry that starts and ends at the same address. */
9698 if (strcmp (curr->begin, curr->end) == 0 && !curr->force)
9699 continue;
9700 size = size_of_locs (curr->expr);
9701 /* If the expression is too large, drop it on the floor. We could
9702 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9703 in the expression, but >= 64KB expressions for a single value
9704 in a single range are unlikely to be very useful. */
9705 if (dwarf_version < 5 && size > 0xffff)
9706 continue;
9707 if (dwarf_version >= 5)
9708 {
9709 if (dwarf_split_debug_info)
9710 {
9711 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
9712 uleb128 index into .debug_addr and uleb128 length. */
9713 dw2_asm_output_data (1, DW_LLE_startx_length,
9714 "DW_LLE_startx_length (%s)",
9715 list_head->ll_symbol);
9716 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9717 "Location list range start index "
9718 "(%s)", curr->begin);
9719 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
9720 For that case we probably need to emit DW_LLE_startx_endx,
9721 but we'd need 2 .debug_addr entries rather than just one. */
9722 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9723 "Location list length (%s)",
9724 list_head->ll_symbol);
9725 }
9726 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
9727 {
9728 /* If all code is in .text section, the base address is
9729 already provided by the CU attributes. Use
9730 DW_LLE_offset_pair where both addresses are uleb128 encoded
9731 offsets against that base. */
9732 dw2_asm_output_data (1, DW_LLE_offset_pair,
9733 "DW_LLE_offset_pair (%s)",
9734 list_head->ll_symbol);
9735 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
9736 "Location list begin address (%s)",
9737 list_head->ll_symbol);
9738 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
9739 "Location list end address (%s)",
9740 list_head->ll_symbol);
9741 }
9742 else if (HAVE_AS_LEB128)
9743 {
9744 /* Otherwise, find out how many consecutive entries could share
9745 the same base entry. If just one, emit DW_LLE_start_length,
9746 otherwise emit DW_LLE_base_address for the base address
9747 followed by a series of DW_LLE_offset_pair. */
9748 if (last_section == NULL || curr->section != last_section)
9749 {
9750 dw_loc_list_ref curr2;
9751 for (curr2 = curr->dw_loc_next; curr2 != NULL;
9752 curr2 = curr2->dw_loc_next)
9753 {
9754 if (strcmp (curr2->begin, curr2->end) == 0
9755 && !curr2->force)
9756 continue;
9757 break;
9758 }
9759 if (curr2 == NULL || curr->section != curr2->section)
9760 last_section = NULL;
9761 else
9762 {
9763 last_section = curr->section;
9764 base_label = curr->begin;
9765 dw2_asm_output_data (1, DW_LLE_base_address,
9766 "DW_LLE_base_address (%s)",
9767 list_head->ll_symbol);
9768 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
9769 "Base address (%s)",
9770 list_head->ll_symbol);
9771 }
9772 }
9773 /* Only one entry with the same base address. Use
9774 DW_LLE_start_length with absolute address and uleb128
9775 length. */
9776 if (last_section == NULL)
9777 {
9778 dw2_asm_output_data (1, DW_LLE_start_length,
9779 "DW_LLE_start_length (%s)",
9780 list_head->ll_symbol);
9781 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9782 "Location list begin address (%s)",
9783 list_head->ll_symbol);
9784 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
9785 "Location list length "
9786 "(%s)", list_head->ll_symbol);
9787 }
9788 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
9789 DW_LLE_base_address. */
9790 else
9791 {
9792 dw2_asm_output_data (1, DW_LLE_offset_pair,
9793 "DW_LLE_offset_pair (%s)",
9794 list_head->ll_symbol);
9795 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
9796 "Location list begin address "
9797 "(%s)", list_head->ll_symbol);
9798 dw2_asm_output_delta_uleb128 (curr->end, base_label,
9799 "Location list end address "
9800 "(%s)", list_head->ll_symbol);
9801 }
9802 }
9803 /* The assembler does not support the .uleb128 directive. Emit
9804 DW_LLE_start_end with a pair of absolute addresses. */
9805 else
9806 {
9807 dw2_asm_output_data (1, DW_LLE_start_end,
9808 "DW_LLE_start_end (%s)",
9809 list_head->ll_symbol);
9810 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9811 "Location list begin address (%s)",
9812 list_head->ll_symbol);
9813 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9814 "Location list end address (%s)",
9815 list_head->ll_symbol);
9816 }
9817 }
9818 else if (dwarf_split_debug_info)
9819 {
9820 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
9821 and 4 byte length. */
9822 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
9823 "Location list start/length entry (%s)",
9824 list_head->ll_symbol);
9825 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
9826 "Location list range start index (%s)",
9827 curr->begin);
9828 /* The length field is 4 bytes. If we ever need to support
9829 an 8-byte length, we can add a new DW_LLE code or fall back
9830 to DW_LLE_GNU_start_end_entry. */
9831 dw2_asm_output_delta (4, curr->end, curr->begin,
9832 "Location list range length (%s)",
9833 list_head->ll_symbol);
9834 }
9835 else if (!have_multiple_function_sections)
9836 {
9837 /* Pair of relative addresses against start of text section. */
9838 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
9839 "Location list begin address (%s)",
9840 list_head->ll_symbol);
9841 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
9842 "Location list end address (%s)",
9843 list_head->ll_symbol);
9844 }
9845 else
9846 {
9847 /* Pair of absolute addresses. */
9848 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
9849 "Location list begin address (%s)",
9850 list_head->ll_symbol);
9851 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
9852 "Location list end address (%s)",
9853 list_head->ll_symbol);
9854 }
9855
9856 /* Output the block length for this list of location operations. */
9857 if (dwarf_version >= 5)
9858 dw2_asm_output_data_uleb128 (size, "Location expression size");
9859 else
9860 {
9861 gcc_assert (size <= 0xffff);
9862 dw2_asm_output_data (2, size, "Location expression size");
9863 }
9864
9865 output_loc_sequence (curr->expr, -1);
9866 }
9867
9868 /* And finally list termination. */
9869 if (dwarf_version >= 5)
9870 dw2_asm_output_data (1, DW_LLE_end_of_list,
9871 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
9872 else if (dwarf_split_debug_info)
9873 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
9874 "Location list terminator (%s)",
9875 list_head->ll_symbol);
9876 else
9877 {
9878 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9879 "Location list terminator begin (%s)",
9880 list_head->ll_symbol);
9881 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
9882 "Location list terminator end (%s)",
9883 list_head->ll_symbol);
9884 }
9885 }
9886
9887 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
9888 section. Emit a relocated reference if val_entry is NULL; otherwise,
9889 emit an indirect reference. */
9890
9891 static void
9892 output_range_list_offset (dw_attr_node *a)
9893 {
9894 const char *name = dwarf_attr_name (a->dw_attr);
9895
9896 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
9897 {
9898 if (dwarf_version >= 5)
9899 {
9900 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9901 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
9902 debug_ranges_section, "%s", name);
9903 }
9904 else
9905 {
9906 char *p = strchr (ranges_section_label, '\0');
9907 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
9908 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
9909 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
9910 debug_ranges_section, "%s", name);
9911 *p = '\0';
9912 }
9913 }
9914 else if (dwarf_version >= 5)
9915 {
9916 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9917 gcc_assert (rnglist_idx);
9918 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
9919 }
9920 else
9921 dw2_asm_output_data (DWARF_OFFSET_SIZE,
9922 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
9923 "%s (offset from %s)", name, ranges_section_label);
9924 }
9925
9926 /* Output the offset into the debug_loc section. */
9927
9928 static void
9929 output_loc_list_offset (dw_attr_node *a)
9930 {
9931 char *sym = AT_loc_list (a)->ll_symbol;
9932
9933 gcc_assert (sym);
9934 if (!dwarf_split_debug_info)
9935 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
9936 "%s", dwarf_attr_name (a->dw_attr));
9937 else if (dwarf_version >= 5)
9938 {
9939 gcc_assert (AT_loc_list (a)->num_assigned);
9940 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
9941 dwarf_attr_name (a->dw_attr),
9942 sym);
9943 }
9944 else
9945 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
9946 "%s", dwarf_attr_name (a->dw_attr));
9947 }
9948
9949 /* Output an attribute's index or value appropriately. */
9950
9951 static void
9952 output_attr_index_or_value (dw_attr_node *a)
9953 {
9954 const char *name = dwarf_attr_name (a->dw_attr);
9955
9956 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9957 {
9958 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
9959 return;
9960 }
9961 switch (AT_class (a))
9962 {
9963 case dw_val_class_addr:
9964 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
9965 break;
9966 case dw_val_class_high_pc:
9967 case dw_val_class_lbl_id:
9968 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
9969 break;
9970 default:
9971 gcc_unreachable ();
9972 }
9973 }
9974
9975 /* Output a type signature. */
9976
9977 static inline void
9978 output_signature (const char *sig, const char *name)
9979 {
9980 int i;
9981
9982 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
9983 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
9984 }
9985
9986 /* Output a discriminant value. */
9987
9988 static inline void
9989 output_discr_value (dw_discr_value *discr_value, const char *name)
9990 {
9991 if (discr_value->pos)
9992 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
9993 else
9994 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
9995 }
9996
9997 /* Output the DIE and its attributes. Called recursively to generate
9998 the definitions of each child DIE. */
9999
10000 static void
10001 output_die (dw_die_ref die)
10002 {
10003 dw_attr_node *a;
10004 dw_die_ref c;
10005 unsigned long size;
10006 unsigned ix;
10007
10008 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10009 (unsigned long)die->die_offset,
10010 dwarf_tag_name (die->die_tag));
10011
10012 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10013 {
10014 const char *name = dwarf_attr_name (a->dw_attr);
10015
10016 switch (AT_class (a))
10017 {
10018 case dw_val_class_addr:
10019 output_attr_index_or_value (a);
10020 break;
10021
10022 case dw_val_class_offset:
10023 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10024 "%s", name);
10025 break;
10026
10027 case dw_val_class_range_list:
10028 output_range_list_offset (a);
10029 break;
10030
10031 case dw_val_class_loc:
10032 size = size_of_locs (AT_loc (a));
10033
10034 /* Output the block length for this list of location operations. */
10035 if (dwarf_version >= 4)
10036 dw2_asm_output_data_uleb128 (size, "%s", name);
10037 else
10038 dw2_asm_output_data (constant_size (size), size, "%s", name);
10039
10040 output_loc_sequence (AT_loc (a), -1);
10041 break;
10042
10043 case dw_val_class_const:
10044 /* ??? It would be slightly more efficient to use a scheme like the one
10045 used for unsigned constants below, but gdb 4.x does not sign
10046 extend. Gdb 5.x does sign extend. */
10047 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10048 break;
10049
10050 case dw_val_class_unsigned_const:
10051 {
10052 int csize = constant_size (AT_unsigned (a));
10053 if (dwarf_version == 3
10054 && a->dw_attr == DW_AT_data_member_location
10055 && csize >= 4)
10056 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10057 else
10058 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10059 }
10060 break;
10061
10062 case dw_val_class_const_implicit:
10063 if (flag_debug_asm)
10064 fprintf (asm_out_file, "\t\t\t%s %s ("
10065 HOST_WIDE_INT_PRINT_DEC ")\n",
10066 ASM_COMMENT_START, name, AT_int (a));
10067 break;
10068
10069 case dw_val_class_unsigned_const_implicit:
10070 if (flag_debug_asm)
10071 fprintf (asm_out_file, "\t\t\t%s %s ("
10072 HOST_WIDE_INT_PRINT_HEX ")\n",
10073 ASM_COMMENT_START, name, AT_unsigned (a));
10074 break;
10075
10076 case dw_val_class_const_double:
10077 {
10078 unsigned HOST_WIDE_INT first, second;
10079
10080 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10081 dw2_asm_output_data (1,
10082 HOST_BITS_PER_DOUBLE_INT
10083 / HOST_BITS_PER_CHAR,
10084 NULL);
10085
10086 if (WORDS_BIG_ENDIAN)
10087 {
10088 first = a->dw_attr_val.v.val_double.high;
10089 second = a->dw_attr_val.v.val_double.low;
10090 }
10091 else
10092 {
10093 first = a->dw_attr_val.v.val_double.low;
10094 second = a->dw_attr_val.v.val_double.high;
10095 }
10096
10097 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10098 first, "%s", name);
10099 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10100 second, NULL);
10101 }
10102 break;
10103
10104 case dw_val_class_wide_int:
10105 {
10106 int i;
10107 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10108 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10109 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10110 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10111 * l, NULL);
10112
10113 if (WORDS_BIG_ENDIAN)
10114 for (i = len - 1; i >= 0; --i)
10115 {
10116 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10117 "%s", name);
10118 name = "";
10119 }
10120 else
10121 for (i = 0; i < len; ++i)
10122 {
10123 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10124 "%s", name);
10125 name = "";
10126 }
10127 }
10128 break;
10129
10130 case dw_val_class_vec:
10131 {
10132 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10133 unsigned int len = a->dw_attr_val.v.val_vec.length;
10134 unsigned int i;
10135 unsigned char *p;
10136
10137 dw2_asm_output_data (constant_size (len * elt_size),
10138 len * elt_size, "%s", name);
10139 if (elt_size > sizeof (HOST_WIDE_INT))
10140 {
10141 elt_size /= 2;
10142 len *= 2;
10143 }
10144 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10145 i < len;
10146 i++, p += elt_size)
10147 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10148 "fp or vector constant word %u", i);
10149 break;
10150 }
10151
10152 case dw_val_class_flag:
10153 if (dwarf_version >= 4)
10154 {
10155 /* Currently all add_AT_flag calls pass in 1 as last argument,
10156 so DW_FORM_flag_present can be used. If that ever changes,
10157 we'll need to use DW_FORM_flag and have some optimization
10158 in build_abbrev_table that will change those to
10159 DW_FORM_flag_present if it is set to 1 in all DIEs using
10160 the same abbrev entry. */
10161 gcc_assert (AT_flag (a) == 1);
10162 if (flag_debug_asm)
10163 fprintf (asm_out_file, "\t\t\t%s %s\n",
10164 ASM_COMMENT_START, name);
10165 break;
10166 }
10167 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10168 break;
10169
10170 case dw_val_class_loc_list:
10171 output_loc_list_offset (a);
10172 break;
10173
10174 case dw_val_class_die_ref:
10175 if (AT_ref_external (a))
10176 {
10177 if (AT_ref (a)->comdat_type_p)
10178 {
10179 comdat_type_node *type_node
10180 = AT_ref (a)->die_id.die_type_node;
10181
10182 gcc_assert (type_node);
10183 output_signature (type_node->signature, name);
10184 }
10185 else
10186 {
10187 const char *sym = AT_ref (a)->die_id.die_symbol;
10188 int size;
10189
10190 gcc_assert (sym);
10191 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10192 length, whereas in DWARF3 it's always sized as an
10193 offset. */
10194 if (dwarf_version == 2)
10195 size = DWARF2_ADDR_SIZE;
10196 else
10197 size = DWARF_OFFSET_SIZE;
10198 /* ??? We cannot unconditionally output die_offset if
10199 non-zero - others might create references to those
10200 DIEs via symbols.
10201 And we do not clear its DIE offset after outputting it
10202 (and the label refers to the actual DIEs, not the
10203 DWARF CU unit header, which is the case where using label + offset
10204 would be the correct thing to do).
10205 ??? This is the reason for the with_offset flag. */
10206 if (AT_ref (a)->with_offset)
10207 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10208 debug_info_section, "%s", name);
10209 else
10210 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10211 name);
10212 }
10213 }
10214 else
10215 {
10216 gcc_assert (AT_ref (a)->die_offset);
10217 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10218 "%s", name);
10219 }
10220 break;
10221
10222 case dw_val_class_fde_ref:
10223 {
10224 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10225
10226 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10227 a->dw_attr_val.v.val_fde_index * 2);
10228 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10229 "%s", name);
10230 }
10231 break;
10232
10233 case dw_val_class_vms_delta:
10234 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10235 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10236 AT_vms_delta2 (a), AT_vms_delta1 (a),
10237 "%s", name);
10238 #else
10239 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10240 AT_vms_delta2 (a), AT_vms_delta1 (a),
10241 "%s", name);
10242 #endif
10243 break;
10244
10245 case dw_val_class_lbl_id:
10246 output_attr_index_or_value (a);
10247 break;
10248
10249 case dw_val_class_lineptr:
10250 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10251 debug_line_section, "%s", name);
10252 break;
10253
10254 case dw_val_class_macptr:
10255 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10256 debug_macinfo_section, "%s", name);
10257 break;
10258
10259 case dw_val_class_loclistsptr:
10260 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10261 debug_loc_section, "%s", name);
10262 break;
10263
10264 case dw_val_class_str:
10265 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10266 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10267 a->dw_attr_val.v.val_str->label,
10268 debug_str_section,
10269 "%s: \"%s\"", name, AT_string (a));
10270 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10271 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10272 a->dw_attr_val.v.val_str->label,
10273 debug_line_str_section,
10274 "%s: \"%s\"", name, AT_string (a));
10275 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10276 dw2_asm_output_data_uleb128 (AT_index (a),
10277 "%s: \"%s\"", name, AT_string (a));
10278 else
10279 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10280 break;
10281
10282 case dw_val_class_file:
10283 {
10284 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10285
10286 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10287 a->dw_attr_val.v.val_file->filename);
10288 break;
10289 }
10290
10291 case dw_val_class_file_implicit:
10292 if (flag_debug_asm)
10293 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10294 ASM_COMMENT_START, name,
10295 maybe_emit_file (a->dw_attr_val.v.val_file),
10296 a->dw_attr_val.v.val_file->filename);
10297 break;
10298
10299 case dw_val_class_data8:
10300 {
10301 int i;
10302
10303 for (i = 0; i < 8; i++)
10304 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10305 i == 0 ? "%s" : NULL, name);
10306 break;
10307 }
10308
10309 case dw_val_class_high_pc:
10310 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10311 get_AT_low_pc (die), "DW_AT_high_pc");
10312 break;
10313
10314 case dw_val_class_discr_value:
10315 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10316 break;
10317
10318 case dw_val_class_discr_list:
10319 {
10320 dw_discr_list_ref list = AT_discr_list (a);
10321 const int size = size_of_discr_list (list);
10322
10323 /* This is a block, so output its length first. */
10324 dw2_asm_output_data (constant_size (size), size,
10325 "%s: block size", name);
10326
10327 for (; list != NULL; list = list->dw_discr_next)
10328 {
10329 /* One byte for the discriminant value descriptor, and then as
10330 many LEB128 numbers as required. */
10331 if (list->dw_discr_range)
10332 dw2_asm_output_data (1, DW_DSC_range,
10333 "%s: DW_DSC_range", name);
10334 else
10335 dw2_asm_output_data (1, DW_DSC_label,
10336 "%s: DW_DSC_label", name);
10337
10338 output_discr_value (&list->dw_discr_lower_bound, name);
10339 if (list->dw_discr_range)
10340 output_discr_value (&list->dw_discr_upper_bound, name);
10341 }
10342 break;
10343 }
10344
10345 default:
10346 gcc_unreachable ();
10347 }
10348 }
10349
10350 FOR_EACH_CHILD (die, c, output_die (c));
10351
10352 /* Add null byte to terminate sibling list. */
10353 if (die->die_child != NULL)
10354 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10355 (unsigned long) die->die_offset);
10356 }
10357
10358 /* Output the compilation unit that appears at the beginning of the
10359 .debug_info section, and precedes the DIE descriptions. */
10360
10361 static void
10362 output_compilation_unit_header (enum dwarf_unit_type ut)
10363 {
10364 if (!XCOFF_DEBUGGING_INFO)
10365 {
10366 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10367 dw2_asm_output_data (4, 0xffffffff,
10368 "Initial length escape value indicating 64-bit DWARF extension");
10369 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10370 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10371 "Length of Compilation Unit Info");
10372 }
10373
10374 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10375 if (dwarf_version >= 5)
10376 {
10377 const char *name;
10378 switch (ut)
10379 {
10380 case DW_UT_compile: name = "DW_UT_compile"; break;
10381 case DW_UT_type: name = "DW_UT_type"; break;
10382 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10383 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10384 default: gcc_unreachable ();
10385 }
10386 dw2_asm_output_data (1, ut, "%s", name);
10387 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10388 }
10389 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10390 debug_abbrev_section,
10391 "Offset Into Abbrev. Section");
10392 if (dwarf_version < 5)
10393 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10394 }
10395
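/* Illustrative layout (not part of the original source): for 32-bit
   DWARF 5 and DW_UT_compile the function above emits, in order, a 4-byte
   unit length, a 2-byte version number (5), a 1-byte unit type, a 1-byte
   address size and a 4-byte offset into .debug_abbrev, whereas for
   DWARF 2-4 there is no unit type byte and the address size byte follows
   the abbrev offset instead of preceding it.  */
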
10396 /* Output the compilation unit DIE and its children. */
10397
10398 static void
10399 output_comp_unit (dw_die_ref die, int output_if_empty,
10400 const unsigned char *dwo_id)
10401 {
10402 const char *secname, *oldsym;
10403 char *tmp;
10404
10405 /* Unless we are outputting the main CU, we may throw away empty ones. */
10406 if (!output_if_empty && die->die_child == NULL)
10407 return;
10408
10409 /* Even if there are no children of this DIE, we must output the information
10410 about the compilation unit. Otherwise, on an empty translation unit, we
10411 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10412 will then complain when examining the file. First mark all the DIEs in
10413 this CU so we know which get local refs. */
10414 mark_dies (die);
10415
10416 external_ref_hash_type *extern_map = optimize_external_refs (die);
10417
10418 /* For now, optimize only the main CU; in order to optimize the rest
10419 we'd need to see all of them earlier. Leave the rest for post-linking
10420 tools like DWZ. */
10421 if (die == comp_unit_die ())
10422 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10423
10424 build_abbrev_table (die, extern_map);
10425
10426 optimize_abbrev_table ();
10427
10428 delete extern_map;
10429
10430 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10431 next_die_offset = (dwo_id
10432 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10433 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10434 calc_die_sizes (die);
10435
10436 oldsym = die->die_id.die_symbol;
10437 if (oldsym && die->comdat_type_p)
10438 {
10439 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10440
10441 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10442 secname = tmp;
10443 die->die_id.die_symbol = NULL;
10444 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10445 }
10446 else
10447 {
10448 switch_to_section (debug_info_section);
10449 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10450 info_section_emitted = true;
10451 }
10452
10453 /* For LTO cross unit DIE refs we want a symbol on the start of the
10454 debuginfo section, not on the CU DIE. */
10455 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10456 {
10457 /* ??? No way to get visibility assembled without a decl. */
10458 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10459 get_identifier (oldsym), char_type_node);
10460 TREE_PUBLIC (decl) = true;
10461 TREE_STATIC (decl) = true;
10462 DECL_ARTIFICIAL (decl) = true;
10463 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10464 DECL_VISIBILITY_SPECIFIED (decl) = true;
10465 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10466 #ifdef ASM_WEAKEN_LABEL
10467 /* We prefer a .weak because that handles duplicates from duplicate
10468 archive members in a graceful way. */
10469 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10470 #else
10471 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10472 #endif
10473 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10474 }
10475
10476 /* Output debugging information. */
10477 output_compilation_unit_header (dwo_id
10478 ? DW_UT_split_compile : DW_UT_compile);
10479 if (dwarf_version >= 5)
10480 {
10481 if (dwo_id != NULL)
10482 for (int i = 0; i < 8; i++)
10483 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10484 }
10485 output_die (die);
10486
10487 /* Leave the marks on the main CU, so we can check them in
10488 output_pubnames. */
10489 if (oldsym)
10490 {
10491 unmark_dies (die);
10492 die->die_id.die_symbol = oldsym;
10493 }
10494 }
10495
10496 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10497 and .debug_pubtypes. This is configured per-target, but can be
10498 overridden by the -gpubnames or -gno-pubnames options. */
10499
10500 static inline bool
10501 want_pubnames (void)
10502 {
10503 if (debug_info_level <= DINFO_LEVEL_TERSE)
10504 return false;
10505 if (debug_generate_pub_sections != -1)
10506 return debug_generate_pub_sections;
10507 return targetm.want_debug_pub_sections;
10508 }
10509
10510 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10511
10512 static void
10513 add_AT_pubnames (dw_die_ref die)
10514 {
10515 if (want_pubnames ())
10516 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10517 }
10518
10519 /* Add a string attribute value to a skeleton DIE. */
10520
10521 static inline void
10522 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10523 const char *str)
10524 {
10525 dw_attr_node attr;
10526 struct indirect_string_node *node;
10527
10528 if (! skeleton_debug_str_hash)
10529 skeleton_debug_str_hash
10530 = hash_table<indirect_string_hasher>::create_ggc (10);
10531
10532 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
10533 find_string_form (node);
10534 if (node->form == DW_FORM_GNU_str_index)
10535 node->form = DW_FORM_strp;
10536
10537 attr.dw_attr = attr_kind;
10538 attr.dw_attr_val.val_class = dw_val_class_str;
10539 attr.dw_attr_val.val_entry = NULL;
10540 attr.dw_attr_val.v.val_str = node;
10541 add_dwarf_attr (die, &attr);
10542 }
10543
10544 /* Helper function to generate top-level dies for skeleton debug_info and
10545 debug_types. */
10546
10547 static void
10548 add_top_level_skeleton_die_attrs (dw_die_ref die)
10549 {
10550 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
10551 const char *comp_dir = comp_dir_string ();
10552
10553 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
10554 if (comp_dir != NULL)
10555 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
10556 add_AT_pubnames (die);
10557 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
10558 }
10559
10560 /* Output skeleton debug sections that point to the dwo file. */
10561
10562 static void
10563 output_skeleton_debug_sections (dw_die_ref comp_unit,
10564 const unsigned char *dwo_id)
10565 {
10566 /* These attributes will be found in the full debug_info section. */
10567 remove_AT (comp_unit, DW_AT_producer);
10568 remove_AT (comp_unit, DW_AT_language);
10569
10570 switch_to_section (debug_skeleton_info_section);
10571 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
10572
10573 /* Produce the skeleton compilation-unit header. This one differs enough from
10574 a normal CU header that it's better not to call
10575 output_compilation_unit_header. */
10576 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10577 dw2_asm_output_data (4, 0xffffffff,
10578 "Initial length escape value indicating 64-bit "
10579 "DWARF extension");
10580
10581 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10582 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10583 - DWARF_INITIAL_LENGTH_SIZE
10584 + size_of_die (comp_unit),
10585 "Length of Compilation Unit Info");
10586 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10587 if (dwarf_version >= 5)
10588 {
10589 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
10590 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10591 }
10592 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
10593 debug_skeleton_abbrev_section,
10594 "Offset Into Abbrev. Section");
10595 if (dwarf_version < 5)
10596 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10597 else
10598 for (int i = 0; i < 8; i++)
10599 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10600
10601 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
10602 output_die (comp_unit);
10603
10604 /* Build the skeleton debug_abbrev section. */
10605 switch_to_section (debug_skeleton_abbrev_section);
10606 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
10607
10608 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
10609
10610 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
10611 }
10612
10613 /* Output a comdat type unit DIE and its children. */
10614
10615 static void
10616 output_comdat_type_unit (comdat_type_node *node)
10617 {
10618 const char *secname;
10619 char *tmp;
10620 int i;
10621 #if defined (OBJECT_FORMAT_ELF)
10622 tree comdat_key;
10623 #endif
10624
10625 /* First mark all the DIEs in this CU so we know which get local refs. */
10626 mark_dies (node->root_die);
10627
10628 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
10629
10630 build_abbrev_table (node->root_die, extern_map);
10631
10632 delete extern_map;
10633 extern_map = NULL;
10634
10635 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10636 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
10637 calc_die_sizes (node->root_die);
10638
10639 #if defined (OBJECT_FORMAT_ELF)
10640 if (dwarf_version >= 5)
10641 {
10642 if (!dwarf_split_debug_info)
10643 secname = ".debug_info";
10644 else
10645 secname = ".debug_info.dwo";
10646 }
10647 else if (!dwarf_split_debug_info)
10648 secname = ".debug_types";
10649 else
10650 secname = ".debug_types.dwo";
10651
10652 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10653 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
10654 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10655 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
10656 comdat_key = get_identifier (tmp);
10657 targetm.asm_out.named_section (secname,
10658 SECTION_DEBUG | SECTION_LINKONCE,
10659 comdat_key);
10660 #else
10661 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
10662 sprintf (tmp, (dwarf_version >= 5
10663 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
10664 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10665 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
10666 secname = tmp;
10667 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10668 #endif
10669
10670 /* Output debugging information. */
10671 output_compilation_unit_header (dwarf_split_debug_info
10672 ? DW_UT_split_type : DW_UT_type);
10673 output_signature (node->signature, "Type Signature");
10674 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
10675 "Offset to Type DIE");
10676 output_die (node->root_die);
10677
10678 unmark_dies (node->root_die);
10679 }
10680
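/* Illustrative section naming (not part of the original source): for a
   type whose 8-byte signature is de ad be ef 01 23 45 67, the ELF path
   above places the unit in .debug_info (DWARF 5) or .debug_types
   (DWARF < 5), with a .dwo suffix when splitting debug info, inside a
   COMDAT group keyed "wi.deadbeef01234567" or "wt.deadbeef01234567"; the
   non-ELF path instead switches to a section named
   .gnu.linkonce.wi.deadbeef01234567 or .gnu.linkonce.wt.<signature>.  */
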
10681 /* Return the DWARF2/3 pubname associated with a decl. */
10682
10683 static const char *
10684 dwarf2_name (tree decl, int scope)
10685 {
10686 if (DECL_NAMELESS (decl))
10687 return NULL;
10688 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
10689 }
10690
10691 /* Add a new entry to .debug_pubnames if appropriate. */
10692
10693 static void
10694 add_pubname_string (const char *str, dw_die_ref die)
10695 {
10696 pubname_entry e;
10697
10698 e.die = die;
10699 e.name = xstrdup (str);
10700 vec_safe_push (pubname_table, e);
10701 }
10702
10703 static void
10704 add_pubname (tree decl, dw_die_ref die)
10705 {
10706 if (!want_pubnames ())
10707 return;
10708
10709 /* Don't add items to the table when we expect that the consumer will have
10710 just read the enclosing die. For example, if the consumer is looking at a
10711 class_member, it will either be inside the class already, or will have just
10712 looked up the class to find the member. Either way, searching the class is
10713 faster than searching the index. */
10714 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
10715 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10716 {
10717 const char *name = dwarf2_name (decl, 1);
10718
10719 if (name)
10720 add_pubname_string (name, die);
10721 }
10722 }
10723
10724 /* Add an enumerator to the pubnames section. */
10725
10726 static void
10727 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
10728 {
10729 pubname_entry e;
10730
10731 gcc_assert (scope_name);
10732 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
10733 e.die = die;
10734 vec_safe_push (pubname_table, e);
10735 }
10736
10737 /* Add a new entry to .debug_pubtypes if appropriate. */
10738
10739 static void
10740 add_pubtype (tree decl, dw_die_ref die)
10741 {
10742 pubname_entry e;
10743
10744 if (!want_pubnames ())
10745 return;
10746
10747 if ((TREE_PUBLIC (decl)
10748 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
10749 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
10750 {
10751 tree scope = NULL;
10752 const char *scope_name = "";
10753 const char *sep = is_cxx () ? "::" : ".";
10754 const char *name;
10755
10756 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
10757 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
10758 {
10759 scope_name = lang_hooks.dwarf_name (scope, 1);
10760 if (scope_name != NULL && scope_name[0] != '\0')
10761 scope_name = concat (scope_name, sep, NULL);
10762 else
10763 scope_name = "";
10764 }
10765
10766 if (TYPE_P (decl))
10767 name = type_tag (decl);
10768 else
10769 name = lang_hooks.dwarf_name (decl, 1);
10770
10771 /* If we don't have a name for the type, there's no point in adding
10772 it to the table. */
10773 if (name != NULL && name[0] != '\0')
10774 {
10775 e.die = die;
10776 e.name = concat (scope_name, name, NULL);
10777 vec_safe_push (pubtype_table, e);
10778 }
10779
10780 /* Although it might be more consistent to add the pubinfo for the
10781 enumerators as their dies are created, they should only be added if the
10782 enum type meets the criteria above. So rather than re-check the parent
10783 enum type whenever an enumerator die is created, just output them all
10784 here. This isn't protected by the name conditional because anonymous
10785 enums don't have names. */
10786 if (die->die_tag == DW_TAG_enumeration_type)
10787 {
10788 dw_die_ref c;
10789
10790 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
10791 }
10792 }
10793 }
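/* As a rough illustration (hypothetical C++ input): for a type declared as
   namespace N { struct S { ... }; } the code above emits a .debug_pubtypes
   entry named "N::S", since only namespace scopes are prepended here; a class
   nested inside another class gets no scope prefix, and anonymous enums fail
   the name check but still have their named enumerators added.  */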
10794
10795 /* Output a single entry in the pubnames table. */
10796
10797 static void
10798 output_pubname (dw_offset die_offset, pubname_entry *entry)
10799 {
10800 dw_die_ref die = entry->die;
10801 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
10802
10803 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
10804
10805 if (debug_generate_pub_sections == 2)
10806 {
10807 /* This logic follows gdb's method for determining the value of the flag
10808 byte. */
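	  /* The 32-bit gdb-index symbol value keeps the CU number in its low
	     GDB_INDEX_CU_BITSIZE bits; shifting those away below leaves just
	     the symbol-kind and "static" bits, emitted as a single flag byte.
	     For example, a global function would typically get
	     GDB_INDEX_SYMBOL_KIND_FUNCTION with the static bit clear, while a
	     file-scope static variable would get the VARIABLE kind with the
	     static bit set (is_static above is derived from DW_AT_external).  */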
10809 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
10810 switch (die->die_tag)
10811 {
10812 case DW_TAG_typedef:
10813 case DW_TAG_base_type:
10814 case DW_TAG_subrange_type:
10815 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10816 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10817 break;
10818 case DW_TAG_enumerator:
10819 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10820 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10821 if (!is_cxx ())
10822 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10823 break;
10824 case DW_TAG_subprogram:
10825 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10826 GDB_INDEX_SYMBOL_KIND_FUNCTION);
10827 if (!is_ada ())
10828 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10829 break;
10830 case DW_TAG_constant:
10831 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10832 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10833 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10834 break;
10835 case DW_TAG_variable:
10836 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
10837 GDB_INDEX_SYMBOL_KIND_VARIABLE);
10838 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
10839 break;
10840 case DW_TAG_namespace:
10841 case DW_TAG_imported_declaration:
10842 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10843 break;
10844 case DW_TAG_class_type:
10845 case DW_TAG_interface_type:
10846 case DW_TAG_structure_type:
10847 case DW_TAG_union_type:
10848 case DW_TAG_enumeration_type:
10849 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
10850 if (!is_cxx ())
10851 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
10852 break;
10853 default:
10854 /* An unusual tag. Leave the flag-byte empty. */
10855 break;
10856 }
10857 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
10858 "GDB-index flags");
10859 }
10860
10861 dw2_asm_output_nstring (entry->name, -1, "external name");
10862 }
10863
10864
10865 /* Output the public names table used to speed up access to externally
10866 visible names; or the public types table used to find type definitions. */
10867
10868 static void
10869 output_pubnames (vec<pubname_entry, va_gc> *names)
10870 {
10871 unsigned i;
10872 unsigned long pubnames_length = size_of_pubnames (names);
10873 pubname_entry *pub;
10874
10875 if (!XCOFF_DEBUGGING_INFO)
10876 {
10877 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10878 dw2_asm_output_data (4, 0xffffffff,
10879 "Initial length escape value indicating 64-bit DWARF extension");
10880 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
10881 "Pub Info Length");
10882 }
10883
10884 /* Version number for pubnames/pubtypes is independent of dwarf version. */
10885 dw2_asm_output_data (2, 2, "DWARF Version");
10886
10887 if (dwarf_split_debug_info)
10888 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10889 debug_skeleton_info_section,
10890 "Offset of Compilation Unit Info");
10891 else
10892 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10893 debug_info_section,
10894 "Offset of Compilation Unit Info");
10895 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
10896 "Compilation Unit Length");
10897
10898 FOR_EACH_VEC_ELT (*names, i, pub)
10899 {
10900 if (include_pubname_in_output (names, pub))
10901 {
10902 dw_offset die_offset = pub->die->die_offset;
10903
10904 /* We shouldn't see pubnames for DIEs outside of the main CU. */
10905 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
10906 gcc_assert (pub->die->die_mark);
10907
10908 /* If we're putting types in their own .debug_types sections,
10909 the .debug_pubtypes table will still point to the compile
10910 unit (not the type unit), so we want to use the offset of
10911 the skeleton DIE (if there is one). */
10912 if (pub->die->comdat_type_p && names == pubtype_table)
10913 {
10914 comdat_type_node *type_node = pub->die->die_id.die_type_node;
10915
10916 if (type_node != NULL)
10917 die_offset = (type_node->skeleton_die != NULL
10918 ? type_node->skeleton_die->die_offset
10919 : comp_unit_die ()->die_offset);
10920 }
10921
10922 output_pubname (die_offset, pub);
10923 }
10924 }
10925
10926 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
10927 }
10928
10929 /* Output public names and types tables if necessary. */
10930
10931 static void
10932 output_pubtables (void)
10933 {
10934 if (!want_pubnames () || !info_section_emitted)
10935 return;
10936
10937 switch_to_section (debug_pubnames_section);
10938 output_pubnames (pubname_table);
10939 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
10940 It shouldn't hurt to emit it always, since pure DWARF2 consumers
10941 simply won't look for the section. */
10942 switch_to_section (debug_pubtypes_section);
10943 output_pubnames (pubtype_table);
10944 }
10945
10946
10947 /* Output the information that goes into the .debug_aranges table.
10948 Namely, define the beginning and ending address range of the
10949 text section generated for this compilation unit. */
10950
10951 static void
10952 output_aranges (void)
10953 {
10954 unsigned i;
10955 unsigned long aranges_length = size_of_aranges ();
10956
10957 if (!XCOFF_DEBUGGING_INFO)
10958 {
10959 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10960 dw2_asm_output_data (4, 0xffffffff,
10961 "Initial length escape value indicating 64-bit DWARF extension");
10962 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
10963 "Length of Address Ranges Info");
10964 }
10965
10966 /* Version number for aranges is still 2, even up to DWARF5. */
10967 dw2_asm_output_data (2, 2, "DWARF Version");
10968 if (dwarf_split_debug_info)
10969 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
10970 debug_skeleton_info_section,
10971 "Offset of Compilation Unit Info");
10972 else
10973 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
10974 debug_info_section,
10975 "Offset of Compilation Unit Info");
10976 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
10977 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
10978
10979 /* We need to align to twice the pointer size here. */
10980 if (DWARF_ARANGES_PAD_SIZE)
10981 {
10982 /* Pad using 2-byte words so that padding is correct for any
10983 pointer size. */
10984 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
10985 2 * DWARF2_ADDR_SIZE);
10986 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
10987 dw2_asm_output_data (2, 0, NULL);
10988 }
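  /* A worked example of the padding just emitted, assuming the common 32-bit
     DWARF case: the header above is 4 (unit length) + 2 (version) + 4 (CU
     offset) + 1 (address size) + 1 (segment size) = 12 bytes, so with 8-byte
     addresses DWARF_ARANGES_PAD_SIZE would be 4 and two zero half-words are
     emitted to bring the first address/length pair to a 16-byte boundary.  */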
10989
10990 /* It is necessary not to output these entries if the sections were
10991 not used; in that case the length will be 0 and the address may
10992 end up as 0 if the section is discarded by ld --gc-sections,
10993 leaving an invalid (0, 0) entry that can be confused with the
10994 terminator. */
10995 if (text_section_used)
10996 {
10997 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
10998 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
10999 text_section_label, "Length");
11000 }
11001 if (cold_text_section_used)
11002 {
11003 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11004 "Address");
11005 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11006 cold_text_section_label, "Length");
11007 }
11008
11009 if (have_multiple_function_sections)
11010 {
11011 unsigned fde_idx;
11012 dw_fde_ref fde;
11013
11014 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11015 {
11016 if (DECL_IGNORED_P (fde->decl))
11017 continue;
11018 if (!fde->in_std_section)
11019 {
11020 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11021 "Address");
11022 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11023 fde->dw_fde_begin, "Length");
11024 }
11025 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11026 {
11027 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11028 "Address");
11029 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11030 fde->dw_fde_second_begin, "Length");
11031 }
11032 }
11033 }
11034
11035 /* Output the terminator words. */
11036 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11037 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11038 }
11039
11040 /* Add a new entry to .debug_ranges. Return its index into
11041 ranges_table vector. */
11042
11043 static unsigned int
11044 add_ranges_num (int num, bool maybe_new_sec)
11045 {
11046 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11047 vec_safe_push (ranges_table, r);
11048 return vec_safe_length (ranges_table) - 1;
11049 }
11050
11051 /* Add a new entry to .debug_ranges corresponding to a block, or a
11052 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11053 this entry might be in a different section from previous range. */
11054
11055 static unsigned int
11056 add_ranges (const_tree block, bool maybe_new_sec)
11057 {
11058 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11059 }
11060
11061 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11062 chain, or a middle entry of a chain that will be directly referred to. */
11063
11064 static void
11065 note_rnglist_head (unsigned int offset)
11066 {
11067 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11068 return;
11069 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11070 }
11071
11072 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11073 When using dwarf_split_debug_info, address attributes in dies destined
11074 for the final executable should be direct references--setting the
11075 parameter force_direct ensures this behavior. */
11076
11077 static void
11078 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11079 bool *added, bool force_direct)
11080 {
11081 unsigned int in_use = vec_safe_length (ranges_by_label);
11082 unsigned int offset;
11083 dw_ranges_by_label rbl = { begin, end };
11084 vec_safe_push (ranges_by_label, rbl);
11085 offset = add_ranges_num (-(int)in_use - 1, true);
11086 if (!*added)
11087 {
11088 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11089 *added = true;
11090 note_rnglist_head (offset);
11091 }
11092 }
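/* To illustrate the encoding used above: the first begin/end label pair pushed
   onto ranges_by_label is recorded in ranges_table with num == -1, the second
   with num == -2, and so on; output_ranges and output_rnglists recover the
   index as -num - 1.  Positive num values are BLOCK_NUMBERs and 0 marks a
   list terminator.  */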
11093
11094 /* Emit .debug_ranges section. */
11095
11096 static void
11097 output_ranges (void)
11098 {
11099 unsigned i;
11100 static const char *const start_fmt = "Offset %#x";
11101 const char *fmt = start_fmt;
11102 dw_ranges *r;
11103
11104 switch_to_section (debug_ranges_section);
11105 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11106 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11107 {
11108 int block_num = r->num;
11109
11110 if (block_num > 0)
11111 {
11112 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11113 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11114
11115 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11116 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11117
11118 /* If all code is in the text section, then the compilation
11119 unit base address defaults to DW_AT_low_pc, which is the
11120 base of the text section. */
11121 if (!have_multiple_function_sections)
11122 {
11123 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11124 text_section_label,
11125 fmt, i * 2 * DWARF2_ADDR_SIZE);
11126 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11127 text_section_label, NULL);
11128 }
11129
11130 /* Otherwise, the compilation unit base address is zero,
11131 which allows us to use absolute addresses, and not worry
11132 about whether the target supports cross-section
11133 arithmetic. */
11134 else
11135 {
11136 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11137 fmt, i * 2 * DWARF2_ADDR_SIZE);
11138 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11139 }
11140
11141 fmt = NULL;
11142 }
11143
11144 /* Negative block_num stands for an index into ranges_by_label. */
11145 else if (block_num < 0)
11146 {
11147 int lab_idx = - block_num - 1;
11148
11149 if (!have_multiple_function_sections)
11150 {
11151 gcc_unreachable ();
11152 #if 0
11153 /* If we ever use add_ranges_by_labels () for a single
11154 function section, all we have to do is to take out
11155 the #if 0 above. */
11156 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11157 (*ranges_by_label)[lab_idx].begin,
11158 text_section_label,
11159 fmt, i * 2 * DWARF2_ADDR_SIZE);
11160 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11161 (*ranges_by_label)[lab_idx].end,
11162 text_section_label, NULL);
11163 #endif
11164 }
11165 else
11166 {
11167 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11168 (*ranges_by_label)[lab_idx].begin,
11169 fmt, i * 2 * DWARF2_ADDR_SIZE);
11170 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11171 (*ranges_by_label)[lab_idx].end,
11172 NULL);
11173 }
11174 }
11175 else
11176 {
11177 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11178 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11179 fmt = start_fmt;
11180 }
11181 }
11182 }
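/* A rough sketch of what the loop above places in .debug_ranges (label names
   are only illustrative): with a single text section each block contributes a
   pair of section-relative offsets, typically .LBBn - text_section_label and
   .LBEn - text_section_label, interpreted against the CU base address
   (DW_AT_low_pc); with multiple function sections absolute begin/end addresses
   are emitted instead; every list ends with a pair of zero words.  */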
11183
11184 /* Non-zero if .debug_line_str should be used for .debug_line section
11185 strings or strings that are likely shareable with those. */
11186 #define DWARF5_USE_DEBUG_LINE_STR \
11187 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11188 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11189 /* FIXME: there is no .debug_line_str.dwo section, \
11190 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11191 && !dwarf_split_debug_info)
11192
11193 /* Assign .debug_rnglists indexes. */
11194
11195 static void
11196 index_rnglists (void)
11197 {
11198 unsigned i;
11199 dw_ranges *r;
11200
11201 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11202 if (r->label)
11203 r->idx = rnglist_idx++;
11204 }
11205
11206 /* Emit .debug_rnglists section. */
11207
11208 static void
11209 output_rnglists (unsigned generation)
11210 {
11211 unsigned i;
11212 dw_ranges *r;
11213 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11214 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11215 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11216
11217 switch_to_section (debug_ranges_section);
11218 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11219 /* There are up to 4 unique ranges labels per generation.
11220 See also init_sections_and_labels. */
11221 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11222 2 + generation * 4);
11223 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11224 3 + generation * 4);
11225 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11226 dw2_asm_output_data (4, 0xffffffff,
11227 "Initial length escape value indicating "
11228 "64-bit DWARF extension");
11229 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11230 "Length of Range Lists");
11231 ASM_OUTPUT_LABEL (asm_out_file, l1);
11232 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11233 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11234 dw2_asm_output_data (1, 0, "Segment Size");
11235 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11236 about relocation sizes and primarily care about the size of .debug*
11237 sections in linked shared libraries and executables, then
11238 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11239 into it are usually larger than just DW_FORM_sec_offset offsets
11240 into the .debug_rnglists section. */
11241 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11242 "Offset Entry Count");
11243 if (dwarf_split_debug_info)
11244 {
11245 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11246 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11247 if (r->label)
11248 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11249 ranges_base_label, NULL);
11250 }
11251
11252 const char *lab = "";
11253 unsigned int len = vec_safe_length (ranges_table);
11254 const char *base = NULL;
11255 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11256 {
11257 int block_num = r->num;
11258
11259 if (r->label)
11260 {
11261 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11262 lab = r->label;
11263 }
11264 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11265 base = NULL;
11266 if (block_num > 0)
11267 {
11268 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11269 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11270
11271 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11272 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11273
11274 if (HAVE_AS_LEB128)
11275 {
11276 /* If all code is in the text section, then the compilation
11277 unit base address defaults to DW_AT_low_pc, which is the
11278 base of the text section. */
11279 if (!have_multiple_function_sections)
11280 {
11281 dw2_asm_output_data (1, DW_RLE_offset_pair,
11282 "DW_RLE_offset_pair (%s)", lab);
11283 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11284 "Range begin address (%s)", lab);
11285 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11286 "Range end address (%s)", lab);
11287 continue;
11288 }
11289 if (base == NULL)
11290 {
11291 dw_ranges *r2 = NULL;
11292 if (i < len - 1)
11293 r2 = &(*ranges_table)[i + 1];
11294 if (r2
11295 && r2->num != 0
11296 && r2->label == NULL
11297 && !r2->maybe_new_sec)
11298 {
11299 dw2_asm_output_data (1, DW_RLE_base_address,
11300 "DW_RLE_base_address (%s)", lab);
11301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11302 "Base address (%s)", lab);
11303 strcpy (basebuf, blabel);
11304 base = basebuf;
11305 }
11306 }
11307 if (base)
11308 {
11309 dw2_asm_output_data (1, DW_RLE_offset_pair,
11310 "DW_RLE_offset_pair (%s)", lab);
11311 dw2_asm_output_delta_uleb128 (blabel, base,
11312 "Range begin address (%s)", lab);
11313 dw2_asm_output_delta_uleb128 (elabel, base,
11314 "Range end address (%s)", lab);
11315 continue;
11316 }
11317 dw2_asm_output_data (1, DW_RLE_start_length,
11318 "DW_RLE_start_length (%s)", lab);
11319 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11320 "Range begin address (%s)", lab);
11321 dw2_asm_output_delta_uleb128 (elabel, blabel,
11322 "Range length (%s)", lab);
11323 }
11324 else
11325 {
11326 dw2_asm_output_data (1, DW_RLE_start_end,
11327 "DW_RLE_start_end (%s)", lab);
11328 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11329 "Range begin address (%s)", lab);
11330 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11331 "Range end address (%s)", lab);
11332 }
11333 }
11334
11335 /* Negative block_num stands for an index into ranges_by_label. */
11336 else if (block_num < 0)
11337 {
11338 int lab_idx = - block_num - 1;
11339 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11340 const char *elabel = (*ranges_by_label)[lab_idx].end;
11341
11342 if (!have_multiple_function_sections)
11343 gcc_unreachable ();
11344 if (HAVE_AS_LEB128)
11345 {
11346 dw2_asm_output_data (1, DW_RLE_start_length,
11347 "DW_RLE_start_length (%s)", lab);
11348 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11349 "Range begin address (%s)", lab);
11350 dw2_asm_output_delta_uleb128 (elabel, blabel,
11351 "Range length (%s)", lab);
11352 }
11353 else
11354 {
11355 dw2_asm_output_data (1, DW_RLE_start_end,
11356 "DW_RLE_start_end (%s)", lab);
11357 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11358 "Range begin address (%s)", lab);
11359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11360 "Range end address (%s)", lab);
11361 }
11362 }
11363 else
11364 dw2_asm_output_data (1, DW_RLE_end_of_list,
11365 "DW_RLE_end_of_list (%s)", lab);
11366 }
11367 ASM_OUTPUT_LABEL (asm_out_file, l2);
11368 }
11369
11370 /* Data structure containing information about input files. */
11371 struct file_info
11372 {
11373 const char *path; /* Complete file name. */
11374 const char *fname; /* File name part. */
11375 int length; /* Length of entire string. */
11376 struct dwarf_file_data * file_idx; /* Index in input file table. */
11377 int dir_idx; /* Index in directory table. */
11378 };
11379
11380 /* Data structure containing information about directories with source
11381 files. */
11382 struct dir_info
11383 {
11384 const char *path; /* Path including directory name. */
11385 int length; /* Path length. */
11386 int prefix; /* Index of directory entry which is a prefix. */
11387 int count; /* Number of files in this directory. */
11388 int dir_idx; /* Index of directory used as base. */
11389 };
11390
11391 /* Callback function for file_info comparison. We sort by looking at
11392 the directories in the path. */
11393
11394 static int
11395 file_info_cmp (const void *p1, const void *p2)
11396 {
11397 const struct file_info *const s1 = (const struct file_info *) p1;
11398 const struct file_info *const s2 = (const struct file_info *) p2;
11399 const unsigned char *cp1;
11400 const unsigned char *cp2;
11401
11402 /* Take care of file names without directories. We need to make sure that
11403 we return consistent values to qsort, since some implementations will get confused if
11404 we return the same value when identical operands are passed in opposite
11405 orders. So if neither has a directory, return 0 and otherwise return
11406 1 or -1 depending on which one has the directory. */
11407 if ((s1->path == s1->fname || s2->path == s2->fname))
11408 return (s2->path == s2->fname) - (s1->path == s1->fname);
11409
11410 cp1 = (const unsigned char *) s1->path;
11411 cp2 = (const unsigned char *) s2->path;
11412
11413 while (1)
11414 {
11415 ++cp1;
11416 ++cp2;
11417 /* Reached the end of either path's directory part? If so, handle it like the case above. */
11418 if ((cp1 == (const unsigned char *) s1->fname)
11419 || (cp2 == (const unsigned char *) s2->fname))
11420 return ((cp2 == (const unsigned char *) s2->fname)
11421 - (cp1 == (const unsigned char *) s1->fname));
11422
11423 /* Character of current path component the same? */
11424 else if (*cp1 != *cp2)
11425 return *cp1 - *cp2;
11426 }
11427 }
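/* A rough summary of the resulting order: files with no directory component
   compare equal to each other and sort ahead of files that have one, and the
   remaining files are ordered by the bytes of their directory prefix, so
   files sharing a directory end up adjacent, which is what the directory
   table construction in output_file_names relies on.  */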
11428
11429 struct file_name_acquire_data
11430 {
11431 struct file_info *files;
11432 int used_files;
11433 int max_files;
11434 };
11435
11436 /* Traversal function for the hash table. */
11437
11438 int
11439 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11440 {
11441 struct dwarf_file_data *d = *slot;
11442 struct file_info *fi;
11443 const char *f;
11444
11445 gcc_assert (fnad->max_files >= d->emitted_number);
11446
11447 if (! d->emitted_number)
11448 return 1;
11449
11450 gcc_assert (fnad->max_files != fnad->used_files);
11451
11452 fi = fnad->files + fnad->used_files++;
11453
11454 /* Skip all leading "./". */
11455 f = d->filename;
11456 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11457 f += 2;
11458
11459 /* Create a new array entry. */
11460 fi->path = f;
11461 fi->length = strlen (f);
11462 fi->file_idx = d;
11463
11464 /* Search for the file name part. */
11465 f = strrchr (f, DIR_SEPARATOR);
11466 #if defined (DIR_SEPARATOR_2)
11467 {
11468 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11469
11470 if (g != NULL)
11471 {
11472 if (f == NULL || f < g)
11473 f = g;
11474 }
11475 }
11476 #endif
11477
11478 fi->fname = f == NULL ? fi->path : f + 1;
11479 return 1;
11480 }
11481
11482 /* Helper function for output_file_names. Emit a FORM encoded
11483 string STR, with assembly comment start ENTRY_KIND and
11484 index IDX */
11485
11486 static void
11487 output_line_string (enum dwarf_form form, const char *str,
11488 const char *entry_kind, unsigned int idx)
11489 {
11490 switch (form)
11491 {
11492 case DW_FORM_string:
11493 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11494 break;
11495 case DW_FORM_line_strp:
11496 if (!debug_line_str_hash)
11497 debug_line_str_hash
11498 = hash_table<indirect_string_hasher>::create_ggc (10);
11499
11500 struct indirect_string_node *node;
11501 node = find_AT_string_in_table (str, debug_line_str_hash);
11502 set_indirect_string (node);
11503 node->form = form;
11504 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11505 debug_line_str_section, "%s: %#x: \"%s\"",
11506 entry_kind, 0, node->str);
11507 break;
11508 default:
11509 gcc_unreachable ();
11510 }
11511 }
11512
11513 /* Output the directory table and the file name table. We try to minimize
11514 the total amount of memory needed. A heuristic is used to avoid large
11515 slowdowns with many input files. */
11516
11517 static void
11518 output_file_names (void)
11519 {
11520 struct file_name_acquire_data fnad;
11521 int numfiles;
11522 struct file_info *files;
11523 struct dir_info *dirs;
11524 int *saved;
11525 int *savehere;
11526 int *backmap;
11527 int ndirs;
11528 int idx_offset;
11529 int i;
11530
11531 if (!last_emitted_file)
11532 {
11533 if (dwarf_version >= 5)
11534 {
11535 dw2_asm_output_data (1, 0, "Directory entry format count");
11536 dw2_asm_output_data_uleb128 (0, "Directories count");
11537 dw2_asm_output_data (1, 0, "File name entry format count");
11538 dw2_asm_output_data_uleb128 (0, "File names count");
11539 }
11540 else
11541 {
11542 dw2_asm_output_data (1, 0, "End directory table");
11543 dw2_asm_output_data (1, 0, "End file name table");
11544 }
11545 return;
11546 }
11547
11548 numfiles = last_emitted_file->emitted_number;
11549
11550 /* Allocate the various arrays we need. */
11551 files = XALLOCAVEC (struct file_info, numfiles);
11552 dirs = XALLOCAVEC (struct dir_info, numfiles);
11553
11554 fnad.files = files;
11555 fnad.used_files = 0;
11556 fnad.max_files = numfiles;
11557 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
11558 gcc_assert (fnad.used_files == fnad.max_files);
11559
11560 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
11561
11562 /* Find all the different directories used. */
11563 dirs[0].path = files[0].path;
11564 dirs[0].length = files[0].fname - files[0].path;
11565 dirs[0].prefix = -1;
11566 dirs[0].count = 1;
11567 dirs[0].dir_idx = 0;
11568 files[0].dir_idx = 0;
11569 ndirs = 1;
11570
11571 for (i = 1; i < numfiles; i++)
11572 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
11573 && memcmp (dirs[ndirs - 1].path, files[i].path,
11574 dirs[ndirs - 1].length) == 0)
11575 {
11576 /* Same directory as last entry. */
11577 files[i].dir_idx = ndirs - 1;
11578 ++dirs[ndirs - 1].count;
11579 }
11580 else
11581 {
11582 int j;
11583
11584 /* This is a new directory. */
11585 dirs[ndirs].path = files[i].path;
11586 dirs[ndirs].length = files[i].fname - files[i].path;
11587 dirs[ndirs].count = 1;
11588 dirs[ndirs].dir_idx = ndirs;
11589 files[i].dir_idx = ndirs;
11590
11591 /* Search for a prefix. */
11592 dirs[ndirs].prefix = -1;
11593 for (j = 0; j < ndirs; j++)
11594 if (dirs[j].length < dirs[ndirs].length
11595 && dirs[j].length > 1
11596 && (dirs[ndirs].prefix == -1
11597 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
11598 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
11599 dirs[ndirs].prefix = j;
11600
11601 ++ndirs;
11602 }
11603
11604 /* Now to the actual work. We have to find a subset of the directories which
11605 allows expressing the file names using references to the directory table
11606 with the fewest characters. We do not do an exhaustive search
11607 where we would have to check out every combination of every single
11608 possible prefix. Instead we use a heuristic which provides nearly optimal
11609 results in most cases and is never far off. */
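  /* A small worked example of this heuristic, with made-up paths: say three
     files live in /usr/src/gcc/ and two more in /usr/src/.  Considering
     /usr/src/ as dirs[i], it is a prefix of /usr/src/gcc/, so emitting it as
     a directory entry would save strlen ("/usr/src/") characters for each of
     the five files; that easily exceeds the cost of the one extra table entry
     (its length plus a terminating byte), so those files may then be recorded
     as relative to it, unless a longer prefix considered later saves more.  */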
11610 saved = XALLOCAVEC (int, ndirs);
11611 savehere = XALLOCAVEC (int, ndirs);
11612
11613 memset (saved, '\0', ndirs * sizeof (saved[0]));
11614 for (i = 0; i < ndirs; i++)
11615 {
11616 int j;
11617 int total;
11618
11619 /* We can always save some space for the current directory. But this
11620 does not mean it will be enough to justify adding the directory. */
11621 savehere[i] = dirs[i].length;
11622 total = (savehere[i] - saved[i]) * dirs[i].count;
11623
11624 for (j = i + 1; j < ndirs; j++)
11625 {
11626 savehere[j] = 0;
11627 if (saved[j] < dirs[i].length)
11628 {
11629 /* Determine whether the dirs[i] path is a prefix of the
11630 dirs[j] path. */
11631 int k;
11632
11633 k = dirs[j].prefix;
11634 while (k != -1 && k != (int) i)
11635 k = dirs[k].prefix;
11636
11637 if (k == (int) i)
11638 {
11639 /* Yes it is. We can possibly save some memory by
11640 writing the filenames in dirs[j] relative to
11641 dirs[i]. */
11642 savehere[j] = dirs[i].length;
11643 total += (savehere[j] - saved[j]) * dirs[j].count;
11644 }
11645 }
11646 }
11647
11648 /* Check whether we can save enough to justify adding the dirs[i]
11649 directory. */
11650 if (total > dirs[i].length + 1)
11651 {
11652 /* It's worthwhile adding. */
11653 for (j = i; j < ndirs; j++)
11654 if (savehere[j] > 0)
11655 {
11656 /* Remember how much we saved for this directory so far. */
11657 saved[j] = savehere[j];
11658
11659 /* Remember the prefix directory. */
11660 dirs[j].dir_idx = i;
11661 }
11662 }
11663 }
11664
11665 /* Emit the directory name table. */
11666 idx_offset = dirs[0].length > 0 ? 1 : 0;
11667 enum dwarf_form str_form = DW_FORM_string;
11668 enum dwarf_form idx_form = DW_FORM_udata;
11669 if (dwarf_version >= 5)
11670 {
11671 const char *comp_dir = comp_dir_string ();
11672 if (comp_dir == NULL)
11673 comp_dir = "";
11674 dw2_asm_output_data (1, 1, "Directory entry format count");
11675 if (DWARF5_USE_DEBUG_LINE_STR)
11676 str_form = DW_FORM_line_strp;
11677 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11678 dw2_asm_output_data_uleb128 (str_form, "%s",
11679 get_DW_FORM_name (str_form));
11680 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
11681 if (str_form == DW_FORM_string)
11682 {
11683 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
11684 for (i = 1 - idx_offset; i < ndirs; i++)
11685 dw2_asm_output_nstring (dirs[i].path,
11686 dirs[i].length
11687 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11688 "Directory Entry: %#x", i + idx_offset);
11689 }
11690 else
11691 {
11692 output_line_string (str_form, comp_dir, "Directory Entry", 0);
11693 for (i = 1 - idx_offset; i < ndirs; i++)
11694 {
11695 const char *str
11696 = ggc_alloc_string (dirs[i].path,
11697 dirs[i].length
11698 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
11699 output_line_string (str_form, str, "Directory Entry",
11700 (unsigned) i + idx_offset);
11701 }
11702 }
11703 }
11704 else
11705 {
11706 for (i = 1 - idx_offset; i < ndirs; i++)
11707 dw2_asm_output_nstring (dirs[i].path,
11708 dirs[i].length
11709 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
11710 "Directory Entry: %#x", i + idx_offset);
11711
11712 dw2_asm_output_data (1, 0, "End directory table");
11713 }
11714
11715 /* We have to emit them in the order of emitted_number since that's
11716 used in the debug info generation. To do this efficiently we
11717 generate a back-mapping of the indices first. */
11718 backmap = XALLOCAVEC (int, numfiles);
11719 for (i = 0; i < numfiles; i++)
11720 backmap[files[i].file_idx->emitted_number - 1] = i;
11721
11722 if (dwarf_version >= 5)
11723 {
11724 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
11725 if (filename0 == NULL)
11726 filename0 = "";
11727 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
11728 DW_FORM_data2. Choose one based on the number of directories
11729 and how much space they would occupy in each encoding.
11730 If we have at most 256 directories, all indexes fit into
11731 a single byte, so DW_FORM_data1 is most compact (if there
11732 are at most 128 directories, DW_FORM_udata would be just as
11733 compact, but no shorter, and slower to decode).
11734 if (ndirs + idx_offset <= 256)
11735 idx_form = DW_FORM_data1;
11736 /* If there are more than 65536 directories, we have to use
11737 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
11738 Otherwise, compute how much space the indexes would occupy if they
11739 all used DW_FORM_udata (sum), compare that to the size of the
11740 DW_FORM_data2 encoding, and pick the more efficient one. */
11741 else if (ndirs + idx_offset <= 65536)
11742 {
11743 unsigned HOST_WIDE_INT sum = 1;
11744 for (i = 0; i < numfiles; i++)
11745 {
11746 int file_idx = backmap[i];
11747 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11748 sum += size_of_uleb128 (dir_idx);
11749 }
11750 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
11751 idx_form = DW_FORM_data2;
11752 }
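      /* A rough summary of the choice just made: with up to 256 directories
	 every index fits in one byte, so DW_FORM_data1 wins.  Between 257 and
	 65536 the choice depends on the actual indexes used: if most files
	 reference directories with index 128 or above, their uleb128 encodings
	 need two bytes each and the flat two-byte DW_FORM_data2 is at least as
	 small, otherwise the mostly one-byte DW_FORM_udata stays smaller.  */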
11753 #ifdef VMS_DEBUGGING_INFO
11754 dw2_asm_output_data (1, 4, "File name entry format count");
11755 #else
11756 dw2_asm_output_data (1, 2, "File name entry format count");
11757 #endif
11758 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
11759 dw2_asm_output_data_uleb128 (str_form, "%s",
11760 get_DW_FORM_name (str_form));
11761 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
11762 "DW_LNCT_directory_index");
11763 dw2_asm_output_data_uleb128 (idx_form, "%s",
11764 get_DW_FORM_name (idx_form));
11765 #ifdef VMS_DEBUGGING_INFO
11766 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
11767 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11768 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
11769 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
11770 #endif
11771 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
11772
11773 output_line_string (str_form, filename0, "File Entry", 0);
11774
11775 /* Include directory index. */
11776 if (idx_form != DW_FORM_udata)
11777 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11778 0, NULL);
11779 else
11780 dw2_asm_output_data_uleb128 (0, NULL);
11781
11782 #ifdef VMS_DEBUGGING_INFO
11783 dw2_asm_output_data_uleb128 (0, NULL);
11784 dw2_asm_output_data_uleb128 (0, NULL);
11785 #endif
11786 }
11787
11788 /* Now write all the file names. */
11789 for (i = 0; i < numfiles; i++)
11790 {
11791 int file_idx = backmap[i];
11792 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
11793
11794 #ifdef VMS_DEBUGGING_INFO
11795 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
11796
11797 /* Setting these fields can lead to debugger miscomparisons,
11798 but VMS Debug requires them to be set correctly. */
11799
11800 int ver;
11801 long long cdt;
11802 long siz;
11803 int maxfilelen = (strlen (files[file_idx].path)
11804 + dirs[dir_idx].length
11805 + MAX_VMS_VERSION_LEN + 1);
11806 char *filebuf = XALLOCAVEC (char, maxfilelen);
11807
11808 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
11809 snprintf (filebuf, maxfilelen, "%s;%d",
11810 files[file_idx].path + dirs[dir_idx].length, ver);
11811
11812 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
11813
11814 /* Include directory index. */
11815 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11816 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11817 dir_idx + idx_offset, NULL);
11818 else
11819 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11820
11821 /* Modification time. */
11822 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11823 &cdt, 0, 0, 0) == 0)
11824 ? cdt : 0, NULL);
11825
11826 /* File length in bytes. */
11827 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
11828 0, &siz, 0, 0) == 0)
11829 ? siz : 0, NULL);
11830 #else
11831 output_line_string (str_form,
11832 files[file_idx].path + dirs[dir_idx].length,
11833 "File Entry", (unsigned) i + 1);
11834
11835 /* Include directory index. */
11836 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
11837 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
11838 dir_idx + idx_offset, NULL);
11839 else
11840 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
11841
11842 if (dwarf_version >= 5)
11843 continue;
11844
11845 /* Modification time. */
11846 dw2_asm_output_data_uleb128 (0, NULL);
11847
11848 /* File length in bytes. */
11849 dw2_asm_output_data_uleb128 (0, NULL);
11850 #endif /* VMS_DEBUGGING_INFO */
11851 }
11852
11853 if (dwarf_version < 5)
11854 dw2_asm_output_data (1, 0, "End file name table");
11855 }
11856
11857
11858 /* Output one line number table into the .debug_line section. */
11859
11860 static void
11861 output_one_line_info_table (dw_line_info_table *table)
11862 {
11863 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
11864 unsigned int current_line = 1;
11865 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
11866 dw_line_info_entry *ent;
11867 size_t i;
11868
11869 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
11870 {
11871 switch (ent->opcode)
11872 {
11873 case LI_set_address:
11874 /* ??? Unfortunately, we have little choice here currently, and
11875 must always use the most general form. GCC does not know the
11876 address delta itself, so we can't use DW_LNS_advance_pc. Many
11877 ports do have length attributes which will give an upper bound
11878 on the address range. We could perhaps use length attributes
11879 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
11880 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
11881
11882 /* This can handle any delta. This takes
11883 4+DWARF2_ADDR_SIZE bytes. */
11884 dw2_asm_output_data (1, 0, "set address %s", line_label);
11885 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11886 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11887 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
11888 break;
11889
11890 case LI_set_line:
11891 if (ent->val == current_line)
11892 {
11893 /* We still need to start a new row, so output a copy insn. */
11894 dw2_asm_output_data (1, DW_LNS_copy,
11895 "copy line %u", current_line);
11896 }
11897 else
11898 {
11899 int line_offset = ent->val - current_line;
11900 int line_delta = line_offset - DWARF_LINE_BASE;
11901
11902 current_line = ent->val;
11903 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
11904 {
11905 /* This can handle deltas from -10 to 234, using the current
11906 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
11907 This takes 1 byte. */
11908 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
11909 "line %u", current_line);
11910 }
11911 else
11912 {
11913 /* This can handle any delta. This takes at least 4 bytes,
11914 depending on the value being encoded. */
11915 dw2_asm_output_data (1, DW_LNS_advance_line,
11916 "advance to line %u", current_line);
11917 dw2_asm_output_data_sleb128 (line_offset, NULL);
11918 dw2_asm_output_data (1, DW_LNS_copy, NULL);
11919 }
11920 }
11921 break;
11922
11923 case LI_set_file:
11924 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
11925 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11926 break;
11927
11928 case LI_set_column:
11929 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
11930 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
11931 break;
11932
11933 case LI_negate_stmt:
11934 current_is_stmt = !current_is_stmt;
11935 dw2_asm_output_data (1, DW_LNS_negate_stmt,
11936 "is_stmt %d", current_is_stmt);
11937 break;
11938
11939 case LI_set_prologue_end:
11940 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
11941 "set prologue end");
11942 break;
11943
11944 case LI_set_epilogue_begin:
11945 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
11946 "set epilogue begin");
11947 break;
11948
11949 case LI_set_discriminator:
11950 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
11951 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
11952 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
11953 dw2_asm_output_data_uleb128 (ent->val, NULL);
11954 break;
11955 }
11956 }
11957
11958 /* Emit debug info for the address of the end of the table. */
11959 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
11960 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
11961 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
11962 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
11963
11964 dw2_asm_output_data (1, 0, "end sequence");
11965 dw2_asm_output_data_uleb128 (1, NULL);
11966 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
11967 }
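/* A typical encoding produced by the loop above (labels are illustrative):
   a row for line 42 at address .LM3 comes out as the DW_LNE_set_address
   extended opcode carrying .LM3, followed either by one special opcode when
   the line delta fits the DWARF_LINE_BASE/DWARF_LINE_RANGE window, or by
   DW_LNS_advance_line with a signed LEB128 delta plus DW_LNS_copy; each
   table is closed with a final set_address to its end label and
   DW_LNE_end_sequence.  */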
11968
11969 /* Output the source line number correspondence information. This
11970 information goes into the .debug_line section. */
11971
11972 static void
11973 output_line_info (bool prologue_only)
11974 {
11975 static unsigned int generation;
11976 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
11977 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
11978 bool saw_one = false;
11979 int opc;
11980
11981 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
11982 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
11983 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
11984 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
11985
11986 if (!XCOFF_DEBUGGING_INFO)
11987 {
11988 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11989 dw2_asm_output_data (4, 0xffffffff,
11990 "Initial length escape value indicating 64-bit DWARF extension");
11991 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11992 "Length of Source Line Info");
11993 }
11994
11995 ASM_OUTPUT_LABEL (asm_out_file, l1);
11996
11997 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
11998 if (dwarf_version >= 5)
11999 {
12000 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12001 dw2_asm_output_data (1, 0, "Segment Size");
12002 }
12003 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12004 ASM_OUTPUT_LABEL (asm_out_file, p1);
12005
12006 /* Define the architecture-dependent minimum instruction length (in bytes).
12007 In this implementation of DWARF, this field is used for information
12008 purposes only. Since GCC generates assembly language, we have no
12009 a priori knowledge of how many instruction bytes are generated for each
12010 source line, and therefore can use only the DW_LNE_set_address and
12011 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12012 this as '1', which is "correct enough" for all architectures,
12013 and don't let the target override. */
12014 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12015
12016 if (dwarf_version >= 4)
12017 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12018 "Maximum Operations Per Instruction");
12019 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12020 "Default is_stmt_start flag");
12021 dw2_asm_output_data (1, DWARF_LINE_BASE,
12022 "Line Base Value (Special Opcodes)");
12023 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12024 "Line Range Value (Special Opcodes)");
12025 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12026 "Special Opcode Base");
12027
12028 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12029 {
12030 int n_op_args;
12031 switch (opc)
12032 {
12033 case DW_LNS_advance_pc:
12034 case DW_LNS_advance_line:
12035 case DW_LNS_set_file:
12036 case DW_LNS_set_column:
12037 case DW_LNS_fixed_advance_pc:
12038 case DW_LNS_set_isa:
12039 n_op_args = 1;
12040 break;
12041 default:
12042 n_op_args = 0;
12043 break;
12044 }
12045
12046 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12047 opc, n_op_args);
12048 }
12049
12050 /* Write out the information about the files we use. */
12051 output_file_names ();
12052 ASM_OUTPUT_LABEL (asm_out_file, p2);
12053 if (prologue_only)
12054 {
12055 /* Output the marker for the end of the line number info. */
12056 ASM_OUTPUT_LABEL (asm_out_file, l2);
12057 return;
12058 }
12059
12060 if (separate_line_info)
12061 {
12062 dw_line_info_table *table;
12063 size_t i;
12064
12065 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12066 if (table->in_use)
12067 {
12068 output_one_line_info_table (table);
12069 saw_one = true;
12070 }
12071 }
12072 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12073 {
12074 output_one_line_info_table (cold_text_section_line_info);
12075 saw_one = true;
12076 }
12077
12078 /* ??? Some Darwin linkers crash on a .debug_line section with no
12079 sequences. Further, merely a DW_LNE_end_sequence entry is not
12080 sufficient -- the address column must also be initialized.
12081 Make sure to output at least one set_address/end_sequence pair,
12082 choosing .text since that section is always present. */
12083 if (text_section_line_info->in_use || !saw_one)
12084 output_one_line_info_table (text_section_line_info);
12085
12086 /* Output the marker for the end of the line number info. */
12087 ASM_OUTPUT_LABEL (asm_out_file, l2);
12088 }
12089 \f
12090 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12091
12092 static inline bool
12093 need_endianity_attribute_p (bool reverse)
12094 {
12095 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12096 }
12097
12098 /* Given a pointer to a tree node for some base type, return a pointer to
12099 a DIE that describes the given type. REVERSE is true if the type is
12100 to be interpreted in the reverse storage order wrt the target order.
12101
12102 This routine must only be called for GCC type nodes that correspond to
12103 Dwarf base (fundamental) types. */
12104
12105 static dw_die_ref
12106 base_type_die (tree type, bool reverse)
12107 {
12108 dw_die_ref base_type_result;
12109 enum dwarf_type encoding;
12110 bool fpt_used = false;
12111 struct fixed_point_type_info fpt_info;
12112 tree type_bias = NULL_TREE;
12113
12114 /* If this is a subtype that should not be emitted as a subrange type,
12115 use the base type. See subrange_type_for_debug_p. */
12116 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12117 type = TREE_TYPE (type);
12118
12119 switch (TREE_CODE (type))
12120 {
12121 case INTEGER_TYPE:
12122 if ((dwarf_version >= 4 || !dwarf_strict)
12123 && TYPE_NAME (type)
12124 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12125 && DECL_IS_BUILTIN (TYPE_NAME (type))
12126 && DECL_NAME (TYPE_NAME (type)))
12127 {
12128 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12129 if (strcmp (name, "char16_t") == 0
12130 || strcmp (name, "char32_t") == 0)
12131 {
12132 encoding = DW_ATE_UTF;
12133 break;
12134 }
12135 }
12136 if ((dwarf_version >= 3 || !dwarf_strict)
12137 && lang_hooks.types.get_fixed_point_type_info)
12138 {
12139 memset (&fpt_info, 0, sizeof (fpt_info));
12140 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12141 {
12142 fpt_used = true;
12143 encoding = ((TYPE_UNSIGNED (type))
12144 ? DW_ATE_unsigned_fixed
12145 : DW_ATE_signed_fixed);
12146 break;
12147 }
12148 }
12149 if (TYPE_STRING_FLAG (type))
12150 {
12151 if (TYPE_UNSIGNED (type))
12152 encoding = DW_ATE_unsigned_char;
12153 else
12154 encoding = DW_ATE_signed_char;
12155 }
12156 else if (TYPE_UNSIGNED (type))
12157 encoding = DW_ATE_unsigned;
12158 else
12159 encoding = DW_ATE_signed;
12160
12161 if (!dwarf_strict
12162 && lang_hooks.types.get_type_bias)
12163 type_bias = lang_hooks.types.get_type_bias (type);
12164 break;
12165
12166 case REAL_TYPE:
12167 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12168 {
12169 if (dwarf_version >= 3 || !dwarf_strict)
12170 encoding = DW_ATE_decimal_float;
12171 else
12172 encoding = DW_ATE_lo_user;
12173 }
12174 else
12175 encoding = DW_ATE_float;
12176 break;
12177
12178 case FIXED_POINT_TYPE:
12179 if (!(dwarf_version >= 3 || !dwarf_strict))
12180 encoding = DW_ATE_lo_user;
12181 else if (TYPE_UNSIGNED (type))
12182 encoding = DW_ATE_unsigned_fixed;
12183 else
12184 encoding = DW_ATE_signed_fixed;
12185 break;
12186
12187 /* Dwarf2 doesn't know anything about complex ints, so use
12188 a user-defined type for them. */
12189 case COMPLEX_TYPE:
12190 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12191 encoding = DW_ATE_complex_float;
12192 else
12193 encoding = DW_ATE_lo_user;
12194 break;
12195
12196 case BOOLEAN_TYPE:
12197 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12198 encoding = DW_ATE_boolean;
12199 break;
12200
12201 default:
12202 /* No other TREE_CODEs are Dwarf fundamental types. */
12203 gcc_unreachable ();
12204 }
12205
12206 base_type_result = new_die_raw (DW_TAG_base_type);
12207
12208 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12209 int_size_in_bytes (type));
12210 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12211
12212 if (need_endianity_attribute_p (reverse))
12213 add_AT_unsigned (base_type_result, DW_AT_endianity,
12214 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12215
12216 add_alignment_attribute (base_type_result, type);
12217
12218 if (fpt_used)
12219 {
12220 switch (fpt_info.scale_factor_kind)
12221 {
12222 case fixed_point_scale_factor_binary:
12223 add_AT_int (base_type_result, DW_AT_binary_scale,
12224 fpt_info.scale_factor.binary);
12225 break;
12226
12227 case fixed_point_scale_factor_decimal:
12228 add_AT_int (base_type_result, DW_AT_decimal_scale,
12229 fpt_info.scale_factor.decimal);
12230 break;
12231
12232 case fixed_point_scale_factor_arbitrary:
12233 /* Arbitrary scale factors cannot be described in standard DWARF,
12234 yet. */
12235 if (!dwarf_strict)
12236 {
12237 /* Describe the scale factor as a rational constant. */
12238 const dw_die_ref scale_factor
12239 = new_die (DW_TAG_constant, comp_unit_die (), type);
12240
12241 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12242 fpt_info.scale_factor.arbitrary.numerator);
12243 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12244 fpt_info.scale_factor.arbitrary.denominator);
12245
12246 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12247 }
12248 break;
12249
12250 default:
12251 gcc_unreachable ();
12252 }
12253 }
12254
12255 if (type_bias)
12256 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12257 dw_scalar_form_constant
12258 | dw_scalar_form_exprloc
12259 | dw_scalar_form_reference,
12260 NULL);
12261
12262 return base_type_result;
12263 }
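/* As a rough illustration: for a plain 32-bit "int" this produces a
   DW_TAG_base_type DIE with DW_AT_byte_size 4 and DW_AT_encoding
   DW_ATE_signed; DW_AT_endianity is added only for scalars accessed in
   reverse storage order, and DW_AT_name is not added here but by callers
   such as modified_type_die.  */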
12264
12265 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12266 named 'auto' in its type: return true for it, false otherwise. */
12267
12268 static inline bool
12269 is_cxx_auto (tree type)
12270 {
12271 if (is_cxx ())
12272 {
12273 tree name = TYPE_IDENTIFIER (type);
12274 if (name == get_identifier ("auto")
12275 || name == get_identifier ("decltype(auto)"))
12276 return true;
12277 }
12278 return false;
12279 }
12280
12281 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12282 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12283
12284 static inline int
12285 is_base_type (tree type)
12286 {
12287 switch (TREE_CODE (type))
12288 {
12289 case INTEGER_TYPE:
12290 case REAL_TYPE:
12291 case FIXED_POINT_TYPE:
12292 case COMPLEX_TYPE:
12293 case BOOLEAN_TYPE:
12294 case POINTER_BOUNDS_TYPE:
12295 return 1;
12296
12297 case VOID_TYPE:
12298 case ARRAY_TYPE:
12299 case RECORD_TYPE:
12300 case UNION_TYPE:
12301 case QUAL_UNION_TYPE:
12302 case ENUMERAL_TYPE:
12303 case FUNCTION_TYPE:
12304 case METHOD_TYPE:
12305 case POINTER_TYPE:
12306 case REFERENCE_TYPE:
12307 case NULLPTR_TYPE:
12308 case OFFSET_TYPE:
12309 case LANG_TYPE:
12310 case VECTOR_TYPE:
12311 return 0;
12312
12313 default:
12314 if (is_cxx_auto (type))
12315 return 0;
12316 gcc_unreachable ();
12317 }
12318
12319 return 0;
12320 }
12321
12322 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12323 node, return the size in bits for the type if it is a constant, or else
12324 return the alignment for the type if the type's size is not constant, or
12325 else return BITS_PER_WORD if the type actually turns out to be an
12326 ERROR_MARK node. */
12327
12328 static inline unsigned HOST_WIDE_INT
12329 simple_type_size_in_bits (const_tree type)
12330 {
12331 if (TREE_CODE (type) == ERROR_MARK)
12332 return BITS_PER_WORD;
12333 else if (TYPE_SIZE (type) == NULL_TREE)
12334 return 0;
12335 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12336 return tree_to_uhwi (TYPE_SIZE (type));
12337 else
12338 return TYPE_ALIGN (type);
12339 }
12340
12341 /* Similarly, but return an offset_int instead of UHWI. */
12342
12343 static inline offset_int
12344 offset_int_type_size_in_bits (const_tree type)
12345 {
12346 if (TREE_CODE (type) == ERROR_MARK)
12347 return BITS_PER_WORD;
12348 else if (TYPE_SIZE (type) == NULL_TREE)
12349 return 0;
12350 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12351 return wi::to_offset (TYPE_SIZE (type));
12352 else
12353 return TYPE_ALIGN (type);
12354 }
12355
12356 /* Given a pointer to a tree node for a subrange type, return a pointer
12357 to a DIE that describes the given type. */
12358
12359 static dw_die_ref
12360 subrange_type_die (tree type, tree low, tree high, tree bias,
12361 dw_die_ref context_die)
12362 {
12363 dw_die_ref subrange_die;
12364 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12365
12366 if (context_die == NULL)
12367 context_die = comp_unit_die ();
12368
12369 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12370
12371 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12372 {
12373 /* The size of the subrange type and its base type do not match,
12374 so we need to generate a size attribute for the subrange type. */
12375 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12376 }
12377
12378 add_alignment_attribute (subrange_die, type);
12379
12380 if (low)
12381 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12382 if (high)
12383 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12384 if (bias && !dwarf_strict)
12385 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12386 dw_scalar_form_constant
12387 | dw_scalar_form_exprloc
12388 | dw_scalar_form_reference,
12389 NULL);
12390
12391 return subrange_die;
12392 }
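/* As an illustration (hypothetical input): an Ada subtype with static bounds
   1 .. 10 over a normal 4-byte integer yields a DW_TAG_subrange_type with
   DW_AT_lower_bound 1 and DW_AT_upper_bound 10; DW_AT_byte_size is emitted
   only when the subrange's size differs from its base type's, and
   DW_AT_GNU_bias only for biased types outside of -gstrict-dwarf.  */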
12393
12394 /* Returns the (const and/or volatile) cv_qualifiers associated with
12395 the decl node. This will normally be augmented with the
12396 cv_qualifiers of the underlying type in add_type_attribute. */
12397
12398 static int
12399 decl_quals (const_tree decl)
12400 {
12401 return ((TREE_READONLY (decl)
12402 /* The C++ front-end correctly marks reference-typed
12403 variables as readonly, but from a language (and debug
12404 info) standpoint they are not const-qualified. */
12405 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12406 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12407 | (TREE_THIS_VOLATILE (decl)
12408 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12409 }
12410
12411 /* Determine the TYPE whose qualifiers match the largest strict subset
12412 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12413 qualifiers outside QUAL_MASK. */
12414
12415 static int
12416 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12417 {
12418 tree t;
12419 int best_rank = 0, best_qual = 0, max_rank;
12420
12421 type_quals &= qual_mask;
12422 max_rank = popcount_hwi (type_quals) - 1;
12423
12424 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12425 t = TYPE_NEXT_VARIANT (t))
12426 {
12427 int q = TYPE_QUALS (t) & qual_mask;
12428
12429 if ((q & type_quals) == q && q != type_quals
12430 && check_base_type (t, type))
12431 {
12432 int rank = popcount_hwi (q);
12433
12434 if (rank > best_rank)
12435 {
12436 best_rank = rank;
12437 best_qual = q;
12438 }
12439 }
12440 }
12441
12442 return best_qual;
12443 }
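/* For example, if TYPE_QUALS asks for a const volatile variant and the
   variant list only contains a plain const variant sharing the same base
   type, this returns TYPE_QUAL_CONST, so a caller such as modified_type_die
   can reuse that variant's DIE and chain only the missing
   DW_TAG_volatile_type on top of it.  */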
12444
12445 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12446 static const dwarf_qual_info_t dwarf_qual_info[] =
12447 {
12448 { TYPE_QUAL_CONST, DW_TAG_const_type },
12449 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12450 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12451 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12452 };
12453 static const unsigned int dwarf_qual_info_size
12454 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12455
12456 /* If DIE is a qualified DIE of some base DIE with the same parent,
12457 return the base DIE, otherwise return NULL. Set MASK to the
12458 qualifiers added compared to the returned DIE. */
12459
12460 static dw_die_ref
12461 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12462 {
12463 unsigned int i;
12464 for (i = 0; i < dwarf_qual_info_size; i++)
12465 if (die->die_tag == dwarf_qual_info[i].t)
12466 break;
12467 if (i == dwarf_qual_info_size)
12468 return NULL;
12469 if (vec_safe_length (die->die_attr) != 1)
12470 return NULL;
12471 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12472 if (type == NULL || type->die_parent != die->die_parent)
12473 return NULL;
12474 *mask |= dwarf_qual_info[i].q;
12475 if (depth)
12476 {
12477 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12478 if (ret)
12479 return ret;
12480 }
12481 return type;
12482 }
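/* For example, if DIE is a DW_TAG_const_type whose sole attribute DW_AT_type
   refers to a DW_TAG_volatile_type in the same scope, which in turn refers
   to the unqualified base DIE, then with DEPTH >= 1 this returns the base
   DIE and sets *MASK to TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE.  */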
12483
12484 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12485 entry that chains the modifiers specified by CV_QUALS in front of the
12486 given type. REVERSE is true if the type is to be interpreted in the
12487 reverse storage order wrt the target order. */
12488
12489 static dw_die_ref
12490 modified_type_die (tree type, int cv_quals, bool reverse,
12491 dw_die_ref context_die)
12492 {
12493 enum tree_code code = TREE_CODE (type);
12494 dw_die_ref mod_type_die;
12495 dw_die_ref sub_die = NULL;
12496 tree item_type = NULL;
12497 tree qualified_type;
12498 tree name, low, high;
12499 dw_die_ref mod_scope;
12500 /* Only these cv-qualifiers are currently handled. */
12501 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12502 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
12503 ENCODE_QUAL_ADDR_SPACE(~0U));
12504 const bool reverse_base_type
12505 = need_endianity_attribute_p (reverse) && is_base_type (type);
12506
12507 if (code == ERROR_MARK)
12508 return NULL;
12509
12510 if (lang_hooks.types.get_debug_type)
12511 {
12512 tree debug_type = lang_hooks.types.get_debug_type (type);
12513
12514 if (debug_type != NULL_TREE && debug_type != type)
12515 return modified_type_die (debug_type, cv_quals, reverse, context_die);
12516 }
12517
12518 cv_quals &= cv_qual_mask;
12519
12520 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
12521 tag modifier (and not an attribute) that old consumers won't be able
12522 to handle. */
12523 if (dwarf_version < 3)
12524 cv_quals &= ~TYPE_QUAL_RESTRICT;
12525
12526 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
12527 if (dwarf_version < 5)
12528 cv_quals &= ~TYPE_QUAL_ATOMIC;
12529
12530 /* See if we already have the appropriately qualified variant of
12531 this type. */
12532 qualified_type = get_qualified_type (type, cv_quals);
12533
12534 if (qualified_type == sizetype)
12535 {
12536 /* Try not to expose the internal sizetype type's name. */
12537 if (TYPE_NAME (qualified_type)
12538 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
12539 {
12540 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
12541
12542 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
12543 && (TYPE_PRECISION (t)
12544 == TYPE_PRECISION (qualified_type))
12545 && (TYPE_UNSIGNED (t)
12546 == TYPE_UNSIGNED (qualified_type)));
12547 qualified_type = t;
12548 }
12549 else if (qualified_type == sizetype
12550 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
12551 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
12552 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
12553 qualified_type = size_type_node;
12554 }
12555
12556 /* If we do, then we can just use its DIE, if it exists. */
12557 if (qualified_type)
12558 {
12559 mod_type_die = lookup_type_die (qualified_type);
12560
12561 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
12562 dealt with specially: the DIE with the attribute, if it exists, is
12563 placed immediately after the regular DIE for the same base type. */
12564 if (mod_type_die
12565 && (!reverse_base_type
12566 || ((mod_type_die = mod_type_die->die_sib) != NULL
12567 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
12568 return mod_type_die;
12569 }
12570
12571 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
12572
12573 /* Handle C typedef types. */
12574 if (name
12575 && TREE_CODE (name) == TYPE_DECL
12576 && DECL_ORIGINAL_TYPE (name)
12577 && !DECL_ARTIFICIAL (name))
12578 {
12579 tree dtype = TREE_TYPE (name);
12580
12581 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
12582 if (qualified_type == dtype && !reverse_base_type)
12583 {
12584 tree origin = decl_ultimate_origin (name);
12585
12586 /* Typedef variants that have an abstract origin don't get their own
12587 type DIE (see gen_typedef_die), so fall back on the ultimate
12588 abstract origin instead. */
12589 if (origin != NULL && origin != name)
12590 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
12591 context_die);
12592
12593 /* For a named type, use the typedef. */
12594 gen_type_die (qualified_type, context_die);
12595 return lookup_type_die (qualified_type);
12596 }
12597 else
12598 {
12599 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
12600 dquals &= cv_qual_mask;
12601 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
12602 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
12603 /* cv-unqualified version of named type. Just use
12604 the unnamed type to which it refers. */
12605 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
12606 reverse, context_die);
12607 /* Else cv-qualified version of named type; fall through. */
12608 }
12609 }
12610
12611 mod_scope = scope_die_for (type, context_die);
12612
12613 if (cv_quals)
12614 {
12615 int sub_quals = 0, first_quals = 0;
12616 unsigned i;
12617 dw_die_ref first = NULL, last = NULL;
12618
12619 /* Determine a lesser qualified type that most closely matches
12620 this one. Then generate DW_TAG_* entries for the remaining
12621 qualifiers. */
12622 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
12623 cv_qual_mask);
12624 if (sub_quals && use_debug_types)
12625 {
12626 bool needed = false;
12627 /* If emitting type units, make sure the order of qualifiers
12628 is canonical. Thus, start from unqualified type if
12629 an earlier qualifier is missing in sub_quals, but some later
12630 one is present there. */
12631 for (i = 0; i < dwarf_qual_info_size; i++)
12632 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12633 needed = true;
12634 else if (needed && (dwarf_qual_info[i].q & cv_quals))
12635 {
12636 sub_quals = 0;
12637 break;
12638 }
12639 }
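/* For instance, with cv_quals == const|volatile and sub_quals == volatile,
   the loop above sees that const (an earlier dwarf_qual_info entry) is
   still missing while volatile (a later one) is already present, so it
   resets sub_quals to 0 and the whole const -> volatile chain is rebuilt
   in canonical order for the type unit.  */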
12640 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
12641 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
12642 {
12643 /* As not all intermediate qualified DIEs have corresponding
12644 tree types, ensure that qualified DIEs in the same scope
12645 as their DW_AT_type are emitted after their DW_AT_type,
12646 only with other qualified DIEs for the same type possibly
12647 in between them. Determine the range of such qualified
12648 DIEs now (first being the base type, last being corresponding
12649 last qualified DIE for it). */
12650 unsigned int count = 0;
12651 first = qualified_die_p (mod_type_die, &first_quals,
12652 dwarf_qual_info_size);
12653 if (first == NULL)
12654 first = mod_type_die;
12655 gcc_assert ((first_quals & ~sub_quals) == 0);
12656 for (count = 0, last = first;
12657 count < (1U << dwarf_qual_info_size);
12658 count++, last = last->die_sib)
12659 {
12660 int quals = 0;
12661 if (last == mod_scope->die_child)
12662 break;
12663 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
12664 != first)
12665 break;
12666 }
12667 }
12668
12669 for (i = 0; i < dwarf_qual_info_size; i++)
12670 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
12671 {
12672 dw_die_ref d;
12673 if (first && first != last)
12674 {
12675 for (d = first->die_sib; ; d = d->die_sib)
12676 {
12677 int quals = 0;
12678 qualified_die_p (d, &quals, dwarf_qual_info_size);
12679 if (quals == (first_quals | dwarf_qual_info[i].q))
12680 break;
12681 if (d == last)
12682 {
12683 d = NULL;
12684 break;
12685 }
12686 }
12687 if (d)
12688 {
12689 mod_type_die = d;
12690 continue;
12691 }
12692 }
12693 if (first)
12694 {
12695 d = new_die_raw (dwarf_qual_info[i].t);
12696 add_child_die_after (mod_scope, d, last);
12697 last = d;
12698 }
12699 else
12700 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
12701 if (mod_type_die)
12702 add_AT_die_ref (d, DW_AT_type, mod_type_die);
12703 mod_type_die = d;
12704 first_quals |= dwarf_qual_info[i].q;
12705 }
12706 }
12707 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
12708 {
12709 dwarf_tag tag = DW_TAG_pointer_type;
12710 if (code == REFERENCE_TYPE)
12711 {
12712 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
12713 tag = DW_TAG_rvalue_reference_type;
12714 else
12715 tag = DW_TAG_reference_type;
12716 }
12717 mod_type_die = new_die (tag, mod_scope, type);
12718
12719 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
12720 simple_type_size_in_bits (type) / BITS_PER_UNIT);
12721 add_alignment_attribute (mod_type_die, type);
12722 item_type = TREE_TYPE (type);
12723
12724 addr_space_t as = TYPE_ADDR_SPACE (item_type);
12725 if (!ADDR_SPACE_GENERIC_P (as))
12726 {
12727 int action = targetm.addr_space.debug (as);
12728 if (action >= 0)
12729 {
12730 /* Positive values indicate an address_class. */
12731 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
12732 }
12733 else
12734 {
12735 /* Negative values indicate an (inverted) segment base reg. */
12736 dw_loc_descr_ref d
12737 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
12738 add_AT_loc (mod_type_die, DW_AT_segment, d);
12739 }
12740 }
12741 }
12742 else if (code == INTEGER_TYPE
12743 && TREE_TYPE (type) != NULL_TREE
12744 && subrange_type_for_debug_p (type, &low, &high))
12745 {
12746 tree bias = NULL_TREE;
12747 if (lang_hooks.types.get_type_bias)
12748 bias = lang_hooks.types.get_type_bias (type);
12749 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
12750 item_type = TREE_TYPE (type);
12751 }
12752 else if (is_base_type (type))
12753 {
12754 mod_type_die = base_type_die (type, reverse);
12755
12756 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
12757 if (reverse_base_type)
12758 {
12759 dw_die_ref after_die
12760 = modified_type_die (type, cv_quals, false, context_die);
12761 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
12762 }
12763 else
12764 add_child_die (comp_unit_die (), mod_type_die);
12765
12766 add_pubtype (type, mod_type_die);
12767 }
12768 else
12769 {
12770 gen_type_die (type, context_die);
12771
12772 /* We have to get the type_main_variant here (and pass that to the
12773 `lookup_type_die' routine) because the ..._TYPE node we have
12774 might simply be a *copy* of some original type node (where the
12775 copy was created to help us keep track of typedef names) and
12776 that copy might have a different TYPE_UID from the original
12777 ..._TYPE node. */
12778 if (TREE_CODE (type) == FUNCTION_TYPE
12779 || TREE_CODE (type) == METHOD_TYPE)
12780 {
12781 /* For function/method types, we can't just use type_main_variant here,
12782 because for C++ the variants can have different ref-qualifiers,
12783 so try to canonicalize instead. */
12784 tree main = TYPE_MAIN_VARIANT (type);
12785 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
12786 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
12787 && check_base_type (t, main)
12788 && check_lang_type (t, type))
12789 return lookup_type_die (t);
12790 return lookup_type_die (type);
12791 }
12792 else if (TREE_CODE (type) != VECTOR_TYPE
12793 && TREE_CODE (type) != ARRAY_TYPE)
12794 return lookup_type_die (type_main_variant (type));
12795 else
12796 /* Vectors have the debugging information in the type,
12797 not the main variant. */
12798 return lookup_type_die (type);
12799 }
12800
12801 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
12802 don't output a DW_TAG_typedef, since there isn't one in the
12803 user's program; just attach a DW_AT_name to the type.
12804 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
12805 if the base type already has the same name. */
12806 if (name
12807 && ((TREE_CODE (name) != TYPE_DECL
12808 && (qualified_type == TYPE_MAIN_VARIANT (type)
12809 || (cv_quals == TYPE_UNQUALIFIED)))
12810 || (TREE_CODE (name) == TYPE_DECL
12811 && TREE_TYPE (name) == qualified_type
12812 && DECL_NAME (name))))
12813 {
12814 if (TREE_CODE (name) == TYPE_DECL)
12815 /* Could just call add_name_and_src_coords_attributes here,
12816 but since this is a builtin type it doesn't have any
12817 useful source coordinates anyway. */
12818 name = DECL_NAME (name);
12819 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
12820 }
12821 /* This probably indicates a bug. */
12822 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
12823 {
12824 name = TYPE_IDENTIFIER (type);
12825 add_name_attribute (mod_type_die,
12826 name ? IDENTIFIER_POINTER (name) : "__unknown__");
12827 }
12828
12829 if (qualified_type && !reverse_base_type)
12830 equate_type_number_to_die (qualified_type, mod_type_die);
12831
12832 if (item_type)
12833 /* We must do this after the equate_type_number_to_die call, in case
12834 this is a recursive type. This ensures that the modified_type_die
12835 recursion will terminate even if the type is recursive. Recursive
12836 types are possible in Ada. */
12837 sub_die = modified_type_die (item_type,
12838 TYPE_QUALS_NO_ADDR_SPACE (item_type),
12839 reverse,
12840 context_die);
12841
12842 if (sub_die != NULL)
12843 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
12844
12845 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
12846 if (TYPE_ARTIFICIAL (type))
12847 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
12848
12849 return mod_type_die;
12850 }
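/* Illustrative overall result: for the C type "const char *",
   modified_type_die builds
     DW_TAG_pointer_type (DW_AT_byte_size)
       -> DW_TAG_const_type
         -> DW_TAG_base_type "char"
   where the const DIE comes from the recursive call on the pointed-to type
   (item_type) just above and is attached through DW_AT_type.  */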
12851
12852 /* Generate DIEs for the generic parameters of T.
12853 T must be either a generic type or a generic function.
12854 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
12855
12856 static void
12857 gen_generic_params_dies (tree t)
12858 {
12859 tree parms, args;
12860 int parms_num, i;
12861 dw_die_ref die = NULL;
12862 int non_default;
12863
12864 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
12865 return;
12866
12867 if (TYPE_P (t))
12868 die = lookup_type_die (t);
12869 else if (DECL_P (t))
12870 die = lookup_decl_die (t);
12871
12872 gcc_assert (die);
12873
12874 parms = lang_hooks.get_innermost_generic_parms (t);
12875 if (!parms)
12876 /* T has no generic parameters. It means T is neither a generic type
12877 nor a generic function. End of story. */
12878 return;
12879
12880 parms_num = TREE_VEC_LENGTH (parms);
12881 args = lang_hooks.get_innermost_generic_args (t);
12882 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
12883 non_default = int_cst_value (TREE_CHAIN (args));
12884 else
12885 non_default = TREE_VEC_LENGTH (args);
12886 for (i = 0; i < parms_num; i++)
12887 {
12888 tree parm, arg, arg_pack_elems;
12889 dw_die_ref parm_die;
12890
12891 parm = TREE_VEC_ELT (parms, i);
12892 arg = TREE_VEC_ELT (args, i);
12893 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
12894 gcc_assert (parm && TREE_VALUE (parm) && arg);
12895
12896 if (parm && TREE_VALUE (parm) && arg)
12897 {
12898 /* If PARM represents a template parameter pack,
12899 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
12900 by DW_TAG_template_*_parameter DIEs for the argument
12901 pack elements of ARG. Note that ARG would then be
12902 an argument pack. */
12903 if (arg_pack_elems)
12904 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
12905 arg_pack_elems,
12906 die);
12907 else
12908 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
12909 true /* emit name */, die);
12910 if (i >= non_default)
12911 add_AT_flag (parm_die, DW_AT_default_value, 1);
12912 }
12913 }
12914 }
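/* Example (C++): for an instantiation S<int> of
     template <typename T, int N = 4> struct S;
   this adds to S's DIE a DW_TAG_template_type_param for T (DW_AT_type
   pointing to int) and a DW_TAG_template_value_param for N; because N's
   argument was defaulted, the latter also gets DW_AT_default_value.  */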
12915
12916 /* Create and return a DIE for PARM which should be
12917 the representation of a generic type parameter.
12918 For instance, in the C++ front end, PARM would be a template parameter.
12919 ARG is the argument to PARM.
12920 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute
12921 set to the name of PARM.
12922 PARENT_DIE is the parent DIE which the new created DIE should be added to,
12923 as a child node. */
12924
12925 static dw_die_ref
12926 generic_parameter_die (tree parm, tree arg,
12927 bool emit_name_p,
12928 dw_die_ref parent_die)
12929 {
12930 dw_die_ref tmpl_die = NULL;
12931 const char *name = NULL;
12932
12933 if (!parm || !DECL_NAME (parm) || !arg)
12934 return NULL;
12935
12936 /* We support non-type generic parameters and arguments,
12937 type generic parameters and arguments, as well as
12938 generic generic parameters (a.k.a. template template parameters in C++)
12939 and arguments. */
12940 if (TREE_CODE (parm) == PARM_DECL)
12941 /* PARM is a non-type generic parameter. */
12942 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
12943 else if (TREE_CODE (parm) == TYPE_DECL)
12944 /* PARM is a type generic parameter. */
12945 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
12946 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12947 /* PARM is a generic generic parameter.
12948 Its DIE is a GNU extension. It shall have a
12949 DW_AT_name attribute to represent the name of the template template
12950 parameter, and a DW_AT_GNU_template_name attribute to represent the
12951 name of the template template argument. */
12952 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
12953 parent_die, parm);
12954 else
12955 gcc_unreachable ();
12956
12957 if (tmpl_die)
12958 {
12959 tree tmpl_type;
12960
12961 /* If PARM is a generic parameter pack, it means we are
12962 emitting debug info for a template argument pack element.
12963 In other terms, ARG is a template argument pack element.
12964 In that case, we don't emit any DW_AT_name attribute for
12965 the die. */
12966 if (emit_name_p)
12967 {
12968 name = IDENTIFIER_POINTER (DECL_NAME (parm));
12969 gcc_assert (name);
12970 add_AT_string (tmpl_die, DW_AT_name, name);
12971 }
12972
12973 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
12974 {
12975 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
12976 TMPL_DIE should have a child DW_AT_type attribute that is set
12977 to the type of the argument to PARM, which is ARG.
12978 If PARM is a type generic parameter, TMPL_DIE should have a
12979 child DW_AT_type that is set to ARG. */
12980 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
12981 add_type_attribute (tmpl_die, tmpl_type,
12982 (TREE_THIS_VOLATILE (tmpl_type)
12983 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
12984 false, parent_die);
12985 }
12986 else
12987 {
12988 /* So TMPL_DIE is a DIE representing a
12989 generic generic parameter, a.k.a. a template template
12990 parameter in C++, and ARG is a template. */
12991
12992 /* The DW_AT_GNU_template_name attribute of the DIE must be set
12993 to the name of the argument. */
12994 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
12995 if (name)
12996 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
12997 }
12998
12999 if (TREE_CODE (parm) == PARM_DECL)
13000 /* So PARM is a non-type generic parameter.
13001 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13002 attribute of TMPL_DIE whose value represents the value
13003 of ARG.
13004 We must be careful here:
13005 the value of ARG might reference some function decls.
13006 We might currently be emitting debug info for a generic
13007 type, and since types are emitted before function decls, we
13008 don't know yet whether the function decls referenced by ARG
13009 will actually be emitted after cgraph computations.
13010 So we must defer the generation of the DW_AT_const_value to
13011 after cgraph is ready. */
13012 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13013 }
13014
13015 return tmpl_die;
13016 }
13017
13018 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13019 PARM_PACK, which must be a template parameter pack. The returned DIE
13020 will be a child DIE of PARENT_DIE. */
13021
13022 static dw_die_ref
13023 template_parameter_pack_die (tree parm_pack,
13024 tree parm_pack_args,
13025 dw_die_ref parent_die)
13026 {
13027 dw_die_ref die;
13028 int j;
13029
13030 gcc_assert (parent_die && parm_pack);
13031
13032 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13033 add_name_and_src_coords_attributes (die, parm_pack);
13034 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13035 generic_parameter_die (parm_pack,
13036 TREE_VEC_ELT (parm_pack_args, j),
13037 false /* Don't emit DW_AT_name */,
13038 die);
13039 return die;
13040 }
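/* E.g. for "template <typename... Args> struct tuple;" instantiated as
   tuple<int, long>, this produces a DW_TAG_GNU_template_parameter_pack DIE
   named "Args" with two unnamed DW_TAG_template_type_param children, one
   referring to int and one to long.  */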
13041
13042 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13043 an enumerated type. */
13044
13045 static inline int
13046 type_is_enum (const_tree type)
13047 {
13048 return TREE_CODE (type) == ENUMERAL_TYPE;
13049 }
13050
13051 /* Return the DBX register number described by a given RTL node. */
13052
13053 static unsigned int
13054 dbx_reg_number (const_rtx rtl)
13055 {
13056 unsigned regno = REGNO (rtl);
13057
13058 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13059
13060 #ifdef LEAF_REG_REMAP
13061 if (crtl->uses_only_leaf_regs)
13062 {
13063 int leaf_reg = LEAF_REG_REMAP (regno);
13064 if (leaf_reg != -1)
13065 regno = (unsigned) leaf_reg;
13066 }
13067 #endif
13068
13069 regno = DBX_REGISTER_NUMBER (regno);
13070 gcc_assert (regno != INVALID_REGNUM);
13071 return regno;
13072 }
13073
13074 /* Optionally add a DW_OP_piece term to a location description expression.
13075 DW_OP_piece is only added if the location description expression doesn't
13076 already end with DW_OP_piece. */
13077
13078 static void
13079 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13080 {
13081 dw_loc_descr_ref loc;
13082
13083 if (*list_head != NULL)
13084 {
13085 /* Find the end of the chain. */
13086 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13087 ;
13088
13089 if (loc->dw_loc_opc != DW_OP_piece)
13090 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13091 }
13092 }
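/* For example, a 64-bit value living in two 32-bit registers ends up
   described as
     DW_OP_reg0 DW_OP_piece 4 DW_OP_reg1 DW_OP_piece 4
   with this function appending each DW_OP_piece 4 after the corresponding
   register op (see multiple_reg_loc_descriptor below).  */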
13093
13094 /* Return a location descriptor that designates a machine register or
13095 zero if there is none. */
13096
13097 static dw_loc_descr_ref
13098 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13099 {
13100 rtx regs;
13101
13102 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13103 return 0;
13104
13105 /* We only use "frame base" when we're sure we're talking about the
13106 post-prologue local stack frame. We do this by *not* running
13107 register elimination until this point, and recognizing the special
13108 argument pointer and soft frame pointer rtx's.
13109 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13110 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13111 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13112 {
13113 dw_loc_descr_ref result = NULL;
13114
13115 if (dwarf_version >= 4 || !dwarf_strict)
13116 {
13117 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13118 initialized);
13119 if (result)
13120 add_loc_descr (&result,
13121 new_loc_descr (DW_OP_stack_value, 0, 0));
13122 }
13123 return result;
13124 }
13125
13126 regs = targetm.dwarf_register_span (rtl);
13127
13128 if (REG_NREGS (rtl) > 1 || regs)
13129 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13130 else
13131 {
13132 unsigned int dbx_regnum = dbx_reg_number (rtl);
13133 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13134 return 0;
13135 return one_reg_loc_descriptor (dbx_regnum, initialized);
13136 }
13137 }
13138
13139 /* Return a location descriptor that designates a machine register for
13140 a given hard register number. */
13141
13142 static dw_loc_descr_ref
13143 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13144 {
13145 dw_loc_descr_ref reg_loc_descr;
13146
13147 if (regno <= 31)
13148 reg_loc_descr
13149 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13150 else
13151 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13152
13153 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13154 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13155
13156 return reg_loc_descr;
13157 }
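/* E.g. DWARF register 3 is encoded as the single-byte opcode DW_OP_reg3,
   while register 40 needs DW_OP_regx with 40 as a ULEB128 operand.  */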
13158
13159 /* Given an RTL of a register, return a location descriptor that
13160 designates a value that spans more than one register. */
13161
13162 static dw_loc_descr_ref
13163 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13164 enum var_init_status initialized)
13165 {
13166 int size, i;
13167 dw_loc_descr_ref loc_result = NULL;
13168
13169 /* Simple, contiguous registers. */
13170 if (regs == NULL_RTX)
13171 {
13172 unsigned reg = REGNO (rtl);
13173 int nregs;
13174
13175 #ifdef LEAF_REG_REMAP
13176 if (crtl->uses_only_leaf_regs)
13177 {
13178 int leaf_reg = LEAF_REG_REMAP (reg);
13179 if (leaf_reg != -1)
13180 reg = (unsigned) leaf_reg;
13181 }
13182 #endif
13183
13184 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13185 nregs = REG_NREGS (rtl);
13186
13187 size = GET_MODE_SIZE (GET_MODE (rtl)) / nregs;
13188
13189 loc_result = NULL;
13190 while (nregs--)
13191 {
13192 dw_loc_descr_ref t;
13193
13194 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13195 VAR_INIT_STATUS_INITIALIZED);
13196 add_loc_descr (&loc_result, t);
13197 add_loc_descr_op_piece (&loc_result, size);
13198 ++reg;
13199 }
13200 return loc_result;
13201 }
13202
13203 /* Now onto stupid register sets in non-contiguous locations. */
13204
13205 gcc_assert (GET_CODE (regs) == PARALLEL);
13206
13207 size = GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0)));
13208 loc_result = NULL;
13209
13210 for (i = 0; i < XVECLEN (regs, 0); ++i)
13211 {
13212 dw_loc_descr_ref t;
13213
13214 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13215 VAR_INIT_STATUS_INITIALIZED);
13216 add_loc_descr (&loc_result, t);
13217 add_loc_descr_op_piece (&loc_result, size);
13218 }
13219
13220 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13221 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13222 return loc_result;
13223 }
13224
13225 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13226
13227 /* Return a location descriptor that designates the constant I, built
13228 as a compound operation from the constant (I >> SHIFT), the constant
13229 SHIFT and DW_OP_shl. */
13230
13231 static dw_loc_descr_ref
13232 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13233 {
13234 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13235 add_loc_descr (&ret, int_loc_descriptor (shift));
13236 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13237 return ret;
13238 }
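/* Worked example: 0x12000000 == 18 << 24, so int_loc_descriptor below can
   emit it as DW_OP_lit18 DW_OP_lit24 DW_OP_shl (3 bytes) instead of the
   5-byte DW_OP_const4u 0x12000000.  */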
13239
13240 /* Return a location descriptor that designates constant POLY_I. */
13241
13242 static dw_loc_descr_ref
13243 int_loc_descriptor (poly_int64 poly_i)
13244 {
13245 enum dwarf_location_atom op;
13246
13247 HOST_WIDE_INT i;
13248 if (!poly_i.is_constant (&i))
13249 {
13250 /* Create location descriptions for the non-constant part and
13251 add any constant offset at the end. */
13252 dw_loc_descr_ref ret = NULL;
13253 HOST_WIDE_INT constant = poly_i.coeffs[0];
13254 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13255 {
13256 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13257 if (coeff != 0)
13258 {
13259 dw_loc_descr_ref start = ret;
13260 unsigned int factor;
13261 int bias;
13262 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13263 (j, &factor, &bias);
13264
13265 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13266 add COEFF * (REGNO / FACTOR) now and subtract
13267 COEFF * BIAS from the final constant part. */
13268 constant -= coeff * bias;
13269 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13270 if (coeff % factor == 0)
13271 coeff /= factor;
13272 else
13273 {
13274 int amount = exact_log2 (factor);
13275 gcc_assert (amount >= 0);
13276 add_loc_descr (&ret, int_loc_descriptor (amount));
13277 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13278 }
13279 if (coeff != 1)
13280 {
13281 add_loc_descr (&ret, int_loc_descriptor (coeff));
13282 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13283 }
13284 if (start)
13285 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13286 }
13287 }
13288 loc_descr_plus_const (&ret, constant);
13289 return ret;
13290 }
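/* Sketch of what the loop above emits for a two-coefficient value
   C0 + C1*x, where the target describes x as REG / FACTOR - BIAS: push the
   register with DW_OP_breg<N> 0 (or DW_OP_bregx), divide by FACTOR either
   by folding it into C1 or with an explicit DW_OP_shr, multiply by the
   remaining coefficient unless it is 1, and finally fold C0 - C1*BIAS into
   the trailing constant added by loc_descr_plus_const.  */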
13291
13292 /* Pick the smallest representation of a constant, rather than just
13293 defaulting to the LEB encoding. */
13294 if (i >= 0)
13295 {
13296 int clz = clz_hwi (i);
13297 int ctz = ctz_hwi (i);
13298 if (i <= 31)
13299 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13300 else if (i <= 0xff)
13301 op = DW_OP_const1u;
13302 else if (i <= 0xffff)
13303 op = DW_OP_const2u;
13304 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13305 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13306 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13307 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13308 while DW_OP_const4u is 5 bytes. */
13309 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13310 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13311 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13312 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13313 while DW_OP_const4u is 5 bytes. */
13314 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13315
13316 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13317 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13318 <= 4)
13319 {
13320 /* As i >= 2**31, the double cast above will yield a negative number.
13321 Since wrapping is defined in DWARF expressions we can output big
13322 positive integers as small negative ones, regardless of the size
13323 of host wide ints.
13324
13325 Here, since the evaluator will handle 32-bit values and since i >=
13326 2**31, we know it's going to be interpreted as a negative literal:
13327 store it this way if we can do better than 5 bytes this way. */
13328 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13329 }
13330 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13331 op = DW_OP_const4u;
13332
13333 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13334 least 6 bytes: see if we can do better before falling back to it. */
13335 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13336 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13337 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13338 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13339 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13340 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13341 >= HOST_BITS_PER_WIDE_INT)
13342 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13343 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13344 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13345 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13346 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13347 && size_of_uleb128 (i) > 6)
13348 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13349 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13350 else
13351 op = DW_OP_constu;
13352 }
13353 else
13354 {
13355 if (i >= -0x80)
13356 op = DW_OP_const1s;
13357 else if (i >= -0x8000)
13358 op = DW_OP_const2s;
13359 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13360 {
13361 if (size_of_int_loc_descriptor (i) < 5)
13362 {
13363 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13364 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13365 return ret;
13366 }
13367 op = DW_OP_const4s;
13368 }
13369 else
13370 {
13371 if (size_of_int_loc_descriptor (i)
13372 < (unsigned long) 1 + size_of_sleb128 (i))
13373 {
13374 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13375 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13376 return ret;
13377 }
13378 op = DW_OP_consts;
13379 }
13380 }
13381
13382 return new_loc_descr (op, i, 0);
13383 }
13384
13385 /* Likewise, for unsigned constants. */
13386
13387 static dw_loc_descr_ref
13388 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13389 {
13390 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13391 const unsigned HOST_WIDE_INT max_uint
13392 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13393
13394 /* If possible, use the clever signed constants handling. */
13395 if (i <= max_int)
13396 return int_loc_descriptor ((HOST_WIDE_INT) i);
13397
13398 /* Here, we are left with positive numbers that cannot be represented as
13399 a HOST_WIDE_INT, i.e.:
13400 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13401
13402 Using a DW_OP_const4u/8u operation to encode them consumes a lot of bytes
13403 whereas it may be better to output a negative integer: thanks to integer
13404 wrapping, we know that:
13405 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13406 = x - 2 * (max (HOST_WIDE_INT) + 1) when 8 * DWARF2_ADDR_SIZE
13407 equals HOST_BITS_PER_WIDE_INT (checked below). So numbers close to
13408 max (unsigned HOST_WIDE_INT) can be represented as small negative
13409 integers. Let's try that in cases where it will clearly improve the
13410 encoding: there is no gain in turning DW_OP_const4u into DW_OP_const4s. */
13411 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13412 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13413 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13414 {
13415 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13416
13417 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13418 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13419 const HOST_WIDE_INT second_shift
13420 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13421
13422 /* So we finally have:
13423 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13424 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13425 return int_loc_descriptor (second_shift);
13426 }
13427
13428 /* Last chance: fallback to a simple constant operation. */
13429 return new_loc_descr
13430 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13431 ? DW_OP_const4u
13432 : DW_OP_const8u,
13433 i, 0);
13434 }
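/* Worked example, assuming a 64-bit HOST_WIDE_INT and DWARF2_ADDR_SIZE == 8:
   i == 0xffffffffffffff00 is close enough to the top of the range, so the
   branch above emits int_loc_descriptor (-0x100), i.e. DW_OP_const2s -256
   (3 bytes), instead of a 9-byte DW_OP_const8u.  */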
13435
13436 /* Generate and return a location description that computes the unsigned
13437 comparison of the two stack top entries (a OP b where b is the top-most
13438 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13439 LE_EXPR, GT_EXPR or GE_EXPR. */
13440
13441 static dw_loc_descr_ref
13442 uint_comparison_loc_list (enum tree_code kind)
13443 {
13444 enum dwarf_location_atom op, flip_op;
13445 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13446
13447 switch (kind)
13448 {
13449 case LT_EXPR:
13450 op = DW_OP_lt;
13451 break;
13452 case LE_EXPR:
13453 op = DW_OP_le;
13454 break;
13455 case GT_EXPR:
13456 op = DW_OP_gt;
13457 break;
13458 case GE_EXPR:
13459 op = DW_OP_ge;
13460 break;
13461 default:
13462 gcc_unreachable ();
13463 }
13464
13465 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13466 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13467
13468 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13469 possible to perform unsigned comparisons: we just have to distinguish
13470 three cases:
13471
13472 1. when a and b have the same sign (as signed integers), we should
13473 return: a OP(signed) b;
13474
13475 2. when a is a negative signed integer while b is a positive one, a is
13476 the greater unsigned integer; 3. likewise when a and b's roles are
13477 flipped, b is the greater one.
13478
13479 So first, compare the sign of the two operands. */
13480 ret = new_loc_descr (DW_OP_over, 0, 0);
13481 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13482 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13483 /* If they have different signs (i.e. they have different sign bits), then
13484 the stack top value has now the sign bit set and thus it's smaller than
13485 zero. */
13486 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13487 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13488 add_loc_descr (&ret, bra_node);
13489
13490 /* We are in case 1. At this point, we know both operands have the same
13491 sign, so it's safe to use the built-in signed comparison. */
13492 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13493 add_loc_descr (&ret, jmp_node);
13494
13495 /* We are in case 2 or 3. Here, we know the operands do not have the same
13496 sign, so we have to flip the signed comparison. */
13497 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13498 tmp = new_loc_descr (flip_op, 0, 0);
13499 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13500 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13501 add_loc_descr (&ret, tmp);
13502
13503 /* This dummy operation is necessary to make the two branches join. */
13504 tmp = new_loc_descr (DW_OP_nop, 0, 0);
13505 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13506 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
13507 add_loc_descr (&ret, tmp);
13508
13509 return ret;
13510 }
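/* The resulting expression, with OP the signed form of KIND and FLIP_OP its
   opposite direction, is therefore
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra L1
     OP DW_OP_skip L2  L1: FLIP_OP  L2: DW_OP_nop
   i.e. the signed comparison when the signs agree, the flipped one when
   they differ.  */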
13511
13512 /* Likewise, but takes the location description lists (might be destructive on
13513 them). Return NULL if either is NULL or if concatenation fails. */
13514
13515 static dw_loc_list_ref
13516 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
13517 enum tree_code kind)
13518 {
13519 if (left == NULL || right == NULL)
13520 return NULL;
13521
13522 add_loc_list (&left, right);
13523 if (left == NULL)
13524 return NULL;
13525
13526 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
13527 return left;
13528 }
13529
13530 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
13531 without actually allocating it. */
13532
13533 static unsigned long
13534 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13535 {
13536 return size_of_int_loc_descriptor (i >> shift)
13537 + size_of_int_loc_descriptor (shift)
13538 + 1;
13539 }
13540
13541 /* Return size_of_locs (int_loc_descriptor (i)) without
13542 actually allocating it. */
13543
13544 static unsigned long
13545 size_of_int_loc_descriptor (HOST_WIDE_INT i)
13546 {
13547 unsigned long s;
13548
13549 if (i >= 0)
13550 {
13551 int clz, ctz;
13552 if (i <= 31)
13553 return 1;
13554 else if (i <= 0xff)
13555 return 2;
13556 else if (i <= 0xffff)
13557 return 3;
13558 clz = clz_hwi (i);
13559 ctz = ctz_hwi (i);
13560 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13561 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13562 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13563 - clz - 5);
13564 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13565 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13566 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13567 - clz - 8);
13568 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13569 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13570 <= 4)
13571 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13572 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13573 return 5;
13574 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
13575 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13576 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13577 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13578 - clz - 8);
13579 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13580 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
13581 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13582 - clz - 16);
13583 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13584 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13585 && s > 6)
13586 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
13587 - clz - 32);
13588 else
13589 return 1 + s;
13590 }
13591 else
13592 {
13593 if (i >= -0x80)
13594 return 2;
13595 else if (i >= -0x8000)
13596 return 3;
13597 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13598 {
13599 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13600 {
13601 s = size_of_int_loc_descriptor (-i) + 1;
13602 if (s < 5)
13603 return s;
13604 }
13605 return 5;
13606 }
13607 else
13608 {
13609 unsigned long r = 1 + size_of_sleb128 (i);
13610 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
13611 {
13612 s = size_of_int_loc_descriptor (-i) + 1;
13613 if (s < r)
13614 return s;
13615 }
13616 return r;
13617 }
13618 }
13619 }
13620
13621 /* Return a location description representing the "address" of an integer
13622 value. This can appear only as a top-level expression. */
13623
13624 static dw_loc_descr_ref
13625 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
13626 {
13627 int litsize;
13628 dw_loc_descr_ref loc_result = NULL;
13629
13630 if (!(dwarf_version >= 4 || !dwarf_strict))
13631 return NULL;
13632
13633 litsize = size_of_int_loc_descriptor (i);
13634 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
13635 is more compact. For DW_OP_stack_value we need:
13636 litsize + 1 (DW_OP_stack_value)
13637 and for DW_OP_implicit_value:
13638 1 (DW_OP_implicit_value) + 1 (length) + size. */
13639 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
13640 {
13641 loc_result = int_loc_descriptor (i);
13642 add_loc_descr (&loc_result,
13643 new_loc_descr (DW_OP_stack_value, 0, 0));
13644 return loc_result;
13645 }
13646
13647 loc_result = new_loc_descr (DW_OP_implicit_value,
13648 size, 0);
13649 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
13650 loc_result->dw_loc_oprnd2.v.val_int = i;
13651 return loc_result;
13652 }
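/* E.g. with SIZE == 4 and I == 42 on a target where DWARF2_ADDR_SIZE >= 4:
   the literal costs 2 bytes (DW_OP_const1u 42), so 2 + 1 <= 1 + 1 + 4 and we
   emit DW_OP_const1u 42 DW_OP_stack_value rather than DW_OP_implicit_value.  */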
13653
13654 /* Return a location descriptor that designates a base+offset location. */
13655
13656 static dw_loc_descr_ref
13657 based_loc_descr (rtx reg, poly_int64 offset,
13658 enum var_init_status initialized)
13659 {
13660 unsigned int regno;
13661 dw_loc_descr_ref result;
13662 dw_fde_ref fde = cfun->fde;
13663
13664 /* We only use "frame base" when we're sure we're talking about the
13665 post-prologue local stack frame. We do this by *not* running
13666 register elimination until this point, and recognizing the special
13667 argument pointer and soft frame pointer rtx's. */
13668 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
13669 {
13670 rtx elim = (ira_use_lra_p
13671 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
13672 : eliminate_regs (reg, VOIDmode, NULL_RTX));
13673
13674 if (elim != reg)
13675 {
13676 elim = strip_offset_and_add (elim, &offset);
13677 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
13678 && (elim == hard_frame_pointer_rtx
13679 || elim == stack_pointer_rtx))
13680 || elim == (frame_pointer_needed
13681 ? hard_frame_pointer_rtx
13682 : stack_pointer_rtx));
13683
13684 /* If drap register is used to align stack, use frame
13685 pointer + offset to access stack variables. If stack
13686 is aligned without drap, use stack pointer + offset to
13687 access stack variables. */
13688 if (crtl->stack_realign_tried
13689 && reg == frame_pointer_rtx)
13690 {
13691 int base_reg
13692 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
13693 ? HARD_FRAME_POINTER_REGNUM
13694 : REGNO (elim));
13695 return new_reg_loc_descr (base_reg, offset);
13696 }
13697
13698 gcc_assert (frame_pointer_fb_offset_valid);
13699 offset += frame_pointer_fb_offset;
13700 HOST_WIDE_INT const_offset;
13701 if (offset.is_constant (&const_offset))
13702 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13703 else
13704 {
13705 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
13706 loc_descr_plus_const (&ret, offset);
13707 return ret;
13708 }
13709 }
13710 }
13711
13712 regno = REGNO (reg);
13713 #ifdef LEAF_REG_REMAP
13714 if (crtl->uses_only_leaf_regs)
13715 {
13716 int leaf_reg = LEAF_REG_REMAP (regno);
13717 if (leaf_reg != -1)
13718 regno = (unsigned) leaf_reg;
13719 }
13720 #endif
13721 regno = DWARF_FRAME_REGNUM (regno);
13722
13723 HOST_WIDE_INT const_offset;
13724 if (!optimize && fde
13725 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
13726 && offset.is_constant (&const_offset))
13727 {
13728 /* Use cfa+offset to represent the location of arguments passed
13729 on the stack when drap is used to align stack.
13730 Only do this when not optimizing, for optimized code var-tracking
13731 is supposed to track where the arguments live and the register
13732 used as vdrap or drap in some spot might be used for something
13733 else in other part of the routine. */
13734 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
13735 }
13736
13737 result = new_reg_loc_descr (regno, offset);
13738
13739 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13740 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13741
13742 return result;
13743 }
13744
13745 /* Return true if this RTL expression describes a base+offset calculation. */
13746
13747 static inline int
13748 is_based_loc (const_rtx rtl)
13749 {
13750 return (GET_CODE (rtl) == PLUS
13751 && ((REG_P (XEXP (rtl, 0))
13752 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
13753 && CONST_INT_P (XEXP (rtl, 1)))));
13754 }
13755
13756 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
13757 failed. */
13758
13759 static dw_loc_descr_ref
13760 tls_mem_loc_descriptor (rtx mem)
13761 {
13762 tree base;
13763 dw_loc_descr_ref loc_result;
13764
13765 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
13766 return NULL;
13767
13768 base = get_base_address (MEM_EXPR (mem));
13769 if (base == NULL
13770 || !VAR_P (base)
13771 || !DECL_THREAD_LOCAL_P (base))
13772 return NULL;
13773
13774 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
13775 if (loc_result == NULL)
13776 return NULL;
13777
13778 if (maybe_ne (MEM_OFFSET (mem), 0))
13779 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
13780
13781 return loc_result;
13782 }
13783
13784 /* Output debug info about the reason why we failed to expand an expression
13785 as a DWARF expression. */
13786
13787 static void
13788 expansion_failed (tree expr, rtx rtl, char const *reason)
13789 {
13790 if (dump_file && (dump_flags & TDF_DETAILS))
13791 {
13792 fprintf (dump_file, "Failed to expand as dwarf: ");
13793 if (expr)
13794 print_generic_expr (dump_file, expr, dump_flags);
13795 if (rtl)
13796 {
13797 fprintf (dump_file, "\n");
13798 print_rtl (dump_file, rtl);
13799 }
13800 fprintf (dump_file, "\nReason: %s\n", reason);
13801 }
13802 }
13803
13804 /* Helper function for const_ok_for_output. */
13805
13806 static bool
13807 const_ok_for_output_1 (rtx rtl)
13808 {
13809 if (targetm.const_not_ok_for_debug_p (rtl))
13810 {
13811 if (GET_CODE (rtl) != UNSPEC)
13812 {
13813 expansion_failed (NULL_TREE, rtl,
13814 "Expression rejected for debug by the backend.\n");
13815 return false;
13816 }
13817
13818 /* If delegitimize_address couldn't do anything with the UNSPEC, and
13819 the target hook doesn't explicitly allow it in debug info, assume
13820 we can't express it in the debug info. */
13821 /* Don't complain about TLS UNSPECs, those are just too hard to
13822 delegitimize. Note this could be a non-decl SYMBOL_REF such as
13823 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
13824 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
13825 if (flag_checking
13826 && (XVECLEN (rtl, 0) == 0
13827 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
13828 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
13829 inform (current_function_decl
13830 ? DECL_SOURCE_LOCATION (current_function_decl)
13831 : UNKNOWN_LOCATION,
13832 #if NUM_UNSPEC_VALUES > 0
13833 "non-delegitimized UNSPEC %s (%d) found in variable location",
13834 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
13835 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
13836 XINT (rtl, 1));
13837 #else
13838 "non-delegitimized UNSPEC %d found in variable location",
13839 XINT (rtl, 1));
13840 #endif
13841 expansion_failed (NULL_TREE, rtl,
13842 "UNSPEC hasn't been delegitimized.\n");
13843 return false;
13844 }
13845
13846 if (CONST_POLY_INT_P (rtl))
13847 return false;
13848
13849 if (targetm.const_not_ok_for_debug_p (rtl))
13850 {
13851 expansion_failed (NULL_TREE, rtl,
13852 "Expression rejected for debug by the backend.\n");
13853 return false;
13854 }
13855
13856 /* FIXME: Refer to PR60655. It is possible for simplification
13857 of rtl expressions in var tracking to produce such expressions.
13858 We should really identify / validate expressions
13859 enclosed in CONST that can be handled by assemblers on various
13860 targets and only handle legitimate cases here. */
13861 switch (GET_CODE (rtl))
13862 {
13863 case SYMBOL_REF:
13864 break;
13865 case NOT:
13866 case NEG:
13867 return false;
13868 default:
13869 return true;
13870 }
13871
13872 if (CONSTANT_POOL_ADDRESS_P (rtl))
13873 {
13874 bool marked;
13875 get_pool_constant_mark (rtl, &marked);
13876 /* If all references to this pool constant were optimized away,
13877 it was not output and thus we can't represent it. */
13878 if (!marked)
13879 {
13880 expansion_failed (NULL_TREE, rtl,
13881 "Constant was removed from constant pool.\n");
13882 return false;
13883 }
13884 }
13885
13886 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
13887 return false;
13888
13889 /* Avoid references to external symbols in debug info: on several targets
13890 the linker might even refuse to link when linking a shared library,
13891 and in many other cases the relocations for .debug_info/.debug_loc are
13892 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
13893 to be defined within the same shared library or executable, are fine. */
13894 if (SYMBOL_REF_EXTERNAL_P (rtl))
13895 {
13896 tree decl = SYMBOL_REF_DECL (rtl);
13897
13898 if (decl == NULL || !targetm.binds_local_p (decl))
13899 {
13900 expansion_failed (NULL_TREE, rtl,
13901 "Symbol not defined in current TU.\n");
13902 return false;
13903 }
13904 }
13905
13906 return true;
13907 }
13908
13909 /* Return true if constant RTL can be emitted in DW_OP_addr or
13910 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
13911 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
13912
13913 static bool
13914 const_ok_for_output (rtx rtl)
13915 {
13916 if (GET_CODE (rtl) == SYMBOL_REF)
13917 return const_ok_for_output_1 (rtl);
13918
13919 if (GET_CODE (rtl) == CONST)
13920 {
13921 subrtx_var_iterator::array_type array;
13922 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
13923 if (!const_ok_for_output_1 (*iter))
13924 return false;
13925 return true;
13926 }
13927
13928 return true;
13929 }
13930
13931 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
13932 if possible, NULL otherwise. */
13933
13934 static dw_die_ref
13935 base_type_for_mode (machine_mode mode, bool unsignedp)
13936 {
13937 dw_die_ref type_die;
13938 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
13939
13940 if (type == NULL)
13941 return NULL;
13942 switch (TREE_CODE (type))
13943 {
13944 case INTEGER_TYPE:
13945 case REAL_TYPE:
13946 break;
13947 default:
13948 return NULL;
13949 }
13950 type_die = lookup_type_die (type);
13951 if (!type_die)
13952 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
13953 comp_unit_die ());
13954 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
13955 return NULL;
13956 return type_die;
13957 }
13958
13959 /* For the descriptor OP, assumed to be in unsigned MODE, convert it to an
13960 unsigned type matching MODE, or, if MODE is narrower than or as wide as
13961 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
13962 possible. */
13963
13964 static dw_loc_descr_ref
13965 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
13966 {
13967 machine_mode outer_mode = mode;
13968 dw_die_ref type_die;
13969 dw_loc_descr_ref cvt;
13970
13971 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
13972 {
13973 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
13974 return op;
13975 }
13976 type_die = base_type_for_mode (outer_mode, 1);
13977 if (type_die == NULL)
13978 return NULL;
13979 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
13980 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
13981 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
13982 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
13983 add_loc_descr (&op, cvt);
13984 return op;
13985 }
13986
13987 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
13988
13989 static dw_loc_descr_ref
13990 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
13991 dw_loc_descr_ref op1)
13992 {
13993 dw_loc_descr_ref ret = op0;
13994 add_loc_descr (&ret, op1);
13995 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13996 if (STORE_FLAG_VALUE != 1)
13997 {
13998 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
13999 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14000 }
14001 return ret;
14002 }
14003
14004 /* Subroutine of scompare_loc_descriptor for the case in which we're
14005 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14006 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14007
14008 static dw_loc_descr_ref
14009 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14010 scalar_int_mode op_mode,
14011 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14012 {
14013 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14014 dw_loc_descr_ref cvt;
14015
14016 if (type_die == NULL)
14017 return NULL;
14018 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14019 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14020 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14021 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14022 add_loc_descr (&op0, cvt);
14023 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14024 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14025 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14026 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14027 add_loc_descr (&op1, cvt);
14028 return compare_loc_descriptor (op, op0, op1);
14029 }
14030
14031 /* Subroutine of scompare_loc_descriptor for the case in which we're
14032 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14033 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14034
14035 static dw_loc_descr_ref
14036 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14037 scalar_int_mode op_mode,
14038 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14039 {
14040 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14041 /* For eq/ne, if the operands are known to be zero-extended,
14042 there is no need to do the fancy shifting up. */
14043 if (op == DW_OP_eq || op == DW_OP_ne)
14044 {
14045 dw_loc_descr_ref last0, last1;
14046 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14047 ;
14048 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14049 ;
14050 /* deref_size zero extends, and for constants we can check
14051 whether they are zero extended or not. */
14052 if (((last0->dw_loc_opc == DW_OP_deref_size
14053 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14054 || (CONST_INT_P (XEXP (rtl, 0))
14055 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14056 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14057 && ((last1->dw_loc_opc == DW_OP_deref_size
14058 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14059 || (CONST_INT_P (XEXP (rtl, 1))
14060 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14061 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14062 return compare_loc_descriptor (op, op0, op1);
14063
14064 /* EQ/NE comparison against constant in narrower type than
14065 DWARF2_ADDR_SIZE can be performed either as
14066 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14067 DW_OP_{eq,ne}
14068 or
14069 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14070 DW_OP_{eq,ne}. Pick whatever is shorter. */
14071 if (CONST_INT_P (XEXP (rtl, 1))
14072 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14073 && (size_of_int_loc_descriptor (shift) + 1
14074 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14075 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14076 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14077 & GET_MODE_MASK (op_mode))))
14078 {
14079 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14080 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14081 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14082 & GET_MODE_MASK (op_mode));
14083 return compare_loc_descriptor (op, op0, op1);
14084 }
14085 }
14086 add_loc_descr (&op0, int_loc_descriptor (shift));
14087 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14088 if (CONST_INT_P (XEXP (rtl, 1)))
14089 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14090 else
14091 {
14092 add_loc_descr (&op1, int_loc_descriptor (shift));
14093 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14094 }
14095 return compare_loc_descriptor (op, op0, op1);
14096 }
14097
14098 /* Return location descriptor for signed comparison OP RTL. */
14099
14100 static dw_loc_descr_ref
14101 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14102 machine_mode mem_mode)
14103 {
14104 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14105 dw_loc_descr_ref op0, op1;
14106
14107 if (op_mode == VOIDmode)
14108 op_mode = GET_MODE (XEXP (rtl, 1));
14109 if (op_mode == VOIDmode)
14110 return NULL;
14111
14112 scalar_int_mode int_op_mode;
14113 if (dwarf_strict
14114 && dwarf_version < 5
14115 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14116 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14117 return NULL;
14118
14119 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14120 VAR_INIT_STATUS_INITIALIZED);
14121 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14122 VAR_INIT_STATUS_INITIALIZED);
14123
14124 if (op0 == NULL || op1 == NULL)
14125 return NULL;
14126
14127 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14128 {
14129 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14130 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14131
14132 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14133 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14134 }
14135 return compare_loc_descriptor (op, op0, op1);
14136 }
14137
14138 /* Return location descriptor for unsigned comparison OP RTL. */
14139
14140 static dw_loc_descr_ref
14141 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14142 machine_mode mem_mode)
14143 {
14144 dw_loc_descr_ref op0, op1;
14145
14146 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14147 if (test_op_mode == VOIDmode)
14148 test_op_mode = GET_MODE (XEXP (rtl, 1));
14149
14150 scalar_int_mode op_mode;
14151 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14152 return NULL;
14153
14154 if (dwarf_strict
14155 && dwarf_version < 5
14156 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14157 return NULL;
14158
14159 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14160 VAR_INIT_STATUS_INITIALIZED);
14161 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14162 VAR_INIT_STATUS_INITIALIZED);
14163
14164 if (op0 == NULL || op1 == NULL)
14165 return NULL;
14166
14167 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14168 {
14169 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14170 dw_loc_descr_ref last0, last1;
14171 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14172 ;
14173 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14174 ;
14175 if (CONST_INT_P (XEXP (rtl, 0)))
14176 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14177 /* deref_size zero extends, so no need to mask it again. */
14178 else if (last0->dw_loc_opc != DW_OP_deref_size
14179 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14180 {
14181 add_loc_descr (&op0, int_loc_descriptor (mask));
14182 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14183 }
14184 if (CONST_INT_P (XEXP (rtl, 1)))
14185 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14186 /* deref_size zero extends, so no need to mask it again. */
14187 else if (last1->dw_loc_opc != DW_OP_deref_size
14188 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14189 {
14190 add_loc_descr (&op1, int_loc_descriptor (mask));
14191 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14192 }
14193 }
14194 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14195 {
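/* DWARF comparison operators are signed, so for address-sized operands we
   bias both sides by 0x80...0 (with wrap-around), which maps unsigned order
   onto signed order: e.g. comparing 1 with 0xff...f unsigned, the biased
   values are 0x80...01 (very negative) and 0x7f...f (most positive), so
   DW_OP_lt still reports 1 < 0xff...f. */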
14196 HOST_WIDE_INT bias = 1;
14197 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14198 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14199 if (CONST_INT_P (XEXP (rtl, 1)))
14200 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14201 + INTVAL (XEXP (rtl, 1)));
14202 else
14203 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14204 bias, 0));
14205 }
14206 return compare_loc_descriptor (op, op0, op1);
14207 }
14208
14209 /* Return location descriptor for {U,S}{MIN,MAX}. */
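/* The generated expression looks roughly like
     [op0] DW_OP_dup [op1] DW_OP_swap DW_OP_over <lt-or-gt>
     DW_OP_bra <L1> DW_OP_swap L1: DW_OP_drop
   i.e. both values stay on the stack while (possibly masked, biased or
   shifted) copies are compared, and the branch then drops whichever value
   is not the min/max result. */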
14210
14211 static dw_loc_descr_ref
14212 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14213 machine_mode mem_mode)
14214 {
14215 enum dwarf_location_atom op;
14216 dw_loc_descr_ref op0, op1, ret;
14217 dw_loc_descr_ref bra_node, drop_node;
14218
14219 scalar_int_mode int_mode;
14220 if (dwarf_strict
14221 && dwarf_version < 5
14222 && (!is_a <scalar_int_mode> (mode, &int_mode)
14223 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14224 return NULL;
14225
14226 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14227 VAR_INIT_STATUS_INITIALIZED);
14228 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14229 VAR_INIT_STATUS_INITIALIZED);
14230
14231 if (op0 == NULL || op1 == NULL)
14232 return NULL;
14233
14234 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14235 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14236 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14237 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14238 {
14239 /* Checked by the caller. */
14240 int_mode = as_a <scalar_int_mode> (mode);
14241 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14242 {
14243 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14244 add_loc_descr (&op0, int_loc_descriptor (mask));
14245 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14246 add_loc_descr (&op1, int_loc_descriptor (mask));
14247 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14248 }
14249 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14250 {
14251 HOST_WIDE_INT bias = 1;
14252 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14253 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14254 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14255 }
14256 }
14257 else if (is_a <scalar_int_mode> (mode, &int_mode)
14258 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14259 {
14260 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14261 add_loc_descr (&op0, int_loc_descriptor (shift));
14262 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14263 add_loc_descr (&op1, int_loc_descriptor (shift));
14264 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14265 }
14266 else if (is_a <scalar_int_mode> (mode, &int_mode)
14267 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14268 {
14269 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14270 dw_loc_descr_ref cvt;
14271 if (type_die == NULL)
14272 return NULL;
14273 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14274 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14275 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14276 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14277 add_loc_descr (&op0, cvt);
14278 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14279 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14280 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14281 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14282 add_loc_descr (&op1, cvt);
14283 }
14284
14285 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14286 op = DW_OP_lt;
14287 else
14288 op = DW_OP_gt;
14289 ret = op0;
14290 add_loc_descr (&ret, op1);
14291 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14292 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14293 add_loc_descr (&ret, bra_node);
14294 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14295 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14296 add_loc_descr (&ret, drop_node);
14297 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14298 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14299 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14300 && is_a <scalar_int_mode> (mode, &int_mode)
14301 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14302 ret = convert_descriptor_to_mode (int_mode, ret);
14303 return ret;
14304 }
14305
14306 /* Helper function for mem_loc_descriptor. Perform the OP binary op,
14307 but first convert both arguments to TYPE_DIE, and afterwards
14308 convert the result back to unsigned. */
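/* The emitted sequence is roughly
     [op0] DW_OP_convert <type_die> [op1] DW_OP_convert <type_die> OP
   followed by convert_descriptor_to_mode to get back to an unsigned value
   of MODE. */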
14309
14310 static dw_loc_descr_ref
14311 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14312 scalar_int_mode mode, machine_mode mem_mode)
14313 {
14314 dw_loc_descr_ref cvt, op0, op1;
14315
14316 if (type_die == NULL)
14317 return NULL;
14318 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14319 VAR_INIT_STATUS_INITIALIZED);
14320 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14321 VAR_INIT_STATUS_INITIALIZED);
14322 if (op0 == NULL || op1 == NULL)
14323 return NULL;
14324 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14325 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14326 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14327 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14328 add_loc_descr (&op0, cvt);
14329 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14330 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14331 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14332 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14333 add_loc_descr (&op1, cvt);
14334 add_loc_descr (&op0, op1);
14335 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14336 return convert_descriptor_to_mode (mode, op0);
14337 }
14338
14339 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14340 const0 is DW_OP_lit0 or corresponding typed constant,
14341 const1 is DW_OP_lit1 or corresponding typed constant
14342 and constMSB is constant with just the MSB bit set
14343 for the mode):
14344 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14345 L1: const0 DW_OP_swap
14346 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14347 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14348 L3: DW_OP_drop
14349 L4: DW_OP_nop
14350
14351 CTZ is similar:
14352 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14353 L1: const0 DW_OP_swap
14354 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14355 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14356 L3: DW_OP_drop
14357 L4: DW_OP_nop
14358
14359 FFS is similar:
14360 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14361 L1: const1 DW_OP_swap
14362 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14363 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14364 L3: DW_OP_drop
14365 L4: DW_OP_nop */
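/* As a worked example, CLZ of the QImode value 0x12 (binary 00010010): the
   value is nonzero, so the counter starts at 0 and the loop shifts the
   value left one bit per iteration (0x24, 0x48, 0x90), incrementing the
   counter each time, until constMSB (0x80) is set; three iterations, so
   the result is 3. */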
14366
14367 static dw_loc_descr_ref
14368 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14369 machine_mode mem_mode)
14370 {
14371 dw_loc_descr_ref op0, ret, tmp;
14372 HOST_WIDE_INT valv;
14373 dw_loc_descr_ref l1jump, l1label;
14374 dw_loc_descr_ref l2jump, l2label;
14375 dw_loc_descr_ref l3jump, l3label;
14376 dw_loc_descr_ref l4jump, l4label;
14377 rtx msb;
14378
14379 if (GET_MODE (XEXP (rtl, 0)) != mode)
14380 return NULL;
14381
14382 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14383 VAR_INIT_STATUS_INITIALIZED);
14384 if (op0 == NULL)
14385 return NULL;
14386 ret = op0;
14387 if (GET_CODE (rtl) == CLZ)
14388 {
14389 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14390 valv = GET_MODE_BITSIZE (mode);
14391 }
14392 else if (GET_CODE (rtl) == FFS)
14393 valv = 0;
14394 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14395 valv = GET_MODE_BITSIZE (mode);
14396 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14397 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14398 add_loc_descr (&ret, l1jump);
14399 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14400 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14401 VAR_INIT_STATUS_INITIALIZED);
14402 if (tmp == NULL)
14403 return NULL;
14404 add_loc_descr (&ret, tmp);
14405 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14406 add_loc_descr (&ret, l4jump);
14407 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14408 ? const1_rtx : const0_rtx,
14409 mode, mem_mode,
14410 VAR_INIT_STATUS_INITIALIZED);
14411 if (l1label == NULL)
14412 return NULL;
14413 add_loc_descr (&ret, l1label);
14414 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14415 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14416 add_loc_descr (&ret, l2label);
14417 if (GET_CODE (rtl) != CLZ)
14418 msb = const1_rtx;
14419 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14420 msb = GEN_INT (HOST_WIDE_INT_1U
14421 << (GET_MODE_BITSIZE (mode) - 1));
14422 else
14423 msb = immed_wide_int_const
14424 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14425 GET_MODE_PRECISION (mode)), mode);
14426 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14427 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14428 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14429 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14430 else
14431 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14432 VAR_INIT_STATUS_INITIALIZED);
14433 if (tmp == NULL)
14434 return NULL;
14435 add_loc_descr (&ret, tmp);
14436 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14437 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14438 add_loc_descr (&ret, l3jump);
14439 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14440 VAR_INIT_STATUS_INITIALIZED);
14441 if (tmp == NULL)
14442 return NULL;
14443 add_loc_descr (&ret, tmp);
14444 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14445 ? DW_OP_shl : DW_OP_shr, 0, 0));
14446 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14447 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14448 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14449 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14450 add_loc_descr (&ret, l2jump);
14451 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14452 add_loc_descr (&ret, l3label);
14453 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14454 add_loc_descr (&ret, l4label);
14455 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14456 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14457 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14458 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14459 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14460 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14461 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14462 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14463 return ret;
14464 }
14465
14466 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14467 const1 is DW_OP_lit1 or corresponding typed constant):
14468 const0 DW_OP_swap
14469 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14470 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14471 L2: DW_OP_drop
14472
14473 PARITY is similar:
14474 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
14475 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14476 L2: DW_OP_drop */
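/* As a worked example, POPCOUNT of 5 (binary 101): each iteration adds the
   low bit of the value to the accumulator and shifts the value right by
   one, so the accumulator goes 1, 1, 2 as the value goes 5 -> 2 -> 1 -> 0,
   and the result is 2. PARITY uses DW_OP_xor instead of DW_OP_plus, so it
   yields the low bit of the population count (0 here). */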
14477
14478 static dw_loc_descr_ref
14479 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14480 machine_mode mem_mode)
14481 {
14482 dw_loc_descr_ref op0, ret, tmp;
14483 dw_loc_descr_ref l1jump, l1label;
14484 dw_loc_descr_ref l2jump, l2label;
14485
14486 if (GET_MODE (XEXP (rtl, 0)) != mode)
14487 return NULL;
14488
14489 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14490 VAR_INIT_STATUS_INITIALIZED);
14491 if (op0 == NULL)
14492 return NULL;
14493 ret = op0;
14494 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14495 VAR_INIT_STATUS_INITIALIZED);
14496 if (tmp == NULL)
14497 return NULL;
14498 add_loc_descr (&ret, tmp);
14499 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14500 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14501 add_loc_descr (&ret, l1label);
14502 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14503 add_loc_descr (&ret, l2jump);
14504 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14505 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14506 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14507 VAR_INIT_STATUS_INITIALIZED);
14508 if (tmp == NULL)
14509 return NULL;
14510 add_loc_descr (&ret, tmp);
14511 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14512 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
14513 ? DW_OP_plus : DW_OP_xor, 0, 0));
14514 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14515 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14516 VAR_INIT_STATUS_INITIALIZED);
14517 add_loc_descr (&ret, tmp);
14518 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14519 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14520 add_loc_descr (&ret, l1jump);
14521 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14522 add_loc_descr (&ret, l2label);
14523 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14524 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14525 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14526 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14527 return ret;
14528 }
14529
14530 /* BSWAP (constS is initial shift count, either 56 or 24):
14531 constS const0
14532 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
14533 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
14534 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
14535 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
14536 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
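/* As a worked example, for the SImode value 0x12345678 the loop steps the
   shift count S through 24, 16, 8, 0, extracts byte (v >> (24 - S)) & 0xff
   on each iteration and ORs it into the result at position S, producing
   0x78563412. */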
14537
14538 static dw_loc_descr_ref
14539 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
14540 machine_mode mem_mode)
14541 {
14542 dw_loc_descr_ref op0, ret, tmp;
14543 dw_loc_descr_ref l1jump, l1label;
14544 dw_loc_descr_ref l2jump, l2label;
14545
14546 if (BITS_PER_UNIT != 8
14547 || (GET_MODE_BITSIZE (mode) != 32
14548 && GET_MODE_BITSIZE (mode) != 64))
14549 return NULL;
14550
14551 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14552 VAR_INIT_STATUS_INITIALIZED);
14553 if (op0 == NULL)
14554 return NULL;
14555
14556 ret = op0;
14557 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14558 mode, mem_mode,
14559 VAR_INIT_STATUS_INITIALIZED);
14560 if (tmp == NULL)
14561 return NULL;
14562 add_loc_descr (&ret, tmp);
14563 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14564 VAR_INIT_STATUS_INITIALIZED);
14565 if (tmp == NULL)
14566 return NULL;
14567 add_loc_descr (&ret, tmp);
14568 l1label = new_loc_descr (DW_OP_pick, 2, 0);
14569 add_loc_descr (&ret, l1label);
14570 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
14571 mode, mem_mode,
14572 VAR_INIT_STATUS_INITIALIZED);
14573 add_loc_descr (&ret, tmp);
14574 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
14575 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14576 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14577 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
14578 VAR_INIT_STATUS_INITIALIZED);
14579 if (tmp == NULL)
14580 return NULL;
14581 add_loc_descr (&ret, tmp);
14582 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14583 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
14584 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14585 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14586 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14587 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14588 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14589 VAR_INIT_STATUS_INITIALIZED);
14590 add_loc_descr (&ret, tmp);
14591 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14592 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
14593 add_loc_descr (&ret, l2jump);
14594 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
14595 VAR_INIT_STATUS_INITIALIZED);
14596 add_loc_descr (&ret, tmp);
14597 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
14598 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14599 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
14600 add_loc_descr (&ret, l1jump);
14601 l2label = new_loc_descr (DW_OP_drop, 0, 0);
14602 add_loc_descr (&ret, l2label);
14603 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14604 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14605 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14606 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14607 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14608 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14609 return ret;
14610 }
14611
14612 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
14613 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14614 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
14615 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
14616
14617 ROTATERT is similar:
14618 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
14619 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
14620 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
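/* As a worked example, rotating the QImode value 0xb4 left by 3 computes
   ((0xb4 << 3) & 0xff) | (0xb4 >> (8 - 3)) = 0xa0 | 0x05 = 0xa5. */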
14621
14622 static dw_loc_descr_ref
14623 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
14624 machine_mode mem_mode)
14625 {
14626 rtx rtlop1 = XEXP (rtl, 1);
14627 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
14628 int i;
14629
14630 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
14631 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
14632 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14633 VAR_INIT_STATUS_INITIALIZED);
14634 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
14635 VAR_INIT_STATUS_INITIALIZED);
14636 if (op0 == NULL || op1 == NULL)
14637 return NULL;
14638 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
14639 for (i = 0; i < 2; i++)
14640 {
14641 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
14642 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
14643 mode, mem_mode,
14644 VAR_INIT_STATUS_INITIALIZED);
14645 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
14646 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14647 ? DW_OP_const4u
14648 : HOST_BITS_PER_WIDE_INT == 64
14649 ? DW_OP_const8u : DW_OP_constu,
14650 GET_MODE_MASK (mode), 0);
14651 else
14652 mask[i] = NULL;
14653 if (mask[i] == NULL)
14654 return NULL;
14655 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
14656 }
14657 ret = op0;
14658 add_loc_descr (&ret, op1);
14659 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14660 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14661 if (GET_CODE (rtl) == ROTATERT)
14662 {
14663 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14664 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14665 GET_MODE_BITSIZE (mode), 0));
14666 }
14667 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14668 if (mask[0] != NULL)
14669 add_loc_descr (&ret, mask[0]);
14670 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
14671 if (mask[1] != NULL)
14672 {
14673 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14674 add_loc_descr (&ret, mask[1]);
14675 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14676 }
14677 if (GET_CODE (rtl) == ROTATE)
14678 {
14679 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14680 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
14681 GET_MODE_BITSIZE (mode), 0));
14682 }
14683 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14684 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
14685 return ret;
14686 }
14687
14688 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
14689 for DEBUG_PARAMETER_REF RTL. */
14690
14691 static dw_loc_descr_ref
14692 parameter_ref_descriptor (rtx rtl)
14693 {
14694 dw_loc_descr_ref ret;
14695 dw_die_ref ref;
14696
14697 if (dwarf_strict)
14698 return NULL;
14699 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
14700 /* With LTO during LTRANS we get the late DIE that refers to the early
14701 DIE, thus we add another indirection here. This seems to confuse
14702 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
14703 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
14704 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
14705 if (ref)
14706 {
14707 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14708 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
14709 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
14710 }
14711 else
14712 {
14713 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
14714 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
14715 }
14716 return ret;
14717 }
14718
14719 /* The following routine converts the RTL for a variable or parameter
14720 (resident in memory) into an equivalent Dwarf representation of a
14721 mechanism for getting the address of that same variable onto the top of a
14722 hypothetical "address evaluation" stack.
14723
14724 When creating memory location descriptors, we are effectively transforming
14725 the RTL for a memory-resident object into its Dwarf postfix expression
14726 equivalent. This routine recursively descends an RTL tree, turning
14727 it into Dwarf postfix code as it goes.
14728
14729 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
14730
14731 MEM_MODE is the mode of the memory reference, needed to handle some
14732 autoincrement addressing modes.
14733
14734 Return 0 if we can't represent the location. */
14735
14736 dw_loc_descr_ref
14737 mem_loc_descriptor (rtx rtl, machine_mode mode,
14738 machine_mode mem_mode,
14739 enum var_init_status initialized)
14740 {
14741 dw_loc_descr_ref mem_loc_result = NULL;
14742 enum dwarf_location_atom op;
14743 dw_loc_descr_ref op0, op1;
14744 rtx inner = NULL_RTX;
14745 poly_int64 offset;
14746
14747 if (mode == VOIDmode)
14748 mode = GET_MODE (rtl);
14749
14750 /* Note that for a dynamically sized array, the location we will generate a
14751 description of here will be the lowest numbered location which is
14752 actually within the array. That's *not* necessarily the same as the
14753 zeroth element of the array. */
14754
14755 rtl = targetm.delegitimize_address (rtl);
14756
14757 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
14758 return NULL;
14759
14760 scalar_int_mode int_mode, inner_mode, op1_mode;
14761 switch (GET_CODE (rtl))
14762 {
14763 case POST_INC:
14764 case POST_DEC:
14765 case POST_MODIFY:
14766 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
14767
14768 case SUBREG:
14769 /* The case of a subreg may arise when we have a local (register)
14770 variable or a formal (register) parameter which doesn't quite fill
14771 up an entire register. For now, just assume that it is
14772 legitimate to make the Dwarf info refer to the whole register which
14773 contains the given subreg. */
14774 if (!subreg_lowpart_p (rtl))
14775 break;
14776 inner = SUBREG_REG (rtl);
14777 /* FALLTHRU */
14778 case TRUNCATE:
14779 if (inner == NULL_RTX)
14780 inner = XEXP (rtl, 0);
14781 if (is_a <scalar_int_mode> (mode, &int_mode)
14782 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14783 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14784 #ifdef POINTERS_EXTEND_UNSIGNED
14785 || (int_mode == Pmode && mem_mode != VOIDmode)
14786 #endif
14787 )
14788 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
14789 {
14790 mem_loc_result = mem_loc_descriptor (inner,
14791 inner_mode,
14792 mem_mode, initialized);
14793 break;
14794 }
14795 if (dwarf_strict && dwarf_version < 5)
14796 break;
14797 if (is_a <scalar_int_mode> (mode, &int_mode)
14798 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
14799 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
14800 : GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (inner)))
14801 {
14802 dw_die_ref type_die;
14803 dw_loc_descr_ref cvt;
14804
14805 mem_loc_result = mem_loc_descriptor (inner,
14806 GET_MODE (inner),
14807 mem_mode, initialized);
14808 if (mem_loc_result == NULL)
14809 break;
14810 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14811 if (type_die == NULL)
14812 {
14813 mem_loc_result = NULL;
14814 break;
14815 }
14816 if (GET_MODE_SIZE (mode)
14817 != GET_MODE_SIZE (GET_MODE (inner)))
14818 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14819 else
14820 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
14821 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14822 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14823 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14824 add_loc_descr (&mem_loc_result, cvt);
14825 if (is_a <scalar_int_mode> (mode, &int_mode)
14826 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14827 {
14828 /* Convert it to untyped afterwards. */
14829 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14830 add_loc_descr (&mem_loc_result, cvt);
14831 }
14832 }
14833 break;
14834
14835 case REG:
14836 if (!is_a <scalar_int_mode> (mode, &int_mode)
14837 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
14838 && rtl != arg_pointer_rtx
14839 && rtl != frame_pointer_rtx
14840 #ifdef POINTERS_EXTEND_UNSIGNED
14841 && (int_mode != Pmode || mem_mode == VOIDmode)
14842 #endif
14843 ))
14844 {
14845 dw_die_ref type_die;
14846 unsigned int dbx_regnum;
14847
14848 if (dwarf_strict && dwarf_version < 5)
14849 break;
14850 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
14851 break;
14852 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14853 if (type_die == NULL)
14854 break;
14855
14856 dbx_regnum = dbx_reg_number (rtl);
14857 if (dbx_regnum == IGNORED_DWARF_REGNUM)
14858 break;
14859 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
14860 dbx_regnum, 0);
14861 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14862 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14863 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
14864 break;
14865 }
14866 /* Whenever a register number forms a part of the description of the
14867 method for calculating the (dynamic) address of a memory resident
14868 object, DWARF rules require the register number be referred to as
14869 a "base register". This distinction is not based in any way upon
14870 what category of register the hardware believes the given register
14871 belongs to. This is strictly DWARF terminology we're dealing with
14872 here. Note that in cases where the location of a memory-resident
14873 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
14874 OP_CONST (0)) the actual DWARF location descriptor that we generate
14875 may just be OP_BASEREG (basereg). This may look deceptively like
14876 the object in question was allocated to a register (rather than in
14877 memory) so DWARF consumers need to be aware of the subtle
14878 distinction between OP_REG and OP_BASEREG. */
14879 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
14880 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
14881 else if (stack_realign_drap
14882 && crtl->drap_reg
14883 && crtl->args.internal_arg_pointer == rtl
14884 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
14885 {
14886 /* If RTL is internal_arg_pointer, which has been optimized
14887 out, use DRAP instead. */
14888 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
14889 VAR_INIT_STATUS_INITIALIZED);
14890 }
14891 break;
14892
14893 case SIGN_EXTEND:
14894 case ZERO_EXTEND:
14895 if (!is_a <scalar_int_mode> (mode, &int_mode)
14896 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
14897 break;
14898 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
14899 mem_mode, VAR_INIT_STATUS_INITIALIZED);
14900 if (op0 == 0)
14901 break;
14902 else if (GET_CODE (rtl) == ZERO_EXTEND
14903 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
14904 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
14905 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
14906 to expand zero extend as two shifts instead of
14907 masking. */
14908 && GET_MODE_SIZE (inner_mode) <= 4)
14909 {
14910 mem_loc_result = op0;
14911 add_loc_descr (&mem_loc_result,
14912 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
14913 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
14914 }
14915 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
14916 {
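/* Extend by shifting the value into the top of the address-sized stack
   slot and back down again: DW_OP_shr zero-fills, so shifting back with it
   zero-extends, while DW_OP_shra replicates the sign bit and therefore
   sign-extends. */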
14917 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
14918 shift *= BITS_PER_UNIT;
14919 if (GET_CODE (rtl) == SIGN_EXTEND)
14920 op = DW_OP_shra;
14921 else
14922 op = DW_OP_shr;
14923 mem_loc_result = op0;
14924 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14925 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
14926 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
14927 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
14928 }
14929 else if (!dwarf_strict || dwarf_version >= 5)
14930 {
14931 dw_die_ref type_die1, type_die2;
14932 dw_loc_descr_ref cvt;
14933
14934 type_die1 = base_type_for_mode (inner_mode,
14935 GET_CODE (rtl) == ZERO_EXTEND);
14936 if (type_die1 == NULL)
14937 break;
14938 type_die2 = base_type_for_mode (int_mode, 1);
14939 if (type_die2 == NULL)
14940 break;
14941 mem_loc_result = op0;
14942 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14943 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14944 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
14945 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14946 add_loc_descr (&mem_loc_result, cvt);
14947 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14948 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14949 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
14950 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14951 add_loc_descr (&mem_loc_result, cvt);
14952 }
14953 break;
14954
14955 case MEM:
14956 {
14957 rtx new_rtl = avoid_constant_pool_reference (rtl);
14958 if (new_rtl != rtl)
14959 {
14960 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
14961 initialized);
14962 if (mem_loc_result != NULL)
14963 return mem_loc_result;
14964 }
14965 }
14966 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
14967 get_address_mode (rtl), mode,
14968 VAR_INIT_STATUS_INITIALIZED);
14969 if (mem_loc_result == NULL)
14970 mem_loc_result = tls_mem_loc_descriptor (rtl);
14971 if (mem_loc_result != NULL)
14972 {
14973 if (!is_a <scalar_int_mode> (mode, &int_mode)
14974 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14975 {
14976 dw_die_ref type_die;
14977 dw_loc_descr_ref deref;
14978
14979 if (dwarf_strict && dwarf_version < 5)
14980 return NULL;
14981 type_die
14982 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
14983 if (type_die == NULL)
14984 return NULL;
14985 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type),
14986 GET_MODE_SIZE (mode), 0);
14987 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
14988 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
14989 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
14990 add_loc_descr (&mem_loc_result, deref);
14991 }
14992 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14993 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
14994 else
14995 add_loc_descr (&mem_loc_result,
14996 new_loc_descr (DW_OP_deref_size,
14997 GET_MODE_SIZE (int_mode), 0));
14998 }
14999 break;
15000
15001 case LO_SUM:
15002 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15003
15004 case LABEL_REF:
15005 /* Some ports can transform a symbol ref into a label ref, because
15006 the symbol ref is too far away and has to be dumped into a constant
15007 pool. */
15008 case CONST:
15009 case SYMBOL_REF:
15010 if (!is_a <scalar_int_mode> (mode, &int_mode)
15011 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15012 #ifdef POINTERS_EXTEND_UNSIGNED
15013 && (int_mode != Pmode || mem_mode == VOIDmode)
15014 #endif
15015 ))
15016 break;
15017 if (GET_CODE (rtl) == SYMBOL_REF
15018 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15019 {
15020 dw_loc_descr_ref temp;
15021
15022 /* If this is not defined, we have no way to emit the data. */
15023 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15024 break;
15025
15026 temp = new_addr_loc_descr (rtl, dtprel_true);
15027
15028 /* We check for DWARF 5 here because gdb did not implement
15029 DW_OP_form_tls_address until after 7.12. */
15030 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15031 ? DW_OP_form_tls_address
15032 : DW_OP_GNU_push_tls_address),
15033 0, 0);
15034 add_loc_descr (&mem_loc_result, temp);
15035
15036 break;
15037 }
15038
15039 if (!const_ok_for_output (rtl))
15040 {
15041 if (GET_CODE (rtl) == CONST)
15042 switch (GET_CODE (XEXP (rtl, 0)))
15043 {
15044 case NOT:
15045 op = DW_OP_not;
15046 goto try_const_unop;
15047 case NEG:
15048 op = DW_OP_neg;
15049 goto try_const_unop;
15050 try_const_unop:
15051 rtx arg;
15052 arg = XEXP (XEXP (rtl, 0), 0);
15053 if (!CONSTANT_P (arg))
15054 arg = gen_rtx_CONST (int_mode, arg);
15055 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15056 initialized);
15057 if (op0)
15058 {
15059 mem_loc_result = op0;
15060 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15061 }
15062 break;
15063 default:
15064 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15065 mem_mode, initialized);
15066 break;
15067 }
15068 break;
15069 }
15070
15071 symref:
15072 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15073 vec_safe_push (used_rtx_array, rtl);
15074 break;
15075
15076 case CONCAT:
15077 case CONCATN:
15078 case VAR_LOCATION:
15079 case DEBUG_IMPLICIT_PTR:
15080 expansion_failed (NULL_TREE, rtl,
15081 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15082 return 0;
15083
15084 case ENTRY_VALUE:
15085 if (dwarf_strict && dwarf_version < 5)
15086 return NULL;
15087 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15088 {
15089 if (!is_a <scalar_int_mode> (mode, &int_mode)
15090 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15091 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15092 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15093 else
15094 {
15095 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15096 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15097 return NULL;
15098 op0 = one_reg_loc_descriptor (dbx_regnum,
15099 VAR_INIT_STATUS_INITIALIZED);
15100 }
15101 }
15102 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15103 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15104 {
15105 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15106 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15107 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15108 return NULL;
15109 }
15110 else
15111 gcc_unreachable ();
15112 if (op0 == NULL)
15113 return NULL;
15114 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15115 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15116 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15117 break;
15118
15119 case DEBUG_PARAMETER_REF:
15120 mem_loc_result = parameter_ref_descriptor (rtl);
15121 break;
15122
15123 case PRE_MODIFY:
15124 /* Extract the PLUS expression nested inside and fall into
15125 PLUS code below. */
15126 rtl = XEXP (rtl, 1);
15127 goto plus;
15128
15129 case PRE_INC:
15130 case PRE_DEC:
15131 /* Turn these into a PLUS expression and fall into the PLUS code
15132 below. */
15133 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15134 gen_int_mode (GET_CODE (rtl) == PRE_INC
15135 ? GET_MODE_UNIT_SIZE (mem_mode)
15136 : -GET_MODE_UNIT_SIZE (mem_mode),
15137 mode));
15138
15139 /* fall through */
15140
15141 case PLUS:
15142 plus:
15143 if (is_based_loc (rtl)
15144 && is_a <scalar_int_mode> (mode, &int_mode)
15145 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15146 || XEXP (rtl, 0) == arg_pointer_rtx
15147 || XEXP (rtl, 0) == frame_pointer_rtx))
15148 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15149 INTVAL (XEXP (rtl, 1)),
15150 VAR_INIT_STATUS_INITIALIZED);
15151 else
15152 {
15153 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15154 VAR_INIT_STATUS_INITIALIZED);
15155 if (mem_loc_result == 0)
15156 break;
15157
15158 if (CONST_INT_P (XEXP (rtl, 1))
15159 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15160 <= DWARF2_ADDR_SIZE))
15161 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15162 else
15163 {
15164 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15165 VAR_INIT_STATUS_INITIALIZED);
15166 if (op1 == 0)
15167 return NULL;
15168 add_loc_descr (&mem_loc_result, op1);
15169 add_loc_descr (&mem_loc_result,
15170 new_loc_descr (DW_OP_plus, 0, 0));
15171 }
15172 }
15173 break;
15174
15175 /* If a pseudo-reg is optimized away, it is possible for it to
15176 be replaced with a MEM containing a multiply or shift. */
15177 case MINUS:
15178 op = DW_OP_minus;
15179 goto do_binop;
15180
15181 case MULT:
15182 op = DW_OP_mul;
15183 goto do_binop;
15184
15185 case DIV:
15186 if ((!dwarf_strict || dwarf_version >= 5)
15187 && is_a <scalar_int_mode> (mode, &int_mode)
15188 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15189 {
15190 mem_loc_result = typed_binop (DW_OP_div, rtl,
15191 base_type_for_mode (mode, 0),
15192 int_mode, mem_mode);
15193 break;
15194 }
15195 op = DW_OP_div;
15196 goto do_binop;
15197
15198 case UMOD:
15199 op = DW_OP_mod;
15200 goto do_binop;
15201
15202 case ASHIFT:
15203 op = DW_OP_shl;
15204 goto do_shift;
15205
15206 case ASHIFTRT:
15207 op = DW_OP_shra;
15208 goto do_shift;
15209
15210 case LSHIFTRT:
15211 op = DW_OP_shr;
15212 goto do_shift;
15213
15214 do_shift:
15215 if (!is_a <scalar_int_mode> (mode, &int_mode))
15216 break;
15217 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15218 VAR_INIT_STATUS_INITIALIZED);
15219 {
15220 rtx rtlop1 = XEXP (rtl, 1);
15221 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15222 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15223 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15224 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 }
15227
15228 if (op0 == 0 || op1 == 0)
15229 break;
15230
15231 mem_loc_result = op0;
15232 add_loc_descr (&mem_loc_result, op1);
15233 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15234 break;
15235
15236 case AND:
15237 op = DW_OP_and;
15238 goto do_binop;
15239
15240 case IOR:
15241 op = DW_OP_or;
15242 goto do_binop;
15243
15244 case XOR:
15245 op = DW_OP_xor;
15246 goto do_binop;
15247
15248 do_binop:
15249 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15250 VAR_INIT_STATUS_INITIALIZED);
15251 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15252 VAR_INIT_STATUS_INITIALIZED);
15253
15254 if (op0 == 0 || op1 == 0)
15255 break;
15256
15257 mem_loc_result = op0;
15258 add_loc_descr (&mem_loc_result, op1);
15259 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15260 break;
15261
15262 case MOD:
15263 if ((!dwarf_strict || dwarf_version >= 5)
15264 && is_a <scalar_int_mode> (mode, &int_mode)
15265 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15266 {
15267 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15268 base_type_for_mode (mode, 0),
15269 int_mode, mem_mode);
15270 break;
15271 }
15272
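/* UMOD above maps directly to DW_OP_mod; signed MOD is instead open-coded
   as op0 - (op0 / op1) * op1 using the signed DW_OP_div, via the
   DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus sequence below. */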
15273 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277
15278 if (op0 == 0 || op1 == 0)
15279 break;
15280
15281 mem_loc_result = op0;
15282 add_loc_descr (&mem_loc_result, op1);
15283 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15284 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15285 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15286 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15287 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15288 break;
15289
15290 case UDIV:
15291 if ((!dwarf_strict || dwarf_version >= 5)
15292 && is_a <scalar_int_mode> (mode, &int_mode))
15293 {
15294 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15295 {
15296 op = DW_OP_div;
15297 goto do_binop;
15298 }
15299 mem_loc_result = typed_binop (DW_OP_div, rtl,
15300 base_type_for_mode (int_mode, 1),
15301 int_mode, mem_mode);
15302 }
15303 break;
15304
15305 case NOT:
15306 op = DW_OP_not;
15307 goto do_unop;
15308
15309 case ABS:
15310 op = DW_OP_abs;
15311 goto do_unop;
15312
15313 case NEG:
15314 op = DW_OP_neg;
15315 goto do_unop;
15316
15317 do_unop:
15318 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15319 VAR_INIT_STATUS_INITIALIZED);
15320
15321 if (op0 == 0)
15322 break;
15323
15324 mem_loc_result = op0;
15325 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15326 break;
15327
15328 case CONST_INT:
15329 if (!is_a <scalar_int_mode> (mode, &int_mode)
15330 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15331 #ifdef POINTERS_EXTEND_UNSIGNED
15332 || (int_mode == Pmode
15333 && mem_mode != VOIDmode
15334 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15335 #endif
15336 )
15337 {
15338 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15339 break;
15340 }
15341 if ((!dwarf_strict || dwarf_version >= 5)
15342 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15343 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15344 {
15345 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15346 scalar_int_mode amode;
15347 if (type_die == NULL)
15348 return NULL;
15349 if (INTVAL (rtl) >= 0
15350 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15351 .exists (&amode))
15352 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15353 /* const DW_OP_convert <XXX> vs.
15354 DW_OP_const_type <XXX, 1, const>. */
15355 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15356 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15357 {
15358 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15359 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15360 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15361 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15362 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15363 add_loc_descr (&mem_loc_result, op0);
15364 return mem_loc_result;
15365 }
15366 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15367 INTVAL (rtl));
15368 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15369 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15370 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15371 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15372 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15373 else
15374 {
15375 mem_loc_result->dw_loc_oprnd2.val_class
15376 = dw_val_class_const_double;
15377 mem_loc_result->dw_loc_oprnd2.v.val_double
15378 = double_int::from_shwi (INTVAL (rtl));
15379 }
15380 }
15381 break;
15382
15383 case CONST_DOUBLE:
15384 if (!dwarf_strict || dwarf_version >= 5)
15385 {
15386 dw_die_ref type_die;
15387
15388 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15389 CONST_DOUBLE rtx could represent either a large integer
15390 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15391 the value is always a floating point constant.
15392
15393 When it is an integer, a CONST_DOUBLE is used whenever
15394 the constant requires 2 HWIs to be adequately represented.
15395 We output CONST_DOUBLEs as blocks. */
15396 if (mode == VOIDmode
15397 || (GET_MODE (rtl) == VOIDmode
15398 && maybe_ne (GET_MODE_BITSIZE (mode),
15399 HOST_BITS_PER_DOUBLE_INT)))
15400 break;
15401 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15402 if (type_die == NULL)
15403 return NULL;
15404 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15405 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15406 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15407 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15408 #if TARGET_SUPPORTS_WIDE_INT == 0
15409 if (!SCALAR_FLOAT_MODE_P (mode))
15410 {
15411 mem_loc_result->dw_loc_oprnd2.val_class
15412 = dw_val_class_const_double;
15413 mem_loc_result->dw_loc_oprnd2.v.val_double
15414 = rtx_to_double_int (rtl);
15415 }
15416 else
15417 #endif
15418 {
15419 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15420 unsigned int length = GET_MODE_SIZE (float_mode);
15421 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15422
15423 insert_float (rtl, array);
15424 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15425 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15426 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15427 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15428 }
15429 }
15430 break;
15431
15432 case CONST_WIDE_INT:
15433 if (!dwarf_strict || dwarf_version >= 5)
15434 {
15435 dw_die_ref type_die;
15436
15437 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15438 if (type_die == NULL)
15439 return NULL;
15440 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15441 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15442 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15443 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15444 mem_loc_result->dw_loc_oprnd2.val_class
15445 = dw_val_class_wide_int;
15446 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15447 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15448 }
15449 break;
15450
15451 case CONST_POLY_INT:
15452 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
15453 break;
15454
15455 case EQ:
15456 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15457 break;
15458
15459 case GE:
15460 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15461 break;
15462
15463 case GT:
15464 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15465 break;
15466
15467 case LE:
15468 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15469 break;
15470
15471 case LT:
15472 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15473 break;
15474
15475 case NE:
15476 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15477 break;
15478
15479 case GEU:
15480 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15481 break;
15482
15483 case GTU:
15484 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15485 break;
15486
15487 case LEU:
15488 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15489 break;
15490
15491 case LTU:
15492 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15493 break;
15494
15495 case UMIN:
15496 case UMAX:
15497 if (!SCALAR_INT_MODE_P (mode))
15498 break;
15499 /* FALLTHRU */
15500 case SMIN:
15501 case SMAX:
15502 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
15503 break;
15504
15505 case ZERO_EXTRACT:
15506 case SIGN_EXTRACT:
15507 if (CONST_INT_P (XEXP (rtl, 1))
15508 && CONST_INT_P (XEXP (rtl, 2))
15509 && is_a <scalar_int_mode> (mode, &int_mode)
15510 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
15511 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15512 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
15513 && ((unsigned) INTVAL (XEXP (rtl, 1))
15514 + (unsigned) INTVAL (XEXP (rtl, 2))
15515 <= GET_MODE_BITSIZE (int_mode)))
15516 {
15517 int shift, size;
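/* The field is moved to the top of the address-sized slot with DW_OP_shl
   and then shifted back down with DW_OP_shr (ZERO_EXTRACT) or DW_OP_shra
   (SIGN_EXTRACT), which zero- or sign-extends it into the result. */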
15518 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15519 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15520 if (op0 == 0)
15521 break;
15522 if (GET_CODE (rtl) == SIGN_EXTRACT)
15523 op = DW_OP_shra;
15524 else
15525 op = DW_OP_shr;
15526 mem_loc_result = op0;
15527 size = INTVAL (XEXP (rtl, 1));
15528 shift = INTVAL (XEXP (rtl, 2));
15529 if (BITS_BIG_ENDIAN)
15530 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
15531 if (shift + size != (int) DWARF2_ADDR_SIZE)
15532 {
15533 add_loc_descr (&mem_loc_result,
15534 int_loc_descriptor (DWARF2_ADDR_SIZE
15535 - shift - size));
15536 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15537 }
15538 if (size != (int) DWARF2_ADDR_SIZE)
15539 {
15540 add_loc_descr (&mem_loc_result,
15541 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
15542 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15543 }
15544 }
15545 break;
15546
15547 case IF_THEN_ELSE:
15548 {
15549 dw_loc_descr_ref op2, bra_node, drop_node;
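/* Push the "then" value, the "else" value and the condition; DW_OP_bra
   then either drops the "else" value directly (condition nonzero) or swaps
   first so that the "then" value is dropped instead. */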
15550 op0 = mem_loc_descriptor (XEXP (rtl, 0),
15551 GET_MODE (XEXP (rtl, 0)) == VOIDmode
15552 ? word_mode : GET_MODE (XEXP (rtl, 0)),
15553 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15554 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15555 VAR_INIT_STATUS_INITIALIZED);
15556 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
15557 VAR_INIT_STATUS_INITIALIZED);
15558 if (op0 == NULL || op1 == NULL || op2 == NULL)
15559 break;
15560
15561 mem_loc_result = op1;
15562 add_loc_descr (&mem_loc_result, op2);
15563 add_loc_descr (&mem_loc_result, op0);
15564 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15565 add_loc_descr (&mem_loc_result, bra_node);
15566 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
15567 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15568 add_loc_descr (&mem_loc_result, drop_node);
15569 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15570 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15571 }
15572 break;
15573
15574 case FLOAT_EXTEND:
15575 case FLOAT_TRUNCATE:
15576 case FLOAT:
15577 case UNSIGNED_FLOAT:
15578 case FIX:
15579 case UNSIGNED_FIX:
15580 if (!dwarf_strict || dwarf_version >= 5)
15581 {
15582 dw_die_ref type_die;
15583 dw_loc_descr_ref cvt;
15584
15585 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
15586 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15587 if (op0 == NULL)
15588 break;
15589 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
15590 && (GET_CODE (rtl) == FLOAT
15591 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
15592 {
15593 type_die = base_type_for_mode (int_mode,
15594 GET_CODE (rtl) == UNSIGNED_FLOAT);
15595 if (type_die == NULL)
15596 break;
15597 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15598 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15599 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15600 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15601 add_loc_descr (&op0, cvt);
15602 }
15603 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
15604 if (type_die == NULL)
15605 break;
15606 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15607 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15608 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15609 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15610 add_loc_descr (&op0, cvt);
15611 if (is_a <scalar_int_mode> (mode, &int_mode)
15612 && (GET_CODE (rtl) == FIX
15613 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
15614 {
15615 op0 = convert_descriptor_to_mode (int_mode, op0);
15616 if (op0 == NULL)
15617 break;
15618 }
15619 mem_loc_result = op0;
15620 }
15621 break;
15622
15623 case CLZ:
15624 case CTZ:
15625 case FFS:
15626 if (is_a <scalar_int_mode> (mode, &int_mode))
15627 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
15628 break;
15629
15630 case POPCOUNT:
15631 case PARITY:
15632 if (is_a <scalar_int_mode> (mode, &int_mode))
15633 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
15634 break;
15635
15636 case BSWAP:
15637 if (is_a <scalar_int_mode> (mode, &int_mode))
15638 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
15639 break;
15640
15641 case ROTATE:
15642 case ROTATERT:
15643 if (is_a <scalar_int_mode> (mode, &int_mode))
15644 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
15645 break;
15646
15647 case COMPARE:
15648 /* In theory, we could implement the above. */
15649 /* DWARF cannot represent the unsigned compare operations
15650 natively. */
15651 case SS_MULT:
15652 case US_MULT:
15653 case SS_DIV:
15654 case US_DIV:
15655 case SS_PLUS:
15656 case US_PLUS:
15657 case SS_MINUS:
15658 case US_MINUS:
15659 case SS_NEG:
15660 case US_NEG:
15661 case SS_ABS:
15662 case SS_ASHIFT:
15663 case US_ASHIFT:
15664 case SS_TRUNCATE:
15665 case US_TRUNCATE:
15666 case UNORDERED:
15667 case ORDERED:
15668 case UNEQ:
15669 case UNGE:
15670 case UNGT:
15671 case UNLE:
15672 case UNLT:
15673 case LTGT:
15674 case FRACT_CONVERT:
15675 case UNSIGNED_FRACT_CONVERT:
15676 case SAT_FRACT:
15677 case UNSIGNED_SAT_FRACT:
15678 case SQRT:
15679 case ASM_OPERANDS:
15680 case VEC_MERGE:
15681 case VEC_SELECT:
15682 case VEC_CONCAT:
15683 case VEC_DUPLICATE:
15684 case VEC_SERIES:
15685 case UNSPEC:
15686 case HIGH:
15687 case FMA:
15688 case STRICT_LOW_PART:
15689 case CONST_VECTOR:
15690 case CONST_FIXED:
15691 case CLRSB:
15692 case CLOBBER:
15693 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15694 can't express it in the debug info. This can happen e.g. with some
15695 TLS UNSPECs. */
15696 break;
15697
15698 case CONST_STRING:
15699 resolve_one_addr (&rtl);
15700 goto symref;
15701
15702 /* RTL sequences inside PARALLEL record a series of DWARF operations for
15703 the expression. An UNSPEC rtx represents a raw DWARF operation;
15704 new_loc_descr is called for it to build the operation directly.
15705 Otherwise mem_loc_descriptor is called recursively. */
15706 case PARALLEL:
15707 {
15708 int index = 0;
15709 dw_loc_descr_ref exp_result = NULL;
15710
15711 for (; index < XVECLEN (rtl, 0); index++)
15712 {
15713 rtx elem = XVECEXP (rtl, 0, index);
15714 if (GET_CODE (elem) == UNSPEC)
15715 {
15716 /* Each DWARF operation UNSPEC contains two operands; if
15717 one operand is not used for the operation, const0_rtx is
15718 passed. */
15719 gcc_assert (XVECLEN (elem, 0) == 2);
15720
15721 HOST_WIDE_INT dw_op = XINT (elem, 1);
15722 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
15723 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
15724 exp_result
15725 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
15726 oprnd2);
15727 }
15728 else
15729 exp_result
15730 = mem_loc_descriptor (elem, mode, mem_mode,
15731 VAR_INIT_STATUS_INITIALIZED);
15732
15733 if (!mem_loc_result)
15734 mem_loc_result = exp_result;
15735 else
15736 add_loc_descr (&mem_loc_result, exp_result);
15737 }
15738
15739 break;
15740 }
15741
15742 default:
15743 if (flag_checking)
15744 {
15745 print_rtl (stderr, rtl);
15746 gcc_unreachable ();
15747 }
15748 break;
15749 }
15750
15751 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15752 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15753
15754 return mem_loc_result;
15755 }
15756
15757 /* Return a descriptor that describes the concatenation of two locations.
15758 This is typically a complex variable. */
15759
15760 static dw_loc_descr_ref
15761 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
15762 {
15763 dw_loc_descr_ref cc_loc_result = NULL;
15764 dw_loc_descr_ref x0_ref
15765 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15766 dw_loc_descr_ref x1_ref
15767 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15768
15769 if (x0_ref == 0 || x1_ref == 0)
15770 return 0;
15771
15772 cc_loc_result = x0_ref;
15773 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x0)));
15774
15775 add_loc_descr (&cc_loc_result, x1_ref);
15776 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x1)));
15777
15778 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
15779 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15780
15781 return cc_loc_result;
15782 }
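
/* As an illustrative sketch (the register choice is invented, not taken
   from any particular target): a _Complex double whose real and imaginary
   parts live in two 8-byte registers would typically be described as

       DW_OP_reg0; DW_OP_piece 8; DW_OP_reg1; DW_OP_piece 8

   i.e. each half is produced by loc_descriptor and then terminated by the
   DW_OP_piece built from GET_MODE_SIZE of that half's mode.  */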
15783
15784 /* Return a descriptor that describes the concatenation of N
15785 locations. */
15786
15787 static dw_loc_descr_ref
15788 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
15789 {
15790 unsigned int i;
15791 dw_loc_descr_ref cc_loc_result = NULL;
15792 unsigned int n = XVECLEN (concatn, 0);
15793
15794 for (i = 0; i < n; ++i)
15795 {
15796 dw_loc_descr_ref ref;
15797 rtx x = XVECEXP (concatn, 0, i);
15798
15799 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15800 if (ref == NULL)
15801 return NULL;
15802
15803 add_loc_descr (&cc_loc_result, ref);
15804 add_loc_descr_op_piece (&cc_loc_result, GET_MODE_SIZE (GET_MODE (x)));
15805 }
15806
15807 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
15808 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
15809
15810 return cc_loc_result;
15811 }
15812
15813 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
15814 for DEBUG_IMPLICIT_PTR RTL. */
15815
15816 static dw_loc_descr_ref
15817 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
15818 {
15819 dw_loc_descr_ref ret;
15820 dw_die_ref ref;
15821
15822 if (dwarf_strict && dwarf_version < 5)
15823 return NULL;
15824 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
15825 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
15826 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
15827 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
15828 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
15829 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
15830 if (ref)
15831 {
15832 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15833 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15834 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15835 }
15836 else
15837 {
15838 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15839 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
15840 }
15841 return ret;
15842 }
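
/* For example (the DIE and offset are hypothetical, for illustration
   only): if a pointer P has been optimized away but is known to point
   4 bytes past the start of a variable V, the description built here is

       DW_OP_implicit_pointer <DIE of V>, 4

   where the first operand is the reference to V's DIE (or V's decl, to
   be resolved later, if the DIE does not exist yet) and the second
   operand is the byte offset within V.  */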
15843
15844 /* Output a proper Dwarf location descriptor for a variable or parameter
15845 which is either allocated in a register or in a memory location. For a
15846 register, we just generate an OP_REG and the register number. For a
15847 memory location we provide a Dwarf postfix expression describing how to
15848 generate the (dynamic) address of the object onto the address stack.
15849
15850 MODE is mode of the decl if this loc_descriptor is going to be used in
15851 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
15852 allowed, VOIDmode otherwise.
15853
15854 If we don't know how to describe it, return 0. */
15855
15856 static dw_loc_descr_ref
15857 loc_descriptor (rtx rtl, machine_mode mode,
15858 enum var_init_status initialized)
15859 {
15860 dw_loc_descr_ref loc_result = NULL;
15861 scalar_int_mode int_mode;
15862
15863 switch (GET_CODE (rtl))
15864 {
15865 case SUBREG:
15866 /* The case of a subreg may arise when we have a local (register)
15867 variable or a formal (register) parameter which doesn't quite fill
15868 up an entire register. For now, just assume that it is
15869 legitimate to make the Dwarf info refer to the whole register which
15870 contains the given subreg. */
15871 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
15872 loc_result = loc_descriptor (SUBREG_REG (rtl),
15873 GET_MODE (SUBREG_REG (rtl)), initialized);
15874 else
15875 goto do_default;
15876 break;
15877
15878 case REG:
15879 loc_result = reg_loc_descriptor (rtl, initialized);
15880 break;
15881
15882 case MEM:
15883 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
15884 GET_MODE (rtl), initialized);
15885 if (loc_result == NULL)
15886 loc_result = tls_mem_loc_descriptor (rtl);
15887 if (loc_result == NULL)
15888 {
15889 rtx new_rtl = avoid_constant_pool_reference (rtl);
15890 if (new_rtl != rtl)
15891 loc_result = loc_descriptor (new_rtl, mode, initialized);
15892 }
15893 break;
15894
15895 case CONCAT:
15896 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
15897 initialized);
15898 break;
15899
15900 case CONCATN:
15901 loc_result = concatn_loc_descriptor (rtl, initialized);
15902 break;
15903
15904 case VAR_LOCATION:
15905 /* Single part. */
15906 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
15907 {
15908 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
15909 if (GET_CODE (loc) == EXPR_LIST)
15910 loc = XEXP (loc, 0);
15911 loc_result = loc_descriptor (loc, mode, initialized);
15912 break;
15913 }
15914
15915 rtl = XEXP (rtl, 1);
15916 /* FALLTHRU */
15917
15918 case PARALLEL:
15919 {
15920 rtvec par_elems = XVEC (rtl, 0);
15921 int num_elem = GET_NUM_ELEM (par_elems);
15922 machine_mode mode;
15923 int i;
15924
15925 /* Create the first one, so we have something to add to. */
15926 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
15927 VOIDmode, initialized);
15928 if (loc_result == NULL)
15929 return NULL;
15930 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
15931 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15932 for (i = 1; i < num_elem; i++)
15933 {
15934 dw_loc_descr_ref temp;
15935
15936 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
15937 VOIDmode, initialized);
15938 if (temp == NULL)
15939 return NULL;
15940 add_loc_descr (&loc_result, temp);
15941 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
15942 add_loc_descr_op_piece (&loc_result, GET_MODE_SIZE (mode));
15943 }
15944 }
15945 break;
15946
15947 case CONST_INT:
15948 if (mode != VOIDmode && mode != BLKmode)
15949 {
15950 int_mode = as_a <scalar_int_mode> (mode);
15951 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
15952 INTVAL (rtl));
15953 }
15954 break;
15955
15956 case CONST_DOUBLE:
15957 if (mode == VOIDmode)
15958 mode = GET_MODE (rtl);
15959
15960 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15961 {
15962 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
15963
15964 /* Note that a CONST_DOUBLE rtx could represent either an integer
15965 or a floating-point constant. A CONST_DOUBLE is used whenever
15966 the constant requires more than one word in order to be
15967 adequately represented. We output CONST_DOUBLEs as blocks. */
15968 scalar_mode smode = as_a <scalar_mode> (mode);
15969 loc_result = new_loc_descr (DW_OP_implicit_value,
15970 GET_MODE_SIZE (smode), 0);
15971 #if TARGET_SUPPORTS_WIDE_INT == 0
15972 if (!SCALAR_FLOAT_MODE_P (smode))
15973 {
15974 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
15975 loc_result->dw_loc_oprnd2.v.val_double
15976 = rtx_to_double_int (rtl);
15977 }
15978 else
15979 #endif
15980 {
15981 unsigned int length = GET_MODE_SIZE (smode);
15982 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15983
15984 insert_float (rtl, array);
15985 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15986 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15987 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15988 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15989 }
15990 }
15991 break;
15992
15993 case CONST_WIDE_INT:
15994 if (mode == VOIDmode)
15995 mode = GET_MODE (rtl);
15996
15997 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
15998 {
15999 int_mode = as_a <scalar_int_mode> (mode);
16000 loc_result = new_loc_descr (DW_OP_implicit_value,
16001 GET_MODE_SIZE (int_mode), 0);
16002 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16003 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16004 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16005 }
16006 break;
16007
16008 case CONST_VECTOR:
16009 if (mode == VOIDmode)
16010 mode = GET_MODE (rtl);
16011
16012 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16013 {
16014 unsigned int length;
16015 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16016 return NULL;
16017
16018 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16019 unsigned char *array
16020 = ggc_vec_alloc<unsigned char> (length * elt_size);
16021 unsigned int i;
16022 unsigned char *p;
16023 machine_mode imode = GET_MODE_INNER (mode);
16024
16025 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16026 switch (GET_MODE_CLASS (mode))
16027 {
16028 case MODE_VECTOR_INT:
16029 for (i = 0, p = array; i < length; i++, p += elt_size)
16030 {
16031 rtx elt = CONST_VECTOR_ELT (rtl, i);
16032 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16033 }
16034 break;
16035
16036 case MODE_VECTOR_FLOAT:
16037 for (i = 0, p = array; i < length; i++, p += elt_size)
16038 {
16039 rtx elt = CONST_VECTOR_ELT (rtl, i);
16040 insert_float (elt, p);
16041 }
16042 break;
16043
16044 default:
16045 gcc_unreachable ();
16046 }
16047
16048 loc_result = new_loc_descr (DW_OP_implicit_value,
16049 length * elt_size, 0);
16050 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16051 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16052 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16053 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16054 }
16055 break;
16056
16057 case CONST:
16058 if (mode == VOIDmode
16059 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16060 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16061 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16062 {
16063 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16064 break;
16065 }
16066 /* FALLTHROUGH */
16067 case SYMBOL_REF:
16068 if (!const_ok_for_output (rtl))
16069 break;
16070 /* FALLTHROUGH */
16071 case LABEL_REF:
16072 if (is_a <scalar_int_mode> (mode, &int_mode)
16073 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16074 && (dwarf_version >= 4 || !dwarf_strict))
16075 {
16076 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16077 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16078 vec_safe_push (used_rtx_array, rtl);
16079 }
16080 break;
16081
16082 case DEBUG_IMPLICIT_PTR:
16083 loc_result = implicit_ptr_descriptor (rtl, 0);
16084 break;
16085
16086 case PLUS:
16087 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16088 && CONST_INT_P (XEXP (rtl, 1)))
16089 {
16090 loc_result
16091 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16092 break;
16093 }
16094 /* FALLTHRU */
16095 do_default:
16096 default:
16097 if ((is_a <scalar_int_mode> (mode, &int_mode)
16098 && GET_MODE (rtl) == int_mode
16099 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16100 && dwarf_version >= 4)
16101 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16102 {
16103 /* Value expression. */
16104 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16105 if (loc_result)
16106 add_loc_descr (&loc_result,
16107 new_loc_descr (DW_OP_stack_value, 0, 0));
16108 }
16109 break;
16110 }
16111
16112 return loc_result;
16113 }
16114
16115 /* We need to figure out what section we should use as the base for the
16116 address ranges where a given location is valid.
16117 1. If this particular DECL has a section associated with it, use that.
16118 2. If this function has a section associated with it, use that.
16119 3. Otherwise, use the text section.
16120 XXX: If you split a variable across multiple sections, we won't notice. */
16121
16122 static const char *
16123 secname_for_decl (const_tree decl)
16124 {
16125 const char *secname;
16126
16127 if (VAR_OR_FUNCTION_DECL_P (decl)
16128 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16129 && DECL_SECTION_NAME (decl))
16130 secname = DECL_SECTION_NAME (decl);
16131 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16132 secname = DECL_SECTION_NAME (current_function_decl);
16133 else if (cfun && in_cold_section_p)
16134 secname = crtl->subsections.cold_section_label;
16135 else
16136 secname = text_section_label;
16137
16138 return secname;
16139 }
16140
16141 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16142
16143 static bool
16144 decl_by_reference_p (tree decl)
16145 {
16146 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16147 || VAR_P (decl))
16148 && DECL_BY_REFERENCE (decl));
16149 }
16150
16151 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16152 for VARLOC. */
16153
16154 static dw_loc_descr_ref
16155 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16156 enum var_init_status initialized)
16157 {
16158 int have_address = 0;
16159 dw_loc_descr_ref descr;
16160 machine_mode mode;
16161
16162 if (want_address != 2)
16163 {
16164 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16165 /* Single part. */
16166 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16167 {
16168 varloc = PAT_VAR_LOCATION_LOC (varloc);
16169 if (GET_CODE (varloc) == EXPR_LIST)
16170 varloc = XEXP (varloc, 0);
16171 mode = GET_MODE (varloc);
16172 if (MEM_P (varloc))
16173 {
16174 rtx addr = XEXP (varloc, 0);
16175 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16176 mode, initialized);
16177 if (descr)
16178 have_address = 1;
16179 else
16180 {
16181 rtx x = avoid_constant_pool_reference (varloc);
16182 if (x != varloc)
16183 descr = mem_loc_descriptor (x, mode, VOIDmode,
16184 initialized);
16185 }
16186 }
16187 else
16188 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16189 }
16190 else
16191 return 0;
16192 }
16193 else
16194 {
16195 if (GET_CODE (varloc) == VAR_LOCATION)
16196 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16197 else
16198 mode = DECL_MODE (loc);
16199 descr = loc_descriptor (varloc, mode, initialized);
16200 have_address = 1;
16201 }
16202
16203 if (!descr)
16204 return 0;
16205
16206 if (want_address == 2 && !have_address
16207 && (dwarf_version >= 4 || !dwarf_strict))
16208 {
16209 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16210 {
16211 expansion_failed (loc, NULL_RTX,
16212 "DWARF address size mismatch");
16213 return 0;
16214 }
16215 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16216 have_address = 1;
16217 }
16218 /* Report a failure if we can't fill the request for an address. */
16219 if (want_address && !have_address)
16220 {
16221 expansion_failed (loc, NULL_RTX,
16222 "Want address and only have value");
16223 return 0;
16224 }
16225
16226 /* If we've got an address and don't want one, dereference. */
16227 if (!want_address && have_address)
16228 {
16229 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16230 enum dwarf_location_atom op;
16231
16232 if (size > DWARF2_ADDR_SIZE || size == -1)
16233 {
16234 expansion_failed (loc, NULL_RTX,
16235 "DWARF address size mismatch");
16236 return 0;
16237 }
16238 else if (size == DWARF2_ADDR_SIZE)
16239 op = DW_OP_deref;
16240 else
16241 op = DW_OP_deref_size;
16242
16243 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16244 }
16245
16246 return descr;
16247 }
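
/* A small worked instance of the dereference above (sizes assumed for
   illustration, with DWARF2_ADDR_SIZE taken to be 8): when the value of
   a 4-byte variable is wanted but only its address is available, the
   address expression is followed by DW_OP_deref_size 4; for an 8-byte
   variable a plain DW_OP_deref would be appended instead.  */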
16248
16249 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16250 if it is not possible. */
16251
16252 static dw_loc_descr_ref
16253 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16254 {
16255 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16256 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16257 else if (dwarf_version >= 3 || !dwarf_strict)
16258 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16259 else
16260 return NULL;
16261 }
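
/* Illustrative examples, assuming BITS_PER_UNIT is 8:
   new_loc_descr_op_bit_piece (32, 0) yields DW_OP_piece 4, since 32 bits
   is a whole number of bytes at offset 0, while
   new_loc_descr_op_bit_piece (3, 5) yields DW_OP_bit_piece 3, 5 (or NULL
   under strict DWARF 2, which lacks DW_OP_bit_piece).  */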
16262
16263 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16264 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16265
16266 static dw_loc_descr_ref
16267 dw_sra_loc_expr (tree decl, rtx loc)
16268 {
16269 rtx p;
16270 unsigned HOST_WIDE_INT padsize = 0;
16271 dw_loc_descr_ref descr, *descr_tail;
16272 unsigned HOST_WIDE_INT decl_size;
16273 rtx varloc;
16274 enum var_init_status initialized;
16275
16276 if (DECL_SIZE (decl) == NULL
16277 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16278 return NULL;
16279
16280 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16281 descr = NULL;
16282 descr_tail = &descr;
16283
16284 for (p = loc; p; p = XEXP (p, 1))
16285 {
16286 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16287 rtx loc_note = *decl_piece_varloc_ptr (p);
16288 dw_loc_descr_ref cur_descr;
16289 dw_loc_descr_ref *tail, last = NULL;
16290 unsigned HOST_WIDE_INT opsize = 0;
16291
16292 if (loc_note == NULL_RTX
16293 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16294 {
16295 padsize += bitsize;
16296 continue;
16297 }
16298 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16299 varloc = NOTE_VAR_LOCATION (loc_note);
16300 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16301 if (cur_descr == NULL)
16302 {
16303 padsize += bitsize;
16304 continue;
16305 }
16306
16307 /* Check that cur_descr either doesn't use
16308 DW_OP_*piece operations, or their sum is equal
16309 to bitsize. Otherwise we can't embed it. */
16310 for (tail = &cur_descr; *tail != NULL;
16311 tail = &(*tail)->dw_loc_next)
16312 if ((*tail)->dw_loc_opc == DW_OP_piece)
16313 {
16314 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16315 * BITS_PER_UNIT;
16316 last = *tail;
16317 }
16318 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16319 {
16320 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16321 last = *tail;
16322 }
16323
16324 if (last != NULL && opsize != bitsize)
16325 {
16326 padsize += bitsize;
16327 /* Discard the current piece of the descriptor and release any
16328 addr_table entries it uses. */
16329 remove_loc_list_addr_table_entries (cur_descr);
16330 continue;
16331 }
16332
16333 /* If there is a hole, add DW_OP_*piece after empty DWARF
16334 expression, which means that those bits are optimized out. */
16335 if (padsize)
16336 {
16337 if (padsize > decl_size)
16338 {
16339 remove_loc_list_addr_table_entries (cur_descr);
16340 goto discard_descr;
16341 }
16342 decl_size -= padsize;
16343 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16344 if (*descr_tail == NULL)
16345 {
16346 remove_loc_list_addr_table_entries (cur_descr);
16347 goto discard_descr;
16348 }
16349 descr_tail = &(*descr_tail)->dw_loc_next;
16350 padsize = 0;
16351 }
16352 *descr_tail = cur_descr;
16353 descr_tail = tail;
16354 if (bitsize > decl_size)
16355 goto discard_descr;
16356 decl_size -= bitsize;
16357 if (last == NULL)
16358 {
16359 HOST_WIDE_INT offset = 0;
16360 if (GET_CODE (varloc) == VAR_LOCATION
16361 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16362 {
16363 varloc = PAT_VAR_LOCATION_LOC (varloc);
16364 if (GET_CODE (varloc) == EXPR_LIST)
16365 varloc = XEXP (varloc, 0);
16366 }
16367 do
16368 {
16369 if (GET_CODE (varloc) == CONST
16370 || GET_CODE (varloc) == SIGN_EXTEND
16371 || GET_CODE (varloc) == ZERO_EXTEND)
16372 varloc = XEXP (varloc, 0);
16373 else if (GET_CODE (varloc) == SUBREG)
16374 varloc = SUBREG_REG (varloc);
16375 else
16376 break;
16377 }
16378 while (1);
16379 /* The DW_OP_bit_piece offset should be zero for register
16380 or implicit location descriptions and empty location
16381 descriptions, but for memory addresses it needs big-endian
16382 adjustment. */
16383 if (MEM_P (varloc))
16384 {
16385 unsigned HOST_WIDE_INT memsize;
16386 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16387 goto discard_descr;
16388 memsize *= BITS_PER_UNIT;
16389 if (memsize != bitsize)
16390 {
16391 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16392 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16393 goto discard_descr;
16394 if (memsize < bitsize)
16395 goto discard_descr;
16396 if (BITS_BIG_ENDIAN)
16397 offset = memsize - bitsize;
16398 }
16399 }
16400
16401 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16402 if (*descr_tail == NULL)
16403 goto discard_descr;
16404 descr_tail = &(*descr_tail)->dw_loc_next;
16405 }
16406 }
16407
16408 /* If there were any non-empty expressions, add padding till the end of
16409 the decl. */
16410 if (descr != NULL && decl_size != 0)
16411 {
16412 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16413 if (*descr_tail == NULL)
16414 goto discard_descr;
16415 }
16416 return descr;
16417
16418 discard_descr:
16419 /* Discard the descriptor and release any addr_table entries it uses. */
16420 remove_loc_list_addr_table_entries (descr);
16421 return NULL;
16422 }
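
/* Sketch of a possible result (sizes and locations invented for
   illustration): for an 8-byte variable that SRA split into two 4-byte
   halves, where the first half ended up in a register and the second
   half was optimized out, the expression built here has the shape

       <register description>; DW_OP_piece 4; DW_OP_piece 4

   the trailing DW_OP_piece, preceded by no location, standing for the
   4 bytes that have no location at all.  */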
16423
16424 /* Return the dwarf representation of the location list LOC_LIST of
16425 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16426 function. */
16427
16428 static dw_loc_list_ref
16429 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16430 {
16431 const char *endname, *secname;
16432 rtx varloc;
16433 enum var_init_status initialized;
16434 struct var_loc_node *node;
16435 dw_loc_descr_ref descr;
16436 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16437 dw_loc_list_ref list = NULL;
16438 dw_loc_list_ref *listp = &list;
16439
16440 /* Now that we know what section we are using for a base,
16441 actually construct the list of locations.
16442 The first location information is what is passed to the
16443 function that creates the location list, and the remaining
16444 locations just get added on to that list.
16445 Note that we only know the start address for a location
16446 (i.e. location changes), so to build the range, we use
16447 the range [current location start, next location start].
16448 This means we have to special case the last node, and generate
16449 a range of [last location start, end of function label]. */
16450
16451 if (cfun && crtl->has_bb_partition)
16452 {
16453 bool save_in_cold_section_p = in_cold_section_p;
16454 in_cold_section_p = first_function_block_is_cold;
16455 if (loc_list->last_before_switch == NULL)
16456 in_cold_section_p = !in_cold_section_p;
16457 secname = secname_for_decl (decl);
16458 in_cold_section_p = save_in_cold_section_p;
16459 }
16460 else
16461 secname = secname_for_decl (decl);
16462
16463 for (node = loc_list->first; node; node = node->next)
16464 {
16465 bool range_across_switch = false;
16466 if (GET_CODE (node->loc) == EXPR_LIST
16467 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16468 {
16469 if (GET_CODE (node->loc) == EXPR_LIST)
16470 {
16471 descr = NULL;
16472 /* This requires DW_OP_{,bit_}piece, which is not usable
16473 inside DWARF expressions. */
16474 if (want_address == 2)
16475 descr = dw_sra_loc_expr (decl, node->loc);
16476 }
16477 else
16478 {
16479 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16480 varloc = NOTE_VAR_LOCATION (node->loc);
16481 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16482 }
16483 if (descr)
16484 {
16485 /* If section switch happens in between node->label
16486 and node->next->label (or end of function) and
16487 we can't emit it as a single entry list,
16488 emit two ranges, first one ending at the end
16489 of first partition and second one starting at the
16490 beginning of second partition. */
16491 if (node == loc_list->last_before_switch
16492 && (node != loc_list->first || loc_list->first->next)
16493 && current_function_decl)
16494 {
16495 endname = cfun->fde->dw_fde_end;
16496 range_across_switch = true;
16497 }
16498 /* The variable has a location between NODE->LABEL and
16499 NODE->NEXT->LABEL. */
16500 else if (node->next)
16501 endname = node->next->label;
16502 /* If the variable has a location at the last label
16503 it keeps its location until the end of function. */
16504 else if (!current_function_decl)
16505 endname = text_end_label;
16506 else
16507 {
16508 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
16509 current_function_funcdef_no);
16510 endname = ggc_strdup (label_id);
16511 }
16512
16513 *listp = new_loc_list (descr, node->label, endname, secname);
16514 if (TREE_CODE (decl) == PARM_DECL
16515 && node == loc_list->first
16516 && NOTE_P (node->loc)
16517 && strcmp (node->label, endname) == 0)
16518 (*listp)->force = true;
16519 listp = &(*listp)->dw_loc_next;
16520 }
16521 }
16522
16523 if (cfun
16524 && crtl->has_bb_partition
16525 && node == loc_list->last_before_switch)
16526 {
16527 bool save_in_cold_section_p = in_cold_section_p;
16528 in_cold_section_p = !first_function_block_is_cold;
16529 secname = secname_for_decl (decl);
16530 in_cold_section_p = save_in_cold_section_p;
16531 }
16532
16533 if (range_across_switch)
16534 {
16535 if (GET_CODE (node->loc) == EXPR_LIST)
16536 descr = dw_sra_loc_expr (decl, node->loc);
16537 else
16538 {
16539 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16540 varloc = NOTE_VAR_LOCATION (node->loc);
16541 descr = dw_loc_list_1 (decl, varloc, want_address,
16542 initialized);
16543 }
16544 gcc_assert (descr);
16545 /* The variable has a location between NODE->LABEL and
16546 NODE->NEXT->LABEL. */
16547 if (node->next)
16548 endname = node->next->label;
16549 else
16550 endname = cfun->fde->dw_fde_second_end;
16551 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin,
16552 endname, secname);
16553 listp = &(*listp)->dw_loc_next;
16554 }
16555 }
16556
16557 /* Try to avoid the overhead of a location list by emitting a location
16558 expression instead, but only if we didn't have more than one
16559 location entry in the first place. If some entries were not
16560 representable, we don't want to pretend that a single entry that
16561 was representable applies to the entire scope in which the variable
16562 is available. */
16563 if (list && loc_list->first->next)
16564 gen_llsym (list);
16565
16566 return list;
16567 }
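
/* The list built above corresponds to .debug_loc entries of roughly this
   shape (the labels are illustrative only):

       [.LVL1, .LVL2)       expression while the variable is in its
                            first location
       [.LVL2, .Lfunc_end)  expression for its last location

   with an additional pair of ranges around the section switch when the
   function is partitioned into hot and cold parts.  */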
16568
16569 /* Return true if the loc_list has only a single element and thus can be
16570 represented as a location description. */
16571
16572 static bool
16573 single_element_loc_list_p (dw_loc_list_ref list)
16574 {
16575 gcc_assert (!list->dw_loc_next || list->ll_symbol);
16576 return !list->ll_symbol;
16577 }
16578
16579 /* Duplicate a single element of location list. */
16580
16581 static inline dw_loc_descr_ref
16582 copy_loc_descr (dw_loc_descr_ref ref)
16583 {
16584 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
16585 memcpy (copy, ref, sizeof (dw_loc_descr_node));
16586 return copy;
16587 }
16588
16589 /* To each location in list LIST append loc descr REF. */
16590
16591 static void
16592 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16593 {
16594 dw_loc_descr_ref copy;
16595 add_loc_descr (&list->expr, ref);
16596 list = list->dw_loc_next;
16597 while (list)
16598 {
16599 copy = copy_loc_descr (ref);
16600 add_loc_descr (&list->expr, copy);
16601 while (copy->dw_loc_next)
16602 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16603 list = list->dw_loc_next;
16604 }
16605 }
16606
16607 /* To each location in list LIST prepend loc descr REF. */
16608
16609 static void
16610 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
16611 {
16612 dw_loc_descr_ref copy;
16613 dw_loc_descr_ref ref_end = list->expr;
16614 add_loc_descr (&ref, list->expr);
16615 list->expr = ref;
16616 list = list->dw_loc_next;
16617 while (list)
16618 {
16619 dw_loc_descr_ref end = list->expr;
16620 list->expr = copy = copy_loc_descr (ref);
16621 while (copy->dw_loc_next != ref_end)
16622 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
16623 copy->dw_loc_next = end;
16624 list = list->dw_loc_next;
16625 }
16626 }
16627
16628 /* Given two lists RET and LIST,
16629 produce the location list that is the result of adding the expression in LIST
16630 to the expression in RET at each position in the program.
16631 Might be destructive on both RET and LIST.
16632
16633 TODO: We handle only the simple cases of RET or LIST having at most one
16634 element. The general case would involve sorting the lists in program order
16635 and merging them, which will need some additional work.
16636 Adding that will improve the quality of debug info, especially for SRA-ed
16637 structures. */
16638
16639 static void
16640 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
16641 {
16642 if (!list)
16643 return;
16644 if (!*ret)
16645 {
16646 *ret = list;
16647 return;
16648 }
16649 if (!list->dw_loc_next)
16650 {
16651 add_loc_descr_to_each (*ret, list->expr);
16652 return;
16653 }
16654 if (!(*ret)->dw_loc_next)
16655 {
16656 prepend_loc_descr_to_each (list, (*ret)->expr);
16657 *ret = list;
16658 return;
16659 }
16660 expansion_failed (NULL_TREE, NULL_RTX,
16661 "Don't know how to merge two non-trivial"
16662 " location lists.\n");
16663 *ret = NULL;
16664 return;
16665 }
16666
16667 /* LOC is a constant expression. Try our luck: look it up in the constant
16668 pool and return the loc_descr of its address. */
16669
16670 static dw_loc_descr_ref
16671 cst_pool_loc_descr (tree loc)
16672 {
16673 /* Get an RTL for this, if something has been emitted. */
16674 rtx rtl = lookup_constant_def (loc);
16675
16676 if (!rtl || !MEM_P (rtl))
16677 {
16678 gcc_assert (!rtl);
16679 return 0;
16680 }
16681 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
16682
16683 /* TODO: We might get more coverage if we were actually delaying expansion
16684 of all expressions till the end of compilation, when constant pools are fully
16685 populated. */
16686 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
16687 {
16688 expansion_failed (loc, NULL_RTX,
16689 "CST value in contant pool but not marked.");
16690 return 0;
16691 }
16692 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16693 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
16694 }
16695
16696 /* Return a dw_loc_list representing the address of the addr_expr LOC
16697 by looking for an inner INDIRECT_REF expression and turning
16698 it into simple arithmetic.
16699
16700 See loc_list_from_tree for the meaning of CONTEXT. */
16701
16702 static dw_loc_list_ref
16703 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
16704 loc_descr_context *context)
16705 {
16706 tree obj, offset;
16707 poly_int64 bitsize, bitpos, bytepos;
16708 machine_mode mode;
16709 int unsignedp, reversep, volatilep = 0;
16710 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
16711
16712 obj = get_inner_reference (TREE_OPERAND (loc, 0),
16713 &bitsize, &bitpos, &offset, &mode,
16714 &unsignedp, &reversep, &volatilep);
16715 STRIP_NOPS (obj);
16716 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
16717 {
16718 expansion_failed (loc, NULL_RTX, "bitfield access");
16719 return 0;
16720 }
16721 if (!INDIRECT_REF_P (obj))
16722 {
16723 expansion_failed (obj,
16724 NULL_RTX, "no indirect ref in inner reference");
16725 return 0;
16726 }
16727 if (!offset && known_eq (bitpos, 0))
16728 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
16729 context);
16730 else if (toplev
16731 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
16732 && (dwarf_version >= 4 || !dwarf_strict))
16733 {
16734 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
16735 if (!list_ret)
16736 return 0;
16737 if (offset)
16738 {
16739 /* Variable offset. */
16740 list_ret1 = loc_list_from_tree (offset, 0, context);
16741 if (list_ret1 == 0)
16742 return 0;
16743 add_loc_list (&list_ret, list_ret1);
16744 if (!list_ret)
16745 return 0;
16746 add_loc_descr_to_each (list_ret,
16747 new_loc_descr (DW_OP_plus, 0, 0));
16748 }
16749 HOST_WIDE_INT value;
16750 if (bytepos.is_constant (&value) && value > 0)
16751 add_loc_descr_to_each (list_ret,
16752 new_loc_descr (DW_OP_plus_uconst, value, 0));
16753 else if (maybe_ne (bytepos, 0))
16754 loc_list_plus_const (list_ret, bytepos);
16755 add_loc_descr_to_each (list_ret,
16756 new_loc_descr (DW_OP_stack_value, 0, 0));
16757 }
16758 return list_ret;
16759 }
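
/* For instance (the field offset is invented for illustration): for the
   C++ expression &this->field with FIELD at byte offset 8, the inner
   reference is *this, so when a top-level address is wanted the result
   is the location of "this" followed by DW_OP_plus_uconst 8 and
   DW_OP_stack_value.  */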
16760
16761 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
16762 all operations from LOC are nops, move to the last one. Insert in NOPS all
16763 operations that are skipped. */
16764
16765 static void
16766 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
16767 hash_set<dw_loc_descr_ref> &nops)
16768 {
16769 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
16770 {
16771 nops.add (loc);
16772 loc = loc->dw_loc_next;
16773 }
16774 }
16775
16776 /* Helper for loc_descr_without_nops: free the location description operation
16777 P. */
16778
16779 bool
16780 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
16781 {
16782 ggc_free (loc);
16783 return true;
16784 }
16785
16786 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
16787 finishes LOC. */
16788
16789 static void
16790 loc_descr_without_nops (dw_loc_descr_ref &loc)
16791 {
16792 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
16793 return;
16794
16795 /* Set of all DW_OP_nop operations we remove. */
16796 hash_set<dw_loc_descr_ref> nops;
16797
16798 /* First, strip all prefix NOP operations in order to keep the head of the
16799 operations list. */
16800 loc_descr_to_next_no_nop (loc, nops);
16801
16802 for (dw_loc_descr_ref cur = loc; cur != NULL;)
16803 {
16804 /* For control flow operations: strip "prefix" nops in destination
16805 labels. */
16806 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
16807 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
16808 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
16809 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
16810
16811 /* Do the same for the operations that follow, then move to the next
16812 iteration. */
16813 if (cur->dw_loc_next != NULL)
16814 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
16815 cur = cur->dw_loc_next;
16816 }
16817
16818 nops.traverse<void *, free_loc_descr> (NULL);
16819 }
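
/* Tiny example: the sequence DW_OP_nop; DW_OP_lit0; DW_OP_nop;
   DW_OP_stack_value becomes DW_OP_lit0; DW_OP_stack_value, while a
   DW_OP_nop that terminates the whole expression is left in place.  */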
16820
16821
16822 struct dwarf_procedure_info;
16823
16824 /* Helper structure for location descriptions generation. */
16825 struct loc_descr_context
16826 {
16827 /* The type that is implicitly referenced by DW_OP_push_object_address, or
16828 NULL_TREE if DW_OP_push_object_address is invalid for this location
16829 description. This is used when processing PLACEHOLDER_EXPR nodes. */
16830 tree context_type;
16831 /* The ..._DECL node that should be translated as a
16832 DW_OP_push_object_address operation. */
16833 tree base_decl;
16834 /* Information about the DWARF procedure we are currently generating. NULL if
16835 we are not generating a DWARF procedure. */
16836 struct dwarf_procedure_info *dpi;
16837 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
16838 by consumer. Used for DW_TAG_generic_subrange attributes. */
16839 bool placeholder_arg;
16840 /* True if PLACEHOLDER_EXPR has been seen. */
16841 bool placeholder_seen;
16842 };
16843
16844 /* DWARF procedures generation
16845
16846 DWARF expressions (aka. location descriptions) are used to encode variable
16847 things such as sizes or offsets. Such computations can have redundant parts
16848 that can be factorized in order to reduce the size of the output debug
16849 information. This is the whole point of DWARF procedures.
16850
16851 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
16852 already factorized into functions ("size functions") in order to handle very
16853 big and complex types. Such functions are quite simple: they have integral
16854 arguments, they return an integral result and their body contains only a
16855 return statement with arithmetic expressions. This is the only kind of
16856 function we are interested in translating into DWARF procedures, here.
16857
16858 DWARF expressions and DWARF procedures are executed using a stack, so we have
16859 to define some calling convention for them to interact. Let's say that:
16860
16861 - Before calling a DWARF procedure, DWARF expressions must push on the stack
16862 all arguments in reverse order (right-to-left) so that when the DWARF
16863 procedure execution starts, the first argument is the top of the stack.
16864
16865 - Then, when returning, the DWARF procedure must have consumed all arguments
16866 on the stack, must have pushed the result and touched nothing else.
16867
16868 - Each integral argument and the result have integral types that can be held
16869 in a single stack slot.
16870
16871 - We call "frame offset" the number of stack slots that are "under DWARF
16872 procedure control": it includes the arguments slots, the temporaries and
16873 the result slot. Thus, it is equal to the number of arguments when the
16874 procedure execution starts and must be equal to one (the result) when it
16875 returns. */
16876
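/* As a worked example of this convention (the function is invented
   purely for illustration): to call a DWARF procedure computing
   f (a, b) = a + 2 * b, the caller pushes b, then a, then emits
   DW_OP_call4 <procedure DIE>.  On entry the procedure thus sees the
   stack (bottom to top) as: b, a.  A body such as

       DW_OP_swap; DW_OP_lit2; DW_OP_mul; DW_OP_plus

   consumes both arguments and leaves a + 2 * b as the single slot under
   the procedure's control, as required above.  */
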
16877 /* Helper structure used when generating operations for a DWARF procedure. */
16878 struct dwarf_procedure_info
16879 {
16880 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
16881 currently translated. */
16882 tree fndecl;
16883 /* The number of arguments FNDECL takes. */
16884 unsigned args_count;
16885 };
16886
16887 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
16888 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
16889 equate it to this DIE. */
16890
16891 static dw_die_ref
16892 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
16893 dw_die_ref parent_die)
16894 {
16895 dw_die_ref dwarf_proc_die;
16896
16897 if ((dwarf_version < 3 && dwarf_strict)
16898 || location == NULL)
16899 return NULL;
16900
16901 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
16902 if (fndecl)
16903 equate_decl_number_to_die (fndecl, dwarf_proc_die);
16904 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
16905 return dwarf_proc_die;
16906 }
16907
16908 /* Return whether TYPE is a supported type as a DWARF procedure argument
16909 type or return type (we handle only scalar types and pointer types that
16910 aren't wider than the DWARF expression evaluation stack). */
16911
16912 static bool
16913 is_handled_procedure_type (tree type)
16914 {
16915 return ((INTEGRAL_TYPE_P (type)
16916 || TREE_CODE (type) == OFFSET_TYPE
16917 || TREE_CODE (type) == POINTER_TYPE)
16918 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
16919 }
16920
16921 /* Helper for resolve_args_picking: do the same but stop when coming across
16922 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
16923 offset *before* evaluating the corresponding operation. */
16924
16925 static bool
16926 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
16927 struct dwarf_procedure_info *dpi,
16928 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
16929 {
16930 /* The "frame_offset" identifier is already used to name a macro... */
16931 unsigned frame_offset_ = initial_frame_offset;
16932 dw_loc_descr_ref l;
16933
16934 for (l = loc; l != NULL;)
16935 {
16936 bool existed;
16937 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
16938
16939 /* If we already met this node, there is nothing to compute anymore. */
16940 if (existed)
16941 {
16942 /* Make sure that the stack size is consistent wherever the execution
16943 flow comes from. */
16944 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
16945 break;
16946 }
16947 l_frame_offset = frame_offset_;
16948
16949 /* If needed, relocate the picking offset with respect to the frame
16950 offset. */
16951 if (l->frame_offset_rel)
16952 {
16953 unsigned HOST_WIDE_INT off;
16954 switch (l->dw_loc_opc)
16955 {
16956 case DW_OP_pick:
16957 off = l->dw_loc_oprnd1.v.val_unsigned;
16958 break;
16959 case DW_OP_dup:
16960 off = 0;
16961 break;
16962 case DW_OP_over:
16963 off = 1;
16964 break;
16965 default:
16966 gcc_unreachable ();
16967 }
16968 /* frame_offset_ is the size of the current stack frame, including
16969 incoming arguments. Besides, the arguments are pushed
16970 right-to-left. Thus, in order to access the Nth argument from
16971 this operation node, the picking has to skip temporaries *plus*
16972 one stack slot per argument (0 for the first one, 1 for the second
16973 one, etc.).
16974
16975 The targeted argument number (N) is already set as the operand,
16976 and the number of temporaries can be computed with:
16977 frame_offset_ - dpi->args_count */
16978 off += frame_offset_ - dpi->args_count;
16979
16980 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
16981 if (off > 255)
16982 return false;
16983
16984 if (off == 0)
16985 {
16986 l->dw_loc_opc = DW_OP_dup;
16987 l->dw_loc_oprnd1.v.val_unsigned = 0;
16988 }
16989 else if (off == 1)
16990 {
16991 l->dw_loc_opc = DW_OP_over;
16992 l->dw_loc_oprnd1.v.val_unsigned = 0;
16993 }
16994 else
16995 {
16996 l->dw_loc_opc = DW_OP_pick;
16997 l->dw_loc_oprnd1.v.val_unsigned = off;
16998 }
16999 }
17000
17001 /* Update frame_offset according to the effect the current operation has
17002 on the stack. */
17003 switch (l->dw_loc_opc)
17004 {
17005 case DW_OP_deref:
17006 case DW_OP_swap:
17007 case DW_OP_rot:
17008 case DW_OP_abs:
17009 case DW_OP_neg:
17010 case DW_OP_not:
17011 case DW_OP_plus_uconst:
17012 case DW_OP_skip:
17013 case DW_OP_reg0:
17014 case DW_OP_reg1:
17015 case DW_OP_reg2:
17016 case DW_OP_reg3:
17017 case DW_OP_reg4:
17018 case DW_OP_reg5:
17019 case DW_OP_reg6:
17020 case DW_OP_reg7:
17021 case DW_OP_reg8:
17022 case DW_OP_reg9:
17023 case DW_OP_reg10:
17024 case DW_OP_reg11:
17025 case DW_OP_reg12:
17026 case DW_OP_reg13:
17027 case DW_OP_reg14:
17028 case DW_OP_reg15:
17029 case DW_OP_reg16:
17030 case DW_OP_reg17:
17031 case DW_OP_reg18:
17032 case DW_OP_reg19:
17033 case DW_OP_reg20:
17034 case DW_OP_reg21:
17035 case DW_OP_reg22:
17036 case DW_OP_reg23:
17037 case DW_OP_reg24:
17038 case DW_OP_reg25:
17039 case DW_OP_reg26:
17040 case DW_OP_reg27:
17041 case DW_OP_reg28:
17042 case DW_OP_reg29:
17043 case DW_OP_reg30:
17044 case DW_OP_reg31:
17045 case DW_OP_bregx:
17046 case DW_OP_piece:
17047 case DW_OP_deref_size:
17048 case DW_OP_nop:
17049 case DW_OP_bit_piece:
17050 case DW_OP_implicit_value:
17051 case DW_OP_stack_value:
17052 break;
17053
17054 case DW_OP_addr:
17055 case DW_OP_const1u:
17056 case DW_OP_const1s:
17057 case DW_OP_const2u:
17058 case DW_OP_const2s:
17059 case DW_OP_const4u:
17060 case DW_OP_const4s:
17061 case DW_OP_const8u:
17062 case DW_OP_const8s:
17063 case DW_OP_constu:
17064 case DW_OP_consts:
17065 case DW_OP_dup:
17066 case DW_OP_over:
17067 case DW_OP_pick:
17068 case DW_OP_lit0:
17069 case DW_OP_lit1:
17070 case DW_OP_lit2:
17071 case DW_OP_lit3:
17072 case DW_OP_lit4:
17073 case DW_OP_lit5:
17074 case DW_OP_lit6:
17075 case DW_OP_lit7:
17076 case DW_OP_lit8:
17077 case DW_OP_lit9:
17078 case DW_OP_lit10:
17079 case DW_OP_lit11:
17080 case DW_OP_lit12:
17081 case DW_OP_lit13:
17082 case DW_OP_lit14:
17083 case DW_OP_lit15:
17084 case DW_OP_lit16:
17085 case DW_OP_lit17:
17086 case DW_OP_lit18:
17087 case DW_OP_lit19:
17088 case DW_OP_lit20:
17089 case DW_OP_lit21:
17090 case DW_OP_lit22:
17091 case DW_OP_lit23:
17092 case DW_OP_lit24:
17093 case DW_OP_lit25:
17094 case DW_OP_lit26:
17095 case DW_OP_lit27:
17096 case DW_OP_lit28:
17097 case DW_OP_lit29:
17098 case DW_OP_lit30:
17099 case DW_OP_lit31:
17100 case DW_OP_breg0:
17101 case DW_OP_breg1:
17102 case DW_OP_breg2:
17103 case DW_OP_breg3:
17104 case DW_OP_breg4:
17105 case DW_OP_breg5:
17106 case DW_OP_breg6:
17107 case DW_OP_breg7:
17108 case DW_OP_breg8:
17109 case DW_OP_breg9:
17110 case DW_OP_breg10:
17111 case DW_OP_breg11:
17112 case DW_OP_breg12:
17113 case DW_OP_breg13:
17114 case DW_OP_breg14:
17115 case DW_OP_breg15:
17116 case DW_OP_breg16:
17117 case DW_OP_breg17:
17118 case DW_OP_breg18:
17119 case DW_OP_breg19:
17120 case DW_OP_breg20:
17121 case DW_OP_breg21:
17122 case DW_OP_breg22:
17123 case DW_OP_breg23:
17124 case DW_OP_breg24:
17125 case DW_OP_breg25:
17126 case DW_OP_breg26:
17127 case DW_OP_breg27:
17128 case DW_OP_breg28:
17129 case DW_OP_breg29:
17130 case DW_OP_breg30:
17131 case DW_OP_breg31:
17132 case DW_OP_fbreg:
17133 case DW_OP_push_object_address:
17134 case DW_OP_call_frame_cfa:
17135 case DW_OP_GNU_variable_value:
17136 ++frame_offset_;
17137 break;
17138
17139 case DW_OP_drop:
17140 case DW_OP_xderef:
17141 case DW_OP_and:
17142 case DW_OP_div:
17143 case DW_OP_minus:
17144 case DW_OP_mod:
17145 case DW_OP_mul:
17146 case DW_OP_or:
17147 case DW_OP_plus:
17148 case DW_OP_shl:
17149 case DW_OP_shr:
17150 case DW_OP_shra:
17151 case DW_OP_xor:
17152 case DW_OP_bra:
17153 case DW_OP_eq:
17154 case DW_OP_ge:
17155 case DW_OP_gt:
17156 case DW_OP_le:
17157 case DW_OP_lt:
17158 case DW_OP_ne:
17159 case DW_OP_regx:
17160 case DW_OP_xderef_size:
17161 --frame_offset_;
17162 break;
17163
17164 case DW_OP_call2:
17165 case DW_OP_call4:
17166 case DW_OP_call_ref:
17167 {
17168 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17169 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17170
17171 if (stack_usage == NULL)
17172 return false;
17173 frame_offset_ += *stack_usage;
17174 break;
17175 }
17176
17177 case DW_OP_implicit_pointer:
17178 case DW_OP_entry_value:
17179 case DW_OP_const_type:
17180 case DW_OP_regval_type:
17181 case DW_OP_deref_type:
17182 case DW_OP_convert:
17183 case DW_OP_reinterpret:
17184 case DW_OP_form_tls_address:
17185 case DW_OP_GNU_push_tls_address:
17186 case DW_OP_GNU_uninit:
17187 case DW_OP_GNU_encoded_addr:
17188 case DW_OP_GNU_implicit_pointer:
17189 case DW_OP_GNU_entry_value:
17190 case DW_OP_GNU_const_type:
17191 case DW_OP_GNU_regval_type:
17192 case DW_OP_GNU_deref_type:
17193 case DW_OP_GNU_convert:
17194 case DW_OP_GNU_reinterpret:
17195 case DW_OP_GNU_parameter_ref:
17196 /* loc_list_from_tree will probably not output these operations for
17197 size functions, so assume they will not appear here. */
17198 /* Fall through... */
17199
17200 default:
17201 gcc_unreachable ();
17202 }
17203
17204 /* Now, follow the control flow (except subroutine calls). */
17205 switch (l->dw_loc_opc)
17206 {
17207 case DW_OP_bra:
17208 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17209 frame_offsets))
17210 return false;
17211 /* Fall through. */
17212
17213 case DW_OP_skip:
17214 l = l->dw_loc_oprnd1.v.val_loc;
17215 break;
17216
17217 case DW_OP_stack_value:
17218 return true;
17219
17220 default:
17221 l = l->dw_loc_next;
17222 break;
17223 }
17224 }
17225
17226 return true;
17227 }
17228
17229 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17230 operations) in order to resolve the operand of DW_OP_pick operations that
17231 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17232 offset *before* LOC is executed. Return true if all relocations were
17233 successful. */
17234
17235 static bool
17236 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17237 struct dwarf_procedure_info *dpi)
17238 {
17239 /* Associate to all visited operations the frame offset *before* evaluating
17240 this operation. */
17241 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17242
17243 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17244 frame_offsets);
17245 }
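
/* Worked example (the numbers are invented for illustration): in a
   procedure with dpi->args_count == 2, suppose a frame-relative
   DW_OP_pick with operand 1 (the second argument) is reached when
   frame_offset_ is 3, i.e. one temporary sits above the two incoming
   arguments.  The relocated offset is 1 + (3 - 2) = 2, so the operation
   stays DW_OP_pick with operand 2; had the result been 0 or 1 it would
   have been rewritten as DW_OP_dup or DW_OP_over respectively.  */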
17246
17247 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17248 Return NULL if it is not possible. */
17249
17250 static dw_die_ref
17251 function_to_dwarf_procedure (tree fndecl)
17252 {
17253 struct loc_descr_context ctx;
17254 struct dwarf_procedure_info dpi;
17255 dw_die_ref dwarf_proc_die;
17256 tree tree_body = DECL_SAVED_TREE (fndecl);
17257 dw_loc_descr_ref loc_body, epilogue;
17258
17259 tree cursor;
17260 unsigned i;
17261
17262 /* Do not generate multiple DWARF procedures for the same function
17263 declaration. */
17264 dwarf_proc_die = lookup_decl_die (fndecl);
17265 if (dwarf_proc_die != NULL)
17266 return dwarf_proc_die;
17267
17268 /* DWARF procedures are available starting with the DWARFv3 standard. */
17269 if (dwarf_version < 3 && dwarf_strict)
17270 return NULL;
17271
17272 /* We handle only functions for which we still have a body, that return a
17273 supported type and that take arguments with supported types. Note that
17274 there is no point translating functions that return nothing. */
17275 if (tree_body == NULL_TREE
17276 || DECL_RESULT (fndecl) == NULL_TREE
17277 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17278 return NULL;
17279
17280 for (cursor = DECL_ARGUMENTS (fndecl);
17281 cursor != NULL_TREE;
17282 cursor = TREE_CHAIN (cursor))
17283 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17284 return NULL;
17285
17286 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17287 if (TREE_CODE (tree_body) != RETURN_EXPR)
17288 return NULL;
17289 tree_body = TREE_OPERAND (tree_body, 0);
17290 if (TREE_CODE (tree_body) != MODIFY_EXPR
17291 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17292 return NULL;
17293 tree_body = TREE_OPERAND (tree_body, 1);
17294
17295 /* Try to translate the body expression itself. Note that this will probably
17296 cause an infinite recursion if its call graph has a cycle. This is very
17297 unlikely for size functions, however, so don't bother with such things at
17298 the moment. */
17299 ctx.context_type = NULL_TREE;
17300 ctx.base_decl = NULL_TREE;
17301 ctx.dpi = &dpi;
17302 ctx.placeholder_arg = false;
17303 ctx.placeholder_seen = false;
17304 dpi.fndecl = fndecl;
17305 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17306 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17307 if (!loc_body)
17308 return NULL;
17309
17310 /* After evaluating all operands in "loc_body", we should still have on the
17311 stack all arguments plus the desired function result (top of the stack).
17312 Generate code in order to keep only the result in our stack frame. */
17313 epilogue = NULL;
17314 for (i = 0; i < dpi.args_count; ++i)
17315 {
17316 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17317 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17318 op_couple->dw_loc_next->dw_loc_next = epilogue;
17319 epilogue = op_couple;
17320 }
17321 add_loc_descr (&loc_body, epilogue);
17322 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17323 return NULL;
17324
17325 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17326 because they are considered useful. Now that there is an epilogue, they
17327 are not useful anymore, so give it another try. */
17328 loc_descr_without_nops (loc_body);
17329
17330 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17331 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17332 though, given that size functions do not come from source, so they should
17333 not have a dedicated DW_TAG_subprogram DIE. */
17334 dwarf_proc_die
17335 = new_dwarf_proc_die (loc_body, fndecl,
17336 get_context_die (DECL_CONTEXT (fndecl)));
17337
17338 /* The called DWARF procedure consumes one stack slot per argument and
17339 returns one stack slot. */
17340 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17341
17342 return dwarf_proc_die;
17343 }
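
/* For instance (a made-up size function, only to show the accepted
   shape): a function with one integral parameter N whose GENERIC body is
   RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, N * 4)) matches the pattern
   above.  The resulting DW_TAG_dwarf_procedure roughly picks the
   argument, multiplies it by 4, then swaps and drops the consumed
   argument slot so that only the result remains on the stack.  */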
17344
17345
17346 /* Generate Dwarf location list representing LOC.
17347 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
17348 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17349 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17350 will be returned (i.e. DW_OP_reg can be used
17351 to refer to register values).
17352
17353 CONTEXT provides information to customize the location descriptions
17354 generation. Its context_type field specifies what type is implicitly
17355 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17356 will not be generated.
17357
17358 Its DPI field determines whether we are generating a DWARF expression for a
17359 DWARF procedure, so PARM_DECL references are processed specifically.
17360
17361 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17362 and dpi fields were null. */
17363
17364 static dw_loc_list_ref
17365 loc_list_from_tree_1 (tree loc, int want_address,
17366 struct loc_descr_context *context)
17367 {
17368 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17369 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17370 int have_address = 0;
17371 enum dwarf_location_atom op;
17372
17373 /* ??? Most of the time we do not take proper care to sign/zero
17374 extend the values. Hopefully this won't be a real
17375 problem... */
17376
17377 if (context != NULL
17378 && context->base_decl == loc
17379 && want_address == 0)
17380 {
17381 if (dwarf_version >= 3 || !dwarf_strict)
17382 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17383 NULL, NULL, NULL);
17384 else
17385 return NULL;
17386 }
17387
17388 switch (TREE_CODE (loc))
17389 {
17390 case ERROR_MARK:
17391 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17392 return 0;
17393
17394 case PLACEHOLDER_EXPR:
17395 /* This case involves extracting fields from an object to determine the
17396 position of other fields. It is supposed to appear only as the first
17397 operand of COMPONENT_REF nodes and to reference precisely the type
17398 that the context allows. */
17399 if (context != NULL
17400 && TREE_TYPE (loc) == context->context_type
17401 && want_address >= 1)
17402 {
17403 if (dwarf_version >= 3 || !dwarf_strict)
17404 {
17405 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17406 have_address = 1;
17407 break;
17408 }
17409 else
17410 return NULL;
17411 }
17412 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17413 the single argument passed by consumer. */
17414 else if (context != NULL
17415 && context->placeholder_arg
17416 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17417 && want_address == 0)
17418 {
17419 ret = new_loc_descr (DW_OP_pick, 0, 0);
17420 ret->frame_offset_rel = 1;
17421 context->placeholder_seen = true;
17422 break;
17423 }
17424 else
17425 expansion_failed (loc, NULL_RTX,
17426 "PLACEHOLDER_EXPR for an unexpected type");
17427 break;
17428
17429 case CALL_EXPR:
17430 {
17431 const int nargs = call_expr_nargs (loc);
17432 tree callee = get_callee_fndecl (loc);
17433 int i;
17434 dw_die_ref dwarf_proc;
17435
17436 if (callee == NULL_TREE)
17437 goto call_expansion_failed;
17438
17439 /* We handle only functions that return an integer. */
17440 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17441 goto call_expansion_failed;
17442
17443 dwarf_proc = function_to_dwarf_procedure (callee);
17444 if (dwarf_proc == NULL)
17445 goto call_expansion_failed;
17446
17447 /* Evaluate arguments right-to-left so that the first argument will
17448 be the top-most one on the stack. */
17449 for (i = nargs - 1; i >= 0; --i)
17450 {
17451 dw_loc_descr_ref loc_descr
17452 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17453 context);
17454
17455 if (loc_descr == NULL)
17456 goto call_expansion_failed;
17457
17458 add_loc_descr (&ret, loc_descr);
17459 }
17460
17461 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17462 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17463 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17464 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17465 add_loc_descr (&ret, ret1);
17466 break;
17467
17468 call_expansion_failed:
17469 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17470 /* We could not translate this call into a DWARF procedure call. */
17471 return 0;
17472 }
17473
17474 case PREINCREMENT_EXPR:
17475 case PREDECREMENT_EXPR:
17476 case POSTINCREMENT_EXPR:
17477 case POSTDECREMENT_EXPR:
17478 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17479 /* There are no opcodes for these operations. */
17480 return 0;
17481
17482 case ADDR_EXPR:
17483 /* If we already want an address, see if there is an INDIRECT_REF inside,
17484 e.g. for &this->field. */
17485 if (want_address)
17486 {
17487 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
17488 (loc, want_address == 2, context);
17489 if (list_ret)
17490 have_address = 1;
17491 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
17492 && (ret = cst_pool_loc_descr (loc)))
17493 have_address = 1;
17494 }
17495 /* Otherwise, process the argument and look for the address. */
17496 if (!list_ret && !ret)
17497 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
17498 else
17499 {
17500 if (want_address)
17501 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
17502 return NULL;
17503 }
17504 break;
17505
17506 case VAR_DECL:
17507 if (DECL_THREAD_LOCAL_P (loc))
17508 {
17509 rtx rtl;
17510 enum dwarf_location_atom tls_op;
17511 enum dtprel_bool dtprel = dtprel_false;
17512
17513 if (targetm.have_tls)
17514 {
17515 /* If this is not defined, we have no way to emit the
17516 data. */
17517 if (!targetm.asm_out.output_dwarf_dtprel)
17518 return 0;
17519
17520 /* The way DW_OP_GNU_push_tls_address is specified, we
17521 can only look up addresses of objects in the current
17522 module. We used DW_OP_addr as first op, but that's
17523 wrong, because DW_OP_addr is relocated by the debug
17524 info consumer, while DW_OP_GNU_push_tls_address
17525 operand shouldn't be. */
17526 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
17527 return 0;
17528 dtprel = dtprel_true;
17529 /* We check for DWARF 5 here because gdb did not implement
17530 DW_OP_form_tls_address until after 7.12. */
17531 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
17532 : DW_OP_GNU_push_tls_address);
17533 }
17534 else
17535 {
17536 if (!targetm.emutls.debug_form_tls_address
17537 || !(dwarf_version >= 3 || !dwarf_strict))
17538 return 0;
17539 /* We stuffed the control variable into the DECL_VALUE_EXPR
17540 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
17541 no longer appear in gimple code. We used the control
17542 variable specifically so that we could pick it up here. */
17543 loc = DECL_VALUE_EXPR (loc);
17544 tls_op = DW_OP_form_tls_address;
17545 }
17546
17547 rtl = rtl_for_decl_location (loc);
17548 if (rtl == NULL_RTX)
17549 return 0;
17550
17551 if (!MEM_P (rtl))
17552 return 0;
17553 rtl = XEXP (rtl, 0);
17554 if (! CONSTANT_P (rtl))
17555 return 0;
17556
17557 ret = new_addr_loc_descr (rtl, dtprel);
17558 ret1 = new_loc_descr (tls_op, 0, 0);
17559 add_loc_descr (&ret, ret1);
17560
17561 have_address = 1;
17562 break;
17563 }
17564 /* FALLTHRU */
17565
17566 case PARM_DECL:
17567 if (context != NULL && context->dpi != NULL
17568 && DECL_CONTEXT (loc) == context->dpi->fndecl)
17569 {
17570 /* We are generating code for a DWARF procedure and we want to access
17571 one of its arguments: find the appropriate argument offset and let
17572 the resolve_args_picking pass compute the offset that complies
17573 with the stack frame size. */
17574 unsigned i = 0;
17575 tree cursor;
17576
17577 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
17578 cursor != NULL_TREE && cursor != loc;
17579 cursor = TREE_CHAIN (cursor), ++i)
17580 ;
17581 /* If we are translating a DWARF procedure, all referenced parameters
17582 must belong to the current function. */
17583 gcc_assert (cursor != NULL_TREE);
17584
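/* For example, a reference to the DWARF procedure's second argument
   yields i == 1 and thus DW_OP_pick 1 here; the frame_offset_rel flag
   set below lets the resolve_args_picking pass rebase the operand on
   the actual stack depth at this point of the expression.  */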
17585 ret = new_loc_descr (DW_OP_pick, i, 0);
17586 ret->frame_offset_rel = 1;
17587 break;
17588 }
17589 /* FALLTHRU */
17590
17591 case RESULT_DECL:
17592 if (DECL_HAS_VALUE_EXPR_P (loc))
17593 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
17594 want_address, context);
17595 /* FALLTHRU */
17596
17597 case FUNCTION_DECL:
17598 {
17599 rtx rtl;
17600 var_loc_list *loc_list = lookup_decl_loc (loc);
17601
17602 if (loc_list && loc_list->first)
17603 {
17604 list_ret = dw_loc_list (loc_list, loc, want_address);
17605 have_address = want_address != 0;
17606 break;
17607 }
17608 rtl = rtl_for_decl_location (loc);
17609 if (rtl == NULL_RTX)
17610 {
17611 if (TREE_CODE (loc) != FUNCTION_DECL
17612 && early_dwarf
17613 && current_function_decl
17614 && want_address != 1
17615 && ! DECL_IGNORED_P (loc)
17616 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
17617 || POINTER_TYPE_P (TREE_TYPE (loc)))
17618 && DECL_CONTEXT (loc) == current_function_decl
17619 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
17620 <= DWARF2_ADDR_SIZE))
17621 {
17622 dw_die_ref ref = lookup_decl_die (loc);
17623 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
17624 if (ref)
17625 {
17626 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17627 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
17628 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
17629 }
17630 else
17631 {
17632 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
17633 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
17634 }
17635 break;
17636 }
17637 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
17638 return 0;
17639 }
17640 else if (CONST_INT_P (rtl))
17641 {
17642 HOST_WIDE_INT val = INTVAL (rtl);
17643 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17644 val &= GET_MODE_MASK (DECL_MODE (loc));
17645 ret = int_loc_descriptor (val);
17646 }
17647 else if (GET_CODE (rtl) == CONST_STRING)
17648 {
17649 expansion_failed (loc, NULL_RTX, "CONST_STRING");
17650 return 0;
17651 }
17652 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
17653 ret = new_addr_loc_descr (rtl, dtprel_false);
17654 else
17655 {
17656 machine_mode mode, mem_mode;
17657
17658 /* Certain constructs can only be represented at top-level. */
17659 if (want_address == 2)
17660 {
17661 ret = loc_descriptor (rtl, VOIDmode,
17662 VAR_INIT_STATUS_INITIALIZED);
17663 have_address = 1;
17664 }
17665 else
17666 {
17667 mode = GET_MODE (rtl);
17668 mem_mode = VOIDmode;
17669 if (MEM_P (rtl))
17670 {
17671 mem_mode = mode;
17672 mode = get_address_mode (rtl);
17673 rtl = XEXP (rtl, 0);
17674 have_address = 1;
17675 }
17676 ret = mem_loc_descriptor (rtl, mode, mem_mode,
17677 VAR_INIT_STATUS_INITIALIZED);
17678 }
17679 if (!ret)
17680 expansion_failed (loc, rtl,
17681 "failed to produce loc descriptor for rtl");
17682 }
17683 }
17684 break;
17685
17686 case MEM_REF:
17687 if (!integer_zerop (TREE_OPERAND (loc, 1)))
17688 {
17689 have_address = 1;
17690 goto do_plus;
17691 }
17692 /* Fallthru. */
17693 case INDIRECT_REF:
17694 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17695 have_address = 1;
17696 break;
17697
17698 case TARGET_MEM_REF:
17699 case SSA_NAME:
17700 case DEBUG_EXPR_DECL:
17701 return NULL;
17702
17703 case COMPOUND_EXPR:
17704 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
17705 context);
17706
17707 CASE_CONVERT:
17708 case VIEW_CONVERT_EXPR:
17709 case SAVE_EXPR:
17710 case MODIFY_EXPR:
17711 case NON_LVALUE_EXPR:
17712 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
17713 context);
17714
17715 case COMPONENT_REF:
17716 case BIT_FIELD_REF:
17717 case ARRAY_REF:
17718 case ARRAY_RANGE_REF:
17719 case REALPART_EXPR:
17720 case IMAGPART_EXPR:
17721 {
17722 tree obj, offset;
17723 poly_int64 bitsize, bitpos, bytepos;
17724 machine_mode mode;
17725 int unsignedp, reversep, volatilep = 0;
17726
17727 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
17728 &unsignedp, &reversep, &volatilep);
17729
17730 gcc_assert (obj != loc);
17731
17732 list_ret = loc_list_from_tree_1 (obj,
17733 want_address == 2
17734 && known_eq (bitpos, 0)
17735 && !offset ? 2 : 1,
17736 context);
17737 /* TODO: We can extract the value of a small expression via shifting even
17738 for a nonzero bitpos. */
17739 if (list_ret == 0)
17740 return 0;
17741 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
17742 || !multiple_p (bitsize, BITS_PER_UNIT))
17743 {
17744 expansion_failed (loc, NULL_RTX,
17745 "bitfield access");
17746 return 0;
17747 }
17748
17749 if (offset != NULL_TREE)
17750 {
17751 /* Variable offset. */
17752 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
17753 if (list_ret1 == 0)
17754 return 0;
17755 add_loc_list (&list_ret, list_ret1);
17756 if (!list_ret)
17757 return 0;
17758 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
17759 }
17760
17761 HOST_WIDE_INT value;
17762 if (bytepos.is_constant (&value) && value > 0)
17763 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
17764 value, 0));
17765 else if (maybe_ne (bytepos, 0))
17766 loc_list_plus_const (list_ret, bytepos);
17767
17768 have_address = 1;
17769 break;
17770 }
17771
17772 case INTEGER_CST:
17773 if ((want_address || !tree_fits_shwi_p (loc))
17774 && (ret = cst_pool_loc_descr (loc)))
17775 have_address = 1;
17776 else if (want_address == 2
17777 && tree_fits_shwi_p (loc)
17778 && (ret = address_of_int_loc_descriptor
17779 (int_size_in_bytes (TREE_TYPE (loc)),
17780 tree_to_shwi (loc))))
17781 have_address = 1;
17782 else if (tree_fits_shwi_p (loc))
17783 ret = int_loc_descriptor (tree_to_shwi (loc));
17784 else if (tree_fits_uhwi_p (loc))
17785 ret = uint_loc_descriptor (tree_to_uhwi (loc));
17786 else
17787 {
17788 expansion_failed (loc, NULL_RTX,
17789 "Integer operand is not host integer");
17790 return 0;
17791 }
17792 break;
17793
17794 case CONSTRUCTOR:
17795 case REAL_CST:
17796 case STRING_CST:
17797 case COMPLEX_CST:
17798 if ((ret = cst_pool_loc_descr (loc)))
17799 have_address = 1;
17800 else if (TREE_CODE (loc) == CONSTRUCTOR)
17801 {
17802 tree type = TREE_TYPE (loc);
17803 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
17804 unsigned HOST_WIDE_INT offset = 0;
17805 unsigned HOST_WIDE_INT cnt;
17806 constructor_elt *ce;
17807
17808 if (TREE_CODE (type) == RECORD_TYPE)
17809 {
17810 /* This is very limited, but it's enough to output
17811 pointers to member functions, as long as the
17812 referenced function is defined in the current
17813 translation unit. */
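/* A sketch, using a hypothetical pointer-to-member-function constant
   { __pfn = &Foo::f, __delta = 0 }: it is described piecewise as
     <address of Foo::f> DW_OP_piece <pointer size>
     <0> DW_OP_piece <delta size>
   and any gap between fields becomes a DW_OP_piece with no preceding
   value, i.e. an undefined piece.  */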
17814 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
17815 {
17816 tree val = ce->value;
17817
17818 tree field = ce->index;
17819
17820 if (val)
17821 STRIP_NOPS (val);
17822
17823 if (!field || DECL_BIT_FIELD (field))
17824 {
17825 expansion_failed (loc, NULL_RTX,
17826 "bitfield in record type constructor");
17827 size = offset = (unsigned HOST_WIDE_INT)-1;
17828 ret = NULL;
17829 break;
17830 }
17831
17832 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
17833 unsigned HOST_WIDE_INT pos = int_byte_position (field);
17834 gcc_assert (pos + fieldsize <= size);
17835 if (pos < offset)
17836 {
17837 expansion_failed (loc, NULL_RTX,
17838 "out-of-order fields in record constructor");
17839 size = offset = (unsigned HOST_WIDE_INT)-1;
17840 ret = NULL;
17841 break;
17842 }
17843 if (pos > offset)
17844 {
17845 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
17846 add_loc_descr (&ret, ret1);
17847 offset = pos;
17848 }
17849 if (val && fieldsize != 0)
17850 {
17851 ret1 = loc_descriptor_from_tree (val, want_address, context);
17852 if (!ret1)
17853 {
17854 expansion_failed (loc, NULL_RTX,
17855 "unsupported expression in field");
17856 size = offset = (unsigned HOST_WIDE_INT)-1;
17857 ret = NULL;
17858 break;
17859 }
17860 add_loc_descr (&ret, ret1);
17861 }
17862 if (fieldsize)
17863 {
17864 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
17865 add_loc_descr (&ret, ret1);
17866 offset = pos + fieldsize;
17867 }
17868 }
17869
17870 if (offset != size)
17871 {
17872 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
17873 add_loc_descr (&ret, ret1);
17874 offset = size;
17875 }
17876
17877 have_address = !!want_address;
17878 }
17879 else
17880 expansion_failed (loc, NULL_RTX,
17881 "constructor of non-record type");
17882 }
17883 else
17884 /* We can construct small constants here using int_loc_descriptor. */
17885 expansion_failed (loc, NULL_RTX,
17886 "constructor or constant not in constant pool");
17887 break;
17888
17889 case TRUTH_AND_EXPR:
17890 case TRUTH_ANDIF_EXPR:
17891 case BIT_AND_EXPR:
17892 op = DW_OP_and;
17893 goto do_binop;
17894
17895 case TRUTH_XOR_EXPR:
17896 case BIT_XOR_EXPR:
17897 op = DW_OP_xor;
17898 goto do_binop;
17899
17900 case TRUTH_OR_EXPR:
17901 case TRUTH_ORIF_EXPR:
17902 case BIT_IOR_EXPR:
17903 op = DW_OP_or;
17904 goto do_binop;
17905
17906 case FLOOR_DIV_EXPR:
17907 case CEIL_DIV_EXPR:
17908 case ROUND_DIV_EXPR:
17909 case TRUNC_DIV_EXPR:
17910 case EXACT_DIV_EXPR:
17911 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17912 return 0;
17913 op = DW_OP_div;
17914 goto do_binop;
17915
17916 case MINUS_EXPR:
17917 op = DW_OP_minus;
17918 goto do_binop;
17919
17920 case FLOOR_MOD_EXPR:
17921 case CEIL_MOD_EXPR:
17922 case ROUND_MOD_EXPR:
17923 case TRUNC_MOD_EXPR:
17924 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
17925 {
17926 op = DW_OP_mod;
17927 goto do_binop;
17928 }
17929 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17930 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
17931 if (list_ret == 0 || list_ret1 == 0)
17932 return 0;
17933
17934 add_loc_list (&list_ret, list_ret1);
17935 if (list_ret == 0)
17936 return 0;
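/* DW_OP_mod is an unsigned operation in DWARF, so for signed types we
   compute op0 - (op0 / op1) * op1 instead.  With op0 and op1 on the
   stack (op1 on top), the sequence below evolves as:
     DW_OP_over DW_OP_over -> op0 op1 op0 op1
     DW_OP_div             -> op0 op1 (op0 / op1)
     DW_OP_mul             -> op0 (op0 / op1) * op1
     DW_OP_minus           -> op0 - (op0 / op1) * op1  */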
17937 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17938 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
17939 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
17940 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
17941 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
17942 break;
17943
17944 case MULT_EXPR:
17945 op = DW_OP_mul;
17946 goto do_binop;
17947
17948 case LSHIFT_EXPR:
17949 op = DW_OP_shl;
17950 goto do_binop;
17951
17952 case RSHIFT_EXPR:
17953 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
17954 goto do_binop;
17955
17956 case POINTER_PLUS_EXPR:
17957 case PLUS_EXPR:
17958 do_plus:
17959 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
17960 {
17961 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
17962 smarter to encode their opposite. The DW_OP_plus_uconst operation
17963 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
17964 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
17965 bytes, Y being the size of the operation that pushes the opposite
17966 of the addend. So let's choose the smallest representation. */
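/* For instance, assuming 8-byte addresses, an addend of -1 read as an
   unsigned number needs a 10-byte ULEB128 operand after
   DW_OP_plus_uconst, whereas the "DW_OP_lit1; DW_OP_minus" alternative
   is only 2 bytes, so the latter is picked below.  */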
17967 const tree tree_addend = TREE_OPERAND (loc, 1);
17968 offset_int wi_addend;
17969 HOST_WIDE_INT shwi_addend;
17970 dw_loc_descr_ref loc_naddend;
17971
17972 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
17973 if (list_ret == 0)
17974 return 0;
17975
17976 /* Try to get the literal to push. It is the opposite of the addend,
17977 so as we rely on wrapping during DWARF evaluation, first decode
17978 the literal as a "DWARF-sized" signed number. */
17979 wi_addend = wi::to_offset (tree_addend);
17980 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
17981 shwi_addend = wi_addend.to_shwi ();
17982 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
17983 ? int_loc_descriptor (-shwi_addend)
17984 : NULL;
17985
17986 if (loc_naddend != NULL
17987 && ((unsigned) size_of_uleb128 (shwi_addend)
17988 > size_of_loc_descr (loc_naddend)))
17989 {
17990 add_loc_descr_to_each (list_ret, loc_naddend);
17991 add_loc_descr_to_each (list_ret,
17992 new_loc_descr (DW_OP_minus, 0, 0));
17993 }
17994 else
17995 {
17996 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
17997 {
17998 loc_naddend = loc_cur;
17999 loc_cur = loc_cur->dw_loc_next;
18000 ggc_free (loc_naddend);
18001 }
18002 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18003 }
18004 break;
18005 }
18006
18007 op = DW_OP_plus;
18008 goto do_binop;
18009
18010 case LE_EXPR:
18011 op = DW_OP_le;
18012 goto do_comp_binop;
18013
18014 case GE_EXPR:
18015 op = DW_OP_ge;
18016 goto do_comp_binop;
18017
18018 case LT_EXPR:
18019 op = DW_OP_lt;
18020 goto do_comp_binop;
18021
18022 case GT_EXPR:
18023 op = DW_OP_gt;
18024 goto do_comp_binop;
18025
18026 do_comp_binop:
18027 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18028 {
18029 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18030 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18031 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18032 TREE_CODE (loc));
18033 break;
18034 }
18035 else
18036 goto do_binop;
18037
18038 case EQ_EXPR:
18039 op = DW_OP_eq;
18040 goto do_binop;
18041
18042 case NE_EXPR:
18043 op = DW_OP_ne;
18044 goto do_binop;
18045
18046 do_binop:
18047 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18048 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18049 if (list_ret == 0 || list_ret1 == 0)
18050 return 0;
18051
18052 add_loc_list (&list_ret, list_ret1);
18053 if (list_ret == 0)
18054 return 0;
18055 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18056 break;
18057
18058 case TRUTH_NOT_EXPR:
18059 case BIT_NOT_EXPR:
18060 op = DW_OP_not;
18061 goto do_unop;
18062
18063 case ABS_EXPR:
18064 op = DW_OP_abs;
18065 goto do_unop;
18066
18067 case NEGATE_EXPR:
18068 op = DW_OP_neg;
18069 goto do_unop;
18070
18071 do_unop:
18072 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18073 if (list_ret == 0)
18074 return 0;
18075
18076 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18077 break;
18078
18079 case MIN_EXPR:
18080 case MAX_EXPR:
18081 {
18082 const enum tree_code code =
18083 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18084
18085 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18086 build2 (code, integer_type_node,
18087 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18088 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18089 }
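/* In other words, MIN_EXPR (a, b) is rewritten as (a > b) ? b : a and
   MAX_EXPR (a, b) as (a < b) ? b : a, and the COND_EXPR handling below
   then produces the conditional DWARF expression.  */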
18090
18091 /* fall through */
18092
18093 case COND_EXPR:
18094 {
18095 dw_loc_descr_ref lhs
18096 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18097 dw_loc_list_ref rhs
18098 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18099 dw_loc_descr_ref bra_node, jump_node, tmp;
18100
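/* A sketch of the expression built below:
     <condition>
     DW_OP_bra  L1
     <value of operand 2>   (the "else" value)
     DW_OP_skip L2
 L1: <value of operand 1>   (the "then" value)
 L2: DW_OP_nop
   i.e. a nonzero condition branches over the "else" value straight to
   the "then" value.  */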
18101 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18102 if (list_ret == 0 || lhs == 0 || rhs == 0)
18103 return 0;
18104
18105 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18106 add_loc_descr_to_each (list_ret, bra_node);
18107
18108 add_loc_list (&list_ret, rhs);
18109 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18110 add_loc_descr_to_each (list_ret, jump_node);
18111
18112 add_loc_descr_to_each (list_ret, lhs);
18113 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18114 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18115
18116 /* ??? Need a node to point the skip at. Use a nop. */
18117 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18118 add_loc_descr_to_each (list_ret, tmp);
18119 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18120 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18121 }
18122 break;
18123
18124 case FIX_TRUNC_EXPR:
18125 return 0;
18126
18127 default:
18128 /* Leave front-end specific codes as simply unknown. This comes
18129 up, for instance, with the C STMT_EXPR. */
18130 if ((unsigned int) TREE_CODE (loc)
18131 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18132 {
18133 expansion_failed (loc, NULL_RTX,
18134 "language specific tree node");
18135 return 0;
18136 }
18137
18138 /* Otherwise this is a generic code; we should just list all of
18139 these explicitly. We forgot one. */
18140 if (flag_checking)
18141 gcc_unreachable ();
18142
18143 /* In a release build, we want to degrade gracefully: better to
18144 generate incomplete debugging information than to crash. */
18145 return NULL;
18146 }
18147
18148 if (!ret && !list_ret)
18149 return 0;
18150
18151 if (want_address == 2 && !have_address
18152 && (dwarf_version >= 4 || !dwarf_strict))
18153 {
18154 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18155 {
18156 expansion_failed (loc, NULL_RTX,
18157 "DWARF address size mismatch");
18158 return 0;
18159 }
18160 if (ret)
18161 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18162 else
18163 add_loc_descr_to_each (list_ret,
18164 new_loc_descr (DW_OP_stack_value, 0, 0));
18165 have_address = 1;
18166 }
18167 /* Show if we can't fill the request for an address. */
18168 if (want_address && !have_address)
18169 {
18170 expansion_failed (loc, NULL_RTX,
18171 "Want address and only have value");
18172 return 0;
18173 }
18174
18175 gcc_assert (!ret || !list_ret);
18176
18177 /* If we've got an address and don't want one, dereference. */
18178 if (!want_address && have_address)
18179 {
18180 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18181
18182 if (size > DWARF2_ADDR_SIZE || size == -1)
18183 {
18184 expansion_failed (loc, NULL_RTX,
18185 "DWARF address size mismatch");
18186 return 0;
18187 }
18188 else if (size == DWARF2_ADDR_SIZE)
18189 op = DW_OP_deref;
18190 else
18191 op = DW_OP_deref_size;
18192
18193 if (ret)
18194 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18195 else
18196 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18197 }
18198 if (ret)
18199 list_ret = new_loc_list (ret, NULL, NULL, NULL);
18200
18201 return list_ret;
18202 }
18203
18204 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18205 expressions. */
18206
18207 static dw_loc_list_ref
18208 loc_list_from_tree (tree loc, int want_address,
18209 struct loc_descr_context *context)
18210 {
18211 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18212
18213 for (dw_loc_list_ref loc_cur = result;
18214 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18215 loc_descr_without_nops (loc_cur->expr);
18216 return result;
18217 }
18218
18219 /* Same as above but return only a single location expression. */
18220 static dw_loc_descr_ref
18221 loc_descriptor_from_tree (tree loc, int want_address,
18222 struct loc_descr_context *context)
18223 {
18224 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18225 if (!ret)
18226 return NULL;
18227 if (ret->dw_loc_next)
18228 {
18229 expansion_failed (loc, NULL_RTX,
18230 "Location list where only loc descriptor needed");
18231 return NULL;
18232 }
18233 return ret->expr;
18234 }
18235
18236 /* Given a value, round it up to the lowest multiple of `boundary'
18237 which is not less than the value itself. */
18238
18239 static inline HOST_WIDE_INT
18240 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18241 {
18242 return (((value + boundary - 1) / boundary) * boundary);
18243 }
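/* For example, ceiling (5, 4) == 8 and ceiling (8, 4) == 8.  */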
18244
18245 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18246 pointer to the declared type for the relevant field variable, or return
18247 `integer_type_node' if the given node turns out to be an
18248 ERROR_MARK node. */
18249
18250 static inline tree
18251 field_type (const_tree decl)
18252 {
18253 tree type;
18254
18255 if (TREE_CODE (decl) == ERROR_MARK)
18256 return integer_type_node;
18257
18258 type = DECL_BIT_FIELD_TYPE (decl);
18259 if (type == NULL_TREE)
18260 type = TREE_TYPE (decl);
18261
18262 return type;
18263 }
18264
18265 /* Given a pointer to a tree node, return the alignment in bits for
18266 it, or else return BITS_PER_WORD if the node actually turns out to
18267 be an ERROR_MARK node. */
18268
18269 static inline unsigned
18270 simple_type_align_in_bits (const_tree type)
18271 {
18272 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18273 }
18274
18275 static inline unsigned
18276 simple_decl_align_in_bits (const_tree decl)
18277 {
18278 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18279 }
18280
18281 /* Return the result of rounding T up to ALIGN. */
18282
18283 static inline offset_int
18284 round_up_to_align (const offset_int &t, unsigned int align)
18285 {
18286 return wi::udiv_trunc (t + align - 1, align) * align;
18287 }
18288
18289 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18290 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18291 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18292 if we fail to return the size in one of these two forms. */
18293
18294 static dw_loc_descr_ref
18295 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18296 {
18297 tree tree_size;
18298 struct loc_descr_context ctx;
18299
18300 /* Prefer to return a constant integer, if at all possible. */
18301 *cst_size = int_size_in_bytes (type);
18302 if (*cst_size != -1)
18303 return NULL;
18304
18305 ctx.context_type = const_cast<tree> (type);
18306 ctx.base_decl = NULL_TREE;
18307 ctx.dpi = NULL;
18308 ctx.placeholder_arg = false;
18309 ctx.placeholder_seen = false;
18310
18311 type = TYPE_MAIN_VARIANT (type);
18312 tree_size = TYPE_SIZE_UNIT (type);
18313 return ((tree_size != NULL_TREE)
18314 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18315 : NULL);
18316 }
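/* A hypothetical caller (sketch only) would use both return channels:

     HOST_WIDE_INT cst_size;
     dw_loc_descr_ref size_expr = type_byte_size (type, &cst_size);
     if (size_expr != NULL)
       add_AT_loc (die, DW_AT_byte_size, size_expr);
     else if (cst_size != -1)
       add_AT_unsigned (die, DW_AT_byte_size, cst_size);

   i.e. prefer the DWARF expression when one is returned, fall back to
   the constant, and emit nothing when neither form is available.  */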
18317
18318 /* Helper structure for RECORD_TYPE processing. */
18319 struct vlr_context
18320 {
18321 /* Root RECORD_TYPE. It is needed to generate data member location
18322 descriptions in variable-length records (VLR), but also to cope with
18323 variants, which are composed of nested structures multiplexed with
18324 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18325 function processing a FIELD_DECL, it is required to be non null. */
18326 tree struct_type;
18327 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18328 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18329 this variant part as part of the root record (in storage units). For
18330 regular records, it must be NULL_TREE. */
18331 tree variant_part_offset;
18332 };
18333
18334 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18335 addressed byte of the "containing object" for the given FIELD_DECL. If
18336 possible, return a native constant through CST_OFFSET (in which case NULL is
18337 returned); otherwise return a DWARF expression that computes the offset.
18338
18339 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18340 that offset is, either because the argument turns out to be a pointer to an
18341 ERROR_MARK node, or because the offset expression is too complex for us.
18342
18343 CTX is required: see the comment for VLR_CONTEXT. */
18344
18345 static dw_loc_descr_ref
18346 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18347 HOST_WIDE_INT *cst_offset)
18348 {
18349 tree tree_result;
18350 dw_loc_list_ref loc_result;
18351
18352 *cst_offset = 0;
18353
18354 if (TREE_CODE (decl) == ERROR_MARK)
18355 return NULL;
18356 else
18357 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18358
18359 /* We cannot handle variable bit offsets at the moment, so abort if that is
18360 the case. */
18361 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18362 return NULL;
18363
18364 #ifdef PCC_BITFIELD_TYPE_MATTERS
18365 /* We used to handle only constant offsets in all cases. Now, we properly
18366 handle dynamic byte offsets only when the PCC bitfield type doesn't
18367 matter. */
18368 if (PCC_BITFIELD_TYPE_MATTERS
18369 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18370 {
18371 offset_int object_offset_in_bits;
18372 offset_int object_offset_in_bytes;
18373 offset_int bitpos_int;
18374 tree type;
18375 tree field_size_tree;
18376 offset_int deepest_bitpos;
18377 offset_int field_size_in_bits;
18378 unsigned int type_align_in_bits;
18379 unsigned int decl_align_in_bits;
18380 offset_int type_size_in_bits;
18381
18382 bitpos_int = wi::to_offset (bit_position (decl));
18383 type = field_type (decl);
18384 type_size_in_bits = offset_int_type_size_in_bits (type);
18385 type_align_in_bits = simple_type_align_in_bits (type);
18386
18387 field_size_tree = DECL_SIZE (decl);
18388
18389 /* The size could be unspecified if there was an error, or for
18390 a flexible array member. */
18391 if (!field_size_tree)
18392 field_size_tree = bitsize_zero_node;
18393
18394 /* If the size of the field is not constant, use the type size. */
18395 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18396 field_size_in_bits = wi::to_offset (field_size_tree);
18397 else
18398 field_size_in_bits = type_size_in_bits;
18399
18400 decl_align_in_bits = simple_decl_align_in_bits (decl);
18401
18402 /* The GCC front-end doesn't make any attempt to keep track of the
18403 starting bit offset (relative to the start of the containing
18404 structure type) of the hypothetical "containing object" for a
18405 bit-field. Thus, when computing the byte offset value for the
18406 start of the "containing object" of a bit-field, we must deduce
18407 this information on our own. This can be rather tricky to do in
18408 some cases. For example, handling the following structure type
18409 definition when compiling for an i386/i486 target (which only
18410 aligns long long's to 32-bit boundaries) can be very tricky:
18411
18412 struct S { int field1; long long field2:31; };
18413
18414 Fortunately, there is a simple rule-of-thumb which can be used
18415 in such cases. When compiling for an i386/i486, GCC will
18416 allocate 8 bytes for the structure shown above. It decides to
18417 do this based upon one simple rule for bit-field allocation.
18418 GCC allocates each "containing object" for each bit-field at
18419 the first (i.e. lowest addressed) legitimate alignment boundary
18420 (based upon the required minimum alignment for the declared
18421 type of the field) which it can possibly use, subject to the
18422 condition that there is still enough available space remaining
18423 in the containing object (when allocated at the selected point)
18424 to fully accommodate all of the bits of the bit-field itself.
18425
18426 This simple rule makes it obvious why GCC allocates 8 bytes for
18427 each object of the structure type shown above. When looking
18428 for a place to allocate the "containing object" for `field2',
18429 the compiler simply tries to allocate a 64-bit "containing
18430 object" at each successive 32-bit boundary (starting at zero)
18431 until it finds a place to allocate that 64-bit field such that
18432 at least 31 contiguous (and previously unallocated) bits remain
18433 within that selected 64-bit field. (As it turns out, for the
18434 example above, the compiler finds it is OK to allocate the
18435 "containing object" 64-bit field at bit-offset zero within the
18436 structure type.)
18437
18438 Here we attempt to work backwards from the limited set of facts
18439 we're given, and we try to deduce from those facts, where GCC
18440 must have believed that the containing object started (within
18441 the structure type). The value we deduce is then used (by the
18442 callers of this routine) to generate DW_AT_location and
18443 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18444 the case of DW_AT_location, regular fields as well). */
18445
18446 /* Figure out the bit-distance from the start of the structure to
18447 the "deepest" bit of the bit-field. */
18448 deepest_bitpos = bitpos_int + field_size_in_bits;
18449
18450 /* This is the tricky part. Use some fancy footwork to deduce
18451 where the lowest addressed bit of the containing object must
18452 be. */
18453 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18454
18455 /* Round up to type_align by default. This works best for
18456 bitfields. */
18457 object_offset_in_bits
18458 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18459
18460 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18461 {
18462 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18463
18464 /* Round up to decl_align instead. */
18465 object_offset_in_bits
18466 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18467 }
18468
18469 object_offset_in_bytes
18470 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18471 if (ctx->variant_part_offset == NULL_TREE)
18472 {
18473 *cst_offset = object_offset_in_bytes.to_shwi ();
18474 return NULL;
18475 }
18476 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18477 }
18478 else
18479 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18480 tree_result = byte_position (decl);
18481
18482 if (ctx->variant_part_offset != NULL_TREE)
18483 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
18484 ctx->variant_part_offset, tree_result);
18485
18486 /* If the byte offset is a constant, it's simpler to handle a native
18487 constant rather than a DWARF expression. */
18488 if (TREE_CODE (tree_result) == INTEGER_CST)
18489 {
18490 *cst_offset = wi::to_offset (tree_result).to_shwi ();
18491 return NULL;
18492 }
18493 struct loc_descr_context loc_ctx = {
18494 ctx->struct_type, /* context_type */
18495 NULL_TREE, /* base_decl */
18496 NULL, /* dpi */
18497 false, /* placeholder_arg */
18498 false /* placeholder_seen */
18499 };
18500 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
18501
18502 /* We want a DWARF expression: abort if we only have a location list with
18503 multiple elements. */
18504 if (!loc_result || !single_element_loc_list_p (loc_result))
18505 return NULL;
18506 else
18507 return loc_result->expr;
18508 }
18509 \f
18510 /* The following routines define various Dwarf attributes and any data
18511 associated with them. */
18512
18513 /* Add a location description attribute value to a DIE.
18514
18515 This emits location attributes suitable for whole variables and
18516 whole parameters. Note that the location attributes for struct fields are
18517 generated by the routine `data_member_location_attribute' below. */
18518
18519 static inline void
18520 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
18521 dw_loc_list_ref descr)
18522 {
18523 if (descr == 0)
18524 return;
18525 if (single_element_loc_list_p (descr))
18526 add_AT_loc (die, attr_kind, descr->expr);
18527 else
18528 add_AT_loc_list (die, attr_kind, descr);
18529 }
18530
18531 /* Add DW_AT_accessibility attribute to DIE if needed. */
18532
18533 static void
18534 add_accessibility_attribute (dw_die_ref die, tree decl)
18535 {
18536 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
18537 children, otherwise the default is DW_ACCESS_public. In DWARF2
18538 the default has always been DW_ACCESS_public. */
18539 if (TREE_PROTECTED (decl))
18540 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
18541 else if (TREE_PRIVATE (decl))
18542 {
18543 if (dwarf_version == 2
18544 || die->die_parent == NULL
18545 || die->die_parent->die_tag != DW_TAG_class_type)
18546 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
18547 }
18548 else if (dwarf_version > 2
18549 && die->die_parent
18550 && die->die_parent->die_tag == DW_TAG_class_type)
18551 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
18552 }
18553
18554 /* Attach the specialized form of location attribute used for data members of
18555 struct and union types. In the special case of a FIELD_DECL node which
18556 represents a bit-field, the "offset" part of this special location
18557 descriptor must indicate the distance in bytes from the lowest-addressed
18558 byte of the containing struct or union type to the lowest-addressed byte of
18559 the "containing object" for the bit-field. (See the `field_byte_offset'
18560 function above).
18561
18562 For any given bit-field, the "containing object" is a hypothetical object
18563 (of some integral or enum type) within which the given bit-field lives. The
18564 type of this hypothetical "containing object" is always the same as the
18565 declared type of the individual bit-field itself (for GCC anyway... the
18566 DWARF spec doesn't actually mandate this). Note that it is the size (in
18567 bytes) of the hypothetical "containing object" which will be given in the
18568 DW_AT_byte_size attribute for this bit-field. (See the
18569 `byte_size_attribute' function below.) It is also used when calculating the
18570 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
18571 function below.)
18572
18573 CTX is required: see the comment for VLR_CONTEXT. */
18574
18575 static void
18576 add_data_member_location_attribute (dw_die_ref die,
18577 tree decl,
18578 struct vlr_context *ctx)
18579 {
18580 HOST_WIDE_INT offset;
18581 dw_loc_descr_ref loc_descr = 0;
18582
18583 if (TREE_CODE (decl) == TREE_BINFO)
18584 {
18585 /* We're working on the TAG_inheritance for a base class. */
18586 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
18587 {
18588 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
18589 aren't at a fixed offset from all (sub)objects of the same
18590 type. We need to extract the appropriate offset from our
18591 vtable. The following dwarf expression means
18592
18593 BaseAddr = ObAddr + *((*ObAddr) - Offset)
18594
18595 This is specific to the V3 ABI, of course. */
18596
18597 dw_loc_descr_ref tmp;
18598
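/* For illustration, with the object address ObAddr initially on top of
   the stack, the sequence built below evolves as:
     DW_OP_dup     -> ObAddr ObAddr
     DW_OP_deref   -> ObAddr VtblAddr
     <push -offset>
     DW_OP_minus   -> ObAddr (VtblAddr + offset)   [offset is negative]
     DW_OP_deref   -> ObAddr BaseOffset
     DW_OP_plus    -> ObAddr + BaseOffset, i.e. BaseAddr  */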
18599 /* Make a copy of the object address. */
18600 tmp = new_loc_descr (DW_OP_dup, 0, 0);
18601 add_loc_descr (&loc_descr, tmp);
18602
18603 /* Extract the vtable address. */
18604 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18605 add_loc_descr (&loc_descr, tmp);
18606
18607 /* Calculate the address of the offset. */
18608 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
18609 gcc_assert (offset < 0);
18610
18611 tmp = int_loc_descriptor (-offset);
18612 add_loc_descr (&loc_descr, tmp);
18613 tmp = new_loc_descr (DW_OP_minus, 0, 0);
18614 add_loc_descr (&loc_descr, tmp);
18615
18616 /* Extract the offset. */
18617 tmp = new_loc_descr (DW_OP_deref, 0, 0);
18618 add_loc_descr (&loc_descr, tmp);
18619
18620 /* Add it to the object address. */
18621 tmp = new_loc_descr (DW_OP_plus, 0, 0);
18622 add_loc_descr (&loc_descr, tmp);
18623 }
18624 else
18625 offset = tree_to_shwi (BINFO_OFFSET (decl));
18626 }
18627 else
18628 {
18629 loc_descr = field_byte_offset (decl, ctx, &offset);
18630
18631 /* If loc_descr is available then we know the field offset is dynamic.
18632 However, GDB does not handle dynamic field offsets very well at the
18633 moment. */
18634 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
18635 {
18636 loc_descr = NULL;
18637 offset = 0;
18638 }
18639
18640 /* Data member location evaluation starts with the base address on the
18641 stack. Compute the field offset and add it to this base address. */
18642 else if (loc_descr != NULL)
18643 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
18644 }
18645
18646 if (! loc_descr)
18647 {
18648 /* While DW_AT_data_bit_offset was already added in DWARF4, consumers
18649 were slow to support it; e.g. GDB only added support in November 2016.
18650 For DWARF5 we need newer debug info consumers anyway. We might change
18651 this to dwarf_version >= 4 once most consumers have caught up. */
18652 if (dwarf_version >= 5
18653 && TREE_CODE (decl) == FIELD_DECL
18654 && DECL_BIT_FIELD_TYPE (decl))
18655 {
18656 tree off = bit_position (decl);
18657 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
18658 {
18659 remove_AT (die, DW_AT_byte_size);
18660 remove_AT (die, DW_AT_bit_offset);
18661 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
18662 return;
18663 }
18664 }
18665 if (dwarf_version > 2)
18666 {
18667 /* Don't need to output a location expression, just the constant. */
18668 if (offset < 0)
18669 add_AT_int (die, DW_AT_data_member_location, offset);
18670 else
18671 add_AT_unsigned (die, DW_AT_data_member_location, offset);
18672 return;
18673 }
18674 else
18675 {
18676 enum dwarf_location_atom op;
18677
18678 /* The DWARF2 standard says that we should assume that the structure
18679 address is already on the stack, so we can specify a structure
18680 field address by using DW_OP_plus_uconst. */
18681 op = DW_OP_plus_uconst;
18682 loc_descr = new_loc_descr (op, offset, 0);
18683 }
18684 }
18685
18686 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
18687 }
18688
18689 /* Writes integer values to dw_vec_const array. */
18690
18691 static void
18692 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
18693 {
18694 while (size != 0)
18695 {
18696 *dest++ = val & 0xff;
18697 val >>= 8;
18698 --size;
18699 }
18700 }
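/* For example, insert_int (0x1234, 2, dest) stores dest[0] == 0x34 and
   dest[1] == 0x12: bytes are always written least-significant first,
   regardless of host endianness.  extract_int below is the inverse.  */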
18701
18702 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
18703
18704 static HOST_WIDE_INT
18705 extract_int (const unsigned char *src, unsigned int size)
18706 {
18707 HOST_WIDE_INT val = 0;
18708
18709 src += size;
18710 while (size != 0)
18711 {
18712 val <<= 8;
18713 val |= *--src & 0xff;
18714 --size;
18715 }
18716 return val;
18717 }
18718
18719 /* Writes wide_int values to dw_vec_const array. */
18720
18721 static void
18722 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
18723 {
18724 int i;
18725
18726 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
18727 {
18728 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
18729 return;
18730 }
18731
18732 /* We'd have to extend this code to support odd sizes. */
18733 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
18734
18735 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
18736
18737 if (WORDS_BIG_ENDIAN)
18738 for (i = n - 1; i >= 0; i--)
18739 {
18740 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18741 dest += sizeof (HOST_WIDE_INT);
18742 }
18743 else
18744 for (i = 0; i < n; i++)
18745 {
18746 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
18747 dest += sizeof (HOST_WIDE_INT);
18748 }
18749 }
18750
18751 /* Writes floating point values to dw_vec_const array. */
18752
18753 static void
18754 insert_float (const_rtx rtl, unsigned char *array)
18755 {
18756 long val[4];
18757 int i;
18758 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18759
18760 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
18761
18762 /* real_to_target puts 32-bit pieces in each long. Pack them. */
18763 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
18764 {
18765 insert_int (val[i], 4, array);
18766 array += 4;
18767 }
18768 }
18769
18770 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
18771 does not have a "location" either in memory or in a register. These
18772 things can arise in GNU C when a constant is passed as an actual parameter
18773 to an inlined function. They can also arise in C++ where declared
18774 constants do not necessarily get memory "homes". */
18775
18776 static bool
18777 add_const_value_attribute (dw_die_ref die, rtx rtl)
18778 {
18779 switch (GET_CODE (rtl))
18780 {
18781 case CONST_INT:
18782 {
18783 HOST_WIDE_INT val = INTVAL (rtl);
18784
18785 if (val < 0)
18786 add_AT_int (die, DW_AT_const_value, val);
18787 else
18788 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
18789 }
18790 return true;
18791
18792 case CONST_WIDE_INT:
18793 {
18794 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
18795 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
18796 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
18797 wide_int w = wi::zext (w1, prec);
18798 add_AT_wide (die, DW_AT_const_value, w);
18799 }
18800 return true;
18801
18802 case CONST_DOUBLE:
18803 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
18804 floating-point constant. A CONST_DOUBLE is used whenever the
18805 constant requires more than one word in order to be adequately
18806 represented. */
18807 if (TARGET_SUPPORTS_WIDE_INT == 0
18808 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
18809 add_AT_double (die, DW_AT_const_value,
18810 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
18811 else
18812 {
18813 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
18814 unsigned int length = GET_MODE_SIZE (mode);
18815 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
18816
18817 insert_float (rtl, array);
18818 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
18819 }
18820 return true;
18821
18822 case CONST_VECTOR:
18823 {
18824 unsigned int length;
18825 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
18826 return false;
18827
18828 machine_mode mode = GET_MODE (rtl);
18829 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
18830 unsigned char *array
18831 = ggc_vec_alloc<unsigned char> (length * elt_size);
18832 unsigned int i;
18833 unsigned char *p;
18834 machine_mode imode = GET_MODE_INNER (mode);
18835
18836 switch (GET_MODE_CLASS (mode))
18837 {
18838 case MODE_VECTOR_INT:
18839 for (i = 0, p = array; i < length; i++, p += elt_size)
18840 {
18841 rtx elt = CONST_VECTOR_ELT (rtl, i);
18842 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
18843 }
18844 break;
18845
18846 case MODE_VECTOR_FLOAT:
18847 for (i = 0, p = array; i < length; i++, p += elt_size)
18848 {
18849 rtx elt = CONST_VECTOR_ELT (rtl, i);
18850 insert_float (elt, p);
18851 }
18852 break;
18853
18854 default:
18855 gcc_unreachable ();
18856 }
18857
18858 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
18859 }
18860 return true;
18861
18862 case CONST_STRING:
18863 if (dwarf_version >= 4 || !dwarf_strict)
18864 {
18865 dw_loc_descr_ref loc_result;
18866 resolve_one_addr (&rtl);
18867 rtl_addr:
18868 loc_result = new_addr_loc_descr (rtl, dtprel_false);
18869 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
18870 add_AT_loc (die, DW_AT_location, loc_result);
18871 vec_safe_push (used_rtx_array, rtl);
18872 return true;
18873 }
18874 return false;
18875
18876 case CONST:
18877 if (CONSTANT_P (XEXP (rtl, 0)))
18878 return add_const_value_attribute (die, XEXP (rtl, 0));
18879 /* FALLTHROUGH */
18880 case SYMBOL_REF:
18881 if (!const_ok_for_output (rtl))
18882 return false;
18883 /* FALLTHROUGH */
18884 case LABEL_REF:
18885 if (dwarf_version >= 4 || !dwarf_strict)
18886 goto rtl_addr;
18887 return false;
18888
18889 case PLUS:
18890 /* In cases where an inlined instance of an inline function is passed
18891 the address of an `auto' variable (which is local to the caller) we
18892 can get a situation where the DECL_RTL of the artificial local
18893 variable (for the inlining) which acts as a stand-in for the
18894 corresponding formal parameter (of the inline function) will look
18895 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
18896 exactly a compile-time constant expression, but it isn't the address
18897 of the (artificial) local variable either. Rather, it represents the
18898 *value* which the artificial local variable always has during its
18899 lifetime. We currently have no way to represent such quasi-constant
18900 values in Dwarf, so for now we just punt and generate nothing. */
18901 return false;
18902
18903 case HIGH:
18904 case CONST_FIXED:
18905 return false;
18906
18907 case MEM:
18908 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
18909 && MEM_READONLY_P (rtl)
18910 && GET_MODE (rtl) == BLKmode)
18911 {
18912 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
18913 return true;
18914 }
18915 return false;
18916
18917 default:
18918 /* No other kinds of rtx should be possible here. */
18919 gcc_unreachable ();
18920 }
18921 return false;
18922 }
18923
18924 /* Determine whether the evaluation of EXPR references any variables
18925 or functions which aren't otherwise used (and therefore may not be
18926 output). */
18927 static tree
18928 reference_to_unused (tree * tp, int * walk_subtrees,
18929 void * data ATTRIBUTE_UNUSED)
18930 {
18931 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
18932 *walk_subtrees = 0;
18933
18934 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
18935 && ! TREE_ASM_WRITTEN (*tp))
18936 return *tp;
18937 /* ??? The C++ FE emits debug information for using decls, so
18938 putting gcc_unreachable here falls over. See PR31899. For now
18939 be conservative. */
18940 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
18941 return *tp;
18942 else if (VAR_P (*tp))
18943 {
18944 varpool_node *node = varpool_node::get (*tp);
18945 if (!node || !node->definition)
18946 return *tp;
18947 }
18948 else if (TREE_CODE (*tp) == FUNCTION_DECL
18949 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
18950 {
18951 /* The call graph machinery must have finished analyzing,
18952 optimizing and gimplifying the CU by now.
18953 So if *TP has no call graph node associated
18954 to it, it means *TP will not be emitted. */
18955 if (!cgraph_node::get (*tp))
18956 return *tp;
18957 }
18958 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
18959 return *tp;
18960
18961 return NULL_TREE;
18962 }
18963
18964 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
18965 for use in a later add_const_value_attribute call. */
18966
18967 static rtx
18968 rtl_for_decl_init (tree init, tree type)
18969 {
18970 rtx rtl = NULL_RTX;
18971
18972 STRIP_NOPS (init);
18973
18974 /* If a variable is initialized with a string constant without embedded
18975 zeros, build CONST_STRING. */
18976 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
18977 {
18978 tree enttype = TREE_TYPE (type);
18979 tree domain = TYPE_DOMAIN (type);
18980 scalar_int_mode mode;
18981
18982 if (is_int_mode (TYPE_MODE (enttype), &mode)
18983 && GET_MODE_SIZE (mode) == 1
18984 && domain
18985 && integer_zerop (TYPE_MIN_VALUE (domain))
18986 && compare_tree_int (TYPE_MAX_VALUE (domain),
18987 TREE_STRING_LENGTH (init) - 1) == 0
18988 && ((size_t) TREE_STRING_LENGTH (init)
18989 == strlen (TREE_STRING_POINTER (init)) + 1))
18990 {
18991 rtl = gen_rtx_CONST_STRING (VOIDmode,
18992 ggc_strdup (TREE_STRING_POINTER (init)));
18993 rtl = gen_rtx_MEM (BLKmode, rtl);
18994 MEM_READONLY_P (rtl) = 1;
18995 }
18996 }
18997 /* Other aggregates, and complex values, could be represented using
18998 CONCAT: FIXME! */
18999 else if (AGGREGATE_TYPE_P (type)
19000 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19001 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19002 || TREE_CODE (type) == COMPLEX_TYPE)
19003 ;
19004 /* Vectors only work if their mode is supported by the target.
19005 FIXME: generic vectors ought to work too. */
19006 else if (TREE_CODE (type) == VECTOR_TYPE
19007 && !VECTOR_MODE_P (TYPE_MODE (type)))
19008 ;
19009 /* If the initializer is something that we know will expand into an
19010 immediate RTL constant, expand it now. We must be careful not to
19011 reference variables which won't be output. */
19012 else if (initializer_constant_valid_p (init, type)
19013 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19014 {
19015 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19016 possible. */
19017 if (TREE_CODE (type) == VECTOR_TYPE)
19018 switch (TREE_CODE (init))
19019 {
19020 case VECTOR_CST:
19021 break;
19022 case CONSTRUCTOR:
19023 if (TREE_CONSTANT (init))
19024 {
19025 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19026 bool constant_p = true;
19027 tree value;
19028 unsigned HOST_WIDE_INT ix;
19029
19030 /* Even when ctor is constant, it might contain non-*_CST
19031 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19032 belong in VECTOR_CST nodes. */
19033 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19034 if (!CONSTANT_CLASS_P (value))
19035 {
19036 constant_p = false;
19037 break;
19038 }
19039
19040 if (constant_p)
19041 {
19042 init = build_vector_from_ctor (type, elts);
19043 break;
19044 }
19045 }
19046 /* FALLTHRU */
19047
19048 default:
19049 return NULL;
19050 }
19051
19052 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19053
19054 /* If expand_expr returns a MEM, it wasn't immediate. */
19055 gcc_assert (!rtl || !MEM_P (rtl));
19056 }
19057
19058 return rtl;
19059 }
19060
19061 /* Generate RTL for the variable DECL to represent its location. */
19062
19063 static rtx
19064 rtl_for_decl_location (tree decl)
19065 {
19066 rtx rtl;
19067
19068 /* Here we have to decide where we are going to say the parameter "lives"
19069 (as far as the debugger is concerned). We only have a couple of
19070 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19071
19072 DECL_RTL normally indicates where the parameter lives during most of the
19073 activation of the function. If optimization is enabled however, this
19074 could be either NULL or else a pseudo-reg. Both of those cases indicate
19075 that the parameter doesn't really live anywhere (as far as the code
19076 generation parts of GCC are concerned) during most of the function's
19077 activation. That will happen (for example) if the parameter is never
19078 referenced within the function.
19079
19080 We could just generate a location descriptor here for all non-NULL
19081 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19082 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19083 where DECL_RTL is NULL or is a pseudo-reg.
19084
19085 Note however that we can only get away with using DECL_INCOMING_RTL as
19086 a backup substitute for DECL_RTL in certain limited cases. In cases
19087 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19088 we can be sure that the parameter was passed using the same type as it is
19089 declared to have within the function, and that its DECL_INCOMING_RTL
19090 points us to a place where a value of that type is passed.
19091
19092 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19093 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19094 because in these cases DECL_INCOMING_RTL points us to a value of some
19095 type which is *different* from the type of the parameter itself. Thus,
19096 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19097 such cases, the debugger would end up (for example) trying to fetch a
19098 `float' from a place which actually contains the first part of a
19099 `double'. That would lead to really incorrect and confusing
19100 output at debug-time.
19101
19102 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19103 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19104 are a couple of exceptions however. On little-endian machines we can
19105 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19106 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19107 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19108 when (on a little-endian machine) a non-prototyped function has a
19109 parameter declared to be of type `short' or `char'. In such cases,
19110 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19111 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19112 passed `int' value. If the debugger then uses that address to fetch
19113 a `short' or a `char' (on a little-endian machine) the result will be
19114 the correct data, so we allow for such exceptional cases below.
19115
19116 Note that our goal here is to describe the place where the given formal
19117 parameter lives during most of the function's activation (i.e. between the
19118 end of the prologue and the start of the epilogue). We'll do that as best
19119 as we can. Note however that if the given formal parameter is modified
19120 sometime during the execution of the function, then a stack backtrace (at
19121 debug-time) will show the function as having been called with the *new*
19122 value rather than the value which was originally passed in. This happens
19123 rarely enough that it is not a major problem, but it *is* a problem, and
19124 I'd like to fix it.
19125
19126 A future version of dwarf2out.c may generate two additional attributes for
19127 any given DW_TAG_formal_parameter DIE which will describe the "passed
19128 type" and the "passed location" for the given formal parameter in addition
19129 to the attributes we now generate to indicate the "declared type" and the
19130 "active location" for each parameter. This additional set of attributes
19131 could be used by debuggers for stack backtraces. Separately, note that
19132 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19133 This happens (for example) for inlined-instances of inline function formal
19134 parameters which are never referenced. This really shouldn't be
19135 happening. All PARM_DECL nodes should get valid non-NULL
19136 DECL_INCOMING_RTL values. FIXME. */
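   /* Editorial illustration (a sketch, not part of the original sources;
      the function names are made up): the little-endian exception above
      corresponds to old-style C such as

	 int use (c) char c; { return c; }         K&R definition, no prototype
	 int call (void) { return use ('x'); }     'x' is promoted to int

      Here TREE_TYPE (c) is `char', DECL_ARG_TYPE (c) is `int', and on a
      little-endian target the low-order byte of the passed `int' is exactly
      the byte the debugger needs, so DECL_INCOMING_RTL remains usable.  */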
19137
19138 /* Use DECL_RTL as the "location" unless we find something better. */
19139 rtl = DECL_RTL_IF_SET (decl);
19140
19141 /* When generating abstract instances, ignore everything except
19142 constants, symbols living in memory, and symbols living in
19143 fixed registers. */
19144 if (! reload_completed)
19145 {
19146 if (rtl
19147 && (CONSTANT_P (rtl)
19148 || (MEM_P (rtl)
19149 && CONSTANT_P (XEXP (rtl, 0)))
19150 || (REG_P (rtl)
19151 && VAR_P (decl)
19152 && TREE_STATIC (decl))))
19153 {
19154 rtl = targetm.delegitimize_address (rtl);
19155 return rtl;
19156 }
19157 rtl = NULL_RTX;
19158 }
19159 else if (TREE_CODE (decl) == PARM_DECL)
19160 {
19161 if (rtl == NULL_RTX
19162 || is_pseudo_reg (rtl)
19163 || (MEM_P (rtl)
19164 && is_pseudo_reg (XEXP (rtl, 0))
19165 && DECL_INCOMING_RTL (decl)
19166 && MEM_P (DECL_INCOMING_RTL (decl))
19167 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19168 {
19169 tree declared_type = TREE_TYPE (decl);
19170 tree passed_type = DECL_ARG_TYPE (decl);
19171 machine_mode dmode = TYPE_MODE (declared_type);
19172 machine_mode pmode = TYPE_MODE (passed_type);
19173
19174 /* This decl represents a formal parameter which was optimized out.
19175 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19176 all cases where (rtl == NULL_RTX) just below. */
19177 if (dmode == pmode)
19178 rtl = DECL_INCOMING_RTL (decl);
19179 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19180 && SCALAR_INT_MODE_P (dmode)
19181 && GET_MODE_SIZE (dmode) <= GET_MODE_SIZE (pmode)
19182 && DECL_INCOMING_RTL (decl))
19183 {
19184 rtx inc = DECL_INCOMING_RTL (decl);
19185 if (REG_P (inc))
19186 rtl = inc;
19187 else if (MEM_P (inc))
19188 {
19189 if (BYTES_BIG_ENDIAN)
19190 rtl = adjust_address_nv (inc, dmode,
19191 GET_MODE_SIZE (pmode)
19192 - GET_MODE_SIZE (dmode));
19193 else
19194 rtl = inc;
19195 }
19196 }
19197 }
19198
19199 /* If the parm was passed in registers, but lives on the stack, then
19200 make a big endian correction if the mode of the type of the
19201 parameter is not the same as the mode of the rtl. */
19202 /* ??? This is the same series of checks that are made in dbxout.c before
19203 we reach the big endian correction code there. It isn't clear if all
19204 of these checks are necessary here, but keeping them all is the safe
19205 thing to do. */
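      /* Editorial worked example (hypothetical 32-bit big-endian target with
	 UNITS_PER_WORD == 4): a `short' parameter that lives in a word-sized
	 stack slot occupies the high-address half of that word, so the code
	 below rebuilds the MEM at XEXP (rtl, 0) plus 4 - 2 = 2, i.e. at the
	 address of the two bytes that actually hold the `short'.  */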
19206 else if (MEM_P (rtl)
19207 && XEXP (rtl, 0) != const0_rtx
19208 && ! CONSTANT_P (XEXP (rtl, 0))
19209 /* Not passed in memory. */
19210 && !MEM_P (DECL_INCOMING_RTL (decl))
19211 /* Not passed by invisible reference. */
19212 && (!REG_P (XEXP (rtl, 0))
19213 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19214 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19215 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19216 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19217 #endif
19218 )
19219 /* Big endian correction check. */
19220 && BYTES_BIG_ENDIAN
19221 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19222 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
19223 < UNITS_PER_WORD))
19224 {
19225 machine_mode addr_mode = get_address_mode (rtl);
19226 int offset = (UNITS_PER_WORD
19227 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19228
19229 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19230 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19231 }
19232 }
19233 else if (VAR_P (decl)
19234 && rtl
19235 && MEM_P (rtl)
19236 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19237 {
19238 machine_mode addr_mode = get_address_mode (rtl);
19239 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19240 GET_MODE (rtl));
19241
19242 /* If a variable is declared "register" yet is smaller than
19243 a register, then if we store the variable to memory, it
19244 looks like we're storing a register-sized value, when in
19245 fact we are not. We need to adjust the offset of the
19246 storage location to reflect the actual value's bytes,
19247 else gdb will not be able to display it. */
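      /* Editorial note (hypothetical sizes): byte_lowpart_offset selects the
	 bytes of the wider slot that hold the value's low part.  For a 4-byte
	 value kept in an 8-byte slot it returns 4 on a big-endian target and
	 0 on a little-endian one, so only the big-endian case moves the
	 address here.  */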
19248 if (maybe_ne (offset, 0))
19249 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19250 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19251 }
19252
19253 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19254 and will have been substituted directly into all expressions that use it.
19255 C does not have such a concept, but C++ and other languages do. */
19256 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19257 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19258
19259 if (rtl)
19260 rtl = targetm.delegitimize_address (rtl);
19261
19262 /* If we don't look past the constant pool, we risk emitting a
19263 reference to a constant pool entry that isn't referenced from
19264 code, and thus is not emitted. */
19265 if (rtl)
19266 rtl = avoid_constant_pool_reference (rtl);
19267
19268 /* Try harder to get an rtl. If this symbol ends up not being emitted
19269 in the current CU, resolve_addr will remove the expression referencing
19270 it. */
19271 if (rtl == NULL_RTX
19272 && VAR_P (decl)
19273 && !DECL_EXTERNAL (decl)
19274 && TREE_STATIC (decl)
19275 && DECL_NAME (decl)
19276 && !DECL_HARD_REGISTER (decl)
19277 && DECL_MODE (decl) != VOIDmode)
19278 {
19279 rtl = make_decl_rtl_for_debug (decl);
19280 if (!MEM_P (rtl)
19281 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19282 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19283 rtl = NULL_RTX;
19284 }
19285
19286 return rtl;
19287 }
19288
19289 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19290 returned. If so, the decl for the COMMON block is returned, and the
19291 value is the offset into the common block for the symbol. */
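/* Editorial illustration (a sketch): for a Fortran symbol placed at byte
   offset 4 of COMMON block /blk/, DECL_VALUE_EXPR (decl) is a
   COMPONENT_REF whose base is the VAR_DECL representing /blk/.
   get_inner_reference below recovers that base and a constant bit
   position of 32, so this function returns the /blk/ decl and stores
   4 in *VALUE.  */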
19292
19293 static tree
19294 fortran_common (tree decl, HOST_WIDE_INT *value)
19295 {
19296 tree val_expr, cvar;
19297 machine_mode mode;
19298 poly_int64 bitsize, bitpos;
19299 tree offset;
19300 HOST_WIDE_INT cbitpos;
19301 int unsignedp, reversep, volatilep = 0;
19302
19303 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19304 it does not have a value (the offset into the common area), or if we
19305 are not compiling Fortran, then it isn't common, and shouldn't
19306 be handled as such. */
19307 if (!VAR_P (decl)
19308 || !TREE_STATIC (decl)
19309 || !DECL_HAS_VALUE_EXPR_P (decl)
19310 || !is_fortran ())
19311 return NULL_TREE;
19312
19313 val_expr = DECL_VALUE_EXPR (decl);
19314 if (TREE_CODE (val_expr) != COMPONENT_REF)
19315 return NULL_TREE;
19316
19317 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19318 &unsignedp, &reversep, &volatilep);
19319
19320 if (cvar == NULL_TREE
19321 || !VAR_P (cvar)
19322 || DECL_ARTIFICIAL (cvar)
19323 || !TREE_PUBLIC (cvar)
19324 /* We don't expect to have to cope with variable offsets,
19325 since at present all static data must have a constant size. */
19326 || !bitpos.is_constant (&cbitpos))
19327 return NULL_TREE;
19328
19329 *value = 0;
19330 if (offset != NULL)
19331 {
19332 if (!tree_fits_shwi_p (offset))
19333 return NULL_TREE;
19334 *value = tree_to_shwi (offset);
19335 }
19336 if (cbitpos != 0)
19337 *value += cbitpos / BITS_PER_UNIT;
19338
19339 return cvar;
19340 }
19341
19342 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19343 data attribute for a variable or a parameter. We generate the
19344 DW_AT_const_value attribute only in those cases where the given variable
19345 or parameter does not have a true "location" either in memory or in a
19346 register. This can happen (for example) when a constant is passed as an
19347 actual argument in a call to an inline function. (It's possible that
19348 these things can crop up in other ways also.) Note that one type of
19349 constant value which can be passed into an inlined function is a constant
19350 pointer. This can happen for example if an actual argument in an inlined
19351 function call evaluates to a compile-time constant address.
19352
19353 CACHE_P is true if it is worth caching the location list for DECL,
19354 so that future calls can reuse it rather than regenerate it from scratch.
19355 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19356 since we will need to refer to them each time the function is inlined. */
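/* Editorial illustration (a sketch; the function names are made up):

     static inline int incr (int k) { return k + 1; }
     int f (void) { return incr (42); }

   After `incr' is inlined, its formal `k' frequently has no run-time
   location left, but its value is the compile-time constant 42, so the
   inlined DW_TAG_formal_parameter DIE gets DW_AT_const_value 42 instead
   of a DW_AT_location.  */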
19357
19358 static bool
19359 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19360 {
19361 rtx rtl;
19362 dw_loc_list_ref list;
19363 var_loc_list *loc_list;
19364 cached_dw_loc_list *cache;
19365
19366 if (early_dwarf)
19367 return false;
19368
19369 if (TREE_CODE (decl) == ERROR_MARK)
19370 return false;
19371
19372 if (get_AT (die, DW_AT_location)
19373 || get_AT (die, DW_AT_const_value))
19374 return true;
19375
19376 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19377 || TREE_CODE (decl) == RESULT_DECL);
19378
19379 /* Try to get some constant RTL for this decl, and use that as the value of
19380 the location. */
19381
19382 rtl = rtl_for_decl_location (decl);
19383 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19384 && add_const_value_attribute (die, rtl))
19385 return true;
19386
19387 /* See if we have a single-element location list that is equivalent to
19388 a constant value. If so, we are better off using add_const_value_attribute
19389 rather than expanding the constant-value equivalent. */
19390 loc_list = lookup_decl_loc (decl);
19391 if (loc_list
19392 && loc_list->first
19393 && loc_list->first->next == NULL
19394 && NOTE_P (loc_list->first->loc)
19395 && NOTE_VAR_LOCATION (loc_list->first->loc)
19396 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19397 {
19398 struct var_loc_node *node;
19399
19400 node = loc_list->first;
19401 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19402 if (GET_CODE (rtl) == EXPR_LIST)
19403 rtl = XEXP (rtl, 0);
19404 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19405 && add_const_value_attribute (die, rtl))
19406 return true;
19407 }
19408 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19409 list several times. See if we've already cached the contents. */
19410 list = NULL;
19411 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19412 cache_p = false;
19413 if (cache_p)
19414 {
19415 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19416 if (cache)
19417 list = cache->loc_list;
19418 }
19419 if (list == NULL)
19420 {
19421 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19422 NULL);
19423 /* It is usually worth caching this result if the decl is from
19424 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19425 if (cache_p && list && list->dw_loc_next)
19426 {
19427 cached_dw_loc_list **slot
19428 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19429 DECL_UID (decl),
19430 INSERT);
19431 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19432 cache->decl_id = DECL_UID (decl);
19433 cache->loc_list = list;
19434 *slot = cache;
19435 }
19436 }
19437 if (list)
19438 {
19439 add_AT_location_description (die, DW_AT_location, list);
19440 return true;
19441 }
19442 /* None of that worked, so it must not really have a location;
19443 try adding a constant value attribute from the DECL_INITIAL. */
19444 return tree_add_const_value_attribute_for_decl (die, decl);
19445 }
19446
19447 /* Helper function for tree_add_const_value_attribute. Natively encode
19448 initializer INIT into an array. Return true if successful. */
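/* Editorial worked example (assuming a little-endian target with 8-bit
   bytes and a 16-bit `short'):

     struct { short s; char c[2]; } v = { 7, "ab" };

   int_size_in_bytes is 4, and the CONSTRUCTOR case below writes each
   field at int_byte_position for DECL_SIZE_UNIT bytes, giving the byte
   array 07 00 61 62.  */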
19449
19450 static bool
19451 native_encode_initializer (tree init, unsigned char *array, int size)
19452 {
19453 tree type;
19454
19455 if (init == NULL_TREE)
19456 return false;
19457
19458 STRIP_NOPS (init);
19459 switch (TREE_CODE (init))
19460 {
19461 case STRING_CST:
19462 type = TREE_TYPE (init);
19463 if (TREE_CODE (type) == ARRAY_TYPE)
19464 {
19465 tree enttype = TREE_TYPE (type);
19466 scalar_int_mode mode;
19467
19468 if (!is_int_mode (TYPE_MODE (enttype), &mode)
19469 || GET_MODE_SIZE (mode) != 1)
19470 return false;
19471 if (int_size_in_bytes (type) != size)
19472 return false;
19473 if (size > TREE_STRING_LENGTH (init))
19474 {
19475 memcpy (array, TREE_STRING_POINTER (init),
19476 TREE_STRING_LENGTH (init));
19477 memset (array + TREE_STRING_LENGTH (init),
19478 '\0', size - TREE_STRING_LENGTH (init));
19479 }
19480 else
19481 memcpy (array, TREE_STRING_POINTER (init), size);
19482 return true;
19483 }
19484 return false;
19485 case CONSTRUCTOR:
19486 type = TREE_TYPE (init);
19487 if (int_size_in_bytes (type) != size)
19488 return false;
19489 if (TREE_CODE (type) == ARRAY_TYPE)
19490 {
19491 HOST_WIDE_INT min_index;
19492 unsigned HOST_WIDE_INT cnt;
19493 int curpos = 0, fieldsize;
19494 constructor_elt *ce;
19495
19496 if (TYPE_DOMAIN (type) == NULL_TREE
19497 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
19498 return false;
19499
19500 fieldsize = int_size_in_bytes (TREE_TYPE (type));
19501 if (fieldsize <= 0)
19502 return false;
19503
19504 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
19505 memset (array, '\0', size);
19506 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19507 {
19508 tree val = ce->value;
19509 tree index = ce->index;
19510 int pos = curpos;
19511 if (index && TREE_CODE (index) == RANGE_EXPR)
19512 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
19513 * fieldsize;
19514 else if (index)
19515 pos = (tree_to_shwi (index) - min_index) * fieldsize;
19516
19517 if (val)
19518 {
19519 STRIP_NOPS (val);
19520 if (!native_encode_initializer (val, array + pos, fieldsize))
19521 return false;
19522 }
19523 curpos = pos + fieldsize;
19524 if (index && TREE_CODE (index) == RANGE_EXPR)
19525 {
19526 int count = tree_to_shwi (TREE_OPERAND (index, 1))
19527 - tree_to_shwi (TREE_OPERAND (index, 0));
19528 while (count-- > 0)
19529 {
19530 if (val)
19531 memcpy (array + curpos, array + pos, fieldsize);
19532 curpos += fieldsize;
19533 }
19534 }
19535 gcc_assert (curpos <= size);
19536 }
19537 return true;
19538 }
19539 else if (TREE_CODE (type) == RECORD_TYPE
19540 || TREE_CODE (type) == UNION_TYPE)
19541 {
19542 tree field = NULL_TREE;
19543 unsigned HOST_WIDE_INT cnt;
19544 constructor_elt *ce;
19545
19546 if (int_size_in_bytes (type) != size)
19547 return false;
19548
19549 if (TREE_CODE (type) == RECORD_TYPE)
19550 field = TYPE_FIELDS (type);
19551
19552 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
19553 {
19554 tree val = ce->value;
19555 int pos, fieldsize;
19556
19557 if (ce->index != 0)
19558 field = ce->index;
19559
19560 if (val)
19561 STRIP_NOPS (val);
19562
19563 if (field == NULL_TREE || DECL_BIT_FIELD (field))
19564 return false;
19565
19566 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
19567 && TYPE_DOMAIN (TREE_TYPE (field))
19568 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
19569 return false;
19570 else if (DECL_SIZE_UNIT (field) == NULL_TREE
19571 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
19572 return false;
19573 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
19574 pos = int_byte_position (field);
19575 gcc_assert (pos + fieldsize <= size);
19576 if (val && fieldsize != 0
19577 && !native_encode_initializer (val, array + pos, fieldsize))
19578 return false;
19579 }
19580 return true;
19581 }
19582 return false;
19583 case VIEW_CONVERT_EXPR:
19584 case NON_LVALUE_EXPR:
19585 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
19586 default:
19587 return native_encode_expr (init, array, size) == size;
19588 }
19589 }
19590
19591 /* Attach a DW_AT_const_value attribute to DIE. The value of the
19592 attribute is the const value T. */
19593
19594 static bool
19595 tree_add_const_value_attribute (dw_die_ref die, tree t)
19596 {
19597 tree init;
19598 tree type = TREE_TYPE (t);
19599 rtx rtl;
19600
19601 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
19602 return false;
19603
19604 init = t;
19605 gcc_assert (!DECL_P (init));
19606
19607 if (TREE_CODE (init) == INTEGER_CST)
19608 {
19609 if (tree_fits_uhwi_p (init))
19610 {
19611 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
19612 return true;
19613 }
19614 if (tree_fits_shwi_p (init))
19615 {
19616 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
19617 return true;
19618 }
19619 }
19620 if (! early_dwarf)
19621 {
19622 rtl = rtl_for_decl_init (init, type);
19623 if (rtl)
19624 return add_const_value_attribute (die, rtl);
19625 }
19626 /* If the host and target are sane, try harder. */
19627 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
19628 && initializer_constant_valid_p (init, type))
19629 {
19630 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
19631 if (size > 0 && (int) size == size)
19632 {
19633 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
19634
19635 if (native_encode_initializer (init, array, size))
19636 {
19637 add_AT_vec (die, DW_AT_const_value, size, 1, array);
19638 return true;
19639 }
19640 ggc_free (array);
19641 }
19642 }
19643 return false;
19644 }
19645
19646 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
19647 attribute is the const value of T, where T is an integral constant
19648 variable with static storage duration
19649 (so it can't be a PARM_DECL or a RESULT_DECL). */
19650
19651 static bool
19652 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
19653 {
19654
19655 if (!decl
19656 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
19657 || (VAR_P (decl) && !TREE_STATIC (decl)))
19658 return false;
19659
19660 if (TREE_READONLY (decl)
19661 && ! TREE_THIS_VOLATILE (decl)
19662 && DECL_INITIAL (decl))
19663 /* OK */;
19664 else
19665 return false;
19666
19667 /* Don't add DW_AT_const_value if abstract origin already has one. */
19668 if (get_AT (var_die, DW_AT_const_value))
19669 return false;
19670
19671 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
19672 }
19673
19674 /* Convert the CFI instructions for the current function into a
19675 location list. This is used for DW_AT_frame_base when targeting
19676 a dwarf2 consumer that does not support the dwarf3
19677 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
19678 expressions. */
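/* Editorial sketch of the result (labels and register choices are
   hypothetical): for a function whose CFA is SP + 16 up to .LCFI1 and
   FP + 16 afterwards, called with OFFSET == -16, the list built below
   is roughly

     [dw_fde_begin, .LCFI1)   DW_OP_breg<SP> 0
     [.LCFI1, dw_fde_end)     DW_OP_breg<FP> 0

   one entry per range over which the CFA was constant, each expression
   being build_cfa_loc of that CFA with OFFSET folded in.  */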
19679
19680 static dw_loc_list_ref
19681 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
19682 {
19683 int ix;
19684 dw_fde_ref fde;
19685 dw_loc_list_ref list, *list_tail;
19686 dw_cfi_ref cfi;
19687 dw_cfa_location last_cfa, next_cfa;
19688 const char *start_label, *last_label, *section;
19689 dw_cfa_location remember;
19690
19691 fde = cfun->fde;
19692 gcc_assert (fde != NULL);
19693
19694 section = secname_for_decl (current_function_decl);
19695 list_tail = &list;
19696 list = NULL;
19697
19698 memset (&next_cfa, 0, sizeof (next_cfa));
19699 next_cfa.reg = INVALID_REGNUM;
19700 remember = next_cfa;
19701
19702 start_label = fde->dw_fde_begin;
19703
19704 /* ??? Bald assumption that the CIE opcode list does not contain
19705 advance opcodes. */
19706 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
19707 lookup_cfa_1 (cfi, &next_cfa, &remember);
19708
19709 last_cfa = next_cfa;
19710 last_label = start_label;
19711
19712 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
19713 {
19714 /* If the first partition contained no CFI adjustments, the
19715 CIE opcodes apply to the whole first partition. */
19716 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19717 fde->dw_fde_begin, fde->dw_fde_end, section);
19718 list_tail = &(*list_tail)->dw_loc_next;
19719 start_label = last_label = fde->dw_fde_second_begin;
19720 }
19721
19722 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
19723 {
19724 switch (cfi->dw_cfi_opc)
19725 {
19726 case DW_CFA_set_loc:
19727 case DW_CFA_advance_loc1:
19728 case DW_CFA_advance_loc2:
19729 case DW_CFA_advance_loc4:
19730 if (!cfa_equal_p (&last_cfa, &next_cfa))
19731 {
19732 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19733 start_label, last_label, section);
19734
19735 list_tail = &(*list_tail)->dw_loc_next;
19736 last_cfa = next_cfa;
19737 start_label = last_label;
19738 }
19739 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
19740 break;
19741
19742 case DW_CFA_advance_loc:
19743 /* The encoding is complex enough that we should never emit this. */
19744 gcc_unreachable ();
19745
19746 default:
19747 lookup_cfa_1 (cfi, &next_cfa, &remember);
19748 break;
19749 }
19750 if (ix + 1 == fde->dw_fde_switch_cfi_index)
19751 {
19752 if (!cfa_equal_p (&last_cfa, &next_cfa))
19753 {
19754 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19755 start_label, last_label, section);
19756
19757 list_tail = &(*list_tail)->dw_loc_next;
19758 last_cfa = next_cfa;
19759 start_label = last_label;
19760 }
19761 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19762 start_label, fde->dw_fde_end, section);
19763 list_tail = &(*list_tail)->dw_loc_next;
19764 start_label = last_label = fde->dw_fde_second_begin;
19765 }
19766 }
19767
19768 if (!cfa_equal_p (&last_cfa, &next_cfa))
19769 {
19770 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
19771 start_label, last_label, section);
19772 list_tail = &(*list_tail)->dw_loc_next;
19773 start_label = last_label;
19774 }
19775
19776 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
19777 start_label,
19778 fde->dw_fde_second_begin
19779 ? fde->dw_fde_second_end : fde->dw_fde_end,
19780 section);
19781
19782 if (list && list->dw_loc_next)
19783 gen_llsym (list);
19784
19785 return list;
19786 }
19787
19788 /* Compute a displacement from the "steady-state frame pointer" to the
19789 frame base (often the same as the CFA), and store it in
19790 frame_pointer_fb_offset. OFFSET is added to the displacement
19791 before the latter is negated. */
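/* Editorial worked example (hypothetical numbers, taking the
   arg-pointer branch below): with an incoming OFFSET of 0, an
   ARG_POINTER_CFA_OFFSET of 16, and the argument pointer eliminating to
   SP + 24, the running offset becomes 16 + 24 = 40 and
   frame_pointer_fb_offset is set to -40.  */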
19792
19793 static void
19794 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
19795 {
19796 rtx reg, elim;
19797
19798 #ifdef FRAME_POINTER_CFA_OFFSET
19799 reg = frame_pointer_rtx;
19800 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
19801 #else
19802 reg = arg_pointer_rtx;
19803 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
19804 #endif
19805
19806 elim = (ira_use_lra_p
19807 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
19808 : eliminate_regs (reg, VOIDmode, NULL_RTX));
19809 elim = strip_offset_and_add (elim, &offset);
19810
19811 frame_pointer_fb_offset = -offset;
19812
19813 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
19814 in which to eliminate. This is because its stack pointer isn't
19815 directly accessible as a register within the ISA. To work around
19816 this, assume that while we cannot provide a proper value for
19817 frame_pointer_fb_offset, we won't need one either. */
19818 frame_pointer_fb_offset_valid
19819 = ((SUPPORTS_STACK_ALIGNMENT
19820 && (elim == hard_frame_pointer_rtx
19821 || elim == stack_pointer_rtx))
19822 || elim == (frame_pointer_needed
19823 ? hard_frame_pointer_rtx
19824 : stack_pointer_rtx));
19825 }
19826
19827 /* Generate a DW_AT_name attribute given some string value to be included as
19828 the value of the attribute. */
19829
19830 static void
19831 add_name_attribute (dw_die_ref die, const char *name_string)
19832 {
19833 if (name_string != NULL && *name_string != 0)
19834 {
19835 if (demangle_name_func)
19836 name_string = (*demangle_name_func) (name_string);
19837
19838 add_AT_string (die, DW_AT_name, name_string);
19839 }
19840 }
19841
19842 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
19843 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
19844 of TYPE accordingly.
19845
19846 ??? This is a temporary measure until after we're able to generate
19847 regular DWARF for the complex Ada type system. */
19848
19849 static void
19850 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
19851 dw_die_ref context_die)
19852 {
19853 tree dtype;
19854 dw_die_ref dtype_die;
19855
19856 if (!lang_hooks.types.descriptive_type)
19857 return;
19858
19859 dtype = lang_hooks.types.descriptive_type (type);
19860 if (!dtype)
19861 return;
19862
19863 dtype_die = lookup_type_die (dtype);
19864 if (!dtype_die)
19865 {
19866 gen_type_die (dtype, context_die);
19867 dtype_die = lookup_type_die (dtype);
19868 gcc_assert (dtype_die);
19869 }
19870
19871 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
19872 }
19873
19874 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
19875
19876 static const char *
19877 comp_dir_string (void)
19878 {
19879 const char *wd;
19880 char *wd1;
19881 static const char *cached_wd = NULL;
19882
19883 if (cached_wd != NULL)
19884 return cached_wd;
19885
19886 wd = get_src_pwd ();
19887 if (wd == NULL)
19888 return NULL;
19889
19890 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
19891 {
19892 int wdlen;
19893
19894 wdlen = strlen (wd);
19895 wd1 = ggc_vec_alloc<char> (wdlen + 2);
19896 strcpy (wd1, wd);
19897 wd1 [wdlen] = DIR_SEPARATOR;
19898 wd1 [wdlen + 1] = 0;
19899 wd = wd1;
19900 }
19901
19902 cached_wd = remap_debug_filename (wd);
19903 return cached_wd;
19904 }
19905
19906 /* Generate a DW_AT_comp_dir attribute for DIE. */
19907
19908 static void
19909 add_comp_dir_attribute (dw_die_ref die)
19910 {
19911 const char * wd = comp_dir_string ();
19912 if (wd != NULL)
19913 add_AT_string (die, DW_AT_comp_dir, wd);
19914 }
19915
19916 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
19917 pointer computation, ...), output a representation for that bound according
19918 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
19919 loc_list_from_tree for the meaning of CONTEXT. */
19920
19921 static void
19922 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
19923 int forms, struct loc_descr_context *context)
19924 {
19925 dw_die_ref context_die, decl_die;
19926 dw_loc_list_ref list;
19927 bool strip_conversions = true;
19928 bool placeholder_seen = false;
19929
19930 while (strip_conversions)
19931 switch (TREE_CODE (value))
19932 {
19933 case ERROR_MARK:
19934 case SAVE_EXPR:
19935 return;
19936
19937 CASE_CONVERT:
19938 case VIEW_CONVERT_EXPR:
19939 value = TREE_OPERAND (value, 0);
19940 break;
19941
19942 default:
19943 strip_conversions = false;
19944 break;
19945 }
19946
19947 /* If possible and permitted, output the attribute as a constant. */
19948 if ((forms & dw_scalar_form_constant) != 0
19949 && TREE_CODE (value) == INTEGER_CST)
19950 {
19951 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
19952
19953 /* If HOST_WIDE_INT is big enough then represent the bound as
19954 a constant value. We need to choose a form based on
19955 whether the type is signed or unsigned. We cannot just
19956 call add_AT_unsigned if the value itself is positive
19957 (add_AT_unsigned might add the unsigned value encoded as
19958 DW_FORM_data[1248]). Some DWARF consumers will look up the
19959 bounds type and then sign extend any unsigned values found
19960 for signed types. This is needed only for
19961 DW_AT_{lower,upper}_bound, since for most other attributes,
19962 consumers will treat DW_FORM_data[1248] as unsigned values,
19963 regardless of the underlying type. */
19964 if (prec <= HOST_BITS_PER_WIDE_INT
19965 || tree_fits_uhwi_p (value))
19966 {
19967 if (TYPE_UNSIGNED (TREE_TYPE (value)))
19968 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
19969 else
19970 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
19971 }
19972 else
19973 /* Otherwise represent the bound as an unsigned value with
19974 the precision of its type. The precision and signedness
19975 of the type will be necessary to re-interpret it
19976 unambiguously. */
19977 add_AT_wide (die, attr, wi::to_wide (value));
19978 return;
19979 }
19980
19981 /* Otherwise, if it's possible and permitted too, output a reference to
19982 another DIE. */
19983 if ((forms & dw_scalar_form_reference) != 0)
19984 {
19985 tree decl = NULL_TREE;
19986
19987 /* Some type attributes reference an outer type. For instance, the upper
19988 bound of an array may reference an embedding record (this happens in
19989 Ada). */
19990 if (TREE_CODE (value) == COMPONENT_REF
19991 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
19992 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
19993 decl = TREE_OPERAND (value, 1);
19994
19995 else if (VAR_P (value)
19996 || TREE_CODE (value) == PARM_DECL
19997 || TREE_CODE (value) == RESULT_DECL)
19998 decl = value;
19999
20000 if (decl != NULL_TREE)
20001 {
20002 dw_die_ref decl_die = lookup_decl_die (decl);
20003
20004 /* ??? Can this happen, or should the variable have been bound
20005 first? Probably it can, since I imagine that we try to create
20006 the types of parameters in the order in which they exist in
20007 the list, and won't have created a forward reference to a
20008 later parameter. */
20009 if (decl_die != NULL)
20010 {
20011 add_AT_die_ref (die, attr, decl_die);
20012 return;
20013 }
20014 }
20015 }
20016
20017 /* Last chance: try to create a stack operation procedure to evaluate the
20018 value. Do nothing if even that is not possible or permitted. */
20019 if ((forms & dw_scalar_form_exprloc) == 0)
20020 return;
20021
20022 list = loc_list_from_tree (value, 2, context);
20023 if (context && context->placeholder_arg)
20024 {
20025 placeholder_seen = context->placeholder_seen;
20026 context->placeholder_seen = false;
20027 }
20028 if (list == NULL || single_element_loc_list_p (list))
20029 {
20030 /* If this attribute is neither a reference nor a constant, it is
20031 a DWARF expression rather than a location description. For that,
20032 loc_list_from_tree (value, 0, context) is needed. */
20033 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20034 if (list2 && single_element_loc_list_p (list2))
20035 {
20036 if (placeholder_seen)
20037 {
20038 struct dwarf_procedure_info dpi;
20039 dpi.fndecl = NULL_TREE;
20040 dpi.args_count = 1;
20041 if (!resolve_args_picking (list2->expr, 1, &dpi))
20042 return;
20043 }
20044 add_AT_loc (die, attr, list2->expr);
20045 return;
20046 }
20047 }
20048
20049 /* If that failed to give a single element location list, fall back to
20050 outputting this as a reference... still if permitted. */
20051 if (list == NULL
20052 || (forms & dw_scalar_form_reference) == 0
20053 || placeholder_seen)
20054 return;
20055
20056 if (current_function_decl == 0)
20057 context_die = comp_unit_die ();
20058 else
20059 context_die = lookup_decl_die (current_function_decl);
20060
20061 decl_die = new_die (DW_TAG_variable, context_die, value);
20062 add_AT_flag (decl_die, DW_AT_artificial, 1);
20063 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20064 context_die);
20065 add_AT_location_description (decl_die, DW_AT_location, list);
20066 add_AT_die_ref (die, attr, decl_die);
20067 }
20068
20069 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20070 default. */
20071
20072 static int
20073 lower_bound_default (void)
20074 {
20075 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20076 {
20077 case DW_LANG_C:
20078 case DW_LANG_C89:
20079 case DW_LANG_C99:
20080 case DW_LANG_C11:
20081 case DW_LANG_C_plus_plus:
20082 case DW_LANG_C_plus_plus_11:
20083 case DW_LANG_C_plus_plus_14:
20084 case DW_LANG_ObjC:
20085 case DW_LANG_ObjC_plus_plus:
20086 return 0;
20087 case DW_LANG_Fortran77:
20088 case DW_LANG_Fortran90:
20089 case DW_LANG_Fortran95:
20090 case DW_LANG_Fortran03:
20091 case DW_LANG_Fortran08:
20092 return 1;
20093 case DW_LANG_UPC:
20094 case DW_LANG_D:
20095 case DW_LANG_Python:
20096 return dwarf_version >= 4 ? 0 : -1;
20097 case DW_LANG_Ada95:
20098 case DW_LANG_Ada83:
20099 case DW_LANG_Cobol74:
20100 case DW_LANG_Cobol85:
20101 case DW_LANG_Modula2:
20102 case DW_LANG_PLI:
20103 return dwarf_version >= 4 ? 1 : -1;
20104 default:
20105 return -1;
20106 }
20107 }
20108
20109 /* Given a tree node describing an array bound (either lower or upper) output
20110 a representation for that bound. */
20111
20112 static void
20113 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20114 tree bound, struct loc_descr_context *context)
20115 {
20116 int dflt;
20117
20118 while (1)
20119 switch (TREE_CODE (bound))
20120 {
20121 /* Strip all conversions. */
20122 CASE_CONVERT:
20123 case VIEW_CONVERT_EXPR:
20124 bound = TREE_OPERAND (bound, 0);
20125 break;
20126
20127 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20128 are even omitted when they are the default. */
20129 case INTEGER_CST:
20130 /* If the value for this bound is the default one, we can even omit the
20131 attribute. */
20132 if (bound_attr == DW_AT_lower_bound
20133 && tree_fits_shwi_p (bound)
20134 && (dflt = lower_bound_default ()) != -1
20135 && tree_to_shwi (bound) == dflt)
20136 return;
20137
20138 /* FALLTHRU */
20139
20140 default:
20141 /* Because of the complex interactions that can arise with other GNAT
20142 encodings, GDB isn't ready yet to handle a proper DWARF description
20143 for self-referential subrange bounds: let the GNAT encodings do the
20144 magic in such a case. */
20145 if (is_ada ()
20146 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20147 && contains_placeholder_p (bound))
20148 return;
20149
20150 add_scalar_info (subrange_die, bound_attr, bound,
20151 dw_scalar_form_constant
20152 | dw_scalar_form_exprloc
20153 | dw_scalar_form_reference,
20154 context);
20155 return;
20156 }
20157 }
20158
20159 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20160 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20161 Note that the block of subscript information for an array type also
20162 includes information about the element type of the given array type.
20163
20164 This function reuses previously set type and bound information if
20165 available. */
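/* Editorial example: for the C type `int a[2][3]' with COLLAPSE_P true,
   the loop below walks both ARRAY_TYPE levels, so TYPE_DIE ends up with
   two DW_TAG_subrange_type children, one per dimension, carrying
   DW_AT_upper_bound 1 and DW_AT_upper_bound 2 respectively (the default
   C lower bound of 0 is omitted; see lower_bound_default).  */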
20166
20167 static void
20168 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20169 {
20170 unsigned dimension_number;
20171 tree lower, upper;
20172 dw_die_ref child = type_die->die_child;
20173
20174 for (dimension_number = 0;
20175 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20176 type = TREE_TYPE (type), dimension_number++)
20177 {
20178 tree domain = TYPE_DOMAIN (type);
20179
20180 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20181 break;
20182
20183 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20184 and (in GNU C only) variable bounds. Handle all three forms
20185 here. */
20186
20187 /* Find and reuse a previously generated DW_TAG_subrange_type if
20188 available.
20189
20190 For multi-dimensional arrays, as we iterate through the
20191 various dimensions in the enclosing for loop above, we also
20192 iterate through the DIE children and pick at each
20193 DW_TAG_subrange_type previously generated (if available).
20194 Each child DW_TAG_subrange_type DIE describes the range of
20195 the current dimension. At this point we should have as many
20196 DW_TAG_subrange_type's as we have dimensions in the
20197 array. */
20198 dw_die_ref subrange_die = NULL;
20199 if (child)
20200 while (1)
20201 {
20202 child = child->die_sib;
20203 if (child->die_tag == DW_TAG_subrange_type)
20204 subrange_die = child;
20205 if (child == type_die->die_child)
20206 {
20207 /* If we wrapped around, stop looking next time. */
20208 child = NULL;
20209 break;
20210 }
20211 if (child->die_tag == DW_TAG_subrange_type)
20212 break;
20213 }
20214 if (!subrange_die)
20215 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20216
20217 if (domain)
20218 {
20219 /* We have an array type with specified bounds. */
20220 lower = TYPE_MIN_VALUE (domain);
20221 upper = TYPE_MAX_VALUE (domain);
20222
20223 /* Define the index type. */
20224 if (TREE_TYPE (domain)
20225 && !get_AT (subrange_die, DW_AT_type))
20226 {
20227 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20228 TREE_TYPE field. We can't emit debug info for this
20229 because it is an unnamed integral type. */
20230 if (TREE_CODE (domain) == INTEGER_TYPE
20231 && TYPE_NAME (domain) == NULL_TREE
20232 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20233 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20234 ;
20235 else
20236 add_type_attribute (subrange_die, TREE_TYPE (domain),
20237 TYPE_UNQUALIFIED, false, type_die);
20238 }
20239
20240 /* ??? If upper is NULL, the array has unspecified length,
20241 but it does have a lower bound. This happens with Fortran
20242 dimension arr(N:*)
20243 Since the debugger is definitely going to need to know N
20244 to produce useful results, go ahead and output the lower
20245 bound solo, and hope the debugger can cope. */
20246
20247 if (!get_AT (subrange_die, DW_AT_lower_bound))
20248 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20249 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20250 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20251 }
20252
20253 /* Otherwise we have an array type with an unspecified length. The
20254 DWARF-2 spec does not say how to handle this; let's just leave out the
20255 bounds. */
20256 }
20257 }
20258
20259 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20260
20261 static void
20262 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20263 {
20264 dw_die_ref decl_die;
20265 HOST_WIDE_INT size;
20266 dw_loc_descr_ref size_expr = NULL;
20267
20268 switch (TREE_CODE (tree_node))
20269 {
20270 case ERROR_MARK:
20271 size = 0;
20272 break;
20273 case ENUMERAL_TYPE:
20274 case RECORD_TYPE:
20275 case UNION_TYPE:
20276 case QUAL_UNION_TYPE:
20277 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20278 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20279 {
20280 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20281 return;
20282 }
20283 size_expr = type_byte_size (tree_node, &size);
20284 break;
20285 case FIELD_DECL:
20286 /* For a data member of a struct or union, the DW_AT_byte_size is
20287 generally given as the number of bytes normally allocated for an
20288 object of the *declared* type of the member itself. This is true
20289 even for bit-fields. */
20290 size = int_size_in_bytes (field_type (tree_node));
20291 break;
20292 default:
20293 gcc_unreachable ();
20294 }
20295
20296 /* Support for dynamically-sized objects was introduced by DWARFv3.
20297 At the moment, GDB does not handle variable byte sizes very well,
20298 though. */
20299 if ((dwarf_version >= 3 || !dwarf_strict)
20300 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20301 && size_expr != NULL)
20302 add_AT_loc (die, DW_AT_byte_size, size_expr);
20303
20304 /* Note that `size' might be -1 when we get to this point. If it is, that
20305 indicates that the byte size of the entity in question is variable and
20306 that we could not generate a DWARF expression that computes it. */
20307 if (size >= 0)
20308 add_AT_unsigned (die, DW_AT_byte_size, size);
20309 }
20310
20311 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20312 alignment. */
20313
20314 static void
20315 add_alignment_attribute (dw_die_ref die, tree tree_node)
20316 {
20317 if (dwarf_version < 5 && dwarf_strict)
20318 return;
20319
20320 unsigned align;
20321
20322 if (DECL_P (tree_node))
20323 {
20324 if (!DECL_USER_ALIGN (tree_node))
20325 return;
20326
20327 align = DECL_ALIGN_UNIT (tree_node);
20328 }
20329 else if (TYPE_P (tree_node))
20330 {
20331 if (!TYPE_USER_ALIGN (tree_node))
20332 return;
20333
20334 align = TYPE_ALIGN_UNIT (tree_node);
20335 }
20336 else
20337 gcc_unreachable ();
20338
20339 add_AT_unsigned (die, DW_AT_alignment, align);
20340 }
20341
20342 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20343 which specifies the distance in bits from the highest order bit of the
20344 "containing object" for the bit-field to the highest order bit of the
20345 bit-field itself.
20346
20347 For any given bit-field, the "containing object" is a hypothetical object
20348 (of some integral or enum type) within which the given bit-field lives. The
20349 type of this hypothetical "containing object" is always the same as the
20350 declared type of the individual bit-field itself. The determination of the
20351 exact location of the "containing object" for a bit-field is rather
20352 complicated. It's handled by the `field_byte_offset' function (above).
20353
20354 CTX is required: see the comment for VLR_CONTEXT.
20355
20356 Note that it is the size (in bytes) of the hypothetical "containing object"
20357 which will be given in the DW_AT_byte_size attribute for this bit-field.
20358 (See `byte_size_attribute' above). */
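/* Editorial worked example (little-endian target, 32-bit `int', and a
   containing object allocated at byte offset 0): for

     struct { int a : 3; int b : 5; } x;

   member `b' has int_bit_position 3 and DECL_SIZE 5, so below
     highest_order_field_bit_offset  = 3 + 5  = 8
     highest_order_object_bit_offset = 0 + 32 = 32
   and the emitted DW_AT_bit_offset is 32 - 8 = 24.  */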
20359
20360 static inline void
20361 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20362 {
20363 HOST_WIDE_INT object_offset_in_bytes;
20364 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20365 HOST_WIDE_INT bitpos_int;
20366 HOST_WIDE_INT highest_order_object_bit_offset;
20367 HOST_WIDE_INT highest_order_field_bit_offset;
20368 HOST_WIDE_INT bit_offset;
20369
20370 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20371
20372 /* Must be a field and a bit field. */
20373 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20374
20375 /* We can't yet handle bit-fields whose offsets are variable, so if we
20376 encounter such things, just return without generating any attribute
20377 whatsoever. Likewise for variable or too large size. */
20378 if (! tree_fits_shwi_p (bit_position (decl))
20379 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20380 return;
20381
20382 bitpos_int = int_bit_position (decl);
20383
20384 /* Note that the bit offset is always the distance (in bits) from the
20385 highest-order bit of the "containing object" to the highest-order bit of
20386 the bit-field itself. Since the "high-order end" of any object or field
20387 is different on big-endian and little-endian machines, the computation
20388 below must take account of these differences. */
20389 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20390 highest_order_field_bit_offset = bitpos_int;
20391
20392 if (! BYTES_BIG_ENDIAN)
20393 {
20394 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20395 highest_order_object_bit_offset +=
20396 simple_type_size_in_bits (original_type);
20397 }
20398
20399 bit_offset
20400 = (! BYTES_BIG_ENDIAN
20401 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20402 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20403
20404 if (bit_offset < 0)
20405 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20406 else
20407 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20408 }
20409
20410 /* For a FIELD_DECL node which represents a bit field, output an attribute
20411 which specifies the length in bits of the given field. */
20412
20413 static inline void
20414 add_bit_size_attribute (dw_die_ref die, tree decl)
20415 {
20416 /* Must be a field and a bit field. */
20417 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20418 && DECL_BIT_FIELD_TYPE (decl));
20419
20420 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20421 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20422 }
20423
20424 /* If the compiled language is ANSI C, then add a 'prototyped'
20425 attribute, if arg types are given for the parameters of a function. */
20426
20427 static inline void
20428 add_prototyped_attribute (dw_die_ref die, tree func_type)
20429 {
20430 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20431 {
20432 case DW_LANG_C:
20433 case DW_LANG_C89:
20434 case DW_LANG_C99:
20435 case DW_LANG_C11:
20436 case DW_LANG_ObjC:
20437 if (prototype_p (func_type))
20438 add_AT_flag (die, DW_AT_prototyped, 1);
20439 break;
20440 default:
20441 break;
20442 }
20443 }
20444
20445 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20446 by looking in the type declaration, the object declaration equate table or
20447 the block mapping. */
20448
20449 static inline dw_die_ref
20450 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20451 {
20452 dw_die_ref origin_die = NULL;
20453
20454 if (DECL_P (origin))
20455 {
20456 dw_die_ref c;
20457 origin_die = lookup_decl_die (origin);
20458 /* "Unwrap" the decls DIE which we put in the imported unit context.
20459 We are looking for the abstract copy here. */
20460 if (in_lto_p
20461 && origin_die
20462 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20463 /* ??? Identify this better. */
20464 && c->with_offset)
20465 origin_die = c;
20466 }
20467 else if (TYPE_P (origin))
20468 origin_die = lookup_type_die (origin);
20469 else if (TREE_CODE (origin) == BLOCK)
20470 origin_die = BLOCK_DIE (origin);
20471
20472 /* XXX: Functions that are never lowered don't always have correct block
20473 trees (in the case of Java they simply have no block tree; the same is true
20474 in some other languages). For these functions, there is nothing we can really do to
20475 output correct debug info for inlined functions in all cases. Rather
20476 than die, we'll just produce deficient debug info now, in that we will
20477 have variables without a proper abstract origin. In the future, when all
20478 functions are lowered, we should re-add a gcc_assert (origin_die)
20479 here. */
20480
20481 if (origin_die)
20482 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
20483 return origin_die;
20484 }
20485
20486 /* We do not currently support the pure_virtual attribute. */
20487
20488 static inline void
20489 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
20490 {
20491 if (DECL_VINDEX (func_decl))
20492 {
20493 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
20494
20495 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
20496 add_AT_loc (die, DW_AT_vtable_elem_location,
20497 new_loc_descr (DW_OP_constu,
20498 tree_to_shwi (DECL_VINDEX (func_decl)),
20499 0));
20500
20501 /* GNU extension: Record what type this method came from originally. */
20502 if (debug_info_level > DINFO_LEVEL_TERSE
20503 && DECL_CONTEXT (func_decl))
20504 add_AT_die_ref (die, DW_AT_containing_type,
20505 lookup_type_die (DECL_CONTEXT (func_decl)));
20506 }
20507 }
20508 \f
20509 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
20510 given decl. This used to be a vendor extension until after DWARF 4
20511 standardized it. */
20512
20513 static void
20514 add_linkage_attr (dw_die_ref die, tree decl)
20515 {
20516 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20517
20518 /* Mimic what assemble_name_raw does with a leading '*'. */
20519 if (name[0] == '*')
20520 name = &name[1];
20521
20522 if (dwarf_version >= 4)
20523 add_AT_string (die, DW_AT_linkage_name, name);
20524 else
20525 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
20526 }
20527
20528 /* Add source coordinate attributes for the given decl. */
20529
20530 static void
20531 add_src_coords_attributes (dw_die_ref die, tree decl)
20532 {
20533 expanded_location s;
20534
20535 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
20536 return;
20537 s = expand_location (DECL_SOURCE_LOCATION (decl));
20538 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
20539 add_AT_unsigned (die, DW_AT_decl_line, s.line);
20540 if (debug_column_info && s.column)
20541 add_AT_unsigned (die, DW_AT_decl_column, s.column);
20542 }
20543
20544 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
20545
20546 static void
20547 add_linkage_name_raw (dw_die_ref die, tree decl)
20548 {
20549 /* Defer until we have an assembler name set. */
20550 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
20551 {
20552 limbo_die_node *asm_name;
20553
20554 asm_name = ggc_cleared_alloc<limbo_die_node> ();
20555 asm_name->die = die;
20556 asm_name->created_for = decl;
20557 asm_name->next = deferred_asm_name;
20558 deferred_asm_name = asm_name;
20559 }
20560 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
20561 add_linkage_attr (die, decl);
20562 }
20563
20564 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
20565
20566 static void
20567 add_linkage_name (dw_die_ref die, tree decl)
20568 {
20569 if (debug_info_level > DINFO_LEVEL_NONE
20570 && VAR_OR_FUNCTION_DECL_P (decl)
20571 && TREE_PUBLIC (decl)
20572 && !(VAR_P (decl) && DECL_REGISTER (decl))
20573 && die->die_tag != DW_TAG_member)
20574 add_linkage_name_raw (die, decl);
20575 }
20576
20577 /* Add a DW_AT_name attribute and source coordinate attribute for the
20578 given decl, but only if it actually has a name. */
20579
20580 static void
20581 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
20582 bool no_linkage_name)
20583 {
20584 tree decl_name;
20585
20586 decl_name = DECL_NAME (decl);
20587 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20588 {
20589 const char *name = dwarf2_name (decl, 0);
20590 if (name)
20591 add_name_attribute (die, name);
20592 if (! DECL_ARTIFICIAL (decl))
20593 add_src_coords_attributes (die, decl);
20594
20595 if (!no_linkage_name)
20596 add_linkage_name (die, decl);
20597 }
20598
20599 #ifdef VMS_DEBUGGING_INFO
20600 /* Get the function's name, as described by its RTL. This may be different
20601 from the DECL_NAME name used in the source file. */
20602 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
20603 {
20604 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
20605 XEXP (DECL_RTL (decl), 0), false);
20606 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
20607 }
20608 #endif /* VMS_DEBUGGING_INFO */
20609 }
20610
20611 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
20612
20613 static void
20614 add_discr_value (dw_die_ref die, dw_discr_value *value)
20615 {
20616 dw_attr_node attr;
20617
20618 attr.dw_attr = DW_AT_discr_value;
20619 attr.dw_attr_val.val_class = dw_val_class_discr_value;
20620 attr.dw_attr_val.val_entry = NULL;
20621 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
20622 if (value->pos)
20623 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
20624 else
20625 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
20626 add_dwarf_attr (die, &attr);
20627 }
20628
20629 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
20630
20631 static void
20632 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
20633 {
20634 dw_attr_node attr;
20635
20636 attr.dw_attr = DW_AT_discr_list;
20637 attr.dw_attr_val.val_class = dw_val_class_discr_list;
20638 attr.dw_attr_val.val_entry = NULL;
20639 attr.dw_attr_val.v.val_discr_list = discr_list;
20640 add_dwarf_attr (die, &attr);
20641 }
20642
20643 static inline dw_discr_list_ref
20644 AT_discr_list (dw_attr_node *attr)
20645 {
20646 return attr->dw_attr_val.v.val_discr_list;
20647 }
20648
20649 #ifdef VMS_DEBUGGING_INFO
20650 /* Output the debug main pointer die for VMS */
20651
20652 void
20653 dwarf2out_vms_debug_main_pointer (void)
20654 {
20655 char label[MAX_ARTIFICIAL_LABEL_BYTES];
20656 dw_die_ref die;
20657
20658 /* Allocate the VMS debug main subprogram die. */
20659 die = new_die_raw (DW_TAG_subprogram);
20660 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
20661 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
20662 current_function_funcdef_no);
20663 add_AT_lbl_id (die, DW_AT_entry_pc, label);
20664
20665 /* Make it the first child of comp_unit_die (). */
20666 die->die_parent = comp_unit_die ();
20667 if (comp_unit_die ()->die_child)
20668 {
20669 die->die_sib = comp_unit_die ()->die_child->die_sib;
20670 comp_unit_die ()->die_child->die_sib = die;
20671 }
20672 else
20673 {
20674 die->die_sib = die;
20675 comp_unit_die ()->die_child = die;
20676 }
20677 }
20678 #endif /* VMS_DEBUGGING_INFO */
20679
20680 /* Push a new declaration scope. */
20681
20682 static void
20683 push_decl_scope (tree scope)
20684 {
20685 vec_safe_push (decl_scope_table, scope);
20686 }
20687
20688 /* Pop a declaration scope. */
20689
20690 static inline void
20691 pop_decl_scope (void)
20692 {
20693 decl_scope_table->pop ();
20694 }
20695
20696 /* walk_tree helper function for uses_local_type, below. */
20697
20698 static tree
20699 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
20700 {
20701 if (!TYPE_P (*tp))
20702 *walk_subtrees = 0;
20703 else
20704 {
20705 tree name = TYPE_NAME (*tp);
20706 if (name && DECL_P (name) && decl_function_context (name))
20707 return *tp;
20708 }
20709 return NULL_TREE;
20710 }
20711
20712 /* If TYPE involves a function-local type (including a local typedef to a
20713 non-local type), returns that type; otherwise returns NULL_TREE. */
20714
20715 static tree
20716 uses_local_type (tree type)
20717 {
20718 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
20719 return used;
20720 }
20721
20722 /* Return the DIE for the scope that immediately contains this type.
20723 Non-named types that do not involve a function-local type get global
20724 scope. Named types nested in namespaces or other types get their
20725 containing scope. All other types (i.e. function-local named types) get
20726 the current active scope. */
20727
20728 static dw_die_ref
20729 scope_die_for (tree t, dw_die_ref context_die)
20730 {
20731 dw_die_ref scope_die = NULL;
20732 tree containing_scope;
20733
20734 /* Non-types always go in the current scope. */
20735 gcc_assert (TYPE_P (t));
20736
20737 /* Use the scope of the typedef, rather than the scope of the type
20738 it refers to. */
20739 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
20740 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
20741 else
20742 containing_scope = TYPE_CONTEXT (t);
20743
20744 /* Use the containing namespace if there is one. */
20745 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
20746 {
20747 if (context_die == lookup_decl_die (containing_scope))
20748 /* OK */;
20749 else if (debug_info_level > DINFO_LEVEL_TERSE)
20750 context_die = get_context_die (containing_scope);
20751 else
20752 containing_scope = NULL_TREE;
20753 }
20754
20755 /* Ignore function type "scopes" from the C frontend. They mean that
20756 a tagged type is local to a parmlist of a function declarator, but
20757 that isn't useful to DWARF. */
20758 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
20759 containing_scope = NULL_TREE;
20760
20761 if (SCOPE_FILE_SCOPE_P (containing_scope))
20762 {
20763 /* If T uses a local type keep it local as well, to avoid references
20764 to function-local DIEs from outside the function. */
20765 if (current_function_decl && uses_local_type (t))
20766 scope_die = context_die;
20767 else
20768 scope_die = comp_unit_die ();
20769 }
20770 else if (TYPE_P (containing_scope))
20771 {
20772 /* For types, we can just look up the appropriate DIE. */
20773 if (debug_info_level > DINFO_LEVEL_TERSE)
20774 scope_die = get_context_die (containing_scope);
20775 else
20776 {
20777 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
20778 if (scope_die == NULL)
20779 scope_die = comp_unit_die ();
20780 }
20781 }
20782 else
20783 scope_die = context_die;
20784
20785 return scope_die;
20786 }
20787
20788 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
20789
20790 static inline int
20791 local_scope_p (dw_die_ref context_die)
20792 {
20793 for (; context_die; context_die = context_die->die_parent)
20794 if (context_die->die_tag == DW_TAG_inlined_subroutine
20795 || context_die->die_tag == DW_TAG_subprogram)
20796 return 1;
20797
20798 return 0;
20799 }
20800
20801 /* Returns nonzero if CONTEXT_DIE is a class. */
20802
20803 static inline int
20804 class_scope_p (dw_die_ref context_die)
20805 {
20806 return (context_die
20807 && (context_die->die_tag == DW_TAG_structure_type
20808 || context_die->die_tag == DW_TAG_class_type
20809 || context_die->die_tag == DW_TAG_interface_type
20810 || context_die->die_tag == DW_TAG_union_type));
20811 }
20812
20813 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
20814 whether or not to treat a DIE in this context as a declaration. */
20815
20816 static inline int
20817 class_or_namespace_scope_p (dw_die_ref context_die)
20818 {
20819 return (class_scope_p (context_die)
20820 || (context_die && context_die->die_tag == DW_TAG_namespace));
20821 }
20822
20823 /* Many forms of DIEs require a "type description" attribute. This
20824 routine locates the proper "type descriptor" die for the type given
20825 by 'type' plus any additional qualifiers given by 'cv_quals', and
20826 adds a DW_AT_type attribute below the given die. */
20827
20828 static void
20829 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
20830 bool reverse, dw_die_ref context_die)
20831 {
20832 enum tree_code code = TREE_CODE (type);
20833 dw_die_ref type_die = NULL;
20834
20835 /* ??? If this type is an unnamed subrange type of an integral, floating-point
20836 or fixed-point type, use the inner type. This is because we have no
20837 support for unnamed types in base_type_die. This can happen if this is
20838 an Ada subrange type. The correct solution is to emit a subrange type die. */
20839 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
20840 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
20841 type = TREE_TYPE (type), code = TREE_CODE (type);
20842
20843 if (code == ERROR_MARK
20844 /* Handle a special case. For functions whose return type is void, we
20845 generate *no* type attribute. (Note that no object may have type
20846 `void', so this only applies to function return types). */
20847 || code == VOID_TYPE)
20848 return;
20849
20850 type_die = modified_type_die (type,
20851 cv_quals | TYPE_QUALS (type),
20852 reverse,
20853 context_die);
20854
20855 if (type_die != NULL)
20856 add_AT_die_ref (object_die, DW_AT_type, type_die);
20857 }
20858
20859 /* Given an object die, add the calling convention attribute for the
20860 function call type. */
20861 static void
20862 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
20863 {
20864 enum dwarf_calling_convention value = DW_CC_normal;
20865
20866 value = ((enum dwarf_calling_convention)
20867 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
20868
20869 if (is_fortran ()
20870 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
20871 {
20872 /* DWARF 2 doesn't provide a way to identify a program's source-level
20873 entry point. DW_AT_calling_convention attributes are only meant
20874 to describe functions' calling conventions. However, lacking a
20875 better way to signal the Fortran main program, we used this for
20876 a long time, following existing custom. Now, DWARF 4 has
20877 DW_AT_main_subprogram, which we add below, but some tools still
20878 rely on the old way, which we thus keep. */
20879 value = DW_CC_program;
20880
20881 if (dwarf_version >= 4 || !dwarf_strict)
20882 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
20883 }
20884
20885 /* Only add the attribute if the backend requests it, and the
20886 value is not DW_CC_normal. */
20887 if (value && (value != DW_CC_normal))
20888 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
20889 }
20890
20891 /* Given a tree pointer to a struct, class, union, or enum type node, return
20892 a pointer to the (string) tag name for the given type, or zero if the type
20893 was declared without a tag. */
20894
20895 static const char *
20896 type_tag (const_tree type)
20897 {
20898 const char *name = 0;
20899
20900 if (TYPE_NAME (type) != 0)
20901 {
20902 tree t = 0;
20903
20904 /* Find the IDENTIFIER_NODE for the type name. */
20905 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
20906 && !TYPE_NAMELESS (type))
20907 t = TYPE_NAME (type);
20908
20909 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
20910 a TYPE_DECL node, regardless of whether or not a `typedef' was
20911 involved. */
20912 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
20913 && ! DECL_IGNORED_P (TYPE_NAME (type)))
20914 {
20915 /* We want to be extra verbose. Don't call dwarf_name if
20916 DECL_NAME isn't set. The default hook for decl_printable_name
20917 doesn't like that, and in this context it's correct to return
20918 0, instead of "<anonymous>" or the like. */
20919 if (DECL_NAME (TYPE_NAME (type))
20920 && !DECL_NAMELESS (TYPE_NAME (type)))
20921 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
20922 }
20923
20924 /* Now get the name as a string, or invent one. */
20925 if (!name && t != 0)
20926 name = IDENTIFIER_POINTER (t);
20927 }
20928
20929 return (name == 0 || *name == '\0') ? 0 : name;
20930 }
20931
20932 /* Return the type associated with a data member, making a special check
20933 for bit field types. */
20934
20935 static inline tree
20936 member_declared_type (const_tree member)
20937 {
20938 return (DECL_BIT_FIELD_TYPE (member)
20939 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
20940 }
20941
20942 /* Get the decl's label, as described by its RTL. This may be different
20943 from the DECL_NAME name used in the source file. */
20944
20945 #if 0
20946 static const char *
20947 decl_start_label (tree decl)
20948 {
20949 rtx x;
20950 const char *fnname;
20951
20952 x = DECL_RTL (decl);
20953 gcc_assert (MEM_P (x));
20954
20955 x = XEXP (x, 0);
20956 gcc_assert (GET_CODE (x) == SYMBOL_REF);
20957
20958 fnname = XSTR (x, 0);
20959 return fnname;
20960 }
20961 #endif
20962 \f
20963 /* For variable-length arrays that have been previously generated, but
20964 may be incomplete due to missing subscript info, fill the subscript
20965 info. Return TRUE if this is one of those cases. */
20966 static bool
20967 fill_variable_array_bounds (tree type)
20968 {
20969 if (TREE_ASM_WRITTEN (type)
20970 && TREE_CODE (type) == ARRAY_TYPE
20971 && variably_modified_type_p (type, NULL))
20972 {
20973 dw_die_ref array_die = lookup_type_die (type);
20974 if (!array_die)
20975 return false;
20976 add_subscript_info (array_die, type, !is_ada ());
20977 return true;
20978 }
20979 return false;
20980 }
20981
20982 /* These routines generate the internal representation of the DIE's for
20983 the compilation unit. Debugging information is collected by walking
20984 the declaration trees passed in from dwarf2out_decl(). */
20985
20986 static void
20987 gen_array_type_die (tree type, dw_die_ref context_die)
20988 {
20989 dw_die_ref array_die;
20990
20991 /* GNU compilers represent multidimensional array types as sequences of one
20992 dimensional array types whose element types are themselves array types.
20993 We sometimes squish that down to a single array_type DIE with multiple
20994 subscripts in the Dwarf debugging info. The draft Dwarf specification
20995 says that we are allowed to do this kind of compression in C, because
20996 there is no difference between an array of arrays and a multidimensional
20997 array. We don't do this for Ada, to remain as close as possible to the
20998 actual representation, which is especially important given the language's
20999 flexibility with respect to arrays of variable size. */
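/* A sketch of the collapsed shape (assuming the C front end):

       int a[2][3];

   becomes a single DW_TAG_array_type whose element type is "int"
   and which has two DW_TAG_subrange_type children (upper bounds 1
   and 2), rather than an array type whose element type is itself
   another array type. */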
21000
21001 bool collapse_nested_arrays = !is_ada ();
21002
21003 if (fill_variable_array_bounds (type))
21004 return;
21005
21006 dw_die_ref scope_die = scope_die_for (type, context_die);
21007 tree element_type;
21008
21009 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21010 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
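/* A sketch of the expected shape (assuming default character kind):

       character(len=10) :: s

   is described by a DW_TAG_string_type DIE with DW_AT_byte_size 10,
   rather than by an array of characters. */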
21011 if (TYPE_STRING_FLAG (type)
21012 && TREE_CODE (type) == ARRAY_TYPE
21013 && is_fortran ()
21014 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21015 {
21016 HOST_WIDE_INT size;
21017
21018 array_die = new_die (DW_TAG_string_type, scope_die, type);
21019 add_name_attribute (array_die, type_tag (type));
21020 equate_type_number_to_die (type, array_die);
21021 size = int_size_in_bytes (type);
21022 if (size >= 0)
21023 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21024 /* ??? We can't annotate types late, but for LTO we may not
21025 generate a location early either (gfortran.dg/save_6.f90). */
21026 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21027 && TYPE_DOMAIN (type) != NULL_TREE
21028 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21029 {
21030 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21031 tree rszdecl = szdecl;
21032
21033 size = int_size_in_bytes (TREE_TYPE (szdecl));
21034 if (!DECL_P (szdecl))
21035 {
21036 if (TREE_CODE (szdecl) == INDIRECT_REF
21037 && DECL_P (TREE_OPERAND (szdecl, 0)))
21038 {
21039 rszdecl = TREE_OPERAND (szdecl, 0);
21040 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21041 != DWARF2_ADDR_SIZE)
21042 size = 0;
21043 }
21044 else
21045 size = 0;
21046 }
21047 if (size > 0)
21048 {
21049 dw_loc_list_ref loc
21050 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21051 NULL);
21052 if (loc)
21053 {
21054 add_AT_location_description (array_die, DW_AT_string_length,
21055 loc);
21056 if (size != DWARF2_ADDR_SIZE)
21057 add_AT_unsigned (array_die, dwarf_version >= 5
21058 ? DW_AT_string_length_byte_size
21059 : DW_AT_byte_size, size);
21060 }
21061 }
21062 }
21063 return;
21064 }
21065
21066 array_die = new_die (DW_TAG_array_type, scope_die, type);
21067 add_name_attribute (array_die, type_tag (type));
21068 equate_type_number_to_die (type, array_die);
21069
21070 if (TREE_CODE (type) == VECTOR_TYPE)
21071 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21072
21073 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21074 if (is_fortran ()
21075 && TREE_CODE (type) == ARRAY_TYPE
21076 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21077 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21078 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21079
21080 #if 0
21081 /* We default the array ordering. Debuggers will probably do the right
21082 things even if DW_AT_ordering is not present. It's not even an issue
21083 until we start to get into multidimensional arrays anyway. If a debugger
21084 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21085 then we'll have to put the DW_AT_ordering attribute back in. (But if
21086 and when we find out that we need to put these in, we will only do so
21087 for multidimensional arrays.) */
21088 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21089 #endif
21090
21091 if (TREE_CODE (type) == VECTOR_TYPE)
21092 {
21093 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21094 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21095 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21096 add_bound_info (subrange_die, DW_AT_upper_bound,
21097 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21098 }
21099 else
21100 add_subscript_info (array_die, type, collapse_nested_arrays);
21101
21102 /* Add representation of the type of the elements of this array type and
21103 emit the corresponding DIE if we haven't done it already. */
21104 element_type = TREE_TYPE (type);
21105 if (collapse_nested_arrays)
21106 while (TREE_CODE (element_type) == ARRAY_TYPE)
21107 {
21108 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21109 break;
21110 element_type = TREE_TYPE (element_type);
21111 }
21112
21113 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21114 TREE_CODE (type) == ARRAY_TYPE
21115 && TYPE_REVERSE_STORAGE_ORDER (type),
21116 context_die);
21117
21118 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21119 if (TYPE_ARTIFICIAL (type))
21120 add_AT_flag (array_die, DW_AT_artificial, 1);
21121
21122 if (get_AT (array_die, DW_AT_name))
21123 add_pubtype (type, array_die);
21124
21125 add_alignment_attribute (array_die, type);
21126 }
21127
21128 /* This routine generates a DIE for an array with a hidden descriptor;
21129 the details are filled into *info by a langhook. */
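/* A sketch of the main (Fortran) use of this path:

       real, allocatable :: a(:)

   is backed by a descriptor record, so the DW_TAG_array_type DIE
   built below carries DW_AT_data_location, DW_AT_allocated and
   per-dimension bounds whose expressions read the descriptor
   fields, as described by the langhook in *INFO. */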
21130
21131 static void
21132 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21133 dw_die_ref context_die)
21134 {
21135 const dw_die_ref scope_die = scope_die_for (type, context_die);
21136 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21137 struct loc_descr_context context = { type, info->base_decl, NULL,
21138 false, false };
21139 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21140 int dim;
21141
21142 add_name_attribute (array_die, type_tag (type));
21143 equate_type_number_to_die (type, array_die);
21144
21145 if (info->ndimensions > 1)
21146 switch (info->ordering)
21147 {
21148 case array_descr_ordering_row_major:
21149 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21150 break;
21151 case array_descr_ordering_column_major:
21152 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21153 break;
21154 default:
21155 break;
21156 }
21157
21158 if (dwarf_version >= 3 || !dwarf_strict)
21159 {
21160 if (info->data_location)
21161 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21162 dw_scalar_form_exprloc, &context);
21163 if (info->associated)
21164 add_scalar_info (array_die, DW_AT_associated, info->associated,
21165 dw_scalar_form_constant
21166 | dw_scalar_form_exprloc
21167 | dw_scalar_form_reference, &context);
21168 if (info->allocated)
21169 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21170 dw_scalar_form_constant
21171 | dw_scalar_form_exprloc
21172 | dw_scalar_form_reference, &context);
21173 if (info->stride)
21174 {
21175 const enum dwarf_attribute attr
21176 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21177 const int forms
21178 = (info->stride_in_bits)
21179 ? dw_scalar_form_constant
21180 : (dw_scalar_form_constant
21181 | dw_scalar_form_exprloc
21182 | dw_scalar_form_reference);
21183
21184 add_scalar_info (array_die, attr, info->stride, forms, &context);
21185 }
21186 }
21187 if (dwarf_version >= 5)
21188 {
21189 if (info->rank)
21190 {
21191 add_scalar_info (array_die, DW_AT_rank, info->rank,
21192 dw_scalar_form_constant
21193 | dw_scalar_form_exprloc, &context);
21194 subrange_tag = DW_TAG_generic_subrange;
21195 context.placeholder_arg = true;
21196 }
21197 }
21198
21199 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21200
21201 for (dim = 0; dim < info->ndimensions; dim++)
21202 {
21203 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21204
21205 if (info->dimen[dim].bounds_type)
21206 add_type_attribute (subrange_die,
21207 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21208 false, context_die);
21209 if (info->dimen[dim].lower_bound)
21210 add_bound_info (subrange_die, DW_AT_lower_bound,
21211 info->dimen[dim].lower_bound, &context);
21212 if (info->dimen[dim].upper_bound)
21213 add_bound_info (subrange_die, DW_AT_upper_bound,
21214 info->dimen[dim].upper_bound, &context);
21215 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21216 add_scalar_info (subrange_die, DW_AT_byte_stride,
21217 info->dimen[dim].stride,
21218 dw_scalar_form_constant
21219 | dw_scalar_form_exprloc
21220 | dw_scalar_form_reference,
21221 &context);
21222 }
21223
21224 gen_type_die (info->element_type, context_die);
21225 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21226 TREE_CODE (type) == ARRAY_TYPE
21227 && TYPE_REVERSE_STORAGE_ORDER (type),
21228 context_die);
21229
21230 if (get_AT (array_die, DW_AT_name))
21231 add_pubtype (type, array_die);
21232
21233 add_alignment_attribute (array_die, type);
21234 }
21235
21236 #if 0
21237 static void
21238 gen_entry_point_die (tree decl, dw_die_ref context_die)
21239 {
21240 tree origin = decl_ultimate_origin (decl);
21241 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21242
21243 if (origin != NULL)
21244 add_abstract_origin_attribute (decl_die, origin);
21245 else
21246 {
21247 add_name_and_src_coords_attributes (decl_die, decl);
21248 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21249 TYPE_UNQUALIFIED, false, context_die);
21250 }
21251
21252 if (DECL_ABSTRACT_P (decl))
21253 equate_decl_number_to_die (decl, decl_die);
21254 else
21255 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21256 }
21257 #endif
21258
21259 /* Walk through the list of incomplete types again, trying once more to
21260 emit full debugging info for them. */
21261
21262 static void
21263 retry_incomplete_types (void)
21264 {
21265 set_early_dwarf s;
21266 int i;
21267
21268 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21269 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21270 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21271 vec_safe_truncate (incomplete_types, 0);
21272 }
21273
21274 /* Determine what tag to use for a record type. */
21275
21276 static enum dwarf_tag
21277 record_type_tag (tree type)
21278 {
21279 if (! lang_hooks.types.classify_record)
21280 return DW_TAG_structure_type;
21281
21282 switch (lang_hooks.types.classify_record (type))
21283 {
21284 case RECORD_IS_STRUCT:
21285 return DW_TAG_structure_type;
21286
21287 case RECORD_IS_CLASS:
21288 return DW_TAG_class_type;
21289
21290 case RECORD_IS_INTERFACE:
21291 if (dwarf_version >= 3 || !dwarf_strict)
21292 return DW_TAG_interface_type;
21293 return DW_TAG_structure_type;
21294
21295 default:
21296 gcc_unreachable ();
21297 }
21298 }
21299
21300 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21301 include all of the information about the enumeration values also. Each
21302 enumerated type name/value is listed as a child of the enumerated type
21303 DIE. */
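/* A sketch of the expected shape:

       enum color { RED = 1, GREEN = 2 };

   yields a DW_TAG_enumeration_type DIE with two DW_TAG_enumerator
   children, each carrying DW_AT_name and DW_AT_const_value (1 and
   2 respectively). */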
21304
21305 static dw_die_ref
21306 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21307 {
21308 dw_die_ref type_die = lookup_type_die (type);
21309
21310 if (type_die == NULL)
21311 {
21312 type_die = new_die (DW_TAG_enumeration_type,
21313 scope_die_for (type, context_die), type);
21314 equate_type_number_to_die (type, type_die);
21315 add_name_attribute (type_die, type_tag (type));
21316 if (dwarf_version >= 4 || !dwarf_strict)
21317 {
21318 if (ENUM_IS_SCOPED (type))
21319 add_AT_flag (type_die, DW_AT_enum_class, 1);
21320 if (ENUM_IS_OPAQUE (type))
21321 add_AT_flag (type_die, DW_AT_declaration, 1);
21322 }
21323 if (!dwarf_strict)
21324 add_AT_unsigned (type_die, DW_AT_encoding,
21325 TYPE_UNSIGNED (type)
21326 ? DW_ATE_unsigned
21327 : DW_ATE_signed);
21328 }
21329 else if (! TYPE_SIZE (type))
21330 return type_die;
21331 else
21332 remove_AT (type_die, DW_AT_declaration);
21333
21334 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21335 given enum type is incomplete, do not generate the DW_AT_byte_size
21336 attribute or the DW_AT_element_list attribute. */
21337 if (TYPE_SIZE (type))
21338 {
21339 tree link;
21340
21341 TREE_ASM_WRITTEN (type) = 1;
21342 add_byte_size_attribute (type_die, type);
21343 add_alignment_attribute (type_die, type);
21344 if (dwarf_version >= 3 || !dwarf_strict)
21345 {
21346 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21347 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21348 context_die);
21349 }
21350 if (TYPE_STUB_DECL (type) != NULL_TREE)
21351 {
21352 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21353 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21354 }
21355
21356 /* If the first reference to this type was as the return type of an
21357 inline function, then it may not have a parent. Fix this now. */
21358 if (type_die->die_parent == NULL)
21359 add_child_die (scope_die_for (type, context_die), type_die);
21360
21361 for (link = TYPE_VALUES (type);
21362 link != NULL; link = TREE_CHAIN (link))
21363 {
21364 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21365 tree value = TREE_VALUE (link);
21366
21367 add_name_attribute (enum_die,
21368 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21369
21370 if (TREE_CODE (value) == CONST_DECL)
21371 value = DECL_INITIAL (value);
21372
21373 if (simple_type_size_in_bits (TREE_TYPE (value))
21374 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21375 {
21376 /* For constant forms created by add_AT_unsigned, DWARF
21377 consumers (GDB, elfutils, etc.) always zero extend
21378 the value. Only when the actual value is negative
21379 do we need to use add_AT_int to generate a constant
21380 form that can represent negative values. */
21381 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21382 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21383 add_AT_unsigned (enum_die, DW_AT_const_value,
21384 (unsigned HOST_WIDE_INT) val);
21385 else
21386 add_AT_int (enum_die, DW_AT_const_value, val);
21387 }
21388 else
21389 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21390 that here. TODO: This should be re-worked to use correct
21391 signed/unsigned double tags for all cases. */
21392 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21393 }
21394
21395 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21396 if (TYPE_ARTIFICIAL (type))
21397 add_AT_flag (type_die, DW_AT_artificial, 1);
21398 }
21399 else
21400 add_AT_flag (type_die, DW_AT_declaration, 1);
21401
21402 add_pubtype (type, type_die);
21403
21404 return type_die;
21405 }
21406
21407 /* Generate a DIE to represent either a real live formal parameter decl or to
21408 represent just the type of some formal parameter position in some function
21409 type.
21410
21411 Note that this routine is a bit unusual because its argument may be a
21412 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21413 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21414 node. If it's the former then this function is being called to output a
21415 DIE to represent a formal parameter object (or some inlining thereof). If
21416 it's the latter, then this function is only being called to output a
21417 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21418 argument type of some subprogram type.
21419 If EMIT_NAME_P is true, name and source coordinate attributes
21420 are emitted. */
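/* A sketch of the two cases: for a definition "void f (int x)" this
   is called with the PARM_DECL for X and emits a named
   DW_TAG_formal_parameter with location information; for a type
   such as "void (*)(int)" it is called with the bare type "int" and
   emits a nameless DW_TAG_formal_parameter carrying only
   DW_AT_type. */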
21421
21422 static dw_die_ref
21423 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21424 dw_die_ref context_die)
21425 {
21426 tree node_or_origin = node ? node : origin;
21427 tree ultimate_origin;
21428 dw_die_ref parm_die = NULL;
21429
21430 if (DECL_P (node_or_origin))
21431 {
21432 parm_die = lookup_decl_die (node);
21433
21434 /* If the contexts differ, we may not be talking about the same
21435 thing.
21436 ??? When in LTO the DIE parent is the "abstract" copy and the
21437 context_die is the specification "copy". But this whole block
21438 should eventually be no longer needed. */
21439 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21440 {
21441 if (!DECL_ABSTRACT_P (node))
21442 {
21443 /* This can happen when creating an inlined instance, in
21444 which case we need to create a new DIE that will get
21445 annotated with DW_AT_abstract_origin. */
21446 parm_die = NULL;
21447 }
21448 else
21449 gcc_unreachable ();
21450 }
21451
21452 if (parm_die && parm_die->die_parent == NULL)
21453 {
21454 /* Check that parm_die already has the right attributes that
21455 we would have added below. If any attributes are
21456 missing, fall through to add them. */
21457 if (! DECL_ABSTRACT_P (node_or_origin)
21458 && !get_AT (parm_die, DW_AT_location)
21459 && !get_AT (parm_die, DW_AT_const_value))
21460 /* We are missing location info, and are about to add it. */
21461 ;
21462 else
21463 {
21464 add_child_die (context_die, parm_die);
21465 return parm_die;
21466 }
21467 }
21468 }
21469
21470 /* If we have a previously generated DIE, use it, unless this is a
21471 concrete instance (origin != NULL), in which case we need a new
21472 DIE with a corresponding DW_AT_abstract_origin. */
21473 bool reusing_die;
21474 if (parm_die && origin == NULL)
21475 reusing_die = true;
21476 else
21477 {
21478 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
21479 reusing_die = false;
21480 }
21481
21482 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
21483 {
21484 case tcc_declaration:
21485 ultimate_origin = decl_ultimate_origin (node_or_origin);
21486 if (node || ultimate_origin)
21487 origin = ultimate_origin;
21488
21489 if (reusing_die)
21490 goto add_location;
21491
21492 if (origin != NULL)
21493 add_abstract_origin_attribute (parm_die, origin);
21494 else if (emit_name_p)
21495 add_name_and_src_coords_attributes (parm_die, node);
21496 if (origin == NULL
21497 || (! DECL_ABSTRACT_P (node_or_origin)
21498 && variably_modified_type_p (TREE_TYPE (node_or_origin),
21499 decl_function_context
21500 (node_or_origin))))
21501 {
21502 tree type = TREE_TYPE (node_or_origin);
21503 if (decl_by_reference_p (node_or_origin))
21504 add_type_attribute (parm_die, TREE_TYPE (type),
21505 TYPE_UNQUALIFIED,
21506 false, context_die);
21507 else
21508 add_type_attribute (parm_die, type,
21509 decl_quals (node_or_origin),
21510 false, context_die);
21511 }
21512 if (origin == NULL && DECL_ARTIFICIAL (node))
21513 add_AT_flag (parm_die, DW_AT_artificial, 1);
21514 add_location:
21515 if (node && node != origin)
21516 equate_decl_number_to_die (node, parm_die);
21517 if (! DECL_ABSTRACT_P (node_or_origin))
21518 add_location_or_const_value_attribute (parm_die, node_or_origin,
21519 node == NULL);
21520
21521 break;
21522
21523 case tcc_type:
21524 /* We were called with some kind of a ..._TYPE node. */
21525 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
21526 context_die);
21527 break;
21528
21529 default:
21530 gcc_unreachable ();
21531 }
21532
21533 return parm_die;
21534 }
21535
21536 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
21537 children DW_TAG_formal_parameter DIEs representing the arguments of the
21538 parameter pack.
21539
21540 PARM_PACK must be a function parameter pack.
21541 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
21542 must point to the subsequent arguments of the function PACK_ARG belongs to.
21543 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
21544 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
21545 following the last one for which a DIE was generated. */
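/* A sketch of the expected shape (assuming the C++ front end):

       template <typename... T> void f (T... args);

   for an instantiation f<int, long>, the subprogram DIE gets a
   DW_TAG_GNU_formal_parameter_pack child whose own children are
   the DW_TAG_formal_parameter DIEs for the "int" and "long"
   arguments of the pack. */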
21546
21547 static dw_die_ref
21548 gen_formal_parameter_pack_die (tree parm_pack,
21549 tree pack_arg,
21550 dw_die_ref subr_die,
21551 tree *next_arg)
21552 {
21553 tree arg;
21554 dw_die_ref parm_pack_die;
21555
21556 gcc_assert (parm_pack
21557 && lang_hooks.function_parameter_pack_p (parm_pack)
21558 && subr_die);
21559
21560 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
21561 add_src_coords_attributes (parm_pack_die, parm_pack);
21562
21563 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
21564 {
21565 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
21566 parm_pack))
21567 break;
21568 gen_formal_parameter_die (arg, NULL,
21569 false /* Don't emit name attribute. */,
21570 parm_pack_die);
21571 }
21572 if (next_arg)
21573 *next_arg = arg;
21574 return parm_pack_die;
21575 }
21576
21577 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
21578 at the end of an (ANSI prototyped) formal parameters list. */
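/* A sketch of the expected shape:

       int printf (const char *fmt, ...);

   the subroutine DIE gets a DW_TAG_formal_parameter child for FMT
   followed by a DW_TAG_unspecified_parameters child standing in for
   the ellipsis. */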
21579
21580 static void
21581 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
21582 {
21583 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
21584 }
21585
21586 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
21587 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
21588 parameters as specified in some function type specification (except for
21589 those which appear as part of a function *definition*). */
21590
21591 static void
21592 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
21593 {
21594 tree link;
21595 tree formal_type = NULL;
21596 tree first_parm_type;
21597 tree arg;
21598
21599 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
21600 {
21601 arg = DECL_ARGUMENTS (function_or_method_type);
21602 function_or_method_type = TREE_TYPE (function_or_method_type);
21603 }
21604 else
21605 arg = NULL_TREE;
21606
21607 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
21608
21609 /* Make our first pass over the list of formal parameter types and output a
21610 DW_TAG_formal_parameter DIE for each one. */
21611 for (link = first_parm_type; link; )
21612 {
21613 dw_die_ref parm_die;
21614
21615 formal_type = TREE_VALUE (link);
21616 if (formal_type == void_type_node)
21617 break;
21618
21619 /* Output a (nameless) DIE to represent the formal parameter itself. */
21620 if (!POINTER_BOUNDS_TYPE_P (formal_type))
21621 {
21622 parm_die = gen_formal_parameter_die (formal_type, NULL,
21623 true /* Emit name attribute. */,
21624 context_die);
21625 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
21626 && link == first_parm_type)
21627 {
21628 add_AT_flag (parm_die, DW_AT_artificial, 1);
21629 if (dwarf_version >= 3 || !dwarf_strict)
21630 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
21631 }
21632 else if (arg && DECL_ARTIFICIAL (arg))
21633 add_AT_flag (parm_die, DW_AT_artificial, 1);
21634 }
21635
21636 link = TREE_CHAIN (link);
21637 if (arg)
21638 arg = DECL_CHAIN (arg);
21639 }
21640
21641 /* If this function type has an ellipsis, add a
21642 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
21643 if (formal_type != void_type_node)
21644 gen_unspecified_parameters_die (function_or_method_type, context_die);
21645
21646 /* Make our second (and final) pass over the list of formal parameter types
21647 and output DIEs to represent those types (as necessary). */
21648 for (link = TYPE_ARG_TYPES (function_or_method_type);
21649 link && TREE_VALUE (link);
21650 link = TREE_CHAIN (link))
21651 gen_type_die (TREE_VALUE (link), context_die);
21652 }
21653
21654 /* We want to generate the DIE for TYPE so that we can generate the
21655 die for MEMBER, which has been defined; we will need to refer back
21656 to the member declaration nested within TYPE. If we're trying to
21657 generate minimal debug info for TYPE, processing TYPE won't do the
21658 trick; we need to attach the member declaration by hand. */
21659
21660 static void
21661 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
21662 {
21663 gen_type_die (type, context_die);
21664
21665 /* If we're trying to avoid duplicate debug info, we may not have
21666 emitted the member decl for this function. Emit it now. */
21667 if (TYPE_STUB_DECL (type)
21668 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
21669 && ! lookup_decl_die (member))
21670 {
21671 dw_die_ref type_die;
21672 gcc_assert (!decl_ultimate_origin (member));
21673
21674 push_decl_scope (type);
21675 type_die = lookup_type_die_strip_naming_typedef (type);
21676 if (TREE_CODE (member) == FUNCTION_DECL)
21677 gen_subprogram_die (member, type_die);
21678 else if (TREE_CODE (member) == FIELD_DECL)
21679 {
21680 /* Ignore the nameless fields that are used to skip bits but handle
21681 C++ anonymous unions and structs. */
21682 if (DECL_NAME (member) != NULL_TREE
21683 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
21684 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
21685 {
21686 struct vlr_context vlr_ctx = {
21687 DECL_CONTEXT (member), /* struct_type */
21688 NULL_TREE /* variant_part_offset */
21689 };
21690 gen_type_die (member_declared_type (member), type_die);
21691 gen_field_die (member, &vlr_ctx, type_die);
21692 }
21693 }
21694 else
21695 gen_variable_die (member, NULL_TREE, type_die);
21696
21697 pop_decl_scope ();
21698 }
21699 }
21700 \f
21701 /* Forward declare these functions, because they are mutually recursive
21702 with their set_block_* pairing functions. */
21703 static void set_decl_origin_self (tree);
21704
21705 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
21706 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
21707 that it points to the node itself, thus indicating that the node is its
21708 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
21709 the given node is NULL, recursively descend the decl/block tree which
21710 it is the root of, and for each other ..._DECL or BLOCK node contained
21711 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
21712 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
21713 values to point to themselves. */
21714
21715 static void
21716 set_block_origin_self (tree stmt)
21717 {
21718 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
21719 {
21720 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
21721
21722 {
21723 tree local_decl;
21724
21725 for (local_decl = BLOCK_VARS (stmt);
21726 local_decl != NULL_TREE;
21727 local_decl = DECL_CHAIN (local_decl))
21728 /* Do not recurse on nested functions since the inlining status
21729 of parent and child can be different as per the DWARF spec. */
21730 if (TREE_CODE (local_decl) != FUNCTION_DECL
21731 && !DECL_EXTERNAL (local_decl))
21732 set_decl_origin_self (local_decl);
21733 }
21734
21735 {
21736 tree subblock;
21737
21738 for (subblock = BLOCK_SUBBLOCKS (stmt);
21739 subblock != NULL_TREE;
21740 subblock = BLOCK_CHAIN (subblock))
21741 set_block_origin_self (subblock); /* Recurse. */
21742 }
21743 }
21744 }
21745
21746 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
21747 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
21748 node so that it points to the node itself, thus indicating that the
21749 node represents its own (abstract) origin. Additionally, if the
21750 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
21751 the decl/block tree of which the given node is the root of, and for
21752 each other ..._DECL or BLOCK node contained therein whose
21753 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
21754 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
21755 point to themselves. */
21756
21757 static void
21758 set_decl_origin_self (tree decl)
21759 {
21760 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
21761 {
21762 DECL_ABSTRACT_ORIGIN (decl) = decl;
21763 if (TREE_CODE (decl) == FUNCTION_DECL)
21764 {
21765 tree arg;
21766
21767 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
21768 DECL_ABSTRACT_ORIGIN (arg) = arg;
21769 if (DECL_INITIAL (decl) != NULL_TREE
21770 && DECL_INITIAL (decl) != error_mark_node)
21771 set_block_origin_self (DECL_INITIAL (decl));
21772 }
21773 }
21774 }
21775 \f
21776 /* Mark the early DIE for DECL as the abstract instance. */
21777
21778 static void
21779 dwarf2out_abstract_function (tree decl)
21780 {
21781 dw_die_ref old_die;
21782
21783 /* Make sure we have the actual abstract inline, not a clone. */
21784 decl = DECL_ORIGIN (decl);
21785
21786 if (DECL_IGNORED_P (decl))
21787 return;
21788
21789 old_die = lookup_decl_die (decl);
21790 /* With early debug we always have an old DIE unless we are in LTO
21791 and the user did not compile but only link with debug. */
21792 if (in_lto_p && ! old_die)
21793 return;
21794 gcc_assert (old_die != NULL);
21795 if (get_AT (old_die, DW_AT_inline)
21796 || get_AT (old_die, DW_AT_abstract_origin))
21797 /* We've already generated the abstract instance. */
21798 return;
21799
21800 /* Go ahead and put DW_AT_inline on the DIE. */
21801 if (DECL_DECLARED_INLINE_P (decl))
21802 {
21803 if (cgraph_function_possibly_inlined_p (decl))
21804 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
21805 else
21806 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
21807 }
21808 else
21809 {
21810 if (cgraph_function_possibly_inlined_p (decl))
21811 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
21812 else
21813 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
21814 }
21815
21816 if (DECL_DECLARED_INLINE_P (decl)
21817 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
21818 add_AT_flag (old_die, DW_AT_artificial, 1);
21819
21820 set_decl_origin_self (decl);
21821 }
21822
21823 /* Helper function of premark_used_types() which gets called through
21824 htab_traverse.
21825
21826 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21827 marked as unused by prune_unused_types. */
21828
21829 bool
21830 premark_used_types_helper (tree const &type, void *)
21831 {
21832 dw_die_ref die;
21833
21834 die = lookup_type_die (type);
21835 if (die != NULL)
21836 die->die_perennial_p = 1;
21837 return true;
21838 }
21839
21840 /* Helper function of premark_types_used_by_global_vars which gets called
21841 through htab_traverse.
21842
21843 Marks the DIE of a given type in *SLOT as perennial, so it never gets
21844 marked as unused by prune_unused_types. The DIE of the type is marked
21845 only if the global variable using the type will actually be emitted. */
21846
21847 int
21848 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
21849 void *)
21850 {
21851 struct types_used_by_vars_entry *entry;
21852 dw_die_ref die;
21853
21854 entry = (struct types_used_by_vars_entry *) *slot;
21855 gcc_assert (entry->type != NULL
21856 && entry->var_decl != NULL);
21857 die = lookup_type_die (entry->type);
21858 if (die)
21859 {
21860 /* Ask cgraph if the global variable really is to be emitted.
21861 If yes, then we'll keep the DIE of ENTRY->TYPE. */
21862 varpool_node *node = varpool_node::get (entry->var_decl);
21863 if (node && node->definition)
21864 {
21865 die->die_perennial_p = 1;
21866 /* Keep the parent DIEs as well. */
21867 while ((die = die->die_parent) && die->die_perennial_p == 0)
21868 die->die_perennial_p = 1;
21869 }
21870 }
21871 return 1;
21872 }
21873
21874 /* Mark all members of used_types_hash as perennial. */
21875
21876 static void
21877 premark_used_types (struct function *fun)
21878 {
21879 if (fun && fun->used_types_hash)
21880 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
21881 }
21882
21883 /* Mark all members of types_used_by_vars_entry as perennial. */
21884
21885 static void
21886 premark_types_used_by_global_vars (void)
21887 {
21888 if (types_used_by_vars_hash)
21889 types_used_by_vars_hash
21890 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
21891 }
21892
21893 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
21894 for the call arg loc node CA_LOC. */
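/* A sketch of the expected shape (DWARF 5 names; for earlier
   versions dwarf_TAG/dwarf_AT map these to the DW_TAG_GNU_call_site
   equivalents): a call "foo ();" whose return point is label L
   yields a DW_TAG_call_site DIE with DW_AT_call_return_pc set to L
   and, when the callee is known, DW_AT_call_origin referring to
   foo's DIE. */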
21895
21896 static dw_die_ref
21897 gen_call_site_die (tree decl, dw_die_ref subr_die,
21898 struct call_arg_loc_node *ca_loc)
21899 {
21900 dw_die_ref stmt_die = NULL, die;
21901 tree block = ca_loc->block;
21902
21903 while (block
21904 && block != DECL_INITIAL (decl)
21905 && TREE_CODE (block) == BLOCK)
21906 {
21907 stmt_die = BLOCK_DIE (block);
21908 if (stmt_die)
21909 break;
21910 block = BLOCK_SUPERCONTEXT (block);
21911 }
21912 if (stmt_die == NULL)
21913 stmt_die = subr_die;
21914 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
21915 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
21916 if (ca_loc->tail_call_p)
21917 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
21918 if (ca_loc->symbol_ref)
21919 {
21920 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
21921 if (tdie)
21922 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
21923 else
21924 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
21925 false);
21926 }
21927 return die;
21928 }
21929
21930 /* Generate a DIE to represent a declared function (either file-scope or
21931 block-local). */
21932
21933 static void
21934 gen_subprogram_die (tree decl, dw_die_ref context_die)
21935 {
21936 tree origin = decl_ultimate_origin (decl);
21937 dw_die_ref subr_die;
21938 dw_die_ref old_die = lookup_decl_die (decl);
21939
21940 /* This function gets called multiple times for different stages of
21941 the debug process. For example, for func() in this code:
21942
21943 namespace S
21944 {
21945 void func() { ... }
21946 }
21947
21948 ...we get called 4 times. Twice in early debug and twice in
21949 late debug:
21950
21951 Early debug
21952 -----------
21953
21954 1. Once while generating func() within the namespace. This is
21955 the declaration. The declaration bit below is set, as the
21956 context is the namespace.
21957
21958 A new DIE will be generated with DW_AT_declaration set.
21959
21960 2. Once for func() itself. This is the specification. The
21961 declaration bit below is clear as the context is the CU.
21962
21963 We will use the cached DIE from (1) to create a new DIE with
21964 DW_AT_specification pointing to the declaration in (1).
21965
21966 Late debug via rest_of_handle_final()
21967 -------------------------------------
21968
21969 3. Once generating func() within the namespace. This is also the
21970 declaration, as in (1), but this time we will early exit below
21971 as we have a cached DIE and a declaration needs no additional
21972 annotations (no locations), as the source declaration line
21973 info is enough.
21974
21975 4. Once for func() itself. As in (2), this is the specification,
21976 but this time we will re-use the cached DIE, and just annotate
21977 it with the location information that should now be available.
21978
21979 For something without namespaces, but with abstract instances, we
21980 are also called multiple times:
21981
21982 class Base
21983 {
21984 public:
21985 Base (); // constructor declaration (1)
21986 };
21987
21988 Base::Base () { } // constructor specification (2)
21989
21990 Early debug
21991 -----------
21992
21993 1. Once for the Base() constructor by virtue of it being a
21994 member of the Base class. This is done via
21995 rest_of_type_compilation.
21996
21997 This is a declaration, so a new DIE will be created with
21998 DW_AT_declaration.
21999
22000 2. Once for the Base() constructor definition, but this time
22001 while generating the abstract instance of the base
22002 constructor (__base_ctor) which is being generated via early
22003 debug of reachable functions.
22004
22005 Even though we have a cached version of the declaration (1),
22006 we will create a DW_AT_specification of the declaration DIE
22007 in (1).
22008
22009 3. Once for the __base_ctor itself, but this time, we generate
22010 a DW_AT_abstract_origin version of the DW_AT_specification in
22011 (2).
22012
22013 Late debug via rest_of_handle_final
22014 -----------------------------------
22015
22016 4. One final time for the __base_ctor (which will have a cached
22017 DIE with DW_AT_abstract_origin created in (3)). This time,
22018 we will just annotate the location information now
22019 available.
22020 */
22021 int declaration = (current_function_decl != decl
22022 || class_or_namespace_scope_p (context_die));
22023
22024 /* Now that the C++ front end lazily declares artificial member fns, we
22025 might need to retrofit the declaration into its class. */
22026 if (!declaration && !origin && !old_die
22027 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22028 && !class_or_namespace_scope_p (context_die)
22029 && debug_info_level > DINFO_LEVEL_TERSE)
22030 old_die = force_decl_die (decl);
22031
22032 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22033 if (origin != NULL)
22034 {
22035 gcc_assert (!declaration || local_scope_p (context_die));
22036
22037 /* Fixup die_parent for the abstract instance of a nested
22038 inline function. */
22039 if (old_die && old_die->die_parent == NULL)
22040 add_child_die (context_die, old_die);
22041
22042 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22043 {
22044 /* If we have a DW_AT_abstract_origin we have a working
22045 cached version. */
22046 subr_die = old_die;
22047 }
22048 else
22049 {
22050 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22051 add_abstract_origin_attribute (subr_die, origin);
22052 /* This is where the actual code for a cloned function is.
22053 Let's emit linkage name attribute for it. This helps
22054 debuggers to, e.g., set breakpoints into
22055 constructors/destructors when the user asks "break
22056 K::K". */
22057 add_linkage_name (subr_die, decl);
22058 }
22059 }
22060 /* A cached copy, possibly from early dwarf generation. Reuse as
22061 much as possible. */
22062 else if (old_die)
22063 {
22064 /* A declaration that has been previously dumped needs no
22065 additional information. */
22066 if (declaration)
22067 return;
22068
22069 if (!get_AT_flag (old_die, DW_AT_declaration)
22070 /* We can have a normal definition following an inline one in the
22071 case of redefinition of GNU C extern inlines.
22072 It seems reasonable to use AT_specification in this case. */
22073 && !get_AT (old_die, DW_AT_inline))
22074 {
22075 /* Detect and ignore this case, where we are trying to output
22076 something we have already output. */
22077 if (get_AT (old_die, DW_AT_low_pc)
22078 || get_AT (old_die, DW_AT_ranges))
22079 return;
22080
22081 /* If we have no location information, this must be a
22082 partially generated DIE from early dwarf generation.
22083 Fall through and generate it. */
22084 }
22085
22086 /* If the definition comes from the same place as the declaration,
22087 maybe use the old DIE. We always want the DIE for this function
22088 that has the *_pc attributes to be under comp_unit_die so the
22089 debugger can find it. We also need to do this for abstract
22090 instances of inlines, since the spec requires the out-of-line copy
22091 to have the same parent. For local class methods, this doesn't
22092 apply; we just use the old DIE. */
22093 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22094 struct dwarf_file_data * file_index = lookup_filename (s.file);
22095 if ((is_cu_die (old_die->die_parent)
22096 /* This condition fixes the inconsistency/ICE with the
22097 following Fortran test (or some derivative thereof) while
22098 building libgfortran:
22099
22100 module some_m
22101 contains
22102 logical function funky (FLAG)
22103 funky = .true.
22104 end function
22105 end module
22106 */
22107 || (old_die->die_parent
22108 && old_die->die_parent->die_tag == DW_TAG_module)
22109 || context_die == NULL)
22110 && (DECL_ARTIFICIAL (decl)
22111 /* The location attributes may be in the abstract origin
22112 which in the case of LTO might be not available to
22113 look at. */
22114 || get_AT (old_die, DW_AT_abstract_origin)
22115 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22116 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22117 == (unsigned) s.line)
22118 && (!debug_column_info
22119 || s.column == 0
22120 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22121 == (unsigned) s.column)))))
22122 {
22123 subr_die = old_die;
22124
22125 /* Clear out the declaration attribute, but leave the
22126 parameters so they can be augmented with location
22127 information later. Unless this was a declaration, in
22128 which case, wipe out the nameless parameters and recreate
22129 them further down. */
22130 if (remove_AT (subr_die, DW_AT_declaration))
22131 {
22132
22133 remove_AT (subr_die, DW_AT_object_pointer);
22134 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22135 }
22136 }
22137 /* Make a specification pointing to the previously built
22138 declaration. */
22139 else
22140 {
22141 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22142 add_AT_specification (subr_die, old_die);
22143 add_pubname (decl, subr_die);
22144 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22145 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22146 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22147 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22148 if (debug_column_info
22149 && s.column
22150 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22151 != (unsigned) s.column))
22152 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22153
22154 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22155 emit the real type on the definition die. */
22156 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22157 {
22158 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22159 if (die == auto_die || die == decltype_auto_die)
22160 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22161 TYPE_UNQUALIFIED, false, context_die);
22162 }
22163
22164 /* When we process the method declaration, we haven't seen
22165 the out-of-class defaulted definition yet, so we have to
22166 recheck now. */
22167 if ((dwarf_version >= 5 || ! dwarf_strict)
22168 && !get_AT (subr_die, DW_AT_defaulted))
22169 {
22170 int defaulted
22171 = lang_hooks.decls.decl_dwarf_attribute (decl,
22172 DW_AT_defaulted);
22173 if (defaulted != -1)
22174 {
22175 /* Other values must have been handled before. */
22176 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22177 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22178 }
22179 }
22180 }
22181 }
22182 /* Create a fresh DIE for anything else. */
22183 else
22184 {
22185 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22186
22187 if (TREE_PUBLIC (decl))
22188 add_AT_flag (subr_die, DW_AT_external, 1);
22189
22190 add_name_and_src_coords_attributes (subr_die, decl);
22191 add_pubname (decl, subr_die);
22192 if (debug_info_level > DINFO_LEVEL_TERSE)
22193 {
22194 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22195 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22196 TYPE_UNQUALIFIED, false, context_die);
22197 }
22198
22199 add_pure_or_virtual_attribute (subr_die, decl);
22200 if (DECL_ARTIFICIAL (decl))
22201 add_AT_flag (subr_die, DW_AT_artificial, 1);
22202
22203 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22204 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22205
22206 add_alignment_attribute (subr_die, decl);
22207
22208 add_accessibility_attribute (subr_die, decl);
22209 }
22210
22211 /* Unless we have an existing non-declaration DIE, equate the new
22212 DIE. */
22213 if (!old_die || is_declaration_die (old_die))
22214 equate_decl_number_to_die (decl, subr_die);
22215
22216 if (declaration)
22217 {
22218 if (!old_die || !get_AT (old_die, DW_AT_inline))
22219 {
22220 add_AT_flag (subr_die, DW_AT_declaration, 1);
22221
22222 /* If this is an explicit function declaration then generate
22223 a DW_AT_explicit attribute. */
22224 if ((dwarf_version >= 3 || !dwarf_strict)
22225 && lang_hooks.decls.decl_dwarf_attribute (decl,
22226 DW_AT_explicit) == 1)
22227 add_AT_flag (subr_die, DW_AT_explicit, 1);
22228
22229 /* If this is a C++11 deleted special function member then generate
22230 a DW_AT_deleted attribute. */
22231 if ((dwarf_version >= 5 || !dwarf_strict)
22232 && lang_hooks.decls.decl_dwarf_attribute (decl,
22233 DW_AT_deleted) == 1)
22234 add_AT_flag (subr_die, DW_AT_deleted, 1);
22235
22236 /* If this is a C++11 defaulted special function member then
22237 generate a DW_AT_defaulted attribute. */
22238 if (dwarf_version >= 5 || !dwarf_strict)
22239 {
22240 int defaulted
22241 = lang_hooks.decls.decl_dwarf_attribute (decl,
22242 DW_AT_defaulted);
22243 if (defaulted != -1)
22244 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22245 }
22246
22247 /* If this is a C++11 non-static member function with & ref-qualifier
22248 then generate a DW_AT_reference attribute. */
22249 if ((dwarf_version >= 5 || !dwarf_strict)
22250 && lang_hooks.decls.decl_dwarf_attribute (decl,
22251 DW_AT_reference) == 1)
22252 add_AT_flag (subr_die, DW_AT_reference, 1);
22253
22254 /* If this is a C++11 non-static member function with &&
22255 ref-qualifier then generate a DW_AT_reference attribute. */
22256 if ((dwarf_version >= 5 || !dwarf_strict)
22257 && lang_hooks.decls.decl_dwarf_attribute (decl,
22258 DW_AT_rvalue_reference)
22259 == 1)
22260 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22261 }
22262 }
22263 /* For non DECL_EXTERNALs, if range information is available, fill
22264 the DIE with it. */
22265 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22266 {
22267 HOST_WIDE_INT cfa_fb_offset;
22268
22269 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22270
22271 if (!crtl->has_bb_partition)
22272 {
22273 dw_fde_ref fde = fun->fde;
22274 if (fde->dw_fde_begin)
22275 {
22276 /* We have already generated the labels. */
22277 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22278 fde->dw_fde_end, false);
22279 }
22280 else
22281 {
22282 /* Create start/end labels and add the range. */
22283 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22284 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22285 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22286 current_function_funcdef_no);
22287 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22288 current_function_funcdef_no);
22289 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22290 false);
22291 }
22292
22293 #if VMS_DEBUGGING_INFO
22294 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22295 Section 2.3 Prologue and Epilogue Attributes:
22296 When a breakpoint is set on entry to a function, it is generally
22297 desirable for execution to be suspended, not on the very first
22298 instruction of the function, but rather at a point after the
22299 function's frame has been set up, after any language defined local
22300 declaration processing has been completed, and before execution of
22301 the first statement of the function begins. Debuggers generally
22302 cannot properly determine where this point is. Similarly for a
22303 breakpoint set on exit from a function. The prologue and epilogue
22304 attributes allow a compiler to communicate the location(s) to use. */
22305
22306 {
22307 if (fde->dw_fde_vms_end_prologue)
22308 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22309 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22310
22311 if (fde->dw_fde_vms_begin_epilogue)
22312 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22313 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22314 }
22315 #endif
22316
22317 }
22318 else
22319 {
22320 /* Generate pubnames entries for the split function code ranges. */
22321 dw_fde_ref fde = fun->fde;
22322
22323 if (fde->dw_fde_second_begin)
22324 {
22325 if (dwarf_version >= 3 || !dwarf_strict)
22326 {
22327 /* We should use ranges for non-contiguous code section
22328 addresses. Use the actual code range for the initial
22329 section, since the HOT/COLD labels might precede an
22330 alignment offset. */
22331 bool range_list_added = false;
22332 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22333 fde->dw_fde_end, &range_list_added,
22334 false);
22335 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22336 fde->dw_fde_second_end,
22337 &range_list_added, false);
22338 if (range_list_added)
22339 add_ranges (NULL);
22340 }
22341 else
22342 {
22343 /* There is no real support in DW2 for this, so we make
22344 a work-around. First, emit the pub name for the segment
22345 containing the function label. Then make and emit a
22346 simplified subprogram DIE for the second segment with the
22347 name prefixed by __second_sect_of_. We use the same
22348 linkage name for the second die so that gdb will find both
22349 sections when given "b foo". */
22350 const char *name = NULL;
22351 tree decl_name = DECL_NAME (decl);
22352 dw_die_ref seg_die;
22353
22354 /* Do the 'primary' section. */
22355 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22356 fde->dw_fde_end, false);
22357
22358 /* Build a minimal DIE for the secondary section. */
22359 seg_die = new_die (DW_TAG_subprogram,
22360 subr_die->die_parent, decl);
22361
22362 if (TREE_PUBLIC (decl))
22363 add_AT_flag (seg_die, DW_AT_external, 1);
22364
22365 if (decl_name != NULL
22366 && IDENTIFIER_POINTER (decl_name) != NULL)
22367 {
22368 name = dwarf2_name (decl, 1);
22369 if (! DECL_ARTIFICIAL (decl))
22370 add_src_coords_attributes (seg_die, decl);
22371
22372 add_linkage_name (seg_die, decl);
22373 }
22374 gcc_assert (name != NULL);
22375 add_pure_or_virtual_attribute (seg_die, decl);
22376 if (DECL_ARTIFICIAL (decl))
22377 add_AT_flag (seg_die, DW_AT_artificial, 1);
22378
22379 name = concat ("__second_sect_of_", name, NULL);
22380 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22381 fde->dw_fde_second_end, false);
22382 add_name_attribute (seg_die, name);
22383 if (want_pubnames ())
22384 add_pubname_string (name, seg_die);
22385 }
22386 }
22387 else
22388 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22389 false);
22390 }
22391
22392 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22393
22394 /* We define the "frame base" as the function's CFA. This is more
22395 convenient for several reasons: (1) It's stable across the prologue
22396 and epilogue, which makes it better than just a frame pointer,
22397 (2) With dwarf3, there exists a one-byte encoding that allows us
22398 to reference the .debug_frame data by proxy, but failing that,
22399 (3) We can at least reuse the code inspection and interpretation
22400 code that determines the CFA position at various points in the
22401 function. */
22402 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22403 {
22404 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22405 add_AT_loc (subr_die, DW_AT_frame_base, op);
22406 }
22407 else
22408 {
22409 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22410 if (list->dw_loc_next)
22411 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22412 else
22413 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22414 }
22415
22416 /* Compute a displacement from the "steady-state frame pointer" to
22417 the CFA. The former is what all stack slots and argument slots
22418 will reference in the rtl; the latter is what we've told the
22419 debugger about. We'll need to adjust all frame_base references
22420 by this displacement. */
22421 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22422
22423 if (fun->static_chain_decl)
22424 {
22425 /* DWARF requires here a location expression that computes the
22426 address of the enclosing subprogram's frame base. The machinery
22427 in tree-nested.c is supposed to store this specific address in the
22428 last field of the FRAME record. */
22429 const tree frame_type
22430 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22431 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22432
22433 tree fb_expr
22434 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22435 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22436 fb_expr, fb_decl, NULL_TREE);
22437
22438 add_AT_location_description (subr_die, DW_AT_static_link,
22439 loc_list_from_tree (fb_expr, 0, NULL));
22440 }
22441
22442 resolve_variable_values ();
22443 }
22444
22445 /* Generate child DIEs for template parameters. */
22446 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22447 gen_generic_params_dies (decl);
22448
22449 /* Now output descriptions of the arguments for this function. This gets
22450 (unnecessarily?) complex because the DECL_ARGUMENTS list
22451 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22452 `...' at the end of the formal parameter list. In order to find out if
22453 there was a trailing ellipsis or not, we must instead look at the type
22454 associated with the FUNCTION_DECL. This will be a node of type
22455 FUNCTION_TYPE. If the chain of type nodes hanging off this
22456 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22457 an ellipsis at the end. */
22458
22459 /* In the case where we are describing a mere function declaration, all we
22460 need to do here (and all we *can* do here) is to describe the *types* of
22461 its formal parameters. */
22462 if (debug_info_level <= DINFO_LEVEL_TERSE)
22463 ;
22464 else if (declaration)
22465 gen_formal_types_die (decl, subr_die);
22466 else
22467 {
22468 /* Generate DIEs to represent all known formal parameters. */
22469 tree parm = DECL_ARGUMENTS (decl);
22470 tree generic_decl = early_dwarf
22471 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
22472 tree generic_decl_parm = generic_decl
22473 ? DECL_ARGUMENTS (generic_decl)
22474 : NULL;
22475
22476 /* Now we want to walk the list of parameters of the function and
22477 emit their relevant DIEs.
22478
22479 We consider the case of DECL being an instance of a generic function
22480 as well as it being a normal function.
22481
22482 If DECL is an instance of a generic function we walk the
22483 parameters of the generic function declaration _and_ the parameters of
22484 DECL itself. This is useful because we want to emit specific DIEs for
22485 function parameter packs and those are declared as part of the
22486 generic function declaration. In that particular case,
22487 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
22488 That DIE has children DIEs representing the set of arguments
22489 of the pack. Note that the set of pack arguments can be empty.
22490 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
22491 child DIEs.
22492
22493 Otherwise, we just consider the parameters of DECL. */
22494 while (generic_decl_parm || parm)
22495 {
22496 if (generic_decl_parm
22497 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
22498 gen_formal_parameter_pack_die (generic_decl_parm,
22499 parm, subr_die,
22500 &parm);
22501 else if (parm && !POINTER_BOUNDS_P (parm))
22502 {
22503 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
22504
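/* For a method, the first parameter is the implicit `this'
   pointer; point DW_AT_object_pointer at its DIE. */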
22505 if (early_dwarf
22506 && parm == DECL_ARGUMENTS (decl)
22507 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
22508 && parm_die
22509 && (dwarf_version >= 3 || !dwarf_strict))
22510 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
22511
22512 parm = DECL_CHAIN (parm);
22513 }
22514 else if (parm)
22515 parm = DECL_CHAIN (parm);
22516
22517 if (generic_decl_parm)
22518 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
22519 }
22520
22521 /* Decide whether we need an unspecified_parameters DIE at the end.
22522 There are two cases to do this for: 1) the ANSI `...' declaration,
22523 which is detectable when the end of the arg list is not a
22524 void_type_node, and 2) an unprototyped function declaration (not a
22525 definition), which just means that we have no info about the
22526 parameters at all. */
22527 if (early_dwarf)
22528 {
22529 if (prototype_p (TREE_TYPE (decl)))
22530 {
22531 /* This is the prototyped case: check for a trailing `...'. */
22532 if (stdarg_p (TREE_TYPE (decl)))
22533 gen_unspecified_parameters_die (decl, subr_die);
22534 }
22535 else if (DECL_INITIAL (decl) == NULL_TREE)
22536 gen_unspecified_parameters_die (decl, subr_die);
22537 }
22538 }
22539
22540 if (subr_die != old_die)
22541 /* Add the calling convention attribute if requested. */
22542 add_calling_convention_attribute (subr_die, decl);
22543
22544 /* Output Dwarf info for all of the stuff within the body of the function
22545 (if it has one - it may be just a declaration).
22546
22547 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
22548 a function. This BLOCK actually represents the outermost binding contour
22549 for the function, i.e. the contour in which the function's formal
22550 parameters and labels get declared. Curiously, it appears that the front
22551 end doesn't actually put the PARM_DECL nodes for the current function onto
22552 the BLOCK_VARS list for this outer scope; instead they are strung off the
22553 DECL_ARGUMENTS list for the function.
22554
22555 The BLOCK_VARS list for the `outer_scope' does, however, provide us with a
22556 list of the LABEL_DECL nodes for the function, and we output DWARF info
22557 for those in decls_for_scope. Just within the `outer_scope' there will be
22558 a BLOCK node representing the function's outermost pair of curly braces,
22559 and any blocks used for the base and member initializers of a C++
22560 constructor function. */
22561 tree outer_scope = DECL_INITIAL (decl);
22562 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
22563 {
22564 int call_site_note_count = 0;
22565 int tail_call_site_note_count = 0;
22566
22567 /* Emit a DW_TAG_variable DIE for a named return value. */
22568 if (DECL_NAME (DECL_RESULT (decl)))
22569 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
22570
22571 /* The first time through decls_for_scope we will generate the
22572 DIEs for the locals. The second time, we fill in the
22573 location info. */
22574 decls_for_scope (outer_scope, subr_die);
22575
22576 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
22577 {
22578 struct call_arg_loc_node *ca_loc;
22579 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
22580 {
22581 dw_die_ref die = NULL;
22582 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
22583 rtx arg, next_arg;
22584
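/* Each element of the NOTE_VAR_LOCATION list pairs an argument
   location (a REG, a MEM or a DEBUG_PARAMETER_REF) with the
   value passed in it; emit one call-site parameter DIE per
   usable pair. */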
22585 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
22586 ? NOTE_VAR_LOCATION (ca_loc->call_arg_loc_note)
22587 : NULL_RTX);
22588 arg; arg = next_arg)
22589 {
22590 dw_loc_descr_ref reg, val;
22591 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
22592 dw_die_ref cdie, tdie = NULL;
22593
22594 next_arg = XEXP (arg, 1);
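/* If a register entry is immediately followed by a MEM entry
   whose address is based on that same register, the MEM holds
   the data the register points to; consume both entries here
   and emit DW_AT_call_data_value for the MEM below. */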
22595 if (REG_P (XEXP (XEXP (arg, 0), 0))
22596 && next_arg
22597 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
22598 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
22599 && REGNO (XEXP (XEXP (arg, 0), 0))
22600 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
22601 next_arg = XEXP (next_arg, 1);
22602 if (mode == VOIDmode)
22603 {
22604 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
22605 if (mode == VOIDmode)
22606 mode = GET_MODE (XEXP (arg, 0));
22607 }
22608 if (mode == VOIDmode || mode == BLKmode)
22609 continue;
22610 /* Get dynamic information about the call target only if we
22611 have no static information: we cannot generate both
22612 DW_AT_call_origin and DW_AT_call_target
22613 attributes. */
22614 if (ca_loc->symbol_ref == NULL_RTX)
22615 {
22616 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
22617 {
22618 tloc = XEXP (XEXP (arg, 0), 1);
22619 continue;
22620 }
22621 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
22622 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
22623 {
22624 tlocc = XEXP (XEXP (arg, 0), 1);
22625 continue;
22626 }
22627 }
22628 reg = NULL;
22629 if (REG_P (XEXP (XEXP (arg, 0), 0)))
22630 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
22631 VAR_INIT_STATUS_INITIALIZED);
22632 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
22633 {
22634 rtx mem = XEXP (XEXP (arg, 0), 0);
22635 reg = mem_loc_descriptor (XEXP (mem, 0),
22636 get_address_mode (mem),
22637 GET_MODE (mem),
22638 VAR_INIT_STATUS_INITIALIZED);
22639 }
22640 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
22641 == DEBUG_PARAMETER_REF)
22642 {
22643 tree tdecl
22644 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
22645 tdie = lookup_decl_die (tdecl);
22646 if (tdie == NULL)
22647 continue;
22648 }
22649 else
22650 continue;
22651 if (reg == NULL
22652 && GET_CODE (XEXP (XEXP (arg, 0), 0))
22653 != DEBUG_PARAMETER_REF)
22654 continue;
22655 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
22656 VOIDmode,
22657 VAR_INIT_STATUS_INITIALIZED);
22658 if (val == NULL)
22659 continue;
22660 if (die == NULL)
22661 die = gen_call_site_die (decl, subr_die, ca_loc);
22662 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
22663 NULL_TREE);
22664 if (reg != NULL)
22665 add_AT_loc (cdie, DW_AT_location, reg);
22666 else if (tdie != NULL)
22667 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
22668 tdie);
22669 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
22670 if (next_arg != XEXP (arg, 1))
22671 {
22672 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
22673 if (mode == VOIDmode)
22674 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
22675 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
22676 0), 1),
22677 mode, VOIDmode,
22678 VAR_INIT_STATUS_INITIALIZED);
22679 if (val != NULL)
22680 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
22681 val);
22682 }
22683 }
22684 if (die == NULL
22685 && (ca_loc->symbol_ref || tloc))
22686 die = gen_call_site_die (decl, subr_die, ca_loc);
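/* If the call target is only known dynamically, describe it with
   a location expression: DW_AT_call_target for the value live at
   the call, or DW_AT_call_target_clobbered if only a clobbered
   copy is available. */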
22687 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
22688 {
22689 dw_loc_descr_ref tval = NULL;
22690
22691 if (tloc != NULL_RTX)
22692 tval = mem_loc_descriptor (tloc,
22693 GET_MODE (tloc) == VOIDmode
22694 ? Pmode : GET_MODE (tloc),
22695 VOIDmode,
22696 VAR_INIT_STATUS_INITIALIZED);
22697 if (tval)
22698 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
22699 else if (tlocc != NULL_RTX)
22700 {
22701 tval = mem_loc_descriptor (tlocc,
22702 GET_MODE (tlocc) == VOIDmode
22703 ? Pmode : GET_MODE (tlocc),
22704 VOIDmode,
22705 VAR_INIT_STATUS_INITIALIZED);
22706 if (tval)
22707 add_AT_loc (die,
22708 dwarf_AT (DW_AT_call_target_clobbered),
22709 tval);
22710 }
22711 }
22712 if (die != NULL)
22713 {
22714 call_site_note_count++;
22715 if (ca_loc->tail_call_p)
22716 tail_call_site_note_count++;
22717 }
22718 }
22719 }
22720 call_arg_locations = NULL;
22721 call_arg_loc_last = NULL;
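/* If a call-site DIE was emitted for every tail call (and possibly
   for every call) in this function, advertise that with
   DW_AT_call_all_tail_calls or DW_AT_call_all_calls. */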
22722 if (tail_call_site_count >= 0
22723 && tail_call_site_count == tail_call_site_note_count
22724 && (!dwarf_strict || dwarf_version >= 5))
22725 {
22726 if (call_site_count >= 0
22727 && call_site_count == call_site_note_count)
22728 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
22729 else
22730 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
22731 }
22732 call_site_count = -1;
22733 tail_call_site_count = -1;
22734 }
22735
22736 /* Mark used types after we have created DIEs for the function's scopes. */
22737 premark_used_types (DECL_STRUCT_FUNCTION (decl));
22738 }
22739
22740 /* Returns a hash value for X (which really is a die_struct). */
22741
22742 hashval_t
22743 block_die_hasher::hash (die_struct *d)
22744 {
22745 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
22746 }
22747
22748 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
22749 as the decl_id and die_parent of die_struct Y. */
22750
22751 bool
22752 block_die_hasher::equal (die_struct *x, die_struct *y)
22753 {
22754 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
22755 }
22756
22757 /* Return TRUE if DECL, which may have been previously generated as
22758 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
22759 true if decl (or its origin) is either an extern declaration or a
22760 class/namespace scoped declaration.
22761
22762 The declare_in_namespace support causes us to get two DIEs for one
22763 variable, both of which are declarations. We want to avoid
22764 considering one to be a specification, so we must test for
22765 DECLARATION and DW_AT_declaration. */
22766 static inline bool
22767 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
22768 {
22769 return (old_die && TREE_STATIC (decl) && !declaration
22770 && get_AT_flag (old_die, DW_AT_declaration) == 1);
22771 }
22772
22773 /* Return true if DECL is a local static. */
22774
22775 static inline bool
22776 local_function_static (tree decl)
22777 {
22778 gcc_assert (VAR_P (decl));
22779 return TREE_STATIC (decl)
22780 && DECL_CONTEXT (decl)
22781 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
22782 }
22783
22784 /* Generate a DIE to represent a declared data object.
22785 Either DECL or ORIGIN must be non-null. */
22786
22787 static void
22788 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
22789 {
22790 HOST_WIDE_INT off = 0;
22791 tree com_decl;
22792 tree decl_or_origin = decl ? decl : origin;
22793 tree ultimate_origin;
22794 dw_die_ref var_die;
22795 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
22796 bool declaration = (DECL_EXTERNAL (decl_or_origin)
22797 || class_or_namespace_scope_p (context_die));
22798 bool specialization_p = false;
22799 bool no_linkage_name = false;
22800
22801 /* While C++ inline static data members have definitions inside the
22802 class, force the first DIE to be a declaration, then let gen_member_die
22803 reparent it to the class context and call gen_variable_die again
22804 to create the out-of-class DIE for the definition. */
22805 if (!declaration
22806 && old_die == NULL
22807 && decl
22808 && DECL_CONTEXT (decl)
22809 && TYPE_P (DECL_CONTEXT (decl))
22810 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
22811 {
22812 declaration = true;
22813 if (dwarf_version < 5)
22814 no_linkage_name = true;
22815 }
22816
22817 ultimate_origin = decl_ultimate_origin (decl_or_origin);
22818 if (decl || ultimate_origin)
22819 origin = ultimate_origin;
22820 com_decl = fortran_common (decl_or_origin, &off);
22821
22822 /* A symbol in a Fortran common block gets emitted as a child of the common
22823 block DIE, in the form of a data member. */
22824 if (com_decl)
22825 {
22826 dw_die_ref com_die;
22827 dw_loc_list_ref loc = NULL;
22828 die_node com_die_arg;
22829
22830 var_die = lookup_decl_die (decl_or_origin);
22831 if (var_die)
22832 {
22833 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
22834 {
22835 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
22836 if (loc)
22837 {
22838 if (off)
22839 {
22840 /* Optimize the common case. */
22841 if (single_element_loc_list_p (loc)
22842 && loc->expr->dw_loc_opc == DW_OP_addr
22843 && loc->expr->dw_loc_next == NULL
22844 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
22845 == SYMBOL_REF)
22846 {
22847 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22848 loc->expr->dw_loc_oprnd1.v.val_addr
22849 = plus_constant (GET_MODE (x), x , off);
22850 }
22851 else
22852 loc_list_plus_const (loc, off);
22853 }
22854 add_AT_location_description (var_die, DW_AT_location, loc);
22855 remove_AT (var_die, DW_AT_declaration);
22856 }
22857 }
22858 return;
22859 }
22860
22861 if (common_block_die_table == NULL)
22862 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
22863
22864 com_die_arg.decl_id = DECL_UID (com_decl);
22865 com_die_arg.die_parent = context_die;
22866 com_die = common_block_die_table->find (&com_die_arg);
22867 if (! early_dwarf)
22868 loc = loc_list_from_tree (com_decl, 2, NULL);
22869 if (com_die == NULL)
22870 {
22871 const char *cnam
22872 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
22873 die_node **slot;
22874
22875 com_die = new_die (DW_TAG_common_block, context_die, decl);
22876 add_name_and_src_coords_attributes (com_die, com_decl);
22877 if (loc)
22878 {
22879 add_AT_location_description (com_die, DW_AT_location, loc);
22880 /* Avoid sharing the same loc descriptor between
22881 DW_TAG_common_block and DW_TAG_variable. */
22882 loc = loc_list_from_tree (com_decl, 2, NULL);
22883 }
22884 else if (DECL_EXTERNAL (decl_or_origin))
22885 add_AT_flag (com_die, DW_AT_declaration, 1);
22886 if (want_pubnames ())
22887 add_pubname_string (cnam, com_die); /* ??? needed? */
22888 com_die->decl_id = DECL_UID (com_decl);
22889 slot = common_block_die_table->find_slot (com_die, INSERT);
22890 *slot = com_die;
22891 }
22892 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
22893 {
22894 add_AT_location_description (com_die, DW_AT_location, loc);
22895 loc = loc_list_from_tree (com_decl, 2, NULL);
22896 remove_AT (com_die, DW_AT_declaration);
22897 }
22898 var_die = new_die (DW_TAG_variable, com_die, decl);
22899 add_name_and_src_coords_attributes (var_die, decl_or_origin);
22900 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
22901 decl_quals (decl_or_origin), false,
22902 context_die);
22903 add_alignment_attribute (var_die, decl);
22904 add_AT_flag (var_die, DW_AT_external, 1);
22905 if (loc)
22906 {
22907 if (off)
22908 {
22909 /* Optimize the common case. */
22910 if (single_element_loc_list_p (loc)
22911 && loc->expr->dw_loc_opc == DW_OP_addr
22912 && loc->expr->dw_loc_next == NULL
22913 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
22914 {
22915 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
22916 loc->expr->dw_loc_oprnd1.v.val_addr
22917 = plus_constant (GET_MODE (x), x, off);
22918 }
22919 else
22920 loc_list_plus_const (loc, off);
22921 }
22922 add_AT_location_description (var_die, DW_AT_location, loc);
22923 }
22924 else if (DECL_EXTERNAL (decl_or_origin))
22925 add_AT_flag (var_die, DW_AT_declaration, 1);
22926 if (decl)
22927 equate_decl_number_to_die (decl, var_die);
22928 return;
22929 }
22930
22931 if (old_die)
22932 {
22933 if (declaration)
22934 {
22935 /* A declaration that has been previously dumped needs no
22936 further annotations, since it doesn't need location info on
22937 the second pass. */
22938 return;
22939 }
22940 else if (decl_will_get_specification_p (old_die, decl, declaration)
22941 && !get_AT (old_die, DW_AT_specification))
22942 {
22943 /* Fall-thru so we can make a new variable die along with a
22944 DW_AT_specification. */
22945 }
22946 else if (origin && old_die->die_parent != context_die)
22947 {
22948 /* If we will be creating an inlined instance, we need a
22949 new DIE that will get annotated with
22950 DW_AT_abstract_origin. Clear things so we can get a
22951 new DIE. */
22952 gcc_assert (!DECL_ABSTRACT_P (decl));
22953 old_die = NULL;
22954 }
22955 else
22956 {
22957 /* If a DIE was dumped early, it still needs location info.
22958 Skip to where we fill the location bits. */
22959 var_die = old_die;
22960
22961 /* ??? In LTRANS we cannot annotate early created variably
22962 modified type DIEs without copying them and adjusting all
22963 references to them. Thus we dump them again and also add a
22964 reference to them here. */
22965 tree type = TREE_TYPE (decl_or_origin);
22966 if (in_lto_p
22967 && variably_modified_type_p
22968 (type, decl_function_context (decl_or_origin)))
22969 {
22970 if (decl_by_reference_p (decl_or_origin))
22971 add_type_attribute (var_die, TREE_TYPE (type),
22972 TYPE_UNQUALIFIED, false, context_die);
22973 else
22974 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
22975 false, context_die);
22976 }
22977
22978 goto gen_variable_die_location;
22979 }
22980 }
22981
22982 /* For static data members, the declaration in the class is supposed
22983 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
22984 also in DWARF2; the specification should still be DW_TAG_variable
22985 referencing the DW_TAG_member DIE. */
22986 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
22987 var_die = new_die (DW_TAG_member, context_die, decl);
22988 else
22989 var_die = new_die (DW_TAG_variable, context_die, decl);
22990
22991 if (origin != NULL)
22992 add_abstract_origin_attribute (var_die, origin);
22993
22994 /* Loop unrolling can create multiple blocks that refer to the same
22995 static variable, so we must test for the DW_AT_declaration flag.
22996
22997 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
22998 copy decls and set the DECL_ABSTRACT_P flag on them instead of
22999 sharing them.
23000
23001 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23002 else if (decl_will_get_specification_p (old_die, decl, declaration))
23003 {
23004 /* This is a definition of a C++ class level static. */
23005 add_AT_specification (var_die, old_die);
23006 specialization_p = true;
23007 if (DECL_NAME (decl))
23008 {
23009 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23010 struct dwarf_file_data * file_index = lookup_filename (s.file);
23011
23012 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23013 add_AT_file (var_die, DW_AT_decl_file, file_index);
23014
23015 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23016 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23017
23018 if (debug_column_info
23019 && s.column
23020 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23021 != (unsigned) s.column))
23022 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23023
23024 if (old_die->die_tag == DW_TAG_member)
23025 add_linkage_name (var_die, decl);
23026 }
23027 }
23028 else
23029 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23030
23031 if ((origin == NULL && !specialization_p)
23032 || (origin != NULL
23033 && !DECL_ABSTRACT_P (decl_or_origin)
23034 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23035 decl_function_context
23036 (decl_or_origin))))
23037 {
23038 tree type = TREE_TYPE (decl_or_origin);
23039
23040 if (decl_by_reference_p (decl_or_origin))
23041 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23042 context_die);
23043 else
23044 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23045 context_die);
23046 }
23047
23048 if (origin == NULL && !specialization_p)
23049 {
23050 if (TREE_PUBLIC (decl))
23051 add_AT_flag (var_die, DW_AT_external, 1);
23052
23053 if (DECL_ARTIFICIAL (decl))
23054 add_AT_flag (var_die, DW_AT_artificial, 1);
23055
23056 add_alignment_attribute (var_die, decl);
23057
23058 add_accessibility_attribute (var_die, decl);
23059 }
23060
23061 if (declaration)
23062 add_AT_flag (var_die, DW_AT_declaration, 1);
23063
23064 if (decl && (DECL_ABSTRACT_P (decl)
23065 || !old_die || is_declaration_die (old_die)))
23066 equate_decl_number_to_die (decl, var_die);
23067
23068 gen_variable_die_location:
23069 if (! declaration
23070 && (! DECL_ABSTRACT_P (decl_or_origin)
23071 /* Local static vars are shared between all clones/inlines,
23072 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23073 already set. */
23074 || (VAR_P (decl_or_origin)
23075 && TREE_STATIC (decl_or_origin)
23076 && DECL_RTL_SET_P (decl_or_origin))))
23077 {
23078 if (early_dwarf)
23079 add_pubname (decl_or_origin, var_die);
23080 else
23081 add_location_or_const_value_attribute (var_die, decl_or_origin,
23082 decl == NULL);
23083 }
23084 else
23085 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23086
23087 if ((dwarf_version >= 4 || !dwarf_strict)
23088 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23089 DW_AT_const_expr) == 1
23090 && !get_AT (var_die, DW_AT_const_expr)
23091 && !specialization_p)
23092 add_AT_flag (var_die, DW_AT_const_expr, 1);
23093
23094 if (!dwarf_strict)
23095 {
23096 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23097 DW_AT_inline);
23098 if (inl != -1
23099 && !get_AT (var_die, DW_AT_inline)
23100 && !specialization_p)
23101 add_AT_unsigned (var_die, DW_AT_inline, inl);
23102 }
23103 }
23104
23105 /* Generate a DIE to represent a named constant. */
23106
23107 static void
23108 gen_const_die (tree decl, dw_die_ref context_die)
23109 {
23110 dw_die_ref const_die;
23111 tree type = TREE_TYPE (decl);
23112
23113 const_die = lookup_decl_die (decl);
23114 if (const_die)
23115 return;
23116
23117 const_die = new_die (DW_TAG_constant, context_die, decl);
23118 equate_decl_number_to_die (decl, const_die);
23119 add_name_and_src_coords_attributes (const_die, decl);
23120 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23121 if (TREE_PUBLIC (decl))
23122 add_AT_flag (const_die, DW_AT_external, 1);
23123 if (DECL_ARTIFICIAL (decl))
23124 add_AT_flag (const_die, DW_AT_artificial, 1);
23125 tree_add_const_value_attribute_for_decl (const_die, decl);
23126 }
23127
23128 /* Generate a DIE to represent a label identifier. */
23129
23130 static void
23131 gen_label_die (tree decl, dw_die_ref context_die)
23132 {
23133 tree origin = decl_ultimate_origin (decl);
23134 dw_die_ref lbl_die = lookup_decl_die (decl);
23135 rtx insn;
23136 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23137
23138 if (!lbl_die)
23139 {
23140 lbl_die = new_die (DW_TAG_label, context_die, decl);
23141 equate_decl_number_to_die (decl, lbl_die);
23142
23143 if (origin != NULL)
23144 add_abstract_origin_attribute (lbl_die, origin);
23145 else
23146 add_name_and_src_coords_attributes (lbl_die, decl);
23147 }
23148
23149 if (DECL_ABSTRACT_P (decl))
23150 equate_decl_number_to_die (decl, lbl_die);
23151 else if (! early_dwarf)
23152 {
23153 insn = DECL_RTL_IF_SET (decl);
23154
23155 /* Deleted labels are programmer-specified labels which have been
23156 eliminated because of various optimizations. We still emit them
23157 here so that it is possible to put breakpoints on them. */
23158 if (insn
23159 && (LABEL_P (insn)
23160 || ((NOTE_P (insn)
23161 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23162 {
23163 /* When optimization is enabled (via -O) some parts of the compiler
23164 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23165 represent source-level labels which were explicitly declared by
23166 the user. This really shouldn't be happening though, so catch
23167 it if it ever does happen. */
23168 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23169
23170 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23171 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23172 }
23173 else if (insn
23174 && NOTE_P (insn)
23175 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23176 && CODE_LABEL_NUMBER (insn) != -1)
23177 {
23178 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23179 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23180 }
23181 }
23182 }
23183
23184 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23185 attributes to the DIE for a block STMT, to describe where the inlined
23186 function was called from. This is similar to add_src_coords_attributes. */
23187
23188 static inline void
23189 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23190 {
23191 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23192
23193 if (dwarf_version >= 3 || !dwarf_strict)
23194 {
23195 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23196 add_AT_unsigned (die, DW_AT_call_line, s.line);
23197 if (debug_column_info && s.column)
23198 add_AT_unsigned (die, DW_AT_call_column, s.column);
23199 }
23200 }
23201
23202
23203 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23204 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23205
23206 static inline void
23207 add_high_low_attributes (tree stmt, dw_die_ref die)
23208 {
23209 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23210
23211 if (BLOCK_FRAGMENT_CHAIN (stmt)
23212 && (dwarf_version >= 3 || !dwarf_strict))
23213 {
23214 tree chain, superblock = NULL_TREE;
23215 dw_die_ref pdie;
23216 dw_attr_node *attr = NULL;
23217
23218 if (inlined_function_outer_scope_p (stmt))
23219 {
23220 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23221 BLOCK_NUMBER (stmt));
23222 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23223 }
23224
23225 /* Optimize duplicate .debug_ranges lists or even tails of
23226 lists. If this BLOCK has the same ranges as its supercontext,
23227 look up the DW_AT_ranges attribute in the supercontext (and
23228 recursively so), verify that the ranges_table contains the
23229 right values and use it instead of adding a new .debug_ranges entry. */
23230 for (chain = stmt, pdie = die;
23231 BLOCK_SAME_RANGE (chain);
23232 chain = BLOCK_SUPERCONTEXT (chain))
23233 {
23234 dw_attr_node *new_attr;
23235
23236 pdie = pdie->die_parent;
23237 if (pdie == NULL)
23238 break;
23239 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23240 break;
23241 new_attr = get_AT (pdie, DW_AT_ranges);
23242 if (new_attr == NULL
23243 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23244 break;
23245 attr = new_attr;
23246 superblock = BLOCK_SUPERCONTEXT (chain);
23247 }
23248 if (attr != NULL
23249 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23250 == BLOCK_NUMBER (superblock))
23251 && BLOCK_FRAGMENT_CHAIN (superblock))
23252 {
23253 unsigned long off = attr->dw_attr_val.v.val_offset;
23254 unsigned long supercnt = 0, thiscnt = 0;
23255 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23256 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23257 {
23258 ++supercnt;
23259 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23260 == BLOCK_NUMBER (chain));
23261 }
23262 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23263 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23264 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23265 ++thiscnt;
23266 gcc_assert (supercnt >= thiscnt);
23267 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23268 false);
23269 note_rnglist_head (off + supercnt - thiscnt);
23270 return;
23271 }
23272
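/* No shareable range list was found: emit a fresh range list with
   one entry for this block and one for each of its fragments,
   terminated by add_ranges (NULL). */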
23273 unsigned int offset = add_ranges (stmt, true);
23274 add_AT_range_list (die, DW_AT_ranges, offset, false);
23275 note_rnglist_head (offset);
23276
23277 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23278 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23279 do
23280 {
23281 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23282 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23283 chain = BLOCK_FRAGMENT_CHAIN (chain);
23284 }
23285 while (chain);
23286 add_ranges (NULL);
23287 }
23288 else
23289 {
23290 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23291 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23292 BLOCK_NUMBER (stmt));
23293 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23294 BLOCK_NUMBER (stmt));
23295 add_AT_low_high_pc (die, label, label_high, false);
23296 }
23297 }
23298
23299 /* Generate a DIE for a lexical block. */
23300
23301 static void
23302 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23303 {
23304 dw_die_ref old_die = BLOCK_DIE (stmt);
23305 dw_die_ref stmt_die = NULL;
23306 if (!old_die)
23307 {
23308 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23309 BLOCK_DIE (stmt) = stmt_die;
23310 }
23311
23312 if (BLOCK_ABSTRACT (stmt))
23313 {
23314 if (old_die)
23315 {
23316 /* This must have been generated early and it won't even
23317 need location information since it's a DW_AT_inline
23318 function. */
23319 if (flag_checking)
23320 for (dw_die_ref c = context_die; c; c = c->die_parent)
23321 if (c->die_tag == DW_TAG_inlined_subroutine
23322 || c->die_tag == DW_TAG_subprogram)
23323 {
23324 gcc_assert (get_AT (c, DW_AT_inline));
23325 break;
23326 }
23327 return;
23328 }
23329 }
23330 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23331 {
23332 /* If this is an inlined instance, create a new lexical DIE for
23333 anything below to attach DW_AT_abstract_origin to. */
23334 if (old_die)
23335 {
23336 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23337 BLOCK_DIE (stmt) = stmt_die;
23338 old_die = NULL;
23339 }
23340
23341 tree origin = block_ultimate_origin (stmt);
23342 if (origin != NULL_TREE && origin != stmt)
23343 add_abstract_origin_attribute (stmt_die, origin);
23344 }
23345
23346 if (old_die)
23347 stmt_die = old_die;
23348
23349 /* A non-abstract block whose blocks have already been reordered
23350 should have the instruction range for this block. If so, set the
23351 high/low attributes. */
23352 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23353 {
23354 gcc_assert (stmt_die);
23355 add_high_low_attributes (stmt, stmt_die);
23356 }
23357
23358 decls_for_scope (stmt, stmt_die);
23359 }
23360
23361 /* Generate a DIE for an inlined subprogram. */
23362
23363 static void
23364 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23365 {
23366 tree decl;
23367
23368 /* The instance of the function that is effectively being inlined shall not
23369 be abstract. */
23370 gcc_assert (! BLOCK_ABSTRACT (stmt));
23371
23372 decl = block_ultimate_origin (stmt);
23373
23374 /* Make sure any inlined functions are known to be inlineable. */
23375 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23376 || cgraph_function_possibly_inlined_p (decl));
23377
23378 if (! BLOCK_ABSTRACT (stmt))
23379 {
23380 dw_die_ref subr_die
23381 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23382
23383 if (call_arg_locations)
23384 BLOCK_DIE (stmt) = subr_die;
23385 add_abstract_origin_attribute (subr_die, decl);
23386 if (TREE_ASM_WRITTEN (stmt))
23387 add_high_low_attributes (stmt, subr_die);
23388 add_call_src_coords_attributes (stmt, subr_die);
23389
23390 decls_for_scope (stmt, subr_die);
23391 }
23392 }
23393
23394 /* Generate a DIE for a field in a record or structure. CTX is required: see
23395 the comment for VLR_CONTEXT. */
23396
23397 static void
23398 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
23399 {
23400 dw_die_ref decl_die;
23401
23402 if (TREE_TYPE (decl) == error_mark_node)
23403 return;
23404
23405 decl_die = new_die (DW_TAG_member, context_die, decl);
23406 add_name_and_src_coords_attributes (decl_die, decl);
23407 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
23408 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
23409 context_die);
23410
23411 if (DECL_BIT_FIELD_TYPE (decl))
23412 {
23413 add_byte_size_attribute (decl_die, decl);
23414 add_bit_size_attribute (decl_die, decl);
23415 add_bit_offset_attribute (decl_die, decl, ctx);
23416 }
23417
23418 add_alignment_attribute (decl_die, decl);
23419
23420 /* If we have a variant part offset, then we are supposed to process a member
23421 of a QUAL_UNION_TYPE, which is how we represent variant parts in
23422 trees. */
23423 gcc_assert (ctx->variant_part_offset == NULL_TREE
23424 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
23425 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
23426 add_data_member_location_attribute (decl_die, decl, ctx);
23427
23428 if (DECL_ARTIFICIAL (decl))
23429 add_AT_flag (decl_die, DW_AT_artificial, 1);
23430
23431 add_accessibility_attribute (decl_die, decl);
23432
23433 /* Equate decl number to die, so that we can look up this decl later on. */
23434 equate_decl_number_to_die (decl, decl_die);
23435 }
23436
23437 /* Generate a DIE for a pointer to a member type. TYPE can be an
23438 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
23439 pointer to member function. */
23440
23441 static void
23442 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
23443 {
23444 if (lookup_type_die (type))
23445 return;
23446
23447 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
23448 scope_die_for (type, context_die), type);
23449
23450 equate_type_number_to_die (type, ptr_die);
23451 add_AT_die_ref (ptr_die, DW_AT_containing_type,
23452 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
23453 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23454 context_die);
23455 add_alignment_attribute (ptr_die, type);
23456
23457 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
23458 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
23459 {
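/* For a pointer to data member, DW_AT_use_location is evaluated
   with the member-pointer value and the object address pushed on
   the stack, so a single DW_OP_plus yields the member's address. */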
23460 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
23461 add_AT_loc (ptr_die, DW_AT_use_location, op);
23462 }
23463 }
23464
23465 static char *producer_string;
23466
23467 /* Return a heap-allocated producer string, including command-line options
23468 when -grecord-gcc-switches is in effect. */
23469
23470 static char *
23471 gen_producer_string (void)
23472 {
23473 size_t j;
23474 auto_vec<const char *> switches;
23475 const char *language_string = lang_hooks.name;
23476 char *producer, *tail;
23477 const char *p;
23478 size_t len = dwarf_record_gcc_switches ? 0 : 3;
23479 size_t plen = strlen (language_string) + 1 + strlen (version_string);
23480
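/* Collect the saved command-line options worth recording, skipping
   options that do not affect code generation (paths, diagnostics,
   dump controls and the like). */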
23481 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
23482 switch (save_decoded_options[j].opt_index)
23483 {
23484 case OPT_o:
23485 case OPT_d:
23486 case OPT_dumpbase:
23487 case OPT_dumpdir:
23488 case OPT_auxbase:
23489 case OPT_auxbase_strip:
23490 case OPT_quiet:
23491 case OPT_version:
23492 case OPT_v:
23493 case OPT_w:
23494 case OPT_L:
23495 case OPT_D:
23496 case OPT_I:
23497 case OPT_U:
23498 case OPT_SPECIAL_unknown:
23499 case OPT_SPECIAL_ignore:
23500 case OPT_SPECIAL_program_name:
23501 case OPT_SPECIAL_input_file:
23502 case OPT_grecord_gcc_switches:
23503 case OPT__output_pch_:
23504 case OPT_fdiagnostics_show_location_:
23505 case OPT_fdiagnostics_show_option:
23506 case OPT_fdiagnostics_show_caret:
23507 case OPT_fdiagnostics_color_:
23508 case OPT_fverbose_asm:
23509 case OPT____:
23510 case OPT__sysroot_:
23511 case OPT_nostdinc:
23512 case OPT_nostdinc__:
23513 case OPT_fpreprocessed:
23514 case OPT_fltrans_output_list_:
23515 case OPT_fresolution_:
23516 case OPT_fdebug_prefix_map_:
23517 case OPT_fcompare_debug:
23518 /* Ignore these. */
23519 continue;
23520 default:
23521 if (cl_options[save_decoded_options[j].opt_index].flags
23522 & CL_NO_DWARF_RECORD)
23523 continue;
23524 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
23525 == '-');
23526 switch (save_decoded_options[j].canonical_option[0][1])
23527 {
23528 case 'M':
23529 case 'i':
23530 case 'W':
23531 continue;
23532 case 'f':
23533 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
23534 "dump", 4) == 0)
23535 continue;
23536 break;
23537 default:
23538 break;
23539 }
23540 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
23541 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
23542 break;
23543 }
23544
23545 producer = XNEWVEC (char, plen + 1 + len + 1);
23546 tail = producer;
23547 sprintf (tail, "%s %s", language_string, version_string);
23548 tail += plen;
23549
23550 FOR_EACH_VEC_ELT (switches, j, p)
23551 {
23552 len = strlen (p);
23553 *tail = ' ';
23554 memcpy (tail + 1, p, len);
23555 tail += len + 1;
23556 }
23557
23558 *tail = '\0';
23559 return producer;
23560 }
23561
23562 /* Given a C and/or C++ language/version string, return the "highest" one.
23563 C++ is assumed to be "higher" than C in this case. Used for merging
23564 LTO translation unit languages. */
23565 static const char *
23566 highest_c_language (const char *lang1, const char *lang2)
23567 {
23568 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
23569 return "GNU C++17";
23570 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
23571 return "GNU C++14";
23572 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
23573 return "GNU C++11";
23574 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
23575 return "GNU C++98";
23576
23577 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
23578 return "GNU C17";
23579 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
23580 return "GNU C11";
23581 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
23582 return "GNU C99";
23583 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
23584 return "GNU C89";
23585
23586 gcc_unreachable ();
23587 }
23588
23589
23590 /* Generate the DIE for the compilation unit. */
23591
23592 static dw_die_ref
23593 gen_compile_unit_die (const char *filename)
23594 {
23595 dw_die_ref die;
23596 const char *language_string = lang_hooks.name;
23597 int language;
23598
23599 die = new_die (DW_TAG_compile_unit, NULL, NULL);
23600
23601 if (filename)
23602 {
23603 add_name_attribute (die, filename);
23604 /* Don't add cwd for <built-in>. */
23605 if (filename[0] != '<')
23606 add_comp_dir_attribute (die);
23607 }
23608
23609 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
23610
23611 /* If our producer is LTO, try to figure out a common language to use
23612 from the global list of translation units. */
23613 if (strcmp (language_string, "GNU GIMPLE") == 0)
23614 {
23615 unsigned i;
23616 tree t;
23617 const char *common_lang = NULL;
23618
23619 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
23620 {
23621 if (!TRANSLATION_UNIT_LANGUAGE (t))
23622 continue;
23623 if (!common_lang)
23624 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
23625 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
23626 ;
23627 else if (strncmp (common_lang, "GNU C", 5) == 0
23628 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
23629 /* Mixing C and C++ is OK; use C++ in that case. */
23630 common_lang = highest_c_language (common_lang,
23631 TRANSLATION_UNIT_LANGUAGE (t));
23632 else
23633 {
23634 /* Fall back to C. */
23635 common_lang = NULL;
23636 break;
23637 }
23638 }
23639
23640 if (common_lang)
23641 language_string = common_lang;
23642 }
23643
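/* Map the front end's language string onto a DWARF language code,
   using the most precise code the selected DWARF version allows. */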
23644 language = DW_LANG_C;
23645 if (strncmp (language_string, "GNU C", 5) == 0
23646 && ISDIGIT (language_string[5]))
23647 {
23648 language = DW_LANG_C89;
23649 if (dwarf_version >= 3 || !dwarf_strict)
23650 {
23651 if (strcmp (language_string, "GNU C89") != 0)
23652 language = DW_LANG_C99;
23653
23654 if (dwarf_version >= 5 /* || !dwarf_strict */)
23655 if (strcmp (language_string, "GNU C11") == 0
23656 || strcmp (language_string, "GNU C17") == 0)
23657 language = DW_LANG_C11;
23658 }
23659 }
23660 else if (strncmp (language_string, "GNU C++", 7) == 0)
23661 {
23662 language = DW_LANG_C_plus_plus;
23663 if (dwarf_version >= 5 /* || !dwarf_strict */)
23664 {
23665 if (strcmp (language_string, "GNU C++11") == 0)
23666 language = DW_LANG_C_plus_plus_11;
23667 else if (strcmp (language_string, "GNU C++14") == 0)
23668 language = DW_LANG_C_plus_plus_14;
23669 else if (strcmp (language_string, "GNU C++17") == 0)
23670 /* For now. */
23671 language = DW_LANG_C_plus_plus_14;
23672 }
23673 }
23674 else if (strcmp (language_string, "GNU F77") == 0)
23675 language = DW_LANG_Fortran77;
23676 else if (dwarf_version >= 3 || !dwarf_strict)
23677 {
23678 if (strcmp (language_string, "GNU Ada") == 0)
23679 language = DW_LANG_Ada95;
23680 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23681 {
23682 language = DW_LANG_Fortran95;
23683 if (dwarf_version >= 5 /* || !dwarf_strict */)
23684 {
23685 if (strcmp (language_string, "GNU Fortran2003") == 0)
23686 language = DW_LANG_Fortran03;
23687 else if (strcmp (language_string, "GNU Fortran2008") == 0)
23688 language = DW_LANG_Fortran08;
23689 }
23690 }
23691 else if (strcmp (language_string, "GNU Objective-C") == 0)
23692 language = DW_LANG_ObjC;
23693 else if (strcmp (language_string, "GNU Objective-C++") == 0)
23694 language = DW_LANG_ObjC_plus_plus;
23695 else if (dwarf_version >= 5 || !dwarf_strict)
23696 {
23697 if (strcmp (language_string, "GNU Go") == 0)
23698 language = DW_LANG_Go;
23699 }
23700 }
23701 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
23702 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
23703 language = DW_LANG_Fortran90;
23704
23705 add_AT_unsigned (die, DW_AT_language, language);
23706
23707 switch (language)
23708 {
23709 case DW_LANG_Fortran77:
23710 case DW_LANG_Fortran90:
23711 case DW_LANG_Fortran95:
23712 case DW_LANG_Fortran03:
23713 case DW_LANG_Fortran08:
23714 /* Fortran has case-insensitive identifiers and the front-end
23715 lowercases everything. */
23716 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
23717 break;
23718 default:
23719 /* The default DW_ID_case_sensitive doesn't need to be specified. */
23720 break;
23721 }
23722 return die;
23723 }
23724
23725 /* Generate the DIE for a base class. */
23726
23727 static void
23728 gen_inheritance_die (tree binfo, tree access, tree type,
23729 dw_die_ref context_die)
23730 {
23731 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
23732 struct vlr_context ctx = { type, NULL };
23733
23734 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
23735 context_die);
23736 add_data_member_location_attribute (die, binfo, &ctx);
23737
23738 if (BINFO_VIRTUAL_P (binfo))
23739 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
23740
23741 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
23742 children, otherwise the default is DW_ACCESS_public. In DWARF2
23743 the default has always been DW_ACCESS_private. */
23744 if (access == access_public_node)
23745 {
23746 if (dwarf_version == 2
23747 || context_die->die_tag == DW_TAG_class_type)
23748 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
23749 }
23750 else if (access == access_protected_node)
23751 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
23752 else if (dwarf_version > 2
23753 && context_die->die_tag != DW_TAG_class_type)
23754 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
23755 }
23756
23757 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
23758 structure. */
23759 static bool
23760 is_variant_part (tree decl)
23761 {
23762 return (TREE_CODE (decl) == FIELD_DECL
23763 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
23764 }
23765
23766 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
23767 return the FIELD_DECL. Return NULL_TREE otherwise. */
23768
23769 static tree
23770 analyze_discr_in_predicate (tree operand, tree struct_type)
23771 {
23772 bool continue_stripping = true;
23773 while (continue_stripping)
23774 switch (TREE_CODE (operand))
23775 {
23776 CASE_CONVERT:
23777 operand = TREE_OPERAND (operand, 0);
23778 break;
23779 default:
23780 continue_stripping = false;
23781 break;
23782 }
23783
23784 /* Match field access to members of struct_type only. */
23785 if (TREE_CODE (operand) == COMPONENT_REF
23786 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
23787 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
23788 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
23789 return TREE_OPERAND (operand, 1);
23790 else
23791 return NULL_TREE;
23792 }
23793
23794 /* Check that SRC is a constant integer that can be represented as a native
23795 integer constant (either signed or unsigned). If so, store it into DEST and
23796 return true. Return false otherwise. */
23797
23798 static bool
23799 get_discr_value (tree src, dw_discr_value *dest)
23800 {
23801 tree discr_type = TREE_TYPE (src);
23802
23803 if (lang_hooks.types.get_debug_type)
23804 {
23805 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
23806 if (debug_type != NULL)
23807 discr_type = debug_type;
23808 }
23809
23810 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
23811 return false;
23812
23813 /* Signedness can vary between the original type and the debug type. This
23814 can happen for character types in Ada for instance: the character type
23815 used for code generation can be signed, to be compatible with the C one,
23816 but from a debugger point of view, it must be unsigned. */
23817 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
23818 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
23819
23820 if (is_orig_unsigned != is_debug_unsigned)
23821 src = fold_convert (discr_type, src);
23822
23823 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
23824 return false;
23825
23826 dest->pos = is_debug_unsigned;
23827 if (is_debug_unsigned)
23828 dest->v.uval = tree_to_uhwi (src);
23829 else
23830 dest->v.sval = tree_to_shwi (src);
23831
23832 return true;
23833 }
23834
23835 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
23836 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
23837 store NULL_TREE in DISCR_DECL. Otherwise:
23838
23839 - store the discriminant field in STRUCT_TYPE that controls the variant
23840 part to *DISCR_DECL
23841
23842 - put in *DISCR_LISTS_P an array where for each variant, the item
23843 represents the corresponding matching list of discriminant values.
23844
23845 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
23846 the above array.
23847
23848 Note that when the array is allocated (i.e. when the analysis is
23849 successful), it is up to the caller to free the array. */
23850
23851 static void
23852 analyze_variants_discr (tree variant_part_decl,
23853 tree struct_type,
23854 tree *discr_decl,
23855 dw_discr_list_ref **discr_lists_p,
23856 unsigned *discr_lists_length)
23857 {
23858 tree variant_part_type = TREE_TYPE (variant_part_decl);
23859 tree variant;
23860 dw_discr_list_ref *discr_lists;
23861 unsigned i;
23862
23863 /* Compute how many variants there are in this variant part. */
23864 *discr_lists_length = 0;
23865 for (variant = TYPE_FIELDS (variant_part_type);
23866 variant != NULL_TREE;
23867 variant = DECL_CHAIN (variant))
23868 ++*discr_lists_length;
23869
23870 *discr_decl = NULL_TREE;
23871 *discr_lists_p
23872 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
23873 sizeof (**discr_lists_p));
23874 discr_lists = *discr_lists_p;
23875
23876 /* And then analyze all variants to extract discriminant information for all
23877 of them. This analysis is conservative: as soon as we detect something we
23878 do not support, abort everything and pretend we found nothing. */
23879 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
23880 variant != NULL_TREE;
23881 variant = DECL_CHAIN (variant), ++i)
23882 {
23883 tree match_expr = DECL_QUALIFIER (variant);
23884
23885 /* Now, try to analyze the predicate and deduce a discriminant for
23886 it. */
23887 if (match_expr == boolean_true_node)
23888 /* Typically happens for the default variant: it matches all cases that
23889 previous variants rejected. Don't output any matching value for
23890 this one. */
23891 continue;
23892
23893 /* The following loop tries to iterate over each discriminant
23894 possibility: single values or ranges. */
23895 while (match_expr != NULL_TREE)
23896 {
23897 tree next_round_match_expr;
23898 tree candidate_discr = NULL_TREE;
23899 dw_discr_list_ref new_node = NULL;
23900
23901 /* Possibilities are matched one after the other by nested
23902 TRUTH_ORIF_EXPR expressions. Process the current possibility and
23903 continue with the rest at next iteration. */
23904 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
23905 {
23906 next_round_match_expr = TREE_OPERAND (match_expr, 0);
23907 match_expr = TREE_OPERAND (match_expr, 1);
23908 }
23909 else
23910 next_round_match_expr = NULL_TREE;
23911
23912 if (match_expr == boolean_false_node)
23913 /* This sub-expression matches nothing: just wait for the next
23914 one. */
23915 ;
23916
23917 else if (TREE_CODE (match_expr) == EQ_EXPR)
23918 {
23919 /* We are matching: <discr_field> == <integer_cst>
23920 This sub-expression matches a single value. */
23921 tree integer_cst = TREE_OPERAND (match_expr, 1);
23922
23923 candidate_discr
23924 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
23925 struct_type);
23926
23927 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23928 if (!get_discr_value (integer_cst,
23929 &new_node->dw_discr_lower_bound))
23930 goto abort;
23931 new_node->dw_discr_range = false;
23932 }
23933
23934 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
23935 {
23936 /* We are matching:
23937 <discr_field> > <integer_cst>
23938 && <discr_field> < <integer_cst>.
23939 This sub-expression matches the range of values between the
23940 two matched integer constants. Note that comparisons can be
23941 inclusive or exclusive. */
23942 tree candidate_discr_1, candidate_discr_2;
23943 tree lower_cst, upper_cst;
23944 bool lower_cst_included, upper_cst_included;
23945 tree lower_op = TREE_OPERAND (match_expr, 0);
23946 tree upper_op = TREE_OPERAND (match_expr, 1);
23947
23948 /* When the comparison is exclusive, the integer constant is not
23949 the discriminant range bound we are looking for: we will have
23950 to increment or decrement it. */
23951 if (TREE_CODE (lower_op) == GE_EXPR)
23952 lower_cst_included = true;
23953 else if (TREE_CODE (lower_op) == GT_EXPR)
23954 lower_cst_included = false;
23955 else
23956 goto abort;
23957
23958 if (TREE_CODE (upper_op) == LE_EXPR)
23959 upper_cst_included = true;
23960 else if (TREE_CODE (upper_op) == LT_EXPR)
23961 upper_cst_included = false;
23962 else
23963 goto abort;
23964
23965 /* Extract the discriminant from the first operand and check it
23966 is consistent with the same analysis in the second
23967 operand. */
23968 candidate_discr_1
23969 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
23970 struct_type);
23971 candidate_discr_2
23972 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
23973 struct_type);
23974 if (candidate_discr_1 == candidate_discr_2)
23975 candidate_discr = candidate_discr_1;
23976 else
23977 goto abort;
23978
23979 /* Extract bounds from both. */
23980 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
23981 lower_cst = TREE_OPERAND (lower_op, 1);
23982 upper_cst = TREE_OPERAND (upper_op, 1);
23983
23984 if (!lower_cst_included)
23985 lower_cst
23986 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
23987 build_int_cst (TREE_TYPE (lower_cst), 1));
23988 if (!upper_cst_included)
23989 upper_cst
23990 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
23991 build_int_cst (TREE_TYPE (upper_cst), 1));
23992
23993 if (!get_discr_value (lower_cst,
23994 &new_node->dw_discr_lower_bound)
23995 || !get_discr_value (upper_cst,
23996 &new_node->dw_discr_upper_bound))
23997 goto abort;
23998
23999 new_node->dw_discr_range = true;
24000 }
24001
24002 else
24003 /* Unsupported sub-expression: we cannot determine the set of
24004 matching discriminant values. Abort everything. */
24005 goto abort;
24006
24007 /* If the discriminant info is not consistent with what we saw so
24008 far, consider the analysis failed and abort everything. */
24009 if (candidate_discr == NULL_TREE
24010 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24011 goto abort;
24012 else
24013 *discr_decl = candidate_discr;
24014
24015 if (new_node != NULL)
24016 {
24017 new_node->dw_discr_next = discr_lists[i];
24018 discr_lists[i] = new_node;
24019 }
24020 match_expr = next_round_match_expr;
24021 }
24022 }
24023
24024 /* If we reach this point, we could match everything we were interested
24025 in. */
24026 return;
24027
24028 abort:
24029 /* Clean up all data structures and return no result. */
24030 free (*discr_lists_p);
24031 *discr_lists_p = NULL;
24032 *discr_decl = NULL_TREE;
24033 }
24034
24035 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24036 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24037 under CONTEXT_DIE.
24038
24039 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24040 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24041 this type, which are record types, represent the available variants and each
24042 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24043 values are inferred from these attributes.
24044
24045 In trees, the offsets for the fields inside these sub-records are relative
24046 to the variant part itself, whereas the corresponding DIEs should have
24047 offset attributes that are relative to the embedding record base address.
24048 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24049 must be an expression that computes the offset of the variant part to
24050 describe in DWARF. */
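 /* As a hedged illustration (an Ada-like sketch with hypothetical names,
 not taken from the sources), a declaration such as:

 type Rec (Discr : Integer := 0) is record
 case Discr is
 when 0 => F0 : Integer;
 when 1 .. 9 => F1 : Character;
 when others => null;
 end case;
 end record;

 is represented with a FIELD_DECL whose type is a QUAL_UNION_TYPE
 containing one RECORD_TYPE member per alternative, each carrying a
 DECL_QUALIFIER predicate on Discr. The byte positions of F0 and F1
 are relative to the variant part, so VARIANT_PART_OFFSET is needed to
 rebase them onto Rec itself in the emitted DWARF. */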
24051
24052 static void
24053 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24054 dw_die_ref context_die)
24055 {
24056 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24057 tree variant_part_offset = vlr_ctx->variant_part_offset;
24058 struct loc_descr_context ctx = {
24059 vlr_ctx->struct_type, /* context_type */
24060 NULL_TREE, /* base_decl */
24061 NULL, /* dpi */
24062 false, /* placeholder_arg */
24063 false /* placeholder_seen */
24064 };
24065
24066 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24067 NULL_TREE if there is no such field. */
24068 tree discr_decl = NULL_TREE;
24069 dw_discr_list_ref *discr_lists;
24070 unsigned discr_lists_length = 0;
24071 unsigned i;
24072
24073 dw_die_ref dwarf_proc_die = NULL;
24074 dw_die_ref variant_part_die
24075 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24076
24077 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24078
24079 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24080 &discr_decl, &discr_lists, &discr_lists_length);
24081
24082 if (discr_decl != NULL_TREE)
24083 {
24084 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24085
24086 if (discr_die)
24087 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24088 else
24089 /* We have no DIE for the discriminant, so just discard all
 24090 discriminant information in the output. */
24091 discr_decl = NULL_TREE;
24092 }
24093
24094 /* If the offset for this variant part is more complex than a constant,
24095 create a DWARF procedure for it so that we will not have to generate DWARF
24096 expressions for it for each member. */
24097 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24098 && (dwarf_version >= 3 || !dwarf_strict))
24099 {
24100 const tree dwarf_proc_fndecl
24101 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24102 build_function_type (TREE_TYPE (variant_part_offset),
24103 NULL_TREE));
24104 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24105 const dw_loc_descr_ref dwarf_proc_body
24106 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24107
24108 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24109 dwarf_proc_fndecl, context_die);
24110 if (dwarf_proc_die != NULL)
24111 variant_part_offset = dwarf_proc_call;
24112 }
24113
24114 /* Output DIEs for all variants. */
24115 i = 0;
24116 for (tree variant = TYPE_FIELDS (variant_part_type);
24117 variant != NULL_TREE;
24118 variant = DECL_CHAIN (variant), ++i)
24119 {
24120 tree variant_type = TREE_TYPE (variant);
24121 dw_die_ref variant_die;
24122
24123 /* All variants (i.e. members of a variant part) are supposed to be
24124 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24125 under these records. */
24126 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24127
24128 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24129 equate_decl_number_to_die (variant, variant_die);
24130
24131 /* Output discriminant values this variant matches, if any. */
24132 if (discr_decl == NULL || discr_lists[i] == NULL)
 24133 /* If we have no discriminant information at all, or if this is
 24134 probably the default variant, then, as the standard says, don't
 24135 output any discriminant value/list attribute. */
24136 ;
24137 else if (discr_lists[i]->dw_discr_next == NULL
24138 && !discr_lists[i]->dw_discr_range)
24139 /* If there is only one accepted value, don't bother outputting a
24140 list. */
24141 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24142 else
24143 add_discr_list (variant_die, discr_lists[i]);
24144
24145 for (tree member = TYPE_FIELDS (variant_type);
24146 member != NULL_TREE;
24147 member = DECL_CHAIN (member))
24148 {
24149 struct vlr_context vlr_sub_ctx = {
24150 vlr_ctx->struct_type, /* struct_type */
24151 NULL /* variant_part_offset */
24152 };
24153 if (is_variant_part (member))
24154 {
24155 /* All offsets for fields inside variant parts are relative to
24156 the top-level embedding RECORD_TYPE's base address. On the
24157 other hand, offsets in GCC's types are relative to the
24158 nested-most variant part. So we have to sum offsets each time
24159 we recurse. */
24160
24161 vlr_sub_ctx.variant_part_offset
24162 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24163 variant_part_offset, byte_position (member));
24164 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24165 }
24166 else
24167 {
24168 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24169 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24170 }
24171 }
24172 }
24173
24174 free (discr_lists);
24175 }
24176
24177 /* Generate a DIE for a class member. */
24178
24179 static void
24180 gen_member_die (tree type, dw_die_ref context_die)
24181 {
24182 tree member;
24183 tree binfo = TYPE_BINFO (type);
24184
24185 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24186
24187 /* If this is not an incomplete type, output descriptions of each of its
24188 members. Note that as we output the DIEs necessary to represent the
24189 members of this record or union type, we will also be trying to output
24190 DIEs to represent the *types* of those members. However the `type'
24191 function (above) will specifically avoid generating type DIEs for member
24192 types *within* the list of member DIEs for this (containing) type except
24193 for those types (of members) which are explicitly marked as also being
 24194 members of this (containing) type themselves. The g++ front end can
24195 force any given type to be treated as a member of some other (containing)
24196 type by setting the TYPE_CONTEXT of the given (member) type to point to
24197 the TREE node representing the appropriate (containing) type. */
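 /* For example (illustrative only, hypothetical declarations), given

 struct Outer { struct Inner { int i; } member; };

 Inner has its TYPE_CONTEXT set to Outer, so Inner's type DIE is
 emitted as a child of Outer's DIE rather than at compile-unit
 scope. */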
24198
24199 /* First output info about the base classes. */
24200 if (binfo)
24201 {
24202 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24203 int i;
24204 tree base;
24205
24206 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24207 gen_inheritance_die (base,
24208 (accesses ? (*accesses)[i] : access_public_node),
24209 type,
24210 context_die);
24211 }
24212
24213 /* Now output info about the data members and type members. */
24214 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24215 {
24216 struct vlr_context vlr_ctx = { type, NULL_TREE };
24217 bool static_inline_p
24218 = (TREE_STATIC (member)
24219 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24220 != -1));
24221
24222 /* Ignore clones. */
24223 if (DECL_ABSTRACT_ORIGIN (member))
24224 continue;
24225
24226 /* If we thought we were generating minimal debug info for TYPE
24227 and then changed our minds, some of the member declarations
24228 may have already been defined. Don't define them again, but
24229 do put them in the right order. */
24230
24231 if (dw_die_ref child = lookup_decl_die (member))
24232 {
24233 /* Handle inline static data members, which only have in-class
24234 declarations. */
24235 dw_die_ref ref = NULL;
24236 if (child->die_tag == DW_TAG_variable
24237 && child->die_parent == comp_unit_die ())
24238 {
24239 ref = get_AT_ref (child, DW_AT_specification);
24240 /* For C++17 inline static data members followed by redundant
24241 out of class redeclaration, we might get here with
24242 child being the DIE created for the out of class
24243 redeclaration and with its DW_AT_specification being
24244 the DIE created for in-class definition. We want to
24245 reparent the latter, and don't want to create another
24246 DIE with DW_AT_specification in that case, because
24247 we already have one. */
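 /* An illustrative C++17 example of the situation described above
 (hypothetical declarations):

 struct S { static inline int i = 0; }; // in-class definition
 int S::i; // redundant out-of-class redeclaration */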
24248 if (ref
24249 && static_inline_p
24250 && ref->die_tag == DW_TAG_variable
24251 && ref->die_parent == comp_unit_die ()
24252 && get_AT (ref, DW_AT_specification) == NULL)
24253 {
24254 child = ref;
24255 ref = NULL;
24256 static_inline_p = false;
24257 }
24258 }
24259
24260 if (child->die_tag == DW_TAG_variable
24261 && child->die_parent == comp_unit_die ()
24262 && ref == NULL)
24263 {
24264 reparent_child (child, context_die);
24265 if (dwarf_version < 5)
24266 child->die_tag = DW_TAG_member;
24267 }
24268 else
24269 splice_child_die (context_die, child);
24270 }
24271
24272 /* Do not generate standard DWARF for variant parts if we are generating
24273 the corresponding GNAT encodings: DIEs generated for both would
24274 conflict in our mappings. */
24275 else if (is_variant_part (member)
24276 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24277 {
24278 vlr_ctx.variant_part_offset = byte_position (member);
24279 gen_variant_part (member, &vlr_ctx, context_die);
24280 }
24281 else
24282 {
24283 vlr_ctx.variant_part_offset = NULL_TREE;
24284 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24285 }
24286
24287 /* For C++ inline static data members emit immediately a DW_TAG_variable
24288 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24289 DW_AT_specification. */
24290 if (static_inline_p)
24291 {
24292 int old_extern = DECL_EXTERNAL (member);
24293 DECL_EXTERNAL (member) = 0;
24294 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24295 DECL_EXTERNAL (member) = old_extern;
24296 }
24297 }
24298 }
24299
24300 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24301 is set, we pretend that the type was never defined, so we only get the
24302 member DIEs needed by later specification DIEs. */
24303
24304 static void
24305 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24306 enum debug_info_usage usage)
24307 {
24308 if (TREE_ASM_WRITTEN (type))
24309 {
24310 /* Fill in the bound of variable-length fields in late dwarf if
24311 still incomplete. */
24312 if (!early_dwarf && variably_modified_type_p (type, NULL))
24313 for (tree member = TYPE_FIELDS (type);
24314 member;
24315 member = DECL_CHAIN (member))
24316 fill_variable_array_bounds (TREE_TYPE (member));
24317 return;
24318 }
24319
24320 dw_die_ref type_die = lookup_type_die (type);
24321 dw_die_ref scope_die = 0;
24322 int nested = 0;
24323 int complete = (TYPE_SIZE (type)
24324 && (! TYPE_STUB_DECL (type)
24325 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24326 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24327 complete = complete && should_emit_struct_debug (type, usage);
24328
24329 if (type_die && ! complete)
24330 return;
24331
24332 if (TYPE_CONTEXT (type) != NULL_TREE
24333 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24334 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24335 nested = 1;
24336
24337 scope_die = scope_die_for (type, context_die);
24338
 24339 /* Generate child DIEs for template parameters. */
24340 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24341 schedule_generic_params_dies_gen (type);
24342
24343 if (! type_die || (nested && is_cu_die (scope_die)))
24344 /* First occurrence of type or toplevel definition of nested class. */
24345 {
24346 dw_die_ref old_die = type_die;
24347
24348 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24349 ? record_type_tag (type) : DW_TAG_union_type,
24350 scope_die, type);
24351 equate_type_number_to_die (type, type_die);
24352 if (old_die)
24353 add_AT_specification (type_die, old_die);
24354 else
24355 add_name_attribute (type_die, type_tag (type));
24356 }
24357 else
24358 remove_AT (type_die, DW_AT_declaration);
24359
24360 /* If this type has been completed, then give it a byte_size attribute and
24361 then give a list of members. */
24362 if (complete && !ns_decl)
24363 {
24364 /* Prevent infinite recursion in cases where the type of some member of
24365 this type is expressed in terms of this type itself. */
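 /* For instance (illustrative only), with a self-referential type such
 as "struct node { struct node *next; };", emitting the member's
 pointer type would otherwise recurse back into this very record
 type. */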
24366 TREE_ASM_WRITTEN (type) = 1;
24367 add_byte_size_attribute (type_die, type);
24368 add_alignment_attribute (type_die, type);
24369 if (TYPE_STUB_DECL (type) != NULL_TREE)
24370 {
24371 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24372 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24373 }
24374
24375 /* If the first reference to this type was as the return type of an
24376 inline function, then it may not have a parent. Fix this now. */
24377 if (type_die->die_parent == NULL)
24378 add_child_die (scope_die, type_die);
24379
24380 push_decl_scope (type);
24381 gen_member_die (type, type_die);
24382 pop_decl_scope ();
24383
24384 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24385 if (TYPE_ARTIFICIAL (type))
24386 add_AT_flag (type_die, DW_AT_artificial, 1);
24387
24388 /* GNU extension: Record what type our vtable lives in. */
24389 if (TYPE_VFIELD (type))
24390 {
24391 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
24392
24393 gen_type_die (vtype, context_die);
24394 add_AT_die_ref (type_die, DW_AT_containing_type,
24395 lookup_type_die (vtype));
24396 }
24397 }
24398 else
24399 {
24400 add_AT_flag (type_die, DW_AT_declaration, 1);
24401
24402 /* We don't need to do this for function-local types. */
24403 if (TYPE_STUB_DECL (type)
24404 && ! decl_function_context (TYPE_STUB_DECL (type)))
24405 vec_safe_push (incomplete_types, type);
24406 }
24407
24408 if (get_AT (type_die, DW_AT_name))
24409 add_pubtype (type, type_die);
24410 }
24411
24412 /* Generate a DIE for a subroutine _type_. */
24413
24414 static void
24415 gen_subroutine_type_die (tree type, dw_die_ref context_die)
24416 {
24417 tree return_type = TREE_TYPE (type);
24418 dw_die_ref subr_die
24419 = new_die (DW_TAG_subroutine_type,
24420 scope_die_for (type, context_die), type);
24421
24422 equate_type_number_to_die (type, subr_die);
24423 add_prototyped_attribute (subr_die, type);
24424 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
24425 context_die);
24426 add_alignment_attribute (subr_die, type);
24427 gen_formal_types_die (type, subr_die);
24428
24429 if (get_AT (subr_die, DW_AT_name))
24430 add_pubtype (type, subr_die);
24431 if ((dwarf_version >= 5 || !dwarf_strict)
24432 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
24433 add_AT_flag (subr_die, DW_AT_reference, 1);
24434 if ((dwarf_version >= 5 || !dwarf_strict)
24435 && lang_hooks.types.type_dwarf_attribute (type,
24436 DW_AT_rvalue_reference) != -1)
24437 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
24438 }
24439
24440 /* Generate a DIE for a type definition. */
24441
24442 static void
24443 gen_typedef_die (tree decl, dw_die_ref context_die)
24444 {
24445 dw_die_ref type_die;
24446 tree type;
24447
24448 if (TREE_ASM_WRITTEN (decl))
24449 {
24450 if (DECL_ORIGINAL_TYPE (decl))
24451 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
24452 return;
24453 }
24454
24455 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
24456 checks in process_scope_var and modified_type_die), this should be called
24457 only for original types. */
24458 gcc_assert (decl_ultimate_origin (decl) == NULL
24459 || decl_ultimate_origin (decl) == decl);
24460
24461 TREE_ASM_WRITTEN (decl) = 1;
24462 type_die = new_die (DW_TAG_typedef, context_die, decl);
24463
24464 add_name_and_src_coords_attributes (type_die, decl);
24465 if (DECL_ORIGINAL_TYPE (decl))
24466 {
24467 type = DECL_ORIGINAL_TYPE (decl);
24468 if (type == error_mark_node)
24469 return;
24470
24471 gcc_assert (type != TREE_TYPE (decl));
24472 equate_type_number_to_die (TREE_TYPE (decl), type_die);
24473 }
24474 else
24475 {
24476 type = TREE_TYPE (decl);
24477 if (type == error_mark_node)
24478 return;
24479
24480 if (is_naming_typedef_decl (TYPE_NAME (type)))
24481 {
24482 /* Here, we are in the case of decl being a typedef naming
 24483 an anonymous type, e.g.:
24484 typedef struct {...} foo;
24485 In that case TREE_TYPE (decl) is not a typedef variant
24486 type and TYPE_NAME of the anonymous type is set to the
24487 TYPE_DECL of the typedef. This construct is emitted by
24488 the C++ FE.
24489
24490 TYPE is the anonymous struct named by the typedef
24491 DECL. As we need the DW_AT_type attribute of the
24492 DW_TAG_typedef to point to the DIE of TYPE, let's
24493 generate that DIE right away. add_type_attribute
24494 called below will then pick (via lookup_type_die) that
24495 anonymous struct DIE. */
24496 if (!TREE_ASM_WRITTEN (type))
24497 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
24498
24499 /* This is a GNU Extension. We are adding a
24500 DW_AT_linkage_name attribute to the DIE of the
24501 anonymous struct TYPE. The value of that attribute
24502 is the name of the typedef decl naming the anonymous
24503 struct. This greatly eases the work of consumers of
24504 this debug info. */
24505 add_linkage_name_raw (lookup_type_die (type), decl);
24506 }
24507 }
24508
24509 add_type_attribute (type_die, type, decl_quals (decl), false,
24510 context_die);
24511
24512 if (is_naming_typedef_decl (decl))
24513 /* We want that all subsequent calls to lookup_type_die with
24514 TYPE in argument yield the DW_TAG_typedef we have just
24515 created. */
24516 equate_type_number_to_die (type, type_die);
24517
24518 add_alignment_attribute (type_die, TREE_TYPE (decl));
24519
24520 add_accessibility_attribute (type_die, decl);
24521
24522 if (DECL_ABSTRACT_P (decl))
24523 equate_decl_number_to_die (decl, type_die);
24524
24525 if (get_AT (type_die, DW_AT_name))
24526 add_pubtype (decl, type_die);
24527 }
24528
24529 /* Generate a DIE for a struct, class, enum or union type. */
24530
24531 static void
24532 gen_tagged_type_die (tree type,
24533 dw_die_ref context_die,
24534 enum debug_info_usage usage)
24535 {
24536 int need_pop;
24537
24538 if (type == NULL_TREE
24539 || !is_tagged_type (type))
24540 return;
24541
24542 if (TREE_ASM_WRITTEN (type))
24543 need_pop = 0;
24544 /* If this is a nested type whose containing class hasn't been written
24545 out yet, writing it out will cover this one, too. This does not apply
24546 to instantiations of member class templates; they need to be added to
24547 the containing class as they are generated. FIXME: This hurts the
24548 idea of combining type decls from multiple TUs, since we can't predict
24549 what set of template instantiations we'll get. */
24550 else if (TYPE_CONTEXT (type)
24551 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24552 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
24553 {
24554 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
24555
24556 if (TREE_ASM_WRITTEN (type))
24557 return;
24558
24559 /* If that failed, attach ourselves to the stub. */
24560 push_decl_scope (TYPE_CONTEXT (type));
24561 context_die = lookup_type_die (TYPE_CONTEXT (type));
24562 need_pop = 1;
24563 }
24564 else if (TYPE_CONTEXT (type) != NULL_TREE
24565 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
24566 {
24567 /* If this type is local to a function that hasn't been written
24568 out yet, use a NULL context for now; it will be fixed up in
24569 decls_for_scope. */
24570 context_die = lookup_decl_die (TYPE_CONTEXT (type));
24571 /* A declaration DIE doesn't count; nested types need to go in the
24572 specification. */
24573 if (context_die && is_declaration_die (context_die))
24574 context_die = NULL;
24575 need_pop = 0;
24576 }
24577 else
24578 {
24579 context_die = declare_in_namespace (type, context_die);
24580 need_pop = 0;
24581 }
24582
24583 if (TREE_CODE (type) == ENUMERAL_TYPE)
24584 {
24585 /* This might have been written out by the call to
24586 declare_in_namespace. */
24587 if (!TREE_ASM_WRITTEN (type))
24588 gen_enumeration_type_die (type, context_die);
24589 }
24590 else
24591 gen_struct_or_union_type_die (type, context_die, usage);
24592
24593 if (need_pop)
24594 pop_decl_scope ();
24595
24596 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
24597 it up if it is ever completed. gen_*_type_die will set it for us
24598 when appropriate. */
24599 }
24600
24601 /* Generate a type description DIE. */
24602
24603 static void
24604 gen_type_die_with_usage (tree type, dw_die_ref context_die,
24605 enum debug_info_usage usage)
24606 {
24607 struct array_descr_info info;
24608
24609 if (type == NULL_TREE || type == error_mark_node)
24610 return;
24611
24612 if (flag_checking && type)
24613 verify_type (type);
24614
24615 if (TYPE_NAME (type) != NULL_TREE
24616 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
24617 && is_redundant_typedef (TYPE_NAME (type))
24618 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
24619 /* The DECL of this type is a typedef we don't want to emit debug
24620 info for but we want debug info for its underlying typedef.
 24621 This can happen, e.g., for the injected-class-name of a C++
24622 type. */
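 /* For example (illustrative only), within "struct S { };" the class
 name S is injected as an artificial member typedef of S; we generate
 debug info for the underlying class type rather than for that
 redundant typedef. */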
24623 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
24624
24625 /* If TYPE is a typedef type variant, let's generate debug info
24626 for the parent typedef which TYPE is a type of. */
24627 if (typedef_variant_p (type))
24628 {
24629 if (TREE_ASM_WRITTEN (type))
24630 return;
24631
24632 tree name = TYPE_NAME (type);
24633 tree origin = decl_ultimate_origin (name);
24634 if (origin != NULL && origin != name)
24635 {
24636 gen_decl_die (origin, NULL, NULL, context_die);
24637 return;
24638 }
24639
24640 /* Prevent broken recursion; we can't hand off to the same type. */
24641 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
24642
24643 /* Give typedefs the right scope. */
24644 context_die = scope_die_for (type, context_die);
24645
24646 TREE_ASM_WRITTEN (type) = 1;
24647
24648 gen_decl_die (name, NULL, NULL, context_die);
24649 return;
24650 }
24651
24652 /* If type is an anonymous tagged type named by a typedef, let's
24653 generate debug info for the typedef. */
24654 if (is_naming_typedef_decl (TYPE_NAME (type)))
24655 {
24656 /* Use the DIE of the containing namespace as the parent DIE of
24657 the type description DIE we want to generate. */
24658 if (DECL_CONTEXT (TYPE_NAME (type))
24659 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
24660 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
24661
24662 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
24663 return;
24664 }
24665
24666 if (lang_hooks.types.get_debug_type)
24667 {
24668 tree debug_type = lang_hooks.types.get_debug_type (type);
24669
24670 if (debug_type != NULL_TREE && debug_type != type)
24671 {
24672 gen_type_die_with_usage (debug_type, context_die, usage);
24673 return;
24674 }
24675 }
24676
24677 /* We are going to output a DIE to represent the unqualified version
24678 of this type (i.e. without any const or volatile qualifiers) so
24679 get the main variant (i.e. the unqualified version) of this type
24680 now. (Vectors and arrays are special because the debugging info is in the
24681 cloned type itself. Similarly function/method types can contain extra
24682 ref-qualification). */
24683 if (TREE_CODE (type) == FUNCTION_TYPE
24684 || TREE_CODE (type) == METHOD_TYPE)
24685 {
24686 /* For function/method types, can't use type_main_variant here,
24687 because that can have different ref-qualifiers for C++,
24688 but try to canonicalize. */
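 /* For instance (illustrative only), the member function declarations
 "void f () &;" and "void f () &&;" give METHOD_TYPEs that share a
 main variant but differ in ref-qualification, which is why we search
 the variant list below instead of taking TYPE_MAIN_VARIANT
 directly. */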
24689 tree main = TYPE_MAIN_VARIANT (type);
24690 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
24691 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
24692 && check_base_type (t, main)
24693 && check_lang_type (t, type))
24694 {
24695 type = t;
24696 break;
24697 }
24698 }
24699 else if (TREE_CODE (type) != VECTOR_TYPE
24700 && TREE_CODE (type) != ARRAY_TYPE)
24701 type = type_main_variant (type);
24702
24703 /* If this is an array type with hidden descriptor, handle it first. */
24704 if (!TREE_ASM_WRITTEN (type)
24705 && lang_hooks.types.get_array_descr_info)
24706 {
24707 memset (&info, 0, sizeof (info));
24708 if (lang_hooks.types.get_array_descr_info (type, &info))
24709 {
24710 /* Fortran sometimes emits array types with no dimension. */
24711 gcc_assert (info.ndimensions >= 0
24712 && (info.ndimensions
24713 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
24714 gen_descr_array_type_die (type, &info, context_die);
24715 TREE_ASM_WRITTEN (type) = 1;
24716 return;
24717 }
24718 }
24719
24720 if (TREE_ASM_WRITTEN (type))
24721 {
24722 /* Variable-length types may be incomplete even if
24723 TREE_ASM_WRITTEN. For such types, fall through to
24724 gen_array_type_die() and possibly fill in
24725 DW_AT_{upper,lower}_bound attributes. */
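 /* A hypothetical example: in C, "void f (int n) { struct { int a[n]; } s; }"
 produces a RECORD_TYPE whose array member has a variable bound; its
 DW_AT_upper_bound can only be filled in during late dwarf generation,
 so we fall through even though TREE_ASM_WRITTEN is already set. */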
24726 if ((TREE_CODE (type) != ARRAY_TYPE
24727 && TREE_CODE (type) != RECORD_TYPE
24728 && TREE_CODE (type) != UNION_TYPE
24729 && TREE_CODE (type) != QUAL_UNION_TYPE)
24730 || !variably_modified_type_p (type, NULL))
24731 return;
24732 }
24733
24734 switch (TREE_CODE (type))
24735 {
24736 case ERROR_MARK:
24737 break;
24738
24739 case POINTER_TYPE:
24740 case REFERENCE_TYPE:
24741 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
24742 ensures that the gen_type_die recursion will terminate even if the
24743 type is recursive. Recursive types are possible in Ada. */
24744 /* ??? We could perhaps do this for all types before the switch
24745 statement. */
24746 TREE_ASM_WRITTEN (type) = 1;
24747
24748 /* For these types, all that is required is that we output a DIE (or a
24749 set of DIEs) to represent the "basis" type. */
24750 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24751 DINFO_USAGE_IND_USE);
24752 break;
24753
24754 case OFFSET_TYPE:
24755 /* This code is used for C++ pointer-to-data-member types.
24756 Output a description of the relevant class type. */
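 /* E.g. (illustrative only), for "int S::*p;" this outputs a DIE for S
 (the containing class) and for int (the pointed-to type) before the
 DW_TAG_ptr_to_member_type DIE itself. */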
24757 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
24758 DINFO_USAGE_IND_USE);
24759
24760 /* Output a description of the type of the object pointed to. */
24761 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24762 DINFO_USAGE_IND_USE);
24763
24764 /* Now output a DIE to represent this pointer-to-data-member type
24765 itself. */
24766 gen_ptr_to_mbr_type_die (type, context_die);
24767 break;
24768
24769 case FUNCTION_TYPE:
24770 /* Force out return type (in case it wasn't forced out already). */
24771 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24772 DINFO_USAGE_DIR_USE);
24773 gen_subroutine_type_die (type, context_die);
24774 break;
24775
24776 case METHOD_TYPE:
24777 /* Force out return type (in case it wasn't forced out already). */
24778 gen_type_die_with_usage (TREE_TYPE (type), context_die,
24779 DINFO_USAGE_DIR_USE);
24780 gen_subroutine_type_die (type, context_die);
24781 break;
24782
24783 case ARRAY_TYPE:
24784 case VECTOR_TYPE:
24785 gen_array_type_die (type, context_die);
24786 break;
24787
24788 case ENUMERAL_TYPE:
24789 case RECORD_TYPE:
24790 case UNION_TYPE:
24791 case QUAL_UNION_TYPE:
24792 gen_tagged_type_die (type, context_die, usage);
24793 return;
24794
24795 case VOID_TYPE:
24796 case INTEGER_TYPE:
24797 case REAL_TYPE:
24798 case FIXED_POINT_TYPE:
24799 case COMPLEX_TYPE:
24800 case BOOLEAN_TYPE:
24801 case POINTER_BOUNDS_TYPE:
24802 /* No DIEs needed for fundamental types. */
24803 break;
24804
24805 case NULLPTR_TYPE:
24806 case LANG_TYPE:
24807 /* Just use DW_TAG_unspecified_type. */
24808 {
24809 dw_die_ref type_die = lookup_type_die (type);
24810 if (type_die == NULL)
24811 {
24812 tree name = TYPE_IDENTIFIER (type);
24813 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
24814 type);
24815 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
24816 equate_type_number_to_die (type, type_die);
24817 }
24818 }
24819 break;
24820
24821 default:
24822 if (is_cxx_auto (type))
24823 {
24824 tree name = TYPE_IDENTIFIER (type);
24825 dw_die_ref *die = (name == get_identifier ("auto")
24826 ? &auto_die : &decltype_auto_die);
24827 if (!*die)
24828 {
24829 *die = new_die (DW_TAG_unspecified_type,
24830 comp_unit_die (), NULL_TREE);
24831 add_name_attribute (*die, IDENTIFIER_POINTER (name));
24832 }
24833 equate_type_number_to_die (type, *die);
24834 break;
24835 }
24836 gcc_unreachable ();
24837 }
24838
24839 TREE_ASM_WRITTEN (type) = 1;
24840 }
24841
24842 static void
24843 gen_type_die (tree type, dw_die_ref context_die)
24844 {
24845 if (type != error_mark_node)
24846 {
24847 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
24848 if (flag_checking)
24849 {
24850 dw_die_ref die = lookup_type_die (type);
24851 if (die)
24852 check_die (die);
24853 }
24854 }
24855 }
24856
24857 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
24858 things which are local to the given block. */
24859
24860 static void
24861 gen_block_die (tree stmt, dw_die_ref context_die)
24862 {
24863 int must_output_die = 0;
24864 bool inlined_func;
24865
24866 /* Ignore blocks that are NULL. */
24867 if (stmt == NULL_TREE)
24868 return;
24869
24870 inlined_func = inlined_function_outer_scope_p (stmt);
24871
24872 /* If the block is one fragment of a non-contiguous block, do not
24873 process the variables, since they will have been done by the
24874 origin block. Do process subblocks. */
24875 if (BLOCK_FRAGMENT_ORIGIN (stmt))
24876 {
24877 tree sub;
24878
24879 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
24880 gen_block_die (sub, context_die);
24881
24882 return;
24883 }
24884
24885 /* Determine if we need to output any Dwarf DIEs at all to represent this
24886 block. */
24887 if (inlined_func)
24888 /* The outer scopes for inlinings *must* always be represented. We
24889 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
24890 must_output_die = 1;
24891 else
24892 {
24893 /* Determine if this block directly contains any "significant"
24894 local declarations which we will need to output DIEs for. */
24895 if (debug_info_level > DINFO_LEVEL_TERSE)
24896 /* We are not in terse mode so *any* local declaration counts
24897 as being a "significant" one. */
24898 must_output_die = ((BLOCK_VARS (stmt) != NULL
24899 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
24900 && (TREE_USED (stmt)
24901 || TREE_ASM_WRITTEN (stmt)
24902 || BLOCK_ABSTRACT (stmt)));
24903 else if ((TREE_USED (stmt)
24904 || TREE_ASM_WRITTEN (stmt)
24905 || BLOCK_ABSTRACT (stmt))
24906 && !dwarf2out_ignore_block (stmt))
24907 must_output_die = 1;
24908 }
24909
24910 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
24911 DIE for any block which contains no significant local declarations at
24912 all. Rather, in such cases we just call `decls_for_scope' so that any
24913 needed Dwarf info for any sub-blocks will get properly generated. Note
24914 that in terse mode, our definition of what constitutes a "significant"
24915 local declaration gets restricted to include only inlined function
24916 instances and local (nested) function definitions. */
24917 if (must_output_die)
24918 {
24919 if (inlined_func)
24920 {
24921 /* If STMT block is abstract, that means we have been called
24922 indirectly from dwarf2out_abstract_function.
 24923 That function rightfully marks the descendant blocks (of
24924 the abstract function it is dealing with) as being abstract,
24925 precisely to prevent us from emitting any
 24926 DW_TAG_inlined_subroutine DIE as a descendant
24927 of an abstract function instance. So in that case, we should
24928 not call gen_inlined_subroutine_die.
24929
24930 Later though, when cgraph asks dwarf2out to emit info
24931 for the concrete instance of the function decl into which
 24932 the concrete instance of STMT got inlined, the latter will lead
24933 to the generation of a DW_TAG_inlined_subroutine DIE. */
24934 if (! BLOCK_ABSTRACT (stmt))
24935 gen_inlined_subroutine_die (stmt, context_die);
24936 }
24937 else
24938 gen_lexical_block_die (stmt, context_die);
24939 }
24940 else
24941 decls_for_scope (stmt, context_die);
24942 }
24943
24944 /* Process variable DECL (or variable with origin ORIGIN) within
24945 block STMT and add it to CONTEXT_DIE. */
24946 static void
24947 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
24948 {
24949 dw_die_ref die;
24950 tree decl_or_origin = decl ? decl : origin;
24951
24952 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
24953 die = lookup_decl_die (decl_or_origin);
24954 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
24955 {
24956 if (TYPE_DECL_IS_STUB (decl_or_origin))
24957 die = lookup_type_die (TREE_TYPE (decl_or_origin));
24958 else
24959 die = lookup_decl_die (decl_or_origin);
24960 /* Avoid re-creating the DIE late if it was optimized as unused early. */
24961 if (! die && ! early_dwarf)
24962 return;
24963 }
24964 else
24965 die = NULL;
24966
24967 /* Avoid creating DIEs for local typedefs and concrete static variables that
24968 will only be pruned later. */
24969 if ((origin || decl_ultimate_origin (decl))
24970 && (TREE_CODE (decl_or_origin) == TYPE_DECL
24971 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
24972 {
24973 origin = decl_ultimate_origin (decl_or_origin);
24974 if (decl && VAR_P (decl) && die != NULL)
24975 {
24976 die = lookup_decl_die (origin);
24977 if (die != NULL)
24978 equate_decl_number_to_die (decl, die);
24979 }
24980 return;
24981 }
24982
24983 if (die != NULL && die->die_parent == NULL)
24984 add_child_die (context_die, die);
24985 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
24986 {
24987 if (early_dwarf)
24988 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
24989 stmt, context_die);
24990 }
24991 else
24992 {
24993 if (decl && DECL_P (decl))
24994 {
24995 die = lookup_decl_die (decl);
24996
24997 /* Early created DIEs do not have a parent as the decls refer
24998 to the function as DECL_CONTEXT rather than the BLOCK. */
24999 if (die && die->die_parent == NULL)
25000 {
25001 gcc_assert (in_lto_p);
25002 add_child_die (context_die, die);
25003 }
25004 }
25005
25006 gen_decl_die (decl, origin, NULL, context_die);
25007 }
25008 }
25009
25010 /* Generate all of the decls declared within a given scope and (recursively)
25011 all of its sub-blocks. */
25012
25013 static void
25014 decls_for_scope (tree stmt, dw_die_ref context_die)
25015 {
25016 tree decl;
25017 unsigned int i;
25018 tree subblocks;
25019
25020 /* Ignore NULL blocks. */
25021 if (stmt == NULL_TREE)
25022 return;
25023
25024 /* Output the DIEs to represent all of the data objects and typedefs
25025 declared directly within this block but not within any nested
25026 sub-blocks. Also, nested function and tag DIEs have been
25027 generated with a parent of NULL; fix that up now. We don't
25028 have to do this if we're at -g1. */
25029 if (debug_info_level > DINFO_LEVEL_TERSE)
25030 {
25031 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25032 process_scope_var (stmt, decl, NULL_TREE, context_die);
25033 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
 25034 origin; avoid doing this twice, as we have no good way to see
25035 if we've done it once already. */
25036 if (! early_dwarf)
25037 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25038 {
25039 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25040 if (decl == current_function_decl)
 25041 /* Ignore declarations of the current function: while they
 25042 are declarations, gen_subprogram_die would treat them
 25043 as definitions again, because they are equal to
 25044 current_function_decl, and would endlessly recurse. */;
25045 else if (TREE_CODE (decl) == FUNCTION_DECL)
25046 process_scope_var (stmt, decl, NULL_TREE, context_die);
25047 else
25048 process_scope_var (stmt, NULL_TREE, decl, context_die);
25049 }
25050 }
25051
25052 /* Even if we're at -g1, we need to process the subblocks in order to get
25053 inlined call information. */
25054
25055 /* Output the DIEs to represent all sub-blocks (and the items declared
25056 therein) of this block. */
25057 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25058 subblocks != NULL;
25059 subblocks = BLOCK_CHAIN (subblocks))
25060 gen_block_die (subblocks, context_die);
25061 }
25062
25063 /* Is this a typedef we can avoid emitting? */
25064
25065 bool
25066 is_redundant_typedef (const_tree decl)
25067 {
25068 if (TYPE_DECL_IS_STUB (decl))
25069 return true;
25070
25071 if (DECL_ARTIFICIAL (decl)
25072 && DECL_CONTEXT (decl)
25073 && is_tagged_type (DECL_CONTEXT (decl))
25074 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25075 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25076 /* Also ignore the artificial member typedef for the class name. */
25077 return true;
25078
25079 return false;
25080 }
25081
25082 /* Return TRUE if TYPE is a typedef that names a type for linkage
25083 purposes. This kind of typedefs is produced by the C++ FE for
25084 constructs like:
25085
25086 typedef struct {...} foo;
25087
25088 In that case, there is no typedef variant type produced for foo.
25089 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25090 struct type. */
25091
25092 static bool
25093 is_naming_typedef_decl (const_tree decl)
25094 {
25095 if (decl == NULL_TREE
25096 || TREE_CODE (decl) != TYPE_DECL
25097 || DECL_NAMELESS (decl)
25098 || !is_tagged_type (TREE_TYPE (decl))
25099 || DECL_IS_BUILTIN (decl)
25100 || is_redundant_typedef (decl)
25101 /* It looks like Ada produces TYPE_DECLs that are very similar
25102 to C++ naming typedefs but that have different
 25103 semantics. Let's be specific to C++ for now. */
25104 || !is_cxx (decl))
25105 return FALSE;
25106
25107 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25108 && TYPE_NAME (TREE_TYPE (decl)) == decl
25109 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25110 != TYPE_NAME (TREE_TYPE (decl))));
25111 }
25112
25113 /* Looks up the DIE for a context. */
25114
25115 static inline dw_die_ref
25116 lookup_context_die (tree context)
25117 {
25118 if (context)
25119 {
25120 /* Find die that represents this context. */
25121 if (TYPE_P (context))
25122 {
25123 context = TYPE_MAIN_VARIANT (context);
25124 dw_die_ref ctx = lookup_type_die (context);
25125 if (!ctx)
25126 return NULL;
25127 return strip_naming_typedef (context, ctx);
25128 }
25129 else
25130 return lookup_decl_die (context);
25131 }
25132 return comp_unit_die ();
25133 }
25134
25135 /* Returns the DIE for a context. */
25136
25137 static inline dw_die_ref
25138 get_context_die (tree context)
25139 {
25140 if (context)
25141 {
25142 /* Find die that represents this context. */
25143 if (TYPE_P (context))
25144 {
25145 context = TYPE_MAIN_VARIANT (context);
25146 return strip_naming_typedef (context, force_type_die (context));
25147 }
25148 else
25149 return force_decl_die (context);
25150 }
25151 return comp_unit_die ();
25152 }
25153
25154 /* Returns the DIE for decl. A DIE will always be returned. */
25155
25156 static dw_die_ref
25157 force_decl_die (tree decl)
25158 {
25159 dw_die_ref decl_die;
25160 unsigned saved_external_flag;
25161 tree save_fn = NULL_TREE;
25162 decl_die = lookup_decl_die (decl);
25163 if (!decl_die)
25164 {
25165 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25166
25167 decl_die = lookup_decl_die (decl);
25168 if (decl_die)
25169 return decl_die;
25170
25171 switch (TREE_CODE (decl))
25172 {
25173 case FUNCTION_DECL:
25174 /* Clear current_function_decl, so that gen_subprogram_die thinks
25175 that this is a declaration. At this point, we just want to force
 25176 a declaration DIE. */
25177 save_fn = current_function_decl;
25178 current_function_decl = NULL_TREE;
25179 gen_subprogram_die (decl, context_die);
25180 current_function_decl = save_fn;
25181 break;
25182
25183 case VAR_DECL:
 25184 /* Set the external flag to force a declaration DIE. Restore it after
 25185 the gen_decl_die() call. */
25186 saved_external_flag = DECL_EXTERNAL (decl);
25187 DECL_EXTERNAL (decl) = 1;
25188 gen_decl_die (decl, NULL, NULL, context_die);
25189 DECL_EXTERNAL (decl) = saved_external_flag;
25190 break;
25191
25192 case NAMESPACE_DECL:
25193 if (dwarf_version >= 3 || !dwarf_strict)
25194 dwarf2out_decl (decl);
25195 else
25196 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25197 decl_die = comp_unit_die ();
25198 break;
25199
25200 case TRANSLATION_UNIT_DECL:
25201 decl_die = comp_unit_die ();
25202 break;
25203
25204 default:
25205 gcc_unreachable ();
25206 }
25207
25208 /* We should be able to find the DIE now. */
25209 if (!decl_die)
25210 decl_die = lookup_decl_die (decl);
25211 gcc_assert (decl_die);
25212 }
25213
25214 return decl_die;
25215 }
25216
25217 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25218 always returned. */
25219
25220 static dw_die_ref
25221 force_type_die (tree type)
25222 {
25223 dw_die_ref type_die;
25224
25225 type_die = lookup_type_die (type);
25226 if (!type_die)
25227 {
25228 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25229
25230 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25231 false, context_die);
25232 gcc_assert (type_die);
25233 }
25234 return type_die;
25235 }
25236
25237 /* Force out any required namespaces to be able to output DECL,
25238 and return the new context_die for it, if it's changed. */
25239
25240 static dw_die_ref
25241 setup_namespace_context (tree thing, dw_die_ref context_die)
25242 {
25243 tree context = (DECL_P (thing)
25244 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25245 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25246 /* Force out the namespace. */
25247 context_die = force_decl_die (context);
25248
25249 return context_die;
25250 }
25251
25252 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25253 type) within its namespace, if appropriate.
25254
25255 For compatibility with older debuggers, namespace DIEs only contain
25256 declarations; all definitions are emitted at CU scope, with
25257 DW_AT_specification pointing to the declaration (like with class
25258 members). */
25259
25260 static dw_die_ref
25261 declare_in_namespace (tree thing, dw_die_ref context_die)
25262 {
25263 dw_die_ref ns_context;
25264
25265 if (debug_info_level <= DINFO_LEVEL_TERSE)
25266 return context_die;
25267
25268 /* External declarations in the local scope only need to be emitted
25269 once, not once in the namespace and once in the scope.
25270
25271 This avoids declaring the `extern' below in the
25272 namespace DIE as well as in the innermost scope:
25273
25274 namespace S
25275 {
25276 int i=5;
25277 int foo()
25278 {
25279 int i=8;
25280 extern int i;
25281 return i;
25282 }
25283 }
25284 */
25285 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25286 return context_die;
25287
25288 /* If this decl is from an inlined function, then don't try to emit it in its
25289 namespace, as we will get confused. It would have already been emitted
 25290 when the abstract instance of the inline function was emitted anyway. */
25291 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25292 return context_die;
25293
25294 ns_context = setup_namespace_context (thing, context_die);
25295
25296 if (ns_context != context_die)
25297 {
25298 if (is_fortran ())
25299 return ns_context;
25300 if (DECL_P (thing))
25301 gen_decl_die (thing, NULL, NULL, ns_context);
25302 else
25303 gen_type_die (thing, ns_context);
25304 }
25305 return context_die;
25306 }
25307
25308 /* Generate a DIE for a namespace or namespace alias. */
25309
25310 static void
25311 gen_namespace_die (tree decl, dw_die_ref context_die)
25312 {
25313 dw_die_ref namespace_die;
25314
25315 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25316 they are an alias of. */
25317 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25318 {
25319 /* Output a real namespace or module. */
25320 context_die = setup_namespace_context (decl, comp_unit_die ());
25321 namespace_die = new_die (is_fortran ()
25322 ? DW_TAG_module : DW_TAG_namespace,
25323 context_die, decl);
 25324 /* For Fortran modules defined in a different CU, don't add src coords. */
25325 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25326 {
25327 const char *name = dwarf2_name (decl, 0);
25328 if (name)
25329 add_name_attribute (namespace_die, name);
25330 }
25331 else
25332 add_name_and_src_coords_attributes (namespace_die, decl);
25333 if (DECL_EXTERNAL (decl))
25334 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25335 equate_decl_number_to_die (decl, namespace_die);
25336 }
25337 else
25338 {
25339 /* Output a namespace alias. */
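 /* E.g. (illustrative only), "namespace A = B;" yields a
 DW_TAG_imported_declaration whose DW_AT_import refers to the DIE of
 namespace B. */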
25340
25341 /* Force out the namespace we are an alias of, if necessary. */
25342 dw_die_ref origin_die
25343 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25344
25345 if (DECL_FILE_SCOPE_P (decl)
25346 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25347 context_die = setup_namespace_context (decl, comp_unit_die ());
25348 /* Now create the namespace alias DIE. */
25349 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25350 add_name_and_src_coords_attributes (namespace_die, decl);
25351 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25352 equate_decl_number_to_die (decl, namespace_die);
25353 }
25354 if ((dwarf_version >= 5 || !dwarf_strict)
25355 && lang_hooks.decls.decl_dwarf_attribute (decl,
25356 DW_AT_export_symbols) == 1)
25357 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25358
25359 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25360 if (want_pubnames ())
25361 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25362 }
25363
25364 /* Generate Dwarf debug information for a decl described by DECL.
25365 The return value is currently only meaningful for PARM_DECLs,
25366 for all other decls it returns NULL.
25367
25368 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25369 It can be NULL otherwise. */
25370
25371 static dw_die_ref
25372 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25373 dw_die_ref context_die)
25374 {
25375 tree decl_or_origin = decl ? decl : origin;
25376 tree class_origin = NULL, ultimate_origin;
25377
25378 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25379 return NULL;
25380
25381 /* Ignore pointer bounds decls. */
25382 if (DECL_P (decl_or_origin)
25383 && TREE_TYPE (decl_or_origin)
25384 && POINTER_BOUNDS_P (decl_or_origin))
25385 return NULL;
25386
25387 switch (TREE_CODE (decl_or_origin))
25388 {
25389 case ERROR_MARK:
25390 break;
25391
25392 case CONST_DECL:
25393 if (!is_fortran () && !is_ada ())
25394 {
25395 /* The individual enumerators of an enum type get output when we output
25396 the Dwarf representation of the relevant enum type itself. */
25397 break;
25398 }
25399
25400 /* Emit its type. */
25401 gen_type_die (TREE_TYPE (decl), context_die);
25402
25403 /* And its containing namespace. */
25404 context_die = declare_in_namespace (decl, context_die);
25405
25406 gen_const_die (decl, context_die);
25407 break;
25408
25409 case FUNCTION_DECL:
25410 #if 0
25411 /* FIXME */
25412 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
25413 on local redeclarations of global functions. That seems broken. */
25414 if (current_function_decl != decl)
25415 /* This is only a declaration. */;
25416 #endif
25417
25418 /* We should have abstract copies already and should not generate
25419 stray type DIEs in late LTO dumping. */
25420 if (! early_dwarf)
25421 ;
25422
25423 /* If we're emitting a clone, emit info for the abstract instance. */
25424 else if (origin || DECL_ORIGIN (decl) != decl)
25425 dwarf2out_abstract_function (origin
25426 ? DECL_ORIGIN (origin)
25427 : DECL_ABSTRACT_ORIGIN (decl));
25428
25429 /* If we're emitting a possibly inlined function emit it as
25430 abstract instance. */
25431 else if (cgraph_function_possibly_inlined_p (decl)
25432 && ! DECL_ABSTRACT_P (decl)
25433 && ! class_or_namespace_scope_p (context_die)
25434 /* dwarf2out_abstract_function won't emit a die if this is just
25435 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
25436 that case, because that works only if we have a die. */
25437 && DECL_INITIAL (decl) != NULL_TREE)
25438 dwarf2out_abstract_function (decl);
25439
25440 /* Otherwise we're emitting the primary DIE for this decl. */
25441 else if (debug_info_level > DINFO_LEVEL_TERSE)
25442 {
25443 /* Before we describe the FUNCTION_DECL itself, make sure that we
25444 have its containing type. */
25445 if (!origin)
25446 origin = decl_class_context (decl);
25447 if (origin != NULL_TREE)
25448 gen_type_die (origin, context_die);
25449
25450 /* And its return type. */
25451 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
25452
25453 /* And its virtual context. */
25454 if (DECL_VINDEX (decl) != NULL_TREE)
25455 gen_type_die (DECL_CONTEXT (decl), context_die);
25456
25457 /* Make sure we have a member DIE for decl. */
25458 if (origin != NULL_TREE)
25459 gen_type_die_for_member (origin, decl, context_die);
25460
25461 /* And its containing namespace. */
25462 context_die = declare_in_namespace (decl, context_die);
25463 }
25464
25465 /* Now output a DIE to represent the function itself. */
25466 if (decl)
25467 gen_subprogram_die (decl, context_die);
25468 break;
25469
25470 case TYPE_DECL:
25471 /* If we are in terse mode, don't generate any DIEs to represent any
25472 actual typedefs. */
25473 if (debug_info_level <= DINFO_LEVEL_TERSE)
25474 break;
25475
25476 /* In the special case of a TYPE_DECL node representing the declaration
25477 of some type tag, if the given TYPE_DECL is marked as having been
25478 instantiated from some other (original) TYPE_DECL node (e.g. one which
25479 was generated within the original definition of an inline function) we
25480 used to generate a special (abbreviated) DW_TAG_structure_type,
25481 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
25482 should be actually referencing those DIEs, as variable DIEs with that
25483 type would be emitted already in the abstract origin, so it was always
 25484 removed during unused type pruning. Don't add anything in this
25485 case. */
25486 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
25487 break;
25488
25489 if (is_redundant_typedef (decl))
25490 gen_type_die (TREE_TYPE (decl), context_die);
25491 else
25492 /* Output a DIE to represent the typedef itself. */
25493 gen_typedef_die (decl, context_die);
25494 break;
25495
25496 case LABEL_DECL:
25497 if (debug_info_level >= DINFO_LEVEL_NORMAL)
25498 gen_label_die (decl, context_die);
25499 break;
25500
25501 case VAR_DECL:
25502 case RESULT_DECL:
25503 /* If we are in terse mode, don't generate any DIEs to represent any
25504 variable declarations or definitions. */
25505 if (debug_info_level <= DINFO_LEVEL_TERSE)
25506 break;
25507
25508 /* Avoid generating stray type DIEs during late dwarf dumping.
25509 All types have been dumped early. */
25510 if (early_dwarf
25511 /* ??? But in LTRANS we cannot annotate early created variably
25512 modified type DIEs without copying them and adjusting all
25513 references to them. Dump them again as happens for inlining
25514 which copies both the decl and the types. */
25515 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25516 in VLA bound information for example. */
25517 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25518 current_function_decl)))
25519 {
25520 /* Output any DIEs that are needed to specify the type of this data
25521 object. */
25522 if (decl_by_reference_p (decl_or_origin))
25523 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25524 else
25525 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25526 }
25527
25528 if (early_dwarf)
25529 {
25530 /* And its containing type. */
25531 class_origin = decl_class_context (decl_or_origin);
25532 if (class_origin != NULL_TREE)
25533 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
25534
25535 /* And its containing namespace. */
25536 context_die = declare_in_namespace (decl_or_origin, context_die);
25537 }
25538
25539 /* Now output the DIE to represent the data object itself. This gets
25540 complicated because of the possibility that the VAR_DECL really
25541 represents an inlined instance of a formal parameter for an inline
25542 function. */
25543 ultimate_origin = decl_ultimate_origin (decl_or_origin);
25544 if (ultimate_origin != NULL_TREE
25545 && TREE_CODE (ultimate_origin) == PARM_DECL)
25546 gen_formal_parameter_die (decl, origin,
25547 true /* Emit name attribute. */,
25548 context_die);
25549 else
25550 gen_variable_die (decl, origin, context_die);
25551 break;
25552
25553 case FIELD_DECL:
25554 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
 25555 /* Ignore the nameless fields that are used to skip bits, but handle C++
25556 anonymous unions and structs. */
25557 if (DECL_NAME (decl) != NULL_TREE
25558 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
25559 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
25560 {
25561 gen_type_die (member_declared_type (decl), context_die);
25562 gen_field_die (decl, ctx, context_die);
25563 }
25564 break;
25565
25566 case PARM_DECL:
25567 /* Avoid generating stray type DIEs during late dwarf dumping.
25568 All types have been dumped early. */
25569 if (early_dwarf
25570 /* ??? But in LTRANS we cannot annotate early created variably
25571 modified type DIEs without copying them and adjusting all
25572 references to them. Dump them again as happens for inlining
25573 which copies both the decl and the types. */
25574 /* ??? And even non-LTO needs to re-visit type DIEs to fill
25575 in VLA bound information for example. */
25576 || (decl && variably_modified_type_p (TREE_TYPE (decl),
25577 current_function_decl)))
25578 {
25579 if (DECL_BY_REFERENCE (decl_or_origin))
25580 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
25581 else
25582 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
25583 }
25584 return gen_formal_parameter_die (decl, origin,
25585 true /* Emit name attribute. */,
25586 context_die);
25587
25588 case NAMESPACE_DECL:
25589 if (dwarf_version >= 3 || !dwarf_strict)
25590 gen_namespace_die (decl, context_die);
25591 break;
25592
25593 case IMPORTED_DECL:
25594 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
25595 DECL_CONTEXT (decl), context_die);
25596 break;
25597
25598 case NAMELIST_DECL:
25599 gen_namelist_decl (DECL_NAME (decl), context_die,
25600 NAMELIST_DECL_ASSOCIATED_DECL (decl));
25601 break;
25602
25603 default:
25604 /* Probably some frontend-internal decl. Assume we don't care. */
25605 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
25606 break;
25607 }
25608
25609 return NULL;
25610 }
25611 \f
25612 /* Output initial debug information for global DECL. Called at the
25613 end of the parsing process.
25614
25615 This is the initial debug generation process. As such, the DIEs
25616 generated may be incomplete. A later debug generation pass
25617 (dwarf2out_late_global_decl) will augment the information generated
25618 in this pass (e.g., with complete location info). */
25619
25620 static void
25621 dwarf2out_early_global_decl (tree decl)
25622 {
25623 set_early_dwarf s;
25624
25625 /* gen_decl_die() will set DECL_ABSTRACT because
 25626 cgraph_function_possibly_inlined_p() returns true. This in
25627 turn will cause DW_AT_inline attributes to be set.
25628
25629 This happens because at early dwarf generation, there is no
25630 cgraph information, causing cgraph_function_possibly_inlined_p()
25631 to return true. Trick cgraph_function_possibly_inlined_p()
25632 while we generate dwarf early. */
25633 bool save = symtab->global_info_ready;
25634 symtab->global_info_ready = true;
25635
25636 /* We don't handle TYPE_DECLs. If required, they'll be reached via
25637 other DECLs and they can point to template types or other things
25638 that dwarf2out can't handle when done via dwarf2out_decl. */
25639 if (TREE_CODE (decl) != TYPE_DECL
25640 && TREE_CODE (decl) != PARM_DECL)
25641 {
25642 if (TREE_CODE (decl) == FUNCTION_DECL)
25643 {
25644 tree save_fndecl = current_function_decl;
25645
25646 /* For nested functions, make sure we have DIEs for the parents first
25647 so that all nested DIEs are generated at the proper scope in the
25648 first shot. */
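 /* A hypothetical GNU C example (hypothetical names):

 void outer (void) { void inner (void) { } inner (); }

 When inner is processed here, outer's DIE is forced out first so
 that inner's DIE ends up nested in the correct scope. */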
25649 tree context = decl_function_context (decl);
25650 if (context != NULL)
25651 {
25652 dw_die_ref context_die = lookup_decl_die (context);
25653 current_function_decl = context;
25654
25655 /* Avoid emitting DIEs multiple times, but still process CONTEXT
25656 enough so that it lands in its own context. This avoids type
25657 pruning issues later on. */
25658 if (context_die == NULL || is_declaration_die (context_die))
25659 dwarf2out_decl (context);
25660 }
25661
25662 /* Emit an abstract origin of a function first. This happens
25663 with C++ constructor clones for example and makes
25664 dwarf2out_abstract_function happy which requires the early
25665 DIE of the abstract instance to be present. */
25666 tree origin = DECL_ABSTRACT_ORIGIN (decl);
25667 dw_die_ref origin_die;
25668 if (origin != NULL
25669 /* Do not emit the DIE multiple times but make sure to
25670 process it fully here in case we just saw a declaration. */
25671 && ((origin_die = lookup_decl_die (origin)) == NULL
25672 || is_declaration_die (origin_die)))
25673 {
25674 current_function_decl = origin;
25675 dwarf2out_decl (origin);
25676 }
25677
25678 /* Emit the DIE for decl but avoid doing that multiple times. */
25679 dw_die_ref old_die;
25680 if ((old_die = lookup_decl_die (decl)) == NULL
25681 || is_declaration_die (old_die))
25682 {
25683 current_function_decl = decl;
25684 dwarf2out_decl (decl);
25685 }
25686
25687 current_function_decl = save_fndecl;
25688 }
25689 else
25690 dwarf2out_decl (decl);
25691 }
25692 symtab->global_info_ready = save;
25693 }
25694
25695 /* Output debug information for global decl DECL. Called from
25696 toplev.c after compilation proper has finished. */
25697
25698 static void
25699 dwarf2out_late_global_decl (tree decl)
25700 {
25701 /* Fill-in any location information we were unable to determine
25702 on the first pass. */
25703 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
25704 {
25705 dw_die_ref die = lookup_decl_die (decl);
25706
25707 /* We may have to generate early debug late for LTO in case debug
25708 was not enabled at compile-time or the target doesn't support
25709 the LTO early debug scheme. */
25710 if (! die && in_lto_p)
25711 {
25712 dwarf2out_decl (decl);
25713 die = lookup_decl_die (decl);
25714 }
25715
25716 if (die)
25717 {
25718 /* We get called via the symtab code invoking late_global_decl
25719 for symbols that are optimized out. Do not add locations
25720 for those, except if they have a DECL_VALUE_EXPR, in which case
25721 they are relevant for debuggers. */
25722 varpool_node *node = varpool_node::get (decl);
25723 if ((! node || ! node->definition) && ! DECL_HAS_VALUE_EXPR_P (decl))
25724 tree_add_const_value_attribute_for_decl (die, decl);
25725 else
25726 add_location_or_const_value_attribute (die, decl, false);
25727 }
25728 }
25729 }
25730
25731 /* Output debug information for type decl DECL. Called from toplev.c
25732 and from language front ends (to record built-in types). */
25733 static void
25734 dwarf2out_type_decl (tree decl, int local)
25735 {
25736 if (!local)
25737 {
25738 set_early_dwarf s;
25739 dwarf2out_decl (decl);
25740 }
25741 }
25742
25743 /* Output debug information for imported module or decl DECL.
25744 NAME is non-NULL name in the lexical block if the decl has been renamed.
25745 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
25746 that DECL belongs to.
25747 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
25748 static void
25749 dwarf2out_imported_module_or_decl_1 (tree decl,
25750 tree name,
25751 tree lexical_block,
25752 dw_die_ref lexical_block_die)
25753 {
25754 expanded_location xloc;
25755 dw_die_ref imported_die = NULL;
25756 dw_die_ref at_import_die;
25757
25758 if (TREE_CODE (decl) == IMPORTED_DECL)
25759 {
25760 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
25761 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
25762 gcc_assert (decl);
25763 }
25764 else
25765 xloc = expand_location (input_location);
25766
25767 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
25768 {
25769 at_import_die = force_type_die (TREE_TYPE (decl));
25770 /* For namespace N { typedef void T; } using N::T; base_type_die
25771 returns NULL, but DW_TAG_imported_declaration requires
25772 	   the DW_AT_import attribute.  Force creation of DW_TAG_typedef. */
25773 if (!at_import_die)
25774 {
25775 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
25776 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
25777 at_import_die = lookup_type_die (TREE_TYPE (decl));
25778 gcc_assert (at_import_die);
25779 }
25780 }
25781 else
25782 {
25783 at_import_die = lookup_decl_die (decl);
25784 if (!at_import_die)
25785 {
25786 /* If we're trying to avoid duplicate debug info, we may not have
25787 emitted the member decl for this field. Emit it now. */
25788 if (TREE_CODE (decl) == FIELD_DECL)
25789 {
25790 tree type = DECL_CONTEXT (decl);
25791
25792 if (TYPE_CONTEXT (type)
25793 && TYPE_P (TYPE_CONTEXT (type))
25794 && !should_emit_struct_debug (TYPE_CONTEXT (type),
25795 DINFO_USAGE_DIR_USE))
25796 return;
25797 gen_type_die_for_member (type, decl,
25798 get_context_die (TYPE_CONTEXT (type)));
25799 }
25800 if (TREE_CODE (decl) == NAMELIST_DECL)
25801 at_import_die = gen_namelist_decl (DECL_NAME (decl),
25802 get_context_die (DECL_CONTEXT (decl)),
25803 NULL_TREE);
25804 else
25805 at_import_die = force_decl_die (decl);
25806 }
25807 }
25808
25809 if (TREE_CODE (decl) == NAMESPACE_DECL)
25810 {
25811 if (dwarf_version >= 3 || !dwarf_strict)
25812 imported_die = new_die (DW_TAG_imported_module,
25813 lexical_block_die,
25814 lexical_block);
25815 else
25816 return;
25817 }
25818 else
25819 imported_die = new_die (DW_TAG_imported_declaration,
25820 lexical_block_die,
25821 lexical_block);
25822
25823 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
25824 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
25825 if (debug_column_info && xloc.column)
25826 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
25827 if (name)
25828 add_AT_string (imported_die, DW_AT_name,
25829 IDENTIFIER_POINTER (name));
25830 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
25831 }
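/* As an illustration (not derived from a specific testcase), a C++ snippet
   such as
       namespace A { int i; }
       using A::i;
   is expected to reach the code above with DECL being the VAR_DECL for A::i,
   producing a DW_TAG_imported_declaration whose DW_AT_import refers to the
   DIE for A::i, while "using namespace A;" instead produces a
   DW_TAG_imported_module referring to the DIE of namespace A.  */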
25832
25833 /* Output debug information for imported module or decl DECL.
25834 NAME is non-NULL name in context if the decl has been renamed.
25835 CHILD is true if decl is one of the renamed decls as part of
25836 importing whole module.
25837 IMPLICIT is set if this hook is called for an implicit import
25838 such as inline namespace. */
25839
25840 static void
25841 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
25842 bool child, bool implicit)
25843 {
25844 /* dw_die_ref at_import_die; */
25845 dw_die_ref scope_die;
25846
25847 if (debug_info_level <= DINFO_LEVEL_TERSE)
25848 return;
25849
25850 gcc_assert (decl);
25851
25852   /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
25853      be enough.  For DWARF4 and older, even if we emit DW_AT_export_symbols
25854      as an extension, add the implicit DW_TAG_imported_module anyway for
25855      the benefit of consumers unaware of DW_AT_export_symbols. */
25856 if (implicit
25857 && dwarf_version >= 5
25858 && lang_hooks.decls.decl_dwarf_attribute (decl,
25859 DW_AT_export_symbols) == 1)
25860 return;
25861
25862 set_early_dwarf s;
25863
25864   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
25865      two DIEs: a DIE for the imported decl and a DIE for the scope it is
25866      imported into.  First, get the DIE for the decl itself. */
25867
25868   /* Get the scope DIE for the decl's context.  Use comp_unit_die for a
25869      global module or decl.  If no DIE is found for a non-global, force one. */
25870 if (context
25871 && TYPE_P (context)
25872 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
25873 return;
25874
25875 scope_die = get_context_die (context);
25876
25877 if (child)
25878 {
25879 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
25880 	 there is nothing we can do here. */
25881 if (dwarf_version < 3 && dwarf_strict)
25882 return;
25883
25884 gcc_assert (scope_die->die_child);
25885 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
25886 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
25887 scope_die = scope_die->die_child;
25888 }
25889
25890 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
25891 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
25892 }
25893
25894 /* Output debug information for namelists. */
25895
25896 static dw_die_ref
25897 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
25898 {
25899 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
25900 tree value;
25901 unsigned i;
25902
25903 if (debug_info_level <= DINFO_LEVEL_TERSE)
25904 return NULL;
25905
25906 gcc_assert (scope_die != NULL);
25907 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
25908 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
25909
25910 /* If there are no item_decls, we have a nondefining namelist, e.g.
25911 with USE association; hence, set DW_AT_declaration. */
25912 if (item_decls == NULL_TREE)
25913 {
25914 add_AT_flag (nml_die, DW_AT_declaration, 1);
25915 return nml_die;
25916 }
25917
25918 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
25919 {
25920 nml_item_ref_die = lookup_decl_die (value);
25921 if (!nml_item_ref_die)
25922 nml_item_ref_die = force_decl_die (value);
25923
25924 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
25925 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
25926 }
25927 return nml_die;
25928 }
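/* For reference, a Fortran namelist such as
       NAMELIST /NML/ A, B
   is expected to produce a DW_TAG_namelist DIE for NML whose
   DW_TAG_namelist_item children each carry a DW_AT_namelist_items reference
   to the DIE of the corresponding variable, while a USE-associated namelist
   with no item decls only gets DW_AT_declaration, as handled above.  */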
25929
25930
25931 /* Write the debugging output for DECL. */
25932
25933 static void
25934 dwarf2out_decl (tree decl)
25935 {
25936 dw_die_ref context_die = comp_unit_die ();
25937
25938 switch (TREE_CODE (decl))
25939 {
25940 case ERROR_MARK:
25941 return;
25942
25943 case FUNCTION_DECL:
25944 /* If we're a nested function, initially use a parent of NULL; if we're
25945 a plain function, this will be fixed up in decls_for_scope. If
25946 we're a method, it will be ignored, since we already have a DIE. */
25947 if (decl_function_context (decl)
25948 /* But if we're in terse mode, we don't care about scope. */
25949 && debug_info_level > DINFO_LEVEL_TERSE)
25950 context_die = NULL;
25951 break;
25952
25953 case VAR_DECL:
25954 /* For local statics lookup proper context die. */
25955 if (local_function_static (decl))
25956 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25957
25958 /* If we are in terse mode, don't generate any DIEs to represent any
25959 variable declarations or definitions. */
25960 if (debug_info_level <= DINFO_LEVEL_TERSE)
25961 return;
25962 break;
25963
25964 case CONST_DECL:
25965 if (debug_info_level <= DINFO_LEVEL_TERSE)
25966 return;
25967 if (!is_fortran () && !is_ada ())
25968 return;
25969 if (TREE_STATIC (decl) && decl_function_context (decl))
25970 context_die = lookup_decl_die (DECL_CONTEXT (decl));
25971 break;
25972
25973 case NAMESPACE_DECL:
25974 case IMPORTED_DECL:
25975 if (debug_info_level <= DINFO_LEVEL_TERSE)
25976 return;
25977 if (lookup_decl_die (decl) != NULL)
25978 return;
25979 break;
25980
25981 case TYPE_DECL:
25982 /* Don't emit stubs for types unless they are needed by other DIEs. */
25983 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
25984 return;
25985
25986 /* Don't bother trying to generate any DIEs to represent any of the
25987 normal built-in types for the language we are compiling. */
25988 if (DECL_IS_BUILTIN (decl))
25989 return;
25990
25991 /* If we are in terse mode, don't generate any DIEs for types. */
25992 if (debug_info_level <= DINFO_LEVEL_TERSE)
25993 return;
25994
25995 /* If we're a function-scope tag, initially use a parent of NULL;
25996 this will be fixed up in decls_for_scope. */
25997 if (decl_function_context (decl))
25998 context_die = NULL;
25999
26000 break;
26001
26002 case NAMELIST_DECL:
26003 break;
26004
26005 default:
26006 return;
26007 }
26008
26009 gen_decl_die (decl, NULL, NULL, context_die);
26010
26011 if (flag_checking)
26012 {
26013 dw_die_ref die = lookup_decl_die (decl);
26014 if (die)
26015 check_die (die);
26016 }
26017 }
26018
26019 /* Write the debugging output for DECL. */
26020
26021 static void
26022 dwarf2out_function_decl (tree decl)
26023 {
26024 dwarf2out_decl (decl);
26025 call_arg_locations = NULL;
26026 call_arg_loc_last = NULL;
26027 call_site_count = -1;
26028 tail_call_site_count = -1;
26029 decl_loc_table->empty ();
26030 cached_dw_loc_list_table->empty ();
26031 }
26032
26033 /* Output a marker (i.e. a label) for the beginning of the generated code for
26034 a lexical block. */
26035
26036 static void
26037 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26038 unsigned int blocknum)
26039 {
26040 switch_to_section (current_function_section ());
26041 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26042 }
26043
26044 /* Output a marker (i.e. a label) for the end of the generated code for a
26045 lexical block. */
26046
26047 static void
26048 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26049 {
26050 switch_to_section (current_function_section ());
26051 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26052 }
26053
26054 /* Returns nonzero if it is appropriate not to emit any debugging
26055 information for BLOCK, because it doesn't contain any instructions.
26056
26057 Don't allow this for blocks with nested functions or local classes
26058 as we would end up with orphans, and in the presence of scheduling
26059 we may end up calling them anyway. */
26060
26061 static bool
26062 dwarf2out_ignore_block (const_tree block)
26063 {
26064 tree decl;
26065 unsigned int i;
26066
26067 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26068 if (TREE_CODE (decl) == FUNCTION_DECL
26069 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26070 return 0;
26071 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26072 {
26073 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26074 if (TREE_CODE (decl) == FUNCTION_DECL
26075 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26076 return 0;
26077 }
26078
26079 return 1;
26080 }
26081
26082 /* Hash table routines for file_hash. */
26083
26084 bool
26085 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26086 {
26087 return filename_cmp (p1->filename, p2) == 0;
26088 }
26089
26090 hashval_t
26091 dwarf_file_hasher::hash (dwarf_file_data *p)
26092 {
26093 return htab_hash_string (p->filename);
26094 }
26095
26096 /* Look up FILE_NAME (in the list of filenames that we know about here in
26097 dwarf2out.c) and return its "index". The index of each (known) filename is
26098 just a unique number which is associated with only that one filename. We
26099 need such numbers for the sake of generating labels (in the .debug_sfnames
26100 section) and references to those files numbers (in the .debug_srcinfo
26101 and .debug_macinfo sections). If the filename given as an argument is not
26102 found in our current list, add it to the list and assign it the next
26103 available unique index number. */
26104
26105 static struct dwarf_file_data *
26106 lookup_filename (const char *file_name)
26107 {
26108 struct dwarf_file_data * created;
26109
26110 if (!file_name)
26111 return NULL;
26112
26113 dwarf_file_data **slot
26114 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26115 INSERT);
26116 if (*slot)
26117 return *slot;
26118
26119 created = ggc_alloc<dwarf_file_data> ();
26120 created->filename = file_name;
26121 created->emitted_number = 0;
26122 *slot = created;
26123 return created;
26124 }
26125
26126 /* If the assembler will construct the file table, then translate the compiler
26127 internal file table number into the assembler file table number, and emit
26128 a .file directive if we haven't already emitted one yet. The file table
26129 numbers are different because we prune debug info for unused variables and
26130 types, which may include filenames. */
26131
26132 static int
26133 maybe_emit_file (struct dwarf_file_data * fd)
26134 {
26135 if (! fd->emitted_number)
26136 {
26137 if (last_emitted_file)
26138 fd->emitted_number = last_emitted_file->emitted_number + 1;
26139 else
26140 fd->emitted_number = 1;
26141 last_emitted_file = fd;
26142
26143 if (DWARF2_ASM_LINE_DEBUG_INFO)
26144 {
26145 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26146 output_quoted_string (asm_out_file,
26147 remap_debug_filename (fd->filename));
26148 fputc ('\n', asm_out_file);
26149 }
26150 }
26151
26152 return fd->emitted_number;
26153 }
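/* For example, with assembler-generated line tables the first call to
   maybe_emit_file for "foo.c" assigns it number 1 and emits something like
       .file 1 "foo.c"
   Subsequent files get 2, 3, ... in the order they are first emitted, which
   generally differs from the compiler-internal file table order because of
   the pruning mentioned above.  */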
26154
26155 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26156 That generation should happen after function debug info has been
26157 generated. The value of the attribute is the constant value of ARG. */
26158
26159 static void
26160 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26161 {
26162 die_arg_entry entry;
26163
26164 if (!die || !arg)
26165 return;
26166
26167 gcc_assert (early_dwarf);
26168
26169 if (!tmpl_value_parm_die_table)
26170 vec_alloc (tmpl_value_parm_die_table, 32);
26171
26172 entry.die = die;
26173 entry.arg = arg;
26174 vec_safe_push (tmpl_value_parm_die_table, entry);
26175 }
26176
26177 /* Return TRUE if T is an instance of a generic type, FALSE
26178 otherwise. */
26179
26180 static bool
26181 generic_type_p (tree t)
26182 {
26183 if (t == NULL_TREE || !TYPE_P (t))
26184 return false;
26185 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26186 }
26187
26188 /* Schedule the generation of the generic parameter dies for the
26189    instance of generic type T.  The actual generation is done later
26190    by gen_scheduled_generic_parms_dies. */
26191
26192 static void
26193 schedule_generic_params_dies_gen (tree t)
26194 {
26195 if (!generic_type_p (t))
26196 return;
26197
26198 gcc_assert (early_dwarf);
26199
26200 if (!generic_type_instances)
26201 vec_alloc (generic_type_instances, 256);
26202
26203 vec_safe_push (generic_type_instances, t);
26204 }
26205
26206 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26207 by append_entry_to_tmpl_value_parm_die_table. This function must
26208 be called after function DIEs have been generated. */
26209
26210 static void
26211 gen_remaining_tmpl_value_param_die_attribute (void)
26212 {
26213 if (tmpl_value_parm_die_table)
26214 {
26215 unsigned i, j;
26216 die_arg_entry *e;
26217
26218 /* We do this in two phases - first get the cases we can
26219 handle during early-finish, preserving those we cannot
26220 (containing symbolic constants where we don't yet know
26221 whether we are going to output the referenced symbols).
26222 For those we try again at late-finish. */
26223 j = 0;
26224 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26225 {
26226 if (!e->die->removed
26227 && !tree_add_const_value_attribute (e->die, e->arg))
26228 {
26229 dw_loc_descr_ref loc = NULL;
26230 if (! early_dwarf
26231 && (dwarf_version >= 5 || !dwarf_strict))
26232 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26233 if (loc)
26234 add_AT_loc (e->die, DW_AT_location, loc);
26235 else
26236 (*tmpl_value_parm_die_table)[j++] = *e;
26237 }
26238 }
26239 tmpl_value_parm_die_table->truncate (j);
26240 }
26241 }
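/* For instance, for a C++ instantiation like S<42> the DIE scheduled above is
   typically the DW_TAG_template_value_parameter for the non-type argument,
   and this pass attaches DW_AT_const_value 42 to it (or, at late finish and
   for symbolic arguments, a DW_AT_location when one can be computed).  */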
26242
26243 /* Generate generic parameters DIEs for instances of generic types
26244 that have been previously scheduled by
26245 schedule_generic_params_dies_gen. This function must be called
26246 after all the types of the CU have been laid out. */
26247
26248 static void
26249 gen_scheduled_generic_parms_dies (void)
26250 {
26251 unsigned i;
26252 tree t;
26253
26254 if (!generic_type_instances)
26255 return;
26256
26257 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26258 if (COMPLETE_TYPE_P (t))
26259 gen_generic_params_dies (t);
26260
26261 generic_type_instances = NULL;
26262 }
26263
26264
26265 /* Replace DW_AT_name for DECL with NAME. */
26266
26267 static void
26268 dwarf2out_set_name (tree decl, tree name)
26269 {
26270 dw_die_ref die;
26271 dw_attr_node *attr;
26272 const char *dname;
26273
26274 die = TYPE_SYMTAB_DIE (decl);
26275 if (!die)
26276 return;
26277
26278 dname = dwarf2_name (name, 0);
26279 if (!dname)
26280 return;
26281
26282 attr = get_AT (die, DW_AT_name);
26283 if (attr)
26284 {
26285 struct indirect_string_node *node;
26286
26287 node = find_AT_string (dname);
26288       /* Replace the string. */
26289 attr->dw_attr_val.v.val_str = node;
26290 }
26291
26292 else
26293 add_name_attribute (die, dname);
26294 }
26295
26296 /* True if before or during processing of the first function being emitted. */
26297 static bool in_first_function_p = true;
26298 /* True if loc_note during dwarf2out_var_location call might still be
26299 before first real instruction at address equal to .Ltext0. */
26300 static bool maybe_at_text_label_p = true;
26301 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26302 static unsigned int first_loclabel_num_not_at_text_label;
26303
26304 /* Look ahead for a real insn, or for a begin stmt marker. */
26305
26306 static rtx_insn *
26307 dwarf2out_next_real_insn (rtx_insn *loc_note)
26308 {
26309 rtx_insn *next_real = NEXT_INSN (loc_note);
26310
26311 while (next_real)
26312 if (INSN_P (next_real))
26313 break;
26314 else
26315 next_real = NEXT_INSN (next_real);
26316
26317 return next_real;
26318 }
26319
26320 /* Called by the final INSN scan whenever we see a var location. We
26321 use it to drop labels in the right places, and throw the location in
26322 our lookup table. */
26323
26324 static void
26325 dwarf2out_var_location (rtx_insn *loc_note)
26326 {
26327 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26328 struct var_loc_node *newloc;
26329 rtx_insn *next_real, *next_note;
26330 rtx_insn *call_insn = NULL;
26331 static const char *last_label;
26332 static const char *last_postcall_label;
26333 static bool last_in_cold_section_p;
26334 static rtx_insn *expected_next_loc_note;
26335 tree decl;
26336 bool var_loc_p;
26337
26338 if (!NOTE_P (loc_note))
26339 {
26340 if (CALL_P (loc_note))
26341 {
26342 call_site_count++;
26343 if (SIBLING_CALL_P (loc_note))
26344 tail_call_site_count++;
26345 if (optimize == 0 && !flag_var_tracking)
26346 {
26347 /* When the var-tracking pass is not running, there is no note
26348 for indirect calls whose target is compile-time known. In this
26349 case, process such calls specifically so that we generate call
26350 sites for them anyway. */
26351 rtx x = PATTERN (loc_note);
26352 if (GET_CODE (x) == PARALLEL)
26353 x = XVECEXP (x, 0, 0);
26354 if (GET_CODE (x) == SET)
26355 x = SET_SRC (x);
26356 if (GET_CODE (x) == CALL)
26357 x = XEXP (x, 0);
26358 if (!MEM_P (x)
26359 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
26360 || !SYMBOL_REF_DECL (XEXP (x, 0))
26361 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
26362 != FUNCTION_DECL))
26363 {
26364 call_insn = loc_note;
26365 loc_note = NULL;
26366 var_loc_p = false;
26367
26368 next_real = dwarf2out_next_real_insn (call_insn);
26369 next_note = NULL;
26370 cached_next_real_insn = NULL;
26371 goto create_label;
26372 }
26373 }
26374 }
26375 return;
26376 }
26377
26378 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
26379 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
26380 return;
26381
26382 /* Optimize processing a large consecutive sequence of location
26383 notes so we don't spend too much time in next_real_insn. If the
26384 next insn is another location note, remember the next_real_insn
26385 calculation for next time. */
26386 next_real = cached_next_real_insn;
26387 if (next_real)
26388 {
26389 if (expected_next_loc_note != loc_note)
26390 next_real = NULL;
26391 }
26392
26393 next_note = NEXT_INSN (loc_note);
26394 if (! next_note
26395 || next_note->deleted ()
26396 || ! NOTE_P (next_note)
26397 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
26398 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
26399 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
26400 next_note = NULL;
26401
26402 if (! next_real)
26403 next_real = dwarf2out_next_real_insn (loc_note);
26404
26405 if (next_note)
26406 {
26407 expected_next_loc_note = next_note;
26408 cached_next_real_insn = next_real;
26409 }
26410 else
26411 cached_next_real_insn = NULL;
26412
26413 /* If there are no instructions which would be affected by this note,
26414 don't do anything. */
26415 if (var_loc_p
26416 && next_real == NULL_RTX
26417 && !NOTE_DURING_CALL_P (loc_note))
26418 return;
26419
26420 create_label:
26421
26422 if (next_real == NULL_RTX)
26423 next_real = get_last_insn ();
26424
26425   /* If there were any real insns between the note we processed last time
26426 and this note (or if it is the first note), clear
26427 last_{,postcall_}label so that they are not reused this time. */
26428 if (last_var_location_insn == NULL_RTX
26429 || last_var_location_insn != next_real
26430 || last_in_cold_section_p != in_cold_section_p)
26431 {
26432 last_label = NULL;
26433 last_postcall_label = NULL;
26434 }
26435
26436 if (var_loc_p)
26437 {
26438 decl = NOTE_VAR_LOCATION_DECL (loc_note);
26439 newloc = add_var_loc_to_decl (decl, loc_note,
26440 NOTE_DURING_CALL_P (loc_note)
26441 ? last_postcall_label : last_label);
26442 if (newloc == NULL)
26443 return;
26444 }
26445 else
26446 {
26447 decl = NULL_TREE;
26448 newloc = NULL;
26449 }
26450
26451   /* If there were no real insns between the note we processed last time
26452 and this note, use the label we emitted last time. Otherwise
26453 create a new label and emit it. */
26454 if (last_label == NULL)
26455 {
26456 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
26457 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
26458 loclabel_num++;
26459 last_label = ggc_strdup (loclabel);
26460 /* See if loclabel might be equal to .Ltext0. If yes,
26461 bump first_loclabel_num_not_at_text_label. */
26462 if (!have_multiple_function_sections
26463 && in_first_function_p
26464 && maybe_at_text_label_p)
26465 {
26466 static rtx_insn *last_start;
26467 rtx_insn *insn;
26468 for (insn = loc_note; insn; insn = previous_insn (insn))
26469 if (insn == last_start)
26470 break;
26471 else if (!NONDEBUG_INSN_P (insn))
26472 continue;
26473 else
26474 {
26475 rtx body = PATTERN (insn);
26476 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
26477 continue;
26478 /* Inline asm could occupy zero bytes. */
26479 else if (GET_CODE (body) == ASM_INPUT
26480 || asm_noperands (body) >= 0)
26481 continue;
26482 #ifdef HAVE_attr_length
26483 else if (get_attr_min_length (insn) == 0)
26484 continue;
26485 #endif
26486 else
26487 {
26488 /* Assume insn has non-zero length. */
26489 maybe_at_text_label_p = false;
26490 break;
26491 }
26492 }
26493 if (maybe_at_text_label_p)
26494 {
26495 last_start = loc_note;
26496 first_loclabel_num_not_at_text_label = loclabel_num;
26497 }
26498 }
26499 }
26500
26501 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
26502 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
26503
26504 if (!var_loc_p)
26505 {
26506 struct call_arg_loc_node *ca_loc
26507 = ggc_cleared_alloc<call_arg_loc_node> ();
26508 rtx_insn *prev
26509 = loc_note != NULL_RTX ? prev_real_insn (loc_note) : call_insn;
26510
26511 ca_loc->call_arg_loc_note = loc_note;
26512 ca_loc->next = NULL;
26513 ca_loc->label = last_label;
26514 gcc_assert (prev
26515 && (CALL_P (prev)
26516 || (NONJUMP_INSN_P (prev)
26517 && GET_CODE (PATTERN (prev)) == SEQUENCE
26518 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
26519 if (!CALL_P (prev))
26520 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
26521 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
26522
26523 /* Look for a SYMBOL_REF in the "prev" instruction. */
26524 rtx x = get_call_rtx_from (PATTERN (prev));
26525 if (x)
26526 {
26527 /* Try to get the call symbol, if any. */
26528 if (MEM_P (XEXP (x, 0)))
26529 x = XEXP (x, 0);
26530 /* First, look for a memory access to a symbol_ref. */
26531 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
26532 && SYMBOL_REF_DECL (XEXP (x, 0))
26533 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
26534 ca_loc->symbol_ref = XEXP (x, 0);
26535 /* Otherwise, look at a compile-time known user-level function
26536 declaration. */
26537 else if (MEM_P (x)
26538 && MEM_EXPR (x)
26539 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
26540 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
26541 }
26542
26543 ca_loc->block = insn_scope (prev);
26544 if (call_arg_locations)
26545 call_arg_loc_last->next = ca_loc;
26546 else
26547 call_arg_locations = ca_loc;
26548 call_arg_loc_last = ca_loc;
26549 }
26550 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
26551 newloc->label = last_label;
26552 else
26553 {
26554 if (!last_postcall_label)
26555 {
26556 sprintf (loclabel, "%s-1", last_label);
26557 last_postcall_label = ggc_strdup (loclabel);
26558 }
26559 newloc->label = last_postcall_label;
26560 }
26561
26562 if (var_loc_p && flag_debug_asm)
26563 {
26564 const char *name = NULL, *sep = " => ", *patstr = NULL;
26565 if (decl && DECL_NAME (decl))
26566 name = IDENTIFIER_POINTER (DECL_NAME (decl));
26567 if (NOTE_VAR_LOCATION_LOC (loc_note))
26568 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
26569 else
26570 {
26571 sep = " ";
26572 patstr = "RESET";
26573 }
26574 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
26575 name, sep, patstr);
26576 }
26577
26578 last_var_location_insn = next_real;
26579 last_in_cold_section_p = in_cold_section_p;
26580 }
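/* The .LVLn labels emitted above delimit the address ranges used later when
   building .debug_loc location lists: each var_loc_node recorded by
   add_var_loc_to_decl remembers the label at which the given location starts
   to hold, and each call_arg_loc_node likewise remembers the label of the
   call site it describes.  */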
26581
26582 /* Called from finalize_size_functions for size functions so that their body
26583 can be encoded in the debug info to describe the layout of variable-length
26584 structures. */
26585
26586 static void
26587 dwarf2out_size_function (tree decl)
26588 {
26589 function_to_dwarf_procedure (decl);
26590 }
26591
26592 /* Note in one location list that text section has changed. */
26593
26594 int
26595 var_location_switch_text_section_1 (var_loc_list **slot, void *)
26596 {
26597 var_loc_list *list = *slot;
26598 if (list->first)
26599 list->last_before_switch
26600 = list->last->next ? list->last->next : list->last;
26601 return 1;
26602 }
26603
26604 /* Note in all location lists that text section has changed. */
26605
26606 static void
26607 var_location_switch_text_section (void)
26608 {
26609 if (decl_loc_table == NULL)
26610 return;
26611
26612 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
26613 }
26614
26615 /* Create a new line number table. */
26616
26617 static dw_line_info_table *
26618 new_line_info_table (void)
26619 {
26620 dw_line_info_table *table;
26621
26622 table = ggc_cleared_alloc<dw_line_info_table> ();
26623 table->file_num = 1;
26624 table->line_num = 1;
26625 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
26626
26627 return table;
26628 }
26629
26630 /* Look up the "current" table into which we emit line info, so
26631 that we don't have to do it for every source line. */
26632
26633 static void
26634 set_cur_line_info_table (section *sec)
26635 {
26636 dw_line_info_table *table;
26637
26638 if (sec == text_section)
26639 table = text_section_line_info;
26640 else if (sec == cold_text_section)
26641 {
26642 table = cold_text_section_line_info;
26643 if (!table)
26644 {
26645 cold_text_section_line_info = table = new_line_info_table ();
26646 table->end_label = cold_end_label;
26647 }
26648 }
26649 else
26650 {
26651 const char *end_label;
26652
26653 if (crtl->has_bb_partition)
26654 {
26655 if (in_cold_section_p)
26656 end_label = crtl->subsections.cold_section_end_label;
26657 else
26658 end_label = crtl->subsections.hot_section_end_label;
26659 }
26660 else
26661 {
26662 char label[MAX_ARTIFICIAL_LABEL_BYTES];
26663 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
26664 current_function_funcdef_no);
26665 end_label = ggc_strdup (label);
26666 }
26667
26668 table = new_line_info_table ();
26669 table->end_label = end_label;
26670
26671 vec_safe_push (separate_line_info, table);
26672 }
26673
26674 if (DWARF2_ASM_LINE_DEBUG_INFO)
26675 table->is_stmt = (cur_line_info_table
26676 ? cur_line_info_table->is_stmt
26677 : DWARF_LINE_DEFAULT_IS_STMT_START);
26678 cur_line_info_table = table;
26679 }
26680
26681
26682 /* We need to reset the locations at the beginning of each
26683 function. We can't do this in the end_function hook, because the
26684 declarations that use the locations won't have been output when
26685 that hook is called. Also compute have_multiple_function_sections here. */
26686
26687 static void
26688 dwarf2out_begin_function (tree fun)
26689 {
26690 section *sec = function_section (fun);
26691
26692 if (sec != text_section)
26693 have_multiple_function_sections = true;
26694
26695 if (crtl->has_bb_partition && !cold_text_section)
26696 {
26697 gcc_assert (current_function_decl == fun);
26698 cold_text_section = unlikely_text_section ();
26699 switch_to_section (cold_text_section);
26700 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
26701 switch_to_section (sec);
26702 }
26703
26704 dwarf2out_note_section_used ();
26705 call_site_count = 0;
26706 tail_call_site_count = 0;
26707
26708 set_cur_line_info_table (sec);
26709 }
26710
26711 /* Helper function of dwarf2out_end_function, called only after emitting
26712 the very first function into assembly. Check if some .debug_loc range
26713 might end with a .LVL* label that could be equal to .Ltext0.
26714 In that case we must force using absolute addresses in .debug_loc ranges,
26715 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
26716 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
26717 list terminator.
26718 Set have_multiple_function_sections to true in that case and
26719 terminate htab traversal. */
26720
26721 int
26722 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
26723 {
26724 var_loc_list *entry = *slot;
26725 struct var_loc_node *node;
26726
26727 node = entry->first;
26728 if (node && node->next && node->next->label)
26729 {
26730 unsigned int i;
26731 const char *label = node->next->label;
26732 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
26733
26734 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
26735 {
26736 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
26737 if (strcmp (label, loclabel) == 0)
26738 {
26739 have_multiple_function_sections = true;
26740 return 0;
26741 }
26742 }
26743 }
26744 return 1;
26745 }
26746
26747 /* Hook called after emitting a function into assembly.
26748 This does something only for the very first function emitted. */
26749
26750 static void
26751 dwarf2out_end_function (unsigned int)
26752 {
26753 if (in_first_function_p
26754 && !have_multiple_function_sections
26755 && first_loclabel_num_not_at_text_label
26756 && decl_loc_table)
26757 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
26758 in_first_function_p = false;
26759 maybe_at_text_label_p = false;
26760 }
26761
26762 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
26763 front-ends register a translation unit even before dwarf2out_init is
26764 called. */
26765 static tree main_translation_unit = NULL_TREE;
26766
26767 /* Hook called by front-ends after they built their main translation unit.
26768 Associate comp_unit_die to UNIT. */
26769
26770 static void
26771 dwarf2out_register_main_translation_unit (tree unit)
26772 {
26773 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
26774 && main_translation_unit == NULL_TREE);
26775 main_translation_unit = unit;
26776 /* If dwarf2out_init has not been called yet, it will perform the association
26777 itself looking at main_translation_unit. */
26778 if (decl_die_table != NULL)
26779 equate_decl_number_to_die (unit, comp_unit_die ());
26780 }
26781
26782 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
26783
26784 static void
26785 push_dw_line_info_entry (dw_line_info_table *table,
26786 enum dw_line_info_opcode opcode, unsigned int val)
26787 {
26788 dw_line_info_entry e;
26789 e.opcode = opcode;
26790 e.val = val;
26791 vec_safe_push (table->entries, e);
26792 }
26793
26794 /* Output a label to mark the beginning of a source code line entry
26795 and record information relating to this source line, in
26796 'line_info_table' for later output of the .debug_line section. */
26797 /* ??? The discriminator parameter ought to be unsigned. */
26798
26799 static void
26800 dwarf2out_source_line (unsigned int line, unsigned int column,
26801 const char *filename,
26802 int discriminator, bool is_stmt)
26803 {
26804 unsigned int file_num;
26805 dw_line_info_table *table;
26806
26807 if (debug_info_level < DINFO_LEVEL_TERSE || line == 0)
26808 return;
26809
26810   /* The discriminator was added in DWARF 4.  Simplify the code below
26811      by removing it if we're not supposed to output it. */
26812 if (dwarf_version < 4 && dwarf_strict)
26813 discriminator = 0;
26814
26815 if (!debug_column_info)
26816 column = 0;
26817
26818 table = cur_line_info_table;
26819 file_num = maybe_emit_file (lookup_filename (filename));
26820
26821 /* ??? TODO: Elide duplicate line number entries. Traditionally,
26822 the debugger has used the second (possibly duplicate) line number
26823 at the beginning of the function to mark the end of the prologue.
26824 We could eliminate any other duplicates within the function. For
26825 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
26826 that second line number entry. */
26827 /* Recall that this end-of-prologue indication is *not* the same thing
26828 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
26829 to which the hook corresponds, follows the last insn that was
26830 emitted by gen_prologue. What we need is to precede the first insn
26831 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
26832 insn that corresponds to something the user wrote. These may be
26833 very different locations once scheduling is enabled. */
26834
26835 if (0 && file_num == table->file_num
26836 && line == table->line_num
26837 && column == table->column_num
26838 && discriminator == table->discrim_num
26839 && is_stmt == table->is_stmt)
26840 return;
26841
26842 switch_to_section (current_function_section ());
26843
26844 /* If requested, emit something human-readable. */
26845 if (flag_debug_asm)
26846 {
26847 if (debug_column_info)
26848 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
26849 filename, line, column);
26850 else
26851 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
26852 filename, line);
26853 }
26854
26855 if (DWARF2_ASM_LINE_DEBUG_INFO)
26856 {
26857 /* Emit the .loc directive understood by GNU as. */
26858       /* "\t.loc %u %u %u [is_stmt %u] [discriminator %u]",
26859 	 file_num, line, column, is_stmt, discriminator */
26860 fputs ("\t.loc ", asm_out_file);
26861 fprint_ul (asm_out_file, file_num);
26862 putc (' ', asm_out_file);
26863 fprint_ul (asm_out_file, line);
26864 putc (' ', asm_out_file);
26865 fprint_ul (asm_out_file, column);
26866
26867 if (is_stmt != table->is_stmt)
26868 {
26869 fputs (" is_stmt ", asm_out_file);
26870 putc (is_stmt ? '1' : '0', asm_out_file);
26871 }
26872 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
26873 {
26874 gcc_assert (discriminator > 0);
26875 fputs (" discriminator ", asm_out_file);
26876 fprint_ul (asm_out_file, (unsigned long) discriminator);
26877 }
26878 putc ('\n', asm_out_file);
26879 }
26880 else
26881 {
26882 unsigned int label_num = ++line_info_label_num;
26883
26884 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
26885
26886 push_dw_line_info_entry (table, LI_set_address, label_num);
26887 if (file_num != table->file_num)
26888 push_dw_line_info_entry (table, LI_set_file, file_num);
26889 if (discriminator != table->discrim_num)
26890 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
26891 if (is_stmt != table->is_stmt)
26892 push_dw_line_info_entry (table, LI_negate_stmt, 0);
26893 push_dw_line_info_entry (table, LI_set_line, line);
26894 if (debug_column_info)
26895 push_dw_line_info_entry (table, LI_set_column, column);
26896 }
26897
26898 table->file_num = file_num;
26899 table->line_num = line;
26900 table->column_num = column;
26901 table->discrim_num = discriminator;
26902 table->is_stmt = is_stmt;
26903 table->in_use = true;
26904 }
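/* With DWARF2_ASM_LINE_DEBUG_INFO the code above leaves the line table to the
   assembler, emitting for example
       .loc 1 42 7 is_stmt 0 discriminator 3
   (is_stmt only when it changes, discriminator only when nonzero); otherwise
   it records LI_* opcodes in cur_line_info_table for dwarf2out to emit itself
   into .debug_line.  */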
26905
26906 /* Record the beginning of a new source file. */
26907
26908 static void
26909 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
26910 {
26911 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26912 {
26913 macinfo_entry e;
26914 e.code = DW_MACINFO_start_file;
26915 e.lineno = lineno;
26916 e.info = ggc_strdup (filename);
26917 vec_safe_push (macinfo_table, e);
26918 }
26919 }
26920
26921 /* Record the end of a source file. */
26922
26923 static void
26924 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
26925 {
26926 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26927 {
26928 macinfo_entry e;
26929 e.code = DW_MACINFO_end_file;
26930 e.lineno = lineno;
26931 e.info = NULL;
26932 vec_safe_push (macinfo_table, e);
26933 }
26934 }
26935
26936 /* Called from debug_define in toplev.c. The `buffer' parameter contains
26937 the tail part of the directive line, i.e. the part which is past the
26938 initial whitespace, #, whitespace, directive-name, whitespace part. */
26939
26940 static void
26941 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
26942 const char *buffer ATTRIBUTE_UNUSED)
26943 {
26944 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26945 {
26946 macinfo_entry e;
26947 /* Insert a dummy first entry to be able to optimize the whole
26948 predefined macro block using DW_MACRO_import. */
26949 if (macinfo_table->is_empty () && lineno <= 1)
26950 {
26951 e.code = 0;
26952 e.lineno = 0;
26953 e.info = NULL;
26954 vec_safe_push (macinfo_table, e);
26955 }
26956 e.code = DW_MACINFO_define;
26957 e.lineno = lineno;
26958 e.info = ggc_strdup (buffer);
26959 vec_safe_push (macinfo_table, e);
26960 }
26961 }
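/* For example, "#define FOO 1" on line 3 arrives here with BUFFER "FOO 1" and
   is recorded as a DW_MACINFO_define entry with lineno 3; output_macinfo_op
   later emits it as the opcode byte, a uleb128 line number and the macro
   string (or as DW_MACRO_define_strp referencing .debug_str).  */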
26962
26963 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
26964 the tail part of the directive line, i.e. the part which is past the
26965 initial whitespace, #, whitespace, directive-name, whitespace part. */
26966
26967 static void
26968 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
26969 const char *buffer ATTRIBUTE_UNUSED)
26970 {
26971 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
26972 {
26973 macinfo_entry e;
26974 /* Insert a dummy first entry to be able to optimize the whole
26975 predefined macro block using DW_MACRO_import. */
26976 if (macinfo_table->is_empty () && lineno <= 1)
26977 {
26978 e.code = 0;
26979 e.lineno = 0;
26980 e.info = NULL;
26981 vec_safe_push (macinfo_table, e);
26982 }
26983 e.code = DW_MACINFO_undef;
26984 e.lineno = lineno;
26985 e.info = ggc_strdup (buffer);
26986 vec_safe_push (macinfo_table, e);
26987 }
26988 }
26989
26990 /* Helpers to manipulate hash table of CUs. */
26991
26992 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
26993 {
26994 static inline hashval_t hash (const macinfo_entry *);
26995 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
26996 };
26997
26998 inline hashval_t
26999 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27000 {
27001 return htab_hash_string (entry->info);
27002 }
27003
27004 inline bool
27005 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27006 const macinfo_entry *entry2)
27007 {
27008 return !strcmp (entry1->info, entry2->info);
27009 }
27010
27011 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27012
27013 /* Output a single .debug_macinfo entry. */
27014
27015 static void
27016 output_macinfo_op (macinfo_entry *ref)
27017 {
27018 int file_num;
27019 size_t len;
27020 struct indirect_string_node *node;
27021 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27022 struct dwarf_file_data *fd;
27023
27024 switch (ref->code)
27025 {
27026 case DW_MACINFO_start_file:
27027 fd = lookup_filename (ref->info);
27028 file_num = maybe_emit_file (fd);
27029 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
27030 dw2_asm_output_data_uleb128 (ref->lineno,
27031 "Included from line number %lu",
27032 (unsigned long) ref->lineno);
27033 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
27034 break;
27035 case DW_MACINFO_end_file:
27036 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
27037 break;
27038 case DW_MACINFO_define:
27039 case DW_MACINFO_undef:
27040 len = strlen (ref->info) + 1;
27041 if (!dwarf_strict
27042 && len > DWARF_OFFSET_SIZE
27043 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27044 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27045 {
27046 ref->code = ref->code == DW_MACINFO_define
27047 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
27048 output_macinfo_op (ref);
27049 return;
27050 }
27051 dw2_asm_output_data (1, ref->code,
27052 ref->code == DW_MACINFO_define
27053 ? "Define macro" : "Undefine macro");
27054 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27055 (unsigned long) ref->lineno);
27056 dw2_asm_output_nstring (ref->info, -1, "The macro");
27057 break;
27058 case DW_MACRO_define_strp:
27059 case DW_MACRO_undef_strp:
27060 node = find_AT_string (ref->info);
27061 gcc_assert (node
27062 && (node->form == DW_FORM_strp
27063 || node->form == DW_FORM_GNU_str_index));
27064 dw2_asm_output_data (1, ref->code,
27065 ref->code == DW_MACRO_define_strp
27066 ? "Define macro strp"
27067 : "Undefine macro strp");
27068 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27069 (unsigned long) ref->lineno);
27070 if (node->form == DW_FORM_strp)
27071 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
27072 debug_str_section, "The macro: \"%s\"",
27073 ref->info);
27074 else
27075 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
27076 ref->info);
27077 break;
27078 case DW_MACRO_import:
27079 dw2_asm_output_data (1, ref->code, "Import");
27080 ASM_GENERATE_INTERNAL_LABEL (label,
27081 DEBUG_MACRO_SECTION_LABEL,
27082 ref->lineno + macinfo_label_base);
27083 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
27084 break;
27085 default:
27086 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
27087 ASM_COMMENT_START, (unsigned long) ref->code);
27088 break;
27089 }
27090 }
27091
27092 /* Attempt to make a sequence of define/undef macinfo ops shareable with
27093    other compilation units' .debug_macinfo sections.  IDX is the index of
27094    the first define/undef op.  If the run can be shared, emit a
27095    DW_MACRO_import entry referencing a comdat .debug_macinfo section and
27096    return the number of ops that should be emitted into that section.
27097    If the define/undef entries should be emitted normally, return 0. */
27098
27099 static unsigned
27100 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
27101 macinfo_hash_type **macinfo_htab)
27102 {
27103 macinfo_entry *first, *second, *cur, *inc;
27104 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
27105 unsigned char checksum[16];
27106 struct md5_ctx ctx;
27107 char *grp_name, *tail;
27108 const char *base;
27109 unsigned int i, count, encoded_filename_len, linebuf_len;
27110 macinfo_entry **slot;
27111
27112 first = &(*macinfo_table)[idx];
27113 second = &(*macinfo_table)[idx + 1];
27114
27115 /* Optimize only if there are at least two consecutive define/undef ops,
27116      and either all of them are before the first DW_MACINFO_start_file
27117 with lineno {0,1} (i.e. predefined macro block), or all of them are
27118 in some included header file. */
27119 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
27120 return 0;
27121 if (vec_safe_is_empty (files))
27122 {
27123 if (first->lineno > 1 || second->lineno > 1)
27124 return 0;
27125 }
27126 else if (first->lineno == 0)
27127 return 0;
27128
27129 /* Find the last define/undef entry that can be grouped together
27130      with FIRST and at the same time compute an MD5 checksum of their
27131      codes, line numbers and strings. */
27132 md5_init_ctx (&ctx);
27133 for (i = idx; macinfo_table->iterate (i, &cur); i++)
27134 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
27135 break;
27136 else if (vec_safe_is_empty (files) && cur->lineno > 1)
27137 break;
27138 else
27139 {
27140 unsigned char code = cur->code;
27141 md5_process_bytes (&code, 1, &ctx);
27142 checksum_uleb128 (cur->lineno, &ctx);
27143 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
27144 }
27145 md5_finish_ctx (&ctx, checksum);
27146 count = i - idx;
27147
27148 /* From the containing include filename (if any) pick up just
27149 usable characters from its basename. */
27150 if (vec_safe_is_empty (files))
27151 base = "";
27152 else
27153 base = lbasename (files->last ().info);
27154 for (encoded_filename_len = 0, i = 0; base[i]; i++)
27155 if (ISIDNUM (base[i]) || base[i] == '.')
27156 encoded_filename_len++;
27157   /* Account for the '.' appended after the filename below. */
27158 if (encoded_filename_len)
27159 encoded_filename_len++;
27160
27161 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
27162 linebuf_len = strlen (linebuf);
27163
27164 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
27165 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
27166 + 16 * 2 + 1);
27167 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
27168 tail = grp_name + 4;
27169 if (encoded_filename_len)
27170 {
27171 for (i = 0; base[i]; i++)
27172 if (ISIDNUM (base[i]) || base[i] == '.')
27173 *tail++ = base[i];
27174 *tail++ = '.';
27175 }
27176 memcpy (tail, linebuf, linebuf_len);
27177 tail += linebuf_len;
27178 *tail++ = '.';
27179 for (i = 0; i < 16; i++)
27180 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
27181
27182 /* Construct a macinfo_entry for DW_MACRO_import
27183 in the empty vector entry before the first define/undef. */
27184 inc = &(*macinfo_table)[idx - 1];
27185 inc->code = DW_MACRO_import;
27186 inc->lineno = 0;
27187 inc->info = ggc_strdup (grp_name);
27188 if (!*macinfo_htab)
27189 *macinfo_htab = new macinfo_hash_type (10);
27190 /* Avoid emitting duplicates. */
27191 slot = (*macinfo_htab)->find_slot (inc, INSERT);
27192 if (*slot != NULL)
27193 {
27194 inc->code = 0;
27195 inc->info = NULL;
27196 /* If such an entry has been used before, just emit
27197 a DW_MACRO_import op. */
27198 inc = *slot;
27199 output_macinfo_op (inc);
27200 /* And clear all macinfo_entry in the range to avoid emitting them
27201 in the second pass. */
27202 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
27203 {
27204 cur->code = 0;
27205 cur->info = NULL;
27206 }
27207 }
27208 else
27209 {
27210 *slot = inc;
27211 inc->lineno = (*macinfo_htab)->elements ();
27212 output_macinfo_op (inc);
27213 }
27214 return count;
27215 }
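/* The comdat group names built above look roughly like
       wm4.stdio.h.42.<32 hex md5 digits>
   (offset size, basename of the including header if any, starting line
   number, checksum), so identical define/undef runs from different
   translation units can be merged by the linker; the exact name shown here
   is only illustrative.  */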
27216
27217 /* Save any strings needed by the macinfo table in the debug str
27218 table. All strings must be collected into the table by the time
27219 index_string is called. */
27220
27221 static void
27222 save_macinfo_strings (void)
27223 {
27224 unsigned len;
27225 unsigned i;
27226 macinfo_entry *ref;
27227
27228 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
27229 {
27230 switch (ref->code)
27231 {
27232 /* Match the logic in output_macinfo_op to decide on
27233 indirect strings. */
27234 case DW_MACINFO_define:
27235 case DW_MACINFO_undef:
27236 len = strlen (ref->info) + 1;
27237 if (!dwarf_strict
27238 && len > DWARF_OFFSET_SIZE
27239 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27240 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27241 set_indirect_string (find_AT_string (ref->info));
27242 break;
27243 case DW_MACRO_define_strp:
27244 case DW_MACRO_undef_strp:
27245 set_indirect_string (find_AT_string (ref->info));
27246 break;
27247 default:
27248 break;
27249 }
27250 }
27251 }
27252
27253 /* Output macinfo section(s). */
27254
27255 static void
27256 output_macinfo (const char *debug_line_label, bool early_lto_debug)
27257 {
27258 unsigned i;
27259 unsigned long length = vec_safe_length (macinfo_table);
27260 macinfo_entry *ref;
27261 vec<macinfo_entry, va_gc> *files = NULL;
27262 macinfo_hash_type *macinfo_htab = NULL;
27263 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
27264
27265 if (! length)
27266 return;
27267
27268 /* output_macinfo* uses these interchangeably. */
27269 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
27270 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
27271 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
27272 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
27273
27274 /* AIX Assembler inserts the length, so adjust the reference to match the
27275 offset expected by debuggers. */
27276 strcpy (dl_section_ref, debug_line_label);
27277 if (XCOFF_DEBUGGING_INFO)
27278 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
27279
27280 /* For .debug_macro emit the section header. */
27281 if (!dwarf_strict || dwarf_version >= 5)
27282 {
27283 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27284 "DWARF macro version number");
27285 if (DWARF_OFFSET_SIZE == 8)
27286 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
27287 else
27288 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
27289 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
27290 debug_line_section, NULL);
27291 }
27292
27293   /* In the first loop, emit the primary .debug_macinfo section and clear
27294      each macinfo_entry once it has been emitted.  If a longer range of
27295      define/undef ops can be optimized using DW_MACRO_import, the
27296      DW_MACRO_import op is emitted and kept in the vector entry before the
27297      first define/undef in the range, and the define/undef ops themselves
27298      are not emitted now but kept for the second loop below. */
27299 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27300 {
27301 switch (ref->code)
27302 {
27303 case DW_MACINFO_start_file:
27304 vec_safe_push (files, *ref);
27305 break;
27306 case DW_MACINFO_end_file:
27307 if (!vec_safe_is_empty (files))
27308 files->pop ();
27309 break;
27310 case DW_MACINFO_define:
27311 case DW_MACINFO_undef:
27312 if ((!dwarf_strict || dwarf_version >= 5)
27313 && HAVE_COMDAT_GROUP
27314 && vec_safe_length (files) != 1
27315 && i > 0
27316 && i + 1 < length
27317 && (*macinfo_table)[i - 1].code == 0)
27318 {
27319 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
27320 if (count)
27321 {
27322 i += count - 1;
27323 continue;
27324 }
27325 }
27326 break;
27327 case 0:
27328 /* A dummy entry may be inserted at the beginning to be able
27329 to optimize the whole block of predefined macros. */
27330 if (i == 0)
27331 continue;
27332 default:
27333 break;
27334 }
27335 output_macinfo_op (ref);
27336 ref->info = NULL;
27337 ref->code = 0;
27338 }
27339
27340 if (!macinfo_htab)
27341 return;
27342
27343 /* Save the number of transparent includes so we can adjust the
27344 label number for the fat LTO object DWARF. */
27345 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
27346
27347 delete macinfo_htab;
27348 macinfo_htab = NULL;
27349
27350   /* If any DW_MACRO_import ops were used, then at each such entry
27351      terminate the current chain, switch to a new comdat .debug_macinfo
27352      section and emit the define/undef entries within it. */
27353 for (i = 0; macinfo_table->iterate (i, &ref); i++)
27354 switch (ref->code)
27355 {
27356 case 0:
27357 continue;
27358 case DW_MACRO_import:
27359 {
27360 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27361 tree comdat_key = get_identifier (ref->info);
27362 /* Terminate the previous .debug_macinfo section. */
27363 dw2_asm_output_data (1, 0, "End compilation unit");
27364 targetm.asm_out.named_section (debug_macinfo_section_name,
27365 SECTION_DEBUG
27366 | SECTION_LINKONCE
27367 | (early_lto_debug
27368 ? SECTION_EXCLUDE : 0),
27369 comdat_key);
27370 ASM_GENERATE_INTERNAL_LABEL (label,
27371 DEBUG_MACRO_SECTION_LABEL,
27372 ref->lineno + macinfo_label_base);
27373 ASM_OUTPUT_LABEL (asm_out_file, label);
27374 ref->code = 0;
27375 ref->info = NULL;
27376 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
27377 "DWARF macro version number");
27378 if (DWARF_OFFSET_SIZE == 8)
27379 dw2_asm_output_data (1, 1, "Flags: 64-bit");
27380 else
27381 dw2_asm_output_data (1, 0, "Flags: 32-bit");
27382 }
27383 break;
27384 case DW_MACINFO_define:
27385 case DW_MACINFO_undef:
27386 output_macinfo_op (ref);
27387 ref->code = 0;
27388 ref->info = NULL;
27389 break;
27390 default:
27391 gcc_unreachable ();
27392 }
27393
27394 macinfo_label_base += macinfo_label_base_adj;
27395 }
27396
27397 /* Initialize the various sections and labels for dwarf output; if
27398    EARLY_LTO_DEBUG, set up the early LTO debug variants.  Returns the
27399    generation (zero based number of times the function was called). */
27400
27401 static unsigned
27402 init_sections_and_labels (bool early_lto_debug)
27403 {
27404 /* As we may get called multiple times have a generation count for
27405 labels. */
27406 static unsigned generation = 0;
27407
27408 if (early_lto_debug)
27409 {
27410 if (!dwarf_split_debug_info)
27411 {
27412 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27413 SECTION_DEBUG | SECTION_EXCLUDE,
27414 NULL);
27415 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
27416 SECTION_DEBUG | SECTION_EXCLUDE,
27417 NULL);
27418 debug_macinfo_section_name
27419 = ((dwarf_strict && dwarf_version < 5)
27420 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
27421 debug_macinfo_section = get_section (debug_macinfo_section_name,
27422 SECTION_DEBUG
27423 | SECTION_EXCLUDE, NULL);
27424 /* For macro info we have to refer to a debug_line section, so,
27425 similarly to split-dwarf, emit a skeleton one for early debug. */
27426 debug_skeleton_line_section
27427 = get_section (DEBUG_LTO_LINE_SECTION,
27428 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27429 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27430 DEBUG_SKELETON_LINE_SECTION_LABEL,
27431 generation);
27432 }
27433 else
27434 {
27435 /* ??? Which of the following do we need early? */
27436 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
27437 SECTION_DEBUG | SECTION_EXCLUDE,
27438 NULL);
27439 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
27440 SECTION_DEBUG | SECTION_EXCLUDE,
27441 NULL);
27442 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
27443 SECTION_DEBUG
27444 | SECTION_EXCLUDE, NULL);
27445 debug_skeleton_abbrev_section
27446 = get_section (DEBUG_LTO_ABBREV_SECTION,
27447 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27448 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27449 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27450 generation);
27451
27452 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27453 stay in the main .o, but the skeleton_line goes into the split
27454 off dwo. */
27455 debug_skeleton_line_section
27456 = get_section (DEBUG_LTO_LINE_SECTION,
27457 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27458 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27459 DEBUG_SKELETON_LINE_SECTION_LABEL,
27460 generation);
27461 debug_str_offsets_section
27462 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
27463 SECTION_DEBUG | SECTION_EXCLUDE,
27464 NULL);
27465 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27466 DEBUG_SKELETON_INFO_SECTION_LABEL,
27467 generation);
27468 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
27469 DEBUG_STR_DWO_SECTION_FLAGS,
27470 NULL);
27471 debug_macinfo_section_name
27472 = ((dwarf_strict && dwarf_version < 5)
27473 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
27474 debug_macinfo_section = get_section (debug_macinfo_section_name,
27475 SECTION_DEBUG | SECTION_EXCLUDE,
27476 NULL);
27477 }
27478 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
27479 DEBUG_STR_SECTION_FLAGS
27480 | SECTION_EXCLUDE, NULL);
27481 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27482 debug_line_str_section
27483 = get_section (DEBUG_LTO_LINE_STR_SECTION,
27484 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
27485 }
27486 else
27487 {
27488 if (!dwarf_split_debug_info)
27489 {
27490 debug_info_section = get_section (DEBUG_INFO_SECTION,
27491 SECTION_DEBUG, NULL);
27492 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27493 SECTION_DEBUG, NULL);
27494 debug_loc_section = get_section (dwarf_version >= 5
27495 ? DEBUG_LOCLISTS_SECTION
27496 : DEBUG_LOC_SECTION,
27497 SECTION_DEBUG, NULL);
27498 debug_macinfo_section_name
27499 = ((dwarf_strict && dwarf_version < 5)
27500 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
27501 debug_macinfo_section = get_section (debug_macinfo_section_name,
27502 SECTION_DEBUG, NULL);
27503 }
27504 else
27505 {
27506 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
27507 SECTION_DEBUG | SECTION_EXCLUDE,
27508 NULL);
27509 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
27510 SECTION_DEBUG | SECTION_EXCLUDE,
27511 NULL);
27512 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
27513 SECTION_DEBUG, NULL);
27514 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
27515 SECTION_DEBUG, NULL);
27516 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
27517 SECTION_DEBUG, NULL);
27518 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
27519 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
27520 generation);
27521
27522 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
27523 stay in the main .o, but the skeleton_line goes into the
27524 split off dwo. */
27525 debug_skeleton_line_section
27526 = get_section (DEBUG_DWO_LINE_SECTION,
27527 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27528 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
27529 DEBUG_SKELETON_LINE_SECTION_LABEL,
27530 generation);
27531 debug_str_offsets_section
27532 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
27533 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
27534 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
27535 DEBUG_SKELETON_INFO_SECTION_LABEL,
27536 generation);
27537 debug_loc_section = get_section (dwarf_version >= 5
27538 ? DEBUG_DWO_LOCLISTS_SECTION
27539 : DEBUG_DWO_LOC_SECTION,
27540 SECTION_DEBUG | SECTION_EXCLUDE,
27541 NULL);
27542 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
27543 DEBUG_STR_DWO_SECTION_FLAGS,
27544 NULL);
27545 debug_macinfo_section_name
27546 = ((dwarf_strict && dwarf_version < 5)
27547 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
27548 debug_macinfo_section = get_section (debug_macinfo_section_name,
27549 SECTION_DEBUG | SECTION_EXCLUDE,
27550 NULL);
27551 }
27552 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
27553 SECTION_DEBUG, NULL);
27554 debug_line_section = get_section (DEBUG_LINE_SECTION,
27555 SECTION_DEBUG, NULL);
27556 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
27557 SECTION_DEBUG, NULL);
27558 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
27559 SECTION_DEBUG, NULL);
27560 debug_str_section = get_section (DEBUG_STR_SECTION,
27561 DEBUG_STR_SECTION_FLAGS, NULL);
27562 if (!dwarf_split_debug_info && !DWARF2_ASM_LINE_DEBUG_INFO)
27563 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
27564 DEBUG_STR_SECTION_FLAGS, NULL);
27565 debug_ranges_section = get_section (dwarf_version >= 5
27566 ? DEBUG_RNGLISTS_SECTION
27567 : DEBUG_RANGES_SECTION,
27568 SECTION_DEBUG, NULL);
27569 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
27570 SECTION_DEBUG, NULL);
27571 }
27572
27573 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
27574 DEBUG_ABBREV_SECTION_LABEL, generation);
27575 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
27576 DEBUG_INFO_SECTION_LABEL, generation);
27577 info_section_emitted = false;
27578 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
27579 DEBUG_LINE_SECTION_LABEL, generation);
27580 /* There are up to 4 unique ranges labels per generation.
27581 See also output_rnglists. */
27582 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
27583 DEBUG_RANGES_SECTION_LABEL, generation * 4);
27584 if (dwarf_version >= 5 && dwarf_split_debug_info)
27585 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
27586 DEBUG_RANGES_SECTION_LABEL,
27587 1 + generation * 4);
27588 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
27589 DEBUG_ADDR_SECTION_LABEL, generation);
27590 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
27591 (dwarf_strict && dwarf_version < 5)
27592 ? DEBUG_MACINFO_SECTION_LABEL
27593 : DEBUG_MACRO_SECTION_LABEL, generation);
27594 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
27595 generation);
27596
27597 ++generation;
27598 return generation - 1;
27599 }
27600
27601 /* Set up for Dwarf output at the start of compilation. */
27602
27603 static void
27604 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
27605 {
27606 /* Allocate the file_table. */
27607 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
27608
27609 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27610 /* Allocate the decl_die_table. */
27611 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
27612
27613 /* Allocate the decl_loc_table. */
27614 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
27615
27616 /* Allocate the cached_dw_loc_list_table. */
27617 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
27618
27619 /* Allocate the initial hunk of the decl_scope_table. */
27620 vec_alloc (decl_scope_table, 256);
27621
27622 /* Allocate the initial hunk of the abbrev_die_table. */
27623 vec_alloc (abbrev_die_table, 256);
27624 /* Zero-th entry is allocated, but unused. */
27625 abbrev_die_table->quick_push (NULL);
27626
27627 /* Allocate the dwarf_proc_stack_usage_map. */
27628 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
27629
27630 /* Allocate the pubtypes and pubnames vectors. */
27631 vec_alloc (pubname_table, 32);
27632 vec_alloc (pubtype_table, 32);
27633
27634 vec_alloc (incomplete_types, 64);
27635
27636 vec_alloc (used_rtx_array, 32);
27637
27638 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27639 vec_alloc (macinfo_table, 64);
27640 #endif
27641
27642 /* If front-ends already registered a main translation unit but we were not
27643 ready to perform the association, do this now. */
27644 if (main_translation_unit != NULL_TREE)
27645 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
27646 }
27647
27648 /* Called before compile () starts outputting functions, variables
27649 and toplevel asms into assembly. */
27650
27651 static void
27652 dwarf2out_assembly_start (void)
27653 {
27654 if (text_section_line_info)
27655 return;
27656
27657 #ifndef DWARF2_LINENO_DEBUGGING_INFO
27658 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
27659 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
27660 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
27661 COLD_TEXT_SECTION_LABEL, 0);
27662 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
27663
27664 switch_to_section (text_section);
27665 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
27666 #endif
27667
27668 /* Make sure the line number table for .text always exists. */
27669 text_section_line_info = new_line_info_table ();
27670 text_section_line_info->end_label = text_end_label;
27671
27672 #ifdef DWARF2_LINENO_DEBUGGING_INFO
27673 cur_line_info_table = text_section_line_info;
27674 #endif
27675
27676 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
27677 && dwarf2out_do_cfi_asm ()
27678 && !dwarf2out_do_eh_frame ())
27679 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
27680 }
27681
27682 /* A helper function for dwarf2out_finish called through
27683 htab_traverse. Assign a string its index. All strings must be
27684 collected into the table by the time index_string is called,
27685 because the indexing code relies on htab_traverse to traverse nodes
27686 in the same order for each run. */
27687
27688 int
27689 index_string (indirect_string_node **h, unsigned int *index)
27690 {
27691 indirect_string_node *node = *h;
27692
27693 find_string_form (node);
27694 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27695 {
27696 gcc_assert (node->index == NO_INDEX_ASSIGNED);
27697 node->index = *index;
27698 *index += 1;
27699 }
27700 return 1;
27701 }
27702
27703 /* A helper function for output_indirect_strings called through
27704 htab_traverse. Output the offset to a string and update the
27705 current offset. */
27706
27707 int
27708 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
27709 {
27710 indirect_string_node *node = *h;
27711
27712 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27713 {
27714 /* Assert that this node has been assigned an index. */
27715 gcc_assert (node->index != NO_INDEX_ASSIGNED
27716 && node->index != NOT_INDEXED);
27717 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
27718 "indexed string 0x%x: %s", node->index, node->str);
27719 *offset += strlen (node->str) + 1;
27720 }
27721 return 1;
27722 }
27723
27724 /* A helper function for dwarf2out_finish called through
27725 htab_traverse. Output the indexed string. */
27726
27727 int
27728 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
27729 {
27730 struct indirect_string_node *node = *h;
27731
27732 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
27733 {
27734 /* Assert that the strings are output in the same order as their
27735 indexes were assigned. */
27736 gcc_assert (*cur_idx == node->index);
27737 assemble_string (node->str, strlen (node->str) + 1);
27738 *cur_idx += 1;
27739 }
27740 return 1;
27741 }
27742
27743 /* A helper function for dwarf2out_finish called through
27744 htab_traverse. Emit one queued .debug_str string. */
27745
27746 int
27747 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
27748 {
27749 struct indirect_string_node *node = *h;
27750
27751 node->form = find_string_form (node);
27752 if (node->form == form && node->refcount > 0)
27753 {
27754 ASM_OUTPUT_LABEL (asm_out_file, node->label);
27755 assemble_string (node->str, strlen (node->str) + 1);
27756 }
27757
27758 return 1;
27759 }
27760
27761 /* Output the indexed string table. */
27762
27763 static void
27764 output_indirect_strings (void)
27765 {
27766 switch_to_section (debug_str_section);
27767 if (!dwarf_split_debug_info)
27768 debug_str_hash->traverse<enum dwarf_form,
27769 output_indirect_string> (DW_FORM_strp);
27770 else
27771 {
27772 unsigned int offset = 0;
27773 unsigned int cur_idx = 0;
27774
27775 skeleton_debug_str_hash->traverse<enum dwarf_form,
27776 output_indirect_string> (DW_FORM_strp);
27777
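/* Emit the offsets table and then the strings themselves.  Both
   traversals depend on htab_traverse visiting the nodes in the same
   order in which index_string assigned the indexes.  */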
27778 switch_to_section (debug_str_offsets_section);
27779 debug_str_hash->traverse_noresize
27780 <unsigned int *, output_index_string_offset> (&offset);
27781 switch_to_section (debug_str_dwo_section);
27782 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
27783 (&cur_idx);
27784 }
27785 }
27786
27787 /* Callback for htab_traverse to assign an index to an entry in the
27788 table, and to write that entry to the .debug_addr section. */
27789
27790 int
27791 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
27792 {
27793 addr_table_entry *entry = *slot;
27794
27795 if (entry->refcount == 0)
27796 {
27797 gcc_assert (entry->index == NO_INDEX_ASSIGNED
27798 || entry->index == NOT_INDEXED);
27799 return 1;
27800 }
27801
27802 gcc_assert (entry->index == *cur_index);
27803 (*cur_index)++;
27804
27805 switch (entry->kind)
27806 {
27807 case ate_kind_rtx:
27808 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
27809 "0x%x", entry->index);
27810 break;
27811 case ate_kind_rtx_dtprel:
27812 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
27813 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
27814 DWARF2_ADDR_SIZE,
27815 entry->addr.rtl);
27816 fputc ('\n', asm_out_file);
27817 break;
27818 case ate_kind_label:
27819 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
27820 "0x%x", entry->index);
27821 break;
27822 default:
27823 gcc_unreachable ();
27824 }
27825 return 1;
27826 }
27827
27828 /* Produce the .debug_addr section. */
27829
27830 static void
27831 output_addr_table (void)
27832 {
27833 unsigned int index = 0;
27834 if (addr_index_table == NULL || addr_index_table->size () == 0)
27835 return;
27836
27837 switch_to_section (debug_addr_section);
27838 addr_index_table
27839 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
27840 }
27841
27842 #if ENABLE_ASSERT_CHECKING
27843 /* Verify that all marks are clear. */
27844
27845 static void
27846 verify_marks_clear (dw_die_ref die)
27847 {
27848 dw_die_ref c;
27849
27850 gcc_assert (! die->die_mark);
27851 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
27852 }
27853 #endif /* ENABLE_ASSERT_CHECKING */
27854
27855 /* Clear the marks for a die and its children.
27856 It is fine if a mark isn't set. */
27857
27858 static void
27859 prune_unmark_dies (dw_die_ref die)
27860 {
27861 dw_die_ref c;
27862
27863 if (die->die_mark)
27864 die->die_mark = 0;
27865 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
27866 }
27867
27868 /* Given LOC that is referenced by a DIE we're marking as used, find all
27869 DWARF procedures it references and mark them as used too. */
27870
27871 static void
27872 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
27873 {
27874 for (; loc != NULL; loc = loc->dw_loc_next)
27875 switch (loc->dw_loc_opc)
27876 {
27877 case DW_OP_implicit_pointer:
27878 case DW_OP_convert:
27879 case DW_OP_reinterpret:
27880 case DW_OP_GNU_implicit_pointer:
27881 case DW_OP_GNU_convert:
27882 case DW_OP_GNU_reinterpret:
27883 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
27884 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27885 break;
27886 case DW_OP_GNU_variable_value:
27887 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
27888 {
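/* Resolve the decl reference to its DIE now so that the reference
   can be marked like the other die_ref operands below.  */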
27889 dw_die_ref ref
27890 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
27891 if (ref == NULL)
27892 break;
27893 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
27894 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
27895 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
27896 }
27897 /* FALLTHRU */
27898 case DW_OP_call2:
27899 case DW_OP_call4:
27900 case DW_OP_call_ref:
27901 case DW_OP_const_type:
27902 case DW_OP_GNU_const_type:
27903 case DW_OP_GNU_parameter_ref:
27904 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
27905 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
27906 break;
27907 case DW_OP_regval_type:
27908 case DW_OP_deref_type:
27909 case DW_OP_GNU_regval_type:
27910 case DW_OP_GNU_deref_type:
27911 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
27912 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
27913 break;
27914 case DW_OP_entry_value:
27915 case DW_OP_GNU_entry_value:
27916 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
27917 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
27918 break;
27919 default:
27920 break;
27921 }
27922 }
27923
27924 /* Given DIE that we're marking as used, find any other DIEs
27925 it references through its attributes and mark them as used. */
27926
27927 static void
27928 prune_unused_types_walk_attribs (dw_die_ref die)
27929 {
27930 dw_attr_node *a;
27931 unsigned ix;
27932
27933 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
27934 {
27935 switch (AT_class (a))
27936 {
27937 /* Make sure DWARF procedures referenced by location descriptions will
27938 get emitted. */
27939 case dw_val_class_loc:
27940 prune_unused_types_walk_loc_descr (AT_loc (a));
27941 break;
27942 case dw_val_class_loc_list:
27943 for (dw_loc_list_ref list = AT_loc_list (a);
27944 list != NULL;
27945 list = list->dw_loc_next)
27946 prune_unused_types_walk_loc_descr (list->expr);
27947 break;
27948
27949 case dw_val_class_die_ref:
27950 /* A reference to another DIE.
27951 Make sure that it will get emitted.
27952 If it was broken out into a comdat group, don't follow it. */
27953 if (! AT_ref (a)->comdat_type_p
27954 || a->dw_attr == DW_AT_specification)
27955 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
27956 break;
27957
27958 case dw_val_class_str:
27959 /* Set the string's refcount to 0 so that prune_unused_types_mark
27960 accounts properly for it. */
27961 a->dw_attr_val.v.val_str->refcount = 0;
27962 break;
27963
27964 default:
27965 break;
27966 }
27967 }
27968 }
27969
27970 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
27971
27972 static void
27973 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
27974 {
27975 dw_die_ref c;
27976
27977 if (die == NULL || die->die_child == NULL)
27978 return;
27979 c = die->die_child;
27980 do
27981 {
27982 if (is_template_parameter (c))
27983 prune_unused_types_mark (c, 1);
27984 c = c->die_sib;
27985 } while (c && c != die->die_child);
27986 }
27987
27988 /* Mark DIE as being used. If DOKIDS is true, then walk down
27989 to DIE's children. */
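/* During pruning, die_mark acts as a small state machine: 0 means the
   DIE has not been visited, 1 means it has been marked as used, and 2
   means its children have been walked as well.  */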
27990
27991 static void
27992 prune_unused_types_mark (dw_die_ref die, int dokids)
27993 {
27994 dw_die_ref c;
27995
27996 if (die->die_mark == 0)
27997 {
27998 /* We haven't done this node yet. Mark it as used. */
27999 die->die_mark = 1;
28000 /* If this is the DIE of a generic type instantiation,
28001 mark the children DIEs that describe its generic parms and
28002 args. */
28003 prune_unused_types_mark_generic_parms_dies (die);
28004
28005 /* We also have to mark its parents as used.
28006 (But we don't want to mark our parent's kids due to this,
28007 unless it is a class.) */
28008 if (die->die_parent)
28009 prune_unused_types_mark (die->die_parent,
28010 class_scope_p (die->die_parent));
28011
28012 /* Mark any referenced nodes. */
28013 prune_unused_types_walk_attribs (die);
28014
28015 /* If this node is a specification,
28016 also mark the definition, if it exists. */
28017 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
28018 prune_unused_types_mark (die->die_definition, 1);
28019 }
28020
28021 if (dokids && die->die_mark != 2)
28022 {
28023 /* We need to walk the children, but haven't done so yet.
28024 Remember that we've walked the kids. */
28025 die->die_mark = 2;
28026
28027 /* If this is an array type, we need to make sure our
28028 kids get marked, even if they're types. If we're
28029 breaking out types into comdat sections, do this
28030 for all type definitions. */
28031 if (die->die_tag == DW_TAG_array_type
28032 || (use_debug_types
28033 && is_type_die (die) && ! is_declaration_die (die)))
28034 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
28035 else
28036 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28037 }
28038 }
28039
28040 /* For local classes, check whether any static member functions were
28041 emitted; if so, mark them. */
28042
28043 static void
28044 prune_unused_types_walk_local_classes (dw_die_ref die)
28045 {
28046 dw_die_ref c;
28047
28048 if (die->die_mark == 2)
28049 return;
28050
28051 switch (die->die_tag)
28052 {
28053 case DW_TAG_structure_type:
28054 case DW_TAG_union_type:
28055 case DW_TAG_class_type:
28056 break;
28057
28058 case DW_TAG_subprogram:
28059 if (!get_AT_flag (die, DW_AT_declaration)
28060 || die->die_definition != NULL)
28061 prune_unused_types_mark (die, 1);
28062 return;
28063
28064 default:
28065 return;
28066 }
28067
28068 /* Mark children. */
28069 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
28070 }
28071
28072 /* Walk the tree DIE and mark types that we actually use. */
28073
28074 static void
28075 prune_unused_types_walk (dw_die_ref die)
28076 {
28077 dw_die_ref c;
28078
28079 /* Don't do anything if this node is already marked and
28080 children have been marked as well. */
28081 if (die->die_mark == 2)
28082 return;
28083
28084 switch (die->die_tag)
28085 {
28086 case DW_TAG_structure_type:
28087 case DW_TAG_union_type:
28088 case DW_TAG_class_type:
28089 if (die->die_perennial_p)
28090 break;
28091
28092 for (c = die->die_parent; c; c = c->die_parent)
28093 if (c->die_tag == DW_TAG_subprogram)
28094 break;
28095
28096 /* Finding used static member functions inside of classes
28097 is needed just for local classes, because for other classes
28098 static member function DIEs with DW_AT_specification
28099 are emitted outside of the DW_TAG_*_type. If we ever change
28100 it, we'd need to call this even for non-local classes. */
28101 if (c)
28102 prune_unused_types_walk_local_classes (die);
28103
28104 /* It's a type node --- don't mark it. */
28105 return;
28106
28107 case DW_TAG_const_type:
28108 case DW_TAG_packed_type:
28109 case DW_TAG_pointer_type:
28110 case DW_TAG_reference_type:
28111 case DW_TAG_rvalue_reference_type:
28112 case DW_TAG_volatile_type:
28113 case DW_TAG_typedef:
28114 case DW_TAG_array_type:
28115 case DW_TAG_interface_type:
28116 case DW_TAG_friend:
28117 case DW_TAG_enumeration_type:
28118 case DW_TAG_subroutine_type:
28119 case DW_TAG_string_type:
28120 case DW_TAG_set_type:
28121 case DW_TAG_subrange_type:
28122 case DW_TAG_ptr_to_member_type:
28123 case DW_TAG_file_type:
28124 /* Type nodes are useful only when other DIEs reference them --- don't
28125 mark them. */
28126 /* FALLTHROUGH */
28127
28128 case DW_TAG_dwarf_procedure:
28129 /* Likewise for DWARF procedures. */
28130
28131 if (die->die_perennial_p)
28132 break;
28133
28134 return;
28135
28136 default:
28137 /* Mark everything else. */
28138 break;
28139 }
28140
28141 if (die->die_mark == 0)
28142 {
28143 die->die_mark = 1;
28144
28145 /* Now, mark any dies referenced from here. */
28146 prune_unused_types_walk_attribs (die);
28147 }
28148
28149 die->die_mark = 2;
28150
28151 /* Mark children. */
28152 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28153 }
28154
28155 /* Increment the string counts on strings referred to from DIE's
28156 attributes. */
28157
28158 static void
28159 prune_unused_types_update_strings (dw_die_ref die)
28160 {
28161 dw_attr_node *a;
28162 unsigned ix;
28163
28164 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28165 if (AT_class (a) == dw_val_class_str)
28166 {
28167 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
28168 s->refcount++;
28169 /* Avoid unnecessarily putting strings that are used less than
28170 twice in the hash table. */
28171 if (s->refcount
28172 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
28173 {
28174 indirect_string_node **slot
28175 = debug_str_hash->find_slot_with_hash (s->str,
28176 htab_hash_string (s->str),
28177 INSERT);
28178 gcc_assert (*slot == NULL);
28179 *slot = s;
28180 }
28181 }
28182 }
28183
28184 /* Mark DIE and its children as removed. */
28185
28186 static void
28187 mark_removed (dw_die_ref die)
28188 {
28189 dw_die_ref c;
28190 die->removed = true;
28191 FOR_EACH_CHILD (die, c, mark_removed (c));
28192 }
28193
28194 /* Remove from the tree DIE any dies that aren't marked. */
28195
28196 static void
28197 prune_unused_types_prune (dw_die_ref die)
28198 {
28199 dw_die_ref c;
28200
28201 gcc_assert (die->die_mark);
28202 prune_unused_types_update_strings (die);
28203
28204 if (! die->die_child)
28205 return;
28206
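/* The children of DIE form a circular singly-linked list through die_sib,
   with die_child pointing at the last child; walk that list, splice out
   the unmarked children and recurse into the marked ones.  */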
28207 c = die->die_child;
28208 do {
28209 dw_die_ref prev = c, next;
28210 for (c = c->die_sib; ! c->die_mark; c = next)
28211 if (c == die->die_child)
28212 {
28213 /* No marked children between 'prev' and the end of the list. */
28214 if (prev == c)
28215 /* No marked children at all. */
28216 die->die_child = NULL;
28217 else
28218 {
28219 prev->die_sib = c->die_sib;
28220 die->die_child = prev;
28221 }
28222 c->die_sib = NULL;
28223 mark_removed (c);
28224 return;
28225 }
28226 else
28227 {
28228 next = c->die_sib;
28229 c->die_sib = NULL;
28230 mark_removed (c);
28231 }
28232
28233 if (c != prev->die_sib)
28234 prev->die_sib = c;
28235 prune_unused_types_prune (c);
28236 } while (c != die->die_child);
28237 }
28238
28239 /* Remove dies representing declarations that we never use. */
28240
28241 static void
28242 prune_unused_types (void)
28243 {
28244 unsigned int i;
28245 limbo_die_node *node;
28246 comdat_type_node *ctnode;
28247 pubname_entry *pub;
28248 dw_die_ref base_type;
28249
28250 #if ENABLE_ASSERT_CHECKING
28251 /* All the marks should already be clear. */
28252 verify_marks_clear (comp_unit_die ());
28253 for (node = limbo_die_list; node; node = node->next)
28254 verify_marks_clear (node->die);
28255 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28256 verify_marks_clear (ctnode->root_die);
28257 #endif /* ENABLE_ASSERT_CHECKING */
28258
28259 /* Mark types that are used in global variables. */
28260 premark_types_used_by_global_vars ();
28261
28262 /* Set the mark on nodes that are actually used. */
28263 prune_unused_types_walk (comp_unit_die ());
28264 for (node = limbo_die_list; node; node = node->next)
28265 prune_unused_types_walk (node->die);
28266 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28267 {
28268 prune_unused_types_walk (ctnode->root_die);
28269 prune_unused_types_mark (ctnode->type_die, 1);
28270 }
28271
28272 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
28273 are unusual in that they are pubnames that are the children of pubtypes.
28274 They should only be marked via their parent DW_TAG_enumeration_type die,
28275 not as roots in themselves. */
28276 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
28277 if (pub->die->die_tag != DW_TAG_enumerator)
28278 prune_unused_types_mark (pub->die, 1);
28279 for (i = 0; base_types.iterate (i, &base_type); i++)
28280 prune_unused_types_mark (base_type, 1);
28281
28282 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
28283 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
28284 callees). */
28285 cgraph_node *cnode;
28286 FOR_EACH_FUNCTION (cnode)
28287 if (cnode->referred_to_p (false))
28288 {
28289 dw_die_ref die = lookup_decl_die (cnode->decl);
28290 if (die == NULL || die->die_mark)
28291 continue;
28292 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
28293 if (e->caller != cnode
28294 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
28295 {
28296 prune_unused_types_mark (die, 1);
28297 break;
28298 }
28299 }
28300
28301 if (debug_str_hash)
28302 debug_str_hash->empty ();
28303 if (skeleton_debug_str_hash)
28304 skeleton_debug_str_hash->empty ();
28305 prune_unused_types_prune (comp_unit_die ());
28306 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
28307 {
28308 node = *pnode;
28309 if (!node->die->die_mark)
28310 *pnode = node->next;
28311 else
28312 {
28313 prune_unused_types_prune (node->die);
28314 pnode = &node->next;
28315 }
28316 }
28317 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28318 prune_unused_types_prune (ctnode->root_die);
28319
28320 /* Leave the marks clear. */
28321 prune_unmark_dies (comp_unit_die ());
28322 for (node = limbo_die_list; node; node = node->next)
28323 prune_unmark_dies (node->die);
28324 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
28325 prune_unmark_dies (ctnode->root_die);
28326 }
28327
28328 /* Helpers to manipulate hash table of comdat type units. */
28329
28330 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
28331 {
28332 static inline hashval_t hash (const comdat_type_node *);
28333 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
28334 };
28335
28336 inline hashval_t
28337 comdat_type_hasher::hash (const comdat_type_node *type_node)
28338 {
28339 hashval_t h;
28340 memcpy (&h, type_node->signature, sizeof (h));
28341 return h;
28342 }
28343
28344 inline bool
28345 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
28346 const comdat_type_node *type_node_2)
28347 {
28348 return (! memcmp (type_node_1->signature, type_node_2->signature,
28349 DWARF_TYPE_SIGNATURE_SIZE));
28350 }
28351
28352 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to DIE to the
28353 location it would have occupied had the DECL_ASSEMBLER_NAME been known
28354 when the other attributes were added. This will probably improve the
28355 compactness of debug info by removing equivalent abbrevs, and hide any
28356 differences caused by deferring the computation of the assembler name,
28357 triggered by e.g. PCH. */
28358
28359 static inline void
28360 move_linkage_attr (dw_die_ref die)
28361 {
28362 unsigned ix = vec_safe_length (die->die_attr);
28363 dw_attr_node linkage = (*die->die_attr)[ix - 1];
28364
28365 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
28366 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
28367
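/* Search backwards for the attribute the linkage name should follow
   (DW_AT_name or the decl line/column) and reinsert it there.  */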
28368 while (--ix > 0)
28369 {
28370 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
28371
28372 if (prev->dw_attr == DW_AT_decl_line
28373 || prev->dw_attr == DW_AT_decl_column
28374 || prev->dw_attr == DW_AT_name)
28375 break;
28376 }
28377
28378 if (ix != vec_safe_length (die->die_attr) - 1)
28379 {
28380 die->die_attr->pop ();
28381 die->die_attr->quick_insert (ix, linkage);
28382 }
28383 }
28384
28385 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
28386 referenced from typed stack ops and count how often they are used. */
28387
28388 static void
28389 mark_base_types (dw_loc_descr_ref loc)
28390 {
28391 dw_die_ref base_type = NULL;
28392
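/* While resolving addresses, die_mark on DW_TAG_base_type DIEs is reused
   as a use count: base_type_cmp sorts by it and move_marked_base_types
   clears it again.  */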
28393 for (; loc; loc = loc->dw_loc_next)
28394 {
28395 switch (loc->dw_loc_opc)
28396 {
28397 case DW_OP_regval_type:
28398 case DW_OP_deref_type:
28399 case DW_OP_GNU_regval_type:
28400 case DW_OP_GNU_deref_type:
28401 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
28402 break;
28403 case DW_OP_convert:
28404 case DW_OP_reinterpret:
28405 case DW_OP_GNU_convert:
28406 case DW_OP_GNU_reinterpret:
28407 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
28408 continue;
28409 /* FALLTHRU */
28410 case DW_OP_const_type:
28411 case DW_OP_GNU_const_type:
28412 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
28413 break;
28414 case DW_OP_entry_value:
28415 case DW_OP_GNU_entry_value:
28416 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
28417 continue;
28418 default:
28419 continue;
28420 }
28421 gcc_assert (base_type->die_parent == comp_unit_die ());
28422 if (base_type->die_mark)
28423 base_type->die_mark++;
28424 else
28425 {
28426 base_types.safe_push (base_type);
28427 base_type->die_mark = 1;
28428 }
28429 }
28430 }
28431
28432 /* Comparison function for sorting marked base types. */
28433
28434 static int
28435 base_type_cmp (const void *x, const void *y)
28436 {
28437 dw_die_ref dx = *(const dw_die_ref *) x;
28438 dw_die_ref dy = *(const dw_die_ref *) y;
28439 unsigned int byte_size1, byte_size2;
28440 unsigned int encoding1, encoding2;
28441 unsigned int align1, align2;
28442 if (dx->die_mark > dy->die_mark)
28443 return -1;
28444 if (dx->die_mark < dy->die_mark)
28445 return 1;
28446 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
28447 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
28448 if (byte_size1 < byte_size2)
28449 return 1;
28450 if (byte_size1 > byte_size2)
28451 return -1;
28452 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
28453 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
28454 if (encoding1 < encoding2)
28455 return 1;
28456 if (encoding1 > encoding2)
28457 return -1;
28458 align1 = get_AT_unsigned (dx, DW_AT_alignment);
28459 align2 = get_AT_unsigned (dy, DW_AT_alignment);
28460 if (align1 < align2)
28461 return 1;
28462 if (align1 > align2)
28463 return -1;
28464 return 0;
28465 }
28466
28467 /* Move base types marked by mark_base_types as early as possible
28468 in the CU, sorted by decreasing usage count both to make the
28469 uleb128 references as small as possible and to make sure they
28470 will have die_offset already computed by calc_die_sizes when
28471 sizes of typed stack loc ops are computed. */
28472
28473 static void
28474 move_marked_base_types (void)
28475 {
28476 unsigned int i;
28477 dw_die_ref base_type, die, c;
28478
28479 if (base_types.is_empty ())
28480 return;
28481
28482 /* Sort by decreasing usage count, they will be added again in that
28483 order later on. */
28484 base_types.qsort (base_type_cmp);
28485 die = comp_unit_die ();
28486 c = die->die_child;
28487 do
28488 {
28489 dw_die_ref prev = c;
28490 c = c->die_sib;
28491 while (c->die_mark)
28492 {
28493 remove_child_with_prev (c, prev);
28494 /* As base types got marked, there must be at least
28495 one node other than DW_TAG_base_type. */
28496 gcc_assert (die->die_child != NULL);
28497 c = prev->die_sib;
28498 }
28499 }
28500 while (c != die->die_child);
28501 gcc_assert (die->die_child);
28502 c = die->die_child;
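/* Splice the sorted base types back in right after the last child, i.e.
   at the head of the CU's children list, so they come as early as
   possible in the CU.  */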
28503 for (i = 0; base_types.iterate (i, &base_type); i++)
28504 {
28505 base_type->die_mark = 0;
28506 base_type->die_sib = c->die_sib;
28507 c->die_sib = base_type;
28508 c = base_type;
28509 }
28510 }
28511
28512 /* Helper function for resolve_addr: attempt to resolve one CONST_STRING
28513 and return true if successful. Similarly verify that SYMBOL_REFs refer
28514 to variables emitted in the current CU. */
28515
28516 static bool
28517 resolve_one_addr (rtx *addr)
28518 {
28519 rtx rtl = *addr;
28520
28521 if (GET_CODE (rtl) == CONST_STRING)
28522 {
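/* Reconstruct the STRING_CST behind this CONST_STRING and check whether
   it was actually emitted to the constant pool; if so, refer to the
   pool symbol instead.  */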
28523 size_t len = strlen (XSTR (rtl, 0)) + 1;
28524 tree t = build_string (len, XSTR (rtl, 0));
28525 tree tlen = size_int (len - 1);
28526 TREE_TYPE (t)
28527 = build_array_type (char_type_node, build_index_type (tlen));
28528 rtl = lookup_constant_def (t);
28529 if (!rtl || !MEM_P (rtl))
28530 return false;
28531 rtl = XEXP (rtl, 0);
28532 if (GET_CODE (rtl) == SYMBOL_REF
28533 && SYMBOL_REF_DECL (rtl)
28534 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28535 return false;
28536 vec_safe_push (used_rtx_array, rtl);
28537 *addr = rtl;
28538 return true;
28539 }
28540
28541 if (GET_CODE (rtl) == SYMBOL_REF
28542 && SYMBOL_REF_DECL (rtl))
28543 {
28544 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
28545 {
28546 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
28547 return false;
28548 }
28549 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
28550 return false;
28551 }
28552
28553 if (GET_CODE (rtl) == CONST)
28554 {
28555 subrtx_ptr_iterator::array_type array;
28556 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
28557 if (!resolve_one_addr (*iter))
28558 return false;
28559 }
28560
28561 return true;
28562 }
28563
28564 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
28565 if possible, and, if the string hasn't been seen yet, create a
28566 DW_TAG_dwarf_procedure that can be referenced from DW_OP_implicit_pointer. */
28567
28568 static rtx
28569 string_cst_pool_decl (tree t)
28570 {
28571 rtx rtl = output_constant_def (t, 1);
28572 unsigned char *array;
28573 dw_loc_descr_ref l;
28574 tree decl;
28575 size_t len;
28576 dw_die_ref ref;
28577
28578 if (!rtl || !MEM_P (rtl))
28579 return NULL_RTX;
28580 rtl = XEXP (rtl, 0);
28581 if (GET_CODE (rtl) != SYMBOL_REF
28582 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
28583 return NULL_RTX;
28584
28585 decl = SYMBOL_REF_DECL (rtl);
28586 if (!lookup_decl_die (decl))
28587 {
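/* No DIE for the constant pool decl yet: create a DW_TAG_dwarf_procedure
   whose DW_AT_location is a DW_OP_implicit_value holding the string bytes,
   so that a DW_OP_implicit_pointer can refer to it.  */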
28588 len = TREE_STRING_LENGTH (t);
28589 vec_safe_push (used_rtx_array, rtl);
28590 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
28591 array = ggc_vec_alloc<unsigned char> (len);
28592 memcpy (array, TREE_STRING_POINTER (t), len);
28593 l = new_loc_descr (DW_OP_implicit_value, len, 0);
28594 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
28595 l->dw_loc_oprnd2.v.val_vec.length = len;
28596 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
28597 l->dw_loc_oprnd2.v.val_vec.array = array;
28598 add_AT_loc (ref, DW_AT_location, l);
28599 equate_decl_number_to_die (decl, ref);
28600 }
28601 return rtl;
28602 }
28603
28604 /* Helper function of resolve_addr_in_expr. LOC is
28605 a DW_OP_addr followed by DW_OP_stack_value, either at the start
28606 of an exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
28607 resolved. Replace the pair (both DW_OP_addr and DW_OP_stack_value)
28608 with DW_OP_implicit_pointer if possible and return true;
28609 if unsuccessful, return false. */
28610
28611 static bool
28612 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
28613 {
28614 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
28615 HOST_WIDE_INT offset = 0;
28616 dw_die_ref ref = NULL;
28617 tree decl;
28618
28619 if (GET_CODE (rtl) == CONST
28620 && GET_CODE (XEXP (rtl, 0)) == PLUS
28621 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
28622 {
28623 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
28624 rtl = XEXP (XEXP (rtl, 0), 0);
28625 }
28626 if (GET_CODE (rtl) == CONST_STRING)
28627 {
28628 size_t len = strlen (XSTR (rtl, 0)) + 1;
28629 tree t = build_string (len, XSTR (rtl, 0));
28630 tree tlen = size_int (len - 1);
28631
28632 TREE_TYPE (t)
28633 = build_array_type (char_type_node, build_index_type (tlen));
28634 rtl = string_cst_pool_decl (t);
28635 if (!rtl)
28636 return false;
28637 }
28638 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
28639 {
28640 decl = SYMBOL_REF_DECL (rtl);
28641 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
28642 {
28643 ref = lookup_decl_die (decl);
28644 if (ref && (get_AT (ref, DW_AT_location)
28645 || get_AT (ref, DW_AT_const_value)))
28646 {
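/* Rewrite the DW_OP_addr node in place, e.g.
     DW_OP_addr <sym+offset> DW_OP_stack_value
   becomes
     DW_OP_implicit_pointer <DIE of sym> <offset>
   with the trailing DW_OP_stack_value unlinked from the chain.  */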
28647 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
28648 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28649 loc->dw_loc_oprnd1.val_entry = NULL;
28650 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28651 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28652 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28653 loc->dw_loc_oprnd2.v.val_int = offset;
28654 return true;
28655 }
28656 }
28657 }
28658 return false;
28659 }
28660
28661 /* Helper function for resolve_addr: handle one location expression and
28662 return false if at least one CONST_STRING or SYMBOL_REF in it
28663 couldn't be resolved. */
28664
28665 static bool
28666 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
28667 {
28668 dw_loc_descr_ref keep = NULL;
28669 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
28670 switch (loc->dw_loc_opc)
28671 {
28672 case DW_OP_addr:
28673 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28674 {
28675 if ((prev == NULL
28676 || prev->dw_loc_opc == DW_OP_piece
28677 || prev->dw_loc_opc == DW_OP_bit_piece)
28678 && loc->dw_loc_next
28679 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
28680 && (!dwarf_strict || dwarf_version >= 5)
28681 && optimize_one_addr_into_implicit_ptr (loc))
28682 break;
28683 return false;
28684 }
28685 break;
28686 case DW_OP_GNU_addr_index:
28687 case DW_OP_GNU_const_index:
28688 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
28689 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
28690 {
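/* Re-resolve the address backing this index and install a fresh
   address table entry for it.  */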
28691 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
28692 if (!resolve_one_addr (&rtl))
28693 return false;
28694 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
28695 loc->dw_loc_oprnd1.val_entry
28696 = add_addr_table_entry (rtl, ate_kind_rtx);
28697 }
28698 break;
28699 case DW_OP_const4u:
28700 case DW_OP_const8u:
28701 if (loc->dtprel
28702 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
28703 return false;
28704 break;
28705 case DW_OP_plus_uconst:
28706 if (size_of_loc_descr (loc)
28707 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
28708 + 1
28709 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
28710 {
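/* An explicit constant push followed by DW_OP_plus encodes more compactly
   than this DW_OP_plus_uconst, so substitute it.  */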
28711 dw_loc_descr_ref repl
28712 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
28713 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
28714 add_loc_descr (&repl, loc->dw_loc_next);
28715 *loc = *repl;
28716 }
28717 break;
28718 case DW_OP_implicit_value:
28719 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
28720 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
28721 return false;
28722 break;
28723 case DW_OP_implicit_pointer:
28724 case DW_OP_GNU_implicit_pointer:
28725 case DW_OP_GNU_parameter_ref:
28726 case DW_OP_GNU_variable_value:
28727 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28728 {
28729 dw_die_ref ref
28730 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28731 if (ref == NULL)
28732 return false;
28733 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28734 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28735 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28736 }
28737 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
28738 {
28739 if (prev == NULL
28740 && loc->dw_loc_next == NULL
28741 && AT_class (a) == dw_val_class_loc)
28742 switch (a->dw_attr)
28743 {
28744 /* The following attributes allow both exprloc and reference classes,
28745 so if the whole expression is a lone DW_OP_GNU_variable_value
28746 we can transform it into a reference. */
28747 case DW_AT_byte_size:
28748 case DW_AT_bit_size:
28749 case DW_AT_lower_bound:
28750 case DW_AT_upper_bound:
28751 case DW_AT_bit_stride:
28752 case DW_AT_count:
28753 case DW_AT_allocated:
28754 case DW_AT_associated:
28755 case DW_AT_byte_stride:
28756 a->dw_attr_val.val_class = dw_val_class_die_ref;
28757 a->dw_attr_val.val_entry = NULL;
28758 a->dw_attr_val.v.val_die_ref.die
28759 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28760 a->dw_attr_val.v.val_die_ref.external = 0;
28761 return true;
28762 default:
28763 break;
28764 }
28765 if (dwarf_strict)
28766 return false;
28767 }
28768 break;
28769 case DW_OP_const_type:
28770 case DW_OP_regval_type:
28771 case DW_OP_deref_type:
28772 case DW_OP_convert:
28773 case DW_OP_reinterpret:
28774 case DW_OP_GNU_const_type:
28775 case DW_OP_GNU_regval_type:
28776 case DW_OP_GNU_deref_type:
28777 case DW_OP_GNU_convert:
28778 case DW_OP_GNU_reinterpret:
28779 while (loc->dw_loc_next
28780 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
28781 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
28782 {
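/* Try to fold away the following DW_OP_convert: if this op's base type
   and the convert's target have the same byte size and compatible
   encodings, retarget this op at the later base type and drop the
   convert.  */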
28783 dw_die_ref base1, base2;
28784 unsigned enc1, enc2, size1, size2;
28785 if (loc->dw_loc_opc == DW_OP_regval_type
28786 || loc->dw_loc_opc == DW_OP_deref_type
28787 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28788 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28789 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
28790 else if (loc->dw_loc_oprnd1.val_class
28791 == dw_val_class_unsigned_const)
28792 break;
28793 else
28794 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
28795 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
28796 == dw_val_class_unsigned_const)
28797 break;
28798 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
28799 gcc_assert (base1->die_tag == DW_TAG_base_type
28800 && base2->die_tag == DW_TAG_base_type);
28801 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
28802 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
28803 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
28804 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
28805 if (size1 == size2
28806 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
28807 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
28808 && loc != keep)
28809 || enc1 == enc2))
28810 {
28811 /* Optimize away next DW_OP_convert after
28812 adjusting LOC's base type die reference. */
28813 if (loc->dw_loc_opc == DW_OP_regval_type
28814 || loc->dw_loc_opc == DW_OP_deref_type
28815 || loc->dw_loc_opc == DW_OP_GNU_regval_type
28816 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
28817 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
28818 else
28819 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
28820 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
28821 continue;
28822 }
28823 /* Don't change integer DW_OP_convert after e.g. floating
28824 point typed stack entry. */
28825 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
28826 keep = loc->dw_loc_next;
28827 break;
28828 }
28829 break;
28830 default:
28831 break;
28832 }
28833 return true;
28834 }
28835
28836 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
28837 of a lone DW_OP_addr whose operand referred to DECL, and that address
28838 couldn't be resolved. resolve_addr has already removed the
28839 DW_AT_location attribute. This function attempts to add a new
28840 DW_AT_location attribute using DW_OP_implicit_pointer, or a
28841 DW_AT_const_value attribute, if possible. */
28842
28843 static void
28844 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
28845 {
28846 if (!VAR_P (decl)
28847 || lookup_decl_die (decl) != die
28848 || DECL_EXTERNAL (decl)
28849 || !TREE_STATIC (decl)
28850 || DECL_INITIAL (decl) == NULL_TREE
28851 || DECL_P (DECL_INITIAL (decl))
28852 || get_AT (die, DW_AT_const_value))
28853 return;
28854
28855 tree init = DECL_INITIAL (decl);
28856 HOST_WIDE_INT offset = 0;
28857 /* For variables that have been optimized away and thus
28858 don't have a memory location, see if we can emit
28859 DW_AT_const_value instead. */
28860 if (tree_add_const_value_attribute (die, init))
28861 return;
28862 if (dwarf_strict && dwarf_version < 5)
28863 return;
28864 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
28865 and ADDR_EXPR refers to a decl that has DW_AT_location or
28866 DW_AT_const_value (but isn't addressable, otherwise
28867 resolving the original DW_OP_addr wouldn't fail), see if
28868 we can add DW_OP_implicit_pointer. */
28869 STRIP_NOPS (init);
28870 if (TREE_CODE (init) == POINTER_PLUS_EXPR
28871 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
28872 {
28873 offset = tree_to_shwi (TREE_OPERAND (init, 1));
28874 init = TREE_OPERAND (init, 0);
28875 STRIP_NOPS (init);
28876 }
28877 if (TREE_CODE (init) != ADDR_EXPR)
28878 return;
28879 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
28880 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
28881 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
28882 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
28883 && TREE_OPERAND (init, 0) != decl))
28884 {
28885 dw_die_ref ref;
28886 dw_loc_descr_ref l;
28887
28888 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
28889 {
28890 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
28891 if (!rtl)
28892 return;
28893 decl = SYMBOL_REF_DECL (rtl);
28894 }
28895 else
28896 decl = TREE_OPERAND (init, 0);
28897 ref = lookup_decl_die (decl);
28898 if (ref == NULL
28899 || (!get_AT (ref, DW_AT_location)
28900 && !get_AT (ref, DW_AT_const_value)))
28901 return;
28902 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
28903 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28904 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
28905 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
28906 add_AT_loc (die, DW_AT_location, l);
28907 }
28908 }
28909
28910 /* Return NULL if L is a plain DWARF expression, or the first op that
28911 is not valid in a DWARF expression. */
28912
28913 static dw_loc_descr_ref
28914 non_dwarf_expression (dw_loc_descr_ref l)
28915 {
28916 while (l)
28917 {
28918 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28919 return l;
28920 switch (l->dw_loc_opc)
28921 {
28922 case DW_OP_regx:
28923 case DW_OP_implicit_value:
28924 case DW_OP_stack_value:
28925 case DW_OP_implicit_pointer:
28926 case DW_OP_GNU_implicit_pointer:
28927 case DW_OP_GNU_parameter_ref:
28928 case DW_OP_piece:
28929 case DW_OP_bit_piece:
28930 return l;
28931 default:
28932 break;
28933 }
28934 l = l->dw_loc_next;
28935 }
28936 return NULL;
28937 }
28938
28939 /* Return an adjusted copy of EXPR:
28940 If it is an empty DWARF expression, return it.
28941 If it is a valid non-empty DWARF expression,
28942 return a copy of EXPR with DW_OP_deref appended to it.
28943 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
28944 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
28945 If it is a DWARF expression followed by DW_OP_stack_value, return a
28946 copy of the DWARF expression without anything appended.
28947 Otherwise, return NULL. */
28948
28949 static dw_loc_descr_ref
28950 copy_deref_exprloc (dw_loc_descr_ref expr)
28951 {
28952 dw_loc_descr_ref tail = NULL;
28953
28954 if (expr == NULL)
28955 return NULL;
28956
28957 dw_loc_descr_ref l = non_dwarf_expression (expr);
28958 if (l && l->dw_loc_next)
28959 return NULL;
28960
28961 if (l)
28962 {
28963 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
28964 tail = new_loc_descr ((enum dwarf_location_atom)
28965 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
28966 0, 0);
28967 else
28968 switch (l->dw_loc_opc)
28969 {
28970 case DW_OP_regx:
28971 tail = new_loc_descr (DW_OP_bregx,
28972 l->dw_loc_oprnd1.v.val_unsigned, 0);
28973 break;
28974 case DW_OP_stack_value:
28975 break;
28976 default:
28977 return NULL;
28978 }
28979 }
28980 else
28981 tail = new_loc_descr (DW_OP_deref, 0, 0);
28982
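/* Copy every op up to (but not including) the terminating non-expression
   op L, then append TAIL (NULL when the expression ended in
   DW_OP_stack_value, i.e. nothing is appended).  */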
28983 dw_loc_descr_ref ret = NULL, *p = &ret;
28984 while (expr != l)
28985 {
28986 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
28987 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
28988 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
28989 p = &(*p)->dw_loc_next;
28990 expr = expr->dw_loc_next;
28991 }
28992 *p = tail;
28993 return ret;
28994 }
28995
28996 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
28997 reference to a variable or argument, adjust it if needed and return:
28998 -1 if the DW_AT_string_length attribute (and any
28999 DW_AT_{string_length_,}byte_size attribute) should be removed,
29000 0 if the attribute should be kept, perhaps with minor modifications
29001 (no rescan needed), 1 if the attribute has been successfully adjusted. */
29002
29003 static int
29004 optimize_string_length (dw_attr_node *a)
29005 {
29006 dw_loc_descr_ref l = AT_loc (a), lv;
29007 dw_die_ref die;
29008 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29009 {
29010 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29011 die = lookup_decl_die (decl);
29012 if (die)
29013 {
29014 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29015 l->dw_loc_oprnd1.v.val_die_ref.die = die;
29016 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29017 }
29018 else
29019 return -1;
29020 }
29021 else
29022 die = l->dw_loc_oprnd1.v.val_die_ref.die;
29023
29024 /* DWARF 5 allows the reference class, so we can simply reference the DIE.
29025 Only do this for the DW_OP_GNU_variable_value DW_OP_stack_value form. */
29026 if (l->dw_loc_next != NULL && dwarf_version >= 5)
29027 {
29028 a->dw_attr_val.val_class = dw_val_class_die_ref;
29029 a->dw_attr_val.val_entry = NULL;
29030 a->dw_attr_val.v.val_die_ref.die = die;
29031 a->dw_attr_val.v.val_die_ref.external = 0;
29032 return 0;
29033 }
29034
29035 dw_attr_node *av = get_AT (die, DW_AT_location);
29036 dw_loc_list_ref d;
29037 bool non_dwarf_expr = false;
29038
29039 if (av == NULL)
29040 return dwarf_strict ? -1 : 0;
29041 switch (AT_class (av))
29042 {
29043 case dw_val_class_loc_list:
29044 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29045 if (d->expr && non_dwarf_expression (d->expr))
29046 non_dwarf_expr = true;
29047 break;
29048 case dw_val_class_loc:
29049 lv = AT_loc (av);
29050 if (lv == NULL)
29051 return dwarf_strict ? -1 : 0;
29052 if (non_dwarf_expression (lv))
29053 non_dwarf_expr = true;
29054 break;
29055 default:
29056 return dwarf_strict ? -1 : 0;
29057 }
29058
29059 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
29060 into DW_OP_call4 or DW_OP_GNU_variable_value into
29061 DW_OP_call4 DW_OP_deref, do so. */
29062 if (!non_dwarf_expr
29063 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
29064 {
29065 l->dw_loc_opc = DW_OP_call4;
29066 if (l->dw_loc_next)
29067 l->dw_loc_next = NULL;
29068 else
29069 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
29070 return 0;
29071 }
29072
29073 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
29074 copy over the DW_AT_location attribute from die to a. */
29075 if (l->dw_loc_next != NULL)
29076 {
29077 a->dw_attr_val = av->dw_attr_val;
29078 return 1;
29079 }
29080
29081 dw_loc_list_ref list, *p;
29082 switch (AT_class (av))
29083 {
29084 case dw_val_class_loc_list:
29085 p = &list;
29086 list = NULL;
29087 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29088 {
29089 lv = copy_deref_exprloc (d->expr);
29090 if (lv)
29091 {
29092 *p = new_loc_list (lv, d->begin, d->end, d->section);
29093 p = &(*p)->dw_loc_next;
29094 }
29095 else if (!dwarf_strict && d->expr)
29096 return 0;
29097 }
29098 if (list == NULL)
29099 return dwarf_strict ? -1 : 0;
29100 a->dw_attr_val.val_class = dw_val_class_loc_list;
29101 gen_llsym (list);
29102 *AT_loc_list_ptr (a) = list;
29103 return 1;
29104 case dw_val_class_loc:
29105 lv = copy_deref_exprloc (AT_loc (av));
29106 if (lv == NULL)
29107 return dwarf_strict ? -1 : 0;
29108 a->dw_attr_val.v.val_loc = lv;
29109 return 1;
29110 default:
29111 gcc_unreachable ();
29112 }
29113 }
29114
29115 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
29116 an address in .rodata section if the string literal is emitted there,
29117 or remove the containing location list or replace DW_AT_const_value
29118 with DW_AT_location and empty location expression, if it isn't found
29119 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
29120 to something that has been emitted in the current CU. */
29121
29122 static void
29123 resolve_addr (dw_die_ref die)
29124 {
29125 dw_die_ref c;
29126 dw_attr_node *a;
29127 dw_loc_list_ref *curr, *start, loc;
29128 unsigned ix;
29129 bool remove_AT_byte_size = false;
29130
29131 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29132 switch (AT_class (a))
29133 {
29134 case dw_val_class_loc_list:
29135 start = curr = AT_loc_list_ptr (a);
29136 loc = *curr;
29137 gcc_assert (loc);
29138 /* The same list can be referenced more than once. See if we have
29139 already recorded the result from a previous pass. */
29140 if (loc->replaced)
29141 *curr = loc->dw_loc_next;
29142 else if (!loc->resolved_addr)
29143 {
29144 /* As things stand, we do not expect or allow one die to
29145 reference a suffix of another die's location list chain.
29146 References must be identical or completely separate.
29147 There is therefore no need to cache the result of this
29148 pass on any list other than the first; doing so
29149 would lead to unnecessary writes. */
29150 while (*curr)
29151 {
29152 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
29153 if (!resolve_addr_in_expr (a, (*curr)->expr))
29154 {
29155 dw_loc_list_ref next = (*curr)->dw_loc_next;
29156 dw_loc_descr_ref l = (*curr)->expr;
29157
29158 if (next && (*curr)->ll_symbol)
29159 {
29160 gcc_assert (!next->ll_symbol);
29161 next->ll_symbol = (*curr)->ll_symbol;
29162 }
29163 if (dwarf_split_debug_info)
29164 remove_loc_list_addr_table_entries (l);
29165 *curr = next;
29166 }
29167 else
29168 {
29169 mark_base_types ((*curr)->expr);
29170 curr = &(*curr)->dw_loc_next;
29171 }
29172 }
29173 if (loc == *start)
29174 loc->resolved_addr = 1;
29175 else
29176 {
29177 loc->replaced = 1;
29178 loc->dw_loc_next = *start;
29179 }
29180 }
29181 if (!*start)
29182 {
29183 remove_AT (die, a->dw_attr);
29184 ix--;
29185 }
29186 break;
29187 case dw_val_class_loc:
29188 {
29189 dw_loc_descr_ref l = AT_loc (a);
29190 /* DW_OP_GNU_variable_value DW_OP_stack_value or
29191 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
29192 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
29193 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
29194 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
29195 with DW_FORM_ref referencing the same DIE as
29196 DW_OP_GNU_variable_value used to reference. */
29197 if (a->dw_attr == DW_AT_string_length
29198 && l
29199 && l->dw_loc_opc == DW_OP_GNU_variable_value
29200 && (l->dw_loc_next == NULL
29201 || (l->dw_loc_next->dw_loc_next == NULL
29202 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
29203 {
29204 switch (optimize_string_length (a))
29205 {
29206 case -1:
29207 remove_AT (die, a->dw_attr);
29208 ix--;
29209 /* If we drop DW_AT_string_length, we also need to drop
29210 DW_AT_{string_length_,}byte_size. */
29211 remove_AT_byte_size = true;
29212 continue;
29213 default:
29214 break;
29215 case 1:
29216 /* Even if we keep the optimized DW_AT_string_length,
29217 it might have changed AT_class, so process it again. */
29218 ix--;
29219 continue;
29220 }
29221 }
29222 /* For -gdwarf-2 don't attempt to optimize
29223 DW_AT_data_member_location containing
29224 DW_OP_plus_uconst - older consumers might
29225 rely on it being that op instead of a more complex,
29226 but shorter, location description. */
29227 if ((dwarf_version > 2
29228 || a->dw_attr != DW_AT_data_member_location
29229 || l == NULL
29230 || l->dw_loc_opc != DW_OP_plus_uconst
29231 || l->dw_loc_next != NULL)
29232 && !resolve_addr_in_expr (a, l))
29233 {
29234 if (dwarf_split_debug_info)
29235 remove_loc_list_addr_table_entries (l);
29236 if (l != NULL
29237 && l->dw_loc_next == NULL
29238 && l->dw_loc_opc == DW_OP_addr
29239 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
29240 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
29241 && a->dw_attr == DW_AT_location)
29242 {
29243 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
29244 remove_AT (die, a->dw_attr);
29245 ix--;
29246 optimize_location_into_implicit_ptr (die, decl);
29247 break;
29248 }
29249 if (a->dw_attr == DW_AT_string_length)
29250 /* If we drop DW_AT_string_length, we also need to drop
29251 DW_AT_{string_length_,}byte_size. */
29252 remove_AT_byte_size = true;
29253 remove_AT (die, a->dw_attr);
29254 ix--;
29255 }
29256 else
29257 mark_base_types (l);
29258 }
29259 break;
29260 case dw_val_class_addr:
29261 if (a->dw_attr == DW_AT_const_value
29262 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
29263 {
29264 if (AT_index (a) != NOT_INDEXED)
29265 remove_addr_table_entry (a->dw_attr_val.val_entry);
29266 remove_AT (die, a->dw_attr);
29267 ix--;
29268 }
29269 if ((die->die_tag == DW_TAG_call_site
29270 && a->dw_attr == DW_AT_call_origin)
29271 || (die->die_tag == DW_TAG_GNU_call_site
29272 && a->dw_attr == DW_AT_abstract_origin))
29273 {
29274 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
29275 dw_die_ref tdie = lookup_decl_die (tdecl);
29276 dw_die_ref cdie;
29277 if (tdie == NULL
29278 && DECL_EXTERNAL (tdecl)
29279 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
29280 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
29281 {
29282 dw_die_ref pdie = cdie;
29283 /* Make sure we don't add these DIEs into type units.
29284 We could emit skeleton DIEs for context (namespaces,
29285 outer structs/classes) and a skeleton DIE for the
29286 innermost context with DW_AT_signature pointing to the
29287 type unit. See PR78835. */
29288 while (pdie && pdie->die_tag != DW_TAG_type_unit)
29289 pdie = pdie->die_parent;
29290 if (pdie == NULL)
29291 {
29292 /* Creating a full DIE for tdecl is overly expensive and
29293 at this point even wrong when in the LTO phase
29294 as it can end up generating new type DIEs we didn't
29295 output and thus optimize_external_refs will crash. */
29296 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
29297 add_AT_flag (tdie, DW_AT_external, 1);
29298 add_AT_flag (tdie, DW_AT_declaration, 1);
29299 add_linkage_attr (tdie, tdecl);
29300 add_name_and_src_coords_attributes (tdie, tdecl, true);
29301 equate_decl_number_to_die (tdecl, tdie);
29302 }
29303 }
29304 if (tdie)
29305 {
29306 a->dw_attr_val.val_class = dw_val_class_die_ref;
29307 a->dw_attr_val.v.val_die_ref.die = tdie;
29308 a->dw_attr_val.v.val_die_ref.external = 0;
29309 }
29310 else
29311 {
29312 if (AT_index (a) != NOT_INDEXED)
29313 remove_addr_table_entry (a->dw_attr_val.val_entry);
29314 remove_AT (die, a->dw_attr);
29315 ix--;
29316 }
29317 }
29318 break;
29319 default:
29320 break;
29321 }
29322
29323 if (remove_AT_byte_size)
29324 remove_AT (die, dwarf_version >= 5
29325 ? DW_AT_string_length_byte_size
29326 : DW_AT_byte_size);
29327
29328 FOR_EACH_CHILD (die, c, resolve_addr (c));
29329 }
29330 \f
29331 /* Helper routines for optimize_location_lists.
29332 This pass tries to share identical location lists in .debug_loc
29333 section. */
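/* For instance (a sketch): when two DIEs carry byte-for-byte identical
   location lists (same begin/end labels, same expressions), the hashing
   below lets both DW_AT_location attributes point at a single
   dw_loc_list_struct, so only one list is emitted in .debug_loc and
   referenced twice.  */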
29334
29335 /* Iteratively hash operands of LOC opcode into HSTATE. */
29336
29337 static void
29338 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
29339 {
29340 dw_val_ref val1 = &loc->dw_loc_oprnd1;
29341 dw_val_ref val2 = &loc->dw_loc_oprnd2;
29342
29343 switch (loc->dw_loc_opc)
29344 {
29345 case DW_OP_const4u:
29346 case DW_OP_const8u:
29347 if (loc->dtprel)
29348 goto hash_addr;
29349 /* FALLTHRU */
29350 case DW_OP_const1u:
29351 case DW_OP_const1s:
29352 case DW_OP_const2u:
29353 case DW_OP_const2s:
29354 case DW_OP_const4s:
29355 case DW_OP_const8s:
29356 case DW_OP_constu:
29357 case DW_OP_consts:
29358 case DW_OP_pick:
29359 case DW_OP_plus_uconst:
29360 case DW_OP_breg0:
29361 case DW_OP_breg1:
29362 case DW_OP_breg2:
29363 case DW_OP_breg3:
29364 case DW_OP_breg4:
29365 case DW_OP_breg5:
29366 case DW_OP_breg6:
29367 case DW_OP_breg7:
29368 case DW_OP_breg8:
29369 case DW_OP_breg9:
29370 case DW_OP_breg10:
29371 case DW_OP_breg11:
29372 case DW_OP_breg12:
29373 case DW_OP_breg13:
29374 case DW_OP_breg14:
29375 case DW_OP_breg15:
29376 case DW_OP_breg16:
29377 case DW_OP_breg17:
29378 case DW_OP_breg18:
29379 case DW_OP_breg19:
29380 case DW_OP_breg20:
29381 case DW_OP_breg21:
29382 case DW_OP_breg22:
29383 case DW_OP_breg23:
29384 case DW_OP_breg24:
29385 case DW_OP_breg25:
29386 case DW_OP_breg26:
29387 case DW_OP_breg27:
29388 case DW_OP_breg28:
29389 case DW_OP_breg29:
29390 case DW_OP_breg30:
29391 case DW_OP_breg31:
29392 case DW_OP_regx:
29393 case DW_OP_fbreg:
29394 case DW_OP_piece:
29395 case DW_OP_deref_size:
29396 case DW_OP_xderef_size:
29397 hstate.add_object (val1->v.val_int);
29398 break;
29399 case DW_OP_skip:
29400 case DW_OP_bra:
29401 {
29402 int offset;
29403
29404 gcc_assert (val1->val_class == dw_val_class_loc);
29405 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
29406 hstate.add_object (offset);
29407 }
29408 break;
29409 case DW_OP_implicit_value:
29410 hstate.add_object (val1->v.val_unsigned);
29411 switch (val2->val_class)
29412 {
29413 case dw_val_class_const:
29414 hstate.add_object (val2->v.val_int);
29415 break;
29416 case dw_val_class_vec:
29417 {
29418 unsigned int elt_size = val2->v.val_vec.elt_size;
29419 unsigned int len = val2->v.val_vec.length;
29420
29421 hstate.add_int (elt_size);
29422 hstate.add_int (len);
29423 hstate.add (val2->v.val_vec.array, len * elt_size);
29424 }
29425 break;
29426 case dw_val_class_const_double:
29427 hstate.add_object (val2->v.val_double.low);
29428 hstate.add_object (val2->v.val_double.high);
29429 break;
29430 case dw_val_class_wide_int:
29431 hstate.add (val2->v.val_wide->get_val (),
29432 get_full_len (*val2->v.val_wide)
29433 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29434 break;
29435 case dw_val_class_addr:
29436 inchash::add_rtx (val2->v.val_addr, hstate);
29437 break;
29438 default:
29439 gcc_unreachable ();
29440 }
29441 break;
29442 case DW_OP_bregx:
29443 case DW_OP_bit_piece:
29444 hstate.add_object (val1->v.val_int);
29445 hstate.add_object (val2->v.val_int);
29446 break;
29447 case DW_OP_addr:
29448 hash_addr:
29449 if (loc->dtprel)
29450 {
29451 unsigned char dtprel = 0xd1;
29452 hstate.add_object (dtprel);
29453 }
29454 inchash::add_rtx (val1->v.val_addr, hstate);
29455 break;
29456 case DW_OP_GNU_addr_index:
29457 case DW_OP_GNU_const_index:
29458 {
29459 if (loc->dtprel)
29460 {
29461 unsigned char dtprel = 0xd1;
29462 hstate.add_object (dtprel);
29463 }
29464 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
29465 }
29466 break;
29467 case DW_OP_implicit_pointer:
29468 case DW_OP_GNU_implicit_pointer:
29469 hstate.add_int (val2->v.val_int);
29470 break;
29471 case DW_OP_entry_value:
29472 case DW_OP_GNU_entry_value:
29473 hstate.add_object (val1->v.val_loc);
29474 break;
29475 case DW_OP_regval_type:
29476 case DW_OP_deref_type:
29477 case DW_OP_GNU_regval_type:
29478 case DW_OP_GNU_deref_type:
29479 {
29480 unsigned int byte_size
29481 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
29482 unsigned int encoding
29483 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
29484 hstate.add_object (val1->v.val_int);
29485 hstate.add_object (byte_size);
29486 hstate.add_object (encoding);
29487 }
29488 break;
29489 case DW_OP_convert:
29490 case DW_OP_reinterpret:
29491 case DW_OP_GNU_convert:
29492 case DW_OP_GNU_reinterpret:
29493 if (val1->val_class == dw_val_class_unsigned_const)
29494 {
29495 hstate.add_object (val1->v.val_unsigned);
29496 break;
29497 }
29498 /* FALLTHRU */
29499 case DW_OP_const_type:
29500 case DW_OP_GNU_const_type:
29501 {
29502 unsigned int byte_size
29503 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
29504 unsigned int encoding
29505 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
29506 hstate.add_object (byte_size);
29507 hstate.add_object (encoding);
29508 if (loc->dw_loc_opc != DW_OP_const_type
29509 && loc->dw_loc_opc != DW_OP_GNU_const_type)
29510 break;
29511 hstate.add_object (val2->val_class);
29512 switch (val2->val_class)
29513 {
29514 case dw_val_class_const:
29515 hstate.add_object (val2->v.val_int);
29516 break;
29517 case dw_val_class_vec:
29518 {
29519 unsigned int elt_size = val2->v.val_vec.elt_size;
29520 unsigned int len = val2->v.val_vec.length;
29521
29522 hstate.add_object (elt_size);
29523 hstate.add_object (len);
29524 hstate.add (val2->v.val_vec.array, len * elt_size);
29525 }
29526 break;
29527 case dw_val_class_const_double:
29528 hstate.add_object (val2->v.val_double.low);
29529 hstate.add_object (val2->v.val_double.high);
29530 break;
29531 case dw_val_class_wide_int:
29532 hstate.add (val2->v.val_wide->get_val (),
29533 get_full_len (*val2->v.val_wide)
29534 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
29535 break;
29536 default:
29537 gcc_unreachable ();
29538 }
29539 }
29540 break;
29541
29542 default:
29543 /* Other codes have no operands. */
29544 break;
29545 }
29546 }
29547
29548 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
29549
29550 static inline void
29551 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
29552 {
29553 dw_loc_descr_ref l;
29554 bool sizes_computed = false;
29555 /* Sizes are computed lazily in the loop below, only when a DW_OP_skip
29556 or DW_OP_bra is seen, so that their offset operands can be checksummed. */
29557
29558 for (l = loc; l != NULL; l = l->dw_loc_next)
29559 {
29560 enum dwarf_location_atom opc = l->dw_loc_opc;
29561 hstate.add_object (opc);
29562 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
29563 {
29564 size_of_locs (loc);
29565 sizes_computed = true;
29566 }
29567 hash_loc_operands (l, hstate);
29568 }
29569 }
29570
29571 /* Compute hash of the whole location list LIST_HEAD. */
29572
29573 static inline void
29574 hash_loc_list (dw_loc_list_ref list_head)
29575 {
29576 dw_loc_list_ref curr = list_head;
29577 inchash::hash hstate;
29578
29579 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
29580 {
29581 hstate.add (curr->begin, strlen (curr->begin) + 1);
29582 hstate.add (curr->end, strlen (curr->end) + 1);
29583 if (curr->section)
29584 hstate.add (curr->section, strlen (curr->section) + 1);
29585 hash_locs (curr->expr, hstate);
29586 }
29587 list_head->hash = hstate.end ();
29588 }
29589
29590 /* Return true if X and Y opcodes have the same operands. */
29591
29592 static inline bool
29593 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
29594 {
29595 dw_val_ref valx1 = &x->dw_loc_oprnd1;
29596 dw_val_ref valx2 = &x->dw_loc_oprnd2;
29597 dw_val_ref valy1 = &y->dw_loc_oprnd1;
29598 dw_val_ref valy2 = &y->dw_loc_oprnd2;
29599
29600 switch (x->dw_loc_opc)
29601 {
29602 case DW_OP_const4u:
29603 case DW_OP_const8u:
29604 if (x->dtprel)
29605 goto hash_addr;
29606 /* FALLTHRU */
29607 case DW_OP_const1u:
29608 case DW_OP_const1s:
29609 case DW_OP_const2u:
29610 case DW_OP_const2s:
29611 case DW_OP_const4s:
29612 case DW_OP_const8s:
29613 case DW_OP_constu:
29614 case DW_OP_consts:
29615 case DW_OP_pick:
29616 case DW_OP_plus_uconst:
29617 case DW_OP_breg0:
29618 case DW_OP_breg1:
29619 case DW_OP_breg2:
29620 case DW_OP_breg3:
29621 case DW_OP_breg4:
29622 case DW_OP_breg5:
29623 case DW_OP_breg6:
29624 case DW_OP_breg7:
29625 case DW_OP_breg8:
29626 case DW_OP_breg9:
29627 case DW_OP_breg10:
29628 case DW_OP_breg11:
29629 case DW_OP_breg12:
29630 case DW_OP_breg13:
29631 case DW_OP_breg14:
29632 case DW_OP_breg15:
29633 case DW_OP_breg16:
29634 case DW_OP_breg17:
29635 case DW_OP_breg18:
29636 case DW_OP_breg19:
29637 case DW_OP_breg20:
29638 case DW_OP_breg21:
29639 case DW_OP_breg22:
29640 case DW_OP_breg23:
29641 case DW_OP_breg24:
29642 case DW_OP_breg25:
29643 case DW_OP_breg26:
29644 case DW_OP_breg27:
29645 case DW_OP_breg28:
29646 case DW_OP_breg29:
29647 case DW_OP_breg30:
29648 case DW_OP_breg31:
29649 case DW_OP_regx:
29650 case DW_OP_fbreg:
29651 case DW_OP_piece:
29652 case DW_OP_deref_size:
29653 case DW_OP_xderef_size:
29654 return valx1->v.val_int == valy1->v.val_int;
29655 case DW_OP_skip:
29656 case DW_OP_bra:
29657 /* If splitting debug info, the use of DW_OP_GNU_addr_index
29658 can cause irrelevant differences in dw_loc_addr. */
29659 gcc_assert (valx1->val_class == dw_val_class_loc
29660 && valy1->val_class == dw_val_class_loc
29661 && (dwarf_split_debug_info
29662 || x->dw_loc_addr == y->dw_loc_addr));
29663 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
29664 case DW_OP_implicit_value:
29665 if (valx1->v.val_unsigned != valy1->v.val_unsigned
29666 || valx2->val_class != valy2->val_class)
29667 return false;
29668 switch (valx2->val_class)
29669 {
29670 case dw_val_class_const:
29671 return valx2->v.val_int == valy2->v.val_int;
29672 case dw_val_class_vec:
29673 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29674 && valx2->v.val_vec.length == valy2->v.val_vec.length
29675 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29676 valx2->v.val_vec.elt_size
29677 * valx2->v.val_vec.length) == 0;
29678 case dw_val_class_const_double:
29679 return valx2->v.val_double.low == valy2->v.val_double.low
29680 && valx2->v.val_double.high == valy2->v.val_double.high;
29681 case dw_val_class_wide_int:
29682 return *valx2->v.val_wide == *valy2->v.val_wide;
29683 case dw_val_class_addr:
29684 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
29685 default:
29686 gcc_unreachable ();
29687 }
29688 case DW_OP_bregx:
29689 case DW_OP_bit_piece:
29690 return valx1->v.val_int == valy1->v.val_int
29691 && valx2->v.val_int == valy2->v.val_int;
29692 case DW_OP_addr:
29693 hash_addr:
29694 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
29695 case DW_OP_GNU_addr_index:
29696 case DW_OP_GNU_const_index:
29697 {
29698 rtx ax1 = valx1->val_entry->addr.rtl;
29699 rtx ay1 = valy1->val_entry->addr.rtl;
29700 return rtx_equal_p (ax1, ay1);
29701 }
29702 case DW_OP_implicit_pointer:
29703 case DW_OP_GNU_implicit_pointer:
29704 return valx1->val_class == dw_val_class_die_ref
29705 && valx1->val_class == valy1->val_class
29706 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
29707 && valx2->v.val_int == valy2->v.val_int;
29708 case DW_OP_entry_value:
29709 case DW_OP_GNU_entry_value:
29710 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
29711 case DW_OP_const_type:
29712 case DW_OP_GNU_const_type:
29713 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
29714 || valx2->val_class != valy2->val_class)
29715 return false;
29716 switch (valx2->val_class)
29717 {
29718 case dw_val_class_const:
29719 return valx2->v.val_int == valy2->v.val_int;
29720 case dw_val_class_vec:
29721 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
29722 && valx2->v.val_vec.length == valy2->v.val_vec.length
29723 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
29724 valx2->v.val_vec.elt_size
29725 * valx2->v.val_vec.length) == 0;
29726 case dw_val_class_const_double:
29727 return valx2->v.val_double.low == valy2->v.val_double.low
29728 && valx2->v.val_double.high == valy2->v.val_double.high;
29729 case dw_val_class_wide_int:
29730 return *valx2->v.val_wide == *valy2->v.val_wide;
29731 default:
29732 gcc_unreachable ();
29733 }
29734 case DW_OP_regval_type:
29735 case DW_OP_deref_type:
29736 case DW_OP_GNU_regval_type:
29737 case DW_OP_GNU_deref_type:
29738 return valx1->v.val_int == valy1->v.val_int
29739 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
29740 case DW_OP_convert:
29741 case DW_OP_reinterpret:
29742 case DW_OP_GNU_convert:
29743 case DW_OP_GNU_reinterpret:
29744 if (valx1->val_class != valy1->val_class)
29745 return false;
29746 if (valx1->val_class == dw_val_class_unsigned_const)
29747 return valx1->v.val_unsigned == valy1->v.val_unsigned;
29748 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29749 case DW_OP_GNU_parameter_ref:
29750 return valx1->val_class == dw_val_class_die_ref
29751 && valx1->val_class == valy1->val_class
29752 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
29753 default:
29754 /* Other codes have no operands. */
29755 return true;
29756 }
29757 }
29758
29759 /* Return true if DWARF location expressions X and Y are the same. */
29760
29761 static inline bool
29762 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
29763 {
29764 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
29765 if (x->dw_loc_opc != y->dw_loc_opc
29766 || x->dtprel != y->dtprel
29767 || !compare_loc_operands (x, y))
29768 break;
29769 return x == NULL && y == NULL;
29770 }
29771
29772 /* Hashtable helpers. */
29773
29774 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
29775 {
29776 static inline hashval_t hash (const dw_loc_list_struct *);
29777 static inline bool equal (const dw_loc_list_struct *,
29778 const dw_loc_list_struct *);
29779 };
29780
29781 /* Return precomputed hash of location list X. */
29782
29783 inline hashval_t
29784 loc_list_hasher::hash (const dw_loc_list_struct *x)
29785 {
29786 return x->hash;
29787 }
29788
29789 /* Return true if location lists A and B are the same. */
29790
29791 inline bool
29792 loc_list_hasher::equal (const dw_loc_list_struct *a,
29793 const dw_loc_list_struct *b)
29794 {
29795 if (a == b)
29796 return 1;
29797 if (a->hash != b->hash)
29798 return 0;
29799 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
29800 if (strcmp (a->begin, b->begin) != 0
29801 || strcmp (a->end, b->end) != 0
29802 || (a->section == NULL) != (b->section == NULL)
29803 || (a->section && strcmp (a->section, b->section) != 0)
29804 || !compare_locs (a->expr, b->expr))
29805 break;
29806 return a == NULL && b == NULL;
29807 }
29808
29809 typedef hash_table<loc_list_hasher> loc_list_hash_type;
29810
29811
29812 /* Recursively optimize location lists referenced from DIE
29813 children and share them whenever possible. */
29814
29815 static void
29816 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
29817 {
29818 dw_die_ref c;
29819 dw_attr_node *a;
29820 unsigned ix;
29821 dw_loc_list_struct **slot;
29822
29823 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29824 if (AT_class (a) == dw_val_class_loc_list)
29825 {
29826 dw_loc_list_ref list = AT_loc_list (a);
29827 /* TODO: perform some optimizations here, before hashing
29828 it and storing into the hash table. */
29829 hash_loc_list (list);
29830 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
29831 if (*slot == NULL)
29832 *slot = list;
29833 else
29834 a->dw_attr_val.v.val_loc_list = *slot;
29835 }
29836
29837 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
29838 }
29839
29840
29841 /* Recursively assign each location list a unique index into the debug_addr
29842 section. */
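/* For example (a sketch): a list entry covering [.LVL3, .LVL5) gets its
   ".LVL3" begin label recorded in the address table here; with
   -gsplit-dwarf the entry is later referenced through its index into
   .debug_addr rather than through a relocated address.  */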
29843
29844 static void
29845 index_location_lists (dw_die_ref die)
29846 {
29847 dw_die_ref c;
29848 dw_attr_node *a;
29849 unsigned ix;
29850
29851 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29852 if (AT_class (a) == dw_val_class_loc_list)
29853 {
29854 dw_loc_list_ref list = AT_loc_list (a);
29855 dw_loc_list_ref curr;
29856 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
29857 {
29858 /* Don't index an entry that has already been indexed
29859 or won't be output. */
29860 if (curr->begin_entry != NULL
29861 || (strcmp (curr->begin, curr->end) == 0 && !curr->force))
29862 continue;
29863
29864 curr->begin_entry
29865 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
29866 }
29867 }
29868
29869 FOR_EACH_CHILD (die, c, index_location_lists (c));
29870 }
29871
29872 /* Optimize location lists referenced from DIE
29873 children and share them whenever possible. */
29874
29875 static void
29876 optimize_location_lists (dw_die_ref die)
29877 {
29878 loc_list_hash_type htab (500);
29879 optimize_location_lists_1 (die, &htab);
29880 }
29881 \f
29882 /* Traverse the limbo die list, and add parent/child links. The only
29883 dies without parents that should be here are concrete instances of
29884 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
29885 For concrete instances, we can get the parent die from the abstract
29886 instance. */
29887
29888 static void
29889 flush_limbo_die_list (void)
29890 {
29891 limbo_die_node *node;
29892
29893 /* get_context_die calls force_decl_die, which can put new DIEs on the
29894 limbo list in LTO mode when nested functions are put in a different
29895 partition than that of their parent function. */
29896 while ((node = limbo_die_list))
29897 {
29898 dw_die_ref die = node->die;
29899 limbo_die_list = node->next;
29900
29901 if (die->die_parent == NULL)
29902 {
29903 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
29904
29905 if (origin && origin->die_parent)
29906 add_child_die (origin->die_parent, die);
29907 else if (is_cu_die (die))
29908 ;
29909 else if (seen_error ())
29910 /* It's OK to be confused by errors in the input. */
29911 add_child_die (comp_unit_die (), die);
29912 else
29913 {
29914 /* In certain situations, the lexical block containing a
29915 nested function can be optimized away, which results
29916 in the nested function die being orphaned. Likewise
29917 with the return type of that nested function. Force
29918 this to be a child of the containing function.
29919
29920 It may happen that even the containing function got fully
29921 inlined and optimized out. In that case we are lost and
29922 assign the empty child. This should not be a big issue as
29923 the function is likely unreachable too. */
29924 gcc_assert (node->created_for);
29925
29926 if (DECL_P (node->created_for))
29927 origin = get_context_die (DECL_CONTEXT (node->created_for));
29928 else if (TYPE_P (node->created_for))
29929 origin = scope_die_for (node->created_for, comp_unit_die ());
29930 else
29931 origin = comp_unit_die ();
29932
29933 add_child_die (origin, die);
29934 }
29935 }
29936 }
29937 }
29938
29939 /* Reset DIEs so we can output them again. */
29940
29941 static void
29942 reset_dies (dw_die_ref die)
29943 {
29944 dw_die_ref c;
29945
29946 /* Remove stuff we re-generate. */
29947 die->die_mark = 0;
29948 die->die_offset = 0;
29949 die->die_abbrev = 0;
29950 remove_AT (die, DW_AT_sibling);
29951
29952 FOR_EACH_CHILD (die, c, reset_dies (c));
29953 }
29954
29955 /* Output stuff that dwarf requires at the end of every file,
29956 and generate the DWARF-2 debugging info. */
29957
29958 static void
29959 dwarf2out_finish (const char *)
29960 {
29961 comdat_type_node *ctnode;
29962 dw_die_ref main_comp_unit_die;
29963 unsigned char checksum[16];
29964 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
29965
29966 /* Flush out any latecomers to the limbo party. */
29967 flush_limbo_die_list ();
29968
29969 if (flag_checking)
29970 {
29971 verify_die (comp_unit_die ());
29972 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29973 verify_die (node->die);
29974 }
29975
29976 /* We shouldn't have any symbols with delayed asm names for
29977 DIEs generated after early finish. */
29978 gcc_assert (deferred_asm_name == NULL);
29979
29980 gen_remaining_tmpl_value_param_die_attribute ();
29981
29982 if (flag_generate_lto || flag_generate_offload)
29983 {
29984 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
29985
29986 /* Prune stuff so that dwarf2out_finish runs successfully
29987 for the fat part of the object. */
29988 reset_dies (comp_unit_die ());
29989 for (limbo_die_node *node = cu_die_list; node; node = node->next)
29990 reset_dies (node->die);
29991
29992 hash_table<comdat_type_hasher> comdat_type_table (100);
29993 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
29994 {
29995 comdat_type_node **slot
29996 = comdat_type_table.find_slot (ctnode, INSERT);
29997
29998 /* Don't reset types twice. */
29999 if (*slot != HTAB_EMPTY_ENTRY)
30000 continue;
30001
30002 /* Reset the DIEs in this comdat type unit so that they can be
30003 output again for the fat part of the object, mirroring the
30004 reset of the compile unit DIEs above. */
30005 if (debug_info_level >= DINFO_LEVEL_TERSE)
30006 reset_dies (ctnode->root_die);
30007
30008 *slot = ctnode;
30009 }
30010
30011 /* Reset die CU symbol so we don't output it twice. */
30012 comp_unit_die ()->die_id.die_symbol = NULL;
30013
30014 /* Remove DW_AT_macro from the early output. */
30015 if (have_macinfo)
30016 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
30017
30018 /* Remove indirect string decisions. */
30019 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
30020 }
30021
30022 #if ENABLE_ASSERT_CHECKING
30023 {
30024 dw_die_ref die = comp_unit_die (), c;
30025 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
30026 }
30027 #endif
30028 resolve_addr (comp_unit_die ());
30029 move_marked_base_types ();
30030
30031 /* Initialize sections and labels used for actual assembler output. */
30032 unsigned generation = init_sections_and_labels (false);
30033
30034 /* Traverse the DIE's and add sibling attributes to those DIE's that
30035 have children. */
30036 add_sibling_attributes (comp_unit_die ());
30037 limbo_die_node *node;
30038 for (node = cu_die_list; node; node = node->next)
30039 add_sibling_attributes (node->die);
30040 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30041 add_sibling_attributes (ctnode->root_die);
30042
30043 /* When splitting DWARF info, we put some attributes in the
30044 skeleton compile_unit DIE that remains in the .o, while
30045 most attributes go in the DWO compile_unit_die. */
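  /* Roughly (a sketch): the skeleton unit keeps what is needed without the
     .dwo file, such as DW_AT_comp_dir, the dwo name/id and the
     statement-list and address/ranges base offsets, while the type,
     variable and subprogram DIEs live in the DWO unit.  */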
30046 if (dwarf_split_debug_info)
30047 {
30048 limbo_die_node *cu;
30049 main_comp_unit_die = gen_compile_unit_die (NULL);
30050 if (dwarf_version >= 5)
30051 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
30052 cu = limbo_die_list;
30053 gcc_assert (cu->die == main_comp_unit_die);
30054 limbo_die_list = limbo_die_list->next;
30055 cu->next = cu_die_list;
30056 cu_die_list = cu;
30057 }
30058 else
30059 main_comp_unit_die = comp_unit_die ();
30060
30061 /* Output a terminator label for the .text section. */
30062 switch_to_section (text_section);
30063 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
30064 if (cold_text_section)
30065 {
30066 switch_to_section (cold_text_section);
30067 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
30068 }
30069
30070 /* We can only use the low/high_pc attributes if all of the code was
30071 in .text. */
30072 if (!have_multiple_function_sections
30073 || (dwarf_version < 3 && dwarf_strict))
30074 {
30075 /* Don't add if the CU has no associated code. */
30076 if (text_section_used)
30077 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
30078 text_end_label, true);
30079 }
30080 else
30081 {
30082 unsigned fde_idx;
30083 dw_fde_ref fde;
30084 bool range_list_added = false;
30085
30086 if (text_section_used)
30087 add_ranges_by_labels (main_comp_unit_die, text_section_label,
30088 text_end_label, &range_list_added, true);
30089 if (cold_text_section_used)
30090 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
30091 cold_end_label, &range_list_added, true);
30092
30093 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
30094 {
30095 if (DECL_IGNORED_P (fde->decl))
30096 continue;
30097 if (!fde->in_std_section)
30098 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
30099 fde->dw_fde_end, &range_list_added,
30100 true);
30101 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
30102 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
30103 fde->dw_fde_second_end, &range_list_added,
30104 true);
30105 }
30106
30107 if (range_list_added)
30108 {
30109 /* We need to give .debug_loc and .debug_ranges an appropriate
30110 "base address". Use zero so that these addresses become
30111 absolute. Historically, we've emitted the unexpected
30112 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
30113 Emit both to give time for other tools to adapt. */
30114 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
30115 if (! dwarf_strict && dwarf_version < 4)
30116 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
30117
30118 add_ranges (NULL);
30119 }
30120 }
30121
30122 /* AIX Assembler inserts the length, so adjust the reference to match the
30123 offset expected by debuggers. */
30124 strcpy (dl_section_ref, debug_line_section_label);
30125 if (XCOFF_DEBUGGING_INFO)
30126 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
30127
30128 if (debug_info_level >= DINFO_LEVEL_TERSE)
30129 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
30130 dl_section_ref);
30131
30132 if (have_macinfo)
30133 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30134 macinfo_section_label);
30135
30136 if (dwarf_split_debug_info)
30137 {
30138 if (have_location_lists)
30139 {
30140 if (dwarf_version >= 5)
30141 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
30142 loc_section_label);
30143 /* optimize_location_lists calculates the size of the lists,
30144 so index them first, and assign indices to the entries.
30145 Although optimize_location_lists will remove entries from
30146 the table, it only does so for duplicates, and therefore
30147 only reduces ref_counts to 1. */
30148 index_location_lists (comp_unit_die ());
30149 }
30150
30151 if (addr_index_table != NULL)
30152 {
30153 unsigned int index = 0;
30154 addr_index_table
30155 ->traverse_noresize<unsigned int *, index_addr_table_entry>
30156 (&index);
30157 }
30158 }
30159
30160 loc_list_idx = 0;
30161 if (have_location_lists)
30162 {
30163 optimize_location_lists (comp_unit_die ());
30164 /* And finally assign indexes to the entries for -gsplit-dwarf. */
30165 if (dwarf_version >= 5 && dwarf_split_debug_info)
30166 assign_location_list_indexes (comp_unit_die ());
30167 }
30168
30169 save_macinfo_strings ();
30170
30171 if (dwarf_split_debug_info)
30172 {
30173 unsigned int index = 0;
30174
30175 /* Add attributes common to skeleton compile_units and
30176 type_units. Because these attributes include strings, it
30177 must be done before freezing the string table. Top-level
30178 skeleton die attrs are added when the skeleton type unit is
30179 created, so ensure it is created by this point. */
30180 add_top_level_skeleton_die_attrs (main_comp_unit_die);
30181 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
30182 }
30183
30184 /* Output all of the compilation units. We put the main one last so that
30185 the offsets are available to output_pubnames. */
30186 for (node = cu_die_list; node; node = node->next)
30187 output_comp_unit (node->die, 0, NULL);
30188
30189 hash_table<comdat_type_hasher> comdat_type_table (100);
30190 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30191 {
30192 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30193
30194 /* Don't output duplicate types. */
30195 if (*slot != HTAB_EMPTY_ENTRY)
30196 continue;
30197
30198 /* Add a pointer to the line table for the main compilation unit
30199 so that the debugger can make sense of DW_AT_decl_file
30200 attributes. */
30201 if (debug_info_level >= DINFO_LEVEL_TERSE)
30202 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30203 (!dwarf_split_debug_info
30204 ? dl_section_ref
30205 : debug_skeleton_line_section_label));
30206
30207 output_comdat_type_unit (ctnode);
30208 *slot = ctnode;
30209 }
30210
30211 if (dwarf_split_debug_info)
30212 {
30213 int mark;
30214 struct md5_ctx ctx;
30215
30216 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
30217 index_rnglists ();
30218
30219 /* Compute a checksum of the comp_unit to use as the dwo_id. */
30220 md5_init_ctx (&ctx);
30221 mark = 0;
30222 die_checksum (comp_unit_die (), &ctx, &mark);
30223 unmark_all_dies (comp_unit_die ());
30224 md5_finish_ctx (&ctx, checksum);
30225
30226 if (dwarf_version < 5)
30227 {
30228 /* Use the first 8 bytes of the checksum as the dwo_id,
30229 and add it to both comp-unit DIEs. */
30230 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
30231 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
30232 }
30233
30234 /* Add the base offset of the ranges table to the skeleton
30235 comp-unit DIE. */
30236 if (!vec_safe_is_empty (ranges_table))
30237 {
30238 if (dwarf_version >= 5)
30239 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
30240 ranges_base_label);
30241 else
30242 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
30243 ranges_section_label);
30244 }
30245
30246 switch_to_section (debug_addr_section);
30247 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
30248 output_addr_table ();
30249 }
30250
30251 /* Output the main compilation unit if non-empty or if .debug_macinfo
30252 or .debug_macro will be emitted. */
30253 output_comp_unit (comp_unit_die (), have_macinfo,
30254 dwarf_split_debug_info ? checksum : NULL);
30255
30256 if (dwarf_split_debug_info && info_section_emitted)
30257 output_skeleton_debug_sections (main_comp_unit_die, checksum);
30258
30259 /* Output the abbreviation table. */
30260 if (vec_safe_length (abbrev_die_table) != 1)
30261 {
30262 switch_to_section (debug_abbrev_section);
30263 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30264 output_abbrev_section ();
30265 }
30266
30267 /* Output location list section if necessary. */
30268 if (have_location_lists)
30269 {
30270 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
30271 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
30272 /* Output the location lists info. */
30273 switch_to_section (debug_loc_section);
30274 if (dwarf_version >= 5)
30275 {
30276 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
30277 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
30278 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
30279 dw2_asm_output_data (4, 0xffffffff,
30280 "Initial length escape value indicating "
30281 "64-bit DWARF extension");
30282 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
30283 "Length of Location Lists");
30284 ASM_OUTPUT_LABEL (asm_out_file, l1);
30285 dw2_asm_output_data (2, dwarf_version, "DWARF Version");
30286 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
30287 dw2_asm_output_data (1, 0, "Segment Size");
30288 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
30289 "Offset Entry Count");
30290 }
30291 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
30292 if (dwarf_version >= 5 && dwarf_split_debug_info)
30293 {
30294 unsigned int save_loc_list_idx = loc_list_idx;
30295 loc_list_idx = 0;
30296 output_loclists_offsets (comp_unit_die ());
30297 gcc_assert (save_loc_list_idx == loc_list_idx);
30298 }
30299 output_location_lists (comp_unit_die ());
30300 if (dwarf_version >= 5)
30301 ASM_OUTPUT_LABEL (asm_out_file, l2);
30302 }
30303
30304 output_pubtables ();
30305
30306 /* Output the address range information if a CU (.debug_info section)
30307 was emitted. We output an empty table even if we had no functions
30308 to put in it. This is because the consumer has no way to tell the
30309 difference between an empty table that we omitted and failure to
30310 generate a table that would have contained data. */
30311 if (info_section_emitted)
30312 {
30313 switch_to_section (debug_aranges_section);
30314 output_aranges ();
30315 }
30316
30317 /* Output ranges section if necessary. */
30318 if (!vec_safe_is_empty (ranges_table))
30319 {
30320 if (dwarf_version >= 5)
30321 output_rnglists (generation);
30322 else
30323 output_ranges ();
30324 }
30325
30326 /* Have to end the macro section. */
30327 if (have_macinfo)
30328 {
30329 switch_to_section (debug_macinfo_section);
30330 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30331 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
30332 : debug_skeleton_line_section_label, false);
30333 dw2_asm_output_data (1, 0, "End compilation unit");
30334 }
30335
30336 /* Output the source line correspondence table. We must do this
30337 even if there is no line information. Otherwise, on an empty
30338 translation unit, we will generate a present, but empty,
30339 .debug_info section. IRIX 6.5 `nm' will then complain when
30340 examining the file. This is done late so that any filenames
30341 used by the debug_info section are marked as 'used'. */
30342 switch_to_section (debug_line_section);
30343 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
30344 if (! DWARF2_ASM_LINE_DEBUG_INFO)
30345 output_line_info (false);
30346
30347 if (dwarf_split_debug_info && info_section_emitted)
30348 {
30349 switch_to_section (debug_skeleton_line_section);
30350 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30351 output_line_info (true);
30352 }
30353
30354 /* If we emitted any indirect strings, output the string table too. */
30355 if (debug_str_hash || skeleton_debug_str_hash)
30356 output_indirect_strings ();
30357 if (debug_line_str_hash)
30358 {
30359 switch_to_section (debug_line_str_section);
30360 const enum dwarf_form form = DW_FORM_line_strp;
30361 debug_line_str_hash->traverse<enum dwarf_form,
30362 output_indirect_string> (form);
30363 }
30364 }
30365
30366 /* Returns a hash value for X (which really is a variable_value_struct). */
30367
30368 inline hashval_t
30369 variable_value_hasher::hash (variable_value_struct *x)
30370 {
30371 return (hashval_t) x->decl_id;
30372 }
30373
30374 /* Return nonzero if decl_id of variable_value_struct X is the same as
30375 UID of decl Y. */
30376
30377 inline bool
30378 variable_value_hasher::equal (variable_value_struct *x, tree y)
30379 {
30380 return x->decl_id == DECL_UID (y);
30381 }
30382
30383 /* Helper function for resolve_variable_value, handle
30384 DW_OP_GNU_variable_value in one location expression.
30385 Return true if exprloc has been changed into loclist. */
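/* A sketch of the exprloc -> loclist case handled below: given

     DW_AT_location: DW_OP_lit0 DW_OP_GNU_variable_value <decl> DW_OP_plus

   where <decl> needs a multi-entry location list, each entry of that list
   gets DW_OP_lit0 prepended and DW_OP_plus appended, and the attribute's
   class is switched from exprloc to loclist.  */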
30386
30387 static bool
30388 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30389 {
30390 dw_loc_descr_ref next;
30391 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
30392 {
30393 next = loc->dw_loc_next;
30394 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
30395 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
30396 continue;
30397
30398 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30399 if (DECL_CONTEXT (decl) != current_function_decl)
30400 continue;
30401
30402 dw_die_ref ref = lookup_decl_die (decl);
30403 if (ref)
30404 {
30405 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30406 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30407 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30408 continue;
30409 }
30410 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
30411 if (l == NULL)
30412 continue;
30413 if (l->dw_loc_next)
30414 {
30415 if (AT_class (a) != dw_val_class_loc)
30416 continue;
30417 switch (a->dw_attr)
30418 {
30419 /* Following attributes allow both exprloc and loclist
30420 classes, so we can change them into a loclist. */
30421 case DW_AT_location:
30422 case DW_AT_string_length:
30423 case DW_AT_return_addr:
30424 case DW_AT_data_member_location:
30425 case DW_AT_frame_base:
30426 case DW_AT_segment:
30427 case DW_AT_static_link:
30428 case DW_AT_use_location:
30429 case DW_AT_vtable_elem_location:
30430 if (prev)
30431 {
30432 prev->dw_loc_next = NULL;
30433 prepend_loc_descr_to_each (l, AT_loc (a));
30434 }
30435 if (next)
30436 add_loc_descr_to_each (l, next);
30437 a->dw_attr_val.val_class = dw_val_class_loc_list;
30438 a->dw_attr_val.val_entry = NULL;
30439 a->dw_attr_val.v.val_loc_list = l;
30440 have_location_lists = true;
30441 return true;
30442 /* Following attributes allow both exprloc and reference,
30443 so if the whole expression is DW_OP_GNU_variable_value alone
30444 we could transform it into reference. */
30445 case DW_AT_byte_size:
30446 case DW_AT_bit_size:
30447 case DW_AT_lower_bound:
30448 case DW_AT_upper_bound:
30449 case DW_AT_bit_stride:
30450 case DW_AT_count:
30451 case DW_AT_allocated:
30452 case DW_AT_associated:
30453 case DW_AT_byte_stride:
30454 if (prev == NULL && next == NULL)
30455 break;
30456 /* FALLTHRU */
30457 default:
30458 if (dwarf_strict)
30459 continue;
30460 break;
30461 }
30462 /* Create DW_TAG_variable that we can refer to. */
30463 gen_decl_die (decl, NULL_TREE, NULL,
30464 lookup_decl_die (current_function_decl));
30465 ref = lookup_decl_die (decl);
30466 if (ref)
30467 {
30468 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30469 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30470 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30471 }
30472 continue;
30473 }
30474 if (prev)
30475 {
30476 prev->dw_loc_next = l->expr;
30477 add_loc_descr (&prev->dw_loc_next, next);
30478 free_loc_descr (loc, NULL);
30479 next = prev->dw_loc_next;
30480 }
30481 else
30482 {
30483 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
30484 add_loc_descr (&loc, next);
30485 next = loc;
30486 }
30487 loc = prev;
30488 }
30489 return false;
30490 }
30491
30492 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
30493
30494 static void
30495 resolve_variable_value (dw_die_ref die)
30496 {
30497 dw_attr_node *a;
30498 dw_loc_list_ref loc;
30499 unsigned ix;
30500
30501 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30502 switch (AT_class (a))
30503 {
30504 case dw_val_class_loc:
30505 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
30506 break;
30507 /* FALLTHRU */
30508 case dw_val_class_loc_list:
30509 loc = AT_loc_list (a);
30510 gcc_assert (loc);
30511 for (; loc; loc = loc->dw_loc_next)
30512 resolve_variable_value_in_expr (a, loc->expr);
30513 break;
30514 default:
30515 break;
30516 }
30517 }
30518
30519 /* Attempt to optimize DW_OP_GNU_variable_value referring to
30520 temporaries in the current function. */
30521
30522 static void
30523 resolve_variable_values (void)
30524 {
30525 if (!variable_value_hash || !current_function_decl)
30526 return;
30527
30528 struct variable_value_struct *node
30529 = variable_value_hash->find_with_hash (current_function_decl,
30530 DECL_UID (current_function_decl));
30531
30532 if (node == NULL)
30533 return;
30534
30535 unsigned int i;
30536 dw_die_ref die;
30537 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
30538 resolve_variable_value (die);
30539 }
30540
30541 /* Helper function for note_variable_value, handle one location
30542 expression. */
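/* Roughly (a sketch): when DW_OP_GNU_variable_value still refers to a decl
   that has no DIE yet, the referring DIE is recorded in variable_value_hash
   under the DECL_UID of the decl's containing function, so that
   resolve_variable_values can retry the resolution while that function is
   being compiled.  */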
30543
30544 static void
30545 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
30546 {
30547 for (; loc; loc = loc->dw_loc_next)
30548 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
30549 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30550 {
30551 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
30552 dw_die_ref ref = lookup_decl_die (decl);
30553 if (! ref && (flag_generate_lto || flag_generate_offload))
30554 {
30555 /* ??? This is somewhat a hack because we do not create DIEs
30556 for variables not in BLOCK trees early but when generating
30557 early LTO output we need the dw_val_class_decl_ref to be
30558 fully resolved. For fat LTO objects we'd also like to
30559 undo this after LTO dwarf output. */
30560 gcc_assert (DECL_CONTEXT (decl));
30561 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
30562 gcc_assert (ctx != NULL);
30563 gen_decl_die (decl, NULL_TREE, NULL, ctx);
30564 ref = lookup_decl_die (decl);
30565 gcc_assert (ref != NULL);
30566 }
30567 if (ref)
30568 {
30569 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30570 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30571 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30572 continue;
30573 }
30574 if (VAR_P (decl)
30575 && DECL_CONTEXT (decl)
30576 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
30577 && lookup_decl_die (DECL_CONTEXT (decl)))
30578 {
30579 if (!variable_value_hash)
30580 variable_value_hash
30581 = hash_table<variable_value_hasher>::create_ggc (10);
30582
30583 tree fndecl = DECL_CONTEXT (decl);
30584 struct variable_value_struct *node;
30585 struct variable_value_struct **slot
30586 = variable_value_hash->find_slot_with_hash (fndecl,
30587 DECL_UID (fndecl),
30588 INSERT);
30589 if (*slot == NULL)
30590 {
30591 node = ggc_cleared_alloc<variable_value_struct> ();
30592 node->decl_id = DECL_UID (fndecl);
30593 *slot = node;
30594 }
30595 else
30596 node = *slot;
30597
30598 vec_safe_push (node->dies, die);
30599 }
30600 }
30601 }
30602
30603 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
30604 with dw_val_class_decl_ref operand. */
30605
30606 static void
30607 note_variable_value (dw_die_ref die)
30608 {
30609 dw_die_ref c;
30610 dw_attr_node *a;
30611 dw_loc_list_ref loc;
30612 unsigned ix;
30613
30614 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30615 switch (AT_class (a))
30616 {
30617 case dw_val_class_loc_list:
30618 loc = AT_loc_list (a);
30619 gcc_assert (loc);
30620 if (!loc->noted_variable_value)
30621 {
30622 loc->noted_variable_value = 1;
30623 for (; loc; loc = loc->dw_loc_next)
30624 note_variable_value_in_expr (die, loc->expr);
30625 }
30626 break;
30627 case dw_val_class_loc:
30628 note_variable_value_in_expr (die, AT_loc (a));
30629 break;
30630 default:
30631 break;
30632 }
30633
30634 /* Mark children. */
30635 FOR_EACH_CHILD (die, c, note_variable_value (c));
30636 }
30637
30638 /* Perform any cleanups needed after the early debug generation pass
30639 has run. */
30640
30641 static void
30642 dwarf2out_early_finish (const char *filename)
30643 {
30644 set_early_dwarf s;
30645
30646 /* PCH might result in DW_AT_producer string being restored from the
30647 header compilation, so always fill it with empty string initially
30648 and overwrite only here. */
30649 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
30650 producer_string = gen_producer_string ();
30651 producer->dw_attr_val.v.val_str->refcount--;
30652 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
30653
30654 /* Add the name for the main input file now. We delayed this from
30655 dwarf2out_init to avoid complications with PCH. */
30656 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
30657 add_comp_dir_attribute (comp_unit_die ());
30658
30659 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
30660 DW_AT_comp_dir into .debug_line_str section. */
30661 if (!DWARF2_ASM_LINE_DEBUG_INFO
30662 && dwarf_version >= 5
30663 && DWARF5_USE_DEBUG_LINE_STR)
30664 {
30665 for (int i = 0; i < 2; i++)
30666 {
30667 dw_attr_node *a = get_AT (comp_unit_die (),
30668 i ? DW_AT_comp_dir : DW_AT_name);
30669 if (a == NULL
30670 || AT_class (a) != dw_val_class_str
30671 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
30672 continue;
30673
30674 if (! debug_line_str_hash)
30675 debug_line_str_hash
30676 = hash_table<indirect_string_hasher>::create_ggc (10);
30677
30678 struct indirect_string_node *node
30679 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
30680 set_indirect_string (node);
30681 node->form = DW_FORM_line_strp;
30682 a->dw_attr_val.v.val_str->refcount--;
30683 a->dw_attr_val.v.val_str = node;
30684 }
30685 }
30686
30687 /* With LTO early dwarf was really finished at compile-time, so make
30688 sure to adjust the phase after annotating the LTRANS CU DIE. */
30689 if (in_lto_p)
30690 {
30691 early_dwarf_finished = true;
30692 return;
30693 }
30694
30695 /* Walk through the list of incomplete types again, trying once more to
30696 emit full debugging info for them. */
30697 retry_incomplete_types ();
30698
30699 /* The point here is to flush out the limbo list so that it is empty
30700 and we don't need to stream it for LTO. */
30701 flush_limbo_die_list ();
30702
30703 gen_scheduled_generic_parms_dies ();
30704 gen_remaining_tmpl_value_param_die_attribute ();
30705
30706 /* Add DW_AT_linkage_name for all deferred DIEs. */
30707 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
30708 {
30709 tree decl = node->created_for;
30710 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
30711 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
30712 ended up in deferred_asm_name before we knew it was
30713 constant and never written to disk. */
30714 && DECL_ASSEMBLER_NAME (decl))
30715 {
30716 add_linkage_attr (node->die, decl);
30717 move_linkage_attr (node->die);
30718 }
30719 }
30720 deferred_asm_name = NULL;
30721
30722 if (flag_eliminate_unused_debug_types)
30723 prune_unused_types ();
30724
30725 /* Generate separate COMDAT sections for type DIEs. */
30726 if (use_debug_types)
30727 {
30728 break_out_comdat_types (comp_unit_die ());
30729
30730 /* Each new type_unit DIE was added to the limbo die list when created.
30731 Since these have all been added to comdat_type_list, clear the
30732 limbo die list. */
30733 limbo_die_list = NULL;
30734
30735 /* For each new comdat type unit, copy declarations for incomplete
30736 types to make the new unit self-contained (i.e., no direct
30737 references to the main compile unit). */
30738 for (comdat_type_node *ctnode = comdat_type_list;
30739 ctnode != NULL; ctnode = ctnode->next)
30740 copy_decls_for_unworthy_types (ctnode->root_die);
30741 copy_decls_for_unworthy_types (comp_unit_die ());
30742
30743 /* In the process of copying declarations from one unit to another,
30744 we may have left some declarations behind that are no longer
30745 referenced. Prune them. */
30746 prune_unused_types ();
30747 }
30748
30749 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
30750 with dw_val_class_decl_ref operand. */
30751 note_variable_value (comp_unit_die ());
30752 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30753 note_variable_value (node->die);
30754 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
30755 ctnode = ctnode->next)
30756 note_variable_value (ctnode->root_die);
30757 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30758 note_variable_value (node->die);
30759
30760 /* The AT_pubnames attribute needs to go in all skeleton dies, including
30761 both the main_cu and all skeleton TUs. Making this call unconditional
30762 would end up either adding a second copy of the AT_pubnames attribute, or
30763 requiring a special case in add_top_level_skeleton_die_attrs. */
30764 if (!dwarf_split_debug_info)
30765 add_AT_pubnames (comp_unit_die ());
30766
30767 /* The early debug phase is now finished. */
30768 early_dwarf_finished = true;
30769
30770 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
30771 if (!flag_generate_lto && !flag_generate_offload)
30772 return;
30773
30774 /* Now that we are going to output for LTO, initialize sections and labels
30775 to the LTO variants. We don't need a random-seed postfix like other
30776 LTO sections, since linking the LTO debug sections into one in a partial
30777 link is fine. */
30778 init_sections_and_labels (true);
30779
30780 /* The output below is modeled after dwarf2out_finish with all
30781 location related output removed and some LTO specific changes.
30782 Some refactoring might make both smaller and easier to match up. */
30783
30784 /* Traverse the DIE's and add sibling attributes to those DIE's
30785 that have children. */
30786 add_sibling_attributes (comp_unit_die ());
30787 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30788 add_sibling_attributes (node->die);
30789 for (comdat_type_node *ctnode = comdat_type_list;
30790 ctnode != NULL; ctnode = ctnode->next)
30791 add_sibling_attributes (ctnode->root_die);
30792
30793 if (have_macinfo)
30794 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
30795 macinfo_section_label);
30796
30797 save_macinfo_strings ();
30798
30799 /* Output all of the compilation units. We put the main one last so that
30800 the offsets are available to output_pubnames. */
30801 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
30802 output_comp_unit (node->die, 0, NULL);
30803
30804 hash_table<comdat_type_hasher> comdat_type_table (100);
30805 for (comdat_type_node *ctnode = comdat_type_list;
30806 ctnode != NULL; ctnode = ctnode->next)
30807 {
30808 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
30809
30810 /* Don't output duplicate types. */
30811 if (*slot != HTAB_EMPTY_ENTRY)
30812 continue;
30813
30814 /* Add a pointer to the line table for the main compilation unit
30815 so that the debugger can make sense of DW_AT_decl_file
30816 attributes. */
30817 if (debug_info_level >= DINFO_LEVEL_TERSE)
30818 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
30819 (!dwarf_split_debug_info
30820 ? debug_line_section_label
30821 : debug_skeleton_line_section_label));
30822
30823 output_comdat_type_unit (ctnode);
30824 *slot = ctnode;
30825 }
30826
30827 /* Stick a unique symbol to the main debuginfo section. */
30828 compute_comp_unit_symbol (comp_unit_die ());
30829
30830 /* Output the main compilation unit. We always need it if only for
30831 the CU symbol. */
30832 output_comp_unit (comp_unit_die (), true, NULL);
30833
30834 /* Output the abbreviation table. */
30835 if (vec_safe_length (abbrev_die_table) != 1)
30836 {
30837 switch_to_section (debug_abbrev_section);
30838 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
30839 output_abbrev_section ();
30840 }
30841
30842 /* Have to end the macro section. */
30843 if (have_macinfo)
30844 {
30845 /* We have to save macinfo state if we need to output it again
30846 for the FAT part of the object. */
30847 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
30848 if (flag_fat_lto_objects)
30849 macinfo_table = macinfo_table->copy ();
30850
30851 switch_to_section (debug_macinfo_section);
30852 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
30853 output_macinfo (debug_skeleton_line_section_label, true);
30854 dw2_asm_output_data (1, 0, "End compilation unit");
30855
30856 /* Emit a skeleton debug_line section. */
30857 switch_to_section (debug_skeleton_line_section);
30858 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
30859 output_line_info (true);
30860
30861 if (flag_fat_lto_objects)
30862 {
30863 vec_free (macinfo_table);
30864 macinfo_table = saved_macinfo_table;
30865 }
30866 }
30867
30868
30869 /* If we emitted any indirect strings, output the string table too. */
30870 if (debug_str_hash || skeleton_debug_str_hash)
30871 output_indirect_strings ();
30872
30873 /* Switch back to the text section. */
30874 switch_to_section (text_section);
30875 }
30876
30877 /* Reset all state within dwarf2out.c so that we can rerun the compiler
30878 within the same process. For use by toplev::finalize. */
30879
30880 void
30881 dwarf2out_c_finalize (void)
30882 {
30883 last_var_location_insn = NULL;
30884 cached_next_real_insn = NULL;
30885 used_rtx_array = NULL;
30886 incomplete_types = NULL;
30887 decl_scope_table = NULL;
30888 debug_info_section = NULL;
30889 debug_skeleton_info_section = NULL;
30890 debug_abbrev_section = NULL;
30891 debug_skeleton_abbrev_section = NULL;
30892 debug_aranges_section = NULL;
30893 debug_addr_section = NULL;
30894 debug_macinfo_section = NULL;
30895 debug_line_section = NULL;
30896 debug_skeleton_line_section = NULL;
30897 debug_loc_section = NULL;
30898 debug_pubnames_section = NULL;
30899 debug_pubtypes_section = NULL;
30900 debug_str_section = NULL;
30901 debug_line_str_section = NULL;
30902 debug_str_dwo_section = NULL;
30903 debug_str_offsets_section = NULL;
30904 debug_ranges_section = NULL;
30905 debug_frame_section = NULL;
30906 fde_vec = NULL;
30907 debug_str_hash = NULL;
30908 debug_line_str_hash = NULL;
30909 skeleton_debug_str_hash = NULL;
30910 dw2_string_counter = 0;
30911 have_multiple_function_sections = false;
30912 text_section_used = false;
30913 cold_text_section_used = false;
30914 cold_text_section = NULL;
30915 current_unit_personality = NULL;
30916
30917 early_dwarf = false;
30918 early_dwarf_finished = false;
30919
30920 next_die_offset = 0;
30921 single_comp_unit_die = NULL;
30922 comdat_type_list = NULL;
30923 limbo_die_list = NULL;
30924 file_table = NULL;
30925 decl_die_table = NULL;
30926 common_block_die_table = NULL;
30927 decl_loc_table = NULL;
30928 call_arg_locations = NULL;
30929 call_arg_loc_last = NULL;
30930 call_site_count = -1;
30931 tail_call_site_count = -1;
30932 cached_dw_loc_list_table = NULL;
30933 abbrev_die_table = NULL;
30934 delete dwarf_proc_stack_usage_map;
30935 dwarf_proc_stack_usage_map = NULL;
30936 line_info_label_num = 0;
30937 cur_line_info_table = NULL;
30938 text_section_line_info = NULL;
30939 cold_text_section_line_info = NULL;
30940 separate_line_info = NULL;
30941 info_section_emitted = false;
30942 pubname_table = NULL;
30943 pubtype_table = NULL;
30944 macinfo_table = NULL;
30945 ranges_table = NULL;
30946 ranges_by_label = NULL;
30947 rnglist_idx = 0;
30948 have_location_lists = false;
30949 loclabel_num = 0;
30950 poc_label_num = 0;
30951 last_emitted_file = NULL;
30952 label_num = 0;
30953 tmpl_value_parm_die_table = NULL;
30954 generic_type_instances = NULL;
30955 frame_pointer_fb_offset = 0;
30956 frame_pointer_fb_offset_valid = false;
30957 base_types.release ();
30958 XDELETEVEC (producer_string);
30959 producer_string = NULL;
30960 }
30961
30962 #include "gt-dwarf2out.h"