1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2020 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30    information, which is also used by GCC's efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47	 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
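
/* Illustrative example of the glossary above (editorial addition, not
   normative): on a hypothetical x86-64 target the CFA is %rsp + 8 at the
   function entry point, since the call insn has just pushed the return
   address.  After

	push %rbp		CFA = %rsp + 16, %rbp saved at CFA - 16
	mov  %rsp, %rbp		CFA = %rbp + 16

   the CFA register/offset rule changes twice, and the CFI stream records
   each change so the unwinder can recompute the CFA at any PC.  */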
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149      completed at some later time.  incomplete_types needs to be a
150      vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
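
/* For example, DWARF_ROUND (9, 4) == 12 while DWARF_ROUND (8, 4) == 8:
   the macro only ever rounds up to the next multiple of BOUNDARY.  */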
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248    Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
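
/* For example, with a 4-byte DWARF_OFFSET_SIZE these expand to
   DW_FORM_data4 and DW_FORM_ref4; with an 8-byte offset size (64-bit
   DWARF) they become DW_FORM_data8 and DW_FORM_ref8.  */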
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
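
/* For instance, with 64-bit HOST_WIDE_INTs a wide_int whose minimum
   unsigned precision is 70 bits needs (70 + 63) / 64 == 2 elements,
   while a small constant such as 5 needs only one.  */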
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
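
/* Taken together, the two functions above tell the GTY machinery the
   operand layout of each CFI.  For example, DW_CFA_offset carries a
   register number in operand 1 and an offset in operand 2, while
   DW_CFA_advance_loc4 carries an address label in operand 1 and leaves
   operand 2 unused.  */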
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696    associated with functions are dragged along with them and not discarded
697    by link-time garbage collection.  We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708   /* Pad the FDE out to an address-sized boundary.  */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
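
/* For reference, the record emitted by output_fde above looks roughly
   like this (an editorial summary of the code, not a normative
   description of the DWARF frame format):

	FDE length		4 bytes, or 0xffffffff + 8 for 64-bit DWARF
	CIE pointer/offset
	initial location	the begin label
	address range		end - begin
	augmentation data	LSDA pointer, only if the CIE augmentation
				is non-empty
	call frame instructions
	padding			up to an address-sized boundary  */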
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820   /* Use CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935   /* Pad the CIE out to an address-sized boundary.  */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940   /* Loop through all of the FDEs.  */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
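
/* As an illustration of the CIE layout above: an EH CIE for a unit with
   a personality routine and LSDAs typically uses the augmentation string
   "zPLR", where 'z' says an augmentation-size uleb128 follows, 'P' adds
   the encoded personality pointer, 'L' records the LSDA encoding used by
   the FDEs, and 'R' records the encoding of the FDE code pointers.  A
   unit needing none of these gets an empty augmentation string.  */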
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
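
/* With CFI asm support the function above therefore emits something like
   the following (hypothetical x86-64/Linux output; the exact encoding
   bytes and symbol names are target-dependent):

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA12

   and the assembler, rather than GCC, then builds the CIE/FDE data.  */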
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113	 /* We cannot keep a per-function personality because, without CFI
1114	    asm, there is no current function anymore at the point where we
1115	    emit the CFI data.  */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting %<.cfi_personality%> directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the start of the epilogue code generated for
1156      this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges plus a location description for each range,
1300    so you can track variables that are in different places over
1301    their entire lifetime.  */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
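
/* Purely illustrative example: a variable that lives in a register for
   part of a function and in a stack slot afterwards gets a list of two
   nodes along the lines of

	[.LVL0, .LVL1)	DW_OP_reg3
	[.LVL1, .LFE0)	DW_OP_fbreg -16

   where the begin/end labels delimit each range and the expr field holds
   the location description that is valid over that range.  */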
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
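
/* Usage sketch (editorial, not called anywhere in this file): to build
   the expression "DW_OP_fbreg -8; DW_OP_deref", which pushes the address
   eight bytes below the frame base and then loads the word stored there,
   one would write

	dw_loc_descr_ref l = new_loc_descr (DW_OP_fbreg, -8, 0);
	add_loc_descr (&l, new_loc_descr (DW_OP_deref, 0, 0));

   add_loc_descr walks to the end of the chain, so terms are appended in
   evaluation order.  */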
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562      offset.  Don't optimize if a signed integer overflow would happen.  */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
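
/* Concretely: if the expression currently ends in "DW_OP_fbreg -24" and
   a constant 8 is added, the tail is adjusted in place to
   "DW_OP_fbreg -16"; if the tail is some other opcode, a
   "DW_OP_plus_uconst 8" term (or a constant followed by DW_OP_minus for
   negative offsets) is appended instead.  */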
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
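
/* For example, new_reg_loc_descr (6, -16) yields "DW_OP_breg6 -16",
   while a register number above 31, say new_reg_loc_descr (40, 8),
   needs the extended form "DW_OP_bregx 40, 8".  */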
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extensions
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
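
/* So, for example, dwarf_OP (DW_OP_entry_value) degrades to
   DW_OP_GNU_entry_value when emitting DWARF 2-4 and is passed through
   unchanged for DWARF 5; the dwarf_AT, dwarf_TAG and dwarf_FORM helpers
   below follow the same pattern.  */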
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
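
/* Illustrative note (not part of the original code): the second loop
   above only runs when a DW_OP_skip or DW_OP_bra is present, since
   only then are the dw_loc_addr offsets needed later to compute the
   branch operands in output_loc_operands.  For example, for the
   hypothetical sequence

     DW_OP_lit0; DW_OP_bra L; ...; L: DW_OP_lit1

   dw_loc_addr would be 0 for DW_OP_lit0, 1 for DW_OP_bra (which
   itself occupies 3 bytes), and 4 for whatever follows it.  */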
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
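
/* Illustrative note (not part of the original code): the register
   renumbering above only rewrites the opcode byte itself.  For
   instance, if DWARF2_FRAME_REG_OUT were to map hard register 3 to
   column 5 for unwind info (a hypothetical mapping), a DW_OP_reg3
   opcode would be emitted as DW_OP_reg5, and a DW_OP_breg3 as
   DW_OP_breg5 with its offset operand left untouched.  */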
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
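
/* Illustrative note (not part of the original code): in the common
   non-indirect case the result is a single register-relative
   descriptor.  For example, with a CFA of register 7 at offset 16
   and OFFSET 8 (hypothetical values), the function returns the
   equivalent of DW_OP_breg7 24, since the combined offset is folded
   into the breg operand.  In the indirect case the register value is
   dereferenced first and the offset applied afterwards via
   loc_descr_plus_const.  */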
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 DWARF 4 or higher, and only if the user didn't disable it through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For DWARF 5 or higher .debug_types doesn't exist any more;
2917 type units use the DW_UT_type unit type in the .debug_info section.
2918 For late LTO debug there should be almost no types emitted, so avoid
2919 enabling -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind
3059 and an attribute value. Attributes are collected in a vector
3060 attached to the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* Die is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* Set for an external ref to die_symbol when die_offset contains an
3094 extra offset to add to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. We don't consider those present from the
3098 DIE lookup routines. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 class set_early_dwarf {
3108 public:
3109 bool saved;
3110 set_early_dwarf () : saved(early_dwarf)
3111 {
3112 gcc_assert (! early_dwarf_finished);
3113 early_dwarf = true;
3114 }
3115 ~set_early_dwarf () { early_dwarf = saved; }
3116 };
3117
3118 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3119 #define FOR_EACH_CHILD(die, c, expr) do { \
3120 c = die->die_child; \
3121 if (c) do { \
3122 c = c->die_sib; \
3123 expr; \
3124 } while (c != die->die_child); \
3125 } while (0)
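
/* Illustrative note (not part of the original code): because the
   children form a circular list and die_child points at the last
   child, the macro starts from die_child->die_sib (the first child)
   and stops once it wraps back around to die_child.  A hypothetical
   use that counts the children of DIE:

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);  */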
3126
3127 /* The pubname structure */
3128
3129 typedef struct GTY(()) pubname_struct {
3130 dw_die_ref die;
3131 const char *name;
3132 }
3133 pubname_entry;
3134
3135
3136 struct GTY(()) dw_ranges {
3137 const char *label;
3138 /* If this is positive, it's a block number, otherwise it's a
3139 bitwise-negated index into dw_ranges_by_label. */
3140 int num;
3141 /* Index for the range list for DW_FORM_rnglistx. */
3142 unsigned int idx : 31;
3143 /* True if this range might possibly be in a different section
3144 from the previous entry. */
3145 unsigned int maybe_new_sec : 1;
3146 };
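
/* Illustrative note (not part of the original code): for the num
   field above, a positive value such as 3 names block number 3,
   while a negative value is a bitwise-negated index into
   dw_ranges_by_label, e.g. num == -1 (~0) refers to entry 0 and
   num == -3 (~2) refers to entry 2 (hypothetical values).  */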
3147
3148 /* A structure to hold a macinfo entry. */
3149
3150 typedef struct GTY(()) macinfo_struct {
3151 unsigned char code;
3152 unsigned HOST_WIDE_INT lineno;
3153 const char *info;
3154 }
3155 macinfo_entry;
3156
3157
3158 struct GTY(()) dw_ranges_by_label {
3159 const char *begin;
3160 const char *end;
3161 };
3162
3163 /* The comdat type node structure. */
3164 struct GTY(()) comdat_type_node
3165 {
3166 dw_die_ref root_die;
3167 dw_die_ref type_die;
3168 dw_die_ref skeleton_die;
3169 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3170 comdat_type_node *next;
3171 };
3172
3173 /* A list of DIEs for which we can't determine ancestry (parent_die
3174 field) just yet. Later in dwarf2out_finish we will fill in the
3175 missing bits. */
3176 typedef struct GTY(()) limbo_die_struct {
3177 dw_die_ref die;
3178 /* The tree for which this DIE was created. We use this to
3179 determine ancestry later. */
3180 tree created_for;
3181 struct limbo_die_struct *next;
3182 }
3183 limbo_die_node;
3184
3185 typedef struct skeleton_chain_struct
3186 {
3187 dw_die_ref old_die;
3188 dw_die_ref new_die;
3189 struct skeleton_chain_struct *parent;
3190 }
3191 skeleton_chain_node;
3192
3193 /* Define a macro which returns nonzero for a TYPE_DECL which was
3194 implicitly generated for a type.
3195
3196 Note that, unlike the C front-end (which generates a NULL named
3197 TYPE_DECL node for each complete tagged type, each array type,
3198 and each function type node created) the C++ front-end generates
3199 a _named_ TYPE_DECL node for each tagged type node created.
3200 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3201 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3202 front-end, but for each type, tagged or not. */
3203
3204 #define TYPE_DECL_IS_STUB(decl) \
3205 (DECL_NAME (decl) == NULL_TREE \
3206 || (DECL_ARTIFICIAL (decl) \
3207 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3208 /* This is necessary for stub decls that \
3209 appear in nested inline functions. */ \
3210 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3211 && (decl_ultimate_origin (decl) \
3212 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3213
3214 /* Information concerning the compilation unit's programming
3215 language, and compiler version. */
3216
3217 /* Fixed size portion of the DWARF compilation unit header. */
3218 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3219 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3220 + (dwarf_version >= 5 ? 4 : 3))
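
/* Illustrative note (not part of the original code): the trailing
   constant covers the fields that follow the unit length and the
   abbrev-table offset: 2 bytes of version plus 1 byte of address
   size before DWARF 5, plus 1 more byte for the unit type from
   DWARF 5 on.  For 32-bit DWARF that works out to 4 + 4 + 3 = 11
   bytes for DWARF 2-4 and 4 + 4 + 4 = 12 bytes for DWARF 5.  */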
3221
3222 /* Fixed size portion of the DWARF comdat type unit header. */
3223 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3225 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3226
3227 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3228 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3229 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3230
3231 /* Fixed size portion of public names info. */
3232 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3233
3234 /* Fixed size portion of the address range info. */
3235 #define DWARF_ARANGES_HEADER_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - DWARF_INITIAL_LENGTH_SIZE)
3239
3240 /* Size of padding portion in the address range info. It must be
3241 aligned to twice the pointer size. */
3242 #define DWARF_ARANGES_PAD_SIZE \
3243 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3244 DWARF2_ADDR_SIZE * 2) \
3245 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
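
/* Illustrative note (not part of the original code): with 32-bit
   DWARF and 8-byte addresses (a hypothetical configuration), the
   unpadded header is 4 + 4 + 4 = 12 bytes and DWARF_ROUND brings it
   up to the 16-byte tuple boundary, so DWARF_ARANGES_HEADER_SIZE is
   16 - 4 = 12 and DWARF_ARANGES_PAD_SIZE is 16 - 12 = 4.  */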
3246
3247 /* Use assembler line directives if available. */
3248 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3249 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3250 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3251 #else
3252 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3253 #endif
3254 #endif
3255
3256 /* Use assembler views in line directives if available. */
3257 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3258 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3259 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3260 #else
3261 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3262 #endif
3263 #endif
3264
3265 /* Return true if GCC configure detected assembler support for .loc. */
3266
3267 bool
3268 dwarf2out_default_as_loc_support (void)
3269 {
3270 return DWARF2_ASM_LINE_DEBUG_INFO;
3271 #if (GCC_VERSION >= 3000)
3272 # undef DWARF2_ASM_LINE_DEBUG_INFO
3273 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3274 #endif
3275 }
3276
3277 /* Return true if GCC configure detected assembler support for views
3278 in .loc directives. */
3279
3280 bool
3281 dwarf2out_default_as_locview_support (void)
3282 {
3283 return DWARF2_ASM_VIEW_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3291 view computation, and it refers to a view identifier for which we
3292 will not emit a label because it is known to map to a view number
3293 zero. We won't allocate the bitmap if we're not using assembler
3294 support for location views, but we have to make the variable
3295 visible for GGC and for code that will be optimized out for lack of
3296 support but that's still parsed and compiled. We could abstract it
3297 out with macros, but it's not worth it. */
3298 static GTY(()) bitmap zero_view_p;
3299
3300 /* Evaluate to TRUE iff N is known to identify the first location view
3301 at its PC. When not using assembler location view computation,
3302 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3303 and the view label numbers recorded in it are the ones known to be
3304 zero. */
3305 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3306 || (N) == (var_loc_view)-1 \
3307 || (zero_view_p \
3308 && bitmap_bit_p (zero_view_p, (N))))
3309
3310 /* Return true iff we're to emit .loc directives for the assembler to
3311 generate line number sections.
3312
3313 When we're not emitting views, all we need from the assembler is
3314 support for .loc directives.
3315
3316 If we are emitting views, we can only use the assembler's .loc
3317 support if it also supports views.
3318
3319 When the compiler is emitting the line number programs and
3320 computing view numbers itself, it resets view numbers at known PC
3321 changes and counts from that, and then it emits view numbers as
3322 literal constants in locviewlists. There are cases in which the
3323 compiler is not sure about PC changes, e.g. when extra alignment is
3324 requested for a label. In these cases, the compiler may not reset
3325 the view counter, and the potential PC advance in the line number
3326 program will use an opcode that does not reset the view counter
3327 even if the PC actually changes, so that compiler and debug info
3328 consumer can keep view numbers in sync.
3329
3330 When the compiler defers view computation to the assembler, it
3331 emits symbolic view numbers in locviewlists, with the exception of
3332 views known to be zero (forced resets, or reset after
3333 compiler-visible PC changes): instead of emitting symbols for
3334 these, we emit literal zero and assert the assembler agrees with
3335 the compiler's assessment. We could use symbolic views everywhere,
3336 instead of special-casing zero views, but then we'd be unable to
3337 optimize out locviewlists that contain only zeros. */
3338
3339 static bool
3340 output_asm_line_debug_info (void)
3341 {
3342 return (dwarf2out_as_loc_support
3343 && (dwarf2out_as_locview_support
3344 || !debug_variable_location_views));
3345 }
3346
3347 /* Minimum line offset in a special line info. opcode.
3348 This value was chosen to give a reasonable range of values. */
3349 #define DWARF_LINE_BASE -10
3350
3351 /* First special line opcode - leave room for the standard opcodes. */
3352 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3353
3354 /* Range of line offsets in a special line info. opcode. */
3355 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
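
/* Illustrative note (not part of the original code): per the DWARF
   line number program encoding, a special opcode is computed as

     opcode = (line_delta - DWARF_LINE_BASE)
              + (DWARF_LINE_RANGE * op_advance)
              + DWARF_LINE_OPCODE_BASE

   With DW_LNS_set_isa == 12 the opcode base is 13, so the range here
   works out to 254 - 13 + 1 = 242, letting special opcodes encode
   line deltas from -10 up to 231.  */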
3356
3357 /* Flag that indicates the initial value of the is_stmt_start flag.
3358 In the present implementation, we do not mark any lines as
3359 the beginning of a source statement, because that information
3360 is not made available by the GCC front-end. */
3361 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3362
3363 /* Maximum number of operations per instruction bundle. */
3364 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3365 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3366 #endif
3367
3368 /* This location is used by calc_die_sizes() to keep track of
3369 the offset of each DIE within the .debug_info section. */
3370 static unsigned long next_die_offset;
3371
3372 /* Record the root of the DIE's built for the current compilation unit. */
3373 static GTY(()) dw_die_ref single_comp_unit_die;
3374
3375 /* A list of type DIEs that have been separated into comdat sections. */
3376 static GTY(()) comdat_type_node *comdat_type_list;
3377
3378 /* A list of CU DIEs that have been separated. */
3379 static GTY(()) limbo_die_node *cu_die_list;
3380
3381 /* A list of DIEs with a NULL parent waiting to be relocated. */
3382 static GTY(()) limbo_die_node *limbo_die_list;
3383
3384 /* A list of DIEs for which we may have to generate
3385 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3386 static GTY(()) limbo_die_node *deferred_asm_name;
3387
3388 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3389 {
3390 typedef const char *compare_type;
3391
3392 static hashval_t hash (dwarf_file_data *);
3393 static bool equal (dwarf_file_data *, const char *);
3394 };
3395
3396 /* Filenames referenced by this compilation unit. */
3397 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3398
3399 struct decl_die_hasher : ggc_ptr_hash<die_node>
3400 {
3401 typedef tree compare_type;
3402
3403 static hashval_t hash (die_node *);
3404 static bool equal (die_node *, tree);
3405 };
3406 /* A hash table of references to DIE's that describe declarations.
3407 The key is a DECL_UID() which is a unique number identifying each decl. */
3408 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3409
3410 struct GTY ((for_user)) variable_value_struct {
3411 unsigned int decl_id;
3412 vec<dw_die_ref, va_gc> *dies;
3413 };
3414
3415 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3416 {
3417 typedef tree compare_type;
3418
3419 static hashval_t hash (variable_value_struct *);
3420 static bool equal (variable_value_struct *, tree);
3421 };
3422 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3423 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs that are the
3424 DECL_CONTEXT of the referenced VAR_DECLs. */
3425 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3426
3427 struct block_die_hasher : ggc_ptr_hash<die_struct>
3428 {
3429 static hashval_t hash (die_struct *);
3430 static bool equal (die_struct *, die_struct *);
3431 };
3432
3433 /* A hash table of references to DIE's that describe COMMON blocks.
3434 The key is DECL_UID() ^ die_parent. */
3435 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3436
3437 typedef struct GTY(()) die_arg_entry_struct {
3438 dw_die_ref die;
3439 tree arg;
3440 } die_arg_entry;
3441
3442
3443 /* Node of the variable location list. */
3444 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3445 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3446 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3447 in mode of the EXPR_LIST node and first EXPR_LIST operand
3448 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3449 location or NULL for padding. For larger bitsizes,
3450 mode is 0 and first operand is a CONCAT with bitsize
3451 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3452 NULL as second operand. */
3453 rtx GTY (()) loc;
3454 const char * GTY (()) label;
3455 struct var_loc_node * GTY (()) next;
3456 var_loc_view view;
3457 };
3458
3459 /* Variable location list. */
3460 struct GTY ((for_user)) var_loc_list_def {
3461 struct var_loc_node * GTY (()) first;
3462
3463 /* Pointer to the last but one or last element of the
3464 chained list. If the list is empty, both first and
3465 last are NULL; if the list contains just one node
3466 or the last node is certainly not redundant, it points
3467 to the last node; otherwise it points to the last but one.
3468 Do not mark it for GC because it is marked through the chain. */
3469 struct var_loc_node * GTY ((skip ("%h"))) last;
3470
3471 /* Pointer to the last element before the section switch;
3472 if NULL, either sections weren't switched or first
3473 is after the section switch. */
3474 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3475
3476 /* DECL_UID of the variable decl. */
3477 unsigned int decl_id;
3478 };
3479 typedef struct var_loc_list_def var_loc_list;
3480
3481 /* Call argument location list. */
3482 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3483 rtx GTY (()) call_arg_loc_note;
3484 const char * GTY (()) label;
3485 tree GTY (()) block;
3486 bool tail_call_p;
3487 rtx GTY (()) symbol_ref;
3488 struct call_arg_loc_node * GTY (()) next;
3489 };
3490
3491
3492 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3493 {
3494 typedef const_tree compare_type;
3495
3496 static hashval_t hash (var_loc_list *);
3497 static bool equal (var_loc_list *, const_tree);
3498 };
3499
3500 /* Table of decl location linked lists. */
3501 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3502
3503 /* Head and tail of call_arg_loc chain. */
3504 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3505 static struct call_arg_loc_node *call_arg_loc_last;
3506
3507 /* Number of call sites in the current function. */
3508 static int call_site_count = -1;
3509 /* Number of tail call sites in the current function. */
3510 static int tail_call_site_count = -1;
3511
3512 /* A cached location list. */
3513 struct GTY ((for_user)) cached_dw_loc_list_def {
3514 /* The DECL_UID of the decl that this entry describes. */
3515 unsigned int decl_id;
3516
3517 /* The cached location list. */
3518 dw_loc_list_ref loc_list;
3519 };
3520 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3521
3522 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3523 {
3524
3525 typedef const_tree compare_type;
3526
3527 static hashval_t hash (cached_dw_loc_list *);
3528 static bool equal (cached_dw_loc_list *, const_tree);
3529 };
3530
3531 /* Table of cached location lists. */
3532 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3533
3534 /* A vector of references to DIE's that are uniquely identified by their tag,
3535 presence/absence of children DIE's, and list of attribute/value pairs. */
3536 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3537
3538 /* A hash map to remember the stack usage for DWARF procedures. The value
3539 stored is the stack size difference between before the DWARF procedure
3540 invokation and after it returned. In other words, for a DWARF procedure
3541 that consumes N stack slots and that pushes M ones, this stores M - N. */
3542 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3543
3544 /* A global counter for generating labels for line number data. */
3545 static unsigned int line_info_label_num;
3546
3547 /* The current table to which we should emit line number information
3548 for the current function. This will be set up at the beginning of
3549 assembly for the function. */
3550 static GTY(()) dw_line_info_table *cur_line_info_table;
3551
3552 /* The two default tables of line number info. */
3553 static GTY(()) dw_line_info_table *text_section_line_info;
3554 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3555
3556 /* The set of all non-default tables of line number info. */
3557 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3558
3559 /* A flag telling the pubnames/pubtypes export code whether there is an
3560 info section to refer to. */
3561 static bool info_section_emitted;
3562
3563 /* A pointer to the base of a table that contains a list of publicly
3564 accessible names. */
3565 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3566
3567 /* A pointer to the base of a table that contains a list of publicly
3568 accessible types. */
3569 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3570
3571 /* A pointer to the base of a table that contains a list of macro
3572 defines/undefines (and file start/end markers). */
3573 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3574
3575 /* True if .debug_macinfo or .debug_macro section is going to be
3576 emitted. */
3577 #define have_macinfo \
3578 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3579 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3580 && !macinfo_table->is_empty ())
3581
3582 /* Vector of dies for which we should generate .debug_ranges info. */
3583 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3584
3585 /* Vector of pairs of labels referenced in ranges_table. */
3586 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3587
3588 /* Whether we have location lists that need outputting. */
3589 static GTY(()) bool have_location_lists;
3590
3591 /* Unique label counter. */
3592 static GTY(()) unsigned int loclabel_num;
3593
3594 /* Unique label counter for point-of-call tables. */
3595 static GTY(()) unsigned int poc_label_num;
3596
3597 /* The last file entry emitted by maybe_emit_file(). */
3598 static GTY(()) struct dwarf_file_data * last_emitted_file;
3599
3600 /* Number of internal labels generated by gen_internal_sym(). */
3601 static GTY(()) int label_num;
3602
3603 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3604
3605 /* Instances of generic types for which we need to generate debug
3606 info that describe their generic parameters and arguments. That
3607 generation needs to happen once all types are properly laid out so
3608 we do it at the end of compilation. */
3609 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3610
3611 /* Offset from the "steady-state frame pointer" to the frame base,
3612 within the current function. */
3613 static poly_int64 frame_pointer_fb_offset;
3614 static bool frame_pointer_fb_offset_valid;
3615
3616 static vec<dw_die_ref> base_types;
3617
3618 /* Flags to represent a set of attribute classes for attributes that represent
3619 a scalar value (bounds, pointers, ...). */
3620 enum dw_scalar_form
3621 {
3622 dw_scalar_form_constant = 0x01,
3623 dw_scalar_form_exprloc = 0x02,
3624 dw_scalar_form_reference = 0x04
3625 };
3626
3627 /* Forward declarations for functions defined in this file. */
3628
3629 static int is_pseudo_reg (const_rtx);
3630 static tree type_main_variant (tree);
3631 static int is_tagged_type (const_tree);
3632 static const char *dwarf_tag_name (unsigned);
3633 static const char *dwarf_attr_name (unsigned);
3634 static const char *dwarf_form_name (unsigned);
3635 static tree decl_ultimate_origin (const_tree);
3636 static tree decl_class_context (tree);
3637 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3638 static inline enum dw_val_class AT_class (dw_attr_node *);
3639 static inline unsigned int AT_index (dw_attr_node *);
3640 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3641 static inline unsigned AT_flag (dw_attr_node *);
3642 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3643 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3644 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3645 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3646 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3647 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3648 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3649 unsigned int, unsigned char *);
3650 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3651 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3652 static inline const char *AT_string (dw_attr_node *);
3653 static enum dwarf_form AT_string_form (dw_attr_node *);
3654 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3655 static void add_AT_specification (dw_die_ref, dw_die_ref);
3656 static inline dw_die_ref AT_ref (dw_attr_node *);
3657 static inline int AT_ref_external (dw_attr_node *);
3658 static inline void set_AT_ref_external (dw_attr_node *, int);
3659 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3660 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3661 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3662 dw_loc_list_ref);
3663 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3664 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3666 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3667 static void remove_addr_table_entry (addr_table_entry *);
3668 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3669 static inline rtx AT_addr (dw_attr_node *);
3670 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3674 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3675 unsigned long, bool);
3676 static inline const char *AT_lbl (dw_attr_node *);
3677 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3678 static const char *get_AT_low_pc (dw_die_ref);
3679 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3680 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3681 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3682 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3683 static bool is_c (void);
3684 static bool is_cxx (void);
3685 static bool is_cxx (const_tree);
3686 static bool is_fortran (void);
3687 static bool is_ada (void);
3688 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3689 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3690 static void add_child_die (dw_die_ref, dw_die_ref);
3691 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3692 static dw_die_ref lookup_type_die (tree);
3693 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3694 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3695 static void equate_type_number_to_die (tree, dw_die_ref);
3696 static dw_die_ref lookup_decl_die (tree);
3697 static var_loc_list *lookup_decl_loc (const_tree);
3698 static void equate_decl_number_to_die (tree, dw_die_ref);
3699 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3700 static void print_spaces (FILE *);
3701 static void print_die (dw_die_ref, FILE *);
3702 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3704 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3705 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3706 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3707 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3708 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3709 struct md5_ctx *, int *);
3710 struct checksum_attributes;
3711 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3712 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3713 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3714 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3715 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3716 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3717 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3718 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3719 static int is_type_die (dw_die_ref);
3720 static inline bool is_template_instantiation (dw_die_ref);
3721 static int is_declaration_die (dw_die_ref);
3722 static int should_move_die_to_comdat (dw_die_ref);
3723 static dw_die_ref clone_as_declaration (dw_die_ref);
3724 static dw_die_ref clone_die (dw_die_ref);
3725 static dw_die_ref clone_tree (dw_die_ref);
3726 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3727 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3728 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3729 static dw_die_ref generate_skeleton (dw_die_ref);
3730 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3731 dw_die_ref,
3732 dw_die_ref);
3733 static void break_out_comdat_types (dw_die_ref);
3734 static void copy_decls_for_unworthy_types (dw_die_ref);
3735
3736 static void add_sibling_attributes (dw_die_ref);
3737 static void output_location_lists (dw_die_ref);
3738 static int constant_size (unsigned HOST_WIDE_INT);
3739 static unsigned long size_of_die (dw_die_ref);
3740 static void calc_die_sizes (dw_die_ref);
3741 static void calc_base_type_die_sizes (void);
3742 static void mark_dies (dw_die_ref);
3743 static void unmark_dies (dw_die_ref);
3744 static void unmark_all_dies (dw_die_ref);
3745 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3746 static unsigned long size_of_aranges (void);
3747 static enum dwarf_form value_format (dw_attr_node *);
3748 static void output_value_format (dw_attr_node *);
3749 static void output_abbrev_section (void);
3750 static void output_die_abbrevs (unsigned long, dw_die_ref);
3751 static void output_die (dw_die_ref);
3752 static void output_compilation_unit_header (enum dwarf_unit_type);
3753 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3754 static void output_comdat_type_unit (comdat_type_node *, bool);
3755 static const char *dwarf2_name (tree, int);
3756 static void add_pubname (tree, dw_die_ref);
3757 static void add_enumerator_pubname (const char *, dw_die_ref);
3758 static void add_pubname_string (const char *, dw_die_ref);
3759 static void add_pubtype (tree, dw_die_ref);
3760 static void output_pubnames (vec<pubname_entry, va_gc> *);
3761 static void output_aranges (void);
3762 static unsigned int add_ranges (const_tree, bool = false);
3763 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3764 bool *, bool);
3765 static void output_ranges (void);
3766 static dw_line_info_table *new_line_info_table (void);
3767 static void output_line_info (bool);
3768 static void output_file_names (void);
3769 static dw_die_ref base_type_die (tree, bool);
3770 static int is_base_type (tree);
3771 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3772 static int decl_quals (const_tree);
3773 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3774 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3775 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3776 static unsigned int dbx_reg_number (const_rtx);
3777 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3778 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3779 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3780 enum var_init_status);
3781 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3782 enum var_init_status);
3783 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3784 enum var_init_status);
3785 static int is_based_loc (const_rtx);
3786 static bool resolve_one_addr (rtx *);
3787 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3788 enum var_init_status);
3789 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3790 enum var_init_status);
3791 struct loc_descr_context;
3792 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3793 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3794 static dw_loc_list_ref loc_list_from_tree (tree, int,
3795 struct loc_descr_context *);
3796 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3797 struct loc_descr_context *);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_desc_attribute (dw_die_ref, tree);
3819 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3820 static void add_comp_dir_attribute (dw_die_ref);
3821 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3822 struct loc_descr_context *);
3823 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3824 struct loc_descr_context *);
3825 static void add_subscript_info (dw_die_ref, tree, bool);
3826 static void add_byte_size_attribute (dw_die_ref, tree);
3827 static void add_alignment_attribute (dw_die_ref, tree);
3828 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3829 struct vlr_context *);
3830 static void add_bit_size_attribute (dw_die_ref, tree);
3831 static void add_prototyped_attribute (dw_die_ref, tree);
3832 static void add_abstract_origin_attribute (dw_die_ref, tree);
3833 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3834 static void add_src_coords_attributes (dw_die_ref, tree);
3835 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3836 static void add_discr_value (dw_die_ref, dw_discr_value *);
3837 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3838 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3839 static dw_die_ref scope_die_for (tree, dw_die_ref);
3840 static inline int local_scope_p (dw_die_ref);
3841 static inline int class_scope_p (dw_die_ref);
3842 static inline int class_or_namespace_scope_p (dw_die_ref);
3843 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3844 static void add_calling_convention_attribute (dw_die_ref, tree);
3845 static const char *type_tag (const_tree);
3846 static tree member_declared_type (const_tree);
3847 #if 0
3848 static const char *decl_start_label (tree);
3849 #endif
3850 static void gen_array_type_die (tree, dw_die_ref);
3851 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3852 #if 0
3853 static void gen_entry_point_die (tree, dw_die_ref);
3854 #endif
3855 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3858 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3859 static void gen_formal_types_die (tree, dw_die_ref);
3860 static void gen_subprogram_die (tree, dw_die_ref);
3861 static void gen_variable_die (tree, tree, dw_die_ref);
3862 static void gen_const_die (tree, dw_die_ref);
3863 static void gen_label_die (tree, dw_die_ref);
3864 static void gen_lexical_block_die (tree, dw_die_ref);
3865 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3866 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3867 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3868 static dw_die_ref gen_compile_unit_die (const char *);
3869 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3870 static void gen_member_die (tree, dw_die_ref);
3871 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3872 enum debug_info_usage);
3873 static void gen_subroutine_type_die (tree, dw_die_ref);
3874 static void gen_typedef_die (tree, dw_die_ref);
3875 static void gen_type_die (tree, dw_die_ref);
3876 static void gen_block_die (tree, dw_die_ref);
3877 static void decls_for_scope (tree, dw_die_ref, bool = true);
3878 static bool is_naming_typedef_decl (const_tree);
3879 static inline dw_die_ref get_context_die (tree);
3880 static void gen_namespace_die (tree, dw_die_ref);
3881 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3882 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3883 static dw_die_ref force_decl_die (tree);
3884 static dw_die_ref force_type_die (tree);
3885 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3886 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3887 static struct dwarf_file_data * lookup_filename (const char *);
3888 static void retry_incomplete_types (void);
3889 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3890 static void gen_generic_params_dies (tree);
3891 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3892 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3893 static void splice_child_die (dw_die_ref, dw_die_ref);
3894 static int file_info_cmp (const void *, const void *);
3895 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3896 const char *, var_loc_view, const char *);
3897 static void output_loc_list (dw_loc_list_ref);
3898 static char *gen_internal_sym (const char *);
3899 static bool want_pubnames (void);
3900
3901 static void prune_unmark_dies (dw_die_ref);
3902 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3903 static void prune_unused_types_mark (dw_die_ref, int);
3904 static void prune_unused_types_walk (dw_die_ref);
3905 static void prune_unused_types_walk_attribs (dw_die_ref);
3906 static void prune_unused_types_prune (dw_die_ref);
3907 static void prune_unused_types (void);
3908 static int maybe_emit_file (struct dwarf_file_data *fd);
3909 static inline const char *AT_vms_delta1 (dw_attr_node *);
3910 static inline const char *AT_vms_delta2 (dw_attr_node *);
3911 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3912 const char *, const char *);
3913 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3914 static void gen_remaining_tmpl_value_param_die_attribute (void);
3915 static bool generic_type_p (tree);
3916 static void schedule_generic_params_dies_gen (tree t);
3917 static void gen_scheduled_generic_parms_dies (void);
3918 static void resolve_variable_values (void);
3919
3920 static const char *comp_dir_string (void);
3921
3922 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3923
3924 /* enum for tracking thread-local variables whose address is really an offset
3925 relative to the TLS pointer, which will need link-time relocation, but will
3926 not need relocation by the DWARF consumer. */
3927
3928 enum dtprel_bool
3929 {
3930 dtprel_false = 0,
3931 dtprel_true = 1
3932 };
3933
3934 /* Return the operator to use for an address of a variable. For dtprel_true, we
3935 use DW_OP_const*. For regular variables, which need both link-time
3936 relocation and consumer-level relocation (e.g., to account for shared objects
3937 loaded at a random address), we use DW_OP_addr*. */
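/* For instance, with -gsplit-dwarf a TLS offset is emitted with
   DW_OP_constx (or its GNU equivalent) and an ordinary address with
   DW_OP_addrx; without split DWARF the choices are DW_OP_const4u or
   DW_OP_const8u (depending on DWARF2_ADDR_SIZE) and DW_OP_addr,
   respectively.  */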
3938
3939 static inline enum dwarf_location_atom
3940 dw_addr_op (enum dtprel_bool dtprel)
3941 {
3942 if (dtprel == dtprel_true)
3943 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3944 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3945 else
3946 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3947 }
3948
3949 /* Return a pointer to a newly allocated address location description. If
3950 dwarf_split_debug_info is true, then record the address with the appropriate
3951 relocation. */
3952 static inline dw_loc_descr_ref
3953 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3954 {
3955 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3956
3957 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3958 ref->dw_loc_oprnd1.v.val_addr = addr;
3959 ref->dtprel = dtprel;
3960 if (dwarf_split_debug_info)
3961 ref->dw_loc_oprnd1.val_entry
3962 = add_addr_table_entry (addr,
3963 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3964 else
3965 ref->dw_loc_oprnd1.val_entry = NULL;
3966
3967 return ref;
3968 }
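/* Illustrative sketch only (hypothetical helper, not used by the emitter):
   build a location description for the address of an ordinary, non-TLS
   variable.  Passing dtprel_false selects DW_OP_addr (or DW_OP_addrx with
   split DWARF), i.e. an address that needs normal relocation rather than
   a TLS offset.  */

static inline dw_loc_descr_ref
example_plain_addr_loc_descr (rtx addr)
{
  return new_addr_loc_descr (addr, dtprel_false);
}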
3969
3970 /* Section names used to hold DWARF debugging information. */
3971
3972 #ifndef DEBUG_INFO_SECTION
3973 #define DEBUG_INFO_SECTION ".debug_info"
3974 #endif
3975 #ifndef DEBUG_DWO_INFO_SECTION
3976 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3977 #endif
3978 #ifndef DEBUG_LTO_INFO_SECTION
3979 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3982 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3983 #endif
3984 #ifndef DEBUG_ABBREV_SECTION
3985 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3986 #endif
3987 #ifndef DEBUG_LTO_ABBREV_SECTION
3988 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3989 #endif
3990 #ifndef DEBUG_DWO_ABBREV_SECTION
3991 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3994 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3995 #endif
3996 #ifndef DEBUG_ARANGES_SECTION
3997 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3998 #endif
3999 #ifndef DEBUG_ADDR_SECTION
4000 #define DEBUG_ADDR_SECTION ".debug_addr"
4001 #endif
4002 #ifndef DEBUG_MACINFO_SECTION
4003 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4004 #endif
4005 #ifndef DEBUG_LTO_MACINFO_SECTION
4006 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4007 #endif
4008 #ifndef DEBUG_DWO_MACINFO_SECTION
4009 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4010 #endif
4011 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4012 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4013 #endif
4014 #ifndef DEBUG_MACRO_SECTION
4015 #define DEBUG_MACRO_SECTION ".debug_macro"
4016 #endif
4017 #ifndef DEBUG_LTO_MACRO_SECTION
4018 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4019 #endif
4020 #ifndef DEBUG_DWO_MACRO_SECTION
4021 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4022 #endif
4023 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4024 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4025 #endif
4026 #ifndef DEBUG_LINE_SECTION
4027 #define DEBUG_LINE_SECTION ".debug_line"
4028 #endif
4029 #ifndef DEBUG_LTO_LINE_SECTION
4030 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4031 #endif
4032 #ifndef DEBUG_DWO_LINE_SECTION
4033 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4034 #endif
4035 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4036 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4037 #endif
4038 #ifndef DEBUG_LOC_SECTION
4039 #define DEBUG_LOC_SECTION ".debug_loc"
4040 #endif
4041 #ifndef DEBUG_DWO_LOC_SECTION
4042 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4043 #endif
4044 #ifndef DEBUG_LOCLISTS_SECTION
4045 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4046 #endif
4047 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4048 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4049 #endif
4050 #ifndef DEBUG_PUBNAMES_SECTION
4051 #define DEBUG_PUBNAMES_SECTION \
4052 ((debug_generate_pub_sections == 2) \
4053 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4054 #endif
4055 #ifndef DEBUG_PUBTYPES_SECTION
4056 #define DEBUG_PUBTYPES_SECTION \
4057 ((debug_generate_pub_sections == 2) \
4058 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4059 #endif
4060 #ifndef DEBUG_STR_OFFSETS_SECTION
4061 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4062 #endif
4063 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4064 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4065 #endif
4066 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4067 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4068 #endif
4069 #ifndef DEBUG_STR_SECTION
4070 #define DEBUG_STR_SECTION ".debug_str"
4071 #endif
4072 #ifndef DEBUG_LTO_STR_SECTION
4073 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4074 #endif
4075 #ifndef DEBUG_STR_DWO_SECTION
4076 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4077 #endif
4078 #ifndef DEBUG_LTO_STR_DWO_SECTION
4079 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4080 #endif
4081 #ifndef DEBUG_RANGES_SECTION
4082 #define DEBUG_RANGES_SECTION ".debug_ranges"
4083 #endif
4084 #ifndef DEBUG_RNGLISTS_SECTION
4085 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4086 #endif
4087 #ifndef DEBUG_LINE_STR_SECTION
4088 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4089 #endif
4090 #ifndef DEBUG_LTO_LINE_STR_SECTION
4091 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4092 #endif
4093
4094 /* Standard ELF section names for compiled code and data. */
4095 #ifndef TEXT_SECTION_NAME
4096 #define TEXT_SECTION_NAME ".text"
4097 #endif
4098
4099 /* Section flags for .debug_str section. */
4100 #define DEBUG_STR_SECTION_FLAGS \
4101 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4102 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4103 : SECTION_DEBUG)
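/* The trailing "| 1" above is the SECTION_ENTSIZE entity size: mergeable
   debug strings are NUL-terminated sequences of 1-byte characters.  */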
4104
4105 /* Section flags for .debug_str.dwo section. */
4106 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4107
4108 /* Attribute used to refer to the macro section. */
4109 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4110 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
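/* That is: DWARF 5 and later use DW_AT_macros; for older versions,
   -gstrict-dwarf falls back to DW_AT_macro_info, and otherwise the GNU
   DW_AT_GNU_macros extension is used.  */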
4111
4112 /* Labels we insert at the beginning of sections, which we can reference
4113 instead of the section names themselves. */
4114
4115 #ifndef TEXT_SECTION_LABEL
4116 #define TEXT_SECTION_LABEL "Ltext"
4117 #endif
4118 #ifndef COLD_TEXT_SECTION_LABEL
4119 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4120 #endif
4121 #ifndef DEBUG_LINE_SECTION_LABEL
4122 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4123 #endif
4124 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4125 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4126 #endif
4127 #ifndef DEBUG_INFO_SECTION_LABEL
4128 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4129 #endif
4130 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4131 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4132 #endif
4133 #ifndef DEBUG_ABBREV_SECTION_LABEL
4134 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4135 #endif
4136 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4137 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4138 #endif
4139 #ifndef DEBUG_ADDR_SECTION_LABEL
4140 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4141 #endif
4142 #ifndef DEBUG_LOC_SECTION_LABEL
4143 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4144 #endif
4145 #ifndef DEBUG_RANGES_SECTION_LABEL
4146 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4147 #endif
4148 #ifndef DEBUG_MACINFO_SECTION_LABEL
4149 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4150 #endif
4151 #ifndef DEBUG_MACRO_SECTION_LABEL
4152 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4153 #endif
4154 #define SKELETON_COMP_DIE_ABBREV 1
4155 #define SKELETON_TYPE_DIE_ABBREV 2
4156
4157 /* Definitions of defaults for formats and names of various special
4158 (artificial) labels which may be generated within this file (when the -g
4159 option is used and DWARF2_DEBUGGING_INFO is in effect).
4160 If necessary, these may be overridden from within the tm.h file, but
4161 typically, overriding these defaults is unnecessary. */
4162
4163 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178
4179 #ifndef TEXT_END_LABEL
4180 #define TEXT_END_LABEL "Letext"
4181 #endif
4182 #ifndef COLD_END_LABEL
4183 #define COLD_END_LABEL "Letext_cold"
4184 #endif
4185 #ifndef BLOCK_BEGIN_LABEL
4186 #define BLOCK_BEGIN_LABEL "LBB"
4187 #endif
4188 #ifndef BLOCK_INLINE_ENTRY_LABEL
4189 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4190 #endif
4191 #ifndef BLOCK_END_LABEL
4192 #define BLOCK_END_LABEL "LBE"
4193 #endif
4194 #ifndef LINE_CODE_LABEL
4195 #define LINE_CODE_LABEL "LM"
4196 #endif
4197
4198 \f
4199 /* Return the root of the DIE's built for the current compilation unit. */
4200 static dw_die_ref
4201 comp_unit_die (void)
4202 {
4203 if (!single_comp_unit_die)
4204 single_comp_unit_die = gen_compile_unit_die (NULL);
4205 return single_comp_unit_die;
4206 }
4207
4208 /* We allow a language front-end to designate a function that is to be
4209 called to "demangle" any name before it is put into a DIE. */
4210
4211 static const char *(*demangle_name_func) (const char *);
4212
4213 void
4214 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4215 {
4216 demangle_name_func = func;
4217 }
4218
4219 /* Test if rtl node points to a pseudo register. */
4220
4221 static inline int
4222 is_pseudo_reg (const_rtx rtl)
4223 {
4224 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4225 || (GET_CODE (rtl) == SUBREG
4226 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4227 }
4228
4229 /* Return a reference to a type, with its const and volatile qualifiers
4230 removed. */
4231
4232 static inline tree
4233 type_main_variant (tree type)
4234 {
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 /* ??? There really should be only one main variant among any group of
4238 variants of a given type (and all of the MAIN_VARIANT values for all
4239 members of the group should point to that one type) but sometimes the C
4240 front-end messes this up for array types, so we work around that bug
4241 here. */
4242 if (TREE_CODE (type) == ARRAY_TYPE)
4243 while (type != TYPE_MAIN_VARIANT (type))
4244 type = TYPE_MAIN_VARIANT (type);
4245
4246 return type;
4247 }
4248
4249 /* Return nonzero if the given type node represents a tagged type. */
4250
4251 static inline int
4252 is_tagged_type (const_tree type)
4253 {
4254 enum tree_code code = TREE_CODE (type);
4255
4256 return (code == RECORD_TYPE || code == UNION_TYPE
4257 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4258 }
4259
4260 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4261
4262 static void
4263 get_ref_die_offset_label (char *label, dw_die_ref ref)
4264 {
4265 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4266 }
4267
4268 /* Return die_offset of a DIE reference to a base type. */
4269
4270 static unsigned long int
4271 get_base_type_offset (dw_die_ref ref)
4272 {
4273 if (ref->die_offset)
4274 return ref->die_offset;
4275 if (comp_unit_die ()->die_abbrev)
4276 {
4277 calc_base_type_die_sizes ();
4278 gcc_assert (ref->die_offset);
4279 }
4280 return ref->die_offset;
4281 }
4282
4283 /* Return die_offset of a DIE reference other than a base type. */
4284
4285 static unsigned long int
4286 get_ref_die_offset (dw_die_ref ref)
4287 {
4288 gcc_assert (ref->die_offset);
4289 return ref->die_offset;
4290 }
4291
4292 /* Convert a DIE tag into its string name. */
4293
4294 static const char *
4295 dwarf_tag_name (unsigned int tag)
4296 {
4297 const char *name = get_DW_TAG_name (tag);
4298
4299 if (name != NULL)
4300 return name;
4301
4302 return "DW_TAG_<unknown>";
4303 }
4304
4305 /* Convert a DWARF attribute code into its string name. */
4306
4307 static const char *
4308 dwarf_attr_name (unsigned int attr)
4309 {
4310 const char *name;
4311
4312 switch (attr)
4313 {
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_prologue:
4316 return "DW_AT_HP_prologue";
4317 #else
4318 case DW_AT_MIPS_loop_unroll_factor:
4319 return "DW_AT_MIPS_loop_unroll_factor";
4320 #endif
4321
4322 #if VMS_DEBUGGING_INFO
4323 case DW_AT_HP_epilogue:
4324 return "DW_AT_HP_epilogue";
4325 #else
4326 case DW_AT_MIPS_stride:
4327 return "DW_AT_MIPS_stride";
4328 #endif
4329 }
4330
4331 name = get_DW_AT_name (attr);
4332
4333 if (name != NULL)
4334 return name;
4335
4336 return "DW_AT_<unknown>";
4337 }
4338
4339 /* Convert a DWARF value form code into its string name. */
4340
4341 static const char *
4342 dwarf_form_name (unsigned int form)
4343 {
4344 const char *name = get_DW_FORM_name (form);
4345
4346 if (name != NULL)
4347 return name;
4348
4349 return "DW_FORM_<unknown>";
4350 }
4351 \f
4352 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4353 instance of an inlined instance of a decl which is local to an inline
4354 function, so we have to trace all of the way back through the origin chain
4355 to find out what sort of node actually served as the original seed for the
4356 given block. */
4357
4358 static tree
4359 decl_ultimate_origin (const_tree decl)
4360 {
4361 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4362 return NULL_TREE;
4363
4364 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4365 we're trying to output the abstract instance of this function. */
4366 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4367 return NULL_TREE;
4368
4369 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4370 most distant ancestor, this should never happen. */
4371 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4372
4373 return DECL_ABSTRACT_ORIGIN (decl);
4374 }
4375
4376 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4377 of a virtual function may refer to a base class, so we check the 'this'
4378 parameter. */
4379
4380 static tree
4381 decl_class_context (tree decl)
4382 {
4383 tree context = NULL_TREE;
4384
4385 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4386 context = DECL_CONTEXT (decl);
4387 else
4388 context = TYPE_MAIN_VARIANT
4389 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4390
4391 if (context && !TYPE_P (context))
4392 context = NULL_TREE;
4393
4394 return context;
4395 }
4396 \f
4397 /* Add an attribute/value pair to a DIE. */
4398
4399 static inline void
4400 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4401 {
4402 /* Maybe this should be an assert? */
4403 if (die == NULL)
4404 return;
4405
4406 if (flag_checking)
4407 {
4408 /* Check we do not add duplicate attrs. Can't use get_AT here
4409 because that recurses to the specification/abstract origin DIE. */
4410 dw_attr_node *a;
4411 unsigned ix;
4412 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4413 gcc_assert (a->dw_attr != attr->dw_attr);
4414 }
4415
4416 vec_safe_reserve (die->die_attr, 1);
4417 vec_safe_push (die->die_attr, *attr);
4418 }
4419
4420 static inline enum dw_val_class
4421 AT_class (dw_attr_node *a)
4422 {
4423 return a->dw_attr_val.val_class;
4424 }
4425
4426 /* Return the index for any attribute that will be referenced with a
4427 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4428 indices are stored in dw_attr_val.v.val_str, which is reference
4429 counted for pruning. */
4430
4431 static inline unsigned int
4432 AT_index (dw_attr_node *a)
4433 {
4434 if (AT_class (a) == dw_val_class_str)
4435 return a->dw_attr_val.v.val_str->index;
4436 else if (a->dw_attr_val.val_entry != NULL)
4437 return a->dw_attr_val.val_entry->index;
4438 return NOT_INDEXED;
4439 }
4440
4441 /* Add a flag value attribute to a DIE. */
4442
4443 static inline void
4444 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4445 {
4446 dw_attr_node attr;
4447
4448 attr.dw_attr = attr_kind;
4449 attr.dw_attr_val.val_class = dw_val_class_flag;
4450 attr.dw_attr_val.val_entry = NULL;
4451 attr.dw_attr_val.v.val_flag = flag;
4452 add_dwarf_attr (die, &attr);
4453 }
4454
4455 static inline unsigned
4456 AT_flag (dw_attr_node *a)
4457 {
4458 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4459 return a->dw_attr_val.v.val_flag;
4460 }
4461
4462 /* Add a signed integer attribute value to a DIE. */
4463
4464 static inline void
4465 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4466 {
4467 dw_attr_node attr;
4468
4469 attr.dw_attr = attr_kind;
4470 attr.dw_attr_val.val_class = dw_val_class_const;
4471 attr.dw_attr_val.val_entry = NULL;
4472 attr.dw_attr_val.v.val_int = int_val;
4473 add_dwarf_attr (die, &attr);
4474 }
4475
4476 static inline HOST_WIDE_INT
4477 AT_int (dw_attr_node *a)
4478 {
4479 gcc_assert (a && (AT_class (a) == dw_val_class_const
4480 || AT_class (a) == dw_val_class_const_implicit));
4481 return a->dw_attr_val.v.val_int;
4482 }
4483
4484 /* Add an unsigned integer attribute value to a DIE. */
4485
4486 static inline void
4487 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4488 unsigned HOST_WIDE_INT unsigned_val)
4489 {
4490 dw_attr_node attr;
4491
4492 attr.dw_attr = attr_kind;
4493 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4494 attr.dw_attr_val.val_entry = NULL;
4495 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4496 add_dwarf_attr (die, &attr);
4497 }
4498
4499 static inline unsigned HOST_WIDE_INT
4500 AT_unsigned (dw_attr_node *a)
4501 {
4502 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4503 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4504 return a->dw_attr_val.v.val_unsigned;
4505 }
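/* Minimal usage sketch (hypothetical helper, for illustration only):
   record a constant byte size on a DIE and read it back through the
   matching accessor.  */

static inline void
example_add_and_read_byte_size (dw_die_ref die)
{
  add_AT_unsigned (die, DW_AT_byte_size, 4);
  dw_attr_node *a = get_AT (die, DW_AT_byte_size);
  gcc_checking_assert (a && AT_unsigned (a) == 4);
}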
4506
4507 /* Add an unsigned wide integer attribute value to a DIE. */
4508
4509 static inline void
4510 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4511 const wide_int& w)
4512 {
4513 dw_attr_node attr;
4514
4515 attr.dw_attr = attr_kind;
4516 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4517 attr.dw_attr_val.val_entry = NULL;
4518 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4519 *attr.dw_attr_val.v.val_wide = w;
4520 add_dwarf_attr (die, &attr);
4521 }
4522
4523 /* Add an unsigned double integer attribute value to a DIE. */
4524
4525 static inline void
4526 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4527 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4528 {
4529 dw_attr_node attr;
4530
4531 attr.dw_attr = attr_kind;
4532 attr.dw_attr_val.val_class = dw_val_class_const_double;
4533 attr.dw_attr_val.val_entry = NULL;
4534 attr.dw_attr_val.v.val_double.high = high;
4535 attr.dw_attr_val.v.val_double.low = low;
4536 add_dwarf_attr (die, &attr);
4537 }
4538
4539 /* Add a floating point or vector constant attribute value to a DIE. */
4540
4541 static inline void
4542 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4543 unsigned int length, unsigned int elt_size, unsigned char *array)
4544 {
4545 dw_attr_node attr;
4546
4547 attr.dw_attr = attr_kind;
4548 attr.dw_attr_val.val_class = dw_val_class_vec;
4549 attr.dw_attr_val.val_entry = NULL;
4550 attr.dw_attr_val.v.val_vec.length = length;
4551 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4552 attr.dw_attr_val.v.val_vec.array = array;
4553 add_dwarf_attr (die, &attr);
4554 }
4555
4556 /* Add an 8-byte data attribute value to a DIE. */
4557
4558 static inline void
4559 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4560 unsigned char data8[8])
4561 {
4562 dw_attr_node attr;
4563
4564 attr.dw_attr = attr_kind;
4565 attr.dw_attr_val.val_class = dw_val_class_data8;
4566 attr.dw_attr_val.val_entry = NULL;
4567 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4568 add_dwarf_attr (die, &attr);
4569 }
4570
4571 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4572 dwarf_split_debug_info, address attributes in dies destined for the
4573 final executable have force_direct set to avoid using indexed
4574 references. */
4575
4576 static inline void
4577 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4578 bool force_direct)
4579 {
4580 dw_attr_node attr;
4581 char * lbl_id;
4582
4583 lbl_id = xstrdup (lbl_low);
4584 attr.dw_attr = DW_AT_low_pc;
4585 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4586 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4587 if (dwarf_split_debug_info && !force_direct)
4588 attr.dw_attr_val.val_entry
4589 = add_addr_table_entry (lbl_id, ate_kind_label);
4590 else
4591 attr.dw_attr_val.val_entry = NULL;
4592 add_dwarf_attr (die, &attr);
4593
4594 attr.dw_attr = DW_AT_high_pc;
4595 if (dwarf_version < 4)
4596 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4597 else
4598 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4599 lbl_id = xstrdup (lbl_high);
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4602 && dwarf_split_debug_info && !force_direct)
4603 attr.dw_attr_val.val_entry
4604 = add_addr_table_entry (lbl_id, ate_kind_label);
4605 else
4606 attr.dw_attr_val.val_entry = NULL;
4607 add_dwarf_attr (die, &attr);
4608 }
4609
4610 /* Hash and equality functions for debug_str_hash. */
4611
4612 hashval_t
4613 indirect_string_hasher::hash (indirect_string_node *x)
4614 {
4615 return htab_hash_string (x->str);
4616 }
4617
4618 bool
4619 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4620 {
4621 return strcmp (x1->str, x2) == 0;
4622 }
4623
4624 /* Add STR to the given string hash table. */
4625
4626 static struct indirect_string_node *
4627 find_AT_string_in_table (const char *str,
4628 hash_table<indirect_string_hasher> *table,
4629 enum insert_option insert = INSERT)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str, enum insert_option insert = INSERT)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash, insert);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* If the string is already indirect, this is a no-op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is shorter than or equal to the size of the reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in .debug_str
4751 section, only put it into .debug_str if it is worth even in this
4752 single module. */
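/* Worked example, assuming DWARF_OFFSET_SIZE == 4 and no SECTION_MERGE
   support: a 10-byte string referenced once would save
   (10 - 4) * 1 = 6 bytes against the 10 bytes spent in .debug_str, so it
   stays inline; referenced twice it would save 12 > 10 bytes and is
   emitted out of line.  */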
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add a location description attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_loc;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_loc = loc;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 static inline dw_loc_descr_ref
4853 AT_loc (dw_attr_node *a)
4854 {
4855 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4856 return a->dw_attr_val.v.val_loc;
4857 }
4858
4859 static inline void
4860 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4861 {
4862 dw_attr_node attr;
4863
4864 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4865 return;
4866
4867 attr.dw_attr = attr_kind;
4868 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4869 attr.dw_attr_val.val_entry = NULL;
4870 attr.dw_attr_val.v.val_loc_list = loc_list;
4871 add_dwarf_attr (die, &attr);
4872 have_location_lists = true;
4873 }
4874
4875 static inline dw_loc_list_ref
4876 AT_loc_list (dw_attr_node *a)
4877 {
4878 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4879 return a->dw_attr_val.v.val_loc_list;
4880 }
4881
4882 /* Add a view list attribute to DIE. It must have a DW_AT_location
4883 attribute, because the view list complements the location list. */
4884
4885 static inline void
4886 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4887 {
4888 dw_attr_node attr;
4889
4890 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4891 return;
4892
4893 attr.dw_attr = attr_kind;
4894 attr.dw_attr_val.val_class = dw_val_class_view_list;
4895 attr.dw_attr_val.val_entry = NULL;
4896 attr.dw_attr_val.v.val_view_list = die;
4897 add_dwarf_attr (die, &attr);
4898 gcc_checking_assert (get_AT (die, DW_AT_location));
4899 gcc_assert (have_location_lists);
4900 }
4901
4902 /* Return a pointer to the location list referenced by the attribute.
4903 If the named attribute is a view list, look up the corresponding
4904 DW_AT_location attribute and return its location list. */
4905
4906 static inline dw_loc_list_ref *
4907 AT_loc_list_ptr (dw_attr_node *a)
4908 {
4909 gcc_assert (a);
4910 switch (AT_class (a))
4911 {
4912 case dw_val_class_loc_list:
4913 return &a->dw_attr_val.v.val_loc_list;
4914 case dw_val_class_view_list:
4915 {
4916 dw_attr_node *l;
4917 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4918 if (!l)
4919 return NULL;
4920 gcc_checking_assert (l + 1 == a);
4921 return AT_loc_list_ptr (l);
4922 }
4923 default:
4924 gcc_unreachable ();
4925 }
4926 }
4927
4928 /* Return the location attribute value associated with a view list
4929 attribute value. */
4930
4931 static inline dw_val_node *
4932 view_list_to_loc_list_val_node (dw_val_node *val)
4933 {
4934 gcc_assert (val->val_class == dw_val_class_view_list);
4935 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4936 if (!loc)
4937 return NULL;
4938 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4939 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4940 return &loc->dw_attr_val;
4941 }
4942
4943 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4944 {
4945 static hashval_t hash (addr_table_entry *);
4946 static bool equal (addr_table_entry *, addr_table_entry *);
4947 };
4948
4949 /* Table of entries into the .debug_addr section. */
4950
4951 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4952
4953 /* Hash an address_table_entry. */
4954
4955 hashval_t
4956 addr_hasher::hash (addr_table_entry *a)
4957 {
4958 inchash::hash hstate;
4959 switch (a->kind)
4960 {
4961 case ate_kind_rtx:
4962 hstate.add_int (0);
4963 break;
4964 case ate_kind_rtx_dtprel:
4965 hstate.add_int (1);
4966 break;
4967 case ate_kind_label:
4968 return htab_hash_string (a->addr.label);
4969 default:
4970 gcc_unreachable ();
4971 }
4972 inchash::add_rtx (a->addr.rtl, hstate);
4973 return hstate.end ();
4974 }
4975
4976 /* Determine equality for two address_table_entries. */
4977
4978 bool
4979 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4980 {
4981 if (a1->kind != a2->kind)
4982 return 0;
4983 switch (a1->kind)
4984 {
4985 case ate_kind_rtx:
4986 case ate_kind_rtx_dtprel:
4987 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4988 case ate_kind_label:
4989 return strcmp (a1->addr.label, a2->addr.label) == 0;
4990 default:
4991 gcc_unreachable ();
4992 }
4993 }
4994
4995 /* Initialize an addr_table_entry. */
4996
4997 void
4998 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4999 {
5000 e->kind = kind;
5001 switch (kind)
5002 {
5003 case ate_kind_rtx:
5004 case ate_kind_rtx_dtprel:
5005 e->addr.rtl = (rtx) addr;
5006 break;
5007 case ate_kind_label:
5008 e->addr.label = (char *) addr;
5009 break;
5010 }
5011 e->refcount = 0;
5012 e->index = NO_INDEX_ASSIGNED;
5013 }
5014
5015 /* Add an address table entry for ADDR with the given KIND to the table,
5016 creating it if necessary. Defer setting an index until output time. */
5017
5018 static addr_table_entry *
5019 add_addr_table_entry (void *addr, enum ate_kind kind)
5020 {
5021 addr_table_entry *node;
5022 addr_table_entry finder;
5023
5024 gcc_assert (dwarf_split_debug_info);
5025 if (! addr_index_table)
5026 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5027 init_addr_table_entry (&finder, kind, addr);
5028 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5029
5030 if (*slot == HTAB_EMPTY_ENTRY)
5031 {
5032 node = ggc_cleared_alloc<addr_table_entry> ();
5033 init_addr_table_entry (node, kind, addr);
5034 *slot = node;
5035 }
5036 else
5037 node = *slot;
5038
5039 node->refcount++;
5040 return node;
5041 }
5042
5043 /* Remove an entry from the addr table by decrementing its refcount.
5044 Strictly, decrementing the refcount would be enough, but the
5045 assertion that the entry is actually in the table has found
5046 bugs. */
5047
5048 static void
5049 remove_addr_table_entry (addr_table_entry *entry)
5050 {
5051 gcc_assert (dwarf_split_debug_info && addr_index_table);
5052 /* After an index is assigned, the table is frozen. */
5053 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5054 entry->refcount--;
5055 }
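/* Illustrative sketch (hypothetical helper, split-DWARF only): intern a
   label in the address table and later drop the reference again, e.g.
   because the DIE that used it was pruned.  Indices are only assigned at
   output time by index_addr_table_entry.  */

static inline void
example_track_and_release_label (const char *label)
{
  addr_table_entry *e
    = add_addr_table_entry (xstrdup (label), ate_kind_label);
  remove_addr_table_entry (e);
}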
5056
5057 /* Given a location list, remove all addresses it refers to from the
5058 address_table. */
5059
5060 static void
5061 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5062 {
5063 for (; descr; descr = descr->dw_loc_next)
5064 if (descr->dw_loc_oprnd1.val_entry != NULL)
5065 {
5066 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5067 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5068 }
5069 }
5070
5071 /* A helper function for dwarf2out_finish called through
5072 htab_traverse. Assign an addr_table_entry its index. All entries
5073 must be collected into the table when this function is called,
5074 because the indexing code relies on htab_traverse to traverse nodes
5075 in the same order for each run. */
5076
5077 int
5078 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5079 {
5080 addr_table_entry *node = *h;
5081
5082 /* Don't index unreferenced nodes. */
5083 if (node->refcount == 0)
5084 return 1;
5085
5086 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5087 node->index = *index;
5088 *index += 1;
5089
5090 return 1;
5091 }
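
/* Roughly how the indexing above is driven: once all entries have been
   collected, dwarf2out_finish walks the table through the hash-table
   traversal mentioned in the comment above, along the lines of

     unsigned int index = 0;
     if (addr_index_table != NULL)
       addr_index_table->traverse_noresize
         <unsigned int *, index_addr_table_entry> (&index);

   The stable traversal order is what makes the assigned indices
   reproducible from run to run.  */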
5092
5093 /* Add an address constant attribute value to a DIE. When using
5094 dwarf_split_debug_info, address attributes in dies destined for the
5095 final executable should be direct references--setting the parameter
5096 force_direct ensures this behavior. */
5097
5098 static inline void
5099 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5100 bool force_direct)
5101 {
5102 dw_attr_node attr;
5103
5104 attr.dw_attr = attr_kind;
5105 attr.dw_attr_val.val_class = dw_val_class_addr;
5106 attr.dw_attr_val.v.val_addr = addr;
5107 if (dwarf_split_debug_info && !force_direct)
5108 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5109 else
5110 attr.dw_attr_val.val_entry = NULL;
5111 add_dwarf_attr (die, &attr);
5112 }
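
/* Illustration of the force_direct contract, assuming split DWARF is in
   use and ATTR stands for whatever address-class attribute is being
   added:

     add_AT_addr (die, attr, rtl, false);  // indexed through .debug_addr
     add_AT_addr (die, attr, rtl, true);   // direct (relocated) address

   Per the comment above, DIEs destined for the final executable pass
   true so the reference stays direct.  */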
5113
5114 /* Get the RTX from an address DIE attribute. */
5115
5116 static inline rtx
5117 AT_addr (dw_attr_node *a)
5118 {
5119 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5120 return a->dw_attr_val.v.val_addr;
5121 }
5122
5123 /* Add a file attribute value to a DIE. */
5124
5125 static inline void
5126 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5127 struct dwarf_file_data *fd)
5128 {
5129 dw_attr_node attr;
5130
5131 attr.dw_attr = attr_kind;
5132 attr.dw_attr_val.val_class = dw_val_class_file;
5133 attr.dw_attr_val.val_entry = NULL;
5134 attr.dw_attr_val.v.val_file = fd;
5135 add_dwarf_attr (die, &attr);
5136 }
5137
5138 /* Get the dwarf_file_data from a file DIE attribute. */
5139
5140 static inline struct dwarf_file_data *
5141 AT_file (dw_attr_node *a)
5142 {
5143 gcc_assert (a && (AT_class (a) == dw_val_class_file
5144 || AT_class (a) == dw_val_class_file_implicit));
5145 return a->dw_attr_val.v.val_file;
5146 }
5147
5148 /* Add a vms delta attribute value to a DIE. */
5149
5150 static inline void
5151 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5152 const char *lbl1, const char *lbl2)
5153 {
5154 dw_attr_node attr;
5155
5156 attr.dw_attr = attr_kind;
5157 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5158 attr.dw_attr_val.val_entry = NULL;
5159 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5160 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5161 add_dwarf_attr (die, &attr);
5162 }
5163
5164 /* Add a symbolic view identifier attribute value to a DIE. */
5165
5166 static inline void
5167 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5168 const char *view_label)
5169 {
5170 dw_attr_node attr;
5171
5172 attr.dw_attr = attr_kind;
5173 attr.dw_attr_val.val_class = dw_val_class_symview;
5174 attr.dw_attr_val.val_entry = NULL;
5175 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5176 add_dwarf_attr (die, &attr);
5177 }
5178
5179 /* Add a label identifier attribute value to a DIE. */
5180
5181 static inline void
5182 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5183 const char *lbl_id)
5184 {
5185 dw_attr_node attr;
5186
5187 attr.dw_attr = attr_kind;
5188 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5189 attr.dw_attr_val.val_entry = NULL;
5190 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5191 if (dwarf_split_debug_info)
5192 attr.dw_attr_val.val_entry
5193 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5194 ate_kind_label);
5195 add_dwarf_attr (die, &attr);
5196 }
5197
5198 /* Add a section offset attribute value to a DIE, an offset into the
5199 debug_line section. */
5200
5201 static inline void
5202 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5203 const char *label)
5204 {
5205 dw_attr_node attr;
5206
5207 attr.dw_attr = attr_kind;
5208 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5209 attr.dw_attr_val.val_entry = NULL;
5210 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5211 add_dwarf_attr (die, &attr);
5212 }
5213
5214 /* Add a section offset attribute value to a DIE, an offset into the
5215 debug_macinfo section. */
5216
5217 static inline void
5218 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5219 const char *label)
5220 {
5221 dw_attr_node attr;
5222
5223 attr.dw_attr = attr_kind;
5224 attr.dw_attr_val.val_class = dw_val_class_macptr;
5225 attr.dw_attr_val.val_entry = NULL;
5226 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5227 add_dwarf_attr (die, &attr);
5228 }
5229
5230 /* Add a range_list attribute value to a DIE. When using
5231 dwarf_split_debug_info, address attributes in dies destined for the
5232 final executable should be direct references--setting the parameter
5233 force_direct ensures this behavior. */
5234
5235 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5236 #define RELOCATED_OFFSET (NULL)
5237
5238 static void
5239 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 long unsigned int offset, bool force_direct)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_range_list;
5246 /* For the range_list attribute, use val_entry to store whether the
5247 offset should follow split-debug-info or normal semantics. This
5248 value is read in output_range_list_offset. */
5249 if (dwarf_split_debug_info && !force_direct)
5250 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5251 else
5252 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5253 attr.dw_attr_val.v.val_offset = offset;
5254 add_dwarf_attr (die, &attr);
5255 }
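
/* The val_entry trick above reduces to a simple rule; a sketch, with
   RANGES_OFFSET standing for an offset already computed by the caller:

     add_AT_range_list (die, DW_AT_ranges, ranges_offset, false);
       // split DWARF: val_entry == UNRELOCATED_OFFSET, the offset is
       // emitted as plain data with no relocation
     add_AT_range_list (die, DW_AT_ranges, ranges_offset, true);
       // val_entry == RELOCATED_OFFSET, emitted as a normal
       // section-offset reference

   output_range_list_offset is the consumer that reads val_entry back.  */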
5256
5257 /* Return the start label of a delta attribute. */
5258
5259 static inline const char *
5260 AT_vms_delta1 (dw_attr_node *a)
5261 {
5262 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5263 return a->dw_attr_val.v.val_vms_delta.lbl1;
5264 }
5265
5266 /* Return the end label of a delta attribute. */
5267
5268 static inline const char *
5269 AT_vms_delta2 (dw_attr_node *a)
5270 {
5271 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5272 return a->dw_attr_val.v.val_vms_delta.lbl2;
5273 }
5274
5275 static inline const char *
5276 AT_lbl (dw_attr_node *a)
5277 {
5278 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5279 || AT_class (a) == dw_val_class_lineptr
5280 || AT_class (a) == dw_val_class_macptr
5281 || AT_class (a) == dw_val_class_loclistsptr
5282 || AT_class (a) == dw_val_class_high_pc));
5283 return a->dw_attr_val.v.val_lbl_id;
5284 }
5285
5286 /* Get the attribute of type attr_kind. */
5287
5288 static dw_attr_node *
5289 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5290 {
5291 dw_attr_node *a;
5292 unsigned ix;
5293 dw_die_ref spec = NULL;
5294
5295 if (! die)
5296 return NULL;
5297
5298 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5299 if (a->dw_attr == attr_kind)
5300 return a;
5301 else if (a->dw_attr == DW_AT_specification
5302 || a->dw_attr == DW_AT_abstract_origin)
5303 spec = AT_ref (a);
5304
5305 if (spec)
5306 return get_AT (spec, attr_kind);
5307
5308 return NULL;
5309 }
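
/* Example of the indirection get_AT performs: for a member function whose
   out-of-class definition DIE carries DW_AT_specification pointing at the
   in-class declaration, an attribute present only on the declaration is
   still found through the definition.  A sketch, with FNDECL standing for
   such a FUNCTION_DECL:

     dw_die_ref def_die = lookup_decl_die (fndecl);
     const char *name = get_AT_string (def_die, DW_AT_name);
       // found on the declaration DIE via the DW_AT_specification link  */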
5310
5311 /* Returns the parent of the declaration of DIE. */
5312
5313 static dw_die_ref
5314 get_die_parent (dw_die_ref die)
5315 {
5316 dw_die_ref t;
5317
5318 if (!die)
5319 return NULL;
5320
5321 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5322 || (t = get_AT_ref (die, DW_AT_specification)))
5323 die = t;
5324
5325 return die->die_parent;
5326 }
5327
5328 /* Return the "low pc" attribute value, typically associated with a subprogram
5329 DIE. Return null if the "low pc" attribute is either not present or cannot
5330 be represented as an assembler label identifier. */
5331
5332 static inline const char *
5333 get_AT_low_pc (dw_die_ref die)
5334 {
5335 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5336
5337 return a ? AT_lbl (a) : NULL;
5338 }
5339
5340 /* Return the value of the string attribute designated by ATTR_KIND, or
5341 NULL if it is not present. */
5342
5343 static inline const char *
5344 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5345 {
5346 dw_attr_node *a = get_AT (die, attr_kind);
5347
5348 return a ? AT_string (a) : NULL;
5349 }
5350
5351 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5352 if it is not present. */
5353
5354 static inline int
5355 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5356 {
5357 dw_attr_node *a = get_AT (die, attr_kind);
5358
5359 return a ? AT_flag (a) : 0;
5360 }
5361
5362 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5363 if it is not present. */
5364
5365 static inline unsigned
5366 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5367 {
5368 dw_attr_node *a = get_AT (die, attr_kind);
5369
5370 return a ? AT_unsigned (a) : 0;
5371 }
5372
5373 static inline dw_die_ref
5374 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5375 {
5376 dw_attr_node *a = get_AT (die, attr_kind);
5377
5378 return a ? AT_ref (a) : NULL;
5379 }
5380
5381 static inline struct dwarf_file_data *
5382 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5383 {
5384 dw_attr_node *a = get_AT (die, attr_kind);
5385
5386 return a ? AT_file (a) : NULL;
5387 }
5388
5389 /* Return TRUE if the language is C. */
5390
5391 static inline bool
5392 is_c (void)
5393 {
5394 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5395
5396 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5397 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5398
5399
5400 }
5401
5402 /* Return TRUE if the language is C++. */
5403
5404 static inline bool
5405 is_cxx (void)
5406 {
5407 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5408
5409 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5410 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5411 }
5412
5413 /* Return TRUE if DECL was created by the C++ frontend. */
5414
5415 static bool
5416 is_cxx (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5423 }
5424 return is_cxx ();
5425 }
5426
5427 /* Return TRUE if the language is Fortran. */
5428
5429 static inline bool
5430 is_fortran (void)
5431 {
5432 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5433
5434 return (lang == DW_LANG_Fortran77
5435 || lang == DW_LANG_Fortran90
5436 || lang == DW_LANG_Fortran95
5437 || lang == DW_LANG_Fortran03
5438 || lang == DW_LANG_Fortran08);
5439 }
5440
5441 static inline bool
5442 is_fortran (const_tree decl)
5443 {
5444 if (in_lto_p)
5445 {
5446 const_tree context = get_ultimate_context (decl);
5447 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5448 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5449 "GNU Fortran", 11) == 0
5450 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5451 "GNU F77") == 0);
5452 }
5453 return is_fortran ();
5454 }
5455
5456 /* Return TRUE if the language is Ada. */
5457
5458 static inline bool
5459 is_ada (void)
5460 {
5461 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5462
5463 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5464 }
5465
5466 /* Return TRUE if the language is D. */
5467
5468 static inline bool
5469 is_dlang (void)
5470 {
5471 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5472
5473 return lang == DW_LANG_D;
5474 }
5475
5476 /* Remove the specified attribute if present. Return TRUE if removal
5477 was successful. */
5478
5479 static bool
5480 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5481 {
5482 dw_attr_node *a;
5483 unsigned ix;
5484
5485 if (! die)
5486 return false;
5487
5488 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5489 if (a->dw_attr == attr_kind)
5490 {
5491 if (AT_class (a) == dw_val_class_str)
5492 if (a->dw_attr_val.v.val_str->refcount)
5493 a->dw_attr_val.v.val_str->refcount--;
5494
5495 /* vec::ordered_remove should help reduce the number of abbrevs
5496 that are needed. */
5497 die->die_attr->ordered_remove (ix);
5498 return true;
5499 }
5500 return false;
5501 }
5502
5503 /* Remove CHILD from its parent. PREV must have the property that
5504 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5505
5506 static void
5507 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5508 {
5509 gcc_assert (child->die_parent == prev->die_parent);
5510 gcc_assert (prev->die_sib == child);
5511 if (prev == child)
5512 {
5513 gcc_assert (child->die_parent->die_child == child);
5514 prev = NULL;
5515 }
5516 else
5517 prev->die_sib = child->die_sib;
5518 if (child->die_parent->die_child == child)
5519 child->die_parent->die_child = prev;
5520 child->die_sib = NULL;
5521 }
5522
5523 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5524 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5525
5526 static void
5527 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5528 {
5529 dw_die_ref parent = old_child->die_parent;
5530
5531 gcc_assert (parent == prev->die_parent);
5532 gcc_assert (prev->die_sib == old_child);
5533
5534 new_child->die_parent = parent;
5535 if (prev == old_child)
5536 {
5537 gcc_assert (parent->die_child == old_child);
5538 new_child->die_sib = new_child;
5539 }
5540 else
5541 {
5542 prev->die_sib = new_child;
5543 new_child->die_sib = old_child->die_sib;
5544 }
5545 if (old_child->die_parent->die_child == old_child)
5546 old_child->die_parent->die_child = new_child;
5547 old_child->die_sib = NULL;
5548 }
5549
5550 /* Move all children from OLD_PARENT to NEW_PARENT. */
5551
5552 static void
5553 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5554 {
5555 dw_die_ref c;
5556 new_parent->die_child = old_parent->die_child;
5557 old_parent->die_child = NULL;
5558 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5559 }
5560
5561 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5562 matches TAG. */
5563
5564 static void
5565 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5566 {
5567 dw_die_ref c;
5568
5569 c = die->die_child;
5570 if (c) do {
5571 dw_die_ref prev = c;
5572 c = c->die_sib;
5573 while (c->die_tag == tag)
5574 {
5575 remove_child_with_prev (c, prev);
5576 c->die_parent = NULL;
5577 /* Might have removed every child. */
5578 if (die->die_child == NULL)
5579 return;
5580 c = prev->die_sib;
5581 }
5582 } while (c != die->die_child);
5583 }
5584
5585 /* Add a CHILD_DIE as the last child of DIE. */
5586
5587 static void
5588 add_child_die (dw_die_ref die, dw_die_ref child_die)
5589 {
5590 /* FIXME this should probably be an assert. */
5591 if (! die || ! child_die)
5592 return;
5593 gcc_assert (die != child_die);
5594
5595 child_die->die_parent = die;
5596 if (die->die_child)
5597 {
5598 child_die->die_sib = die->die_child->die_sib;
5599 die->die_child->die_sib = child_die;
5600 }
5601 else
5602 child_die->die_sib = child_die;
5603 die->die_child = child_die;
5604 }
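
/* The children of a DIE form a circular singly-linked list threaded
   through die_sib, with DIE->die_child pointing at the *last* child (so
   die_child->die_sib is the first child).  A sketch of walking the
   children directly, essentially what the FOR_EACH_CHILD macro used
   elsewhere in this file expands to:

     if (die->die_child)
       {
         dw_die_ref c = die->die_child;
         do
           {
             c = c->die_sib;      // wraps around to the first child
             // ... visit C here ...
           }
         while (c != die->die_child);
       }
*/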
5605
5606 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5607
5608 static void
5609 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5610 dw_die_ref after_die)
5611 {
5612 gcc_assert (die
5613 && child_die
5614 && after_die
5615 && die->die_child
5616 && die != child_die);
5617
5618 child_die->die_parent = die;
5619 child_die->die_sib = after_die->die_sib;
5620 after_die->die_sib = child_die;
5621 if (die->die_child == after_die)
5622 die->die_child = child_die;
5623 }
5624
5625 /* Unassociate CHILD from its parent, and make its parent be
5626 NEW_PARENT. */
5627
5628 static void
5629 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5630 {
5631 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5632 if (p->die_sib == child)
5633 {
5634 remove_child_with_prev (child, p);
5635 break;
5636 }
5637 add_child_die (new_parent, child);
5638 }
5639
5640 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5641 is the specification, to the end of PARENT's list of children.
5642 This is done by removing and re-adding it. */
5643
5644 static void
5645 splice_child_die (dw_die_ref parent, dw_die_ref child)
5646 {
5647 /* We want the declaration DIE from inside the class, not the
5648 specification DIE at toplevel. */
5649 if (child->die_parent != parent)
5650 {
5651 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5652
5653 if (tmp)
5654 child = tmp;
5655 }
5656
5657 gcc_assert (child->die_parent == parent
5658 || (child->die_parent
5659 == get_AT_ref (parent, DW_AT_specification)));
5660
5661 reparent_child (child, parent);
5662 }
5663
5664 /* Create and return a new die with TAG_VALUE as tag. */
5665
5666 static inline dw_die_ref
5667 new_die_raw (enum dwarf_tag tag_value)
5668 {
5669 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5670 die->die_tag = tag_value;
5671 return die;
5672 }
5673
5674 /* Create and return a new die with a parent of PARENT_DIE. If
5675 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5676 associated tree T must be supplied to determine parenthood
5677 later. */
5678
5679 static inline dw_die_ref
5680 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5681 {
5682 dw_die_ref die = new_die_raw (tag_value);
5683
5684 if (parent_die != NULL)
5685 add_child_die (parent_die, die);
5686 else
5687 {
5688 limbo_die_node *limbo_node;
5689
5690 /* No DIEs created after early dwarf should end up in limbo,
5691 because the limbo list should not persist past LTO
5692 streaming. */
5693 if (tag_value != DW_TAG_compile_unit
5694 /* These are allowed because they're generated while
5695 breaking out COMDAT units late. */
5696 && tag_value != DW_TAG_type_unit
5697 && tag_value != DW_TAG_skeleton_unit
5698 && !early_dwarf
5699 /* Allow nested functions to live in limbo because they will
5700 only temporarily live there, as decls_for_scope will fix
5701 them up. */
5702 && (TREE_CODE (t) != FUNCTION_DECL
5703 || !decl_function_context (t))
5704 /* Same as nested functions above but for types. Types that
5705 are local to a function will be fixed in
5706 decls_for_scope. */
5707 && (!RECORD_OR_UNION_TYPE_P (t)
5708 || !TYPE_CONTEXT (t)
5709 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5710 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5711 especially in the ltrans stage, but once we implement LTO
5712 dwarf streaming, we should remove this exception. */
5713 && !in_lto_p)
5714 {
5715 fprintf (stderr, "symbol ended up in limbo too late:");
5716 debug_generic_stmt (t);
5717 gcc_unreachable ();
5718 }
5719
5720 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5721 limbo_node->die = die;
5722 limbo_node->created_for = t;
5723 limbo_node->next = limbo_die_list;
5724 limbo_die_list = limbo_node;
5725 }
5726
5727 return die;
5728 }
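
/* Two representative uses of new_die, as a sketch; CONTEXT_DIE, DECL and
   FNDECL are placeholders for whatever the caller has at hand:

     // Parent known up front: the DIE is linked in immediately.
     dw_die_ref var_die = new_die (DW_TAG_variable, context_die, decl);

     // Parent not yet known during early dwarf: the DIE goes onto the
     // limbo list and is reparented later, once its context DIE exists.
     dw_die_ref fn_die = new_die (DW_TAG_subprogram, NULL, fndecl);
*/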
5729
5730 /* Return the DIE associated with the given type specifier. */
5731
5732 static inline dw_die_ref
5733 lookup_type_die (tree type)
5734 {
5735 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5736 if (die && die->removed)
5737 {
5738 TYPE_SYMTAB_DIE (type) = NULL;
5739 return NULL;
5740 }
5741 return die;
5742 }
5743
5744 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5745 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5746 anonymous type instead of the one of the naming typedef. */
5747
5748 static inline dw_die_ref
5749 strip_naming_typedef (tree type, dw_die_ref type_die)
5750 {
5751 if (type
5752 && TREE_CODE (type) == RECORD_TYPE
5753 && type_die
5754 && type_die->die_tag == DW_TAG_typedef
5755 && is_naming_typedef_decl (TYPE_NAME (type)))
5756 type_die = get_AT_ref (type_die, DW_AT_type);
5757 return type_die;
5758 }
5759
5760 /* Like lookup_type_die, but if type is an anonymous type named by a
5761 typedef[1], return the DIE of the anonymous type instead of the one of
5762 the naming typedef. This is because in gen_typedef_die, we did
5763 equate the anonymous struct named by the typedef with the DIE of
5764 the naming typedef. So by default, lookup_type_die on an anonymous
5765 struct yields the DIE of the naming typedef.
5766
5767 [1]: Read the comment of is_naming_typedef_decl to learn about what
5768 a naming typedef is. */
5769
5770 static inline dw_die_ref
5771 lookup_type_die_strip_naming_typedef (tree type)
5772 {
5773 dw_die_ref die = lookup_type_die (type);
5774 return strip_naming_typedef (type, die);
5775 }
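
/* Concrete case for the two lookups above, in C terms:

     typedef struct { int i; } point;

   gen_typedef_die equates the anonymous struct with the DIE of the
   typedef "point", so plain lookup_type_die on the struct type yields a
   DW_TAG_typedef DIE, while lookup_type_die_strip_naming_typedef follows
   its DW_AT_type link and returns the DIE of the anonymous structure
   type itself.  */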
5776
5777 /* Equate a DIE to a given type specifier. */
5778
5779 static inline void
5780 equate_type_number_to_die (tree type, dw_die_ref type_die)
5781 {
5782 TYPE_SYMTAB_DIE (type) = type_die;
5783 }
5784
5785 static dw_die_ref maybe_create_die_with_external_ref (tree);
5786 struct GTY(()) sym_off_pair
5787 {
5788 const char * GTY((skip)) sym;
5789 unsigned HOST_WIDE_INT off;
5790 };
5791 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5792
5793 /* Returns a hash value for X (which really is a die_struct). */
5794
5795 inline hashval_t
5796 decl_die_hasher::hash (die_node *x)
5797 {
5798 return (hashval_t) x->decl_id;
5799 }
5800
5801 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5802
5803 inline bool
5804 decl_die_hasher::equal (die_node *x, tree y)
5805 {
5806 return (x->decl_id == DECL_UID (y));
5807 }
5808
5809 /* Return the DIE associated with a given declaration. */
5810
5811 static inline dw_die_ref
5812 lookup_decl_die (tree decl)
5813 {
5814 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5815 NO_INSERT);
5816 if (!die)
5817 {
5818 if (in_lto_p)
5819 return maybe_create_die_with_external_ref (decl);
5820 return NULL;
5821 }
5822 if ((*die)->removed)
5823 {
5824 decl_die_table->clear_slot (die);
5825 return NULL;
5826 }
5827 return *die;
5828 }
5829
5830
5831 /* Return the DIE associated with BLOCK. */
5832
5833 static inline dw_die_ref
5834 lookup_block_die (tree block)
5835 {
5836 dw_die_ref die = BLOCK_DIE (block);
5837 if (!die && in_lto_p)
5838 return maybe_create_die_with_external_ref (block);
5839 return die;
5840 }
5841
5842 /* Associate DIE with BLOCK. */
5843
5844 static inline void
5845 equate_block_to_die (tree block, dw_die_ref die)
5846 {
5847 BLOCK_DIE (block) = die;
5848 }
5849 #undef BLOCK_DIE
5850
5851
5852 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5853 style reference. Return true if we found one referring to a DIE for
5854 DECL, otherwise return false. */
5855
5856 static bool
5857 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5858 unsigned HOST_WIDE_INT *off)
5859 {
5860 dw_die_ref die;
5861
5862 if (in_lto_p)
5863 {
5864 /* During WPA stage and incremental linking we use a hash-map
5865 to store the decl <-> label + offset map. */
5866 if (!external_die_map)
5867 return false;
5868 sym_off_pair *desc = external_die_map->get (decl);
5869 if (!desc)
5870 return false;
5871 *sym = desc->sym;
5872 *off = desc->off;
5873 return true;
5874 }
5875
5876 if (TREE_CODE (decl) == BLOCK)
5877 die = lookup_block_die (decl);
5878 else
5879 die = lookup_decl_die (decl);
5880 if (!die)
5881 return false;
5882
5883 /* Similar to get_ref_die_offset_label, but using the "correct"
5884 label. */
5885 *off = die->die_offset;
5886 while (die->die_parent)
5887 die = die->die_parent;
5888 /* For the containing CU DIE we compute a die_symbol in
5889 compute_comp_unit_symbol. */
5890 gcc_assert (die->die_tag == DW_TAG_compile_unit
5891 && die->die_id.die_symbol != NULL);
5892 *sym = die->die_id.die_symbol;
5893 return true;
5894 }
5895
5896 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5897
5898 static void
5899 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5900 const char *symbol, HOST_WIDE_INT offset)
5901 {
5902 /* Create a fake DIE that contains the reference. Don't use
5903 new_die because we don't want to end up in the limbo list. */
5904 /* ??? We probably want to share these, thus put a ref to the DIE
5905 we create here to the external_die_map entry. */
5906 dw_die_ref ref = new_die_raw (die->die_tag);
5907 ref->die_id.die_symbol = symbol;
5908 ref->die_offset = offset;
5909 ref->with_offset = 1;
5910 add_AT_die_ref (die, attr_kind, ref);
5911 }
5912
5913 /* Create a DIE for DECL if required and add a reference to a DIE
5914 at SYMBOL + OFFSET which contains attributes dumped early. */
5915
5916 static void
5917 dwarf2out_register_external_die (tree decl, const char *sym,
5918 unsigned HOST_WIDE_INT off)
5919 {
5920 if (debug_info_level == DINFO_LEVEL_NONE)
5921 return;
5922
5923 if (!external_die_map)
5924 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5925 gcc_checking_assert (!external_die_map->get (decl));
5926 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5927 external_die_map->put (decl, p);
5928 }
5929
5930 /* If we have a registered external DIE for DECL return a new DIE for
5931 the concrete instance with an appropriate abstract origin. */
5932
5933 static dw_die_ref
5934 maybe_create_die_with_external_ref (tree decl)
5935 {
5936 if (!external_die_map)
5937 return NULL;
5938 sym_off_pair *desc = external_die_map->get (decl);
5939 if (!desc)
5940 return NULL;
5941
5942 const char *sym = desc->sym;
5943 unsigned HOST_WIDE_INT off = desc->off;
5944
5945 in_lto_p = false;
5946 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5947 ? lookup_block_die (decl) : lookup_decl_die (decl));
5948 gcc_assert (!die);
5949 in_lto_p = true;
5950
5951 tree ctx;
5952 dw_die_ref parent = NULL;
5953 /* Need to look up a DIE for the decl's context - the containing
5954 function or translation unit. */
5955 if (TREE_CODE (decl) == BLOCK)
5956 {
5957 ctx = BLOCK_SUPERCONTEXT (decl);
5958 /* ??? We do not output DIEs for all scopes thus skip as
5959 many DIEs as needed. */
5960 while (TREE_CODE (ctx) == BLOCK
5961 && !lookup_block_die (ctx))
5962 ctx = BLOCK_SUPERCONTEXT (ctx);
5963 }
5964 else
5965 ctx = DECL_CONTEXT (decl);
5966 /* Peel types in the context stack. */
5967 while (ctx && TYPE_P (ctx))
5968 ctx = TYPE_CONTEXT (ctx);
5969 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5970 if (debug_info_level <= DINFO_LEVEL_TERSE)
5971 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5972 ctx = DECL_CONTEXT (ctx);
5973 if (ctx)
5974 {
5975 if (TREE_CODE (ctx) == BLOCK)
5976 parent = lookup_block_die (ctx);
5977 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5978 /* Keep the 1:1 association during WPA. */
5979 && !flag_wpa
5980 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5981 /* Otherwise all late annotations go to the main CU which
5982 imports the original CUs. */
5983 parent = comp_unit_die ();
5984 else if (TREE_CODE (ctx) == FUNCTION_DECL
5985 && TREE_CODE (decl) != FUNCTION_DECL
5986 && TREE_CODE (decl) != PARM_DECL
5987 && TREE_CODE (decl) != RESULT_DECL
5988 && TREE_CODE (decl) != BLOCK)
5989 /* Leave function local entities parent determination to when
5990 we process scope vars. */
5991 ;
5992 else
5993 parent = lookup_decl_die (ctx);
5994 }
5995 else
5996 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5997 Handle this case gracefully by globalizing stuff. */
5998 parent = comp_unit_die ();
5999 /* Create a DIE "stub". */
6000 switch (TREE_CODE (decl))
6001 {
6002 case TRANSLATION_UNIT_DECL:
6003 {
6004 die = comp_unit_die ();
6005 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6006 to create a DIE for the original CUs. */
6007 return die;
6008 }
6009 case NAMESPACE_DECL:
6010 if (is_fortran (decl))
6011 die = new_die (DW_TAG_module, parent, decl);
6012 else
6013 die = new_die (DW_TAG_namespace, parent, decl);
6014 break;
6015 case FUNCTION_DECL:
6016 die = new_die (DW_TAG_subprogram, parent, decl);
6017 break;
6018 case VAR_DECL:
6019 die = new_die (DW_TAG_variable, parent, decl);
6020 break;
6021 case RESULT_DECL:
6022 die = new_die (DW_TAG_variable, parent, decl);
6023 break;
6024 case PARM_DECL:
6025 die = new_die (DW_TAG_formal_parameter, parent, decl);
6026 break;
6027 case CONST_DECL:
6028 die = new_die (DW_TAG_constant, parent, decl);
6029 break;
6030 case LABEL_DECL:
6031 die = new_die (DW_TAG_label, parent, decl);
6032 break;
6033 case BLOCK:
6034 die = new_die (DW_TAG_lexical_block, parent, decl);
6035 break;
6036 default:
6037 gcc_unreachable ();
6038 }
6039 if (TREE_CODE (decl) == BLOCK)
6040 equate_block_to_die (decl, die);
6041 else
6042 equate_decl_number_to_die (decl, die);
6043
6044 add_desc_attribute (die, decl);
6045
6046 /* Add a reference to the DIE providing early debug at $sym + off. */
6047 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6048
6049 return die;
6050 }
6051
6052 /* Returns a hash value for X (which really is a var_loc_list). */
6053
6054 inline hashval_t
6055 decl_loc_hasher::hash (var_loc_list *x)
6056 {
6057 return (hashval_t) x->decl_id;
6058 }
6059
6060 /* Return nonzero if decl_id of var_loc_list X is the same as
6061 UID of decl *Y. */
6062
6063 inline bool
6064 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6065 {
6066 return (x->decl_id == DECL_UID (y));
6067 }
6068
6069 /* Return the var_loc list associated with a given declaration. */
6070
6071 static inline var_loc_list *
6072 lookup_decl_loc (const_tree decl)
6073 {
6074 if (!decl_loc_table)
6075 return NULL;
6076 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6077 }
6078
6079 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6080
6081 inline hashval_t
6082 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6083 {
6084 return (hashval_t) x->decl_id;
6085 }
6086
6087 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6088 UID of decl *Y. */
6089
6090 inline bool
6091 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6092 {
6093 return (x->decl_id == DECL_UID (y));
6094 }
6095
6096 /* Equate a DIE to a particular declaration. */
6097
6098 static void
6099 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6100 {
6101 unsigned int decl_id = DECL_UID (decl);
6102
6103 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6104 decl_die->decl_id = decl_id;
6105 }
6106
6107 /* Return how many bits the PIECE EXPR_LIST covers. */
6108
6109 static HOST_WIDE_INT
6110 decl_piece_bitsize (rtx piece)
6111 {
6112 int ret = (int) GET_MODE (piece);
6113 if (ret)
6114 return ret;
6115 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6116 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6117 return INTVAL (XEXP (XEXP (piece, 0), 0));
6118 }
6119
6120 /* Return a pointer to the slot holding the location note in the PIECE EXPR_LIST. */
6121
6122 static rtx *
6123 decl_piece_varloc_ptr (rtx piece)
6124 {
6125 if ((int) GET_MODE (piece))
6126 return &XEXP (piece, 0);
6127 else
6128 return &XEXP (XEXP (piece, 0), 1);
6129 }
6130
6131 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6132 NEXT is the chain of following piece nodes. */
6133
6134 static rtx_expr_list *
6135 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6136 {
6137 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6138 return alloc_EXPR_LIST (bitsize, loc_note, next);
6139 else
6140 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6141 GEN_INT (bitsize),
6142 loc_note), next);
6143 }
6144
6145 /* Return rtx that should be stored into loc field for
6146 LOC_NOTE and BITPOS/BITSIZE. */
6147
6148 static rtx
6149 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6150 HOST_WIDE_INT bitsize)
6151 {
6152 if (bitsize != -1)
6153 {
6154 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6155 if (bitpos != 0)
6156 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6157 }
6158 return loc_note;
6159 }
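
/* Worked example for the piece encoding above: a variable split by SRA
   whose tracked part is a 32-bit piece at bit offset 32 would be
   recorded (sketch) as

     loc = construct_piece_list (loc_note, 32, 32);

   yielding the EXPR_LIST chain

     (32-bit piece, no location) -> (32-bit piece, LOC_NOTE)

   i.e. a leading padding piece with a NULL location note covering the
   first 32 bits, followed by the piece that actually carries LOC_NOTE.
   Small piece sizes are stored directly in the EXPR_LIST mode field;
   larger ones use the CONCAT form decoded by decl_piece_bitsize.  */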
6160
6161 /* This function either modifies the location piece list *DEST in
6162 place (if SRC and INNER are NULL), or copies the location piece list
6163 *SRC to *DEST while modifying it. Location BITPOS is changed to
6164 contain LOC_NOTE; any pieces overlapping it are removed (or, when
6165 copying, not copied), and padding is added around it if needed.
6166 When modifying in place, DEST should point to the EXPR_LIST where
6167 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6168 to the start of the whole list and INNER points to the EXPR_LIST
6169 where earlier pieces cover PIECE_BITPOS bits. */
6170
6171 static void
6172 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6173 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6174 HOST_WIDE_INT bitsize, rtx loc_note)
6175 {
6176 HOST_WIDE_INT diff;
6177 bool copy = inner != NULL;
6178
6179 if (copy)
6180 {
6181 /* First copy all nodes preceding the current bitpos. */
6182 while (src != inner)
6183 {
6184 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6185 decl_piece_bitsize (*src), NULL_RTX);
6186 dest = &XEXP (*dest, 1);
6187 src = &XEXP (*src, 1);
6188 }
6189 }
6190 /* Add padding if needed. */
6191 if (bitpos != piece_bitpos)
6192 {
6193 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6194 copy ? NULL_RTX : *dest);
6195 dest = &XEXP (*dest, 1);
6196 }
6197 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6198 {
6199 gcc_assert (!copy);
6200 /* A piece with the correct bitpos and bitsize already exists;
6201 just update its location and return. */
6202 *decl_piece_varloc_ptr (*dest) = loc_note;
6203 return;
6204 }
6205 /* Add the piece that changed. */
6206 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 /* Skip over pieces that overlap it. */
6209 diff = bitpos - piece_bitpos + bitsize;
6210 if (!copy)
6211 src = dest;
6212 while (diff > 0 && *src)
6213 {
6214 rtx piece = *src;
6215 diff -= decl_piece_bitsize (piece);
6216 if (copy)
6217 src = &XEXP (piece, 1);
6218 else
6219 {
6220 *src = XEXP (piece, 1);
6221 free_EXPR_LIST_node (piece);
6222 }
6223 }
6224 /* Add padding if needed. */
6225 if (diff < 0 && *src)
6226 {
6227 if (!copy)
6228 dest = src;
6229 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6230 dest = &XEXP (*dest, 1);
6231 }
6232 if (!copy)
6233 return;
6234 /* Finally copy all nodes following it. */
6235 while (*src)
6236 {
6237 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6238 decl_piece_bitsize (*src), NULL_RTX);
6239 dest = &XEXP (*dest, 1);
6240 src = &XEXP (*src, 1);
6241 }
6242 }
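
/* Sketch of the two modes described in the comment above, using the same
   argument names; both forms appear in add_var_loc_to_decl below:

     // In place: patch the existing piece list starting at *PIECE_LOC.
     adjust_piece_list (piece_loc, NULL, NULL, bitpos, piece_bitpos,
                        bitsize, loc_note);

     // Copying: build a fresh list in LOC->loc from LAST->loc, replacing
     // the piece at BITPOS on the way.
     adjust_piece_list (&loc->loc, &last->loc, piece_loc, bitpos,
                        piece_bitpos, bitsize, loc_note);
*/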
6243
6244 /* Add a variable location node to the linked list for DECL. */
6245
6246 static struct var_loc_node *
6247 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6248 {
6249 unsigned int decl_id;
6250 var_loc_list *temp;
6251 struct var_loc_node *loc = NULL;
6252 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6253
6254 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6255 {
6256 tree realdecl = DECL_DEBUG_EXPR (decl);
6257 if (handled_component_p (realdecl)
6258 || (TREE_CODE (realdecl) == MEM_REF
6259 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6260 {
6261 bool reverse;
6262 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6263 &bitsize, &reverse);
6264 if (!innerdecl
6265 || !DECL_P (innerdecl)
6266 || DECL_IGNORED_P (innerdecl)
6267 || TREE_STATIC (innerdecl)
6268 || bitsize == 0
6269 || bitpos + bitsize > 256)
6270 return NULL;
6271 decl = innerdecl;
6272 }
6273 }
6274
6275 decl_id = DECL_UID (decl);
6276 var_loc_list **slot
6277 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6278 if (*slot == NULL)
6279 {
6280 temp = ggc_cleared_alloc<var_loc_list> ();
6281 temp->decl_id = decl_id;
6282 *slot = temp;
6283 }
6284 else
6285 temp = *slot;
6286
6287 /* For PARM_DECLs try to keep around the original incoming value,
6288 even if that means we'll emit a zero-range .debug_loc entry. */
6289 if (temp->last
6290 && temp->first == temp->last
6291 && TREE_CODE (decl) == PARM_DECL
6292 && NOTE_P (temp->first->loc)
6293 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6294 && DECL_INCOMING_RTL (decl)
6295 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6296 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6297 == GET_CODE (DECL_INCOMING_RTL (decl))
6298 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6299 && (bitsize != -1
6300 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6301 NOTE_VAR_LOCATION_LOC (loc_note))
6302 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6303 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6304 {
6305 loc = ggc_cleared_alloc<var_loc_node> ();
6306 temp->first->next = loc;
6307 temp->last = loc;
6308 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6309 }
6310 else if (temp->last)
6311 {
6312 struct var_loc_node *last = temp->last, *unused = NULL;
6313 rtx *piece_loc = NULL, last_loc_note;
6314 HOST_WIDE_INT piece_bitpos = 0;
6315 if (last->next)
6316 {
6317 last = last->next;
6318 gcc_assert (last->next == NULL);
6319 }
6320 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6321 {
6322 piece_loc = &last->loc;
6323 do
6324 {
6325 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6326 if (piece_bitpos + cur_bitsize > bitpos)
6327 break;
6328 piece_bitpos += cur_bitsize;
6329 piece_loc = &XEXP (*piece_loc, 1);
6330 }
6331 while (*piece_loc);
6332 }
6333 /* TEMP->LAST here points either to the last-but-one or to the
6334 last element of the chained list; LAST points to the last
6335 element. */
6336 if (label && strcmp (last->label, label) == 0 && last->view == view)
6337 {
6338 /* For SRA optimized variables, if there weren't any real
6339 insns since the last note, just modify the last node. */
6340 if (piece_loc != NULL)
6341 {
6342 adjust_piece_list (piece_loc, NULL, NULL,
6343 bitpos, piece_bitpos, bitsize, loc_note);
6344 return NULL;
6345 }
6346 /* If the last note doesn't cover any instructions, remove it. */
6347 if (temp->last != last)
6348 {
6349 temp->last->next = NULL;
6350 unused = last;
6351 last = temp->last;
6352 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6353 }
6354 else
6355 {
6356 gcc_assert (temp->first == temp->last
6357 || (temp->first->next == temp->last
6358 && TREE_CODE (decl) == PARM_DECL));
6359 memset (temp->last, '\0', sizeof (*temp->last));
6360 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6361 return temp->last;
6362 }
6363 }
6364 if (bitsize == -1 && NOTE_P (last->loc))
6365 last_loc_note = last->loc;
6366 else if (piece_loc != NULL
6367 && *piece_loc != NULL_RTX
6368 && piece_bitpos == bitpos
6369 && decl_piece_bitsize (*piece_loc) == bitsize)
6370 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6371 else
6372 last_loc_note = NULL_RTX;
6373 /* If the current location is the same as the end of the list,
6374 and either both or neither of the locations is uninitialized,
6375 we have nothing to do. */
6376 if (last_loc_note == NULL_RTX
6377 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6378 NOTE_VAR_LOCATION_LOC (loc_note)))
6379 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6380 != NOTE_VAR_LOCATION_STATUS (loc_note))
6381 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6382 == VAR_INIT_STATUS_UNINITIALIZED)
6383 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6384 == VAR_INIT_STATUS_UNINITIALIZED))))
6385 {
6386 /* Add LOC to the end of list and update LAST. If the last
6387 element of the list has been removed above, reuse its
6388 memory for the new node, otherwise allocate a new one. */
6389 if (unused)
6390 {
6391 loc = unused;
6392 memset (loc, '\0', sizeof (*loc));
6393 }
6394 else
6395 loc = ggc_cleared_alloc<var_loc_node> ();
6396 if (bitsize == -1 || piece_loc == NULL)
6397 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6398 else
6399 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6400 bitpos, piece_bitpos, bitsize, loc_note);
6401 last->next = loc;
6402 /* Ensure TEMP->LAST will point either to the new last but one
6403 element of the chain, or to the last element in it. */
6404 if (last != temp->last)
6405 temp->last = last;
6406 }
6407 else if (unused)
6408 ggc_free (unused);
6409 }
6410 else
6411 {
6412 loc = ggc_cleared_alloc<var_loc_node> ();
6413 temp->first = loc;
6414 temp->last = loc;
6415 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6416 }
6417 return loc;
6418 }
6419 \f
6420 /* Keep track of the number of spaces used to indent the
6421 output of the debugging routines that print the structure of
6422 the DIE internal representation. */
6423 static int print_indent;
6424
6425 /* Indent the line the number of spaces given by print_indent. */
6426
6427 static inline void
6428 print_spaces (FILE *outfile)
6429 {
6430 fprintf (outfile, "%*s", print_indent, "");
6431 }
6432
6433 /* Print a type signature in hex. */
6434
6435 static inline void
6436 print_signature (FILE *outfile, char *sig)
6437 {
6438 int i;
6439
6440 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6441 fprintf (outfile, "%02x", sig[i] & 0xff);
6442 }
6443
6444 static inline void
6445 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6446 {
6447 if (discr_value->pos)
6448 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6449 else
6450 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6451 }
6452
6453 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6454
6455 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6456 RECURSE, output location descriptor operations. */
6457
6458 static void
6459 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6460 {
6461 switch (val->val_class)
6462 {
6463 case dw_val_class_addr:
6464 fprintf (outfile, "address");
6465 break;
6466 case dw_val_class_offset:
6467 fprintf (outfile, "offset");
6468 break;
6469 case dw_val_class_loc:
6470 fprintf (outfile, "location descriptor");
6471 if (val->v.val_loc == NULL)
6472 fprintf (outfile, " -> <null>\n");
6473 else if (recurse)
6474 {
6475 fprintf (outfile, ":\n");
6476 print_indent += 4;
6477 print_loc_descr (val->v.val_loc, outfile);
6478 print_indent -= 4;
6479 }
6480 else
6481 {
6482 if (flag_dump_noaddr || flag_dump_unnumbered)
6483 fprintf (outfile, " #\n");
6484 else
6485 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6486 }
6487 break;
6488 case dw_val_class_loc_list:
6489 fprintf (outfile, "location list -> label:%s",
6490 val->v.val_loc_list->ll_symbol);
6491 break;
6492 case dw_val_class_view_list:
6493 val = view_list_to_loc_list_val_node (val);
6494 fprintf (outfile, "location list with views -> labels:%s and %s",
6495 val->v.val_loc_list->ll_symbol,
6496 val->v.val_loc_list->vl_symbol);
6497 break;
6498 case dw_val_class_range_list:
6499 fprintf (outfile, "range list");
6500 break;
6501 case dw_val_class_const:
6502 case dw_val_class_const_implicit:
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6504 break;
6505 case dw_val_class_unsigned_const:
6506 case dw_val_class_unsigned_const_implicit:
6507 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6508 break;
6509 case dw_val_class_const_double:
6510 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6511 HOST_WIDE_INT_PRINT_UNSIGNED")",
6512 val->v.val_double.high,
6513 val->v.val_double.low);
6514 break;
6515 case dw_val_class_wide_int:
6516 {
6517 int i = val->v.val_wide->get_len ();
6518 fprintf (outfile, "constant (");
6519 gcc_assert (i > 0);
6520 if (val->v.val_wide->elt (i - 1) == 0)
6521 fprintf (outfile, "0x");
6522 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6523 val->v.val_wide->elt (--i));
6524 while (--i >= 0)
6525 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6526 val->v.val_wide->elt (i));
6527 fprintf (outfile, ")");
6528 break;
6529 }
6530 case dw_val_class_vec:
6531 fprintf (outfile, "floating-point or vector constant");
6532 break;
6533 case dw_val_class_flag:
6534 fprintf (outfile, "%u", val->v.val_flag);
6535 break;
6536 case dw_val_class_die_ref:
6537 if (val->v.val_die_ref.die != NULL)
6538 {
6539 dw_die_ref die = val->v.val_die_ref.die;
6540
6541 if (die->comdat_type_p)
6542 {
6543 fprintf (outfile, "die -> signature: ");
6544 print_signature (outfile,
6545 die->die_id.die_type_node->signature);
6546 }
6547 else if (die->die_id.die_symbol)
6548 {
6549 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6550 if (die->with_offset)
6551 fprintf (outfile, " + %ld", die->die_offset);
6552 }
6553 else
6554 fprintf (outfile, "die -> %ld", die->die_offset);
6555 if (flag_dump_noaddr || flag_dump_unnumbered)
6556 fprintf (outfile, " #");
6557 else
6558 fprintf (outfile, " (%p)", (void *) die);
6559 }
6560 else
6561 fprintf (outfile, "die -> <null>");
6562 break;
6563 case dw_val_class_vms_delta:
6564 fprintf (outfile, "delta: @slotcount(%s-%s)",
6565 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6566 break;
6567 case dw_val_class_symview:
6568 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6569 break;
6570 case dw_val_class_lbl_id:
6571 case dw_val_class_lineptr:
6572 case dw_val_class_macptr:
6573 case dw_val_class_loclistsptr:
6574 case dw_val_class_high_pc:
6575 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6576 break;
6577 case dw_val_class_str:
6578 if (val->v.val_str->str != NULL)
6579 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6580 else
6581 fprintf (outfile, "<null>");
6582 break;
6583 case dw_val_class_file:
6584 case dw_val_class_file_implicit:
6585 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6586 val->v.val_file->emitted_number);
6587 break;
6588 case dw_val_class_data8:
6589 {
6590 int i;
6591
6592 for (i = 0; i < 8; i++)
6593 fprintf (outfile, "%02x", val->v.val_data8[i]);
6594 break;
6595 }
6596 case dw_val_class_discr_value:
6597 print_discr_value (outfile, &val->v.val_discr_value);
6598 break;
6599 case dw_val_class_discr_list:
6600 for (dw_discr_list_ref node = val->v.val_discr_list;
6601 node != NULL;
6602 node = node->dw_discr_next)
6603 {
6604 if (node->dw_discr_range)
6605 {
6606 print_discr_value (outfile, &node->dw_discr_lower_bound);
6607 fprintf (outfile, " .. ");
6608 print_discr_value (outfile, &node->dw_discr_upper_bound);
6609 }
6610 else
6611 print_discr_value (outfile, &node->dw_discr_lower_bound);
6612
6613 if (node->dw_discr_next != NULL)
6614 fprintf (outfile, " | ");
6615 }
6616 default:
6617 break;
6618 }
6619 }
6620
6621 /* Likewise, for a DIE attribute. */
6622
6623 static void
6624 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6625 {
6626 print_dw_val (&a->dw_attr_val, recurse, outfile);
6627 }
6628
6629
6630 /* Print the list of operands in the LOC location description to OUTFILE. This
6631 routine is a debugging aid only. */
6632
6633 static void
6634 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6635 {
6636 dw_loc_descr_ref l = loc;
6637
6638 if (loc == NULL)
6639 {
6640 print_spaces (outfile);
6641 fprintf (outfile, "<null>\n");
6642 return;
6643 }
6644
6645 for (l = loc; l != NULL; l = l->dw_loc_next)
6646 {
6647 print_spaces (outfile);
6648 if (flag_dump_noaddr || flag_dump_unnumbered)
6649 fprintf (outfile, "#");
6650 else
6651 fprintf (outfile, "(%p)", (void *) l);
6652 fprintf (outfile, " %s",
6653 dwarf_stack_op_name (l->dw_loc_opc));
6654 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6655 {
6656 fprintf (outfile, " ");
6657 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6658 }
6659 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6660 {
6661 fprintf (outfile, ", ");
6662 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6663 }
6664 fprintf (outfile, "\n");
6665 }
6666 }
6667
6668 /* Print the information associated with a given DIE, and its children.
6669 This routine is a debugging aid only. */
6670
6671 static void
6672 print_die (dw_die_ref die, FILE *outfile)
6673 {
6674 dw_attr_node *a;
6675 dw_die_ref c;
6676 unsigned ix;
6677
6678 print_spaces (outfile);
6679 fprintf (outfile, "DIE %4ld: %s ",
6680 die->die_offset, dwarf_tag_name (die->die_tag));
6681 if (flag_dump_noaddr || flag_dump_unnumbered)
6682 fprintf (outfile, "#\n");
6683 else
6684 fprintf (outfile, "(%p)\n", (void*) die);
6685 print_spaces (outfile);
6686 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6687 fprintf (outfile, " offset: %ld", die->die_offset);
6688 fprintf (outfile, " mark: %d\n", die->die_mark);
6689
6690 if (die->comdat_type_p)
6691 {
6692 print_spaces (outfile);
6693 fprintf (outfile, " signature: ");
6694 print_signature (outfile, die->die_id.die_type_node->signature);
6695 fprintf (outfile, "\n");
6696 }
6697
6698 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6699 {
6700 print_spaces (outfile);
6701 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6702
6703 print_attribute (a, true, outfile);
6704 fprintf (outfile, "\n");
6705 }
6706
6707 if (die->die_child != NULL)
6708 {
6709 print_indent += 4;
6710 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6711 print_indent -= 4;
6712 }
6713 if (print_indent == 0)
6714 fprintf (outfile, "\n");
6715 }
6716
6717 /* Print the list of operations in the LOC location description. */
6718
6719 DEBUG_FUNCTION void
6720 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6721 {
6722 print_loc_descr (loc, stderr);
6723 }
6724
6725 /* Print the information collected for a given DIE. */
6726
6727 DEBUG_FUNCTION void
6728 debug_dwarf_die (dw_die_ref die)
6729 {
6730 print_die (die, stderr);
6731 }
6732
6733 DEBUG_FUNCTION void
6734 debug (die_struct &ref)
6735 {
6736 print_die (&ref, stderr);
6737 }
6738
6739 DEBUG_FUNCTION void
6740 debug (die_struct *ptr)
6741 {
6742 if (ptr)
6743 debug (*ptr);
6744 else
6745 fprintf (stderr, "<nil>\n");
6746 }
6747
6748
6749 /* Print all DWARF information collected for the compilation unit.
6750 This routine is a debugging aid only. */
6751
6752 DEBUG_FUNCTION void
6753 debug_dwarf (void)
6754 {
6755 print_indent = 0;
6756 print_die (comp_unit_die (), stderr);
6757 }
6758
6759 /* Verify the DIE tree structure. */
6760
6761 DEBUG_FUNCTION void
6762 verify_die (dw_die_ref die)
6763 {
6764 gcc_assert (!die->die_mark);
6765 if (die->die_parent == NULL
6766 && die->die_sib == NULL)
6767 return;
6768 /* Verify the die_sib list is cyclic. */
6769 dw_die_ref x = die;
6770 do
6771 {
6772 x->die_mark = 1;
6773 x = x->die_sib;
6774 }
6775 while (x && !x->die_mark);
6776 gcc_assert (x == die);
6777 x = die;
6778 do
6779 {
6780 /* Verify all dies have the same parent. */
6781 gcc_assert (x->die_parent == die->die_parent);
6782 if (x->die_child)
6783 {
6784 /* Verify the child has the proper parent and recurse. */
6785 gcc_assert (x->die_child->die_parent == x);
6786 verify_die (x->die_child);
6787 }
6788 x->die_mark = 0;
6789 x = x->die_sib;
6790 }
6791 while (x && x->die_mark);
6792 }
6793
6794 /* Sanity checks on DIEs. */
6795
6796 static void
6797 check_die (dw_die_ref die)
6798 {
6799 unsigned ix;
6800 dw_attr_node *a;
6801 bool inline_found = false;
6802 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6803 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6804 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6805 {
6806 switch (a->dw_attr)
6807 {
6808 case DW_AT_inline:
6809 if (a->dw_attr_val.v.val_unsigned)
6810 inline_found = true;
6811 break;
6812 case DW_AT_location:
6813 ++n_location;
6814 break;
6815 case DW_AT_low_pc:
6816 ++n_low_pc;
6817 break;
6818 case DW_AT_high_pc:
6819 ++n_high_pc;
6820 break;
6821 case DW_AT_artificial:
6822 ++n_artificial;
6823 break;
6824 case DW_AT_decl_column:
6825 ++n_decl_column;
6826 break;
6827 case DW_AT_decl_line:
6828 ++n_decl_line;
6829 break;
6830 case DW_AT_decl_file:
6831 ++n_decl_file;
6832 break;
6833 default:
6834 break;
6835 }
6836 }
6837 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6838 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6839 {
6840 fprintf (stderr, "Duplicate attributes in DIE:\n");
6841 debug_dwarf_die (die);
6842 gcc_unreachable ();
6843 }
6844 if (inline_found)
6845 {
6846 /* A debugging information entry that is a member of an abstract
6847 instance tree [that has DW_AT_inline] should not contain any
6848 attributes which describe aspects of the subroutine which vary
6849 between distinct inlined expansions or distinct out-of-line
6850 expansions. */
6851 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6852 gcc_assert (a->dw_attr != DW_AT_low_pc
6853 && a->dw_attr != DW_AT_high_pc
6854 && a->dw_attr != DW_AT_location
6855 && a->dw_attr != DW_AT_frame_base
6856 && a->dw_attr != DW_AT_call_all_calls
6857 && a->dw_attr != DW_AT_GNU_all_call_sites);
6858 }
6859 }
6860 \f
6861 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6862 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6863 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6864
6865 /* Calculate the checksum of a location expression. */
6866
6867 static inline void
6868 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6869 {
6870 int tem;
6871 inchash::hash hstate;
6872 hashval_t hash;
6873
6874 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6875 CHECKSUM (tem);
6876 hash_loc_operands (loc, hstate);
6877 hash = hstate.end();
6878 CHECKSUM (hash);
6879 }
6880
6881 /* Calculate the checksum of an attribute. */
6882
6883 static void
6884 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6885 {
6886 dw_loc_descr_ref loc;
6887 rtx r;
6888
6889 CHECKSUM (at->dw_attr);
6890
6891 /* We don't care that this was compiled with a different compiler
6892 snapshot; if the output is the same, that's what matters. */
6893 if (at->dw_attr == DW_AT_producer)
6894 return;
6895
6896 switch (AT_class (at))
6897 {
6898 case dw_val_class_const:
6899 case dw_val_class_const_implicit:
6900 CHECKSUM (at->dw_attr_val.v.val_int);
6901 break;
6902 case dw_val_class_unsigned_const:
6903 case dw_val_class_unsigned_const_implicit:
6904 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6905 break;
6906 case dw_val_class_const_double:
6907 CHECKSUM (at->dw_attr_val.v.val_double);
6908 break;
6909 case dw_val_class_wide_int:
6910 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6911 get_full_len (*at->dw_attr_val.v.val_wide)
6912 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6913 break;
6914 case dw_val_class_vec:
6915 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6916 (at->dw_attr_val.v.val_vec.length
6917 * at->dw_attr_val.v.val_vec.elt_size));
6918 break;
6919 case dw_val_class_flag:
6920 CHECKSUM (at->dw_attr_val.v.val_flag);
6921 break;
6922 case dw_val_class_str:
6923 CHECKSUM_STRING (AT_string (at));
6924 break;
6925
6926 case dw_val_class_addr:
6927 r = AT_addr (at);
6928 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6929 CHECKSUM_STRING (XSTR (r, 0));
6930 break;
6931
6932 case dw_val_class_offset:
6933 CHECKSUM (at->dw_attr_val.v.val_offset);
6934 break;
6935
6936 case dw_val_class_loc:
6937 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6938 loc_checksum (loc, ctx);
6939 break;
6940
6941 case dw_val_class_die_ref:
6942 die_checksum (AT_ref (at), ctx, mark);
6943 break;
6944
6945 case dw_val_class_fde_ref:
6946 case dw_val_class_vms_delta:
6947 case dw_val_class_symview:
6948 case dw_val_class_lbl_id:
6949 case dw_val_class_lineptr:
6950 case dw_val_class_macptr:
6951 case dw_val_class_loclistsptr:
6952 case dw_val_class_high_pc:
6953 break;
6954
6955 case dw_val_class_file:
6956 case dw_val_class_file_implicit:
6957 CHECKSUM_STRING (AT_file (at)->filename);
6958 break;
6959
6960 case dw_val_class_data8:
6961 CHECKSUM (at->dw_attr_val.v.val_data8);
6962 break;
6963
6964 default:
6965 break;
6966 }
6967 }
6968
6969 /* Calculate the checksum of a DIE. */
6970
6971 static void
6972 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6973 {
6974 dw_die_ref c;
6975 dw_attr_node *a;
6976 unsigned ix;
6977
6978 /* To avoid infinite recursion. */
6979 if (die->die_mark)
6980 {
6981 CHECKSUM (die->die_mark);
6982 return;
6983 }
6984 die->die_mark = ++(*mark);
6985
6986 CHECKSUM (die->die_tag);
6987
6988 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6989 attr_checksum (a, ctx, mark);
6990
6991 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6992 }
6993
6994 #undef CHECKSUM
6995 #undef CHECKSUM_BLOCK
6996 #undef CHECKSUM_STRING
6997
6998 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6999 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7000 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7001 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7002 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7003 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7004 #define CHECKSUM_ATTR(FOO) \
7005 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7006
7007 /* Calculate the checksum of a number in signed LEB128 format. */
7008
7009 static void
7010 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7011 {
7012 unsigned char byte;
7013 bool more;
7014
7015 while (1)
7016 {
7017 byte = (value & 0x7f);
7018 value >>= 7;
7019 more = !((value == 0 && (byte & 0x40) == 0)
7020 || (value == -1 && (byte & 0x40) != 0));
7021 if (more)
7022 byte |= 0x80;
7023 CHECKSUM (byte);
7024 if (!more)
7025 break;
7026 }
7027 }
7028
7029 /* Calculate the checksum of a number in unsigned LEB128 format. */
7030
7031 static void
7032 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7033 {
7034 while (1)
7035 {
7036 unsigned char byte = (value & 0x7f);
7037 value >>= 7;
7038 if (value != 0)
7039 /* More bytes to follow. */
7040 byte |= 0x80;
7041 CHECKSUM (byte);
7042 if (value == 0)
7043 break;
7044 }
7045 }
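
/* Editorial note (not in the original sources): worked examples of the byte
   sequences the two helpers above feed into the MD5 context, matching the
   LEB128 examples in the DWARF specification:

     checksum_uleb128 (2, ctx);       => processes 0x02
     checksum_uleb128 (624485, ctx);  => processes 0xe5 0x8e 0x26
     checksum_sleb128 (2, ctx);       => processes 0x02
     checksum_sleb128 (-2, ctx);      => processes 0x7e
     checksum_sleb128 (127, ctx);     => processes 0xff 0x00 (bit 0x40 of the
                                         first byte is set, so an extra byte
                                         is needed to carry the sign)

   Only the encoded bytes are hashed; no length or terminator is added.  */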
7046
7047 /* Checksum the context of the DIE. This adds the names of any
7048 surrounding namespaces or structures to the checksum. */
7049
7050 static void
7051 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7052 {
7053 const char *name;
7054 dw_die_ref spec;
7055 int tag = die->die_tag;
7056
7057 if (tag != DW_TAG_namespace
7058 && tag != DW_TAG_structure_type
7059 && tag != DW_TAG_class_type)
7060 return;
7061
7062 name = get_AT_string (die, DW_AT_name);
7063
7064 spec = get_AT_ref (die, DW_AT_specification);
7065 if (spec != NULL)
7066 die = spec;
7067
7068 if (die->die_parent != NULL)
7069 checksum_die_context (die->die_parent, ctx);
7070
7071 CHECKSUM_ULEB128 ('C');
7072 CHECKSUM_ULEB128 (tag);
7073 if (name != NULL)
7074 CHECKSUM_STRING (name);
7075 }
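
/* Editorial example (hypothetical type, not GCC output): for a type nested as
   namespace N { struct S { struct T; }; }, calling checksum_die_context on T's
   parent chain contributes, outermost context first:

     'C'  DW_TAG_namespace       "N"  (plus the trailing NUL)
     'C'  DW_TAG_structure_type  "S"  (plus the trailing NUL)

   i.e. each enclosing namespace, structure or class adds a 'C' marker, its tag
   as a ULEB128, and its name if it has one; anonymous scopes contribute only
   the marker and the tag.  */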
7076
7077 /* Calculate the checksum of a location expression. */
7078
7079 static inline void
7080 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7081 {
7082 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7083 were emitted as a DW_FORM_sdata instead of a location expression. */
7084 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7085 {
7086 CHECKSUM_ULEB128 (DW_FORM_sdata);
7087 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7088 return;
7089 }
7090
7091 /* Otherwise, just checksum the raw location expression. */
7092 while (loc != NULL)
7093 {
7094 inchash::hash hstate;
7095 hashval_t hash;
7096
7097 CHECKSUM_ULEB128 (loc->dtprel);
7098 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7099 hash_loc_operands (loc, hstate);
7100 hash = hstate.end ();
7101 CHECKSUM (hash);
7102 loc = loc->dw_loc_next;
7103 }
7104 }
7105
7106 /* Calculate the checksum of an attribute. */
7107
7108 static void
7109 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7110 struct md5_ctx *ctx, int *mark)
7111 {
7112 dw_loc_descr_ref loc;
7113 rtx r;
7114
7115 if (AT_class (at) == dw_val_class_die_ref)
7116 {
7117 dw_die_ref target_die = AT_ref (at);
7118
7119 /* For pointer and reference types, we checksum only the (qualified)
7120 name of the target type (if there is a name). For friend entries,
7121 we checksum only the (qualified) name of the target type or function.
7122 This allows the checksum to remain the same whether the target type
7123 is complete or not. */
7124 if ((at->dw_attr == DW_AT_type
7125 && (tag == DW_TAG_pointer_type
7126 || tag == DW_TAG_reference_type
7127 || tag == DW_TAG_rvalue_reference_type
7128 || tag == DW_TAG_ptr_to_member_type))
7129 || (at->dw_attr == DW_AT_friend
7130 && tag == DW_TAG_friend))
7131 {
7132 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7133
7134 if (name_attr != NULL)
7135 {
7136 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7137
7138 if (decl == NULL)
7139 decl = target_die;
7140 CHECKSUM_ULEB128 ('N');
7141 CHECKSUM_ULEB128 (at->dw_attr);
7142 if (decl->die_parent != NULL)
7143 checksum_die_context (decl->die_parent, ctx);
7144 CHECKSUM_ULEB128 ('E');
7145 CHECKSUM_STRING (AT_string (name_attr));
7146 return;
7147 }
7148 }
7149
7150 /* For all other references to another DIE, we check to see if the
7151 target DIE has already been visited. If it has, we emit a
7152 backward reference; if not, we descend recursively. */
7153 if (target_die->die_mark > 0)
7154 {
7155 CHECKSUM_ULEB128 ('R');
7156 CHECKSUM_ULEB128 (at->dw_attr);
7157 CHECKSUM_ULEB128 (target_die->die_mark);
7158 }
7159 else
7160 {
7161 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7162
7163 if (decl == NULL)
7164 decl = target_die;
7165 target_die->die_mark = ++(*mark);
7166 CHECKSUM_ULEB128 ('T');
7167 CHECKSUM_ULEB128 (at->dw_attr);
7168 if (decl->die_parent != NULL)
7169 checksum_die_context (decl->die_parent, ctx);
7170 die_checksum_ordered (target_die, ctx, mark);
7171 }
7172 return;
7173 }
7174
7175 CHECKSUM_ULEB128 ('A');
7176 CHECKSUM_ULEB128 (at->dw_attr);
7177
7178 switch (AT_class (at))
7179 {
7180 case dw_val_class_const:
7181 case dw_val_class_const_implicit:
7182 CHECKSUM_ULEB128 (DW_FORM_sdata);
7183 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7184 break;
7185
7186 case dw_val_class_unsigned_const:
7187 case dw_val_class_unsigned_const_implicit:
7188 CHECKSUM_ULEB128 (DW_FORM_sdata);
7189 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7190 break;
7191
7192 case dw_val_class_const_double:
7193 CHECKSUM_ULEB128 (DW_FORM_block);
7194 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7195 CHECKSUM (at->dw_attr_val.v.val_double);
7196 break;
7197
7198 case dw_val_class_wide_int:
7199 CHECKSUM_ULEB128 (DW_FORM_block);
7200 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7201 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7202 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7203 get_full_len (*at->dw_attr_val.v.val_wide)
7204 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7205 break;
7206
7207 case dw_val_class_vec:
7208 CHECKSUM_ULEB128 (DW_FORM_block);
7209 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7210 * at->dw_attr_val.v.val_vec.elt_size);
7211 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7212 (at->dw_attr_val.v.val_vec.length
7213 * at->dw_attr_val.v.val_vec.elt_size));
7214 break;
7215
7216 case dw_val_class_flag:
7217 CHECKSUM_ULEB128 (DW_FORM_flag);
7218 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7219 break;
7220
7221 case dw_val_class_str:
7222 CHECKSUM_ULEB128 (DW_FORM_string);
7223 CHECKSUM_STRING (AT_string (at));
7224 break;
7225
7226 case dw_val_class_addr:
7227 r = AT_addr (at);
7228 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7229 CHECKSUM_ULEB128 (DW_FORM_string);
7230 CHECKSUM_STRING (XSTR (r, 0));
7231 break;
7232
7233 case dw_val_class_offset:
7234 CHECKSUM_ULEB128 (DW_FORM_sdata);
7235 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7236 break;
7237
7238 case dw_val_class_loc:
7239 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7240 loc_checksum_ordered (loc, ctx);
7241 break;
7242
7243 case dw_val_class_fde_ref:
7244 case dw_val_class_symview:
7245 case dw_val_class_lbl_id:
7246 case dw_val_class_lineptr:
7247 case dw_val_class_macptr:
7248 case dw_val_class_loclistsptr:
7249 case dw_val_class_high_pc:
7250 break;
7251
7252 case dw_val_class_file:
7253 case dw_val_class_file_implicit:
7254 CHECKSUM_ULEB128 (DW_FORM_string);
7255 CHECKSUM_STRING (AT_file (at)->filename);
7256 break;
7257
7258 case dw_val_class_data8:
7259 CHECKSUM (at->dw_attr_val.v.val_data8);
7260 break;
7261
7262 default:
7263 break;
7264 }
7265 }
7266
7267 struct checksum_attributes
7268 {
7269 dw_attr_node *at_name;
7270 dw_attr_node *at_type;
7271 dw_attr_node *at_friend;
7272 dw_attr_node *at_accessibility;
7273 dw_attr_node *at_address_class;
7274 dw_attr_node *at_alignment;
7275 dw_attr_node *at_allocated;
7276 dw_attr_node *at_artificial;
7277 dw_attr_node *at_associated;
7278 dw_attr_node *at_binary_scale;
7279 dw_attr_node *at_bit_offset;
7280 dw_attr_node *at_bit_size;
7281 dw_attr_node *at_bit_stride;
7282 dw_attr_node *at_byte_size;
7283 dw_attr_node *at_byte_stride;
7284 dw_attr_node *at_const_value;
7285 dw_attr_node *at_containing_type;
7286 dw_attr_node *at_count;
7287 dw_attr_node *at_data_location;
7288 dw_attr_node *at_data_member_location;
7289 dw_attr_node *at_decimal_scale;
7290 dw_attr_node *at_decimal_sign;
7291 dw_attr_node *at_default_value;
7292 dw_attr_node *at_digit_count;
7293 dw_attr_node *at_discr;
7294 dw_attr_node *at_discr_list;
7295 dw_attr_node *at_discr_value;
7296 dw_attr_node *at_encoding;
7297 dw_attr_node *at_endianity;
7298 dw_attr_node *at_explicit;
7299 dw_attr_node *at_is_optional;
7300 dw_attr_node *at_location;
7301 dw_attr_node *at_lower_bound;
7302 dw_attr_node *at_mutable;
7303 dw_attr_node *at_ordering;
7304 dw_attr_node *at_picture_string;
7305 dw_attr_node *at_prototyped;
7306 dw_attr_node *at_small;
7307 dw_attr_node *at_segment;
7308 dw_attr_node *at_string_length;
7309 dw_attr_node *at_string_length_bit_size;
7310 dw_attr_node *at_string_length_byte_size;
7311 dw_attr_node *at_threads_scaled;
7312 dw_attr_node *at_upper_bound;
7313 dw_attr_node *at_use_location;
7314 dw_attr_node *at_use_UTF8;
7315 dw_attr_node *at_variable_parameter;
7316 dw_attr_node *at_virtuality;
7317 dw_attr_node *at_visibility;
7318 dw_attr_node *at_vtable_elem_location;
7319 };
7320
7321 /* Collect the attributes that we will want to use for the checksum. */
7322
7323 static void
7324 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7325 {
7326 dw_attr_node *a;
7327 unsigned ix;
7328
7329 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7330 {
7331 switch (a->dw_attr)
7332 {
7333 case DW_AT_name:
7334 attrs->at_name = a;
7335 break;
7336 case DW_AT_type:
7337 attrs->at_type = a;
7338 break;
7339 case DW_AT_friend:
7340 attrs->at_friend = a;
7341 break;
7342 case DW_AT_accessibility:
7343 attrs->at_accessibility = a;
7344 break;
7345 case DW_AT_address_class:
7346 attrs->at_address_class = a;
7347 break;
7348 case DW_AT_alignment:
7349 attrs->at_alignment = a;
7350 break;
7351 case DW_AT_allocated:
7352 attrs->at_allocated = a;
7353 break;
7354 case DW_AT_artificial:
7355 attrs->at_artificial = a;
7356 break;
7357 case DW_AT_associated:
7358 attrs->at_associated = a;
7359 break;
7360 case DW_AT_binary_scale:
7361 attrs->at_binary_scale = a;
7362 break;
7363 case DW_AT_bit_offset:
7364 attrs->at_bit_offset = a;
7365 break;
7366 case DW_AT_bit_size:
7367 attrs->at_bit_size = a;
7368 break;
7369 case DW_AT_bit_stride:
7370 attrs->at_bit_stride = a;
7371 break;
7372 case DW_AT_byte_size:
7373 attrs->at_byte_size = a;
7374 break;
7375 case DW_AT_byte_stride:
7376 attrs->at_byte_stride = a;
7377 break;
7378 case DW_AT_const_value:
7379 attrs->at_const_value = a;
7380 break;
7381 case DW_AT_containing_type:
7382 attrs->at_containing_type = a;
7383 break;
7384 case DW_AT_count:
7385 attrs->at_count = a;
7386 break;
7387 case DW_AT_data_location:
7388 attrs->at_data_location = a;
7389 break;
7390 case DW_AT_data_member_location:
7391 attrs->at_data_member_location = a;
7392 break;
7393 case DW_AT_decimal_scale:
7394 attrs->at_decimal_scale = a;
7395 break;
7396 case DW_AT_decimal_sign:
7397 attrs->at_decimal_sign = a;
7398 break;
7399 case DW_AT_default_value:
7400 attrs->at_default_value = a;
7401 break;
7402 case DW_AT_digit_count:
7403 attrs->at_digit_count = a;
7404 break;
7405 case DW_AT_discr:
7406 attrs->at_discr = a;
7407 break;
7408 case DW_AT_discr_list:
7409 attrs->at_discr_list = a;
7410 break;
7411 case DW_AT_discr_value:
7412 attrs->at_discr_value = a;
7413 break;
7414 case DW_AT_encoding:
7415 attrs->at_encoding = a;
7416 break;
7417 case DW_AT_endianity:
7418 attrs->at_endianity = a;
7419 break;
7420 case DW_AT_explicit:
7421 attrs->at_explicit = a;
7422 break;
7423 case DW_AT_is_optional:
7424 attrs->at_is_optional = a;
7425 break;
7426 case DW_AT_location:
7427 attrs->at_location = a;
7428 break;
7429 case DW_AT_lower_bound:
7430 attrs->at_lower_bound = a;
7431 break;
7432 case DW_AT_mutable:
7433 attrs->at_mutable = a;
7434 break;
7435 case DW_AT_ordering:
7436 attrs->at_ordering = a;
7437 break;
7438 case DW_AT_picture_string:
7439 attrs->at_picture_string = a;
7440 break;
7441 case DW_AT_prototyped:
7442 attrs->at_prototyped = a;
7443 break;
7444 case DW_AT_small:
7445 attrs->at_small = a;
7446 break;
7447 case DW_AT_segment:
7448 attrs->at_segment = a;
7449 break;
7450 case DW_AT_string_length:
7451 attrs->at_string_length = a;
7452 break;
7453 case DW_AT_string_length_bit_size:
7454 attrs->at_string_length_bit_size = a;
7455 break;
7456 case DW_AT_string_length_byte_size:
7457 attrs->at_string_length_byte_size = a;
7458 break;
7459 case DW_AT_threads_scaled:
7460 attrs->at_threads_scaled = a;
7461 break;
7462 case DW_AT_upper_bound:
7463 attrs->at_upper_bound = a;
7464 break;
7465 case DW_AT_use_location:
7466 attrs->at_use_location = a;
7467 break;
7468 case DW_AT_use_UTF8:
7469 attrs->at_use_UTF8 = a;
7470 break;
7471 case DW_AT_variable_parameter:
7472 attrs->at_variable_parameter = a;
7473 break;
7474 case DW_AT_virtuality:
7475 attrs->at_virtuality = a;
7476 break;
7477 case DW_AT_visibility:
7478 attrs->at_visibility = a;
7479 break;
7480 case DW_AT_vtable_elem_location:
7481 attrs->at_vtable_elem_location = a;
7482 break;
7483 default:
7484 break;
7485 }
7486 }
7487 }
7488
7489 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7490
7491 static void
7492 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7493 {
7494 dw_die_ref c;
7495 dw_die_ref decl;
7496 struct checksum_attributes attrs;
7497
7498 CHECKSUM_ULEB128 ('D');
7499 CHECKSUM_ULEB128 (die->die_tag);
7500
7501 memset (&attrs, 0, sizeof (attrs));
7502
7503 decl = get_AT_ref (die, DW_AT_specification);
7504 if (decl != NULL)
7505 collect_checksum_attributes (&attrs, decl);
7506 collect_checksum_attributes (&attrs, die);
7507
7508 CHECKSUM_ATTR (attrs.at_name);
7509 CHECKSUM_ATTR (attrs.at_accessibility);
7510 CHECKSUM_ATTR (attrs.at_address_class);
7511 CHECKSUM_ATTR (attrs.at_allocated);
7512 CHECKSUM_ATTR (attrs.at_artificial);
7513 CHECKSUM_ATTR (attrs.at_associated);
7514 CHECKSUM_ATTR (attrs.at_binary_scale);
7515 CHECKSUM_ATTR (attrs.at_bit_offset);
7516 CHECKSUM_ATTR (attrs.at_bit_size);
7517 CHECKSUM_ATTR (attrs.at_bit_stride);
7518 CHECKSUM_ATTR (attrs.at_byte_size);
7519 CHECKSUM_ATTR (attrs.at_byte_stride);
7520 CHECKSUM_ATTR (attrs.at_const_value);
7521 CHECKSUM_ATTR (attrs.at_containing_type);
7522 CHECKSUM_ATTR (attrs.at_count);
7523 CHECKSUM_ATTR (attrs.at_data_location);
7524 CHECKSUM_ATTR (attrs.at_data_member_location);
7525 CHECKSUM_ATTR (attrs.at_decimal_scale);
7526 CHECKSUM_ATTR (attrs.at_decimal_sign);
7527 CHECKSUM_ATTR (attrs.at_default_value);
7528 CHECKSUM_ATTR (attrs.at_digit_count);
7529 CHECKSUM_ATTR (attrs.at_discr);
7530 CHECKSUM_ATTR (attrs.at_discr_list);
7531 CHECKSUM_ATTR (attrs.at_discr_value);
7532 CHECKSUM_ATTR (attrs.at_encoding);
7533 CHECKSUM_ATTR (attrs.at_endianity);
7534 CHECKSUM_ATTR (attrs.at_explicit);
7535 CHECKSUM_ATTR (attrs.at_is_optional);
7536 CHECKSUM_ATTR (attrs.at_location);
7537 CHECKSUM_ATTR (attrs.at_lower_bound);
7538 CHECKSUM_ATTR (attrs.at_mutable);
7539 CHECKSUM_ATTR (attrs.at_ordering);
7540 CHECKSUM_ATTR (attrs.at_picture_string);
7541 CHECKSUM_ATTR (attrs.at_prototyped);
7542 CHECKSUM_ATTR (attrs.at_small);
7543 CHECKSUM_ATTR (attrs.at_segment);
7544 CHECKSUM_ATTR (attrs.at_string_length);
7545 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7546 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7547 CHECKSUM_ATTR (attrs.at_threads_scaled);
7548 CHECKSUM_ATTR (attrs.at_upper_bound);
7549 CHECKSUM_ATTR (attrs.at_use_location);
7550 CHECKSUM_ATTR (attrs.at_use_UTF8);
7551 CHECKSUM_ATTR (attrs.at_variable_parameter);
7552 CHECKSUM_ATTR (attrs.at_virtuality);
7553 CHECKSUM_ATTR (attrs.at_visibility);
7554 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7555 CHECKSUM_ATTR (attrs.at_type);
7556 CHECKSUM_ATTR (attrs.at_friend);
7557 CHECKSUM_ATTR (attrs.at_alignment);
7558
7559 /* Checksum the child DIEs. */
7560 c = die->die_child;
7561 if (c) do {
7562 dw_attr_node *name_attr;
7563
7564 c = c->die_sib;
7565 name_attr = get_AT (c, DW_AT_name);
7566 if (is_template_instantiation (c))
7567 {
7568 /* Ignore instantiations of member type and function templates. */
7569 }
7570 else if (name_attr != NULL
7571 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7572 {
7573 /* Use a shallow checksum for named nested types and member
7574 functions. */
7575 CHECKSUM_ULEB128 ('S');
7576 CHECKSUM_ULEB128 (c->die_tag);
7577 CHECKSUM_STRING (AT_string (name_attr));
7578 }
7579 else
7580 {
7581 /* Use a deep checksum for other children. */
7582 /* Mark this DIE so it gets processed when unmarking. */
7583 if (c->die_mark == 0)
7584 c->die_mark = -1;
7585 die_checksum_ordered (c, ctx, mark);
7586 }
7587 } while (c != die->die_child);
7588
7589 CHECKSUM_ULEB128 (0);
7590 }
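
/* Editorial summary (not in the original sources) of the marker bytes that
   die_checksum_ordered and attr_checksum_ordered mix into the hash:

     'D' <tag>                  start of a DIE
     'A' <attr> <form> ...      an attribute, followed by its form and value
     'S' <tag> <name>           shallow entry for a named nested type or
                                member function child
     'N' <attr> ... 'E' <name>  reference to a type/friend by qualified name
     'R' <attr> <mark>          backward reference to an already-visited DIE
     'T' <attr> ...             reference checksummed by full recursion
     'C' <tag> <name>           one level of enclosing namespace/class context
     0                          end of a DIE's child list

   Because the attributes are fed in the fixed order of the CHECKSUM_ATTR list
   above, the resulting signature does not depend on the order in which GCC
   happened to add attributes to the DIE.  */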
7591
7592 /* Add a type name and tag to a hash. */
7593 static void
7594 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7595 {
7596 CHECKSUM_ULEB128 (tag);
7597 CHECKSUM_STRING (name);
7598 }
7599
7600 #undef CHECKSUM
7601 #undef CHECKSUM_STRING
7602 #undef CHECKSUM_ATTR
7603 #undef CHECKSUM_SLEB128
7604 #undef CHECKSUM_ULEB128
7605
7606 /* Generate the type signature for DIE. This is computed by generating an
7607 MD5 checksum over the DIE's tag, its relevant attributes, and its
7608 children. Attributes that are references to other DIEs are processed
7609 by recursion, using the MARK field to prevent infinite recursion.
7610 If the DIE is nested inside a namespace or another type, we also
7611 need to include that context in the signature. The lower 64 bits
7612 of the resulting MD5 checksum comprise the signature. */
7613
7614 static void
7615 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7616 {
7617 int mark;
7618 const char *name;
7619 unsigned char checksum[16];
7620 struct md5_ctx ctx;
7621 dw_die_ref decl;
7622 dw_die_ref parent;
7623
7624 name = get_AT_string (die, DW_AT_name);
7625 decl = get_AT_ref (die, DW_AT_specification);
7626 parent = get_die_parent (die);
7627
7628 /* First, compute a signature for just the type name (and its surrounding
7629 context, if any). This is stored in the type unit DIE for link-time
7630 ODR (one-definition rule) checking. */
7631
7632 if (is_cxx () && name != NULL)
7633 {
7634 md5_init_ctx (&ctx);
7635
7636 /* Checksum the names of surrounding namespaces and structures. */
7637 if (parent != NULL)
7638 checksum_die_context (parent, &ctx);
7639
7640 /* Checksum the current DIE. */
7641 die_odr_checksum (die->die_tag, name, &ctx);
7642 md5_finish_ctx (&ctx, checksum);
7643
7644 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7645 }
7646
7647 /* Next, compute the complete type signature. */
7648
7649 md5_init_ctx (&ctx);
7650 mark = 1;
7651 die->die_mark = mark;
7652
7653 /* Checksum the names of surrounding namespaces and structures. */
7654 if (parent != NULL)
7655 checksum_die_context (parent, &ctx);
7656
7657 /* Checksum the DIE and its children. */
7658 die_checksum_ordered (die, &ctx, &mark);
7659 unmark_all_dies (die);
7660 md5_finish_ctx (&ctx, checksum);
7661
7662 /* Store the signature in the type node and link the type DIE and the
7663 type node together. */
7664 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7665 DWARF_TYPE_SIGNATURE_SIZE);
7666 die->comdat_type_p = true;
7667 die->die_id.die_type_node = type_node;
7668 type_node->type_die = die;
7669
7670 /* If the DIE is a specification, link its declaration to the type node
7671 as well. */
7672 if (decl != NULL)
7673 {
7674 decl->comdat_type_p = true;
7675 decl->die_id.die_type_node = type_node;
7676 }
7677 }
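
/* Editorial sketch (assumes libiberty's md5.h, mirroring the calls used above;
   the helper name is hypothetical and not part of dwarf2out.c): how an 8-byte
   comdat type signature is carved out of an MD5 digest.

     #include "md5.h"
     #include <string.h>

     static void
     example_type_signature (const void *bytes, size_t len, unsigned char sig[8])
     {
       struct md5_ctx ctx;
       unsigned char checksum[16];

       md5_init_ctx (&ctx);
       md5_process_bytes (bytes, len, &ctx);   // the ordered checksum stream
       md5_finish_ctx (&ctx, checksum);
       memcpy (sig, &checksum[16 - 8], 8);     // keep the last 8 bytes, as
                                               // DWARF_TYPE_SIGNATURE_SIZE is 8
     }

   generate_type_signature uses the same trick twice: once over just the
   qualified name (stored, for named C++ types, as DW_AT_GNU_odr_signature on
   the type unit root DIE) and once over the full ordered checksum of the DIE
   to form the comdat type signature.  */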
7678
7679 /* Do the location expressions look the same? */
7680 static inline int
7681 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7682 {
7683 return loc1->dw_loc_opc == loc2->dw_loc_opc
7684 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7685 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7686 }
7687
7688 /* Do the values look the same? */
7689 static int
7690 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7691 {
7692 dw_loc_descr_ref loc1, loc2;
7693 rtx r1, r2;
7694
7695 if (v1->val_class != v2->val_class)
7696 return 0;
7697
7698 switch (v1->val_class)
7699 {
7700 case dw_val_class_const:
7701 case dw_val_class_const_implicit:
7702 return v1->v.val_int == v2->v.val_int;
7703 case dw_val_class_unsigned_const:
7704 case dw_val_class_unsigned_const_implicit:
7705 return v1->v.val_unsigned == v2->v.val_unsigned;
7706 case dw_val_class_const_double:
7707 return v1->v.val_double.high == v2->v.val_double.high
7708 && v1->v.val_double.low == v2->v.val_double.low;
7709 case dw_val_class_wide_int:
7710 return *v1->v.val_wide == *v2->v.val_wide;
7711 case dw_val_class_vec:
7712 if (v1->v.val_vec.length != v2->v.val_vec.length
7713 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7714 return 0;
7715 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7716 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7717 return 0;
7718 return 1;
7719 case dw_val_class_flag:
7720 return v1->v.val_flag == v2->v.val_flag;
7721 case dw_val_class_str:
7722 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7723
7724 case dw_val_class_addr:
7725 r1 = v1->v.val_addr;
7726 r2 = v2->v.val_addr;
7727 if (GET_CODE (r1) != GET_CODE (r2))
7728 return 0;
7729 return rtx_equal_p (r1, r2);
7730
7731 case dw_val_class_offset:
7732 return v1->v.val_offset == v2->v.val_offset;
7733
7734 case dw_val_class_loc:
7735 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7736 loc1 && loc2;
7737 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7738 if (!same_loc_p (loc1, loc2, mark))
7739 return 0;
7740 return !loc1 && !loc2;
7741
7742 case dw_val_class_die_ref:
7743 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7744
7745 case dw_val_class_symview:
7746 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7747
7748 case dw_val_class_fde_ref:
7749 case dw_val_class_vms_delta:
7750 case dw_val_class_lbl_id:
7751 case dw_val_class_lineptr:
7752 case dw_val_class_macptr:
7753 case dw_val_class_loclistsptr:
7754 case dw_val_class_high_pc:
7755 return 1;
7756
7757 case dw_val_class_file:
7758 case dw_val_class_file_implicit:
7759 return v1->v.val_file == v2->v.val_file;
7760
7761 case dw_val_class_data8:
7762 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7763
7764 default:
7765 return 1;
7766 }
7767 }
7768
7769 /* Do the attributes look the same? */
7770
7771 static int
7772 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7773 {
7774 if (at1->dw_attr != at2->dw_attr)
7775 return 0;
7776
7777 /* We don't care that this was compiled with a different compiler
7778 snapshot; if the output is the same, that's what matters. */
7779 if (at1->dw_attr == DW_AT_producer)
7780 return 1;
7781
7782 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7783 }
7784
7785 /* Do the DIEs look the same? */
7786
7787 static int
7788 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7789 {
7790 dw_die_ref c1, c2;
7791 dw_attr_node *a1;
7792 unsigned ix;
7793
7794 /* To avoid infinite recursion. */
7795 if (die1->die_mark)
7796 return die1->die_mark == die2->die_mark;
7797 die1->die_mark = die2->die_mark = ++(*mark);
7798
7799 if (die1->die_tag != die2->die_tag)
7800 return 0;
7801
7802 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7803 return 0;
7804
7805 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7806 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7807 return 0;
7808
7809 c1 = die1->die_child;
7810 c2 = die2->die_child;
7811 if (! c1)
7812 {
7813 if (c2)
7814 return 0;
7815 }
7816 else
7817 for (;;)
7818 {
7819 if (!same_die_p (c1, c2, mark))
7820 return 0;
7821 c1 = c1->die_sib;
7822 c2 = c2->die_sib;
7823 if (c1 == die1->die_child)
7824 {
7825 if (c2 == die2->die_child)
7826 break;
7827 else
7828 return 0;
7829 }
7830 }
7831
7832 return 1;
7833 }
7834
7835 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7836 children, and set die_symbol. */
7837
7838 static void
7839 compute_comp_unit_symbol (dw_die_ref unit_die)
7840 {
7841 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7842 const char *base = die_name ? lbasename (die_name) : "anonymous";
7843 char *name = XALLOCAVEC (char, strlen (base) + 64);
7844 char *p;
7845 int i, mark;
7846 unsigned char checksum[16];
7847 struct md5_ctx ctx;
7848
7849 /* Compute the checksum of the DIE, then append part of it as hex digits to
7850 the basename of the unit's filename. */
7851
7852 md5_init_ctx (&ctx);
7853 mark = 0;
7854 die_checksum (unit_die, &ctx, &mark);
7855 unmark_all_dies (unit_die);
7856 md5_finish_ctx (&ctx, checksum);
7857
7858 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7859 not start with a letter but with anything valid for filenames, and
7860 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7861 character is not a letter. */
7862 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7863 clean_symbol_name (name);
7864
7865 p = name + strlen (name);
7866 for (i = 0; i < 4; i++)
7867 {
7868 sprintf (p, "%.2x", checksum[i]);
7869 p += 2;
7870 }
7871
7872 unit_die->die_id.die_symbol = xstrdup (name);
7873 }
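
/* Editorial example (hypothetical values): for a unit whose DW_AT_name is
   "/tmp/foo.c" and whose DIE checksum begins 0x1a 0x2b 0x3c 0x4d, the code
   above builds "foo.c." from the basename, runs it through clean_symbol_name
   (which, assuming it maps characters invalid in assembler names to '_',
   yields "foo_c_"), and appends the first four checksum bytes as eight hex
   digits, giving a die_symbol of "foo_c_1a2b3c4d".  A basename starting with
   a non-letter, e.g. "1.c", would get a leading 'g' first.  */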
7874
7875 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7876
7877 static int
7878 is_type_die (dw_die_ref die)
7879 {
7880 switch (die->die_tag)
7881 {
7882 case DW_TAG_array_type:
7883 case DW_TAG_class_type:
7884 case DW_TAG_interface_type:
7885 case DW_TAG_enumeration_type:
7886 case DW_TAG_pointer_type:
7887 case DW_TAG_reference_type:
7888 case DW_TAG_rvalue_reference_type:
7889 case DW_TAG_string_type:
7890 case DW_TAG_structure_type:
7891 case DW_TAG_subroutine_type:
7892 case DW_TAG_union_type:
7893 case DW_TAG_ptr_to_member_type:
7894 case DW_TAG_set_type:
7895 case DW_TAG_subrange_type:
7896 case DW_TAG_base_type:
7897 case DW_TAG_const_type:
7898 case DW_TAG_file_type:
7899 case DW_TAG_packed_type:
7900 case DW_TAG_volatile_type:
7901 case DW_TAG_typedef:
7902 return 1;
7903 default:
7904 return 0;
7905 }
7906 }
7907
7908 /* Returns true iff C is a compile-unit or skeleton-unit DIE. */
7909
7910 static inline bool
7911 is_cu_die (dw_die_ref c)
7912 {
7913 return c && (c->die_tag == DW_TAG_compile_unit
7914 || c->die_tag == DW_TAG_skeleton_unit);
7915 }
7916
7917 /* Returns true iff C is a unit DIE of some sort. */
7918
7919 static inline bool
7920 is_unit_die (dw_die_ref c)
7921 {
7922 return c && (c->die_tag == DW_TAG_compile_unit
7923 || c->die_tag == DW_TAG_partial_unit
7924 || c->die_tag == DW_TAG_type_unit
7925 || c->die_tag == DW_TAG_skeleton_unit);
7926 }
7927
7928 /* Returns true iff C is a namespace DIE. */
7929
7930 static inline bool
7931 is_namespace_die (dw_die_ref c)
7932 {
7933 return c && c->die_tag == DW_TAG_namespace;
7934 }
7935
7936 /* Return non-zero if this DIE is a template parameter. */
7937
7938 static inline bool
7939 is_template_parameter (dw_die_ref die)
7940 {
7941 switch (die->die_tag)
7942 {
7943 case DW_TAG_template_type_param:
7944 case DW_TAG_template_value_param:
7945 case DW_TAG_GNU_template_template_param:
7946 case DW_TAG_GNU_template_parameter_pack:
7947 return true;
7948 default:
7949 return false;
7950 }
7951 }
7952
7953 /* Return non-zero if this DIE represents a template instantiation. */
7954
7955 static inline bool
7956 is_template_instantiation (dw_die_ref die)
7957 {
7958 dw_die_ref c;
7959
7960 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7961 return false;
7962 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7963 return false;
7964 }
7965
7966 static char *
7967 gen_internal_sym (const char *prefix)
7968 {
7969 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7970
7971 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7972 return xstrdup (buf);
7973 }
7974
7975 /* Return non-zero if this DIE is a declaration. */
7976
7977 static int
7978 is_declaration_die (dw_die_ref die)
7979 {
7980 dw_attr_node *a;
7981 unsigned ix;
7982
7983 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7984 if (a->dw_attr == DW_AT_declaration)
7985 return 1;
7986
7987 return 0;
7988 }
7989
7990 /* Return non-zero if this DIE is nested inside a subprogram. */
7991
7992 static int
7993 is_nested_in_subprogram (dw_die_ref die)
7994 {
7995 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7996
7997 if (decl == NULL)
7998 decl = die;
7999 return local_scope_p (decl);
8000 }
8001
8002 /* Return non-zero if this DIE contains a defining declaration of a
8003 subprogram. */
8004
8005 static int
8006 contains_subprogram_definition (dw_die_ref die)
8007 {
8008 dw_die_ref c;
8009
8010 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8011 return 1;
8012 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8013 return 0;
8014 }
8015
8016 /* Return non-zero if this is a type DIE that should be moved to a
8017 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8018 unit type. */
8019
8020 static int
8021 should_move_die_to_comdat (dw_die_ref die)
8022 {
8023 switch (die->die_tag)
8024 {
8025 case DW_TAG_class_type:
8026 case DW_TAG_structure_type:
8027 case DW_TAG_enumeration_type:
8028 case DW_TAG_union_type:
8029 /* Don't move declarations, inlined instances, types nested in a
8030 subprogram, or types that contain subprogram definitions. */
8031 if (is_declaration_die (die)
8032 || get_AT (die, DW_AT_abstract_origin)
8033 || is_nested_in_subprogram (die)
8034 || contains_subprogram_definition (die))
8035 return 0;
8036 return 1;
8037 case DW_TAG_array_type:
8038 case DW_TAG_interface_type:
8039 case DW_TAG_pointer_type:
8040 case DW_TAG_reference_type:
8041 case DW_TAG_rvalue_reference_type:
8042 case DW_TAG_string_type:
8043 case DW_TAG_subroutine_type:
8044 case DW_TAG_ptr_to_member_type:
8045 case DW_TAG_set_type:
8046 case DW_TAG_subrange_type:
8047 case DW_TAG_base_type:
8048 case DW_TAG_const_type:
8049 case DW_TAG_file_type:
8050 case DW_TAG_packed_type:
8051 case DW_TAG_volatile_type:
8052 case DW_TAG_typedef:
8053 default:
8054 return 0;
8055 }
8056 }
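
/* Editorial note (hypothetical source example): a namespace-scope
   "struct point { int x, y; };" passes the test above and is broken out into
   its own type unit, while the same struct stays in the compile unit if its
   DIE is only a declaration, carries DW_AT_abstract_origin (an inlined
   instance), is nested inside a subprogram (e.g. a function-local type), or
   contains a subprogram definition among its descendants.  */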
8057
8058 /* Make a clone of DIE. */
8059
8060 static dw_die_ref
8061 clone_die (dw_die_ref die)
8062 {
8063 dw_die_ref clone = new_die_raw (die->die_tag);
8064 dw_attr_node *a;
8065 unsigned ix;
8066
8067 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8068 add_dwarf_attr (clone, a);
8069
8070 return clone;
8071 }
8072
8073 /* Make a clone of the tree rooted at DIE. */
8074
8075 static dw_die_ref
8076 clone_tree (dw_die_ref die)
8077 {
8078 dw_die_ref c;
8079 dw_die_ref clone = clone_die (die);
8080
8081 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8082
8083 return clone;
8084 }
8085
8086 /* Make a clone of DIE as a declaration. */
8087
8088 static dw_die_ref
8089 clone_as_declaration (dw_die_ref die)
8090 {
8091 dw_die_ref clone;
8092 dw_die_ref decl;
8093 dw_attr_node *a;
8094 unsigned ix;
8095
8096 /* If the DIE is already a declaration, just clone it. */
8097 if (is_declaration_die (die))
8098 return clone_die (die);
8099
8100 /* If the DIE is a specification, just clone its declaration DIE. */
8101 decl = get_AT_ref (die, DW_AT_specification);
8102 if (decl != NULL)
8103 {
8104 clone = clone_die (decl);
8105 if (die->comdat_type_p)
8106 add_AT_die_ref (clone, DW_AT_signature, die);
8107 return clone;
8108 }
8109
8110 clone = new_die_raw (die->die_tag);
8111
8112 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8113 {
8114 /* We don't want to copy over all attributes.
8115 For example, we don't want DW_AT_byte_size because otherwise we will no
8116 longer have a declaration and GDB will treat it as a definition. */
8117
8118 switch (a->dw_attr)
8119 {
8120 case DW_AT_abstract_origin:
8121 case DW_AT_artificial:
8122 case DW_AT_containing_type:
8123 case DW_AT_external:
8124 case DW_AT_name:
8125 case DW_AT_type:
8126 case DW_AT_virtuality:
8127 case DW_AT_linkage_name:
8128 case DW_AT_MIPS_linkage_name:
8129 add_dwarf_attr (clone, a);
8130 break;
8131 case DW_AT_byte_size:
8132 case DW_AT_alignment:
8133 default:
8134 break;
8135 }
8136 }
8137
8138 if (die->comdat_type_p)
8139 add_AT_die_ref (clone, DW_AT_signature, die);
8140
8141 add_AT_flag (clone, DW_AT_declaration, 1);
8142 return clone;
8143 }
8144
8145
8146 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8147
8148 struct decl_table_entry
8149 {
8150 dw_die_ref orig;
8151 dw_die_ref copy;
8152 };
8153
8154 /* Helpers to manipulate hash table of copied declarations. */
8155
8156 /* Hashtable helpers. */
8157
8158 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8159 {
8160 typedef die_struct *compare_type;
8161 static inline hashval_t hash (const decl_table_entry *);
8162 static inline bool equal (const decl_table_entry *, const die_struct *);
8163 };
8164
8165 inline hashval_t
8166 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8167 {
8168 return htab_hash_pointer (entry->orig);
8169 }
8170
8171 inline bool
8172 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8173 const die_struct *entry2)
8174 {
8175 return entry1->orig == entry2;
8176 }
8177
8178 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8179
8180 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8181 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8182 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8183 to check if the ancestor has already been copied into UNIT. */
8184
8185 static dw_die_ref
8186 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8187 decl_hash_type *decl_table)
8188 {
8189 dw_die_ref parent = die->die_parent;
8190 dw_die_ref new_parent = unit;
8191 dw_die_ref copy;
8192 decl_table_entry **slot = NULL;
8193 struct decl_table_entry *entry = NULL;
8194
8195 /* If DIE refers to a stub, unfold that so we get the appropriate
8196 DIE registered as orig in decl_table. */
8197 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8198 die = c;
8199
8200 if (decl_table)
8201 {
8202 /* Check if the entry has already been copied to UNIT. */
8203 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8204 INSERT);
8205 if (*slot != HTAB_EMPTY_ENTRY)
8206 {
8207 entry = *slot;
8208 return entry->copy;
8209 }
8210
8211 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8212 entry = XCNEW (struct decl_table_entry);
8213 entry->orig = die;
8214 entry->copy = NULL;
8215 *slot = entry;
8216 }
8217
8218 if (parent != NULL)
8219 {
8220 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8221 if (spec != NULL)
8222 parent = spec;
8223 if (!is_unit_die (parent))
8224 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8225 }
8226
8227 copy = clone_as_declaration (die);
8228 add_child_die (new_parent, copy);
8229
8230 if (decl_table)
8231 {
8232 /* Record the pointer to the copy. */
8233 entry->copy = copy;
8234 }
8235
8236 return copy;
8237 }
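
/* Editorial example: if a type N::S::T is moved to its own type unit,
   copy_ancestor_tree recreates declaration-only clones of DW_TAG_namespace "N"
   and DW_TAG_structure_type "S" (via clone_as_declaration) inside that unit,
   returning the innermost clone so the caller can hang T's copy underneath it;
   the qualified context thus survives without dragging the full definitions of
   N and S into the type unit.  */
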
8238 /* Copy the declaration context to the new type unit DIE. This includes
8239 any surrounding namespace or type declarations. If the DIE has an
8240 AT_specification attribute, it also includes attributes and children
8241 attached to the specification, and returns a pointer to the original
8242 parent of the declaration DIE. Returns NULL otherwise. */
8243
8244 static dw_die_ref
8245 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8246 {
8247 dw_die_ref decl;
8248 dw_die_ref new_decl;
8249 dw_die_ref orig_parent = NULL;
8250
8251 decl = get_AT_ref (die, DW_AT_specification);
8252 if (decl == NULL)
8253 decl = die;
8254 else
8255 {
8256 unsigned ix;
8257 dw_die_ref c;
8258 dw_attr_node *a;
8259
8260 /* The original DIE will be changed to a declaration, and must
8261 be moved to be a child of the original declaration DIE. */
8262 orig_parent = decl->die_parent;
8263
8264 /* Copy the type node pointer from the new DIE to the original
8265 declaration DIE so we can forward references later. */
8266 decl->comdat_type_p = true;
8267 decl->die_id.die_type_node = die->die_id.die_type_node;
8268
8269 remove_AT (die, DW_AT_specification);
8270
8271 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8272 {
8273 if (a->dw_attr != DW_AT_name
8274 && a->dw_attr != DW_AT_declaration
8275 && a->dw_attr != DW_AT_external)
8276 add_dwarf_attr (die, a);
8277 }
8278
8279 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8280 }
8281
8282 if (decl->die_parent != NULL
8283 && !is_unit_die (decl->die_parent))
8284 {
8285 new_decl = copy_ancestor_tree (unit, decl, NULL);
8286 if (new_decl != NULL)
8287 {
8288 remove_AT (new_decl, DW_AT_signature);
8289 add_AT_specification (die, new_decl);
8290 }
8291 }
8292
8293 return orig_parent;
8294 }
8295
8296 /* Generate the skeleton ancestor tree for the given NODE, then clone
8297 the DIE and add the clone into the tree. */
8298
8299 static void
8300 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8301 {
8302 if (node->new_die != NULL)
8303 return;
8304
8305 node->new_die = clone_as_declaration (node->old_die);
8306
8307 if (node->parent != NULL)
8308 {
8309 generate_skeleton_ancestor_tree (node->parent);
8310 add_child_die (node->parent->new_die, node->new_die);
8311 }
8312 }
8313
8314 /* Generate a skeleton tree of DIEs containing any declarations that are
8315 found in the original tree. We traverse the tree looking for declaration
8316 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8317
8318 static void
8319 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8320 {
8321 skeleton_chain_node node;
8322 dw_die_ref c;
8323 dw_die_ref first;
8324 dw_die_ref prev = NULL;
8325 dw_die_ref next = NULL;
8326
8327 node.parent = parent;
8328
8329 first = c = parent->old_die->die_child;
8330 if (c)
8331 next = c->die_sib;
8332 if (c) do {
8333 if (prev == NULL || prev->die_sib == c)
8334 prev = c;
8335 c = next;
8336 next = (c == first ? NULL : c->die_sib);
8337 node.old_die = c;
8338 node.new_die = NULL;
8339 if (is_declaration_die (c))
8340 {
8341 if (is_template_instantiation (c))
8342 {
8343 /* Instantiated templates do not need to be cloned into the
8344 type unit. Just move the DIE and its children back to
8345 the skeleton tree (in the main CU). */
8346 remove_child_with_prev (c, prev);
8347 add_child_die (parent->new_die, c);
8348 c = prev;
8349 }
8350 else if (c->comdat_type_p)
8351 {
8352 /* This is the skeleton of an earlier break_out_comdat_types
8353 type. Clone the existing DIE, but keep the children
8354 under the original (which is in the main CU). */
8355 dw_die_ref clone = clone_die (c);
8356
8357 replace_child (c, clone, prev);
8358 generate_skeleton_ancestor_tree (parent);
8359 add_child_die (parent->new_die, c);
8360 c = clone;
8361 continue;
8362 }
8363 else
8364 {
8365 /* Clone the existing DIE, move the original to the skeleton
8366 tree (which is in the main CU), and put the clone, with
8367 all the original's children, where the original came from
8368 (which is about to be moved to the type unit). */
8369 dw_die_ref clone = clone_die (c);
8370 move_all_children (c, clone);
8371
8372 /* If the original has a DW_AT_object_pointer attribute,
8373 it would now point to a child DIE just moved to the
8374 cloned tree, so we need to remove that attribute from
8375 the original. */
8376 remove_AT (c, DW_AT_object_pointer);
8377
8378 replace_child (c, clone, prev);
8379 generate_skeleton_ancestor_tree (parent);
8380 add_child_die (parent->new_die, c);
8381 node.old_die = clone;
8382 node.new_die = c;
8383 c = clone;
8384 }
8385 }
8386 generate_skeleton_bottom_up (&node);
8387 } while (next != NULL);
8388 }
8389
8390 /* Wrapper function for generate_skeleton_bottom_up. */
8391
8392 static dw_die_ref
8393 generate_skeleton (dw_die_ref die)
8394 {
8395 skeleton_chain_node node;
8396
8397 node.old_die = die;
8398 node.new_die = NULL;
8399 node.parent = NULL;
8400
8401 /* If this type definition is nested inside another type,
8402 and is not an instantiation of a template, always leave
8403 at least a declaration in its place. */
8404 if (die->die_parent != NULL
8405 && is_type_die (die->die_parent)
8406 && !is_template_instantiation (die))
8407 node.new_die = clone_as_declaration (die);
8408
8409 generate_skeleton_bottom_up (&node);
8410 return node.new_die;
8411 }
8412
8413 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8414 declaration. The original DIE is moved to a new compile unit so that
8415 existing references to it follow it to the new location. If any of the
8416 original DIE's descendants is a declaration, we need to replace the
8417 original DIE with a skeleton tree and move the declarations back into the
8418 skeleton tree. */
8419
8420 static dw_die_ref
8421 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8422 dw_die_ref prev)
8423 {
8424 dw_die_ref skeleton, orig_parent;
8425
8426 /* Copy the declaration context to the type unit DIE. If the returned
8427 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8428 that DIE. */
8429 orig_parent = copy_declaration_context (unit, child);
8430
8431 skeleton = generate_skeleton (child);
8432 if (skeleton == NULL)
8433 remove_child_with_prev (child, prev);
8434 else
8435 {
8436 skeleton->comdat_type_p = true;
8437 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8438
8439 /* If the original DIE was a specification, we need to put
8440 the skeleton under the parent DIE of the declaration.
8441 This leaves the original declaration in the tree, but
8442 it will be pruned later since there are no longer any
8443 references to it. */
8444 if (orig_parent != NULL)
8445 {
8446 remove_child_with_prev (child, prev);
8447 add_child_die (orig_parent, skeleton);
8448 }
8449 else
8450 replace_child (child, skeleton, prev);
8451 }
8452
8453 return skeleton;
8454 }
8455
8456 static void
8457 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8458 comdat_type_node *type_node,
8459 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8460
8461 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8462 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8463 DWARF procedure references in the DW_AT_location attribute. */
8464
8465 static dw_die_ref
8466 copy_dwarf_procedure (dw_die_ref die,
8467 comdat_type_node *type_node,
8468 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8469 {
8470 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8471
8472 /* DWARF procedures are not supposed to have children... */
8473 gcc_assert (die->die_child == NULL);
8474
8475 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8476 gcc_assert (vec_safe_length (die->die_attr) == 1
8477 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8478
8479 /* Do not copy DWARF procedures more than once. */
8480 bool existed;
8481 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8482 if (existed)
8483 return die_copy;
8484
8485 die_copy = clone_die (die);
8486 add_child_die (type_node->root_die, die_copy);
8487 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8488 return die_copy;
8489 }
8490
8491 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8492 procedures in DIE's attributes. */
8493
8494 static void
8495 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8496 comdat_type_node *type_node,
8497 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8498 {
8499 dw_attr_node *a;
8500 unsigned i;
8501
8502 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8503 {
8504 dw_loc_descr_ref loc;
8505
8506 if (a->dw_attr_val.val_class != dw_val_class_loc)
8507 continue;
8508
8509 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8510 {
8511 switch (loc->dw_loc_opc)
8512 {
8513 case DW_OP_call2:
8514 case DW_OP_call4:
8515 case DW_OP_call_ref:
8516 gcc_assert (loc->dw_loc_oprnd1.val_class
8517 == dw_val_class_die_ref);
8518 loc->dw_loc_oprnd1.v.val_die_ref.die
8519 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8520 type_node,
8521 copied_dwarf_procs);
8522
8523 default:
8524 break;
8525 }
8526 }
8527 }
8528 }
8529
8530 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8531 rewrite references to point to the copies.
8532
8533 References are looked for in DIE's attributes and recursively in all its
8534 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8535 mapping from old DWARF procedures to their copies. It is used to avoid
8536 copying the same DWARF procedure twice under TYPE_NODE. */
8537
8538 static void
8539 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8540 comdat_type_node *type_node,
8541 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8542 {
8543 dw_die_ref c;
8544
8545 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8546 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8547 type_node,
8548 copied_dwarf_procs));
8549 }
8550
8551 /* Traverse the DIE and set up additional .debug_types or .debug_info
8552 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8553 section. */
8554
8555 static void
8556 break_out_comdat_types (dw_die_ref die)
8557 {
8558 dw_die_ref c;
8559 dw_die_ref first;
8560 dw_die_ref prev = NULL;
8561 dw_die_ref next = NULL;
8562 dw_die_ref unit = NULL;
8563
8564 first = c = die->die_child;
8565 if (c)
8566 next = c->die_sib;
8567 if (c) do {
8568 if (prev == NULL || prev->die_sib == c)
8569 prev = c;
8570 c = next;
8571 next = (c == first ? NULL : c->die_sib);
8572 if (should_move_die_to_comdat (c))
8573 {
8574 dw_die_ref replacement;
8575 comdat_type_node *type_node;
8576
8577 /* Break out nested types into their own type units. */
8578 break_out_comdat_types (c);
8579
8580 /* Create a new type unit DIE as the root for the new tree. */
8581 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8582 add_AT_unsigned (unit, DW_AT_language,
8583 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8584
8585 /* Add the new unit's type DIE into the comdat type list. */
8586 type_node = ggc_cleared_alloc<comdat_type_node> ();
8587 type_node->root_die = unit;
8588 type_node->next = comdat_type_list;
8589 comdat_type_list = type_node;
8590
8591 /* Generate the type signature. */
8592 generate_type_signature (c, type_node);
8593
8594 /* Copy the declaration context, attributes, and children of the
8595 declaration into the new type unit DIE, then remove this DIE
8596 from the main CU (or replace it with a skeleton if necessary). */
8597 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8598 type_node->skeleton_die = replacement;
8599
8600 /* Add the DIE to the new compunit. */
8601 add_child_die (unit, c);
8602
8603 /* Types can reference DWARF procedures for type size or data location
8604 expressions. Calls in DWARF expressions cannot target procedures
8605 that are not in the same section. So we must copy DWARF procedures
8606 along with this type and then rewrite references to them. */
8607 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8608 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8609
8610 if (replacement != NULL)
8611 c = replacement;
8612 }
8613 else if (c->die_tag == DW_TAG_namespace
8614 || c->die_tag == DW_TAG_class_type
8615 || c->die_tag == DW_TAG_structure_type
8616 || c->die_tag == DW_TAG_union_type)
8617 {
8618 /* Look for nested types that can be broken out. */
8619 break_out_comdat_types (c);
8620 }
8621 } while (next != NULL);
8622 }
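
/* Editorial illustration (schematic DIE trees, hypothetical type): given a
   compile unit containing

     DW_TAG_compile_unit
       DW_TAG_structure_type "S"        (eligible per should_move_die_to_comdat)
         DW_TAG_member "x"

   the loop above allocates a DW_TAG_type_unit, computes S's 8-byte signature,
   moves S and its children into the new unit, and leaves behind either nothing
   or a skeleton declaration of "S" that refers to the type unit through that
   signature:

     DW_TAG_compile_unit
       DW_TAG_structure_type "S"        (skeleton declaration, if needed)
     DW_TAG_type_unit  (comdat section keyed by the signature)
       DW_TAG_structure_type "S"
         DW_TAG_member "x"

   Any DWARF procedures referenced from S's location expressions are copied
   into the type unit as well, since DWARF expression calls cannot target
   procedures in another section.  */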
8623
8624 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8625 Enter all the cloned children into the hash table decl_table. */
8626
8627 static dw_die_ref
8628 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8629 {
8630 dw_die_ref c;
8631 dw_die_ref clone;
8632 struct decl_table_entry *entry;
8633 decl_table_entry **slot;
8634
8635 if (die->die_tag == DW_TAG_subprogram)
8636 clone = clone_as_declaration (die);
8637 else
8638 clone = clone_die (die);
8639
8640 slot = decl_table->find_slot_with_hash (die,
8641 htab_hash_pointer (die), INSERT);
8642
8643 /* Assert that DIE isn't in the hash table yet. If it were already
8644 there, its ancestors would necessarily be there as well, and
8645 clone_tree_partial wouldn't have been called. */
8646 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8647
8648 entry = XCNEW (struct decl_table_entry);
8649 entry->orig = die;
8650 entry->copy = clone;
8651 *slot = entry;
8652
8653 if (die->die_tag != DW_TAG_subprogram)
8654 FOR_EACH_CHILD (die, c,
8655 add_child_die (clone, clone_tree_partial (c, decl_table)));
8656
8657 return clone;
8658 }
8659
8660 /* Walk the DIE and its children, looking for references to incomplete
8661 or trivial types that are unmarked (i.e., that are not in the current
8662 type_unit). */
8663
8664 static void
8665 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8666 {
8667 dw_die_ref c;
8668 dw_attr_node *a;
8669 unsigned ix;
8670
8671 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8672 {
8673 if (AT_class (a) == dw_val_class_die_ref)
8674 {
8675 dw_die_ref targ = AT_ref (a);
8676 decl_table_entry **slot;
8677 struct decl_table_entry *entry;
8678
8679 if (targ->die_mark != 0 || targ->comdat_type_p)
8680 continue;
8681
8682 slot = decl_table->find_slot_with_hash (targ,
8683 htab_hash_pointer (targ),
8684 INSERT);
8685
8686 if (*slot != HTAB_EMPTY_ENTRY)
8687 {
8688 /* TARG has already been copied, so we just need to
8689 modify the reference to point to the copy. */
8690 entry = *slot;
8691 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8692 }
8693 else
8694 {
8695 dw_die_ref parent = unit;
8696 dw_die_ref copy = clone_die (targ);
8697
8698 /* Record in DECL_TABLE that TARG has been copied.
8699 Need to do this now, before the recursive call,
8700 because DECL_TABLE may be expanded and SLOT
8701 would no longer be a valid pointer. */
8702 entry = XCNEW (struct decl_table_entry);
8703 entry->orig = targ;
8704 entry->copy = copy;
8705 *slot = entry;
8706
8707 /* If TARG is not a declaration DIE, we need to copy its
8708 children. */
8709 if (!is_declaration_die (targ))
8710 {
8711 FOR_EACH_CHILD (
8712 targ, c,
8713 add_child_die (copy,
8714 clone_tree_partial (c, decl_table)));
8715 }
8716
8717 /* Make sure the cloned tree is marked as part of the
8718 type unit. */
8719 mark_dies (copy);
8720
8721 /* If TARG has surrounding context, copy its ancestor tree
8722 into the new type unit. */
8723 if (targ->die_parent != NULL
8724 && !is_unit_die (targ->die_parent))
8725 parent = copy_ancestor_tree (unit, targ->die_parent,
8726 decl_table);
8727
8728 add_child_die (parent, copy);
8729 a->dw_attr_val.v.val_die_ref.die = copy;
8730
8731 /* Make sure the newly-copied DIE is walked. If it was
8732 installed in a previously-added context, it won't
8733 get visited otherwise. */
8734 if (parent != unit)
8735 {
8736 /* Find the highest point of the newly-added tree,
8737 mark each node along the way, and walk from there. */
8738 parent->die_mark = 1;
8739 while (parent->die_parent
8740 && parent->die_parent->die_mark == 0)
8741 {
8742 parent = parent->die_parent;
8743 parent->die_mark = 1;
8744 }
8745 copy_decls_walk (unit, parent, decl_table);
8746 }
8747 }
8748 }
8749 }
8750
8751 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8752 }
8753
8754 /* Collect skeleton dies in DIE created by break_out_comdat_types already
8755 and record them in DECL_TABLE. */
8756
8757 static void
8758 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8759 {
8760 dw_die_ref c;
8761
8762 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8763 {
8764 dw_die_ref targ = AT_ref (a);
8765 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8766 decl_table_entry **slot
8767 = decl_table->find_slot_with_hash (targ,
8768 htab_hash_pointer (targ),
8769 INSERT);
8770 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8771 /* Record in DECL_TABLE that TARG has been already copied
8772 by remove_child_or_replace_with_skeleton. */
8773 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8774 entry->orig = targ;
8775 entry->copy = die;
8776 *slot = entry;
8777 }
8778 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8779 }
8780
8781 /* Copy declarations for "unworthy" types into the new comdat section.
8782 Incomplete types, modified types, and certain other types aren't broken
8783 out into comdat sections of their own, so they don't have a signature,
8784 and we need to copy the declaration into the same section so that we
8785 don't have an external reference. */
8786
8787 static void
8788 copy_decls_for_unworthy_types (dw_die_ref unit)
8789 {
8790 mark_dies (unit);
8791 decl_hash_type decl_table (10);
8792 collect_skeleton_dies (unit, &decl_table);
8793 copy_decls_walk (unit, unit, &decl_table);
8794 unmark_dies (unit);
8795 }
8796
8797 /* Traverse the DIE and add a sibling attribute if it may have the
8798 effect of speeding up access to siblings. To save some space,
8799 avoid generating sibling attributes for DIE's without children. */
8800
8801 static void
8802 add_sibling_attributes (dw_die_ref die)
8803 {
8804 dw_die_ref c;
8805
8806 if (! die->die_child)
8807 return;
8808
8809 if (die->die_parent && die != die->die_parent->die_child)
8810 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8811
8812 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8813 }
8814
8815 /* Output all location lists for the DIE and its children. */
8816
8817 static void
8818 output_location_lists (dw_die_ref die)
8819 {
8820 dw_die_ref c;
8821 dw_attr_node *a;
8822 unsigned ix;
8823
8824 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8825 if (AT_class (a) == dw_val_class_loc_list)
8826 output_loc_list (AT_loc_list (a));
8827
8828 FOR_EACH_CHILD (die, c, output_location_lists (c));
8829 }
8830
8831 /* During assign_location_list_indexes and output_loclists_offsets this is
8832 the current index; afterwards it is the number of assigned indexes (i.e. how
8833 large the .debug_loclists* offset table should be). */
8834 static unsigned int loc_list_idx;
8835
8836 /* Output all location list offsets for the DIE and its children. */
8837
8838 static void
8839 output_loclists_offsets (dw_die_ref die)
8840 {
8841 dw_die_ref c;
8842 dw_attr_node *a;
8843 unsigned ix;
8844
8845 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8846 if (AT_class (a) == dw_val_class_loc_list)
8847 {
8848 dw_loc_list_ref l = AT_loc_list (a);
8849 if (l->offset_emitted)
8850 continue;
8851 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8852 loc_section_label, NULL);
8853 gcc_assert (l->hash == loc_list_idx);
8854 loc_list_idx++;
8855 l->offset_emitted = true;
8856 }
8857
8858 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8859 }
8860
8861 /* Recursively set indexes of location lists. */
8862
8863 static void
8864 assign_location_list_indexes (dw_die_ref die)
8865 {
8866 dw_die_ref c;
8867 dw_attr_node *a;
8868 unsigned ix;
8869
8870 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8871 if (AT_class (a) == dw_val_class_loc_list)
8872 {
8873 dw_loc_list_ref list = AT_loc_list (a);
8874 if (!list->num_assigned)
8875 {
8876 list->num_assigned = true;
8877 list->hash = loc_list_idx++;
8878 }
8879 }
8880
8881 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8882 }
8883
8884 /* We want to limit the number of external references, because they are
8885 larger than local references: a relocation takes multiple words, and
8886 even a sig8 reference is always eight bytes, whereas a local reference
8887 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8888 So if we encounter multiple external references to the same type DIE, we
8889 make a local typedef stub for it and redirect all references there.
8890
8891 This is the element of the hash table for keeping track of these
8892 references. */
8893
8894 struct external_ref
8895 {
8896 dw_die_ref type;
8897 dw_die_ref stub;
8898 unsigned n_refs;
8899 };
8900
8901 /* Hashtable helpers. */
8902
8903 struct external_ref_hasher : free_ptr_hash <external_ref>
8904 {
8905 static inline hashval_t hash (const external_ref *);
8906 static inline bool equal (const external_ref *, const external_ref *);
8907 };
8908
8909 inline hashval_t
8910 external_ref_hasher::hash (const external_ref *r)
8911 {
8912 dw_die_ref die = r->type;
8913 hashval_t h = 0;
8914
8915 /* We can't use the address of the DIE for hashing, because
8916 that will make the order of the stub DIEs non-deterministic. */
8917 if (! die->comdat_type_p)
8918 /* We have a symbol; use it to compute a hash. */
8919 h = htab_hash_string (die->die_id.die_symbol);
8920 else
8921 {
8922 /* We have a type signature; use a subset of the bits as the hash.
8923 The 8-byte signature is at least as large as hashval_t. */
8924 comdat_type_node *type_node = die->die_id.die_type_node;
8925 memcpy (&h, type_node->signature, sizeof (h));
8926 }
8927 return h;
8928 }
8929
8930 inline bool
8931 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8932 {
8933 return r1->type == r2->type;
8934 }
8935
8936 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8937
8938 /* Return a pointer to the external_ref for references to DIE. */
8939
8940 static struct external_ref *
8941 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8942 {
8943 struct external_ref ref, *ref_p;
8944 external_ref **slot;
8945
8946 ref.type = die;
8947 slot = map->find_slot (&ref, INSERT);
8948 if (*slot != HTAB_EMPTY_ENTRY)
8949 return *slot;
8950
8951 ref_p = XCNEW (struct external_ref);
8952 ref_p->type = die;
8953 *slot = ref_p;
8954 return ref_p;
8955 }
8956
8957 /* Subroutine of optimize_external_refs, below.
8958
8959 If we see a type skeleton, record it as our stub. If we see external
8960 references, remember how many we've seen. */
8961
8962 static void
8963 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8964 {
8965 dw_die_ref c;
8966 dw_attr_node *a;
8967 unsigned ix;
8968 struct external_ref *ref_p;
8969
8970 if (is_type_die (die)
8971 && (c = get_AT_ref (die, DW_AT_signature)))
8972 {
8973 /* This is a local skeleton; use it for local references. */
8974 ref_p = lookup_external_ref (map, c);
8975 ref_p->stub = die;
8976 }
8977
8978 /* Scan the DIE references, and remember any that refer to DIEs from
8979 other CUs (i.e. those which are not marked). */
8980 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8981 if (AT_class (a) == dw_val_class_die_ref
8982 && (c = AT_ref (a))->die_mark == 0
8983 && is_type_die (c))
8984 {
8985 ref_p = lookup_external_ref (map, c);
8986 ref_p->n_refs++;
8987 }
8988
8989 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8990 }
8991
8992 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8993 points to an external_ref, DATA is the CU we're processing. If we don't
8994 already have a local stub, and we have multiple refs, build a stub. */
8995
8996 int
8997 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8998 {
8999 struct external_ref *ref_p = *slot;
9000
9001 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9002 {
9003 /* We have multiple references to this type, so build a small stub.
9004 Both of these forms are a bit dodgy from the perspective of the
9005 DWARF standard, since technically they should have names. */
9006 dw_die_ref cu = data;
9007 dw_die_ref type = ref_p->type;
9008 dw_die_ref stub = NULL;
9009
9010 if (type->comdat_type_p)
9011 {
9012 /* If we refer to this type via sig8, use AT_signature. */
9013 stub = new_die (type->die_tag, cu, NULL_TREE);
9014 add_AT_die_ref (stub, DW_AT_signature, type);
9015 }
9016 else
9017 {
9018 /* Otherwise, use a typedef with no name. */
9019 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9020 add_AT_die_ref (stub, DW_AT_type, type);
9021 }
9022
9023 stub->die_mark++;
9024 ref_p->stub = stub;
9025 }
9026 return 1;
9027 }
9028
9029 /* DIE is a unit; look through all the DIE references to see if there are
9030 any external references to types, and if so, create local stubs for
9031 them which will be applied in build_abbrev_table. This is useful because
9032 references to local DIEs are smaller. */
9033
9034 static external_ref_hash_type *
9035 optimize_external_refs (dw_die_ref die)
9036 {
9037 external_ref_hash_type *map = new external_ref_hash_type (10);
9038 optimize_external_refs_1 (die, map);
9039 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9040 return map;
9041 }
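
/* Illustrative sketch, not part of GCC: a back-of-the-envelope model of
   the stub optimization performed above.  Replacing N external sig8
   references (8 bytes each) with N local refs (4 bytes each) plus one
   small stub DIE pays for itself quickly; the code above already builds
   a stub for two or more references, since external references also
   carry relocation overhead that this byte count ignores.  The sizes
   below are rough, hypothetical approximations.  */

static long
external_ref_stub_savings_sketch (long n_refs)
{
  const long sig8_ref_size = 8;   /* a DW_FORM_ref_sig8 reference.  */
  const long local_ref_size = 4;  /* a typical DW_FORM_ref4 reference.  */
  const long stub_die_size = 10;  /* rough cost of the one stub DIE.  */

  long without_stub = n_refs * sig8_ref_size;
  long with_stub = n_refs * local_ref_size + stub_die_size;
  /* Positive means the stub is a net win in .debug_info bytes alone,
     e.g. 3 refs give 3*8 - (3*4 + 10) = 2 bytes saved.  */
  return without_stub - with_stub;
}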
9042
9043 /* The following 4 variables are temporaries that are computed only during the
9044 build_abbrev_table call and used and released during the following
9045 optimize_abbrev_table call. */
9046
9047 /* First abbrev_id that can be optimized based on usage. */
9048 static unsigned int abbrev_opt_start;
9049
9050 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9051 abbrev_id smaller than this, because they must be already sized
9052 during build_abbrev_table). */
9053 static unsigned int abbrev_opt_base_type_end;
9054
9055 /* Vector of usage counts during build_abbrev_table. Indexed by
9056 abbrev_id - abbrev_opt_start. */
9057 static vec<unsigned int> abbrev_usage_count;
9058
9059 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9060 static vec<dw_die_ref> sorted_abbrev_dies;
9061
9062 /* The format of each DIE (and its attribute value pairs) is encoded in an
9063 abbreviation table. This routine builds the abbreviation table and assigns
9064 a unique abbreviation id for each abbreviation entry. The children of each
9065 die are visited recursively. */
9066
9067 static void
9068 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9069 {
9070 unsigned int abbrev_id = 0;
9071 dw_die_ref c;
9072 dw_attr_node *a;
9073 unsigned ix;
9074 dw_die_ref abbrev;
9075
9076 /* Scan the DIE references, and replace any that refer to
9077 DIEs from other CUs (i.e. those which are not marked) with
9078 the local stubs we built in optimize_external_refs. */
9079 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9080 if (AT_class (a) == dw_val_class_die_ref
9081 && (c = AT_ref (a))->die_mark == 0)
9082 {
9083 struct external_ref *ref_p;
9084 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9085
9086 if (is_type_die (c)
9087 && (ref_p = lookup_external_ref (extern_map, c))
9088 && ref_p->stub && ref_p->stub != die)
9089 {
9090 gcc_assert (a->dw_attr != DW_AT_signature);
9091 change_AT_die_ref (a, ref_p->stub);
9092 }
9093 else
9094 /* We aren't changing this reference, so mark it external. */
9095 set_AT_ref_external (a, 1);
9096 }
9097
9098 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9099 {
9100 dw_attr_node *die_a, *abbrev_a;
9101 unsigned ix;
9102 bool ok = true;
9103
9104 if (abbrev_id == 0)
9105 continue;
9106 if (abbrev->die_tag != die->die_tag)
9107 continue;
9108 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9109 continue;
9110
9111 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9112 continue;
9113
9114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9115 {
9116 abbrev_a = &(*abbrev->die_attr)[ix];
9117 if ((abbrev_a->dw_attr != die_a->dw_attr)
9118 || (value_format (abbrev_a) != value_format (die_a)))
9119 {
9120 ok = false;
9121 break;
9122 }
9123 }
9124 if (ok)
9125 break;
9126 }
9127
9128 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9129 {
9130 vec_safe_push (abbrev_die_table, die);
9131 if (abbrev_opt_start)
9132 abbrev_usage_count.safe_push (0);
9133 }
9134 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9135 {
9136 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9137 sorted_abbrev_dies.safe_push (die);
9138 }
9139
9140 die->die_abbrev = abbrev_id;
9141 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9142 }
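
/* Illustrative sketch, not part of GCC: the abbreviation-sharing idea in
   build_abbrev_table above, reduced to its essentials.  Two DIEs can
   share one .debug_abbrev entry exactly when they have the same tag, the
   same has-children flag and the same ordered list of (attribute, form)
   pairs; the abbrev code is then the index of that shape in the table.
   The types and names below are hypothetical.  */

#include <utility>
#include <vector>

struct die_shape_sketch
{
  int tag;
  bool has_children;
  std::vector<std::pair<int, int> > attr_forms; /* (attribute, form).  */

  bool operator== (const die_shape_sketch &o) const
  {
    return tag == o.tag
           && has_children == o.has_children
           && attr_forms == o.attr_forms;
  }
};

/* Return the abbrev code for SHAPE, registering it if it is new.
   Code 0 is reserved as the end-of-siblings marker, so codes start
   at 1, like die_abbrev in the real table.  */

static unsigned int
assign_abbrev_code_sketch (std::vector<die_shape_sketch> &table,
                           const die_shape_sketch &shape)
{
  for (unsigned int i = 0; i < table.size (); i++)
    if (table[i] == shape)
      return i + 1;        /* reuse an existing abbreviation.  */
  table.push_back (shape); /* otherwise create a new one.  */
  return table.size ();
}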
9143
9144 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9145 by die_abbrev's usage count, from the most commonly used
9146 abbreviation to the least. */
9147
9148 static int
9149 die_abbrev_cmp (const void *p1, const void *p2)
9150 {
9151 dw_die_ref die1 = *(const dw_die_ref *) p1;
9152 dw_die_ref die2 = *(const dw_die_ref *) p2;
9153
9154 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9155 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9156
9157 if (die1->die_abbrev >= abbrev_opt_base_type_end
9158 && die2->die_abbrev >= abbrev_opt_base_type_end)
9159 {
9160 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9161 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9162 return -1;
9163 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9164 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9165 return 1;
9166 }
9167
9168 /* Stabilize the sort. */
9169 if (die1->die_abbrev < die2->die_abbrev)
9170 return -1;
9171 if (die1->die_abbrev > die2->die_abbrev)
9172 return 1;
9173
9174 return 0;
9175 }
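
/* Illustrative sketch, not part of GCC: the same "sort by usage count,
   break ties by original id" pattern as die_abbrev_cmp above, shown on a
   plain struct.  qsort is not a stable sort, so without the id tie-break
   equal-count entries could be permuted between otherwise identical
   builds; comparing the ids last keeps the output deterministic.  The
   struct and field names are hypothetical.  */

struct usage_entry_sketch
{
  unsigned int id;     /* original, already-deterministic id.  */
  unsigned int count;  /* how often the entry was used.  */
};

static int
usage_entry_cmp_sketch (const void *p1, const void *p2)
{
  const usage_entry_sketch *e1 = (const usage_entry_sketch *) p1;
  const usage_entry_sketch *e2 = (const usage_entry_sketch *) p2;

  /* Most frequently used entries first.  */
  if (e1->count > e2->count)
    return -1;
  if (e1->count < e2->count)
    return 1;

  /* Stabilize the sort: fall back to the original id.  */
  if (e1->id < e2->id)
    return -1;
  if (e1->id > e2->id)
    return 1;
  return 0;
}

/* Used as: qsort (entries, n_entries, sizeof (entries[0]),
   usage_entry_cmp_sketch);  */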
9176
9177 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9178 of the DIEs sorted_abbrev_dies[first_id] through sorted_abbrev_dies[end - 1]
9179 into dw_val_class_const_implicit or
9180 dw_val_class_unsigned_const_implicit. */
9181
9182 static void
9183 optimize_implicit_const (unsigned int first_id, unsigned int end,
9184 vec<bool> &implicit_consts)
9185 {
9186 /* It never makes sense if there is just one DIE using the abbreviation. */
9187 if (end < first_id + 2)
9188 return;
9189
9190 dw_attr_node *a;
9191 unsigned ix, i;
9192 dw_die_ref die = sorted_abbrev_dies[first_id];
9193 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9194 if (implicit_consts[ix])
9195 {
9196 enum dw_val_class new_class = dw_val_class_none;
9197 switch (AT_class (a))
9198 {
9199 case dw_val_class_unsigned_const:
9200 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9201 continue;
9202
9203 /* The .debug_abbrev section will grow by
9204 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9205 in all the DIEs using that abbreviation. */
9206 if (constant_size (AT_unsigned (a)) * (end - first_id)
9207 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9208 continue;
9209
9210 new_class = dw_val_class_unsigned_const_implicit;
9211 break;
9212
9213 case dw_val_class_const:
9214 new_class = dw_val_class_const_implicit;
9215 break;
9216
9217 case dw_val_class_file:
9218 new_class = dw_val_class_file_implicit;
9219 break;
9220
9221 default:
9222 continue;
9223 }
9224 for (i = first_id; i < end; i++)
9225 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9226 = new_class;
9227 }
9228 }
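
/* Illustrative sketch, not part of GCC: the space trade-off behind
   DW_FORM_implicit_const as checked above.  Moving an unsigned constant
   into .debug_abbrev costs one sleb128 there, but removes the constant
   from every DIE sharing the abbreviation, where it would otherwise
   occupy constant_size (value) bytes.  The helper below models that
   decision; its sleb128 and data-form size computations are simplified
   stand-ins for size_of_sleb128 and constant_size, not the GCC
   implementations.  */

static bool
implicit_const_wins_sketch (unsigned long long value, unsigned int n_dies)
{
  /* Bytes a sleb128 encoding of VALUE needs (VALUE assumed non-negative,
     as the code above skips negative-looking values).  */
  unsigned int sleb_bytes = 0;
  unsigned long long v = value;
  do
    {
      unsigned int group = v & 0x7f;
      v >>= 7;
      sleb_bytes++;
      /* A non-negative sleb128 needs one extra zero group when bit 6 of
         the final group is set, or a reader would sign-extend it.  */
      if (v == 0 && (group & 0x40) != 0)
        sleb_bytes++;
    }
  while (v != 0);

  /* Power-of-two data form size each DIE would otherwise pay.  */
  unsigned int per_die = value <= 0xff ? 1
                         : value <= 0xffff ? 2
                         : value <= 0xffffffffULL ? 4 : 8;

  /* Same shape as the test above: worth it only when the per-DIE savings
     exceed the one-time .debug_abbrev growth.  */
  return (unsigned long long) per_die * n_dies > sleb_bytes;
}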
9229
9230 /* Attempt to optimize the abbreviation table, considering only the
9231 abbreviations numbered abbrev_opt_start and above. */
9232
9233 static void
9234 optimize_abbrev_table (void)
9235 {
9236 if (abbrev_opt_start
9237 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9238 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9239 {
9240 auto_vec<bool, 32> implicit_consts;
9241 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9242
9243 unsigned int abbrev_id = abbrev_opt_start - 1;
9244 unsigned int first_id = ~0U;
9245 unsigned int last_abbrev_id = 0;
9246 unsigned int i;
9247 dw_die_ref die;
9248 if (abbrev_opt_base_type_end > abbrev_opt_start)
9249 abbrev_id = abbrev_opt_base_type_end - 1;
9250 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9251 most commonly used abbreviations come first. */
9252 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9253 {
9254 dw_attr_node *a;
9255 unsigned ix;
9256
9257 /* If calc_base_type_die_sizes has been called, the CU and
9258 base types after it can't be optimized, because we've already
9259 calculated their DIE offsets. We've sorted them first. */
9260 if (die->die_abbrev < abbrev_opt_base_type_end)
9261 continue;
9262 if (die->die_abbrev != last_abbrev_id)
9263 {
9264 last_abbrev_id = die->die_abbrev;
9265 if (dwarf_version >= 5 && first_id != ~0U)
9266 optimize_implicit_const (first_id, i, implicit_consts);
9267 abbrev_id++;
9268 (*abbrev_die_table)[abbrev_id] = die;
9269 if (dwarf_version >= 5)
9270 {
9271 first_id = i;
9272 implicit_consts.truncate (0);
9273
9274 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9275 switch (AT_class (a))
9276 {
9277 case dw_val_class_const:
9278 case dw_val_class_unsigned_const:
9279 case dw_val_class_file:
9280 implicit_consts.safe_push (true);
9281 break;
9282 default:
9283 implicit_consts.safe_push (false);
9284 break;
9285 }
9286 }
9287 }
9288 else if (dwarf_version >= 5)
9289 {
9290 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9291 if (!implicit_consts[ix])
9292 continue;
9293 else
9294 {
9295 dw_attr_node *other_a
9296 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9297 if (!dw_val_equal_p (&a->dw_attr_val,
9298 &other_a->dw_attr_val))
9299 implicit_consts[ix] = false;
9300 }
9301 }
9302 die->die_abbrev = abbrev_id;
9303 }
9304 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9305 if (dwarf_version >= 5 && first_id != ~0U)
9306 optimize_implicit_const (first_id, i, implicit_consts);
9307 }
9308
9309 abbrev_opt_start = 0;
9310 abbrev_opt_base_type_end = 0;
9311 abbrev_usage_count.release ();
9312 sorted_abbrev_dies.release ();
9313 }
9314 \f
9315 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9316
9317 static int
9318 constant_size (unsigned HOST_WIDE_INT value)
9319 {
9320 int log;
9321
9322 if (value == 0)
9323 log = 0;
9324 else
9325 log = floor_log2 (value);
9326
9327 log = log / 8;
9328 log = 1 << (floor_log2 (log) + 1);
9329
9330 return log;
9331 }
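
/* Illustrative sketch, not part of GCC: a self-contained version of the
   power-of-two size computation above.  The result is always 1, 2, 4 or 8,
   i.e. the smallest DW_FORM_data1/2/4/8 payload that can hold VALUE.
   This is a stand-in with the same results, not the GCC routine itself.  */

static int
constant_size_sketch (unsigned long long value)
{
  int bytes = 1;
  /* Grow in power-of-two steps until VALUE fits in BYTES bytes.  */
  while (bytes < 8 && (value >> (bytes * 8)) != 0)
    bytes *= 2;
  return bytes;
}

/* For example: 0 and 0xff -> 1, 0x100 and 0xffff -> 2, 0x10000 and
   0xffffffff -> 4, anything larger -> 8, matching constant_size above.  */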
9332
9333 /* Return the size of a DIE as it is represented in the
9334 .debug_info section. */
9335
9336 static unsigned long
9337 size_of_die (dw_die_ref die)
9338 {
9339 unsigned long size = 0;
9340 dw_attr_node *a;
9341 unsigned ix;
9342 enum dwarf_form form;
9343
9344 size += size_of_uleb128 (die->die_abbrev);
9345 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9346 {
9347 switch (AT_class (a))
9348 {
9349 case dw_val_class_addr:
9350 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9351 {
9352 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9353 size += size_of_uleb128 (AT_index (a));
9354 }
9355 else
9356 size += DWARF2_ADDR_SIZE;
9357 break;
9358 case dw_val_class_offset:
9359 size += DWARF_OFFSET_SIZE;
9360 break;
9361 case dw_val_class_loc:
9362 {
9363 unsigned long lsize = size_of_locs (AT_loc (a));
9364
9365 /* Block length. */
9366 if (dwarf_version >= 4)
9367 size += size_of_uleb128 (lsize);
9368 else
9369 size += constant_size (lsize);
9370 size += lsize;
9371 }
9372 break;
9373 case dw_val_class_loc_list:
9374 if (dwarf_split_debug_info && dwarf_version >= 5)
9375 {
9376 gcc_assert (AT_loc_list (a)->num_assigned);
9377 size += size_of_uleb128 (AT_loc_list (a)->hash);
9378 }
9379 else
9380 size += DWARF_OFFSET_SIZE;
9381 break;
9382 case dw_val_class_view_list:
9383 size += DWARF_OFFSET_SIZE;
9384 break;
9385 case dw_val_class_range_list:
9386 if (value_format (a) == DW_FORM_rnglistx)
9387 {
9388 gcc_assert (rnglist_idx);
9389 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9390 size += size_of_uleb128 (r->idx);
9391 }
9392 else
9393 size += DWARF_OFFSET_SIZE;
9394 break;
9395 case dw_val_class_const:
9396 size += size_of_sleb128 (AT_int (a));
9397 break;
9398 case dw_val_class_unsigned_const:
9399 {
9400 int csize = constant_size (AT_unsigned (a));
9401 if (dwarf_version == 3
9402 && a->dw_attr == DW_AT_data_member_location
9403 && csize >= 4)
9404 size += size_of_uleb128 (AT_unsigned (a));
9405 else
9406 size += csize;
9407 }
9408 break;
9409 case dw_val_class_symview:
9410 if (symview_upper_bound <= 0xff)
9411 size += 1;
9412 else if (symview_upper_bound <= 0xffff)
9413 size += 2;
9414 else if (symview_upper_bound <= 0xffffffff)
9415 size += 4;
9416 else
9417 size += 8;
9418 break;
9419 case dw_val_class_const_implicit:
9420 case dw_val_class_unsigned_const_implicit:
9421 case dw_val_class_file_implicit:
9422 /* These occupy no size in the DIE, just an extra sleb128 in
9423 .debug_abbrev. */
9424 break;
9425 case dw_val_class_const_double:
9426 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9427 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9428 size++; /* block */
9429 break;
9430 case dw_val_class_wide_int:
9431 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9432 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9433 if (get_full_len (*a->dw_attr_val.v.val_wide)
9434 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9435 size++; /* block */
9436 break;
9437 case dw_val_class_vec:
9438 size += constant_size (a->dw_attr_val.v.val_vec.length
9439 * a->dw_attr_val.v.val_vec.elt_size)
9440 + a->dw_attr_val.v.val_vec.length
9441 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9442 break;
9443 case dw_val_class_flag:
9444 if (dwarf_version >= 4)
9445 /* Currently all add_AT_flag calls pass in 1 as last argument,
9446 so DW_FORM_flag_present can be used. If that ever changes,
9447 we'll need to use DW_FORM_flag and have some optimization
9448 in build_abbrev_table that will change those to
9449 DW_FORM_flag_present if it is set to 1 in all DIEs using
9450 the same abbrev entry. */
9451 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9452 else
9453 size += 1;
9454 break;
9455 case dw_val_class_die_ref:
9456 if (AT_ref_external (a))
9457 {
9458 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9459 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9460 is sized by target address length, whereas in DWARF3
9461 it's always sized as an offset. */
9462 if (AT_ref (a)->comdat_type_p)
9463 size += DWARF_TYPE_SIGNATURE_SIZE;
9464 else if (dwarf_version == 2)
9465 size += DWARF2_ADDR_SIZE;
9466 else
9467 size += DWARF_OFFSET_SIZE;
9468 }
9469 else
9470 size += DWARF_OFFSET_SIZE;
9471 break;
9472 case dw_val_class_fde_ref:
9473 size += DWARF_OFFSET_SIZE;
9474 break;
9475 case dw_val_class_lbl_id:
9476 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9477 {
9478 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9479 size += size_of_uleb128 (AT_index (a));
9480 }
9481 else
9482 size += DWARF2_ADDR_SIZE;
9483 break;
9484 case dw_val_class_lineptr:
9485 case dw_val_class_macptr:
9486 case dw_val_class_loclistsptr:
9487 size += DWARF_OFFSET_SIZE;
9488 break;
9489 case dw_val_class_str:
9490 form = AT_string_form (a);
9491 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9492 size += DWARF_OFFSET_SIZE;
9493 else if (form == dwarf_FORM (DW_FORM_strx))
9494 size += size_of_uleb128 (AT_index (a));
9495 else
9496 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9497 break;
9498 case dw_val_class_file:
9499 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9500 break;
9501 case dw_val_class_data8:
9502 size += 8;
9503 break;
9504 case dw_val_class_vms_delta:
9505 size += DWARF_OFFSET_SIZE;
9506 break;
9507 case dw_val_class_high_pc:
9508 size += DWARF2_ADDR_SIZE;
9509 break;
9510 case dw_val_class_discr_value:
9511 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9512 break;
9513 case dw_val_class_discr_list:
9514 {
9515 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9516
9517 /* This is a block, so we have the block length and then its
9518 data. */
9519 size += constant_size (block_size) + block_size;
9520 }
9521 break;
9522 default:
9523 gcc_unreachable ();
9524 }
9525 }
9526
9527 return size;
9528 }
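
/* Illustrative sketch, not part of GCC: where the uleb128 sizes used all
   over size_of_die come from.  An unsigned LEB128 value is emitted in
   little-endian groups of 7 bits, one byte per group, so its size is the
   number of 7-bit groups needed: values up to 127 take one byte, up to
   16383 two bytes, and so on.  This mirrors what size_of_uleb128
   computes; it is a stand-in, not the GCC routine itself.  */

static unsigned int
uleb128_size_sketch (unsigned long long value)
{
  unsigned int size = 0;
  do
    {
      value >>= 7;   /* consume one 7-bit group ...  */
      size++;        /* ... which costs one byte.  */
    }
  while (value != 0);
  return size;       /* e.g. 0..127 -> 1, 128..16383 -> 2.  */
}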
9529
9530 /* Size the debugging information associated with a given DIE. Visits the
9531 DIE's children recursively. Updates the global variable next_die_offset
9532 each time through. Uses the current value of next_die_offset to update the
9533 die_offset field in each DIE. */
9534
9535 static void
9536 calc_die_sizes (dw_die_ref die)
9537 {
9538 dw_die_ref c;
9539
9540 gcc_assert (die->die_offset == 0
9541 || (unsigned long int) die->die_offset == next_die_offset);
9542 die->die_offset = next_die_offset;
9543 next_die_offset += size_of_die (die);
9544
9545 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9546
9547 if (die->die_child != NULL)
9548 /* Count the null byte used to terminate sibling lists. */
9549 next_die_offset += 1;
9550 }
9551
9552 /* Size just the base type children at the start of the CU.
9553 This is needed because build_abbrev_table needs to size location
9554 descriptions, and sizing of type-based stack ops needs to know die_offset
9555 values for the base types. */
9556
9557 static void
9558 calc_base_type_die_sizes (void)
9559 {
9560 unsigned long die_offset = (dwarf_split_debug_info
9561 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9562 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9563 unsigned int i;
9564 dw_die_ref base_type;
9565 #if ENABLE_ASSERT_CHECKING
9566 dw_die_ref prev = comp_unit_die ()->die_child;
9567 #endif
9568
9569 die_offset += size_of_die (comp_unit_die ());
9570 for (i = 0; base_types.iterate (i, &base_type); i++)
9571 {
9572 #if ENABLE_ASSERT_CHECKING
9573 gcc_assert (base_type->die_offset == 0
9574 && prev->die_sib == base_type
9575 && base_type->die_child == NULL
9576 && base_type->die_abbrev);
9577 prev = base_type;
9578 #endif
9579 if (abbrev_opt_start
9580 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9581 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9582 base_type->die_offset = die_offset;
9583 die_offset += size_of_die (base_type);
9584 }
9585 }
9586
9587 /* Set the marks for a die and its children. We do this so
9588 that we know whether or not a reference needs to use FORM_ref_addr; only
9589 DIEs in the same CU will be marked. We used to clear out the offset
9590 and use that as the flag, but ran into ordering problems. */
9591
9592 static void
9593 mark_dies (dw_die_ref die)
9594 {
9595 dw_die_ref c;
9596
9597 gcc_assert (!die->die_mark);
9598
9599 die->die_mark = 1;
9600 FOR_EACH_CHILD (die, c, mark_dies (c));
9601 }
9602
9603 /* Clear the marks for a die and its children. */
9604
9605 static void
9606 unmark_dies (dw_die_ref die)
9607 {
9608 dw_die_ref c;
9609
9610 if (! use_debug_types)
9611 gcc_assert (die->die_mark);
9612
9613 die->die_mark = 0;
9614 FOR_EACH_CHILD (die, c, unmark_dies (c));
9615 }
9616
9617 /* Clear the marks for a die, its children and referred dies. */
9618
9619 static void
9620 unmark_all_dies (dw_die_ref die)
9621 {
9622 dw_die_ref c;
9623 dw_attr_node *a;
9624 unsigned ix;
9625
9626 if (!die->die_mark)
9627 return;
9628 die->die_mark = 0;
9629
9630 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9631
9632 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9633 if (AT_class (a) == dw_val_class_die_ref)
9634 unmark_all_dies (AT_ref (a));
9635 }
9636
9637 /* Calculate if the entry should appear in the final output file. It may be
9638 from a pruned type. */
9639
9640 static bool
9641 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9642 {
9643 /* By limiting gnu pubnames to definitions only, gold can generate a
9644 gdb index without entries for declarations, which don't include
9645 enough information to be useful. */
9646 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9647 return false;
9648
9649 if (table == pubname_table)
9650 {
9651 /* Enumerator names are part of the pubname table, but the
9652 parent DW_TAG_enumeration_type die may have been pruned.
9653 Don't output them if that is the case. */
9654 if (p->die->die_tag == DW_TAG_enumerator &&
9655 (p->die->die_parent == NULL
9656 || !p->die->die_parent->die_perennial_p))
9657 return false;
9658
9659 /* Everything else in the pubname table is included. */
9660 return true;
9661 }
9662
9663 /* The pubtypes table shouldn't include types that have been
9664 pruned. */
9665 return (p->die->die_offset != 0
9666 || !flag_eliminate_unused_debug_types);
9667 }
9668
9669 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9670 generated for the compilation unit. */
9671
9672 static unsigned long
9673 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9674 {
9675 unsigned long size;
9676 unsigned i;
9677 pubname_entry *p;
9678 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9679
9680 size = DWARF_PUBNAMES_HEADER_SIZE;
9681 FOR_EACH_VEC_ELT (*names, i, p)
9682 if (include_pubname_in_output (names, p))
9683 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9684
9685 size += DWARF_OFFSET_SIZE;
9686 return size;
9687 }
9688
9689 /* Return the size of the information in the .debug_aranges section. */
9690
9691 static unsigned long
9692 size_of_aranges (void)
9693 {
9694 unsigned long size;
9695
9696 size = DWARF_ARANGES_HEADER_SIZE;
9697
9698 /* Count the address/length pair for this compilation unit. */
9699 if (text_section_used)
9700 size += 2 * DWARF2_ADDR_SIZE;
9701 if (cold_text_section_used)
9702 size += 2 * DWARF2_ADDR_SIZE;
9703 if (have_multiple_function_sections)
9704 {
9705 unsigned fde_idx;
9706 dw_fde_ref fde;
9707
9708 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9709 {
9710 if (DECL_IGNORED_P (fde->decl))
9711 continue;
9712 if (!fde->in_std_section)
9713 size += 2 * DWARF2_ADDR_SIZE;
9714 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9715 size += 2 * DWARF2_ADDR_SIZE;
9716 }
9717 }
9718
9719 /* Count the two zero words used to terminate the address range table. */
9720 size += 2 * DWARF2_ADDR_SIZE;
9721 return size;
9722 }
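
/* Illustrative sketch, not part of GCC: the .debug_aranges size formula
   computed above, written as one expression.  Each contributing range
   costs an (address, length) pair, and the table ends with a zero pair.
   The header_size and addr_size parameters stand in for
   DWARF_ARANGES_HEADER_SIZE and DWARF2_ADDR_SIZE; they are hypothetical
   arguments here, not the real macros.  */

static unsigned long
aranges_size_sketch (unsigned long n_ranges, unsigned long header_size,
                     unsigned long addr_size)
{
  return header_size
         + n_ranges * 2 * addr_size   /* one (address, length) pair each.  */
         + 2 * addr_size;             /* terminating zero pair.  */
}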
9723 \f
9724 /* Select the encoding of an attribute value. */
9725
9726 static enum dwarf_form
9727 value_format (dw_attr_node *a)
9728 {
9729 switch (AT_class (a))
9730 {
9731 case dw_val_class_addr:
9732 /* Only very few attributes allow DW_FORM_addr. */
9733 switch (a->dw_attr)
9734 {
9735 case DW_AT_low_pc:
9736 case DW_AT_high_pc:
9737 case DW_AT_entry_pc:
9738 case DW_AT_trampoline:
9739 return (AT_index (a) == NOT_INDEXED
9740 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9741 default:
9742 break;
9743 }
9744 switch (DWARF2_ADDR_SIZE)
9745 {
9746 case 1:
9747 return DW_FORM_data1;
9748 case 2:
9749 return DW_FORM_data2;
9750 case 4:
9751 return DW_FORM_data4;
9752 case 8:
9753 return DW_FORM_data8;
9754 default:
9755 gcc_unreachable ();
9756 }
9757 case dw_val_class_loc_list:
9758 if (dwarf_split_debug_info
9759 && dwarf_version >= 5
9760 && AT_loc_list (a)->num_assigned)
9761 return DW_FORM_loclistx;
9762 /* FALLTHRU */
9763 case dw_val_class_view_list:
9764 case dw_val_class_range_list:
9765 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
9766 but in .debug_info use DW_FORM_sec_offset. The latter is shorter if we
9767 care about the sizes of .debug* sections in shared libraries and
9768 executables and don't take into account relocations that affect only
9769 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9770 table in the .debug_rnglists section. */
9771 if (dwarf_split_debug_info
9772 && dwarf_version >= 5
9773 && AT_class (a) == dw_val_class_range_list
9774 && rnglist_idx
9775 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9776 return DW_FORM_rnglistx;
9777 if (dwarf_version >= 4)
9778 return DW_FORM_sec_offset;
9779 /* FALLTHRU */
9780 case dw_val_class_vms_delta:
9781 case dw_val_class_offset:
9782 switch (DWARF_OFFSET_SIZE)
9783 {
9784 case 4:
9785 return DW_FORM_data4;
9786 case 8:
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_loc:
9792 if (dwarf_version >= 4)
9793 return DW_FORM_exprloc;
9794 switch (constant_size (size_of_locs (AT_loc (a))))
9795 {
9796 case 1:
9797 return DW_FORM_block1;
9798 case 2:
9799 return DW_FORM_block2;
9800 case 4:
9801 return DW_FORM_block4;
9802 default:
9803 gcc_unreachable ();
9804 }
9805 case dw_val_class_const:
9806 return DW_FORM_sdata;
9807 case dw_val_class_unsigned_const:
9808 switch (constant_size (AT_unsigned (a)))
9809 {
9810 case 1:
9811 return DW_FORM_data1;
9812 case 2:
9813 return DW_FORM_data2;
9814 case 4:
9815 /* In DWARF3 DW_AT_data_member_location with
9816 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9817 constant, so we need to use DW_FORM_udata if we need
9818 a large constant. */
9819 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9820 return DW_FORM_udata;
9821 return DW_FORM_data4;
9822 case 8:
9823 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9824 return DW_FORM_udata;
9825 return DW_FORM_data8;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_const_implicit:
9830 case dw_val_class_unsigned_const_implicit:
9831 case dw_val_class_file_implicit:
9832 return DW_FORM_implicit_const;
9833 case dw_val_class_const_double:
9834 switch (HOST_BITS_PER_WIDE_INT)
9835 {
9836 case 8:
9837 return DW_FORM_data2;
9838 case 16:
9839 return DW_FORM_data4;
9840 case 32:
9841 return DW_FORM_data8;
9842 case 64:
9843 if (dwarf_version >= 5)
9844 return DW_FORM_data16;
9845 /* FALLTHRU */
9846 default:
9847 return DW_FORM_block1;
9848 }
9849 case dw_val_class_wide_int:
9850 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9851 {
9852 case 8:
9853 return DW_FORM_data1;
9854 case 16:
9855 return DW_FORM_data2;
9856 case 32:
9857 return DW_FORM_data4;
9858 case 64:
9859 return DW_FORM_data8;
9860 case 128:
9861 if (dwarf_version >= 5)
9862 return DW_FORM_data16;
9863 /* FALLTHRU */
9864 default:
9865 return DW_FORM_block1;
9866 }
9867 case dw_val_class_symview:
9868 /* ??? We might use uleb128, but then we'd have to compute
9869 .debug_info offsets in the assembler. */
9870 if (symview_upper_bound <= 0xff)
9871 return DW_FORM_data1;
9872 else if (symview_upper_bound <= 0xffff)
9873 return DW_FORM_data2;
9874 else if (symview_upper_bound <= 0xffffffff)
9875 return DW_FORM_data4;
9876 else
9877 return DW_FORM_data8;
9878 case dw_val_class_vec:
9879 switch (constant_size (a->dw_attr_val.v.val_vec.length
9880 * a->dw_attr_val.v.val_vec.elt_size))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891 case dw_val_class_flag:
9892 if (dwarf_version >= 4)
9893 {
9894 /* Currently all add_AT_flag calls pass in 1 as last argument,
9895 so DW_FORM_flag_present can be used. If that ever changes,
9896 we'll need to use DW_FORM_flag and have some optimization
9897 in build_abbrev_table that will change those to
9898 DW_FORM_flag_present if it is set to 1 in all DIEs using
9899 the same abbrev entry. */
9900 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9901 return DW_FORM_flag_present;
9902 }
9903 return DW_FORM_flag;
9904 case dw_val_class_die_ref:
9905 if (AT_ref_external (a))
9906 {
9907 if (AT_ref (a)->comdat_type_p)
9908 return DW_FORM_ref_sig8;
9909 else
9910 return DW_FORM_ref_addr;
9911 }
9912 else
9913 return DW_FORM_ref;
9914 case dw_val_class_fde_ref:
9915 return DW_FORM_data;
9916 case dw_val_class_lbl_id:
9917 return (AT_index (a) == NOT_INDEXED
9918 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9919 case dw_val_class_lineptr:
9920 case dw_val_class_macptr:
9921 case dw_val_class_loclistsptr:
9922 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9923 case dw_val_class_str:
9924 return AT_string_form (a);
9925 case dw_val_class_file:
9926 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9927 {
9928 case 1:
9929 return DW_FORM_data1;
9930 case 2:
9931 return DW_FORM_data2;
9932 case 4:
9933 return DW_FORM_data4;
9934 default:
9935 gcc_unreachable ();
9936 }
9937
9938 case dw_val_class_data8:
9939 return DW_FORM_data8;
9940
9941 case dw_val_class_high_pc:
9942 switch (DWARF2_ADDR_SIZE)
9943 {
9944 case 1:
9945 return DW_FORM_data1;
9946 case 2:
9947 return DW_FORM_data2;
9948 case 4:
9949 return DW_FORM_data4;
9950 case 8:
9951 return DW_FORM_data8;
9952 default:
9953 gcc_unreachable ();
9954 }
9955
9956 case dw_val_class_discr_value:
9957 return (a->dw_attr_val.v.val_discr_value.pos
9958 ? DW_FORM_udata
9959 : DW_FORM_sdata);
9960 case dw_val_class_discr_list:
9961 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9962 {
9963 case 1:
9964 return DW_FORM_block1;
9965 case 2:
9966 return DW_FORM_block2;
9967 case 4:
9968 return DW_FORM_block4;
9969 default:
9970 gcc_unreachable ();
9971 }
9972
9973 default:
9974 gcc_unreachable ();
9975 }
9976 }
9977
9978 /* Output the encoding of an attribute value. */
9979
9980 static void
9981 output_value_format (dw_attr_node *a)
9982 {
9983 enum dwarf_form form = value_format (a);
9984
9985 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9986 }
9987
9988 /* Given a die and id, produce the appropriate abbreviations. */
9989
9990 static void
9991 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9992 {
9993 unsigned ix;
9994 dw_attr_node *a_attr;
9995
9996 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9997 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9998 dwarf_tag_name (abbrev->die_tag));
9999
10000 if (abbrev->die_child != NULL)
10001 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10002 else
10003 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10004
10005 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10006 {
10007 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10008 dwarf_attr_name (a_attr->dw_attr));
10009 output_value_format (a_attr);
10010 if (value_format (a_attr) == DW_FORM_implicit_const)
10011 {
10012 if (AT_class (a_attr) == dw_val_class_file_implicit)
10013 {
10014 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10015 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10016 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10017 }
10018 else
10019 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10020 }
10021 }
10022
10023 dw2_asm_output_data (1, 0, NULL);
10024 dw2_asm_output_data (1, 0, NULL);
10025 }
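
/* Illustrative sketch, not part of GCC: the byte layout output_die_abbrevs
   produces for one .debug_abbrev entry, modelled as an encoder into a
   byte vector.  An entry is: uleb128 abbrev code, uleb128 tag, one
   DW_CHILDREN byte, a list of (uleb128 attribute, uleb128 form) pairs,
   and a terminating (0, 0) pair.  DW_FORM_implicit_const payloads are
   omitted to keep the sketch short; names and types are hypothetical.  */

#include <utility>
#include <vector>

static void
append_uleb128_sketch (std::vector<unsigned char> &out, unsigned long value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;    /* more groups follow.  */
      out.push_back (byte);
    }
  while (value != 0);
}

static void
append_abbrev_entry_sketch (std::vector<unsigned char> &out,
                            unsigned long code, unsigned long tag,
                            bool has_children,
                            const std::vector<std::pair<unsigned long,
                                                        unsigned long> > &attrs)
{
  append_uleb128_sketch (out, code);
  append_uleb128_sketch (out, tag);
  out.push_back (has_children ? 1 : 0);  /* DW_children_yes / DW_children_no.  */
  for (unsigned int i = 0; i < attrs.size (); i++)
    {
      append_uleb128_sketch (out, attrs[i].first);   /* attribute.  */
      append_uleb128_sketch (out, attrs[i].second);  /* form.  */
    }
  append_uleb128_sketch (out, 0);  /* attribute list terminator ...  */
  append_uleb128_sketch (out, 0);  /* ... is a (0, 0) pair.  */
}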
10026
10027
10028 /* Output the .debug_abbrev section which defines the DIE abbreviation
10029 table. */
10030
10031 static void
10032 output_abbrev_section (void)
10033 {
10034 unsigned int abbrev_id;
10035 dw_die_ref abbrev;
10036
10037 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10038 if (abbrev_id != 0)
10039 output_die_abbrevs (abbrev_id, abbrev);
10040
10041 /* Terminate the table. */
10042 dw2_asm_output_data (1, 0, NULL);
10043 }
10044
10045 /* Return a new location list, given the begin and end range, and the
10046 expression. */
10047
10048 static inline dw_loc_list_ref
10049 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10050 const char *end, var_loc_view vend,
10051 const char *section)
10052 {
10053 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10054
10055 retlist->begin = begin;
10056 retlist->begin_entry = NULL;
10057 retlist->end = end;
10058 retlist->expr = expr;
10059 retlist->section = section;
10060 retlist->vbegin = vbegin;
10061 retlist->vend = vend;
10062
10063 return retlist;
10064 }
10065
10066 /* Return true iff there's any nonzero view number in the loc list.
10067
10068 ??? When views are not enabled, we'll often extend a single range
10069 to the entire function, so that we emit a single location
10070 expression rather than a location list. With views, even with a
10071 single range, we'll output a list if start or end have a nonzero
10072 view. If we change this, we may want to stop splitting a single
10073 range in dw_loc_list just because of a nonzero view, even if it
10074 straddles hot/cold partitions. */
10075
10076 static bool
10077 loc_list_has_views (dw_loc_list_ref list)
10078 {
10079 if (!debug_variable_location_views)
10080 return false;
10081
10082 for (dw_loc_list_ref loc = list;
10083 loc != NULL; loc = loc->dw_loc_next)
10084 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10085 return true;
10086
10087 return false;
10088 }
10089
10090 /* Generate a new internal symbol for this location list node, if it
10091 hasn't got one yet. */
10092
10093 static inline void
10094 gen_llsym (dw_loc_list_ref list)
10095 {
10096 gcc_assert (!list->ll_symbol);
10097 list->ll_symbol = gen_internal_sym ("LLST");
10098
10099 if (!loc_list_has_views (list))
10100 return;
10101
10102 if (dwarf2out_locviews_in_attribute ())
10103 {
10104 /* Use the same label_num for the view list. */
10105 label_num--;
10106 list->vl_symbol = gen_internal_sym ("LVUS");
10107 }
10108 else
10109 list->vl_symbol = list->ll_symbol;
10110 }
10111
10112 /* Generate a symbol for the list, but only if we really want to emit
10113 it as a list. */
10114
10115 static inline void
10116 maybe_gen_llsym (dw_loc_list_ref list)
10117 {
10118 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10119 return;
10120
10121 gen_llsym (list);
10122 }
10123
10124 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10125 NULL, don't consider size of the location expression. If we're not
10126 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10127 representation in *SIZEP. */
10128
10129 static bool
10130 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10131 {
10132 /* Don't output an entry that starts and ends at the same address. */
10133 if (strcmp (curr->begin, curr->end) == 0
10134 && curr->vbegin == curr->vend && !curr->force)
10135 return true;
10136
10137 if (!sizep)
10138 return false;
10139
10140 unsigned long size = size_of_locs (curr->expr);
10141
10142 /* If the expression is too large, drop it on the floor. We could
10143 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10144 in the expression, but >= 64KB expressions for a single value
10145 in a single range are unlikely to be very useful. */
10146 if (dwarf_version < 5 && size > 0xffff)
10147 return true;
10148
10149 *sizep = size;
10150
10151 return false;
10152 }
10153
10154 /* Output a view pair loclist entry for CURR, if it requires one. */
10155
10156 static void
10157 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10158 {
10159 if (!dwarf2out_locviews_in_loclist ())
10160 return;
10161
10162 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10163 return;
10164
10165 #ifdef DW_LLE_view_pair
10166 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10167
10168 if (dwarf2out_as_locview_support)
10169 {
10170 if (ZERO_VIEW_P (curr->vbegin))
10171 dw2_asm_output_data_uleb128 (0, "Location view begin");
10172 else
10173 {
10174 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10176 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10177 }
10178
10179 if (ZERO_VIEW_P (curr->vend))
10180 dw2_asm_output_data_uleb128 (0, "Location view end");
10181 else
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10185 dw2_asm_output_symname_uleb128 (label, "Location view end");
10186 }
10187 }
10188 else
10189 {
10190 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10191 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10192 }
10193 #endif /* DW_LLE_view_pair */
10194
10195 return;
10196 }
10197
10198 /* Output the location list given to us. */
10199
10200 static void
10201 output_loc_list (dw_loc_list_ref list_head)
10202 {
10203 int vcount = 0, lcount = 0;
10204
10205 if (list_head->emitted)
10206 return;
10207 list_head->emitted = true;
10208
10209 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10210 {
10211 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10212
10213 for (dw_loc_list_ref curr = list_head; curr != NULL;
10214 curr = curr->dw_loc_next)
10215 {
10216 unsigned long size;
10217
10218 if (skip_loc_list_entry (curr, &size))
10219 continue;
10220
10221 vcount++;
10222
10223 /* ?? dwarf_split_debug_info? */
10224 if (dwarf2out_as_locview_support)
10225 {
10226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10227
10228 if (!ZERO_VIEW_P (curr->vbegin))
10229 {
10230 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10231 dw2_asm_output_symname_uleb128 (label,
10232 "View list begin (%s)",
10233 list_head->vl_symbol);
10234 }
10235 else
10236 dw2_asm_output_data_uleb128 (0,
10237 "View list begin (%s)",
10238 list_head->vl_symbol);
10239
10240 if (!ZERO_VIEW_P (curr->vend))
10241 {
10242 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10243 dw2_asm_output_symname_uleb128 (label,
10244 "View list end (%s)",
10245 list_head->vl_symbol);
10246 }
10247 else
10248 dw2_asm_output_data_uleb128 (0,
10249 "View list end (%s)",
10250 list_head->vl_symbol);
10251 }
10252 else
10253 {
10254 dw2_asm_output_data_uleb128 (curr->vbegin,
10255 "View list begin (%s)",
10256 list_head->vl_symbol);
10257 dw2_asm_output_data_uleb128 (curr->vend,
10258 "View list end (%s)",
10259 list_head->vl_symbol);
10260 }
10261 }
10262 }
10263
10264 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10265
10266 const char *last_section = NULL;
10267 const char *base_label = NULL;
10268
10269 /* Walk the location list, and output each range + expression. */
10270 for (dw_loc_list_ref curr = list_head; curr != NULL;
10271 curr = curr->dw_loc_next)
10272 {
10273 unsigned long size;
10274
10275 /* Skip this entry? If we skip it here, we must skip it in the
10276 view list above as well. */
10277 if (skip_loc_list_entry (curr, &size))
10278 continue;
10279
10280 lcount++;
10281
10282 if (dwarf_version >= 5)
10283 {
10284 if (dwarf_split_debug_info)
10285 {
10286 dwarf2out_maybe_output_loclist_view_pair (curr);
10287 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10288 uleb128 index into .debug_addr and a uleb128 length. */
10289 dw2_asm_output_data (1, DW_LLE_startx_length,
10290 "DW_LLE_startx_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10293 "Location list range start index "
10294 "(%s)", curr->begin);
10295 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10296 For that case we probably need to emit DW_LLE_startx_endx,
10297 but we'd need 2 .debug_addr entries rather than just one. */
10298 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10299 "Location list length (%s)",
10300 list_head->ll_symbol);
10301 }
10302 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10303 {
10304 dwarf2out_maybe_output_loclist_view_pair (curr);
10305 /* If all code is in .text section, the base address is
10306 already provided by the CU attributes. Use
10307 DW_LLE_offset_pair where both addresses are uleb128 encoded
10308 offsets against that base. */
10309 dw2_asm_output_data (1, DW_LLE_offset_pair,
10310 "DW_LLE_offset_pair (%s)",
10311 list_head->ll_symbol);
10312 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10313 "Location list begin address (%s)",
10314 list_head->ll_symbol);
10315 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10316 "Location list end address (%s)",
10317 list_head->ll_symbol);
10318 }
10319 else if (HAVE_AS_LEB128)
10320 {
10321 /* Otherwise, find out how many consecutive entries could share
10322 the same base entry. If just one, emit DW_LLE_start_length,
10323 otherwise emit DW_LLE_base_address for the base address
10324 followed by a series of DW_LLE_offset_pair. */
10325 if (last_section == NULL || curr->section != last_section)
10326 {
10327 dw_loc_list_ref curr2;
10328 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10329 curr2 = curr2->dw_loc_next)
10330 {
10331 if (strcmp (curr2->begin, curr2->end) == 0
10332 && !curr2->force)
10333 continue;
10334 break;
10335 }
10336 if (curr2 == NULL || curr->section != curr2->section)
10337 last_section = NULL;
10338 else
10339 {
10340 last_section = curr->section;
10341 base_label = curr->begin;
10342 dw2_asm_output_data (1, DW_LLE_base_address,
10343 "DW_LLE_base_address (%s)",
10344 list_head->ll_symbol);
10345 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10346 "Base address (%s)",
10347 list_head->ll_symbol);
10348 }
10349 }
10350 /* Only one entry with the same base address. Use
10351 DW_LLE_start_length with absolute address and uleb128
10352 length. */
10353 if (last_section == NULL)
10354 {
10355 dwarf2out_maybe_output_loclist_view_pair (curr);
10356 dw2_asm_output_data (1, DW_LLE_start_length,
10357 "DW_LLE_start_length (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10360 "Location list begin address (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10363 "Location list length "
10364 "(%s)", list_head->ll_symbol);
10365 }
10366 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10367 DW_LLE_base_address. */
10368 else
10369 {
10370 dwarf2out_maybe_output_loclist_view_pair (curr);
10371 dw2_asm_output_data (1, DW_LLE_offset_pair,
10372 "DW_LLE_offset_pair (%s)",
10373 list_head->ll_symbol);
10374 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10375 "Location list begin address "
10376 "(%s)", list_head->ll_symbol);
10377 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10378 "Location list end address "
10379 "(%s)", list_head->ll_symbol);
10380 }
10381 }
10382 /* The assembler does not support the .uleb128 directive. Emit
10383 DW_LLE_start_end with a pair of absolute addresses. */
10384 else
10385 {
10386 dwarf2out_maybe_output_loclist_view_pair (curr);
10387 dw2_asm_output_data (1, DW_LLE_start_end,
10388 "DW_LLE_start_end (%s)",
10389 list_head->ll_symbol);
10390 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10391 "Location list begin address (%s)",
10392 list_head->ll_symbol);
10393 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10394 "Location list end address (%s)",
10395 list_head->ll_symbol);
10396 }
10397 }
10398 else if (dwarf_split_debug_info)
10399 {
10400 /* For -gsplit-dwarf -gdwarf-{2,3,4}, emit an index into .debug_addr
10401 and a 4-byte length. */
10402 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10403 "Location list start/length entry (%s)",
10404 list_head->ll_symbol);
10405 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10406 "Location list range start index (%s)",
10407 curr->begin);
10408 /* The length field is 4 bytes. If we ever need to support
10409 an 8-byte length, we can add a new DW_LLE code or fall back
10410 to DW_LLE_GNU_start_end_entry. */
10411 dw2_asm_output_delta (4, curr->end, curr->begin,
10412 "Location list range length (%s)",
10413 list_head->ll_symbol);
10414 }
10415 else if (!have_multiple_function_sections)
10416 {
10417 /* Pair of relative addresses against start of text section. */
10418 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10419 "Location list begin address (%s)",
10420 list_head->ll_symbol);
10421 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10422 "Location list end address (%s)",
10423 list_head->ll_symbol);
10424 }
10425 else
10426 {
10427 /* Pair of absolute addresses. */
10428 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10429 "Location list begin address (%s)",
10430 list_head->ll_symbol);
10431 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10432 "Location list end address (%s)",
10433 list_head->ll_symbol);
10434 }
10435
10436 /* Output the block length for this list of location operations. */
10437 if (dwarf_version >= 5)
10438 dw2_asm_output_data_uleb128 (size, "Location expression size");
10439 else
10440 {
10441 gcc_assert (size <= 0xffff);
10442 dw2_asm_output_data (2, size, "Location expression size");
10443 }
10444
10445 output_loc_sequence (curr->expr, -1);
10446 }
10447
10448 /* And finally list termination. */
10449 if (dwarf_version >= 5)
10450 dw2_asm_output_data (1, DW_LLE_end_of_list,
10451 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10452 else if (dwarf_split_debug_info)
10453 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10454 "Location list terminator (%s)",
10455 list_head->ll_symbol);
10456 else
10457 {
10458 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10459 "Location list terminator begin (%s)",
10460 list_head->ll_symbol);
10461 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10462 "Location list terminator end (%s)",
10463 list_head->ll_symbol);
10464 }
10465
10466 gcc_assert (!list_head->vl_symbol
10467 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10468 }
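
/* Illustrative sketch, not part of GCC: the shape of one DWARF 5
   DW_LLE_offset_pair entry as emitted above when all code sits in a
   single text section.  The entry is: the DW_LLE_offset_pair opcode
   byte, two uleb128 offsets of the range's begin and end relative to
   the base address the CU attributes provide, a uleb128 size of the
   location expression, and the expression bytes themselves.  The
   encoder below works on plain integers instead of the assembler
   labels the real code emits, and its names are hypothetical.  */

#include <vector>

static void
append_uleb128_lle_sketch (std::vector<unsigned char> &out,
                           unsigned long value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;    /* more groups follow.  */
      out.push_back (byte);
    }
  while (value != 0);
}

static void
append_lle_offset_pair_sketch (std::vector<unsigned char> &out,
                               unsigned long begin_offset,
                               unsigned long end_offset,
                               const std::vector<unsigned char> &expr)
{
  const unsigned char lle_offset_pair = 0x04;  /* DW_LLE_offset_pair.  */

  out.push_back (lle_offset_pair);
  append_uleb128_lle_sketch (out, begin_offset);
  append_uleb128_lle_sketch (out, end_offset);
  append_uleb128_lle_sketch (out, expr.size ());  /* expression size.  */
  out.insert (out.end (), expr.begin (), expr.end ());
}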
10469
10470 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10471 section. Emit a relocated reference if val_entry is NULL, otherwise,
10472 emit an indirect reference. */
10473
10474 static void
10475 output_range_list_offset (dw_attr_node *a)
10476 {
10477 const char *name = dwarf_attr_name (a->dw_attr);
10478
10479 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10480 {
10481 if (dwarf_version >= 5)
10482 {
10483 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10484 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10485 debug_ranges_section, "%s", name);
10486 }
10487 else
10488 {
10489 char *p = strchr (ranges_section_label, '\0');
10490 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10491 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10492 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10493 debug_ranges_section, "%s", name);
10494 *p = '\0';
10495 }
10496 }
10497 else if (dwarf_version >= 5)
10498 {
10499 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10500 gcc_assert (rnglist_idx);
10501 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10502 }
10503 else
10504 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10505 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10506 "%s (offset from %s)", name, ranges_section_label);
10507 }
10508
10509 /* Output the offset into the debug_loc section. */
10510
10511 static void
10512 output_loc_list_offset (dw_attr_node *a)
10513 {
10514 char *sym = AT_loc_list (a)->ll_symbol;
10515
10516 gcc_assert (sym);
10517 if (!dwarf_split_debug_info)
10518 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10519 "%s", dwarf_attr_name (a->dw_attr));
10520 else if (dwarf_version >= 5)
10521 {
10522 gcc_assert (AT_loc_list (a)->num_assigned);
10523 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10524 dwarf_attr_name (a->dw_attr),
10525 sym);
10526 }
10527 else
10528 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10529 "%s", dwarf_attr_name (a->dw_attr));
10530 }
10531
10532 /* Output the offset of a view list into the debug_loc section. */
10533
10534 static void
10535 output_view_list_offset (dw_attr_node *a)
10536 {
10537 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10538
10539 gcc_assert (sym);
10540 if (dwarf_split_debug_info)
10541 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10542 "%s", dwarf_attr_name (a->dw_attr));
10543 else
10544 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10545 "%s", dwarf_attr_name (a->dw_attr));
10546 }
10547
10548 /* Output an attribute's index or value appropriately. */
10549
10550 static void
10551 output_attr_index_or_value (dw_attr_node *a)
10552 {
10553 const char *name = dwarf_attr_name (a->dw_attr);
10554
10555 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10556 {
10557 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10558 return;
10559 }
10560 switch (AT_class (a))
10561 {
10562 case dw_val_class_addr:
10563 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10564 break;
10565 case dw_val_class_high_pc:
10566 case dw_val_class_lbl_id:
10567 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10568 break;
10569 default:
10570 gcc_unreachable ();
10571 }
10572 }
10573
10574 /* Output a type signature. */
10575
10576 static inline void
10577 output_signature (const char *sig, const char *name)
10578 {
10579 int i;
10580
10581 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10582 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10583 }
10584
10585 /* Output a discriminant value. */
10586
10587 static inline void
10588 output_discr_value (dw_discr_value *discr_value, const char *name)
10589 {
10590 if (discr_value->pos)
10591 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10592 else
10593 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10594 }
10595
10596 /* Output the DIE and its attributes. Called recursively to generate
10597 the definitions of each child DIE. */
10598
10599 static void
10600 output_die (dw_die_ref die)
10601 {
10602 dw_attr_node *a;
10603 dw_die_ref c;
10604 unsigned long size;
10605 unsigned ix;
10606
10607 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10608 (unsigned long)die->die_offset,
10609 dwarf_tag_name (die->die_tag));
10610
10611 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10612 {
10613 const char *name = dwarf_attr_name (a->dw_attr);
10614
10615 switch (AT_class (a))
10616 {
10617 case dw_val_class_addr:
10618 output_attr_index_or_value (a);
10619 break;
10620
10621 case dw_val_class_offset:
10622 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10623 "%s", name);
10624 break;
10625
10626 case dw_val_class_range_list:
10627 output_range_list_offset (a);
10628 break;
10629
10630 case dw_val_class_loc:
10631 size = size_of_locs (AT_loc (a));
10632
10633 /* Output the block length for this list of location operations. */
10634 if (dwarf_version >= 4)
10635 dw2_asm_output_data_uleb128 (size, "%s", name);
10636 else
10637 dw2_asm_output_data (constant_size (size), size, "%s", name);
10638
10639 output_loc_sequence (AT_loc (a), -1);
10640 break;
10641
10642 case dw_val_class_const:
10643 /* ??? It would be slightly more efficient to use a scheme like the
10644 one used for unsigned constants below, but gdb 4.x does not sign
10645 extend. Gdb 5.x does sign extend. */
10646 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10647 break;
10648
10649 case dw_val_class_unsigned_const:
10650 {
10651 int csize = constant_size (AT_unsigned (a));
10652 if (dwarf_version == 3
10653 && a->dw_attr == DW_AT_data_member_location
10654 && csize >= 4)
10655 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10656 else
10657 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10658 }
10659 break;
10660
10661 case dw_val_class_symview:
10662 {
10663 int vsize;
10664 if (symview_upper_bound <= 0xff)
10665 vsize = 1;
10666 else if (symview_upper_bound <= 0xffff)
10667 vsize = 2;
10668 else if (symview_upper_bound <= 0xffffffff)
10669 vsize = 4;
10670 else
10671 vsize = 8;
10672 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10673 "%s", name);
10674 }
10675 break;
10676
10677 case dw_val_class_const_implicit:
10678 if (flag_debug_asm)
10679 fprintf (asm_out_file, "\t\t\t%s %s ("
10680 HOST_WIDE_INT_PRINT_DEC ")\n",
10681 ASM_COMMENT_START, name, AT_int (a));
10682 break;
10683
10684 case dw_val_class_unsigned_const_implicit:
10685 if (flag_debug_asm)
10686 fprintf (asm_out_file, "\t\t\t%s %s ("
10687 HOST_WIDE_INT_PRINT_HEX ")\n",
10688 ASM_COMMENT_START, name, AT_unsigned (a));
10689 break;
10690
10691 case dw_val_class_const_double:
10692 {
10693 unsigned HOST_WIDE_INT first, second;
10694
10695 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10696 dw2_asm_output_data (1,
10697 HOST_BITS_PER_DOUBLE_INT
10698 / HOST_BITS_PER_CHAR,
10699 NULL);
10700
10701 if (WORDS_BIG_ENDIAN)
10702 {
10703 first = a->dw_attr_val.v.val_double.high;
10704 second = a->dw_attr_val.v.val_double.low;
10705 }
10706 else
10707 {
10708 first = a->dw_attr_val.v.val_double.low;
10709 second = a->dw_attr_val.v.val_double.high;
10710 }
10711
10712 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10713 first, "%s", name);
10714 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10715 second, NULL);
10716 }
10717 break;
10718
10719 case dw_val_class_wide_int:
10720 {
10721 int i;
10722 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10723 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10724 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10725 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10726 * l, NULL);
10727
10728 if (WORDS_BIG_ENDIAN)
10729 for (i = len - 1; i >= 0; --i)
10730 {
10731 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10732 "%s", name);
10733 name = "";
10734 }
10735 else
10736 for (i = 0; i < len; ++i)
10737 {
10738 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10739 "%s", name);
10740 name = "";
10741 }
10742 }
10743 break;
10744
10745 case dw_val_class_vec:
10746 {
10747 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10748 unsigned int len = a->dw_attr_val.v.val_vec.length;
10749 unsigned int i;
10750 unsigned char *p;
10751
10752 dw2_asm_output_data (constant_size (len * elt_size),
10753 len * elt_size, "%s", name);
10754 if (elt_size > sizeof (HOST_WIDE_INT))
10755 {
10756 elt_size /= 2;
10757 len *= 2;
10758 }
10759 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10760 i < len;
10761 i++, p += elt_size)
10762 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10763 "fp or vector constant word %u", i);
10764 break;
10765 }
10766
10767 case dw_val_class_flag:
10768 if (dwarf_version >= 4)
10769 {
10770 /* Currently all add_AT_flag calls pass in 1 as last argument,
10771 so DW_FORM_flag_present can be used. If that ever changes,
10772 we'll need to use DW_FORM_flag and have some optimization
10773 in build_abbrev_table that will change those to
10774 DW_FORM_flag_present if it is set to 1 in all DIEs using
10775 the same abbrev entry. */
10776 gcc_assert (AT_flag (a) == 1);
10777 if (flag_debug_asm)
10778 fprintf (asm_out_file, "\t\t\t%s %s\n",
10779 ASM_COMMENT_START, name);
10780 break;
10781 }
10782 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10783 break;
10784
10785 case dw_val_class_loc_list:
10786 output_loc_list_offset (a);
10787 break;
10788
10789 case dw_val_class_view_list:
10790 output_view_list_offset (a);
10791 break;
10792
10793 case dw_val_class_die_ref:
10794 if (AT_ref_external (a))
10795 {
10796 if (AT_ref (a)->comdat_type_p)
10797 {
10798 comdat_type_node *type_node
10799 = AT_ref (a)->die_id.die_type_node;
10800
10801 gcc_assert (type_node);
10802 output_signature (type_node->signature, name);
10803 }
10804 else
10805 {
10806 const char *sym = AT_ref (a)->die_id.die_symbol;
10807 int size;
10808
10809 gcc_assert (sym);
10810 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10811 length, whereas in DWARF3 it's always sized as an
10812 offset. */
10813 if (dwarf_version == 2)
10814 size = DWARF2_ADDR_SIZE;
10815 else
10816 size = DWARF_OFFSET_SIZE;
10817 		      /* ??? We cannot unconditionally output die_offset if
10818 			 non-zero - others might create references to those
10819 			 DIEs via symbols.
10820 			 And we do not clear its DIE offset after outputting it
10821 			 (the label refers to the actual DIE, not to the DWARF
10822 			 CU header, which is what label + offset would have to
10823 			 be relative to for that to be correct).
10824 			 ??? This is the reason for the with_offset flag.  */
10825 if (AT_ref (a)->with_offset)
10826 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10827 debug_info_section, "%s", name);
10828 else
10829 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10830 name);
10831 }
10832 }
10833 else
10834 {
10835 gcc_assert (AT_ref (a)->die_offset);
10836 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10837 "%s", name);
10838 }
10839 break;
10840
10841 case dw_val_class_fde_ref:
10842 {
10843 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10844
10845 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10846 a->dw_attr_val.v.val_fde_index * 2);
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10848 "%s", name);
10849 }
10850 break;
10851
10852 case dw_val_class_vms_delta:
10853 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10854 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10855 AT_vms_delta2 (a), AT_vms_delta1 (a),
10856 "%s", name);
10857 #else
10858 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10859 AT_vms_delta2 (a), AT_vms_delta1 (a),
10860 "%s", name);
10861 #endif
10862 break;
10863
10864 case dw_val_class_lbl_id:
10865 output_attr_index_or_value (a);
10866 break;
10867
10868 case dw_val_class_lineptr:
10869 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10870 debug_line_section, "%s", name);
10871 break;
10872
10873 case dw_val_class_macptr:
10874 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10875 debug_macinfo_section, "%s", name);
10876 break;
10877
10878 case dw_val_class_loclistsptr:
10879 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10880 debug_loc_section, "%s", name);
10881 break;
10882
10883 case dw_val_class_str:
10884 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10885 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10886 a->dw_attr_val.v.val_str->label,
10887 debug_str_section,
10888 "%s: \"%s\"", name, AT_string (a));
10889 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10890 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10891 a->dw_attr_val.v.val_str->label,
10892 debug_line_str_section,
10893 "%s: \"%s\"", name, AT_string (a));
10894 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10895 dw2_asm_output_data_uleb128 (AT_index (a),
10896 "%s: \"%s\"", name, AT_string (a));
10897 else
10898 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10899 break;
10900
10901 case dw_val_class_file:
10902 {
10903 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10904
10905 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10906 a->dw_attr_val.v.val_file->filename);
10907 break;
10908 }
10909
10910 case dw_val_class_file_implicit:
10911 if (flag_debug_asm)
10912 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10913 ASM_COMMENT_START, name,
10914 maybe_emit_file (a->dw_attr_val.v.val_file),
10915 a->dw_attr_val.v.val_file->filename);
10916 break;
10917
10918 case dw_val_class_data8:
10919 {
10920 int i;
10921
10922 for (i = 0; i < 8; i++)
10923 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10924 i == 0 ? "%s" : NULL, name);
10925 break;
10926 }
10927
10928 case dw_val_class_high_pc:
10929 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10930 get_AT_low_pc (die), "DW_AT_high_pc");
10931 break;
10932
10933 case dw_val_class_discr_value:
10934 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10935 break;
10936
10937 case dw_val_class_discr_list:
10938 {
10939 dw_discr_list_ref list = AT_discr_list (a);
10940 const int size = size_of_discr_list (list);
10941
10942 /* This is a block, so output its length first. */
10943 dw2_asm_output_data (constant_size (size), size,
10944 "%s: block size", name);
10945
10946 for (; list != NULL; list = list->dw_discr_next)
10947 {
10948 /* One byte for the discriminant value descriptor, and then as
10949 many LEB128 numbers as required. */
10950 if (list->dw_discr_range)
10951 dw2_asm_output_data (1, DW_DSC_range,
10952 "%s: DW_DSC_range", name);
10953 else
10954 dw2_asm_output_data (1, DW_DSC_label,
10955 "%s: DW_DSC_label", name);
10956
10957 output_discr_value (&list->dw_discr_lower_bound, name);
10958 if (list->dw_discr_range)
10959 output_discr_value (&list->dw_discr_upper_bound, name);
10960 }
10961 break;
10962 }
10963
10964 default:
10965 gcc_unreachable ();
10966 }
10967 }
10968
10969 FOR_EACH_CHILD (die, c, output_die (c));
10970
10971 /* Add null byte to terminate sibling list. */
10972 if (die->die_child != NULL)
10973 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10974 (unsigned long) die->die_offset);
10975 }
10976
10977 /* Output the dwarf version number. */
10978
10979 static void
10980 output_dwarf_version ()
10981 {
10982 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10983 views in loclist. That will change eventually. */
10984 if (dwarf_version == 6)
10985 {
10986 static bool once;
10987 if (!once)
10988 {
10989 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10990 "incompatibilities");
10991 once = true;
10992 }
10993 dw2_asm_output_data (2, 5, "DWARF version number");
10994 }
10995 else
10996 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10997 }
10998
10999 /* Output the compilation unit that appears at the beginning of the
11000 .debug_info section, and precedes the DIE descriptions. */
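/* Roughly, for 32-bit DWARF the header emitted below is: a 4-byte unit
   length, a 2-byte version, then for DWARF 5 a 1-byte unit type and a
   1-byte address size followed by a 4-byte .debug_abbrev offset, or for
   earlier versions the 4-byte .debug_abbrev offset followed by the 1-byte
   address size.  (64-bit DWARF prepends the 0xffffffff escape and widens
   the offsets to 8 bytes.)  */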
11001
11002 static void
11003 output_compilation_unit_header (enum dwarf_unit_type ut)
11004 {
11005 if (!XCOFF_DEBUGGING_INFO)
11006 {
11007 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11008 dw2_asm_output_data (4, 0xffffffff,
11009 "Initial length escape value indicating 64-bit DWARF extension");
11010 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11011 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11012 "Length of Compilation Unit Info");
11013 }
11014
11015 output_dwarf_version ();
11016 if (dwarf_version >= 5)
11017 {
11018 const char *name;
11019 switch (ut)
11020 {
11021 case DW_UT_compile: name = "DW_UT_compile"; break;
11022 case DW_UT_type: name = "DW_UT_type"; break;
11023 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11024 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11025 default: gcc_unreachable ();
11026 }
11027 dw2_asm_output_data (1, ut, "%s", name);
11028 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11029 }
11030 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11031 debug_abbrev_section,
11032 "Offset Into Abbrev. Section");
11033 if (dwarf_version < 5)
11034 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11035 }
11036
11037 /* Output the compilation unit DIE and its children. */
11038
11039 static void
11040 output_comp_unit (dw_die_ref die, int output_if_empty,
11041 const unsigned char *dwo_id)
11042 {
11043 const char *secname, *oldsym;
11044 char *tmp;
11045
11046   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11047 if (!output_if_empty && die->die_child == NULL)
11048 return;
11049
11050 /* Even if there are no children of this DIE, we must output the information
11051 about the compilation unit. Otherwise, on an empty translation unit, we
11052 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11053 will then complain when examining the file. First mark all the DIEs in
11054 this CU so we know which get local refs. */
11055 mark_dies (die);
11056
11057 external_ref_hash_type *extern_map = optimize_external_refs (die);
11058
11059   /* For now, optimize only the main CU; to optimize the rest we'd need
11060      to see all of them earlier.  Leave the rest for post-linking
11061 tools like DWZ. */
11062 if (die == comp_unit_die ())
11063 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11064
11065 build_abbrev_table (die, extern_map);
11066
11067 optimize_abbrev_table ();
11068
11069 delete extern_map;
11070
11071 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11072 next_die_offset = (dwo_id
11073 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11074 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11075 calc_die_sizes (die);
11076
11077 oldsym = die->die_id.die_symbol;
11078 if (oldsym && die->comdat_type_p)
11079 {
11080 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11081
11082 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11083 secname = tmp;
11084 die->die_id.die_symbol = NULL;
11085 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11086 }
11087 else
11088 {
11089 switch_to_section (debug_info_section);
11090 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11091 info_section_emitted = true;
11092 }
11093
11094   /* For LTO cross unit DIE refs we want a symbol at the start of the
11095 debuginfo section, not on the CU DIE. */
11096 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11097 {
11098 /* ??? No way to get visibility assembled without a decl. */
11099 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11100 get_identifier (oldsym), char_type_node);
11101 TREE_PUBLIC (decl) = true;
11102 TREE_STATIC (decl) = true;
11103 DECL_ARTIFICIAL (decl) = true;
11104 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11105 DECL_VISIBILITY_SPECIFIED (decl) = true;
11106 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11107 #ifdef ASM_WEAKEN_LABEL
11108 /* We prefer a .weak because that handles duplicates from duplicate
11109 archive members in a graceful way. */
11110 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11111 #else
11112 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11113 #endif
11114 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11115 }
11116
11117 /* Output debugging information. */
11118 output_compilation_unit_header (dwo_id
11119 ? DW_UT_split_compile : DW_UT_compile);
11120 if (dwarf_version >= 5)
11121 {
11122 if (dwo_id != NULL)
11123 for (int i = 0; i < 8; i++)
11124 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11125 }
11126 output_die (die);
11127
11128 /* Leave the marks on the main CU, so we can check them in
11129 output_pubnames. */
11130 if (oldsym)
11131 {
11132 unmark_dies (die);
11133 die->die_id.die_symbol = oldsym;
11134 }
11135 }
11136
11137 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11138 and .debug_pubtypes. This is configured per-target, but can be
11139 overridden by the -gpubnames or -gno-pubnames options. */
11140
11141 static inline bool
11142 want_pubnames (void)
11143 {
11144 if (debug_info_level <= DINFO_LEVEL_TERSE
11145 /* Names and types go to the early debug part only. */
11146 || in_lto_p)
11147 return false;
11148 if (debug_generate_pub_sections != -1)
11149 return debug_generate_pub_sections;
11150 return targetm.want_debug_pub_sections;
11151 }
11152
11153 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11154
11155 static void
11156 add_AT_pubnames (dw_die_ref die)
11157 {
11158 if (want_pubnames ())
11159 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11160 }
11161
11162 /* Add a string attribute value to a skeleton DIE. */
11163
11164 static inline void
11165 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11166 const char *str)
11167 {
11168 dw_attr_node attr;
11169 struct indirect_string_node *node;
11170
11171 if (! skeleton_debug_str_hash)
11172 skeleton_debug_str_hash
11173 = hash_table<indirect_string_hasher>::create_ggc (10);
11174
11175 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11176 find_string_form (node);
11177 if (node->form == dwarf_FORM (DW_FORM_strx))
11178 node->form = DW_FORM_strp;
11179
11180 attr.dw_attr = attr_kind;
11181 attr.dw_attr_val.val_class = dw_val_class_str;
11182 attr.dw_attr_val.val_entry = NULL;
11183 attr.dw_attr_val.v.val_str = node;
11184 add_dwarf_attr (die, &attr);
11185 }
11186
11187 /* Helper function to generate top-level dies for skeleton debug_info and
11188 debug_types. */
11189
11190 static void
11191 add_top_level_skeleton_die_attrs (dw_die_ref die)
11192 {
11193 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11194 const char *comp_dir = comp_dir_string ();
11195
11196 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11197 if (comp_dir != NULL)
11198 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11199 add_AT_pubnames (die);
11200 if (addr_index_table != NULL && addr_index_table->size () > 0)
11201 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11202 }
11203
11204 /* Output skeleton debug sections that point to the dwo file. */
11205
11206 static void
11207 output_skeleton_debug_sections (dw_die_ref comp_unit,
11208 const unsigned char *dwo_id)
11209 {
11210 /* These attributes will be found in the full debug_info section. */
11211 remove_AT (comp_unit, DW_AT_producer);
11212 remove_AT (comp_unit, DW_AT_language);
11213
11214 switch_to_section (debug_skeleton_info_section);
11215 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11216
11217 /* Produce the skeleton compilation-unit header. This one differs enough from
11218      a normal CU header that it's better not to call
11219      output_compilation_unit_header.  */
11220 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11221 dw2_asm_output_data (4, 0xffffffff,
11222 "Initial length escape value indicating 64-bit "
11223 "DWARF extension");
11224
11225 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11226 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11227 - DWARF_INITIAL_LENGTH_SIZE
11228 + size_of_die (comp_unit),
11229 "Length of Compilation Unit Info");
11230 output_dwarf_version ();
11231 if (dwarf_version >= 5)
11232 {
11233 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11234 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11235 }
11236 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11237 debug_skeleton_abbrev_section,
11238 "Offset Into Abbrev. Section");
11239 if (dwarf_version < 5)
11240 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11241 else
11242 for (int i = 0; i < 8; i++)
11243 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11244
11245 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11246 output_die (comp_unit);
11247
11248 /* Build the skeleton debug_abbrev section. */
11249 switch_to_section (debug_skeleton_abbrev_section);
11250 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11251
11252 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11253
11254 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11255 }
11256
11257 /* Output a comdat type unit DIE and its children. */
11258
11259 static void
11260 output_comdat_type_unit (comdat_type_node *node,
11261 bool early_lto_debug ATTRIBUTE_UNUSED)
11262 {
11263 const char *secname;
11264 char *tmp;
11265 int i;
11266 #if defined (OBJECT_FORMAT_ELF)
11267 tree comdat_key;
11268 #endif
11269
11270 /* First mark all the DIEs in this CU so we know which get local refs. */
11271 mark_dies (node->root_die);
11272
11273 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11274
11275 build_abbrev_table (node->root_die, extern_map);
11276
11277 delete extern_map;
11278 extern_map = NULL;
11279
11280 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11281 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11282 calc_die_sizes (node->root_die);
11283
11284 #if defined (OBJECT_FORMAT_ELF)
11285 if (dwarf_version >= 5)
11286 {
11287 if (!dwarf_split_debug_info)
11288 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11289 else
11290 secname = (early_lto_debug
11291 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11292 }
11293 else if (!dwarf_split_debug_info)
11294 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11295 else
11296 secname = (early_lto_debug
11297 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11298
11299 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11300 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11301 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11302 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11303 comdat_key = get_identifier (tmp);
11304 targetm.asm_out.named_section (secname,
11305 SECTION_DEBUG | SECTION_LINKONCE,
11306 comdat_key);
11307 #else
11308 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11309 sprintf (tmp, (dwarf_version >= 5
11310 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11311 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11312 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11313 secname = tmp;
11314 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11315 #endif
11316
11317 /* Output debugging information. */
11318 output_compilation_unit_header (dwarf_split_debug_info
11319 ? DW_UT_split_type : DW_UT_type);
11320 output_signature (node->signature, "Type Signature");
11321 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11322 "Offset to Type DIE");
11323 output_die (node->root_die);
11324
11325 unmark_dies (node->root_die);
11326 }
11327
11328 /* Return the DWARF2/3 pubname associated with a decl. */
11329
11330 static const char *
11331 dwarf2_name (tree decl, int scope)
11332 {
11333 if (DECL_NAMELESS (decl))
11334 return NULL;
11335 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11336 }
11337
11338 /* Add a new entry to .debug_pubnames if appropriate. */
11339
11340 static void
11341 add_pubname_string (const char *str, dw_die_ref die)
11342 {
11343 pubname_entry e;
11344
11345 e.die = die;
11346 e.name = xstrdup (str);
11347 vec_safe_push (pubname_table, e);
11348 }
11349
11350 static void
11351 add_pubname (tree decl, dw_die_ref die)
11352 {
11353 if (!want_pubnames ())
11354 return;
11355
11356 /* Don't add items to the table when we expect that the consumer will have
11357 just read the enclosing die. For example, if the consumer is looking at a
11358 class_member, it will either be inside the class already, or will have just
11359 looked up the class to find the member. Either way, searching the class is
11360 faster than searching the index. */
11361 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11362 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11363 {
11364 const char *name = dwarf2_name (decl, 1);
11365
11366 if (name)
11367 add_pubname_string (name, die);
11368 }
11369 }
11370
11371 /* Add an enumerator to the pubnames section. */
11372
11373 static void
11374 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11375 {
11376 pubname_entry e;
11377
11378 gcc_assert (scope_name);
11379 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11380 e.die = die;
11381 vec_safe_push (pubname_table, e);
11382 }
11383
11384 /* Add a new entry to .debug_pubtypes if appropriate. */
11385
11386 static void
11387 add_pubtype (tree decl, dw_die_ref die)
11388 {
11389 pubname_entry e;
11390
11391 if (!want_pubnames ())
11392 return;
11393
11394 if ((TREE_PUBLIC (decl)
11395 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11396 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11397 {
11398 tree scope = NULL;
11399 const char *scope_name = "";
11400 const char *sep = is_cxx () ? "::" : ".";
11401 const char *name;
11402
11403 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11404 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11405 {
11406 scope_name = lang_hooks.dwarf_name (scope, 1);
11407 if (scope_name != NULL && scope_name[0] != '\0')
11408 scope_name = concat (scope_name, sep, NULL);
11409 else
11410 scope_name = "";
11411 }
11412
11413 if (TYPE_P (decl))
11414 name = type_tag (decl);
11415 else
11416 name = lang_hooks.dwarf_name (decl, 1);
11417
11418 /* If we don't have a name for the type, there's no point in adding
11419 it to the table. */
11420 if (name != NULL && name[0] != '\0')
11421 {
11422 e.die = die;
11423 e.name = concat (scope_name, name, NULL);
11424 vec_safe_push (pubtype_table, e);
11425 }
11426
11427 /* Although it might be more consistent to add the pubinfo for the
11428 enumerators as their dies are created, they should only be added if the
11429 enum type meets the criteria above. So rather than re-check the parent
11430 enum type whenever an enumerator die is created, just output them all
11431 here. This isn't protected by the name conditional because anonymous
11432 enums don't have names. */
11433 if (die->die_tag == DW_TAG_enumeration_type)
11434 {
11435 dw_die_ref c;
11436
11437 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11438 }
11439 }
11440 }
11441
11442 /* Output a single entry in the pubnames table. */
11443
11444 static void
11445 output_pubname (dw_offset die_offset, pubname_entry *entry)
11446 {
11447 dw_die_ref die = entry->die;
11448 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11449
11450 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11451
11452 if (debug_generate_pub_sections == 2)
11453 {
11454 /* This logic follows gdb's method for determining the value of the flag
11455 byte. */
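      /* In the .gdb_index format the 32-bit symbol value keeps the CU index
	 in its low GDB_INDEX_CU_BITSIZE bits and the symbol kind plus the
	 "static" bit in the top byte; only that top byte is emitted below.  */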
11456 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11457 switch (die->die_tag)
11458 {
11459 case DW_TAG_typedef:
11460 case DW_TAG_base_type:
11461 case DW_TAG_subrange_type:
11462 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11463 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11464 break;
11465 case DW_TAG_enumerator:
11466 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11467 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11468 if (!is_cxx ())
11469 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11470 break;
11471 case DW_TAG_subprogram:
11472 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11473 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11474 if (!is_ada ())
11475 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11476 break;
11477 case DW_TAG_constant:
11478 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11479 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11480 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11481 break;
11482 case DW_TAG_variable:
11483 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11484 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11485 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11486 break;
11487 case DW_TAG_namespace:
11488 case DW_TAG_imported_declaration:
11489 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11490 break;
11491 case DW_TAG_class_type:
11492 case DW_TAG_interface_type:
11493 case DW_TAG_structure_type:
11494 case DW_TAG_union_type:
11495 case DW_TAG_enumeration_type:
11496 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11497 if (!is_cxx ())
11498 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11499 break;
11500 default:
11501 /* An unusual tag. Leave the flag-byte empty. */
11502 break;
11503 }
11504 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11505 "GDB-index flags");
11506 }
11507
11508 dw2_asm_output_nstring (entry->name, -1, "external name");
11509 }
11510
11511
11512 /* Output the public names table used to speed up access to externally
11513 visible names; or the public types table used to find type definitions. */
11514
11515 static void
11516 output_pubnames (vec<pubname_entry, va_gc> *names)
11517 {
11518 unsigned i;
11519 unsigned long pubnames_length = size_of_pubnames (names);
11520 pubname_entry *pub;
11521
11522 if (!XCOFF_DEBUGGING_INFO)
11523 {
11524 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11525 dw2_asm_output_data (4, 0xffffffff,
11526 "Initial length escape value indicating 64-bit DWARF extension");
11527 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11528 "Pub Info Length");
11529 }
11530
11531 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11532 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11533
11534 if (dwarf_split_debug_info)
11535 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11536 debug_skeleton_info_section,
11537 "Offset of Compilation Unit Info");
11538 else
11539 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11540 debug_info_section,
11541 "Offset of Compilation Unit Info");
11542 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11543 "Compilation Unit Length");
11544
11545 FOR_EACH_VEC_ELT (*names, i, pub)
11546 {
11547 if (include_pubname_in_output (names, pub))
11548 {
11549 dw_offset die_offset = pub->die->die_offset;
11550
11551 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11552 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11553 gcc_assert (pub->die->die_mark);
11554
11555 /* If we're putting types in their own .debug_types sections,
11556 the .debug_pubtypes table will still point to the compile
11557 unit (not the type unit), so we want to use the offset of
11558 the skeleton DIE (if there is one). */
11559 if (pub->die->comdat_type_p && names == pubtype_table)
11560 {
11561 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11562
11563 if (type_node != NULL)
11564 die_offset = (type_node->skeleton_die != NULL
11565 ? type_node->skeleton_die->die_offset
11566 : comp_unit_die ()->die_offset);
11567 }
11568
11569 output_pubname (die_offset, pub);
11570 }
11571 }
11572
11573 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11574 }
11575
11576 /* Output public names and types tables if necessary. */
11577
11578 static void
11579 output_pubtables (void)
11580 {
11581 if (!want_pubnames () || !info_section_emitted)
11582 return;
11583
11584 switch_to_section (debug_pubnames_section);
11585 output_pubnames (pubname_table);
11586 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11587 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11588 simply won't look for the section. */
11589 switch_to_section (debug_pubtypes_section);
11590 output_pubnames (pubtype_table);
11591 }
11592
11593
11594 /* Output the information that goes into the .debug_aranges table.
11595 Namely, define the beginning and ending address range of the
11596 text section generated for this compilation unit. */
11597
11598 static void
11599 output_aranges (void)
11600 {
11601 unsigned i;
11602 unsigned long aranges_length = size_of_aranges ();
11603
11604 if (!XCOFF_DEBUGGING_INFO)
11605 {
11606 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11607 dw2_asm_output_data (4, 0xffffffff,
11608 "Initial length escape value indicating 64-bit DWARF extension");
11609 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11610 "Length of Address Ranges Info");
11611 }
11612
11613 /* Version number for aranges is still 2, even up to DWARF5. */
11614 dw2_asm_output_data (2, 2, "DWARF aranges version");
11615 if (dwarf_split_debug_info)
11616 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11617 debug_skeleton_info_section,
11618 "Offset of Compilation Unit Info");
11619 else
11620 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11621 debug_info_section,
11622 "Offset of Compilation Unit Info");
11623 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11624 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11625
11626 /* We need to align to twice the pointer size here. */
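  /* E.g., with 32-bit DWARF and 8-byte addresses, the 12-byte header just
     emitted (length, version, CU offset, address and segment sizes) is
     followed by 4 bytes of padding so that the address pairs start on a
     16-byte boundary.  */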
11627 if (DWARF_ARANGES_PAD_SIZE)
11628 {
11629       /* Pad using 2-byte words so that padding is correct for any
11630 pointer size. */
11631 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11632 2 * DWARF2_ADDR_SIZE);
11633 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11634 dw2_asm_output_data (2, 0, NULL);
11635 }
11636
11637   /* We must not output these entries if the sections were not used;
11638      otherwise the length will be 0 and the address may end up as 0
11639      if the section is discarded by ld --gc-sections, leaving an
11640      invalid (0, 0) entry that can be confused with the
11641      terminator.  */
11642 if (text_section_used)
11643 {
11644 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11645 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11646 text_section_label, "Length");
11647 }
11648 if (cold_text_section_used)
11649 {
11650 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11651 "Address");
11652 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11653 cold_text_section_label, "Length");
11654 }
11655
11656 if (have_multiple_function_sections)
11657 {
11658 unsigned fde_idx;
11659 dw_fde_ref fde;
11660
11661 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11662 {
11663 if (DECL_IGNORED_P (fde->decl))
11664 continue;
11665 if (!fde->in_std_section)
11666 {
11667 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11668 "Address");
11669 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11670 fde->dw_fde_begin, "Length");
11671 }
11672 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11673 {
11674 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11675 "Address");
11676 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11677 fde->dw_fde_second_begin, "Length");
11678 }
11679 }
11680 }
11681
11682 /* Output the terminator words. */
11683 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11684 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11685 }
11686
11687 /* Add a new entry to .debug_ranges. Return its index into
11688 ranges_table vector. */
11689
11690 static unsigned int
11691 add_ranges_num (int num, bool maybe_new_sec)
11692 {
11693 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11694 vec_safe_push (ranges_table, r);
11695 return vec_safe_length (ranges_table) - 1;
11696 }
11697
11698 /* Add a new entry to .debug_ranges corresponding to a block, or a
11699 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11700    this entry might be in a different section from the previous range.  */
11701
11702 static unsigned int
11703 add_ranges (const_tree block, bool maybe_new_sec)
11704 {
11705 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11706 }
11707
11708 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11709    chain, or a middle entry of a chain that will be directly referred to.  */
11710
11711 static void
11712 note_rnglist_head (unsigned int offset)
11713 {
11714 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11715 return;
11716 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11717 }
11718
11719 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11720 When using dwarf_split_debug_info, address attributes in dies destined
11721 for the final executable should be direct references--setting the
11722 parameter force_direct ensures this behavior. */
11723
11724 static void
11725 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11726 bool *added, bool force_direct)
11727 {
11728 unsigned int in_use = vec_safe_length (ranges_by_label);
11729 unsigned int offset;
11730 dw_ranges_by_label rbl = { begin, end };
11731 vec_safe_push (ranges_by_label, rbl);
11732 offset = add_ranges_num (-(int)in_use - 1, true);
11733 if (!*added)
11734 {
11735 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11736 *added = true;
11737 note_rnglist_head (offset);
11738 }
11739 }
11740
11741 /* Emit .debug_ranges section. */
11742
11743 static void
11744 output_ranges (void)
11745 {
11746 unsigned i;
11747 static const char *const start_fmt = "Offset %#x";
11748 const char *fmt = start_fmt;
11749 dw_ranges *r;
11750
11751 switch_to_section (debug_ranges_section);
11752 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11753 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11754 {
11755 int block_num = r->num;
11756
11757 if (block_num > 0)
11758 {
11759 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11760 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11761
11762 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11763 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11764
11765 /* If all code is in the text section, then the compilation
11766 unit base address defaults to DW_AT_low_pc, which is the
11767 base of the text section. */
11768 if (!have_multiple_function_sections)
11769 {
11770 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11771 text_section_label,
11772 fmt, i * 2 * DWARF2_ADDR_SIZE);
11773 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11774 text_section_label, NULL);
11775 }
11776
11777 /* Otherwise, the compilation unit base address is zero,
11778 which allows us to use absolute addresses, and not worry
11779 about whether the target supports cross-section
11780 arithmetic. */
11781 else
11782 {
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11784 fmt, i * 2 * DWARF2_ADDR_SIZE);
11785 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11786 }
11787
11788 fmt = NULL;
11789 }
11790
11791 /* Negative block_num stands for an index into ranges_by_label. */
11792 else if (block_num < 0)
11793 {
11794 int lab_idx = - block_num - 1;
11795
11796 if (!have_multiple_function_sections)
11797 {
11798 gcc_unreachable ();
11799 #if 0
11800 /* If we ever use add_ranges_by_labels () for a single
11801 function section, all we have to do is to take out
11802 the #if 0 above. */
11803 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11804 (*ranges_by_label)[lab_idx].begin,
11805 text_section_label,
11806 fmt, i * 2 * DWARF2_ADDR_SIZE);
11807 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11808 (*ranges_by_label)[lab_idx].end,
11809 text_section_label, NULL);
11810 #endif
11811 }
11812 else
11813 {
11814 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11815 (*ranges_by_label)[lab_idx].begin,
11816 fmt, i * 2 * DWARF2_ADDR_SIZE);
11817 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11818 (*ranges_by_label)[lab_idx].end,
11819 NULL);
11820 }
11821 }
11822 else
11823 {
11824 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11825 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11826 fmt = start_fmt;
11827 }
11828 }
11829 }
11830
11831 /* Non-zero if .debug_line_str should be used for .debug_line section
11832 strings or strings that are likely shareable with those. */
11833 #define DWARF5_USE_DEBUG_LINE_STR \
11834 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11835 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11836 /* FIXME: there is no .debug_line_str.dwo section, \
11837 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11838 && !dwarf_split_debug_info)
11839
11840 /* Assign .debug_rnglists indexes. */
11841
11842 static void
11843 index_rnglists (void)
11844 {
11845 unsigned i;
11846 dw_ranges *r;
11847
11848 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11849 if (r->label)
11850 r->idx = rnglist_idx++;
11851 }
11852
11853 /* Emit .debug_rnglists section. */
11854
11855 static void
11856 output_rnglists (unsigned generation)
11857 {
11858 unsigned i;
11859 dw_ranges *r;
11860 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11861 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11862 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11863
11864 switch_to_section (debug_ranges_section);
11865 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11866 /* There are up to 4 unique ranges labels per generation.
11867 See also init_sections_and_labels. */
11868 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11869 2 + generation * 4);
11870 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11871 3 + generation * 4);
11872 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11873 dw2_asm_output_data (4, 0xffffffff,
11874 "Initial length escape value indicating "
11875 "64-bit DWARF extension");
11876 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11877 "Length of Range Lists");
11878 ASM_OUTPUT_LABEL (asm_out_file, l1);
11879 output_dwarf_version ();
11880 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11881 dw2_asm_output_data (1, 0, "Segment Size");
11882 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11883 about relocation sizes and primarily care about the size of .debug*
11884 sections in linked shared libraries and executables, then
11885 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11886 into it are usually larger than just DW_FORM_sec_offset offsets
11887 into the .debug_rnglists section. */
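  /* E.g., with 32-bit DWARF a DW_FORM_sec_offset reference costs 4 bytes,
     while DW_FORM_rnglistx costs a uleb128 index of at least 1 byte plus a
     4-byte slot in this table, so direct offsets are never larger for
     non-split units.  */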
11888 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11889 "Offset Entry Count");
11890 if (dwarf_split_debug_info)
11891 {
11892 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11893 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11894 if (r->label)
11895 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11896 ranges_base_label, NULL);
11897 }
11898
11899 const char *lab = "";
11900 unsigned int len = vec_safe_length (ranges_table);
11901 const char *base = NULL;
11902 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11903 {
11904 int block_num = r->num;
11905
11906 if (r->label)
11907 {
11908 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11909 lab = r->label;
11910 }
11911 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11912 base = NULL;
11913 if (block_num > 0)
11914 {
11915 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11916 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11917
11918 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11919 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11920
11921 if (HAVE_AS_LEB128)
11922 {
11923 /* If all code is in the text section, then the compilation
11924 unit base address defaults to DW_AT_low_pc, which is the
11925 base of the text section. */
11926 if (!have_multiple_function_sections)
11927 {
11928 dw2_asm_output_data (1, DW_RLE_offset_pair,
11929 "DW_RLE_offset_pair (%s)", lab);
11930 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11931 "Range begin address (%s)", lab);
11932 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11933 "Range end address (%s)", lab);
11934 continue;
11935 }
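	      /* With multiple function sections, try to establish a
		 DW_RLE_base_address from this entry when the next entry
		 stays in the same section, so the following ranges can be
		 emitted as compact DW_RLE_offset_pair deltas instead of
		 full start/end addresses.  */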
11936 if (base == NULL)
11937 {
11938 dw_ranges *r2 = NULL;
11939 if (i < len - 1)
11940 r2 = &(*ranges_table)[i + 1];
11941 if (r2
11942 && r2->num != 0
11943 && r2->label == NULL
11944 && !r2->maybe_new_sec)
11945 {
11946 dw2_asm_output_data (1, DW_RLE_base_address,
11947 "DW_RLE_base_address (%s)", lab);
11948 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11949 "Base address (%s)", lab);
11950 strcpy (basebuf, blabel);
11951 base = basebuf;
11952 }
11953 }
11954 if (base)
11955 {
11956 dw2_asm_output_data (1, DW_RLE_offset_pair,
11957 "DW_RLE_offset_pair (%s)", lab);
11958 dw2_asm_output_delta_uleb128 (blabel, base,
11959 "Range begin address (%s)", lab);
11960 dw2_asm_output_delta_uleb128 (elabel, base,
11961 "Range end address (%s)", lab);
11962 continue;
11963 }
11964 dw2_asm_output_data (1, DW_RLE_start_length,
11965 "DW_RLE_start_length (%s)", lab);
11966 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11967 "Range begin address (%s)", lab);
11968 dw2_asm_output_delta_uleb128 (elabel, blabel,
11969 "Range length (%s)", lab);
11970 }
11971 else
11972 {
11973 dw2_asm_output_data (1, DW_RLE_start_end,
11974 "DW_RLE_start_end (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11976 "Range begin address (%s)", lab);
11977 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11978 "Range end address (%s)", lab);
11979 }
11980 }
11981
11982 /* Negative block_num stands for an index into ranges_by_label. */
11983 else if (block_num < 0)
11984 {
11985 int lab_idx = - block_num - 1;
11986 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11987 const char *elabel = (*ranges_by_label)[lab_idx].end;
11988
11989 if (!have_multiple_function_sections)
11990 gcc_unreachable ();
11991 if (HAVE_AS_LEB128)
11992 {
11993 dw2_asm_output_data (1, DW_RLE_start_length,
11994 "DW_RLE_start_length (%s)", lab);
11995 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11996 "Range begin address (%s)", lab);
11997 dw2_asm_output_delta_uleb128 (elabel, blabel,
11998 "Range length (%s)", lab);
11999 }
12000 else
12001 {
12002 dw2_asm_output_data (1, DW_RLE_start_end,
12003 "DW_RLE_start_end (%s)", lab);
12004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12005 "Range begin address (%s)", lab);
12006 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12007 "Range end address (%s)", lab);
12008 }
12009 }
12010 else
12011 dw2_asm_output_data (1, DW_RLE_end_of_list,
12012 "DW_RLE_end_of_list (%s)", lab);
12013 }
12014 ASM_OUTPUT_LABEL (asm_out_file, l2);
12015 }
12016
12017 /* Data structure containing information about input files. */
12018 struct file_info
12019 {
12020 const char *path; /* Complete file name. */
12021 const char *fname; /* File name part. */
12022 int length; /* Length of entire string. */
12023 struct dwarf_file_data * file_idx; /* Index in input file table. */
12024 int dir_idx; /* Index in directory table. */
12025 };
12026
12027 /* Data structure containing information about directories with source
12028 files. */
12029 struct dir_info
12030 {
12031 const char *path; /* Path including directory name. */
12032 int length; /* Path length. */
12033 int prefix; /* Index of directory entry which is a prefix. */
12034 int count; /* Number of files in this directory. */
12035 int dir_idx; /* Index of directory used as base. */
12036 };
12037
12038 /* Callback function for file_info comparison. We sort by looking at
12039 the directories in the path. */
12040
12041 static int
12042 file_info_cmp (const void *p1, const void *p2)
12043 {
12044 const struct file_info *const s1 = (const struct file_info *) p1;
12045 const struct file_info *const s2 = (const struct file_info *) p2;
12046 const unsigned char *cp1;
12047 const unsigned char *cp2;
12048
12049   /* Take care of file names without directories.  We need to make sure we
12050      return consistent values to qsort, since some implementations get
12051      confused if we return the same nonzero value when the operands are
12052      passed in opposite orders.  So if neither has a directory, return 0,
12053      otherwise return 1 or -1 depending on which one has the directory.
12054      We want the one with the directory to sort after the one without, so
12055      all directory-less files are at the start (normally only the compilation unit file).  */
12056 if ((s1->path == s1->fname || s2->path == s2->fname))
12057 return (s2->path == s2->fname) - (s1->path == s1->fname);
12058
12059 cp1 = (const unsigned char *) s1->path;
12060 cp2 = (const unsigned char *) s2->path;
12061
12062 while (1)
12063 {
12064 ++cp1;
12065 ++cp2;
12066 /* Reached the end of the first path? If so, handle like above,
12067 but now we want longer directory prefixes before shorter ones. */
12068 if ((cp1 == (const unsigned char *) s1->fname)
12069 || (cp2 == (const unsigned char *) s2->fname))
12070 return ((cp1 == (const unsigned char *) s1->fname)
12071 - (cp2 == (const unsigned char *) s2->fname));
12072
12073 	  /* If the characters of the current path component differ, sort on them.  */
12074 else if (*cp1 != *cp2)
12075 return *cp1 - *cp2;
12076 }
12077 }
12078
12079 struct file_name_acquire_data
12080 {
12081 struct file_info *files;
12082 int used_files;
12083 int max_files;
12084 };
12085
12086 /* Traversal function for the hash table. */
12087
12088 int
12089 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12090 {
12091 struct dwarf_file_data *d = *slot;
12092 struct file_info *fi;
12093 const char *f;
12094
12095 gcc_assert (fnad->max_files >= d->emitted_number);
12096
12097 if (! d->emitted_number)
12098 return 1;
12099
12100 gcc_assert (fnad->max_files != fnad->used_files);
12101
12102 fi = fnad->files + fnad->used_files++;
12103
12104 f = remap_debug_filename (d->filename);
12105
12106 /* Skip all leading "./". */
12107 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12108 f += 2;
12109
12110 /* Create a new array entry. */
12111 fi->path = f;
12112 fi->length = strlen (f);
12113 fi->file_idx = d;
12114
12115 /* Search for the file name part. */
12116 f = strrchr (f, DIR_SEPARATOR);
12117 #if defined (DIR_SEPARATOR_2)
12118 {
12119 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12120
12121 if (g != NULL)
12122 {
12123 if (f == NULL || f < g)
12124 f = g;
12125 }
12126 }
12127 #endif
12128
12129 fi->fname = f == NULL ? fi->path : f + 1;
12130 return 1;
12131 }
12132
12133 /* Helper function for output_file_names. Emit a FORM encoded
12134 string STR, with assembly comment start ENTRY_KIND and
12135    index IDX.  */
12136
12137 static void
12138 output_line_string (enum dwarf_form form, const char *str,
12139 const char *entry_kind, unsigned int idx)
12140 {
12141 switch (form)
12142 {
12143 case DW_FORM_string:
12144 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12145 break;
12146 case DW_FORM_line_strp:
12147 if (!debug_line_str_hash)
12148 debug_line_str_hash
12149 = hash_table<indirect_string_hasher>::create_ggc (10);
12150
12151 struct indirect_string_node *node;
12152 node = find_AT_string_in_table (str, debug_line_str_hash);
12153 set_indirect_string (node);
12154 node->form = form;
12155 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12156 debug_line_str_section, "%s: %#x: \"%s\"",
12157 entry_kind, 0, node->str);
12158 break;
12159 default:
12160 gcc_unreachable ();
12161 }
12162 }
12163
12164 /* Output the directory table and the file name table. We try to minimize
12165 the total amount of memory needed. A heuristic is used to avoid large
12166 slowdowns with many input files. */
12167
12168 static void
12169 output_file_names (void)
12170 {
12171 struct file_name_acquire_data fnad;
12172 int numfiles;
12173 struct file_info *files;
12174 struct dir_info *dirs;
12175 int *saved;
12176 int *savehere;
12177 int *backmap;
12178 int ndirs;
12179 int idx_offset;
12180 int i;
12181
12182 if (!last_emitted_file)
12183 {
12184 if (dwarf_version >= 5)
12185 {
12186 dw2_asm_output_data (1, 0, "Directory entry format count");
12187 dw2_asm_output_data_uleb128 (0, "Directories count");
12188 dw2_asm_output_data (1, 0, "File name entry format count");
12189 dw2_asm_output_data_uleb128 (0, "File names count");
12190 }
12191 else
12192 {
12193 dw2_asm_output_data (1, 0, "End directory table");
12194 dw2_asm_output_data (1, 0, "End file name table");
12195 }
12196 return;
12197 }
12198
12199 numfiles = last_emitted_file->emitted_number;
12200
12201 /* Allocate the various arrays we need. */
12202 files = XALLOCAVEC (struct file_info, numfiles);
12203 dirs = XALLOCAVEC (struct dir_info, numfiles);
12204
12205 fnad.files = files;
12206 fnad.used_files = 0;
12207 fnad.max_files = numfiles;
12208 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12209 gcc_assert (fnad.used_files == fnad.max_files);
12210
12211 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12212
12213 /* Find all the different directories used. */
12214 dirs[0].path = files[0].path;
12215 dirs[0].length = files[0].fname - files[0].path;
12216 dirs[0].prefix = -1;
12217 dirs[0].count = 1;
12218 dirs[0].dir_idx = 0;
12219 files[0].dir_idx = 0;
12220 ndirs = 1;
12221
12222 for (i = 1; i < numfiles; i++)
12223 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12224 && memcmp (dirs[ndirs - 1].path, files[i].path,
12225 dirs[ndirs - 1].length) == 0)
12226 {
12227 /* Same directory as last entry. */
12228 files[i].dir_idx = ndirs - 1;
12229 ++dirs[ndirs - 1].count;
12230 }
12231 else
12232 {
12233 int j;
12234
12235 /* This is a new directory. */
12236 dirs[ndirs].path = files[i].path;
12237 dirs[ndirs].length = files[i].fname - files[i].path;
12238 dirs[ndirs].count = 1;
12239 dirs[ndirs].dir_idx = ndirs;
12240 files[i].dir_idx = ndirs;
12241
12242 /* Search for a prefix. */
12243 dirs[ndirs].prefix = -1;
12244 for (j = 0; j < ndirs; j++)
12245 if (dirs[j].length < dirs[ndirs].length
12246 && dirs[j].length > 1
12247 && (dirs[ndirs].prefix == -1
12248 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12249 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12250 dirs[ndirs].prefix = j;
12251
12252 ++ndirs;
12253 }
12254
12255   /* Now to the actual work.  We have to find a subset of the directories which
12256      allows expressing each file name using references to the directory table
12257      with the fewest characters.  We do not do an exhaustive search where we
12258      would have to check out every combination of every single possible
12259      prefix.  Instead we use a heuristic which provides nearly optimal
12260      results in most cases and is never far off.  */
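  /* As an illustration (the directory names are made up): if dirs[i] is
     "/usr/inc/" and dirs[j] is "/usr/inc/sys/", then once dirs[i] is in the
     directory table every file in dirs[j] can drop those dirs[i].length
     leading characters from its file-table entry; the loops below add up
     such savings to decide whether emitting dirs[i] pays for itself.  */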
12261 saved = XALLOCAVEC (int, ndirs);
12262 savehere = XALLOCAVEC (int, ndirs);
12263
12264 memset (saved, '\0', ndirs * sizeof (saved[0]));
12265 for (i = 0; i < ndirs; i++)
12266 {
12267 int j;
12268 int total;
12269
12270 /* We can always save some space for the current directory. But this
12271 does not mean it will be enough to justify adding the directory. */
12272 savehere[i] = dirs[i].length;
12273 total = (savehere[i] - saved[i]) * dirs[i].count;
12274
12275 for (j = i + 1; j < ndirs; j++)
12276 {
12277 savehere[j] = 0;
12278 if (saved[j] < dirs[i].length)
12279 {
12280 /* Determine whether the dirs[i] path is a prefix of the
12281 dirs[j] path. */
12282 int k;
12283
12284 k = dirs[j].prefix;
12285 while (k != -1 && k != (int) i)
12286 k = dirs[k].prefix;
12287
12288 if (k == (int) i)
12289 {
12290 /* Yes it is. We can possibly save some memory by
12291 writing the filenames in dirs[j] relative to
12292 dirs[i]. */
12293 savehere[j] = dirs[i].length;
12294 total += (savehere[j] - saved[j]) * dirs[j].count;
12295 }
12296 }
12297 }
12298
12299 /* Check whether we can save enough to justify adding the dirs[i]
12300 directory. */
12301 if (total > dirs[i].length + 1)
12302 {
12303 /* It's worthwhile adding. */
12304 for (j = i; j < ndirs; j++)
12305 if (savehere[j] > 0)
12306 {
12307 /* Remember how much we saved for this directory so far. */
12308 saved[j] = savehere[j];
12309
12310 /* Remember the prefix directory. */
12311 dirs[j].dir_idx = i;
12312 }
12313 }
12314 }
12315
12316 /* Emit the directory name table. */
12317 idx_offset = dirs[0].length > 0 ? 1 : 0;
12318 enum dwarf_form str_form = DW_FORM_string;
12319 enum dwarf_form idx_form = DW_FORM_udata;
12320 if (dwarf_version >= 5)
12321 {
12322 const char *comp_dir = comp_dir_string ();
12323 if (comp_dir == NULL)
12324 comp_dir = "";
12325 dw2_asm_output_data (1, 1, "Directory entry format count");
12326 if (DWARF5_USE_DEBUG_LINE_STR)
12327 str_form = DW_FORM_line_strp;
12328 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12329 dw2_asm_output_data_uleb128 (str_form, "%s",
12330 get_DW_FORM_name (str_form));
12331 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12332 if (str_form == DW_FORM_string)
12333 {
12334 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12335 for (i = 1 - idx_offset; i < ndirs; i++)
12336 dw2_asm_output_nstring (dirs[i].path,
12337 dirs[i].length
12338 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12339 "Directory Entry: %#x", i + idx_offset);
12340 }
12341 else
12342 {
12343 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12344 for (i = 1 - idx_offset; i < ndirs; i++)
12345 {
12346 const char *str
12347 = ggc_alloc_string (dirs[i].path,
12348 dirs[i].length
12349 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12350 output_line_string (str_form, str, "Directory Entry",
12351 (unsigned) i + idx_offset);
12352 }
12353 }
12354 }
12355 else
12356 {
12357 for (i = 1 - idx_offset; i < ndirs; i++)
12358 dw2_asm_output_nstring (dirs[i].path,
12359 dirs[i].length
12360 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12361 "Directory Entry: %#x", i + idx_offset);
12362
12363 dw2_asm_output_data (1, 0, "End directory table");
12364 }
12365
12366 /* We have to emit them in the order of emitted_number since that's
12367 used in the debug info generation. To do this efficiently we
12368 generate a back-mapping of the indices first. */
12369 backmap = XALLOCAVEC (int, numfiles);
12370 for (i = 0; i < numfiles; i++)
12371 backmap[files[i].file_idx->emitted_number - 1] = i;
12372
12373 if (dwarf_version >= 5)
12374 {
12375 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12376 if (filename0 == NULL)
12377 filename0 = "";
12378       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12379	 DW_FORM_data2.  Choose one based on the number of directories
12380	 and how much space the indexes would occupy in each encoding.
12381	 If we have at most 256 directories, all indexes fit into
12382	 a single byte, so DW_FORM_data1 is most compact (if there
12383	 are at most 128 directories, DW_FORM_udata would be just as
12384	 compact, but no shorter and slower to decode).  */
12385 if (ndirs + idx_offset <= 256)
12386 idx_form = DW_FORM_data1;
12387       /* If there are more than 65536 directories, we have to use
12388	 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12389	 Otherwise, compute the space all the indexes would occupy if they
12390	 used DW_FORM_udata - sum - compare that to how large the
12391	 DW_FORM_data2 encoding would be, and pick the more efficient one.  */
12392 else if (ndirs + idx_offset <= 65536)
12393 {
12394 unsigned HOST_WIDE_INT sum = 1;
12395 for (i = 0; i < numfiles; i++)
12396 {
12397 int file_idx = backmap[i];
12398 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12399 sum += size_of_uleb128 (dir_idx);
12400 }
12401 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12402 idx_form = DW_FORM_data2;
12403 }
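      /* Roughly: a uleb128 directory index takes 1 byte below 128 and 2
	 bytes from 128 up, so when most files live in directories with an
	 index of 128 or more the udata encoding is no smaller than a fixed
	 2-byte DW_FORM_data2, which is what the sum test above checks.  */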
12404 #ifdef VMS_DEBUGGING_INFO
12405 dw2_asm_output_data (1, 4, "File name entry format count");
12406 #else
12407 dw2_asm_output_data (1, 2, "File name entry format count");
12408 #endif
12409 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12410 dw2_asm_output_data_uleb128 (str_form, "%s",
12411 get_DW_FORM_name (str_form));
12412 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12413 "DW_LNCT_directory_index");
12414 dw2_asm_output_data_uleb128 (idx_form, "%s",
12415 get_DW_FORM_name (idx_form));
12416 #ifdef VMS_DEBUGGING_INFO
12417 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12418 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12419 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12420 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12421 #endif
12422 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12423
12424 output_line_string (str_form, filename0, "File Entry", 0);
12425
12426 /* Include directory index. */
12427 if (idx_form != DW_FORM_udata)
12428 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12429 0, NULL);
12430 else
12431 dw2_asm_output_data_uleb128 (0, NULL);
12432
12433 #ifdef VMS_DEBUGGING_INFO
12434 dw2_asm_output_data_uleb128 (0, NULL);
12435 dw2_asm_output_data_uleb128 (0, NULL);
12436 #endif
12437 }
12438
12439 /* Now write all the file names. */
12440 for (i = 0; i < numfiles; i++)
12441 {
12442 int file_idx = backmap[i];
12443 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12444
12445 #ifdef VMS_DEBUGGING_INFO
12446 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12447
12448 /* Setting these fields can lead to debugger miscomparisons,
12449 but VMS Debug requires them to be set correctly. */
12450
12451 int ver;
12452 long long cdt;
12453 long siz;
12454 int maxfilelen = (strlen (files[file_idx].path)
12455 + dirs[dir_idx].length
12456 + MAX_VMS_VERSION_LEN + 1);
12457 char *filebuf = XALLOCAVEC (char, maxfilelen);
12458
12459 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12460 snprintf (filebuf, maxfilelen, "%s;%d",
12461 files[file_idx].path + dirs[dir_idx].length, ver);
12462
12463 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12464
12465 /* Include directory index. */
12466 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12467 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12468 dir_idx + idx_offset, NULL);
12469 else
12470 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12471
12472 /* Modification time. */
12473 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12474 &cdt, 0, 0, 0) == 0)
12475 ? cdt : 0, NULL);
12476
12477 /* File length in bytes. */
12478 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12479 0, &siz, 0, 0) == 0)
12480 ? siz : 0, NULL);
12481 #else
12482 output_line_string (str_form,
12483 files[file_idx].path + dirs[dir_idx].length,
12484 "File Entry", (unsigned) i + 1);
12485
12486 /* Include directory index. */
12487 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12488 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12489 dir_idx + idx_offset, NULL);
12490 else
12491 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12492
12493 if (dwarf_version >= 5)
12494 continue;
12495
12496 /* Modification time. */
12497 dw2_asm_output_data_uleb128 (0, NULL);
12498
12499 /* File length in bytes. */
12500 dw2_asm_output_data_uleb128 (0, NULL);
12501 #endif /* VMS_DEBUGGING_INFO */
12502 }
12503
12504 if (dwarf_version < 5)
12505 dw2_asm_output_data (1, 0, "End file name table");
12506 }
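
/* An illustrative, self-contained sketch of the DW_LNCT_directory_index
   form selection above; it is not part of GCC, and uleb128_size_of and
   pick_directory_index_form are hypothetical names.  It is simplified
   (it ignores the synthetic zeroth file entry) and only mirrors the
   data1 / data2 / udata trade-off described in the comment above.  */

static unsigned int
uleb128_size_of (unsigned long long value)
{
  unsigned int size = 1;
  while (value >= 0x80)
    {
      value >>= 7;
      size++;
    }
  return size;
}

static int
pick_directory_index_form (unsigned int ndirs,
			   const unsigned int *dir_indexes,
			   unsigned int nentries)
{
  /* With at most 256 directories every index fits in one byte.  */
  if (ndirs <= 256)
    return DW_FORM_data1;

  /* DW_FORM_data2 cannot refer to more than 65536 directories.  */
  if (ndirs > 65536)
    return DW_FORM_udata;

  /* Otherwise compare the total uleb128 size against the fixed two
     bytes per entry that DW_FORM_data2 would use.  */
  unsigned long long udata_size = 0;
  for (unsigned int i = 0; i < nentries; i++)
    udata_size += uleb128_size_of (dir_indexes[i]);
  return udata_size >= 2ULL * nentries ? DW_FORM_data2 : DW_FORM_udata;
}
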
12507
12508
12509 /* Output one line number table into the .debug_line section. */
12510
12511 static void
12512 output_one_line_info_table (dw_line_info_table *table)
12513 {
12514 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12515 unsigned int current_line = 1;
12516 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12517 dw_line_info_entry *ent, *prev_addr;
12518 size_t i;
12519 unsigned int view;
12520
12521 view = 0;
12522
12523 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12524 {
12525 switch (ent->opcode)
12526 {
12527 case LI_set_address:
12528 /* ??? Unfortunately, we have little choice here currently, and
12529 must always use the most general form. GCC does not know the
12530 address delta itself, so we can't use DW_LNS_advance_pc. Many
12531 ports do have length attributes which will give an upper bound
12532 on the address range. We could perhaps use length attributes
12533 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12534 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12535
12536 view = 0;
12537
12538 /* This can handle any delta. This takes
12539 4+DWARF2_ADDR_SIZE bytes. */
12540 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12541 debug_variable_location_views
12542 ? ", reset view to 0" : "");
12543 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12544 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12545 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12546
12547 prev_addr = ent;
12548 break;
12549
12550 case LI_adv_address:
12551 {
12552 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12553 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12554 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12555
12556 view++;
12557
12558 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12559 dw2_asm_output_delta (2, line_label, prev_label,
12560 "from %s to %s", prev_label, line_label);
12561
12562 prev_addr = ent;
12563 break;
12564 }
12565
12566 case LI_set_line:
12567 if (ent->val == current_line)
12568 {
12569 /* We still need to start a new row, so output a copy insn. */
12570 dw2_asm_output_data (1, DW_LNS_copy,
12571 "copy line %u", current_line);
12572 }
12573 else
12574 {
12575 int line_offset = ent->val - current_line;
12576 int line_delta = line_offset - DWARF_LINE_BASE;
12577
12578 current_line = ent->val;
12579 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12580 {
12581 /* This can handle deltas from -10 to 234, using the current
12582 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12583 This takes 1 byte. */
12584 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12585 "line %u", current_line);
12586 }
12587 else
12588 {
12589 /* This can handle any delta. This takes at least 4 bytes,
12590 depending on the value being encoded. */
12591 dw2_asm_output_data (1, DW_LNS_advance_line,
12592 "advance to line %u", current_line);
12593 dw2_asm_output_data_sleb128 (line_offset, NULL);
12594 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12595 }
12596 }
12597 break;
12598
12599 case LI_set_file:
12600 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12601 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12602 break;
12603
12604 case LI_set_column:
12605 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12606 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12607 break;
12608
12609 case LI_negate_stmt:
12610 current_is_stmt = !current_is_stmt;
12611 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12612 "is_stmt %d", current_is_stmt);
12613 break;
12614
12615 case LI_set_prologue_end:
12616 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12617 "set prologue end");
12618 break;
12619
12620 case LI_set_epilogue_begin:
12621 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12622 "set epilogue begin");
12623 break;
12624
12625 case LI_set_discriminator:
12626 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12627 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12628 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12629 dw2_asm_output_data_uleb128 (ent->val, NULL);
12630 break;
12631 }
12632 }
12633
12634 /* Emit debug info for the address of the end of the table. */
12635 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12636 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12637 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12638 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12639
12640 dw2_asm_output_data (1, 0, "end sequence");
12641 dw2_asm_output_data_uleb128 (1, NULL);
12642 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12643 }
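
/* The set_address, set_discriminator and end_sequence rows emitted above
   are DWARF "extended" opcodes: a zero byte, a uleb128 length, the
   DW_LNE_* sub-opcode, then the operand bytes.  The following is an
   illustrative sketch of that byte layout only; it is not part of GCC
   and encode_extended_opcode is a hypothetical name.  BUF must be large
   enough; the number of bytes written is returned.  */

static size_t
encode_extended_opcode (unsigned char *buf, unsigned char sub_opcode,
			const unsigned char *operand, size_t operand_len)
{
  size_t n = 0;
  buf[n++] = 0;			/* Opcode 0 announces an extended opcode.  */

  /* uleb128 length of what follows: the sub-opcode plus its operands,
     just like the "1 + DWARF2_ADDR_SIZE" emitted above.  */
  size_t len = 1 + operand_len;
  do
    {
      unsigned char byte = len & 0x7f;
      len >>= 7;
      if (len)
	byte |= 0x80;
      buf[n++] = byte;
    }
  while (len);

  buf[n++] = sub_opcode;	/* E.g. DW_LNE_set_address.  */
  for (size_t i = 0; i < operand_len; i++)
    buf[n++] = operand[i];	/* E.g. a DWARF2_ADDR_SIZE address.  */
  return n;
}
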
12644
12645 /* Output the source line number correspondence information. This
12646 information goes into the .debug_line section. */
12647
12648 static void
12649 output_line_info (bool prologue_only)
12650 {
12651 static unsigned int generation;
12652 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12653 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12654 bool saw_one = false;
12655 int opc;
12656
12657 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12658 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12659 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12660 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12661
12662 if (!XCOFF_DEBUGGING_INFO)
12663 {
12664 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12665 dw2_asm_output_data (4, 0xffffffff,
12666 "Initial length escape value indicating 64-bit DWARF extension");
12667 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12668 "Length of Source Line Info");
12669 }
12670
12671 ASM_OUTPUT_LABEL (asm_out_file, l1);
12672
12673 output_dwarf_version ();
12674 if (dwarf_version >= 5)
12675 {
12676 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12677 dw2_asm_output_data (1, 0, "Segment Size");
12678 }
12679 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12680 ASM_OUTPUT_LABEL (asm_out_file, p1);
12681
12682 /* Define the architecture-dependent minimum instruction length (in bytes).
12683 In this implementation of DWARF, this field is used for information
12684 purposes only. Since GCC generates assembly language, we have no
12685 a priori knowledge of how many instruction bytes are generated for each
12686 source line, and therefore can use only the DW_LNE_set_address and
12687 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12688 this as '1', which is "correct enough" for all architectures,
12689 and don't let the target override. */
12690 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12691
12692 if (dwarf_version >= 4)
12693 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12694 "Maximum Operations Per Instruction");
12695 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12696 "Default is_stmt_start flag");
12697 dw2_asm_output_data (1, DWARF_LINE_BASE,
12698 "Line Base Value (Special Opcodes)");
12699 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12700 "Line Range Value (Special Opcodes)");
12701 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12702 "Special Opcode Base");
12703
12704 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12705 {
12706 int n_op_args;
12707 switch (opc)
12708 {
12709 case DW_LNS_advance_pc:
12710 case DW_LNS_advance_line:
12711 case DW_LNS_set_file:
12712 case DW_LNS_set_column:
12713 case DW_LNS_fixed_advance_pc:
12714 case DW_LNS_set_isa:
12715 n_op_args = 1;
12716 break;
12717 default:
12718 n_op_args = 0;
12719 break;
12720 }
12721
12722 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12723 opc, n_op_args);
12724 }
12725
12726 /* Write out the information about the files we use. */
12727 output_file_names ();
12728 ASM_OUTPUT_LABEL (asm_out_file, p2);
12729 if (prologue_only)
12730 {
12731 /* Output the marker for the end of the line number info. */
12732 ASM_OUTPUT_LABEL (asm_out_file, l2);
12733 return;
12734 }
12735
12736 if (separate_line_info)
12737 {
12738 dw_line_info_table *table;
12739 size_t i;
12740
12741 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12742 if (table->in_use)
12743 {
12744 output_one_line_info_table (table);
12745 saw_one = true;
12746 }
12747 }
12748 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12749 {
12750 output_one_line_info_table (cold_text_section_line_info);
12751 saw_one = true;
12752 }
12753
12754 /* ??? Some Darwin linkers crash on a .debug_line section with no
12755 sequences. Further, merely a DW_LNE_end_sequence entry is not
12756 sufficient -- the address column must also be initialized.
12757 Make sure to output at least one set_address/end_sequence pair,
12758 choosing .text since that section is always present. */
12759 if (text_section_line_info->in_use || !saw_one)
12760 output_one_line_info_table (text_section_line_info);
12761
12762 /* Output the marker for the end of the line number info. */
12763 ASM_OUTPUT_LABEL (asm_out_file, l2);
12764 }
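
/* The "Initial length escape value" emitted above is how 64-bit DWARF is
   announced: when section offsets are 8 bytes, a unit starts with
   0xffffffff followed by an 8-byte length, otherwise with a plain 4-byte
   length (which must stay below 0xfffffff0).  The following is an
   illustrative sketch only, not part of GCC; write_initial_length is a
   hypothetical name and little-endian output is assumed for
   concreteness.  Returns the number of bytes written.  */

static size_t
write_initial_length (unsigned char *buf, unsigned long long length,
		      int dwarf64)
{
  size_t n = 0;
  if (dwarf64)
    {
      /* Escape value announcing 64-bit DWARF, then an 8-byte length.  */
      for (int i = 0; i < 4; i++)
	buf[n++] = 0xff;
      for (int i = 0; i < 8; i++)
	buf[n++] = (length >> (8 * i)) & 0xff;
    }
  else
    for (int i = 0; i < 4; i++)
      buf[n++] = (length >> (8 * i)) & 0xff;
  return n;
}
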
12765 \f
12766 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12767
12768 static inline bool
12769 need_endianity_attribute_p (bool reverse)
12770 {
12771 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12772 }
12773
12774 /* Given a pointer to a tree node for some base type, return a pointer to
12775 a DIE that describes the given type. REVERSE is true if the type is
12776 to be interpreted in the reverse storage order wrt the target order.
12777
12778 This routine must only be called for GCC type nodes that correspond to
12779 Dwarf base (fundamental) types. */
12780
12781 static dw_die_ref
12782 base_type_die (tree type, bool reverse)
12783 {
12784 dw_die_ref base_type_result;
12785 enum dwarf_type encoding;
12786 bool fpt_used = false;
12787 struct fixed_point_type_info fpt_info;
12788 tree type_bias = NULL_TREE;
12789
12790 /* If this is a subtype that should not be emitted as a subrange type,
12791 use the base type. See subrange_type_for_debug_p. */
12792 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12793 type = TREE_TYPE (type);
12794
12795 switch (TREE_CODE (type))
12796 {
12797 case INTEGER_TYPE:
12798 if ((dwarf_version >= 4 || !dwarf_strict)
12799 && TYPE_NAME (type)
12800 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12801 && DECL_IS_BUILTIN (TYPE_NAME (type))
12802 && DECL_NAME (TYPE_NAME (type)))
12803 {
12804 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12805 if (strcmp (name, "char16_t") == 0
12806 || strcmp (name, "char32_t") == 0)
12807 {
12808 encoding = DW_ATE_UTF;
12809 break;
12810 }
12811 }
12812 if ((dwarf_version >= 3 || !dwarf_strict)
12813 && lang_hooks.types.get_fixed_point_type_info)
12814 {
12815 memset (&fpt_info, 0, sizeof (fpt_info));
12816 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12817 {
12818 fpt_used = true;
12819 encoding = ((TYPE_UNSIGNED (type))
12820 ? DW_ATE_unsigned_fixed
12821 : DW_ATE_signed_fixed);
12822 break;
12823 }
12824 }
12825 if (TYPE_STRING_FLAG (type))
12826 {
12827 if (TYPE_UNSIGNED (type))
12828 encoding = DW_ATE_unsigned_char;
12829 else
12830 encoding = DW_ATE_signed_char;
12831 }
12832 else if (TYPE_UNSIGNED (type))
12833 encoding = DW_ATE_unsigned;
12834 else
12835 encoding = DW_ATE_signed;
12836
12837 if (!dwarf_strict
12838 && lang_hooks.types.get_type_bias)
12839 type_bias = lang_hooks.types.get_type_bias (type);
12840 break;
12841
12842 case REAL_TYPE:
12843 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12844 {
12845 if (dwarf_version >= 3 || !dwarf_strict)
12846 encoding = DW_ATE_decimal_float;
12847 else
12848 encoding = DW_ATE_lo_user;
12849 }
12850 else
12851 encoding = DW_ATE_float;
12852 break;
12853
12854 case FIXED_POINT_TYPE:
12855 if (!(dwarf_version >= 3 || !dwarf_strict))
12856 encoding = DW_ATE_lo_user;
12857 else if (TYPE_UNSIGNED (type))
12858 encoding = DW_ATE_unsigned_fixed;
12859 else
12860 encoding = DW_ATE_signed_fixed;
12861 break;
12862
12863 /* Dwarf2 doesn't know anything about complex ints, so use
12864 a user-defined type for them. */
12865 case COMPLEX_TYPE:
12866 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12867 encoding = DW_ATE_complex_float;
12868 else
12869 encoding = DW_ATE_lo_user;
12870 break;
12871
12872 case BOOLEAN_TYPE:
12873 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12874 encoding = DW_ATE_boolean;
12875 break;
12876
12877 default:
12878 /* No other TREE_CODEs are Dwarf fundamental types. */
12879 gcc_unreachable ();
12880 }
12881
12882 base_type_result = new_die_raw (DW_TAG_base_type);
12883
12884 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12885 int_size_in_bytes (type));
12886 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12887
12888 if (need_endianity_attribute_p (reverse))
12889 add_AT_unsigned (base_type_result, DW_AT_endianity,
12890 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12891
12892 add_alignment_attribute (base_type_result, type);
12893
12894 if (fpt_used)
12895 {
12896 switch (fpt_info.scale_factor_kind)
12897 {
12898 case fixed_point_scale_factor_binary:
12899 add_AT_int (base_type_result, DW_AT_binary_scale,
12900 fpt_info.scale_factor.binary);
12901 break;
12902
12903 case fixed_point_scale_factor_decimal:
12904 add_AT_int (base_type_result, DW_AT_decimal_scale,
12905 fpt_info.scale_factor.decimal);
12906 break;
12907
12908 case fixed_point_scale_factor_arbitrary:
12909 /* Arbitrary scale factors cannot be described in standard DWARF,
12910 yet. */
12911 if (!dwarf_strict)
12912 {
12913 /* Describe the scale factor as a rational constant. */
12914 const dw_die_ref scale_factor
12915 = new_die (DW_TAG_constant, comp_unit_die (), type);
12916
12917 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12918 fpt_info.scale_factor.arbitrary.numerator);
12919 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12920 fpt_info.scale_factor.arbitrary.denominator);
12921
12922 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12923 }
12924 break;
12925
12926 default:
12927 gcc_unreachable ();
12928 }
12929 }
12930
12931 if (type_bias)
12932 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12933 dw_scalar_form_constant
12934 | dw_scalar_form_exprloc
12935 | dw_scalar_form_reference,
12936 NULL);
12937
12938 return base_type_result;
12939 }
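
/* An illustrative sketch of the DW_ATE_* choice base_type_die makes above
   for plain integer types, with the properties it reads from the tree
   passed in as booleans instead; not part of GCC, and
   choose_integer_encoding is a hypothetical name.  */

static enum dwarf_type
choose_integer_encoding (bool is_utf, bool is_character, bool is_unsigned)
{
  if (is_utf)
    return DW_ATE_UTF;		/* char16_t / char32_t.  */
  if (is_character)
    return is_unsigned ? DW_ATE_unsigned_char : DW_ATE_signed_char;
  return is_unsigned ? DW_ATE_unsigned : DW_ATE_signed;
}
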
12940
12941 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12942 named 'auto' in its type: return true for it, false otherwise. */
12943
12944 static inline bool
12945 is_cxx_auto (tree type)
12946 {
12947 if (is_cxx ())
12948 {
12949 tree name = TYPE_IDENTIFIER (type);
12950 if (name == get_identifier ("auto")
12951 || name == get_identifier ("decltype(auto)"))
12952 return true;
12953 }
12954 return false;
12955 }
12956
12957 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12958 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12959
12960 static inline int
12961 is_base_type (tree type)
12962 {
12963 switch (TREE_CODE (type))
12964 {
12965 case INTEGER_TYPE:
12966 case REAL_TYPE:
12967 case FIXED_POINT_TYPE:
12968 case COMPLEX_TYPE:
12969 case BOOLEAN_TYPE:
12970 return 1;
12971
12972 case VOID_TYPE:
12973 case ARRAY_TYPE:
12974 case RECORD_TYPE:
12975 case UNION_TYPE:
12976 case QUAL_UNION_TYPE:
12977 case ENUMERAL_TYPE:
12978 case FUNCTION_TYPE:
12979 case METHOD_TYPE:
12980 case POINTER_TYPE:
12981 case REFERENCE_TYPE:
12982 case NULLPTR_TYPE:
12983 case OFFSET_TYPE:
12984 case LANG_TYPE:
12985 case VECTOR_TYPE:
12986 return 0;
12987
12988 default:
12989 if (is_cxx_auto (type))
12990 return 0;
12991 gcc_unreachable ();
12992 }
12993
12994 return 0;
12995 }
12996
12997 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12998 node, return the size in bits for the type if it is a constant, or else
12999 return the alignment for the type if the type's size is not constant, or
13000 else return BITS_PER_WORD if the type actually turns out to be an
13001 ERROR_MARK node. */
13002
13003 static inline unsigned HOST_WIDE_INT
13004 simple_type_size_in_bits (const_tree type)
13005 {
13006 if (TREE_CODE (type) == ERROR_MARK)
13007 return BITS_PER_WORD;
13008 else if (TYPE_SIZE (type) == NULL_TREE)
13009 return 0;
13010 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13011 return tree_to_uhwi (TYPE_SIZE (type));
13012 else
13013 return TYPE_ALIGN (type);
13014 }
13015
13016 /* Similarly, but return an offset_int instead of UHWI. */
13017
13018 static inline offset_int
13019 offset_int_type_size_in_bits (const_tree type)
13020 {
13021 if (TREE_CODE (type) == ERROR_MARK)
13022 return BITS_PER_WORD;
13023 else if (TYPE_SIZE (type) == NULL_TREE)
13024 return 0;
13025 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13026 return wi::to_offset (TYPE_SIZE (type));
13027 else
13028 return TYPE_ALIGN (type);
13029 }
13030
13031 /* Given a pointer to a tree node for a subrange type, return a pointer
13032 to a DIE that describes the given type. */
13033
13034 static dw_die_ref
13035 subrange_type_die (tree type, tree low, tree high, tree bias,
13036 dw_die_ref context_die)
13037 {
13038 dw_die_ref subrange_die;
13039 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13040
13041 if (context_die == NULL)
13042 context_die = comp_unit_die ();
13043
13044 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13045
13046 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13047 {
13048 /* The sizes of the subrange type and its base type do not match,
13049 so we need to generate a size attribute for the subrange type. */
13050 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13051 }
13052
13053 add_alignment_attribute (subrange_die, type);
13054
13055 if (low)
13056 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13057 if (high)
13058 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13059 if (bias && !dwarf_strict)
13060 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13061 dw_scalar_form_constant
13062 | dw_scalar_form_exprloc
13063 | dw_scalar_form_reference,
13064 NULL);
13065
13066 return subrange_die;
13067 }
13068
13069 /* Returns the (const and/or volatile) cv_qualifiers associated with
13070 the decl node. This will normally be augmented with the
13071 cv_qualifiers of the underlying type in add_type_attribute. */
13072
13073 static int
13074 decl_quals (const_tree decl)
13075 {
13076 return ((TREE_READONLY (decl)
13077 /* The C++ front-end correctly marks reference-typed
13078 variables as readonly, but from a language (and debug
13079 info) standpoint they are not const-qualified. */
13080 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13081 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13082 | (TREE_THIS_VOLATILE (decl)
13083 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13084 }
13085
13086 /* Determine the TYPE whose qualifiers match the largest strict subset
13087 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13088 qualifiers outside QUAL_MASK. */
13089
13090 static int
13091 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13092 {
13093 tree t;
13094 int best_rank = 0, best_qual = 0, max_rank;
13095
13096 type_quals &= qual_mask;
13097 max_rank = popcount_hwi (type_quals) - 1;
13098
13099 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13100 t = TYPE_NEXT_VARIANT (t))
13101 {
13102 int q = TYPE_QUALS (t) & qual_mask;
13103
13104 if ((q & type_quals) == q && q != type_quals
13105 && check_base_type (t, type))
13106 {
13107 int rank = popcount_hwi (q);
13108
13109 if (rank > best_rank)
13110 {
13111 best_rank = rank;
13112 best_qual = q;
13113 }
13114 }
13115 }
13116
13117 return best_qual;
13118 }
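
/* The search above boils down to a bitmask problem: among candidate
   qualifier sets, find the one that is a strict subset of TYPE_QUALS with
   the most bits set.  The following is an illustrative sketch only, not
   part of GCC; the walk over type variants is replaced by an array of
   candidate masks and best_strict_subset_quals is a hypothetical name.  */

static int
best_strict_subset_quals (int type_quals, const int *candidates,
			  unsigned int n_candidates)
{
  int best_qual = 0, best_rank = 0;

  for (unsigned int i = 0; i < n_candidates; i++)
    {
      int q = candidates[i];

      /* Accept only strict subsets of TYPE_QUALS ...  */
      if ((q & type_quals) != q || q == type_quals)
	continue;

      /* ... and keep the one with the most qualifier bits set.  */
      int rank = __builtin_popcount (q);
      if (rank > best_rank)
	{
	  best_rank = rank;
	  best_qual = q;
	}
    }
  return best_qual;
}
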
13119
13120 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13121 static const dwarf_qual_info_t dwarf_qual_info[] =
13122 {
13123 { TYPE_QUAL_CONST, DW_TAG_const_type },
13124 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13125 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13126 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13127 };
13128 static const unsigned int dwarf_qual_info_size
13129 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13130
13131 /* If DIE is a qualified DIE of some base DIE with the same parent,
13132 return the base DIE, otherwise return NULL. Set MASK to the
13133 qualifiers added compared to the returned DIE. */
13134
13135 static dw_die_ref
13136 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13137 {
13138 unsigned int i;
13139 for (i = 0; i < dwarf_qual_info_size; i++)
13140 if (die->die_tag == dwarf_qual_info[i].t)
13141 break;
13142 if (i == dwarf_qual_info_size)
13143 return NULL;
13144 if (vec_safe_length (die->die_attr) != 1)
13145 return NULL;
13146 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13147 if (type == NULL || type->die_parent != die->die_parent)
13148 return NULL;
13149 *mask |= dwarf_qual_info[i].q;
13150 if (depth)
13151 {
13152 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13153 if (ret)
13154 return ret;
13155 }
13156 return type;
13157 }
13158
13159 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13160 entry that chains the modifiers specified by CV_QUALS in front of the
13161 given type. REVERSE is true if the type is to be interpreted in the
13162 reverse storage order wrt the target order. */
13163
13164 static dw_die_ref
13165 modified_type_die (tree type, int cv_quals, bool reverse,
13166 dw_die_ref context_die)
13167 {
13168 enum tree_code code = TREE_CODE (type);
13169 dw_die_ref mod_type_die;
13170 dw_die_ref sub_die = NULL;
13171 tree item_type = NULL;
13172 tree qualified_type;
13173 tree name, low, high;
13174 dw_die_ref mod_scope;
13175 /* Only these cv-qualifiers are currently handled. */
13176 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13177 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13178 ENCODE_QUAL_ADDR_SPACE(~0U));
13179 const bool reverse_base_type
13180 = need_endianity_attribute_p (reverse) && is_base_type (type);
13181
13182 if (code == ERROR_MARK)
13183 return NULL;
13184
13185 if (lang_hooks.types.get_debug_type)
13186 {
13187 tree debug_type = lang_hooks.types.get_debug_type (type);
13188
13189 if (debug_type != NULL_TREE && debug_type != type)
13190 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13191 }
13192
13193 cv_quals &= cv_qual_mask;
13194
13195 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13196 tag modifier (and not an attribute) that old consumers won't be
13197 able to handle. */
13198 if (dwarf_version < 3)
13199 cv_quals &= ~TYPE_QUAL_RESTRICT;
13200
13201 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13202 if (dwarf_version < 5)
13203 cv_quals &= ~TYPE_QUAL_ATOMIC;
13204
13205 /* See if we already have the appropriately qualified variant of
13206 this type. */
13207 qualified_type = get_qualified_type (type, cv_quals);
13208
13209 if (qualified_type == sizetype)
13210 {
13211 /* Try not to expose the internal sizetype type's name. */
13212 if (TYPE_NAME (qualified_type)
13213 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13214 {
13215 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13216
13217 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13218 && (TYPE_PRECISION (t)
13219 == TYPE_PRECISION (qualified_type))
13220 && (TYPE_UNSIGNED (t)
13221 == TYPE_UNSIGNED (qualified_type)));
13222 qualified_type = t;
13223 }
13224 else if (qualified_type == sizetype
13225 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13226 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13227 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13228 qualified_type = size_type_node;
13229 if (type == sizetype)
13230 type = qualified_type;
13231 }
13232
13233 /* If we do, then we can just use its DIE, if it exists. */
13234 if (qualified_type)
13235 {
13236 mod_type_die = lookup_type_die (qualified_type);
13237
13238 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13239 dealt with specially: the DIE with the attribute, if it exists, is
13240 placed immediately after the regular DIE for the same base type. */
13241 if (mod_type_die
13242 && (!reverse_base_type
13243 || ((mod_type_die = mod_type_die->die_sib) != NULL
13244 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13245 return mod_type_die;
13246 }
13247
13248 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13249
13250 /* Handle C typedef types. */
13251 if (name
13252 && TREE_CODE (name) == TYPE_DECL
13253 && DECL_ORIGINAL_TYPE (name)
13254 && !DECL_ARTIFICIAL (name))
13255 {
13256 tree dtype = TREE_TYPE (name);
13257
13258 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13259 if (qualified_type == dtype && !reverse_base_type)
13260 {
13261 tree origin = decl_ultimate_origin (name);
13262
13263 /* Typedef variants that have an abstract origin don't get their own
13264 type DIE (see gen_typedef_die), so fall back on the ultimate
13265 abstract origin instead. */
13266 if (origin != NULL && origin != name)
13267 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13268 context_die);
13269
13270 /* For a named type, use the typedef. */
13271 gen_type_die (qualified_type, context_die);
13272 return lookup_type_die (qualified_type);
13273 }
13274 else
13275 {
13276 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13277 dquals &= cv_qual_mask;
13278 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13279 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13280 /* cv-unqualified version of named type. Just use
13281 the unnamed type to which it refers. */
13282 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13283 reverse, context_die);
13284 /* Else cv-qualified version of named type; fall through. */
13285 }
13286 }
13287
13288 mod_scope = scope_die_for (type, context_die);
13289
13290 if (cv_quals)
13291 {
13292 int sub_quals = 0, first_quals = 0;
13293 unsigned i;
13294 dw_die_ref first = NULL, last = NULL;
13295
13296 /* Determine a lesser qualified type that most closely matches
13297 this one. Then generate DW_TAG_* entries for the remaining
13298 qualifiers. */
13299 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13300 cv_qual_mask);
13301 if (sub_quals && use_debug_types)
13302 {
13303 bool needed = false;
13304 /* If emitting type units, make sure the order of qualifiers
13305 is canonical. Thus, start from the unqualified type if
13306 an earlier qualifier is missing from sub_quals but some later
13307 one is present there. */
13308 for (i = 0; i < dwarf_qual_info_size; i++)
13309 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13310 needed = true;
13311 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13312 {
13313 sub_quals = 0;
13314 break;
13315 }
13316 }
13317 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13318 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13319 {
13320 /* As not all intermediate qualified DIEs have corresponding
13321 tree types, ensure that qualified DIEs in the same scope
13322 as their DW_AT_type are emitted after their DW_AT_type,
13323 only with other qualified DIEs for the same type possibly
13324 in between them. Determine the range of such qualified
13325 DIEs now (first being the base type, last being the corresponding
13326 last qualified DIE for it). */
13327 unsigned int count = 0;
13328 first = qualified_die_p (mod_type_die, &first_quals,
13329 dwarf_qual_info_size);
13330 if (first == NULL)
13331 first = mod_type_die;
13332 gcc_assert ((first_quals & ~sub_quals) == 0);
13333 for (count = 0, last = first;
13334 count < (1U << dwarf_qual_info_size);
13335 count++, last = last->die_sib)
13336 {
13337 int quals = 0;
13338 if (last == mod_scope->die_child)
13339 break;
13340 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13341 != first)
13342 break;
13343 }
13344 }
13345
13346 for (i = 0; i < dwarf_qual_info_size; i++)
13347 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13348 {
13349 dw_die_ref d;
13350 if (first && first != last)
13351 {
13352 for (d = first->die_sib; ; d = d->die_sib)
13353 {
13354 int quals = 0;
13355 qualified_die_p (d, &quals, dwarf_qual_info_size);
13356 if (quals == (first_quals | dwarf_qual_info[i].q))
13357 break;
13358 if (d == last)
13359 {
13360 d = NULL;
13361 break;
13362 }
13363 }
13364 if (d)
13365 {
13366 mod_type_die = d;
13367 continue;
13368 }
13369 }
13370 if (first)
13371 {
13372 d = new_die_raw (dwarf_qual_info[i].t);
13373 add_child_die_after (mod_scope, d, last);
13374 last = d;
13375 }
13376 else
13377 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13378 if (mod_type_die)
13379 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13380 mod_type_die = d;
13381 first_quals |= dwarf_qual_info[i].q;
13382 }
13383 }
13384 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13385 {
13386 dwarf_tag tag = DW_TAG_pointer_type;
13387 if (code == REFERENCE_TYPE)
13388 {
13389 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13390 tag = DW_TAG_rvalue_reference_type;
13391 else
13392 tag = DW_TAG_reference_type;
13393 }
13394 mod_type_die = new_die (tag, mod_scope, type);
13395
13396 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13397 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13398 add_alignment_attribute (mod_type_die, type);
13399 item_type = TREE_TYPE (type);
13400
13401 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13402 if (!ADDR_SPACE_GENERIC_P (as))
13403 {
13404 int action = targetm.addr_space.debug (as);
13405 if (action >= 0)
13406 {
13407 /* Positive values indicate an address_class. */
13408 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13409 }
13410 else
13411 {
13412 /* Negative values indicate an (inverted) segment base reg. */
13413 dw_loc_descr_ref d
13414 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13415 add_AT_loc (mod_type_die, DW_AT_segment, d);
13416 }
13417 }
13418 }
13419 else if (code == INTEGER_TYPE
13420 && TREE_TYPE (type) != NULL_TREE
13421 && subrange_type_for_debug_p (type, &low, &high))
13422 {
13423 tree bias = NULL_TREE;
13424 if (lang_hooks.types.get_type_bias)
13425 bias = lang_hooks.types.get_type_bias (type);
13426 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13427 item_type = TREE_TYPE (type);
13428 }
13429 else if (is_base_type (type))
13430 {
13431 mod_type_die = base_type_die (type, reverse);
13432
13433 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13434 if (reverse_base_type)
13435 {
13436 dw_die_ref after_die
13437 = modified_type_die (type, cv_quals, false, context_die);
13438 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13439 }
13440 else
13441 add_child_die (comp_unit_die (), mod_type_die);
13442
13443 add_pubtype (type, mod_type_die);
13444 }
13445 else
13446 {
13447 gen_type_die (type, context_die);
13448
13449 /* We have to get the type_main_variant here (and pass that to the
13450 `lookup_type_die' routine) because the ..._TYPE node we have
13451 might simply be a *copy* of some original type node (where the
13452 copy was created to help us keep track of typedef names) and
13453 that copy might have a different TYPE_UID from the original
13454 ..._TYPE node. */
13455 if (TREE_CODE (type) == FUNCTION_TYPE
13456 || TREE_CODE (type) == METHOD_TYPE)
13457 {
13458 /* For function/method types, we can't just use type_main_variant here,
13459 because that can have different ref-qualifiers for C++,
13460 but try to canonicalize. */
13461 tree main = TYPE_MAIN_VARIANT (type);
13462 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13463 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13464 && check_base_type (t, main)
13465 && check_lang_type (t, type))
13466 return lookup_type_die (t);
13467 return lookup_type_die (type);
13468 }
13469 else if (TREE_CODE (type) != VECTOR_TYPE
13470 && TREE_CODE (type) != ARRAY_TYPE)
13471 return lookup_type_die (type_main_variant (type));
13472 else
13473 /* Vectors have the debugging information in the type,
13474 not the main variant. */
13475 return lookup_type_die (type);
13476 }
13477
13478 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13479 don't output a DW_TAG_typedef, since there isn't one in the
13480 user's program; just attach a DW_AT_name to the type.
13481 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13482 if the base type already has the same name. */
13483 if (name
13484 && ((TREE_CODE (name) != TYPE_DECL
13485 && (qualified_type == TYPE_MAIN_VARIANT (type)
13486 || (cv_quals == TYPE_UNQUALIFIED)))
13487 || (TREE_CODE (name) == TYPE_DECL
13488 && TREE_TYPE (name) == qualified_type
13489 && DECL_NAME (name))))
13490 {
13491 if (TREE_CODE (name) == TYPE_DECL)
13492 /* Could just call add_name_and_src_coords_attributes here,
13493 but since this is a builtin type it doesn't have any
13494 useful source coordinates anyway. */
13495 name = DECL_NAME (name);
13496 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13497 }
13498 /* This probably indicates a bug. */
13499 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13500 {
13501 name = TYPE_IDENTIFIER (type);
13502 add_name_attribute (mod_type_die,
13503 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13504 }
13505
13506 if (qualified_type && !reverse_base_type)
13507 equate_type_number_to_die (qualified_type, mod_type_die);
13508
13509 if (item_type)
13510 /* We must do this after the equate_type_number_to_die call, in case
13511 this is a recursive type. This ensures that the modified_type_die
13512 recursion will terminate even if the type is recursive. Recursive
13513 types are possible in Ada. */
13514 sub_die = modified_type_die (item_type,
13515 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13516 reverse,
13517 context_die);
13518
13519 if (sub_die != NULL)
13520 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13521
13522 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13523 if (TYPE_ARTIFICIAL (type))
13524 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13525
13526 return mod_type_die;
13527 }
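
/* An illustrative sketch of the canonical qualifier nesting the loop above
   produces, ignoring the sub_quals reuse; it is not part of GCC and
   qual_chain_tags is a hypothetical name, while dwarf_qual_info is the
   table defined earlier.  Given a mask of TYPE_QUAL_* bits it fills TAGS,
   outermost DIE first, and returns how many qualifier DIEs would be
   chained in front of the underlying type.  */

static unsigned int
qual_chain_tags (int cv_quals, enum dwarf_tag *tags)
{
  unsigned int n = 0;

  /* The loop above creates the DIE for dwarf_qual_info[0] first, so the
     last qualifier in the table that is present ends up outermost.  */
  for (unsigned int i = dwarf_qual_info_size; i-- > 0; )
    if (dwarf_qual_info[i].q & cv_quals)
      tags[n++] = dwarf_qual_info[i].t;
  return n;
}

/* For example, both "const volatile int" and "volatile const int" end up
   as DW_TAG_volatile_type -> DW_TAG_const_type -> the DIE for "int".  */
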
13528
13529 /* Generate DIEs for the generic parameters of T.
13530 T must be either a generic type or a generic function.
13531 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13532
13533 static void
13534 gen_generic_params_dies (tree t)
13535 {
13536 tree parms, args;
13537 int parms_num, i;
13538 dw_die_ref die = NULL;
13539 int non_default;
13540
13541 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13542 return;
13543
13544 if (TYPE_P (t))
13545 die = lookup_type_die (t);
13546 else if (DECL_P (t))
13547 die = lookup_decl_die (t);
13548
13549 gcc_assert (die);
13550
13551 parms = lang_hooks.get_innermost_generic_parms (t);
13552 if (!parms)
13553 /* T has no generic parameters, which means T is neither a generic
13554 type nor a generic function. End of story. */
13555 return;
13556
13557 parms_num = TREE_VEC_LENGTH (parms);
13558 args = lang_hooks.get_innermost_generic_args (t);
13559 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13560 non_default = int_cst_value (TREE_CHAIN (args));
13561 else
13562 non_default = TREE_VEC_LENGTH (args);
13563 for (i = 0; i < parms_num; i++)
13564 {
13565 tree parm, arg, arg_pack_elems;
13566 dw_die_ref parm_die;
13567
13568 parm = TREE_VEC_ELT (parms, i);
13569 arg = TREE_VEC_ELT (args, i);
13570 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13571 gcc_assert (parm && TREE_VALUE (parm) && arg);
13572
13573 if (parm && TREE_VALUE (parm) && arg)
13574 {
13575 /* If PARM represents a template parameter pack,
13576 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13577 by DW_TAG_template_*_parameter DIEs for the argument
13578 pack elements of ARG. Note that ARG would then be
13579 an argument pack. */
13580 if (arg_pack_elems)
13581 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13582 arg_pack_elems,
13583 die);
13584 else
13585 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13586 true /* emit name */, die);
13587 if (i >= non_default)
13588 add_AT_flag (parm_die, DW_AT_default_value, 1);
13589 }
13590 }
13591 }
13592
13593 /* Create and return a DIE for PARM which should be
13594 the representation of a generic type parameter.
13595 For instance, in the C++ front end, PARM would be a template parameter.
13596 ARG is the argument to PARM.
13597 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13598 the name of PARM.
13599 PARENT_DIE is the parent DIE to which the newly created DIE should be
13600 added, as a child node. */
13601
13602 static dw_die_ref
13603 generic_parameter_die (tree parm, tree arg,
13604 bool emit_name_p,
13605 dw_die_ref parent_die)
13606 {
13607 dw_die_ref tmpl_die = NULL;
13608 const char *name = NULL;
13609
13610 /* C++2a accepts class literals as template parameters, and var
13611 decls with initializers represent them. The VAR_DECLs would be
13612 rejected, but we can take the DECL_INITIAL constructor and
13613 attempt to expand it. */
13614 if (arg && VAR_P (arg))
13615 arg = DECL_INITIAL (arg);
13616
13617 if (!parm || !DECL_NAME (parm) || !arg)
13618 return NULL;
13619
13620 /* We support non-type generic parameters and arguments,
13621 type generic parameters and arguments, as well as
13622 generic generic parameters (a.k.a. template template parameters in C++)
13623 and arguments. */
13624 if (TREE_CODE (parm) == PARM_DECL)
13625 /* PARM is a nontype generic parameter */
13626 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13627 else if (TREE_CODE (parm) == TYPE_DECL)
13628 /* PARM is a type generic parameter. */
13629 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13630 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13631 /* PARM is a generic generic parameter.
13632 Its DIE is a GNU extension. It shall have a
13633 DW_AT_name attribute to represent the name of the template template
13634 parameter, and a DW_AT_GNU_template_name attribute to represent the
13635 name of the template template argument. */
13636 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13637 parent_die, parm);
13638 else
13639 gcc_unreachable ();
13640
13641 if (tmpl_die)
13642 {
13643 tree tmpl_type;
13644
13645 /* If PARM is a generic parameter pack, it means we are
13646 emitting debug info for a template argument pack element.
13647 In other words, ARG is a template argument pack element.
13648 In that case, we don't emit any DW_AT_name attribute for
13649 the die. */
13650 if (emit_name_p)
13651 {
13652 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13653 gcc_assert (name);
13654 add_AT_string (tmpl_die, DW_AT_name, name);
13655 }
13656
13657 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13658 {
13659 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13660 TMPL_DIE should have a child DW_AT_type attribute that is set
13661 to the type of the argument to PARM, which is ARG.
13662 If PARM is a type generic parameter, TMPL_DIE should have a
13663 child DW_AT_type that is set to ARG. */
13664 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13665 add_type_attribute (tmpl_die, tmpl_type,
13666 (TREE_THIS_VOLATILE (tmpl_type)
13667 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13668 false, parent_die);
13669 }
13670 else
13671 {
13672 /* So TMPL_DIE is a DIE representing a generic generic parameter,
13673 a.k.a. a template template parameter in C++, and ARG is a
13674 template. */
13675
13676 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13677 to the name of the argument. */
13678 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13679 if (name)
13680 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13681 }
13682
13683 if (TREE_CODE (parm) == PARM_DECL)
13684 /* So PARM is a non-type generic parameter.
13685 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13686 attribute of TMPL_DIE whose value represents the value
13687 of ARG.
13688 We must be careful here:
13689 the value of ARG might reference some function decls.
13690 We might currently be emitting debug info for a generic
13691 type, and since types are emitted before function decls, we
13692 don't know whether the function decls referenced by ARG will
13693 actually be emitted after cgraph computations.
13694 So we must defer the generation of the DW_AT_const_value until
13695 after cgraph is ready. */
13696 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13697 }
13698
13699 return tmpl_die;
13700 }
13701
13702 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13703 PARM_PACK, which must be a template parameter pack. The returned DIE
13704 will be a child DIE of PARENT_DIE. */
13705
13706 static dw_die_ref
13707 template_parameter_pack_die (tree parm_pack,
13708 tree parm_pack_args,
13709 dw_die_ref parent_die)
13710 {
13711 dw_die_ref die;
13712 int j;
13713
13714 gcc_assert (parent_die && parm_pack);
13715
13716 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13717 add_name_and_src_coords_attributes (die, parm_pack);
13718 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13719 generic_parameter_die (parm_pack,
13720 TREE_VEC_ELT (parm_pack_args, j),
13721 false /* Don't emit DW_AT_name */,
13722 die);
13723 return die;
13724 }
13725
13726 /* Return the DBX register number described by a given RTL node. */
13727
13728 static unsigned int
13729 dbx_reg_number (const_rtx rtl)
13730 {
13731 unsigned regno = REGNO (rtl);
13732
13733 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13734
13735 #ifdef LEAF_REG_REMAP
13736 if (crtl->uses_only_leaf_regs)
13737 {
13738 int leaf_reg = LEAF_REG_REMAP (regno);
13739 if (leaf_reg != -1)
13740 regno = (unsigned) leaf_reg;
13741 }
13742 #endif
13743
13744 regno = DBX_REGISTER_NUMBER (regno);
13745 gcc_assert (regno != INVALID_REGNUM);
13746 return regno;
13747 }
13748
13749 /* Optionally add a DW_OP_piece term to a location description expression.
13750 DW_OP_piece is only added if the location description expression doesn't
13751 already end with DW_OP_piece. */
13752
13753 static void
13754 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13755 {
13756 dw_loc_descr_ref loc;
13757
13758 if (*list_head != NULL)
13759 {
13760 /* Find the end of the chain. */
13761 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13762 ;
13763
13764 if (loc->dw_loc_opc != DW_OP_piece)
13765 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13766 }
13767 }
13768
13769 /* Return a location descriptor that designates a machine register or
13770 zero if there is none. */
13771
13772 static dw_loc_descr_ref
13773 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13774 {
13775 rtx regs;
13776
13777 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13778 return 0;
13779
13780 /* We only use "frame base" when we're sure we're talking about the
13781 post-prologue local stack frame. We do this by *not* running
13782 register elimination until this point, and recognizing the special
13783 argument pointer and soft frame pointer rtx's.
13784 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13785 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13786 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13787 {
13788 dw_loc_descr_ref result = NULL;
13789
13790 if (dwarf_version >= 4 || !dwarf_strict)
13791 {
13792 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13793 initialized);
13794 if (result)
13795 add_loc_descr (&result,
13796 new_loc_descr (DW_OP_stack_value, 0, 0));
13797 }
13798 return result;
13799 }
13800
13801 regs = targetm.dwarf_register_span (rtl);
13802
13803 if (REG_NREGS (rtl) > 1 || regs)
13804 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13805 else
13806 {
13807 unsigned int dbx_regnum = dbx_reg_number (rtl);
13808 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13809 return 0;
13810 return one_reg_loc_descriptor (dbx_regnum, initialized);
13811 }
13812 }
13813
13814 /* Return a location descriptor that designates a machine register for
13815 a given hard register number. */
13816
13817 static dw_loc_descr_ref
13818 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13819 {
13820 dw_loc_descr_ref reg_loc_descr;
13821
13822 if (regno <= 31)
13823 reg_loc_descr
13824 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13825 else
13826 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13827
13828 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13829 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13830
13831 return reg_loc_descr;
13832 }
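
/* Registers 0-31 get the one-byte opcodes DW_OP_reg0 .. DW_OP_reg31, while
   anything higher needs DW_OP_regx followed by a uleb128 register number;
   that is the choice one_reg_loc_descriptor makes above.  The following is
   an illustrative sketch of the raw bytes only, not part of GCC, and
   encode_reg_op is a hypothetical name.  Returns the bytes written.  */

static size_t
encode_reg_op (unsigned char *buf, unsigned int regno)
{
  size_t n = 0;

  if (regno <= 31)
    /* DW_OP_reg0 .. DW_OP_reg31 encode the register in the opcode itself. */
    buf[n++] = DW_OP_reg0 + regno;
  else
    {
      /* DW_OP_regx takes the register number as a uleb128 operand.  */
      buf[n++] = DW_OP_regx;
      do
	{
	  unsigned char byte = regno & 0x7f;
	  regno >>= 7;
	  if (regno)
	    byte |= 0x80;
	  buf[n++] = byte;
	}
      while (regno);
    }
  return n;
}
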
13833
13834 /* Given an RTL of a register, return a location descriptor that
13835 designates a value that spans more than one register. */
13836
13837 static dw_loc_descr_ref
13838 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13839 enum var_init_status initialized)
13840 {
13841 int size, i;
13842 dw_loc_descr_ref loc_result = NULL;
13843
13844 /* Simple, contiguous registers. */
13845 if (regs == NULL_RTX)
13846 {
13847 unsigned reg = REGNO (rtl);
13848 int nregs;
13849
13850 #ifdef LEAF_REG_REMAP
13851 if (crtl->uses_only_leaf_regs)
13852 {
13853 int leaf_reg = LEAF_REG_REMAP (reg);
13854 if (leaf_reg != -1)
13855 reg = (unsigned) leaf_reg;
13856 }
13857 #endif
13858
13859 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13860 nregs = REG_NREGS (rtl);
13861
13862 /* At present we only track constant-sized pieces. */
13863 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13864 return NULL;
13865 size /= nregs;
13866
13867 loc_result = NULL;
13868 while (nregs--)
13869 {
13870 dw_loc_descr_ref t;
13871
13872 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13873 VAR_INIT_STATUS_INITIALIZED);
13874 add_loc_descr (&loc_result, t);
13875 add_loc_descr_op_piece (&loc_result, size);
13876 ++reg;
13877 }
13878 return loc_result;
13879 }
13880
13881 /* Now onto stupid register sets in non-contiguous locations. */
13882
13883 gcc_assert (GET_CODE (regs) == PARALLEL);
13884
13885 /* At present we only track constant-sized pieces. */
13886 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13887 return NULL;
13888 loc_result = NULL;
13889
13890 for (i = 0; i < XVECLEN (regs, 0); ++i)
13891 {
13892 dw_loc_descr_ref t;
13893
13894 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13895 VAR_INIT_STATUS_INITIALIZED);
13896 add_loc_descr (&loc_result, t);
13897 add_loc_descr_op_piece (&loc_result, size);
13898 }
13899
13900 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13901 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13902 return loc_result;
13903 }
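
/* A value spread over several hard registers is described above as
   "register, DW_OP_piece size" repeated, each piece giving the byte count
   that register contributes.  The following is an illustrative sketch of
   the sequence built by the contiguous-register branch above, not part of
   GCC; toy_loc_op and describe_reg_pieces are hypothetical names.  OPS
   must have room for 2 * NREGS entries.  */

struct toy_loc_op { enum dwarf_location_atom opc; unsigned long operand; };

static unsigned int
describe_reg_pieces (struct toy_loc_op *ops, unsigned int first_regno,
		     unsigned int nregs, unsigned int piece_size)
{
  unsigned int n = 0;
  for (unsigned int i = 0; i < nregs; i++)
    {
      unsigned int regno = first_regno + i;

      /* Designate the register (small registers use DW_OP_reg0+N with no
	 operand, larger ones DW_OP_regx with the regno as operand) ...  */
      ops[n].opc = (regno <= 31
		    ? (enum dwarf_location_atom) (DW_OP_reg0 + regno)
		    : DW_OP_regx);
      ops[n++].operand = regno <= 31 ? 0 : regno;

      /* ... and say how many bytes of the value it holds.  */
      ops[n].opc = DW_OP_piece;
      ops[n++].operand = piece_size;
    }
  return n;
}
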
13904
13905 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13906
13907 /* Return a location descriptor that designates a constant i,
13908 as a compound operation from constant (i >> shift), constant shift
13909 and DW_OP_shl. */
13910
13911 static dw_loc_descr_ref
13912 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13913 {
13914 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13915 add_loc_descr (&ret, int_loc_descriptor (shift));
13916 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13917 return ret;
13918 }
13919
13920 /* Return a location descriptor that designates constant POLY_I. */
13921
13922 static dw_loc_descr_ref
13923 int_loc_descriptor (poly_int64 poly_i)
13924 {
13925 enum dwarf_location_atom op;
13926
13927 HOST_WIDE_INT i;
13928 if (!poly_i.is_constant (&i))
13929 {
13930 /* Create location descriptions for the non-constant part and
13931 add any constant offset at the end. */
13932 dw_loc_descr_ref ret = NULL;
13933 HOST_WIDE_INT constant = poly_i.coeffs[0];
13934 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13935 {
13936 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13937 if (coeff != 0)
13938 {
13939 dw_loc_descr_ref start = ret;
13940 unsigned int factor;
13941 int bias;
13942 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13943 (j, &factor, &bias);
13944
13945 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13946 add COEFF * (REGNO / FACTOR) now and subtract
13947 COEFF * BIAS from the final constant part. */
13948 constant -= coeff * bias;
13949 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13950 if (coeff % factor == 0)
13951 coeff /= factor;
13952 else
13953 {
13954 int amount = exact_log2 (factor);
13955 gcc_assert (amount >= 0);
13956 add_loc_descr (&ret, int_loc_descriptor (amount));
13957 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13958 }
13959 if (coeff != 1)
13960 {
13961 add_loc_descr (&ret, int_loc_descriptor (coeff));
13962 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13963 }
13964 if (start)
13965 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13966 }
13967 }
13968 loc_descr_plus_const (&ret, constant);
13969 return ret;
13970 }
13971
13972 /* Pick the smallest representation of a constant, rather than just
13973 defaulting to the LEB encoding. */
13974 if (i >= 0)
13975 {
13976 int clz = clz_hwi (i);
13977 int ctz = ctz_hwi (i);
13978 if (i <= 31)
13979 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13980 else if (i <= 0xff)
13981 op = DW_OP_const1u;
13982 else if (i <= 0xffff)
13983 op = DW_OP_const2u;
13984 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13985 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13986 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13987 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13988 while DW_OP_const4u is 5 bytes. */
13989 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13990 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13991 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13992 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13993 while DW_OP_const4u is 5 bytes. */
13994 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13995
13996 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13997 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13998 <= 4)
13999 {
14000 /* As i >= 2**31, the double cast above will yield a negative number.
14001 Since wrapping is defined in DWARF expressions we can output big
14002 positive integers as small negative ones, regardless of the size
14003 of host wide ints.
14004
14005 Here, since the evaluator will handle 32-bit values and since i >=
14006 2**31, we know it's going to be interpreted as a negative literal:
14007 store it this way if we can do better than 5 bytes this way. */
14008 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14009 }
14010 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14011 op = DW_OP_const4u;
14012
14013 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14014 least 6 bytes: see if we can do better before falling back to it. */
14015 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14016 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14017 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14018 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14019 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14020 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14021 >= HOST_BITS_PER_WIDE_INT)
14022 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14023 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14024 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14025 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14026 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14027 && size_of_uleb128 (i) > 6)
14028 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14029 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14030 else
14031 op = DW_OP_constu;
14032 }
14033 else
14034 {
14035 if (i >= -0x80)
14036 op = DW_OP_const1s;
14037 else if (i >= -0x8000)
14038 op = DW_OP_const2s;
14039 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14040 {
14041 if (size_of_int_loc_descriptor (i) < 5)
14042 {
14043 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14044 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14045 return ret;
14046 }
14047 op = DW_OP_const4s;
14048 }
14049 else
14050 {
14051 if (size_of_int_loc_descriptor (i)
14052 < (unsigned long) 1 + size_of_sleb128 (i))
14053 {
14054 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14055 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14056 return ret;
14057 }
14058 op = DW_OP_consts;
14059 }
14060 }
14061
14062 return new_loc_descr (op, i, 0);
14063 }
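
/* A worked example of the "shift trick" above, assuming 64-bit
   HOST_WIDE_INTs: for i = 1 << 62, DW_OP_constu would need 1 + 9 = 10
   bytes (a 9-byte uleb128), while DW_OP_lit16 DW_OP_const1u 58 DW_OP_shl
   needs only 1 + 2 + 1 = 4 bytes, because i == 16 << 58.  This is an
   illustrative sketch only, not part of GCC; shift_trick_saves_bytes is a
   hypothetical name.  */

static bool
shift_trick_saves_bytes (void)
{
  unsigned long long i = 1ULL << 62;
  unsigned long long v = i;
  unsigned int uleb_bytes = 0;

  /* Count the uleb128 bytes a plain DW_OP_constu operand would take.  */
  do
    {
      v >>= 7;
      uleb_bytes++;
    }
  while (v);

  unsigned int constu_size = 1 + uleb_bytes;	      /* DW_OP_constu i: 10.  */
  unsigned int shift_size = 1 /* DW_OP_lit16 */
			    + 2 /* DW_OP_const1u 58 */
			    + 1 /* DW_OP_shl */;      /* 4.  */
  return (i >> 58) == 16 && shift_size < constu_size;
}
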
14064
14065 /* Likewise, for unsigned constants. */
14066
14067 static dw_loc_descr_ref
14068 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14069 {
14070 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14071 const unsigned HOST_WIDE_INT max_uint
14072 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14073
14074 /* If possible, use the clever signed constants handling. */
14075 if (i <= max_int)
14076 return int_loc_descriptor ((HOST_WIDE_INT) i);
14077
14078 /* Here, we are left with positive numbers that cannot be represented as
14079 HOST_WIDE_INT, i.e.:
14080 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14081
14082 Using a DW_OP_const4u/const8u/constu operation to encode them consumes a
14083 lot of bytes, whereas it may be better to output a negative integer: thanks to integer
14084 wrapping, we know that:
14085 x = x - 2 ** DWARF2_ADDR_SIZE
14086 = x - 2 * (max (HOST_WIDE_INT) + 1)
14087 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14088 small negative integers. Let's try that in cases where it will clearly improve
14089 the encoding: there is no gain turning DW_OP_const4u into
14090 DW_OP_const4s. */
14091 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14092 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14093 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14094 {
14095 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14096
14097 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14098 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14099 const HOST_WIDE_INT second_shift
14100 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14101
14102 /* So we finally have:
14103 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14104 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14105 return int_loc_descriptor (second_shift);
14106 }
14107
14108 /* Last chance: fallback to a simple constant operation. */
14109 return new_loc_descr
14110 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14111 ? DW_OP_const4u
14112 : DW_OP_const8u,
14113 i, 0);
14114 }
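
/* A worked example of the negative-encoding trick above, assuming
   DWARF2_ADDR_SIZE == 8 and 64-bit HOST_WIDE_INTs: for
   i = 0xffffffffffffff00 the code computes second_shift == -256, so the
   value is emitted as DW_OP_const2s -256 (3 bytes) rather than
   DW_OP_const8u i (9 bytes), and under DWARF's modular arithmetic the two
   denote the same 64-bit value.  This is an illustrative sketch only, not
   part of GCC; wraparound_trick_example is a hypothetical name.  */

static bool
wraparound_trick_example (void)
{
  const unsigned long long max_int = 0x7fffffffffffffffULL;
  const unsigned long long i = 0xffffffffffffff00ULL;

  /* Mirrors the two shifts above, step by step.  */
  unsigned long long first_shift = i - max_int - 1;	   /* 2^63 - 256.  */
  long long second_shift
    = (long long) first_shift - (long long) max_int - 1;  /* -256.  */

  /* The DWARF stack is DWARF2_ADDR_SIZE bytes wide, so the sign-extended
     -256 and the original unsigned value coincide.  */
  return second_shift == -256
	 && (unsigned long long) second_shift == i;
}
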
14115
14116 /* Generate and return a location description that computes the unsigned
14117 comparison of the two stack top entries (a OP b where b is the top-most
14118 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14119 LE_EXPR, GT_EXPR or GE_EXPR. */
14120
14121 static dw_loc_descr_ref
14122 uint_comparison_loc_list (enum tree_code kind)
14123 {
14124 enum dwarf_location_atom op, flip_op;
14125 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14126
14127 switch (kind)
14128 {
14129 case LT_EXPR:
14130 op = DW_OP_lt;
14131 break;
14132 case LE_EXPR:
14133 op = DW_OP_le;
14134 break;
14135 case GT_EXPR:
14136 op = DW_OP_gt;
14137 break;
14138 case GE_EXPR:
14139 op = DW_OP_ge;
14140 break;
14141 default:
14142 gcc_unreachable ();
14143 }
14144
14145 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14146 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14147
14148 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14149 possible to perform unsigned comparisons: we just have to distinguish
14150 three cases:
14151
14152 1. when a and b have the same sign (as signed integers); then we should
14153 return: a OP(signed) b;
14154
14155 2. when a is a negative signed integer while b is a positive one, then a
14156 is a greater unsigned integer than b; likewise when a and b's roles
14157 are flipped.
14158
14159 So first, compare the sign of the two operands. */
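/* Roughly, the sequence built below is:
       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
       <signed OP> DW_OP_skip <L2>
   L1: <flipped signed OP>
   L2: DW_OP_nop  */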
14160 ret = new_loc_descr (DW_OP_over, 0, 0);
14161 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14162 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14163 /* If they have different signs (i.e. they have different sign bits), then
14164 the stack top value has now the sign bit set and thus it's smaller than
14165 zero. */
14166 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14167 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14168 add_loc_descr (&ret, bra_node);
14169
14170 /* We are in case 1. At this point, we know both operands have the same
14171 sign, so it's safe to use the built-in signed comparison. */
14172 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14173 add_loc_descr (&ret, jmp_node);
14174
14175 /* We are in case 2. Here, we know both operands do not have the same sign,
14176 so we have to flip the signed comparison. */
14177 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14178 tmp = new_loc_descr (flip_op, 0, 0);
14179 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14180 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14181 add_loc_descr (&ret, tmp);
14182
14183 /* This dummy operation is necessary to make the two branches join. */
14184 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14185 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14186 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14187 add_loc_descr (&ret, tmp);
14188
14189 return ret;
14190 }
14191
14192 /* Likewise, but takes the location description lists (might be destructive on
14193 them). Return NULL if either is NULL or if concatenation fails. */
14194
14195 static dw_loc_list_ref
14196 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14197 enum tree_code kind)
14198 {
14199 if (left == NULL || right == NULL)
14200 return NULL;
14201
14202 add_loc_list (&left, right);
14203 if (left == NULL)
14204 return NULL;
14205
14206 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14207 return left;
14208 }
14209
14210 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14211 without actually allocating it. */
14212
14213 static unsigned long
14214 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14215 {
14216 return size_of_int_loc_descriptor (i >> shift)
14217 + size_of_int_loc_descriptor (shift)
14218 + 1;
14219 }
14220
14221 /* Return size_of_locs (int_loc_descriptor (i)) without
14222 actually allocating it. */
14223
14224 static unsigned long
14225 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14226 {
14227 unsigned long s;
14228
14229 if (i >= 0)
14230 {
14231 int clz, ctz;
14232 if (i <= 31)
14233 return 1;
14234 else if (i <= 0xff)
14235 return 2;
14236 else if (i <= 0xffff)
14237 return 3;
14238 clz = clz_hwi (i);
14239 ctz = ctz_hwi (i);
14240 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14241 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14242 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14243 - clz - 5);
14244 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14245 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14246 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14247 - clz - 8);
14248 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14249 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14250 <= 4)
14251 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14252 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14253 return 5;
14254 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14255 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14256 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14257 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14258 - clz - 8);
14259 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14260 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14261 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14262 - clz - 16);
14263 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14264 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14265 && s > 6)
14266 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14267 - clz - 32);
14268 else
14269 return 1 + s;
14270 }
14271 else
14272 {
14273 if (i >= -0x80)
14274 return 2;
14275 else if (i >= -0x8000)
14276 return 3;
14277 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14278 {
14279 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14280 {
14281 s = size_of_int_loc_descriptor (-i) + 1;
14282 if (s < 5)
14283 return s;
14284 }
14285 return 5;
14286 }
14287 else
14288 {
14289 unsigned long r = 1 + size_of_sleb128 (i);
14290 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14291 {
14292 s = size_of_int_loc_descriptor (-i) + 1;
14293 if (s < r)
14294 return s;
14295 }
14296 return r;
14297 }
14298 }
14299 }
14300
14301 /* Return loc description representing "address" of integer value.
14302 This can appear only as a top-level expression. */
14303
14304 static dw_loc_descr_ref
14305 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14306 {
14307 int litsize;
14308 dw_loc_descr_ref loc_result = NULL;
14309
14310 if (!(dwarf_version >= 4 || !dwarf_strict))
14311 return NULL;
14312
14313 litsize = size_of_int_loc_descriptor (i);
14314 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14315 is more compact. For DW_OP_stack_value we need:
14316 litsize + 1 (DW_OP_stack_value)
14317 and for DW_OP_implicit_value:
14318 1 (DW_OP_implicit_value) + 1 (length) + size. */
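  /* For instance, with size == 4 and i == 5 (and DWARF2_ADDR_SIZE >= 4),
     DW_OP_lit5 DW_OP_stack_value takes 2 bytes whereas
     DW_OP_implicit_value 4 <5> takes 6, so the former is used.  */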
14319 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14320 {
14321 loc_result = int_loc_descriptor (i);
14322 add_loc_descr (&loc_result,
14323 new_loc_descr (DW_OP_stack_value, 0, 0));
14324 return loc_result;
14325 }
14326
14327 loc_result = new_loc_descr (DW_OP_implicit_value,
14328 size, 0);
14329 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14330 loc_result->dw_loc_oprnd2.v.val_int = i;
14331 return loc_result;
14332 }
14333
14334 /* Return a location descriptor that designates a base+offset location. */
14335
14336 static dw_loc_descr_ref
14337 based_loc_descr (rtx reg, poly_int64 offset,
14338 enum var_init_status initialized)
14339 {
14340 unsigned int regno;
14341 dw_loc_descr_ref result;
14342 dw_fde_ref fde = cfun->fde;
14343
14344 /* We only use "frame base" when we're sure we're talking about the
14345 post-prologue local stack frame. We do this by *not* running
14346 register elimination until this point, and recognizing the special
14347 argument pointer and soft frame pointer rtx's. */
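  /* E.g. a local living at the frame base plus 8 will typically end up below
     as DW_OP_fbreg 8, with the enclosing subprogram's DW_AT_frame_base
     describing what the frame base itself is.  */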
14348 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14349 {
14350 rtx elim = (ira_use_lra_p
14351 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14352 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14353
14354 if (elim != reg)
14355 {
14356 /* Allow the hard frame pointer here even if the frame pointer
14357 isn't used, since the hard frame pointer is encoded with
14358 DW_OP_fbreg, which uses the DW_AT_frame_base attribute rather
14359 than the hard frame pointer directly. */
14360 elim = strip_offset_and_add (elim, &offset);
14361 gcc_assert (elim == hard_frame_pointer_rtx
14362 || elim == stack_pointer_rtx);
14363
14364 /* If drap register is used to align stack, use frame
14365 pointer + offset to access stack variables. If stack
14366 is aligned without drap, use stack pointer + offset to
14367 access stack variables. */
14368 if (crtl->stack_realign_tried
14369 && reg == frame_pointer_rtx)
14370 {
14371 int base_reg
14372 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14373 ? HARD_FRAME_POINTER_REGNUM
14374 : REGNO (elim));
14375 return new_reg_loc_descr (base_reg, offset);
14376 }
14377
14378 gcc_assert (frame_pointer_fb_offset_valid);
14379 offset += frame_pointer_fb_offset;
14380 HOST_WIDE_INT const_offset;
14381 if (offset.is_constant (&const_offset))
14382 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14383 else
14384 {
14385 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14386 loc_descr_plus_const (&ret, offset);
14387 return ret;
14388 }
14389 }
14390 }
14391
14392 regno = REGNO (reg);
14393 #ifdef LEAF_REG_REMAP
14394 if (crtl->uses_only_leaf_regs)
14395 {
14396 int leaf_reg = LEAF_REG_REMAP (regno);
14397 if (leaf_reg != -1)
14398 regno = (unsigned) leaf_reg;
14399 }
14400 #endif
14401 regno = DWARF_FRAME_REGNUM (regno);
14402
14403 HOST_WIDE_INT const_offset;
14404 if (!optimize && fde
14405 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14406 && offset.is_constant (&const_offset))
14407 {
14408 /* Use cfa+offset to represent the location of arguments passed
14409 on the stack when drap is used to align stack.
14410 Only do this when not optimizing, for optimized code var-tracking
14411 is supposed to track where the arguments live and the register
14412 used as vdrap or drap in some spot might be used for something
14413 else in another part of the routine. */
14414 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14415 }
14416
14417 result = new_reg_loc_descr (regno, offset);
14418
14419 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14420 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14421
14422 return result;
14423 }
14424
14425 /* Return true if this RTL expression describes a base+offset calculation. */
14426
14427 static inline int
14428 is_based_loc (const_rtx rtl)
14429 {
14430 return (GET_CODE (rtl) == PLUS
14431 && ((REG_P (XEXP (rtl, 0))
14432 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14433 && CONST_INT_P (XEXP (rtl, 1)))));
14434 }
14435
14436 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14437 failed. */
14438
14439 static dw_loc_descr_ref
14440 tls_mem_loc_descriptor (rtx mem)
14441 {
14442 tree base;
14443 dw_loc_descr_ref loc_result;
14444
14445 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14446 return NULL;
14447
14448 base = get_base_address (MEM_EXPR (mem));
14449 if (base == NULL
14450 || !VAR_P (base)
14451 || !DECL_THREAD_LOCAL_P (base))
14452 return NULL;
14453
14454 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14455 if (loc_result == NULL)
14456 return NULL;
14457
14458 if (maybe_ne (MEM_OFFSET (mem), 0))
14459 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14460
14461 return loc_result;
14462 }
14463
14464 /* Output debug info about why we failed to expand an expression as a dwarf
14465 expression. */
14466
14467 static void
14468 expansion_failed (tree expr, rtx rtl, char const *reason)
14469 {
14470 if (dump_file && (dump_flags & TDF_DETAILS))
14471 {
14472 fprintf (dump_file, "Failed to expand as dwarf: ");
14473 if (expr)
14474 print_generic_expr (dump_file, expr, dump_flags);
14475 if (rtl)
14476 {
14477 fprintf (dump_file, "\n");
14478 print_rtl (dump_file, rtl);
14479 }
14480 fprintf (dump_file, "\nReason: %s\n", reason);
14481 }
14482 }
14483
14484 /* Helper function for const_ok_for_output. */
14485
14486 static bool
14487 const_ok_for_output_1 (rtx rtl)
14488 {
14489 if (targetm.const_not_ok_for_debug_p (rtl))
14490 {
14491 if (GET_CODE (rtl) != UNSPEC)
14492 {
14493 expansion_failed (NULL_TREE, rtl,
14494 "Expression rejected for debug by the backend.\n");
14495 return false;
14496 }
14497
14498 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14499 the target hook doesn't explicitly allow it in debug info, assume
14500 we can't express it in the debug info. */
14501 /* Don't complain about TLS UNSPECs, those are just too hard to
14502 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14503 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14504 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14505 if (flag_checking
14506 && (XVECLEN (rtl, 0) == 0
14507 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14508 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14509 inform (current_function_decl
14510 ? DECL_SOURCE_LOCATION (current_function_decl)
14511 : UNKNOWN_LOCATION,
14512 #if NUM_UNSPEC_VALUES > 0
14513 "non-delegitimized UNSPEC %s (%d) found in variable location",
14514 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14515 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14516 #else
14517 "non-delegitimized UNSPEC %d found in variable location",
14518 #endif
14519 XINT (rtl, 1));
14520 expansion_failed (NULL_TREE, rtl,
14521 "UNSPEC hasn't been delegitimized.\n");
14522 return false;
14523 }
14524
14525 if (CONST_POLY_INT_P (rtl))
14526 return false;
14527
14528 /* FIXME: Refer to PR60655. It is possible for simplification
14529 of rtl expressions in var tracking to produce such expressions.
14530 We should really identify / validate expressions
14531 enclosed in CONST that can be handled by assemblers on various
14532 targets and only handle legitimate cases here. */
14533 switch (GET_CODE (rtl))
14534 {
14535 case SYMBOL_REF:
14536 break;
14537 case NOT:
14538 case NEG:
14539 return false;
14540 case PLUS:
14541 {
14542 /* Make sure SYMBOL_REFs/UNSPECs appear in at most one of the
14543 operands. */
14544 subrtx_var_iterator::array_type array;
14545 bool first = false;
14546 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14547 if (SYMBOL_REF_P (*iter)
14548 || LABEL_P (*iter)
14549 || GET_CODE (*iter) == UNSPEC)
14550 {
14551 first = true;
14552 break;
14553 }
14554 if (!first)
14555 return true;
14556 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14557 if (SYMBOL_REF_P (*iter)
14558 || LABEL_P (*iter)
14559 || GET_CODE (*iter) == UNSPEC)
14560 return false;
14561 return true;
14562 }
14563 case MINUS:
14564 {
14565 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14566 appear in the second operand of MINUS. */
14567 subrtx_var_iterator::array_type array;
14568 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14569 if (SYMBOL_REF_P (*iter)
14570 || LABEL_P (*iter)
14571 || GET_CODE (*iter) == UNSPEC)
14572 return false;
14573 return true;
14574 }
14575 default:
14576 return true;
14577 }
14578
14579 if (CONSTANT_POOL_ADDRESS_P (rtl))
14580 {
14581 bool marked;
14582 get_pool_constant_mark (rtl, &marked);
14583 /* If all references to this pool constant were optimized away,
14584 it was not output and thus we can't represent it. */
14585 if (!marked)
14586 {
14587 expansion_failed (NULL_TREE, rtl,
14588 "Constant was removed from constant pool.\n");
14589 return false;
14590 }
14591 }
14592
14593 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14594 return false;
14595
14596 /* Avoid references to external symbols in debug info: on several targets
14597 the linker might even refuse to link when linking a shared library,
14598 and in many other cases the relocations for .debug_info/.debug_loc are
14599 dropped, so the address becomes zero anyway. Hidden symbols, which are
14600 guaranteed to be defined within the same shared library or executable, are fine. */
14601 if (SYMBOL_REF_EXTERNAL_P (rtl))
14602 {
14603 tree decl = SYMBOL_REF_DECL (rtl);
14604
14605 if (decl == NULL || !targetm.binds_local_p (decl))
14606 {
14607 expansion_failed (NULL_TREE, rtl,
14608 "Symbol not defined in current TU.\n");
14609 return false;
14610 }
14611 }
14612
14613 return true;
14614 }
14615
14616 /* Return true if constant RTL can be emitted in DW_OP_addr or
14617 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14618 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14619
14620 static bool
14621 const_ok_for_output (rtx rtl)
14622 {
14623 if (GET_CODE (rtl) == SYMBOL_REF)
14624 return const_ok_for_output_1 (rtl);
14625
14626 if (GET_CODE (rtl) == CONST)
14627 {
14628 subrtx_var_iterator::array_type array;
14629 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14630 if (!const_ok_for_output_1 (*iter))
14631 return false;
14632 return true;
14633 }
14634
14635 return true;
14636 }
14637
14638 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14639 if possible, NULL otherwise. */
14640
14641 static dw_die_ref
14642 base_type_for_mode (machine_mode mode, bool unsignedp)
14643 {
14644 dw_die_ref type_die;
14645 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14646
14647 if (type == NULL)
14648 return NULL;
14649 switch (TREE_CODE (type))
14650 {
14651 case INTEGER_TYPE:
14652 case REAL_TYPE:
14653 break;
14654 default:
14655 return NULL;
14656 }
14657 type_die = lookup_type_die (type);
14658 if (!type_die)
14659 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14660 comp_unit_die ());
14661 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14662 return NULL;
14663 return type_die;
14664 }
14665
14666 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14667 type matching MODE, or, if MODE is narrower than or as wide as
14668 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14669 possible. */
14670
14671 static dw_loc_descr_ref
14672 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14673 {
14674 machine_mode outer_mode = mode;
14675 dw_die_ref type_die;
14676 dw_loc_descr_ref cvt;
14677
14678 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14679 {
14680 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14681 return op;
14682 }
14683 type_die = base_type_for_mode (outer_mode, 1);
14684 if (type_die == NULL)
14685 return NULL;
14686 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14687 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14688 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14689 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14690 add_loc_descr (&op, cvt);
14691 return op;
14692 }
14693
14694 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14695
14696 static dw_loc_descr_ref
14697 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14698 dw_loc_descr_ref op1)
14699 {
14700 dw_loc_descr_ref ret = op0;
14701 add_loc_descr (&ret, op1);
14702 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14703 if (STORE_FLAG_VALUE != 1)
14704 {
14705 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14706 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14707 }
14708 return ret;
14709 }
14710
14711 /* Subroutine of scompare_loc_descriptor for the case in which we're
14712 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14713 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14714
14715 static dw_loc_descr_ref
14716 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14717 scalar_int_mode op_mode,
14718 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14719 {
14720 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14721 dw_loc_descr_ref cvt;
14722
14723 if (type_die == NULL)
14724 return NULL;
14725 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14726 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14727 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14728 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14729 add_loc_descr (&op0, cvt);
14730 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14731 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14732 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14733 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14734 add_loc_descr (&op1, cvt);
14735 return compare_loc_descriptor (op, op0, op1);
14736 }
14737
14738 /* Subroutine of scompare_loc_descriptor for the case in which we're
14739 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14740 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14741
14742 static dw_loc_descr_ref
14743 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14744 scalar_int_mode op_mode,
14745 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14746 {
14747 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14748 /* For eq/ne, if the operands are known to be zero-extended,
14749 there is no need to do the fancy shifting up. */
14750 if (op == DW_OP_eq || op == DW_OP_ne)
14751 {
14752 dw_loc_descr_ref last0, last1;
14753 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14754 ;
14755 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14756 ;
14757 /* deref_size zero extends, and for constants we can check
14758 whether they are zero extended or not. */
14759 if (((last0->dw_loc_opc == DW_OP_deref_size
14760 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14761 || (CONST_INT_P (XEXP (rtl, 0))
14762 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14763 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14764 && ((last1->dw_loc_opc == DW_OP_deref_size
14765 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14766 || (CONST_INT_P (XEXP (rtl, 1))
14767 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14768 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14769 return compare_loc_descriptor (op, op0, op1);
14770
14771 /* EQ/NE comparison against constant in narrower type than
14772 DWARF2_ADDR_SIZE can be performed either as
14773 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14774 DW_OP_{eq,ne}
14775 or
14776 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14777 DW_OP_{eq,ne}. Pick whatever is shorter. */
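/* E.g. for a QImode comparison against 5 on a 64-bit target, masking
   with 0xff and comparing against DW_OP_lit5 comes out shorter than
   shifting both operands up by 56 bits first.  */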
14778 if (CONST_INT_P (XEXP (rtl, 1))
14779 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14780 && (size_of_int_loc_descriptor (shift) + 1
14781 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14782 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14783 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14784 & GET_MODE_MASK (op_mode))))
14785 {
14786 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14787 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14788 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14789 & GET_MODE_MASK (op_mode));
14790 return compare_loc_descriptor (op, op0, op1);
14791 }
14792 }
14793 add_loc_descr (&op0, int_loc_descriptor (shift));
14794 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14795 if (CONST_INT_P (XEXP (rtl, 1)))
14796 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14797 else
14798 {
14799 add_loc_descr (&op1, int_loc_descriptor (shift));
14800 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14801 }
14802 return compare_loc_descriptor (op, op0, op1);
14803 }
14804
14805 /* Return location descriptor for signed comparison OP RTL. */
14806
14807 static dw_loc_descr_ref
14808 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14809 machine_mode mem_mode)
14810 {
14811 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14812 dw_loc_descr_ref op0, op1;
14813
14814 if (op_mode == VOIDmode)
14815 op_mode = GET_MODE (XEXP (rtl, 1));
14816 if (op_mode == VOIDmode)
14817 return NULL;
14818
14819 scalar_int_mode int_op_mode;
14820 if (dwarf_strict
14821 && dwarf_version < 5
14822 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14823 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14824 return NULL;
14825
14826 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14827 VAR_INIT_STATUS_INITIALIZED);
14828 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14829 VAR_INIT_STATUS_INITIALIZED);
14830
14831 if (op0 == NULL || op1 == NULL)
14832 return NULL;
14833
14834 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14835 {
14836 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14837 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14838
14839 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14840 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14841 }
14842 return compare_loc_descriptor (op, op0, op1);
14843 }
14844
14845 /* Return location descriptor for unsigned comparison OP RTL. */
14846
14847 static dw_loc_descr_ref
14848 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14849 machine_mode mem_mode)
14850 {
14851 dw_loc_descr_ref op0, op1;
14852
14853 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14854 if (test_op_mode == VOIDmode)
14855 test_op_mode = GET_MODE (XEXP (rtl, 1));
14856
14857 scalar_int_mode op_mode;
14858 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14859 return NULL;
14860
14861 if (dwarf_strict
14862 && dwarf_version < 5
14863 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14864 return NULL;
14865
14866 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14867 VAR_INIT_STATUS_INITIALIZED);
14868 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14869 VAR_INIT_STATUS_INITIALIZED);
14870
14871 if (op0 == NULL || op1 == NULL)
14872 return NULL;
14873
14874 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14875 {
14876 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14877 dw_loc_descr_ref last0, last1;
14878 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14879 ;
14880 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14881 ;
14882 if (CONST_INT_P (XEXP (rtl, 0)))
14883 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14884 /* deref_size zero extends, so no need to mask it again. */
14885 else if (last0->dw_loc_opc != DW_OP_deref_size
14886 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14887 {
14888 add_loc_descr (&op0, int_loc_descriptor (mask));
14889 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14890 }
14891 if (CONST_INT_P (XEXP (rtl, 1)))
14892 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14893 /* deref_size zero extends, so no need to mask it again. */
14894 else if (last1->dw_loc_opc != DW_OP_deref_size
14895 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14896 {
14897 add_loc_descr (&op1, int_loc_descriptor (mask));
14898 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14899 }
14900 }
14901 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14902 {
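/* Adding 2**(N-1) modulo 2**N (which is what the untyped DWARF stack
   arithmetic does for N-bit addresses) flips the sign bit of both
   operands, so a signed comparison of the biased values yields the
   unsigned comparison of the originals.  */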
14903 HOST_WIDE_INT bias = 1;
14904 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14905 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14906 if (CONST_INT_P (XEXP (rtl, 1)))
14907 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14908 + INTVAL (XEXP (rtl, 1)));
14909 else
14910 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14911 bias, 0));
14912 }
14913 return compare_loc_descriptor (op, op0, op1);
14914 }
14915
14916 /* Return location descriptor for {U,S}{MIN,MAX}. */
14917
14918 static dw_loc_descr_ref
14919 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14920 machine_mode mem_mode)
14921 {
14922 enum dwarf_location_atom op;
14923 dw_loc_descr_ref op0, op1, ret;
14924 dw_loc_descr_ref bra_node, drop_node;
14925
14926 scalar_int_mode int_mode;
14927 if (dwarf_strict
14928 && dwarf_version < 5
14929 && (!is_a <scalar_int_mode> (mode, &int_mode)
14930 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14931 return NULL;
14932
14933 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14934 VAR_INIT_STATUS_INITIALIZED);
14935 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14936 VAR_INIT_STATUS_INITIALIZED);
14937
14938 if (op0 == NULL || op1 == NULL)
14939 return NULL;
14940
14941 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14942 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14943 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14944 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14945 {
14946 /* Checked by the caller. */
14947 int_mode = as_a <scalar_int_mode> (mode);
14948 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14949 {
14950 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14951 add_loc_descr (&op0, int_loc_descriptor (mask));
14952 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14953 add_loc_descr (&op1, int_loc_descriptor (mask));
14954 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14955 }
14956 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14957 {
14958 HOST_WIDE_INT bias = 1;
14959 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14960 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14961 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14962 }
14963 }
14964 else if (is_a <scalar_int_mode> (mode, &int_mode)
14965 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14966 {
14967 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14968 add_loc_descr (&op0, int_loc_descriptor (shift));
14969 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14970 add_loc_descr (&op1, int_loc_descriptor (shift));
14971 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14972 }
14973 else if (is_a <scalar_int_mode> (mode, &int_mode)
14974 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14975 {
14976 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14977 dw_loc_descr_ref cvt;
14978 if (type_die == NULL)
14979 return NULL;
14980 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14981 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14982 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14983 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14984 add_loc_descr (&op0, cvt);
14985 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14986 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14987 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14988 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14989 add_loc_descr (&op1, cvt);
14990 }
14991
14992 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14993 op = DW_OP_lt;
14994 else
14995 op = DW_OP_gt;
14996 ret = op0;
14997 add_loc_descr (&ret, op1);
14998 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14999 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15000 add_loc_descr (&ret, bra_node);
15001 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15002 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15003 add_loc_descr (&ret, drop_node);
15004 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15005 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15006 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15007 && is_a <scalar_int_mode> (mode, &int_mode)
15008 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15009 ret = convert_descriptor_to_mode (int_mode, ret);
15010 return ret;
15011 }
15012
15013 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15014 after converting both arguments to TYPE_DIE, then convert the result
15015 back to an unsigned (or untyped) value of MODE. */
15016
15017 static dw_loc_descr_ref
15018 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15019 scalar_int_mode mode, machine_mode mem_mode)
15020 {
15021 dw_loc_descr_ref cvt, op0, op1;
15022
15023 if (type_die == NULL)
15024 return NULL;
15025 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15026 VAR_INIT_STATUS_INITIALIZED);
15027 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15028 VAR_INIT_STATUS_INITIALIZED);
15029 if (op0 == NULL || op1 == NULL)
15030 return NULL;
15031 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15032 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15033 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15034 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15035 add_loc_descr (&op0, cvt);
15036 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15037 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15038 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15039 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15040 add_loc_descr (&op1, cvt);
15041 add_loc_descr (&op0, op1);
15042 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15043 return convert_descriptor_to_mode (mode, op0);
15044 }
15045
15046 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15047 const0 is DW_OP_lit0 or corresponding typed constant,
15048 const1 is DW_OP_lit1 or corresponding typed constant
15049 and constMSB is constant with just the MSB bit set
15050 for the mode):
15051 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15052 L1: const0 DW_OP_swap
15053 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15054 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15055 L3: DW_OP_drop
15056 L4: DW_OP_nop
15057
15058 CTZ is similar:
15059 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15060 L1: const0 DW_OP_swap
15061 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15062 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15063 L3: DW_OP_drop
15064 L4: DW_OP_nop
15065
15066 FFS is similar:
15067 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15068 L1: const1 DW_OP_swap
15069 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15070 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15071 L3: DW_OP_drop
15072 L4: DW_OP_nop */
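/* For instance, a 32-bit CLZ of 0x00f00000 shifts the value left, bumping
   the counter, until the most significant bit becomes set; here that takes
   eight shifts, leaving 8 on the stack, which is indeed clz (0x00f00000).  */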
15073
15074 static dw_loc_descr_ref
15075 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15076 machine_mode mem_mode)
15077 {
15078 dw_loc_descr_ref op0, ret, tmp;
15079 HOST_WIDE_INT valv;
15080 dw_loc_descr_ref l1jump, l1label;
15081 dw_loc_descr_ref l2jump, l2label;
15082 dw_loc_descr_ref l3jump, l3label;
15083 dw_loc_descr_ref l4jump, l4label;
15084 rtx msb;
15085
15086 if (GET_MODE (XEXP (rtl, 0)) != mode)
15087 return NULL;
15088
15089 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15090 VAR_INIT_STATUS_INITIALIZED);
15091 if (op0 == NULL)
15092 return NULL;
15093 ret = op0;
15094 if (GET_CODE (rtl) == CLZ)
15095 {
15096 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15097 valv = GET_MODE_BITSIZE (mode);
15098 }
15099 else if (GET_CODE (rtl) == FFS)
15100 valv = 0;
15101 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15102 valv = GET_MODE_BITSIZE (mode);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15104 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15105 add_loc_descr (&ret, l1jump);
15106 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15107 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15108 VAR_INIT_STATUS_INITIALIZED);
15109 if (tmp == NULL)
15110 return NULL;
15111 add_loc_descr (&ret, tmp);
15112 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15113 add_loc_descr (&ret, l4jump);
15114 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15115 ? const1_rtx : const0_rtx,
15116 mode, mem_mode,
15117 VAR_INIT_STATUS_INITIALIZED);
15118 if (l1label == NULL)
15119 return NULL;
15120 add_loc_descr (&ret, l1label);
15121 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15122 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15123 add_loc_descr (&ret, l2label);
15124 if (GET_CODE (rtl) != CLZ)
15125 msb = const1_rtx;
15126 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15127 msb = GEN_INT (HOST_WIDE_INT_1U
15128 << (GET_MODE_BITSIZE (mode) - 1));
15129 else
15130 msb = immed_wide_int_const
15131 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15132 GET_MODE_PRECISION (mode)), mode);
15133 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15134 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15135 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15136 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15137 else
15138 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15139 VAR_INIT_STATUS_INITIALIZED);
15140 if (tmp == NULL)
15141 return NULL;
15142 add_loc_descr (&ret, tmp);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15144 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15145 add_loc_descr (&ret, l3jump);
15146 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15147 VAR_INIT_STATUS_INITIALIZED);
15148 if (tmp == NULL)
15149 return NULL;
15150 add_loc_descr (&ret, tmp);
15151 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15152 ? DW_OP_shl : DW_OP_shr, 0, 0));
15153 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15154 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15155 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15156 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15157 add_loc_descr (&ret, l2jump);
15158 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15159 add_loc_descr (&ret, l3label);
15160 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15161 add_loc_descr (&ret, l4label);
15162 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15163 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15164 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15165 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15166 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15167 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15168 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15169 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15170 return ret;
15171 }
15172
15173 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15174 const1 is DW_OP_lit1 or corresponding typed constant):
15175 const0 DW_OP_swap
15176 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15177 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15178 L2: DW_OP_drop
15179
15180 PARITY is similar:
15181 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15182 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15183 L2: DW_OP_drop */
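/* In each pass the low bit of the remaining value is added to (POPCOUNT) or
   XORed into (PARITY) the accumulator kept beneath it on the stack, and the
   value is then shifted right by one bit.  */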
15184
15185 static dw_loc_descr_ref
15186 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15187 machine_mode mem_mode)
15188 {
15189 dw_loc_descr_ref op0, ret, tmp;
15190 dw_loc_descr_ref l1jump, l1label;
15191 dw_loc_descr_ref l2jump, l2label;
15192
15193 if (GET_MODE (XEXP (rtl, 0)) != mode)
15194 return NULL;
15195
15196 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15197 VAR_INIT_STATUS_INITIALIZED);
15198 if (op0 == NULL)
15199 return NULL;
15200 ret = op0;
15201 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15202 VAR_INIT_STATUS_INITIALIZED);
15203 if (tmp == NULL)
15204 return NULL;
15205 add_loc_descr (&ret, tmp);
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15207 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15208 add_loc_descr (&ret, l1label);
15209 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15210 add_loc_descr (&ret, l2jump);
15211 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15213 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15214 VAR_INIT_STATUS_INITIALIZED);
15215 if (tmp == NULL)
15216 return NULL;
15217 add_loc_descr (&ret, tmp);
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15219 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15220 ? DW_OP_plus : DW_OP_xor, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15222 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15223 VAR_INIT_STATUS_INITIALIZED);
15224 add_loc_descr (&ret, tmp);
15225 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15226 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15227 add_loc_descr (&ret, l1jump);
15228 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15229 add_loc_descr (&ret, l2label);
15230 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15231 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15232 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15233 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15234 return ret;
15235 }
15236
15237 /* BSWAP (constS is initial shift count, either 56 or 24):
15238 constS const0
15239 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15240 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15241 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15242 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15243 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
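/* Each pass extracts one byte of the operand, starting with the least
   significant one, and ORs it into the mirrored position of the result, so
   e.g. an SImode value 0x11223344 evaluates to 0x44332211.  */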
15244
15245 static dw_loc_descr_ref
15246 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15247 machine_mode mem_mode)
15248 {
15249 dw_loc_descr_ref op0, ret, tmp;
15250 dw_loc_descr_ref l1jump, l1label;
15251 dw_loc_descr_ref l2jump, l2label;
15252
15253 if (BITS_PER_UNIT != 8
15254 || (GET_MODE_BITSIZE (mode) != 32
15255 && GET_MODE_BITSIZE (mode) != 64))
15256 return NULL;
15257
15258 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15259 VAR_INIT_STATUS_INITIALIZED);
15260 if (op0 == NULL)
15261 return NULL;
15262
15263 ret = op0;
15264 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15265 mode, mem_mode,
15266 VAR_INIT_STATUS_INITIALIZED);
15267 if (tmp == NULL)
15268 return NULL;
15269 add_loc_descr (&ret, tmp);
15270 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15271 VAR_INIT_STATUS_INITIALIZED);
15272 if (tmp == NULL)
15273 return NULL;
15274 add_loc_descr (&ret, tmp);
15275 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15276 add_loc_descr (&ret, l1label);
15277 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15278 mode, mem_mode,
15279 VAR_INIT_STATUS_INITIALIZED);
15280 add_loc_descr (&ret, tmp);
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15282 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15283 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15284 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15285 VAR_INIT_STATUS_INITIALIZED);
15286 if (tmp == NULL)
15287 return NULL;
15288 add_loc_descr (&ret, tmp);
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15295 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15296 VAR_INIT_STATUS_INITIALIZED);
15297 add_loc_descr (&ret, tmp);
15298 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15299 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15300 add_loc_descr (&ret, l2jump);
15301 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15302 VAR_INIT_STATUS_INITIALIZED);
15303 add_loc_descr (&ret, tmp);
15304 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15307 add_loc_descr (&ret, l1jump);
15308 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15309 add_loc_descr (&ret, l2label);
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15312 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15313 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15314 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15315 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15316 return ret;
15317 }
15318
15319 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15320 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15321 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15322 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15323
15324 ROTATERT is similar:
15325 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15326 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15327 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
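/* E.g. a rotate-left by 8 of a 32-bit value x is computed as
   (x << 8) | (x >> 24), with the masking steps only needed when the mode is
   narrower than DWARF2_ADDR_SIZE.  */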
15328
15329 static dw_loc_descr_ref
15330 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15331 machine_mode mem_mode)
15332 {
15333 rtx rtlop1 = XEXP (rtl, 1);
15334 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15335 int i;
15336
15337 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15338 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15339 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15340 VAR_INIT_STATUS_INITIALIZED);
15341 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15342 VAR_INIT_STATUS_INITIALIZED);
15343 if (op0 == NULL || op1 == NULL)
15344 return NULL;
15345 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15346 for (i = 0; i < 2; i++)
15347 {
15348 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15349 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15350 mode, mem_mode,
15351 VAR_INIT_STATUS_INITIALIZED);
15352 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15353 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15354 ? DW_OP_const4u
15355 : HOST_BITS_PER_WIDE_INT == 64
15356 ? DW_OP_const8u : DW_OP_constu,
15357 GET_MODE_MASK (mode), 0);
15358 else
15359 mask[i] = NULL;
15360 if (mask[i] == NULL)
15361 return NULL;
15362 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15363 }
15364 ret = op0;
15365 add_loc_descr (&ret, op1);
15366 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15368 if (GET_CODE (rtl) == ROTATERT)
15369 {
15370 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15372 GET_MODE_BITSIZE (mode), 0));
15373 }
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15375 if (mask[0] != NULL)
15376 add_loc_descr (&ret, mask[0]);
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15378 if (mask[1] != NULL)
15379 {
15380 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15381 add_loc_descr (&ret, mask[1]);
15382 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15383 }
15384 if (GET_CODE (rtl) == ROTATE)
15385 {
15386 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15387 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15388 GET_MODE_BITSIZE (mode), 0));
15389 }
15390 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15391 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15392 return ret;
15393 }
15394
15395 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15396 for DEBUG_PARAMETER_REF RTL. */
15397
15398 static dw_loc_descr_ref
15399 parameter_ref_descriptor (rtx rtl)
15400 {
15401 dw_loc_descr_ref ret;
15402 dw_die_ref ref;
15403
15404 if (dwarf_strict)
15405 return NULL;
15406 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15407 /* With LTO during LTRANS we get the late DIE that refers to the early
15408 DIE, thus we add another indirection here. This seems to confuse
15409 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15410 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15411 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15412 if (ref)
15413 {
15414 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15415 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15416 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15417 }
15418 else
15419 {
15420 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15421 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15422 }
15423 return ret;
15424 }
15425
15426 /* The following routine converts the RTL for a variable or parameter
15427 (resident in memory) into an equivalent Dwarf representation of a
15428 mechanism for getting the address of that same variable onto the top of a
15429 hypothetical "address evaluation" stack.
15430
15431 When creating memory location descriptors, we are effectively transforming
15432 the RTL for a memory-resident object into its Dwarf postfix expression
15433 equivalent. This routine recursively descends an RTL tree, turning
15434 it into Dwarf postfix code as it goes.
15435
15436 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15437
15438 MEM_MODE is the mode of the memory reference, needed to handle some
15439 autoincrement addressing modes.
15440
15441 Return 0 if we can't represent the location. */
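/* For instance, a base register plus a constant offset, e.g.
   (plus (reg) (const_int 16)), is typically rendered as a single
   DW_OP_breg<n> 16 operation (see based_loc_descr above).  */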
15442
15443 dw_loc_descr_ref
15444 mem_loc_descriptor (rtx rtl, machine_mode mode,
15445 machine_mode mem_mode,
15446 enum var_init_status initialized)
15447 {
15448 dw_loc_descr_ref mem_loc_result = NULL;
15449 enum dwarf_location_atom op;
15450 dw_loc_descr_ref op0, op1;
15451 rtx inner = NULL_RTX;
15452 poly_int64 offset;
15453
15454 if (mode == VOIDmode)
15455 mode = GET_MODE (rtl);
15456
15457 /* Note that for a dynamically sized array, the location we will generate a
15458 description of here will be the lowest numbered location which is
15459 actually within the array. That's *not* necessarily the same as the
15460 zeroth element of the array. */
15461
15462 rtl = targetm.delegitimize_address (rtl);
15463
15464 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15465 return NULL;
15466
15467 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15468 switch (GET_CODE (rtl))
15469 {
15470 case POST_INC:
15471 case POST_DEC:
15472 case POST_MODIFY:
15473 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15474
15475 case SUBREG:
15476 /* The case of a subreg may arise when we have a local (register)
15477 variable or a formal (register) parameter which doesn't quite fill
15478 up an entire register. For now, just assume that it is
15479 legitimate to make the Dwarf info refer to the whole register which
15480 contains the given subreg. */
15481 if (!subreg_lowpart_p (rtl))
15482 break;
15483 inner = SUBREG_REG (rtl);
15484 /* FALLTHRU */
15485 case TRUNCATE:
15486 if (inner == NULL_RTX)
15487 inner = XEXP (rtl, 0);
15488 if (is_a <scalar_int_mode> (mode, &int_mode)
15489 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15490 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15491 #ifdef POINTERS_EXTEND_UNSIGNED
15492 || (int_mode == Pmode && mem_mode != VOIDmode)
15493 #endif
15494 )
15495 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15496 {
15497 mem_loc_result = mem_loc_descriptor (inner,
15498 inner_mode,
15499 mem_mode, initialized);
15500 break;
15501 }
15502 if (dwarf_strict && dwarf_version < 5)
15503 break;
15504 if (is_a <scalar_int_mode> (mode, &int_mode)
15505 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15506 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15507 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15508 {
15509 dw_die_ref type_die;
15510 dw_loc_descr_ref cvt;
15511
15512 mem_loc_result = mem_loc_descriptor (inner,
15513 GET_MODE (inner),
15514 mem_mode, initialized);
15515 if (mem_loc_result == NULL)
15516 break;
15517 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15518 if (type_die == NULL)
15519 {
15520 mem_loc_result = NULL;
15521 break;
15522 }
15523 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15524 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15525 else
15526 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15527 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15528 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15529 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15530 add_loc_descr (&mem_loc_result, cvt);
15531 if (is_a <scalar_int_mode> (mode, &int_mode)
15532 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15533 {
15534 /* Convert it to untyped afterwards. */
15535 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15536 add_loc_descr (&mem_loc_result, cvt);
15537 }
15538 }
15539 break;
15540
15541 case REG:
15542 if (!is_a <scalar_int_mode> (mode, &int_mode)
15543 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15544 && rtl != arg_pointer_rtx
15545 && rtl != frame_pointer_rtx
15546 #ifdef POINTERS_EXTEND_UNSIGNED
15547 && (int_mode != Pmode || mem_mode == VOIDmode)
15548 #endif
15549 ))
15550 {
15551 dw_die_ref type_die;
15552 unsigned int dbx_regnum;
15553
15554 if (dwarf_strict && dwarf_version < 5)
15555 break;
15556 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15557 break;
15558 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15559 if (type_die == NULL)
15560 break;
15561
15562 dbx_regnum = dbx_reg_number (rtl);
15563 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15564 break;
15565 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15566 dbx_regnum, 0);
15567 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15568 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15569 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15570 break;
15571 }
15572 /* Whenever a register number forms a part of the description of the
15573 method for calculating the (dynamic) address of a memory resident
15574 object, DWARF rules require the register number be referred to as
15575 a "base register". This distinction is not based in any way upon
15576 what category of register the hardware believes the given register
15577 belongs to. This is strictly DWARF terminology we're dealing with
15578 here. Note that in cases where the location of a memory-resident
15579 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15580 OP_CONST (0)) the actual DWARF location descriptor that we generate
15581 may just be OP_BASEREG (basereg). This may look deceptively like
15582 the object in question was allocated to a register (rather than in
15583 memory) so DWARF consumers need to be aware of the subtle
15584 distinction between OP_REG and OP_BASEREG. */
15585 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15586 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15587 else if (stack_realign_drap
15588 && crtl->drap_reg
15589 && crtl->args.internal_arg_pointer == rtl
15590 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15591 {
15592 /* If RTL is internal_arg_pointer, which has been optimized
15593 out, use DRAP instead. */
15594 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15595 VAR_INIT_STATUS_INITIALIZED);
15596 }
15597 break;
15598
15599 case SIGN_EXTEND:
15600 case ZERO_EXTEND:
15601 if (!is_a <scalar_int_mode> (mode, &int_mode)
15602 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15603 break;
15604 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15605 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15606 if (op0 == 0)
15607 break;
15608 else if (GET_CODE (rtl) == ZERO_EXTEND
15609 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15610 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15611 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15612 to expand zero extend as two shifts instead of
15613 masking. */
15614 && GET_MODE_SIZE (inner_mode) <= 4)
15615 {
15616 mem_loc_result = op0;
15617 add_loc_descr (&mem_loc_result,
15618 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15619 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15620 }
15621 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15622 {
15623 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15624 shift *= BITS_PER_UNIT;
15625 if (GET_CODE (rtl) == SIGN_EXTEND)
15626 op = DW_OP_shra;
15627 else
15628 op = DW_OP_shr;
15629 mem_loc_result = op0;
15630 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15631 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15632 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15633 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15634 }
15635 else if (!dwarf_strict || dwarf_version >= 5)
15636 {
15637 dw_die_ref type_die1, type_die2;
15638 dw_loc_descr_ref cvt;
15639
15640 type_die1 = base_type_for_mode (inner_mode,
15641 GET_CODE (rtl) == ZERO_EXTEND);
15642 if (type_die1 == NULL)
15643 break;
15644 type_die2 = base_type_for_mode (int_mode, 1);
15645 if (type_die2 == NULL)
15646 break;
15647 mem_loc_result = op0;
15648 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15649 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15650 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15651 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15652 add_loc_descr (&mem_loc_result, cvt);
15653 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15654 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15655 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15656 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15657 add_loc_descr (&mem_loc_result, cvt);
15658 }
15659 break;
15660
15661 case MEM:
15662 {
15663 rtx new_rtl = avoid_constant_pool_reference (rtl);
15664 if (new_rtl != rtl)
15665 {
15666 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15667 initialized);
15668 if (mem_loc_result != NULL)
15669 return mem_loc_result;
15670 }
15671 }
15672 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15673 get_address_mode (rtl), mode,
15674 VAR_INIT_STATUS_INITIALIZED);
15675 if (mem_loc_result == NULL)
15676 mem_loc_result = tls_mem_loc_descriptor (rtl);
15677 if (mem_loc_result != NULL)
15678 {
15679 if (!is_a <scalar_int_mode> (mode, &int_mode)
15680 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15681 {
15682 dw_die_ref type_die;
15683 dw_loc_descr_ref deref;
15684 HOST_WIDE_INT size;
15685
15686 if (dwarf_strict && dwarf_version < 5)
15687 return NULL;
15688 if (!GET_MODE_SIZE (mode).is_constant (&size))
15689 return NULL;
15690 type_die
15691 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15692 if (type_die == NULL)
15693 return NULL;
15694 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15695 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15696 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15697 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15698 add_loc_descr (&mem_loc_result, deref);
15699 }
15700 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15701 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15702 else
15703 add_loc_descr (&mem_loc_result,
15704 new_loc_descr (DW_OP_deref_size,
15705 GET_MODE_SIZE (int_mode), 0));
15706 }
15707 break;
15708
15709 case LO_SUM:
15710 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15711
15712 case LABEL_REF:
15713 /* Some ports can transform a symbol ref into a label ref, because
15714 the symbol ref is too far away and has to be dumped into a constant
15715 pool. */
15716 case CONST:
15717 case SYMBOL_REF:
15718 case UNSPEC:
15719 if (!is_a <scalar_int_mode> (mode, &int_mode)
15720 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15721 #ifdef POINTERS_EXTEND_UNSIGNED
15722 && (int_mode != Pmode || mem_mode == VOIDmode)
15723 #endif
15724 ))
15725 break;
15726
15727 if (GET_CODE (rtl) == UNSPEC)
15728 {
15729 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15730 can't express it in the debug info. This can happen e.g. with some
15731 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15732 approves. */
15733 bool not_ok = false;
15734 subrtx_var_iterator::array_type array;
15735 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15736 if (*iter != rtl && !CONSTANT_P (*iter))
15737 {
15738 not_ok = true;
15739 break;
15740 }
15741
15742 if (not_ok)
15743 break;
15744
15745 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15746 if (!const_ok_for_output_1 (*iter))
15747 {
15748 not_ok = true;
15749 break;
15750 }
15751
15752 if (not_ok)
15753 break;
15754
15755 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15756 goto symref;
15757 }
15758
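/* For TLS symbols the emitted expression pushes the DTP-relative address
   of the symbol and then applies DW_OP_form_tls_address (or the older
   DW_OP_GNU_push_tls_address) so the consumer can turn it into an
   address in the current thread.  */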
15759 if (GET_CODE (rtl) == SYMBOL_REF
15760 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15761 {
15762 dw_loc_descr_ref temp;
15763
15764 /* If this is not defined, we have no way to emit the data. */
15765 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15766 break;
15767
15768 temp = new_addr_loc_descr (rtl, dtprel_true);
15769
15770 /* We check for DWARF 5 here because gdb did not implement
15771 DW_OP_form_tls_address until after 7.12. */
15772 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15773 ? DW_OP_form_tls_address
15774 : DW_OP_GNU_push_tls_address),
15775 0, 0);
15776 add_loc_descr (&mem_loc_result, temp);
15777
15778 break;
15779 }
15780
15781 if (!const_ok_for_output (rtl))
15782 {
15783 if (GET_CODE (rtl) == CONST)
15784 switch (GET_CODE (XEXP (rtl, 0)))
15785 {
15786 case NOT:
15787 op = DW_OP_not;
15788 goto try_const_unop;
15789 case NEG:
15790 op = DW_OP_neg;
15791 goto try_const_unop;
15792 try_const_unop:
15793 rtx arg;
15794 arg = XEXP (XEXP (rtl, 0), 0);
15795 if (!CONSTANT_P (arg))
15796 arg = gen_rtx_CONST (int_mode, arg);
15797 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15798 initialized);
15799 if (op0)
15800 {
15801 mem_loc_result = op0;
15802 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15803 }
15804 break;
15805 default:
15806 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15807 mem_mode, initialized);
15808 break;
15809 }
15810 break;
15811 }
15812
15813 symref:
15814 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15815 vec_safe_push (used_rtx_array, rtl);
15816 break;
15817
15818 case CONCAT:
15819 case CONCATN:
15820 case VAR_LOCATION:
15821 case DEBUG_IMPLICIT_PTR:
15822 expansion_failed (NULL_TREE, rtl,
15823 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15824 return 0;
15825
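/* DW_OP_entry_value wraps a nested DWARF expression (stored in
   dw_loc_oprnd1 below) that the consumer evaluates as of the moment the
   containing function was entered; only simple register locations or
   register-based memory locations are accepted here.  */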
15826 case ENTRY_VALUE:
15827 if (dwarf_strict && dwarf_version < 5)
15828 return NULL;
15829 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15830 {
15831 if (!is_a <scalar_int_mode> (mode, &int_mode)
15832 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15833 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15834 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15835 else
15836 {
15837 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15838 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15839 return NULL;
15840 op0 = one_reg_loc_descriptor (dbx_regnum,
15841 VAR_INIT_STATUS_INITIALIZED);
15842 }
15843 }
15844 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15845 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15846 {
15847 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15848 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15849 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15850 return NULL;
15851 }
15852 else
15853 gcc_unreachable ();
15854 if (op0 == NULL)
15855 return NULL;
15856 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15857 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15858 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15859 break;
15860
15861 case DEBUG_PARAMETER_REF:
15862 mem_loc_result = parameter_ref_descriptor (rtl);
15863 break;
15864
15865 case PRE_MODIFY:
15866 /* Extract the PLUS expression nested inside and fall into
15867 PLUS code below. */
15868 rtl = XEXP (rtl, 1);
15869 goto plus;
15870
15871 case PRE_INC:
15872 case PRE_DEC:
15873 /* Turn these into a PLUS expression and fall into the PLUS code
15874 below. */
15875 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15876 gen_int_mode (GET_CODE (rtl) == PRE_INC
15877 ? GET_MODE_UNIT_SIZE (mem_mode)
15878 : -GET_MODE_UNIT_SIZE (mem_mode),
15879 mode));
15880
15881 /* fall through */
15882
15883 case PLUS:
15884 plus:
15885 if (is_based_loc (rtl)
15886 && is_a <scalar_int_mode> (mode, &int_mode)
15887 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15888 || XEXP (rtl, 0) == arg_pointer_rtx
15889 || XEXP (rtl, 0) == frame_pointer_rtx))
15890 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15891 INTVAL (XEXP (rtl, 1)),
15892 VAR_INIT_STATUS_INITIALIZED);
15893 else
15894 {
15895 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15896 VAR_INIT_STATUS_INITIALIZED);
15897 if (mem_loc_result == 0)
15898 break;
15899
15900 if (CONST_INT_P (XEXP (rtl, 1))
15901 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15902 <= DWARF2_ADDR_SIZE))
15903 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15904 else
15905 {
15906 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908 if (op1 == 0)
15909 return NULL;
15910 add_loc_descr (&mem_loc_result, op1);
15911 add_loc_descr (&mem_loc_result,
15912 new_loc_descr (DW_OP_plus, 0, 0));
15913 }
15914 }
15915 break;
15916
15917 /* If a pseudo-reg is optimized away, it is possible for it to
15918 be replaced with a MEM containing a multiply or shift. */
15919 case MINUS:
15920 op = DW_OP_minus;
15921 goto do_binop;
15922
15923 case MULT:
15924 op = DW_OP_mul;
15925 goto do_binop;
15926
15927 case DIV:
15928 if ((!dwarf_strict || dwarf_version >= 5)
15929 && is_a <scalar_int_mode> (mode, &int_mode)
15930 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15931 {
15932 mem_loc_result = typed_binop (DW_OP_div, rtl,
15933 base_type_for_mode (mode, 0),
15934 int_mode, mem_mode);
15935 break;
15936 }
15937 op = DW_OP_div;
15938 goto do_binop;
15939
15940 case UMOD:
15941 op = DW_OP_mod;
15942 goto do_binop;
15943
15944 case ASHIFT:
15945 op = DW_OP_shl;
15946 goto do_shift;
15947
15948 case ASHIFTRT:
15949 op = DW_OP_shra;
15950 goto do_shift;
15951
15952 case LSHIFTRT:
15953 op = DW_OP_shr;
15954 goto do_shift;
15955
15956 do_shift:
15957 if (!is_a <scalar_int_mode> (mode, &int_mode))
15958 break;
15959 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15960 VAR_INIT_STATUS_INITIALIZED);
15961 {
15962 rtx rtlop1 = XEXP (rtl, 1);
15963 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15964 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15965 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15966 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15967 VAR_INIT_STATUS_INITIALIZED);
15968 }
15969
15970 if (op0 == 0 || op1 == 0)
15971 break;
15972
15973 mem_loc_result = op0;
15974 add_loc_descr (&mem_loc_result, op1);
15975 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15976 break;
15977
15978 case AND:
15979 op = DW_OP_and;
15980 goto do_binop;
15981
15982 case IOR:
15983 op = DW_OP_or;
15984 goto do_binop;
15985
15986 case XOR:
15987 op = DW_OP_xor;
15988 goto do_binop;
15989
15990 do_binop:
15991 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15992 VAR_INIT_STATUS_INITIALIZED);
15993 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15994 VAR_INIT_STATUS_INITIALIZED);
15995
15996 if (op0 == 0 || op1 == 0)
15997 break;
15998
15999 mem_loc_result = op0;
16000 add_loc_descr (&mem_loc_result, op1);
16001 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16002 break;
16003
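/* Signed MOD with an address-sized mode is open-coded below as
   op0 - (op0 / op1) * op1 using the signed DW_OP_div (plain DW_OP_mod
   handles the unsigned UMOD case above); the stack evolves as
     op0 op1 --over--> op0 op1 op0 --over--> op0 op1 op0 op1
     --div--> op0 op1 op0/op1 --mul--> op0 op1*(op0/op1)
     --minus--> op0 % op1.
   Wider modes use a typed DW_OP_mod via typed_binop instead.  */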
16004 case MOD:
16005 if ((!dwarf_strict || dwarf_version >= 5)
16006 && is_a <scalar_int_mode> (mode, &int_mode)
16007 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16008 {
16009 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16010 base_type_for_mode (mode, 0),
16011 int_mode, mem_mode);
16012 break;
16013 }
16014
16015 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16016 VAR_INIT_STATUS_INITIALIZED);
16017 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16018 VAR_INIT_STATUS_INITIALIZED);
16019
16020 if (op0 == 0 || op1 == 0)
16021 break;
16022
16023 mem_loc_result = op0;
16024 add_loc_descr (&mem_loc_result, op1);
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16027 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16028 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16029 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16030 break;
16031
16032 case UDIV:
16033 if ((!dwarf_strict || dwarf_version >= 5)
16034 && is_a <scalar_int_mode> (mode, &int_mode))
16035 {
16036 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16037 {
16038 op = DW_OP_div;
16039 goto do_binop;
16040 }
16041 mem_loc_result = typed_binop (DW_OP_div, rtl,
16042 base_type_for_mode (int_mode, 1),
16043 int_mode, mem_mode);
16044 }
16045 break;
16046
16047 case NOT:
16048 op = DW_OP_not;
16049 goto do_unop;
16050
16051 case ABS:
16052 op = DW_OP_abs;
16053 goto do_unop;
16054
16055 case NEG:
16056 op = DW_OP_neg;
16057 goto do_unop;
16058
16059 do_unop:
16060 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16061 VAR_INIT_STATUS_INITIALIZED);
16062
16063 if (op0 == 0)
16064 break;
16065
16066 mem_loc_result = op0;
16067 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16068 break;
16069
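/* Integer constants that fit the target address size are emitted with the
   ordinary DW_OP_lit<n>/DW_OP_const<n>u forms.  Wider constants need a
   typed constant: either a small untyped constant followed by
   DW_OP_convert, or DW_OP_const_type, whichever encoding is shorter.  */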
16070 case CONST_INT:
16071 if (!is_a <scalar_int_mode> (mode, &int_mode)
16072 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16073 #ifdef POINTERS_EXTEND_UNSIGNED
16074 || (int_mode == Pmode
16075 && mem_mode != VOIDmode
16076 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16077 #endif
16078 )
16079 {
16080 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16081 break;
16082 }
16083 if ((!dwarf_strict || dwarf_version >= 5)
16084 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16085 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16086 {
16087 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16088 scalar_int_mode amode;
16089 if (type_die == NULL)
16090 return NULL;
16091 if (INTVAL (rtl) >= 0
16092 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16093 .exists (&amode))
16094 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16095 /* const DW_OP_convert <XXX> vs.
16096 DW_OP_const_type <XXX, 1, const>. */
16097 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16098 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16099 {
16100 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16101 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16102 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16103 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16104 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16105 add_loc_descr (&mem_loc_result, op0);
16106 return mem_loc_result;
16107 }
16108 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16109 INTVAL (rtl));
16110 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16111 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16112 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16113 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16114 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16115 else
16116 {
16117 mem_loc_result->dw_loc_oprnd2.val_class
16118 = dw_val_class_const_double;
16119 mem_loc_result->dw_loc_oprnd2.v.val_double
16120 = double_int::from_shwi (INTVAL (rtl));
16121 }
16122 }
16123 break;
16124
16125 case CONST_DOUBLE:
16126 if (!dwarf_strict || dwarf_version >= 5)
16127 {
16128 dw_die_ref type_die;
16129
16130 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16131 CONST_DOUBLE rtx could represent either a large integer
16132 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16133 the value is always a floating point constant.
16134
16135 When it is an integer, a CONST_DOUBLE is used whenever
16136 the constant requires 2 HWIs to be adequately represented.
16137 We output CONST_DOUBLEs as blocks. */
16138 if (mode == VOIDmode
16139 || (GET_MODE (rtl) == VOIDmode
16140 && maybe_ne (GET_MODE_BITSIZE (mode),
16141 HOST_BITS_PER_DOUBLE_INT)))
16142 break;
16143 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16144 if (type_die == NULL)
16145 return NULL;
16146 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16147 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16148 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16149 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16150 #if TARGET_SUPPORTS_WIDE_INT == 0
16151 if (!SCALAR_FLOAT_MODE_P (mode))
16152 {
16153 mem_loc_result->dw_loc_oprnd2.val_class
16154 = dw_val_class_const_double;
16155 mem_loc_result->dw_loc_oprnd2.v.val_double
16156 = rtx_to_double_int (rtl);
16157 }
16158 else
16159 #endif
16160 {
16161 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16162 unsigned int length = GET_MODE_SIZE (float_mode);
16163 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16164
16165 insert_float (rtl, array);
16166 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16167 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16168 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16169 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16170 }
16171 }
16172 break;
16173
16174 case CONST_WIDE_INT:
16175 if (!dwarf_strict || dwarf_version >= 5)
16176 {
16177 dw_die_ref type_die;
16178
16179 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16180 if (type_die == NULL)
16181 return NULL;
16182 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16183 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16184 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16185 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16186 mem_loc_result->dw_loc_oprnd2.val_class
16187 = dw_val_class_wide_int;
16188 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16189 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16190 }
16191 break;
16192
16193 case CONST_POLY_INT:
16194 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16195 break;
16196
16197 case EQ:
16198 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16199 break;
16200
16201 case GE:
16202 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16203 break;
16204
16205 case GT:
16206 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16207 break;
16208
16209 case LE:
16210 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16211 break;
16212
16213 case LT:
16214 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16215 break;
16216
16217 case NE:
16218 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16219 break;
16220
16221 case GEU:
16222 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16223 break;
16224
16225 case GTU:
16226 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16227 break;
16228
16229 case LEU:
16230 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16231 break;
16232
16233 case LTU:
16234 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16235 break;
16236
16237 case UMIN:
16238 case UMAX:
16239 if (!SCALAR_INT_MODE_P (mode))
16240 break;
16241 /* FALLTHRU */
16242 case SMIN:
16243 case SMAX:
16244 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16245 break;
16246
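/* Bit-field extraction is modelled with shifts: the field is first moved
   to the most significant end of an address-sized value with DW_OP_shl
   and then shifted back down with DW_OP_shr (ZERO_EXTRACT) or DW_OP_shra
   (SIGN_EXTRACT), so the unwanted bits are discarded or sign-filled.  */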
16247 case ZERO_EXTRACT:
16248 case SIGN_EXTRACT:
16249 if (CONST_INT_P (XEXP (rtl, 1))
16250 && CONST_INT_P (XEXP (rtl, 2))
16251 && is_a <scalar_int_mode> (mode, &int_mode)
16252 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16253 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16254 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16255 && ((unsigned) INTVAL (XEXP (rtl, 1))
16256 + (unsigned) INTVAL (XEXP (rtl, 2))
16257 <= GET_MODE_BITSIZE (int_mode)))
16258 {
16259 int shift, size;
16260 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16261 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16262 if (op0 == 0)
16263 break;
16264 if (GET_CODE (rtl) == SIGN_EXTRACT)
16265 op = DW_OP_shra;
16266 else
16267 op = DW_OP_shr;
16268 mem_loc_result = op0;
16269 size = INTVAL (XEXP (rtl, 1));
16270 shift = INTVAL (XEXP (rtl, 2));
16271 if (BITS_BIG_ENDIAN)
16272 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16273 if (shift + size != (int) DWARF2_ADDR_SIZE)
16274 {
16275 add_loc_descr (&mem_loc_result,
16276 int_loc_descriptor (DWARF2_ADDR_SIZE
16277 - shift - size));
16278 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16279 }
16280 if (size != (int) DWARF2_ADDR_SIZE)
16281 {
16282 add_loc_descr (&mem_loc_result,
16283 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16284 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16285 }
16286 }
16287 break;
16288
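/* IF_THEN_ELSE is lowered to a conditional branch over a DW_OP_swap: the
   "then" value, the "else" value and the condition are pushed in that
   order; DW_OP_bra jumps straight to DW_OP_drop when the condition is
   nonzero, leaving the "then" value, while a zero condition falls through
   the swap so the drop discards the "then" value and leaves the "else"
   value instead.  */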
16289 case IF_THEN_ELSE:
16290 {
16291 dw_loc_descr_ref op2, bra_node, drop_node;
16292 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16293 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16294 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16295 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16296 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16297 VAR_INIT_STATUS_INITIALIZED);
16298 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16299 VAR_INIT_STATUS_INITIALIZED);
16300 if (op0 == NULL || op1 == NULL || op2 == NULL)
16301 break;
16302
16303 mem_loc_result = op1;
16304 add_loc_descr (&mem_loc_result, op2);
16305 add_loc_descr (&mem_loc_result, op0);
16306 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16307 add_loc_descr (&mem_loc_result, bra_node);
16308 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16309 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16310 add_loc_descr (&mem_loc_result, drop_node);
16311 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16312 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16313 }
16314 break;
16315
16316 case FLOAT_EXTEND:
16317 case FLOAT_TRUNCATE:
16318 case FLOAT:
16319 case UNSIGNED_FLOAT:
16320 case FIX:
16321 case UNSIGNED_FIX:
16322 if (!dwarf_strict || dwarf_version >= 5)
16323 {
16324 dw_die_ref type_die;
16325 dw_loc_descr_ref cvt;
16326
16327 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16328 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16329 if (op0 == NULL)
16330 break;
16331 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16332 && (GET_CODE (rtl) == FLOAT
16333 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16334 {
16335 type_die = base_type_for_mode (int_mode,
16336 GET_CODE (rtl) == UNSIGNED_FLOAT);
16337 if (type_die == NULL)
16338 break;
16339 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16340 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16341 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16342 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16343 add_loc_descr (&op0, cvt);
16344 }
16345 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16346 if (type_die == NULL)
16347 break;
16348 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16349 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16350 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16351 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16352 add_loc_descr (&op0, cvt);
16353 if (is_a <scalar_int_mode> (mode, &int_mode)
16354 && (GET_CODE (rtl) == FIX
16355 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16356 {
16357 op0 = convert_descriptor_to_mode (int_mode, op0);
16358 if (op0 == NULL)
16359 break;
16360 }
16361 mem_loc_result = op0;
16362 }
16363 break;
16364
16365 case CLZ:
16366 case CTZ:
16367 case FFS:
16368 if (is_a <scalar_int_mode> (mode, &int_mode))
16369 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16370 break;
16371
16372 case POPCOUNT:
16373 case PARITY:
16374 if (is_a <scalar_int_mode> (mode, &int_mode))
16375 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16376 break;
16377
16378 case BSWAP:
16379 if (is_a <scalar_int_mode> (mode, &int_mode))
16380 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16381 break;
16382
16383 case ROTATE:
16384 case ROTATERT:
16385 if (is_a <scalar_int_mode> (mode, &int_mode))
16386 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16387 break;
16388
16389 case COMPARE:
16390 /* In theory, we could implement the above. */
16391 /* DWARF cannot represent the unsigned compare operations
16392 natively. */
16393 case SS_MULT:
16394 case US_MULT:
16395 case SS_DIV:
16396 case US_DIV:
16397 case SS_PLUS:
16398 case US_PLUS:
16399 case SS_MINUS:
16400 case US_MINUS:
16401 case SS_NEG:
16402 case US_NEG:
16403 case SS_ABS:
16404 case SS_ASHIFT:
16405 case US_ASHIFT:
16406 case SS_TRUNCATE:
16407 case US_TRUNCATE:
16408 case UNORDERED:
16409 case ORDERED:
16410 case UNEQ:
16411 case UNGE:
16412 case UNGT:
16413 case UNLE:
16414 case UNLT:
16415 case LTGT:
16416 case FRACT_CONVERT:
16417 case UNSIGNED_FRACT_CONVERT:
16418 case SAT_FRACT:
16419 case UNSIGNED_SAT_FRACT:
16420 case SQRT:
16421 case ASM_OPERANDS:
16422 case VEC_MERGE:
16423 case VEC_SELECT:
16424 case VEC_CONCAT:
16425 case VEC_DUPLICATE:
16426 case VEC_SERIES:
16427 case HIGH:
16428 case FMA:
16429 case STRICT_LOW_PART:
16430 case CONST_VECTOR:
16431 case CONST_FIXED:
16432 case CLRSB:
16433 case CLOBBER:
16434 break;
16435
16436 case CONST_STRING:
16437 resolve_one_addr (&rtl);
16438 goto symref;
16439
16440 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16441 the expression. An UNSPEC rtx represents a raw DWARF operation;
16442 new_loc_descr is called for it to build the operation directly.
16443 Otherwise mem_loc_descriptor is called recursively. */
16444 case PARALLEL:
16445 {
16446 int index = 0;
16447 dw_loc_descr_ref exp_result = NULL;
16448
16449 for (; index < XVECLEN (rtl, 0); index++)
16450 {
16451 rtx elem = XVECEXP (rtl, 0, index);
16452 if (GET_CODE (elem) == UNSPEC)
16453 {
16454 /* Each DWARF operation UNSPEC contains two operands; if
16455 one operand is not used for the operation, const0_rtx is
16456 passed. */
16457 gcc_assert (XVECLEN (elem, 0) == 2);
16458
16459 HOST_WIDE_INT dw_op = XINT (elem, 1);
16460 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16461 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16462 exp_result
16463 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16464 oprnd2);
16465 }
16466 else
16467 exp_result
16468 = mem_loc_descriptor (elem, mode, mem_mode,
16469 VAR_INIT_STATUS_INITIALIZED);
16470
16471 if (!mem_loc_result)
16472 mem_loc_result = exp_result;
16473 else
16474 add_loc_descr (&mem_loc_result, exp_result);
16475 }
16476
16477 break;
16478 }
16479
16480 default:
16481 if (flag_checking)
16482 {
16483 print_rtl (stderr, rtl);
16484 gcc_unreachable ();
16485 }
16486 break;
16487 }
16488
16489 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16490 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16491
16492 return mem_loc_result;
16493 }
16494
16495 /* Return a descriptor that describes the concatenation of two locations.
16496 This is typically a complex variable. */
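/* Each half is described independently and then tagged with DW_OP_piece
   giving its size in bytes; e.g. a double _Complex split over two
   registers yields roughly
     <reg for real part>; DW_OP_piece 8; <reg for imag part>; DW_OP_piece 8.  */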
16497
16498 static dw_loc_descr_ref
16499 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16500 {
16501 /* At present we only track constant-sized pieces. */
16502 unsigned int size0, size1;
16503 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16504 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16505 return 0;
16506
16507 dw_loc_descr_ref cc_loc_result = NULL;
16508 dw_loc_descr_ref x0_ref
16509 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510 dw_loc_descr_ref x1_ref
16511 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16512
16513 if (x0_ref == 0 || x1_ref == 0)
16514 return 0;
16515
16516 cc_loc_result = x0_ref;
16517 add_loc_descr_op_piece (&cc_loc_result, size0);
16518
16519 add_loc_descr (&cc_loc_result, x1_ref);
16520 add_loc_descr_op_piece (&cc_loc_result, size1);
16521
16522 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16523 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16524
16525 return cc_loc_result;
16526 }
16527
16528 /* Return a descriptor that describes the concatenation of N
16529 locations. */
16530
16531 static dw_loc_descr_ref
16532 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16533 {
16534 unsigned int i;
16535 dw_loc_descr_ref cc_loc_result = NULL;
16536 unsigned int n = XVECLEN (concatn, 0);
16537 unsigned int size;
16538
16539 for (i = 0; i < n; ++i)
16540 {
16541 dw_loc_descr_ref ref;
16542 rtx x = XVECEXP (concatn, 0, i);
16543
16544 /* At present we only track constant-sized pieces. */
16545 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16546 return NULL;
16547
16548 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16549 if (ref == NULL)
16550 return NULL;
16551
16552 add_loc_descr (&cc_loc_result, ref);
16553 add_loc_descr_op_piece (&cc_loc_result, size);
16554 }
16555
16556 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16557 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16558
16559 return cc_loc_result;
16560 }
16561
16562 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16563 for DEBUG_IMPLICIT_PTR RTL. */
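/* The resulting DW_OP_implicit_pointer refers to the DIE of the variable
   that the optimized-out pointer would point to, plus a byte OFFSET into
   it; if that DIE has not been created yet, the decl is recorded instead
   and the reference is resolved later.  */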
16564
16565 static dw_loc_descr_ref
16566 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16567 {
16568 dw_loc_descr_ref ret;
16569 dw_die_ref ref;
16570
16571 if (dwarf_strict && dwarf_version < 5)
16572 return NULL;
16573 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16574 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16575 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16576 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16577 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16578 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16579 if (ref)
16580 {
16581 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16582 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16583 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16584 }
16585 else
16586 {
16587 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16588 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16589 }
16590 return ret;
16591 }
16592
16593 /* Output a proper Dwarf location descriptor for a variable or parameter
16594 which is either allocated in a register or in a memory location. For a
16595 register, we just generate an OP_REG and the register number. For a
16596 memory location we provide a Dwarf postfix expression describing how to
16597 generate the (dynamic) address of the object onto the address stack.
16598
16599 MODE is mode of the decl if this loc_descriptor is going to be used in
16600 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16601 allowed, VOIDmode otherwise.
16602
16603 If we don't know how to describe it, return 0. */
16604
16605 static dw_loc_descr_ref
16606 loc_descriptor (rtx rtl, machine_mode mode,
16607 enum var_init_status initialized)
16608 {
16609 dw_loc_descr_ref loc_result = NULL;
16610 scalar_int_mode int_mode;
16611
16612 switch (GET_CODE (rtl))
16613 {
16614 case SUBREG:
16615 /* The case of a subreg may arise when we have a local (register)
16616 variable or a formal (register) parameter which doesn't quite fill
16617 up an entire register. For now, just assume that it is
16618 legitimate to make the Dwarf info refer to the whole register which
16619 contains the given subreg. */
16620 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16621 loc_result = loc_descriptor (SUBREG_REG (rtl),
16622 GET_MODE (SUBREG_REG (rtl)), initialized);
16623 else
16624 goto do_default;
16625 break;
16626
16627 case REG:
16628 loc_result = reg_loc_descriptor (rtl, initialized);
16629 break;
16630
16631 case MEM:
16632 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16633 GET_MODE (rtl), initialized);
16634 if (loc_result == NULL)
16635 loc_result = tls_mem_loc_descriptor (rtl);
16636 if (loc_result == NULL)
16637 {
16638 rtx new_rtl = avoid_constant_pool_reference (rtl);
16639 if (new_rtl != rtl)
16640 loc_result = loc_descriptor (new_rtl, mode, initialized);
16641 }
16642 break;
16643
16644 case CONCAT:
16645 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16646 initialized);
16647 break;
16648
16649 case CONCATN:
16650 loc_result = concatn_loc_descriptor (rtl, initialized);
16651 break;
16652
16653 case VAR_LOCATION:
16654 /* Single part. */
16655 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16656 {
16657 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16658 if (GET_CODE (loc) == EXPR_LIST)
16659 loc = XEXP (loc, 0);
16660 loc_result = loc_descriptor (loc, mode, initialized);
16661 break;
16662 }
16663
16664 rtl = XEXP (rtl, 1);
16665 /* FALLTHRU */
16666
16667 case PARALLEL:
16668 {
16669 rtvec par_elems = XVEC (rtl, 0);
16670 int num_elem = GET_NUM_ELEM (par_elems);
16671 machine_mode mode;
16672 int i, size;
16673
16674 /* Create the first one, so we have something to add to. */
16675 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16676 VOIDmode, initialized);
16677 if (loc_result == NULL)
16678 return NULL;
16679 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16680 /* At present we only track constant-sized pieces. */
16681 if (!GET_MODE_SIZE (mode).is_constant (&size))
16682 return NULL;
16683 add_loc_descr_op_piece (&loc_result, size);
16684 for (i = 1; i < num_elem; i++)
16685 {
16686 dw_loc_descr_ref temp;
16687
16688 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16689 VOIDmode, initialized);
16690 if (temp == NULL)
16691 return NULL;
16692 add_loc_descr (&loc_result, temp);
16693 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16694 /* At present we only track constant-sized pieces. */
16695 if (!GET_MODE_SIZE (mode).is_constant (&size))
16696 return NULL;
16697 add_loc_descr_op_piece (&loc_result, size);
16698 }
16699 }
16700 break;
16701
16702 case CONST_INT:
16703 if (mode != VOIDmode && mode != BLKmode)
16704 {
16705 int_mode = as_a <scalar_int_mode> (mode);
16706 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16707 INTVAL (rtl));
16708 }
16709 break;
16710
16711 case CONST_DOUBLE:
16712 if (mode == VOIDmode)
16713 mode = GET_MODE (rtl);
16714
16715 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16716 {
16717 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16718
16719 /* Note that a CONST_DOUBLE rtx could represent either an integer
16720 or a floating-point constant. A CONST_DOUBLE is used whenever
16721 the constant requires more than one word in order to be
16722 adequately represented. We output CONST_DOUBLEs as blocks. */
16723 scalar_mode smode = as_a <scalar_mode> (mode);
16724 loc_result = new_loc_descr (DW_OP_implicit_value,
16725 GET_MODE_SIZE (smode), 0);
16726 #if TARGET_SUPPORTS_WIDE_INT == 0
16727 if (!SCALAR_FLOAT_MODE_P (smode))
16728 {
16729 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16730 loc_result->dw_loc_oprnd2.v.val_double
16731 = rtx_to_double_int (rtl);
16732 }
16733 else
16734 #endif
16735 {
16736 unsigned int length = GET_MODE_SIZE (smode);
16737 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16738
16739 insert_float (rtl, array);
16740 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16741 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16742 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16743 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16744 }
16745 }
16746 break;
16747
16748 case CONST_WIDE_INT:
16749 if (mode == VOIDmode)
16750 mode = GET_MODE (rtl);
16751
16752 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16753 {
16754 int_mode = as_a <scalar_int_mode> (mode);
16755 loc_result = new_loc_descr (DW_OP_implicit_value,
16756 GET_MODE_SIZE (int_mode), 0);
16757 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16758 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16759 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16760 }
16761 break;
16762
16763 case CONST_VECTOR:
16764 if (mode == VOIDmode)
16765 mode = GET_MODE (rtl);
16766
16767 if (mode != VOIDmode
16768 /* The combination of a length and byte elt_size doesn't extend
16769 naturally to boolean vectors, where several elements are packed
16770 into the same byte. */
16771 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16772 && (dwarf_version >= 4 || !dwarf_strict))
16773 {
16774 unsigned int length;
16775 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16776 return NULL;
16777
16778 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16779 unsigned char *array
16780 = ggc_vec_alloc<unsigned char> (length * elt_size);
16781 unsigned int i;
16782 unsigned char *p;
16783 machine_mode imode = GET_MODE_INNER (mode);
16784
16785 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16786 switch (GET_MODE_CLASS (mode))
16787 {
16788 case MODE_VECTOR_INT:
16789 for (i = 0, p = array; i < length; i++, p += elt_size)
16790 {
16791 rtx elt = CONST_VECTOR_ELT (rtl, i);
16792 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16793 }
16794 break;
16795
16796 case MODE_VECTOR_FLOAT:
16797 for (i = 0, p = array; i < length; i++, p += elt_size)
16798 {
16799 rtx elt = CONST_VECTOR_ELT (rtl, i);
16800 insert_float (elt, p);
16801 }
16802 break;
16803
16804 default:
16805 gcc_unreachable ();
16806 }
16807
16808 loc_result = new_loc_descr (DW_OP_implicit_value,
16809 length * elt_size, 0);
16810 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16811 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16812 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16813 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16814 }
16815 break;
16816
16817 case CONST:
16818 if (mode == VOIDmode
16819 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16820 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16821 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16822 {
16823 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16824 break;
16825 }
16826 /* FALLTHROUGH */
16827 case SYMBOL_REF:
16828 if (!const_ok_for_output (rtl))
16829 break;
16830 /* FALLTHROUGH */
16831 case LABEL_REF:
16832 if (is_a <scalar_int_mode> (mode, &int_mode)
16833 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16834 && (dwarf_version >= 4 || !dwarf_strict))
16835 {
16836 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16837 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16838 vec_safe_push (used_rtx_array, rtl);
16839 }
16840 break;
16841
16842 case DEBUG_IMPLICIT_PTR:
16843 loc_result = implicit_ptr_descriptor (rtl, 0);
16844 break;
16845
16846 case PLUS:
16847 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16848 && CONST_INT_P (XEXP (rtl, 1)))
16849 {
16850 loc_result
16851 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16852 break;
16853 }
16854 /* FALLTHRU */
16855 do_default:
16856 default:
16857 if ((is_a <scalar_int_mode> (mode, &int_mode)
16858 && GET_MODE (rtl) == int_mode
16859 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16860 && dwarf_version >= 4)
16861 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16862 {
16863 /* Value expression. */
16864 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16865 if (loc_result)
16866 add_loc_descr (&loc_result,
16867 new_loc_descr (DW_OP_stack_value, 0, 0));
16868 }
16869 break;
16870 }
16871
16872 return loc_result;
16873 }
16874
16875 /* We need to figure out what section we should use as the base for the
16876 address ranges where a given location is valid.
16877 1. If this particular DECL has a section associated with it, use that.
16878 2. If this function has a section associated with it, use that.
16879 3. Otherwise, use the text section.
16880 XXX: If you split a variable across multiple sections, we won't notice. */
16881
16882 static const char *
16883 secname_for_decl (const_tree decl)
16884 {
16885 const char *secname;
16886
16887 if (VAR_OR_FUNCTION_DECL_P (decl)
16888 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16889 && DECL_SECTION_NAME (decl))
16890 secname = DECL_SECTION_NAME (decl);
16891 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16892 {
16893 if (in_cold_section_p)
16894 {
16895 section *sec = current_function_section ();
16896 if (sec->common.flags & SECTION_NAMED)
16897 return sec->named.name;
16898 }
16899 secname = DECL_SECTION_NAME (current_function_decl);
16900 }
16901 else if (cfun && in_cold_section_p)
16902 secname = crtl->subsections.cold_section_label;
16903 else
16904 secname = text_section_label;
16905
16906 return secname;
16907 }
16908
16909 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16910
16911 static bool
16912 decl_by_reference_p (tree decl)
16913 {
16914 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16915 || VAR_P (decl))
16916 && DECL_BY_REFERENCE (decl));
16917 }
16918
16919 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16920 for VARLOC. */
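/* WANT_ADDRESS is 0 when the value itself is wanted, 1 when only the
   address of the object will do, and 2 when an address is preferred but a
   value wrapped in DW_OP_stack_value is acceptable (see the handling of
   have_address below).  */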
16921
16922 static dw_loc_descr_ref
16923 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16924 enum var_init_status initialized)
16925 {
16926 int have_address = 0;
16927 dw_loc_descr_ref descr;
16928 machine_mode mode;
16929
16930 if (want_address != 2)
16931 {
16932 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16933 /* Single part. */
16934 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16935 {
16936 varloc = PAT_VAR_LOCATION_LOC (varloc);
16937 if (GET_CODE (varloc) == EXPR_LIST)
16938 varloc = XEXP (varloc, 0);
16939 mode = GET_MODE (varloc);
16940 if (MEM_P (varloc))
16941 {
16942 rtx addr = XEXP (varloc, 0);
16943 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16944 mode, initialized);
16945 if (descr)
16946 have_address = 1;
16947 else
16948 {
16949 rtx x = avoid_constant_pool_reference (varloc);
16950 if (x != varloc)
16951 descr = mem_loc_descriptor (x, mode, VOIDmode,
16952 initialized);
16953 }
16954 }
16955 else
16956 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16957 }
16958 else
16959 return 0;
16960 }
16961 else
16962 {
16963 if (GET_CODE (varloc) == VAR_LOCATION)
16964 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16965 else
16966 mode = DECL_MODE (loc);
16967 descr = loc_descriptor (varloc, mode, initialized);
16968 have_address = 1;
16969 }
16970
16971 if (!descr)
16972 return 0;
16973
16974 if (want_address == 2 && !have_address
16975 && (dwarf_version >= 4 || !dwarf_strict))
16976 {
16977 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16978 {
16979 expansion_failed (loc, NULL_RTX,
16980 "DWARF address size mismatch");
16981 return 0;
16982 }
16983 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16984 have_address = 1;
16985 }
16986 /* Show if we can't fill the request for an address. */
16987 if (want_address && !have_address)
16988 {
16989 expansion_failed (loc, NULL_RTX,
16990 "Want address and only have value");
16991 return 0;
16992 }
16993
16994 /* If we've got an address and don't want one, dereference. */
16995 if (!want_address && have_address)
16996 {
16997 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16998 enum dwarf_location_atom op;
16999
17000 if (size > DWARF2_ADDR_SIZE || size == -1)
17001 {
17002 expansion_failed (loc, NULL_RTX,
17003 "DWARF address size mismatch");
17004 return 0;
17005 }
17006 else if (size == DWARF2_ADDR_SIZE)
17007 op = DW_OP_deref;
17008 else
17009 op = DW_OP_deref_size;
17010
17011 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17012 }
17013
17014 return descr;
17015 }
17016
17017 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17018 if it is not possible. */
17019
17020 static dw_loc_descr_ref
17021 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17022 {
17023 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17024 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17025 else if (dwarf_version >= 3 || !dwarf_strict)
17026 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17027 else
17028 return NULL;
17029 }
17030
17031 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17032 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
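/* The pieces recorded for the scalarized parts of DECL are glued back
   together with DW_OP_piece/DW_OP_bit_piece operations; stretches of the
   decl with no known location become pieces preceded by an empty
   expression, which tells the consumer those bits were optimized out.  */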
17033
17034 static dw_loc_descr_ref
17035 dw_sra_loc_expr (tree decl, rtx loc)
17036 {
17037 rtx p;
17038 unsigned HOST_WIDE_INT padsize = 0;
17039 dw_loc_descr_ref descr, *descr_tail;
17040 unsigned HOST_WIDE_INT decl_size;
17041 rtx varloc;
17042 enum var_init_status initialized;
17043
17044 if (DECL_SIZE (decl) == NULL
17045 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17046 return NULL;
17047
17048 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17049 descr = NULL;
17050 descr_tail = &descr;
17051
17052 for (p = loc; p; p = XEXP (p, 1))
17053 {
17054 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17055 rtx loc_note = *decl_piece_varloc_ptr (p);
17056 dw_loc_descr_ref cur_descr;
17057 dw_loc_descr_ref *tail, last = NULL;
17058 unsigned HOST_WIDE_INT opsize = 0;
17059
17060 if (loc_note == NULL_RTX
17061 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17062 {
17063 padsize += bitsize;
17064 continue;
17065 }
17066 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17067 varloc = NOTE_VAR_LOCATION (loc_note);
17068 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17069 if (cur_descr == NULL)
17070 {
17071 padsize += bitsize;
17072 continue;
17073 }
17074
17075 /* Check that cur_descr either doesn't use
17076 DW_OP_*piece operations, or their sum is equal
17077 to bitsize. Otherwise we can't embed it. */
17078 for (tail = &cur_descr; *tail != NULL;
17079 tail = &(*tail)->dw_loc_next)
17080 if ((*tail)->dw_loc_opc == DW_OP_piece)
17081 {
17082 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17083 * BITS_PER_UNIT;
17084 last = *tail;
17085 }
17086 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17087 {
17088 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17089 last = *tail;
17090 }
17091
17092 if (last != NULL && opsize != bitsize)
17093 {
17094 padsize += bitsize;
17095 /* Discard the current piece of the descriptor and release any
17096 addr_table entries it uses. */
17097 remove_loc_list_addr_table_entries (cur_descr);
17098 continue;
17099 }
17100
17101 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17102 expression, which means that those bits are optimized out. */
17103 if (padsize)
17104 {
17105 if (padsize > decl_size)
17106 {
17107 remove_loc_list_addr_table_entries (cur_descr);
17108 goto discard_descr;
17109 }
17110 decl_size -= padsize;
17111 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17112 if (*descr_tail == NULL)
17113 {
17114 remove_loc_list_addr_table_entries (cur_descr);
17115 goto discard_descr;
17116 }
17117 descr_tail = &(*descr_tail)->dw_loc_next;
17118 padsize = 0;
17119 }
17120 *descr_tail = cur_descr;
17121 descr_tail = tail;
17122 if (bitsize > decl_size)
17123 goto discard_descr;
17124 decl_size -= bitsize;
17125 if (last == NULL)
17126 {
17127 HOST_WIDE_INT offset = 0;
17128 if (GET_CODE (varloc) == VAR_LOCATION
17129 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17130 {
17131 varloc = PAT_VAR_LOCATION_LOC (varloc);
17132 if (GET_CODE (varloc) == EXPR_LIST)
17133 varloc = XEXP (varloc, 0);
17134 }
17135 do
17136 {
17137 if (GET_CODE (varloc) == CONST
17138 || GET_CODE (varloc) == SIGN_EXTEND
17139 || GET_CODE (varloc) == ZERO_EXTEND)
17140 varloc = XEXP (varloc, 0);
17141 else if (GET_CODE (varloc) == SUBREG)
17142 varloc = SUBREG_REG (varloc);
17143 else
17144 break;
17145 }
17146 while (1);
17147 /* The DW_OP_bit_piece offset should be zero for register
17148 or implicit location descriptions and for empty location
17149 descriptions, but for memory addresses it needs big-endian
17150 adjustment. */
17151 if (MEM_P (varloc))
17152 {
17153 unsigned HOST_WIDE_INT memsize;
17154 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17155 goto discard_descr;
17156 memsize *= BITS_PER_UNIT;
17157 if (memsize != bitsize)
17158 {
17159 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17160 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17161 goto discard_descr;
17162 if (memsize < bitsize)
17163 goto discard_descr;
17164 if (BITS_BIG_ENDIAN)
17165 offset = memsize - bitsize;
17166 }
17167 }
17168
17169 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17170 if (*descr_tail == NULL)
17171 goto discard_descr;
17172 descr_tail = &(*descr_tail)->dw_loc_next;
17173 }
17174 }
17175
17176 /* If there were any non-empty expressions, add padding till the end of
17177 the decl. */
17178 if (descr != NULL && decl_size != 0)
17179 {
17180 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17181 if (*descr_tail == NULL)
17182 goto discard_descr;
17183 }
17184 return descr;
17185
17186 discard_descr:
17187 /* Discard the descriptor and release any addr_table entries it uses. */
17188 remove_loc_list_addr_table_entries (descr);
17189 return NULL;
17190 }
17191
17192 /* Return the dwarf representation of the location list LOC_LIST of
17193 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17194 function. */
17195
17196 static dw_loc_list_ref
17197 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17198 {
17199 const char *endname, *secname;
17200 var_loc_view endview;
17201 rtx varloc;
17202 enum var_init_status initialized;
17203 struct var_loc_node *node;
17204 dw_loc_descr_ref descr;
17205 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17206 dw_loc_list_ref list = NULL;
17207 dw_loc_list_ref *listp = &list;
17208
17209 /* Now that we know what section we are using for a base,
17210 actually construct the list of locations.
17211 The first location information is what is passed to the
17212 function that creates the location list, and the remaining
17213 locations just get added on to that list.
17214 Note that we only know the start address for a location
17215 (i.e. location changes), so to build the range, we use
17216 the range [current location start, next location start].
17217 This means we have to special case the last node, and generate
17218 a range of [last location start, end of function label]. */
17219
17220 if (cfun && crtl->has_bb_partition)
17221 {
17222 bool save_in_cold_section_p = in_cold_section_p;
17223 in_cold_section_p = first_function_block_is_cold;
17224 if (loc_list->last_before_switch == NULL)
17225 in_cold_section_p = !in_cold_section_p;
17226 secname = secname_for_decl (decl);
17227 in_cold_section_p = save_in_cold_section_p;
17228 }
17229 else
17230 secname = secname_for_decl (decl);
17231
17232 for (node = loc_list->first; node; node = node->next)
17233 {
17234 bool range_across_switch = false;
17235 if (GET_CODE (node->loc) == EXPR_LIST
17236 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17237 {
17238 if (GET_CODE (node->loc) == EXPR_LIST)
17239 {
17240 descr = NULL;
17241 /* This requires DW_OP_{,bit_}piece, which is not usable
17242 inside DWARF expressions. */
17243 if (want_address == 2)
17244 descr = dw_sra_loc_expr (decl, node->loc);
17245 }
17246 else
17247 {
17248 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17249 varloc = NOTE_VAR_LOCATION (node->loc);
17250 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17251 }
17252 if (descr)
17253 {
17254 /* If section switch happens in between node->label
17255 and node->next->label (or end of function) and
17256 we can't emit it as a single entry list,
17257 emit two ranges, first one ending at the end
17258 of first partition and second one starting at the
17259 beginning of second partition. */
17260 if (node == loc_list->last_before_switch
17261 && (node != loc_list->first || loc_list->first->next
17262 /* If we are to emit a view number, we will emit
17263 a loclist rather than a single location
17264 expression for the entire function (see
17265 loc_list_has_views), so we have to split the
17266 range that straddles across partitions. */
17267 || !ZERO_VIEW_P (node->view))
17268 && current_function_decl)
17269 {
17270 endname = cfun->fde->dw_fde_end;
17271 endview = 0;
17272 range_across_switch = true;
17273 }
17274 /* The variable has a location between NODE->LABEL and
17275 NODE->NEXT->LABEL. */
17276 else if (node->next)
17277 endname = node->next->label, endview = node->next->view;
17278 /* If the variable has a location at the last label
17279 it keeps its location until the end of function. */
17280 else if (!current_function_decl)
17281 endname = text_end_label, endview = 0;
17282 else
17283 {
17284 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17285 current_function_funcdef_no);
17286 endname = ggc_strdup (label_id);
17287 endview = 0;
17288 }
17289
17290 *listp = new_loc_list (descr, node->label, node->view,
17291 endname, endview, secname);
17292 if (TREE_CODE (decl) == PARM_DECL
17293 && node == loc_list->first
17294 && NOTE_P (node->loc)
17295 && strcmp (node->label, endname) == 0)
17296 (*listp)->force = true;
17297 listp = &(*listp)->dw_loc_next;
17298 }
17299 }
17300
17301 if (cfun
17302 && crtl->has_bb_partition
17303 && node == loc_list->last_before_switch)
17304 {
17305 bool save_in_cold_section_p = in_cold_section_p;
17306 in_cold_section_p = !first_function_block_is_cold;
17307 secname = secname_for_decl (decl);
17308 in_cold_section_p = save_in_cold_section_p;
17309 }
17310
17311 if (range_across_switch)
17312 {
17313 if (GET_CODE (node->loc) == EXPR_LIST)
17314 descr = dw_sra_loc_expr (decl, node->loc);
17315 else
17316 {
17317 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17318 varloc = NOTE_VAR_LOCATION (node->loc);
17319 descr = dw_loc_list_1 (decl, varloc, want_address,
17320 initialized);
17321 }
17322 gcc_assert (descr);
17323 /* The variable has a location between NODE->LABEL and
17324 NODE->NEXT->LABEL. */
17325 if (node->next)
17326 endname = node->next->label, endview = node->next->view;
17327 else
17328 endname = cfun->fde->dw_fde_second_end, endview = 0;
17329 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17330 endname, endview, secname);
17331 listp = &(*listp)->dw_loc_next;
17332 }
17333 }
17334
17335 /* Try to avoid the overhead of a location list by emitting a single
17336 location expression instead, but only if we didn't have more than one
17337 location entry in the first place. If some entries were not
17338 representable, we don't want to pretend that a single entry that
17339 was representable applies to the entire scope in which the
17340 variable is available. */
17341 if (list && loc_list->first->next)
17342 gen_llsym (list);
17343 else
17344 maybe_gen_llsym (list);
17345
17346 return list;
17347 }
17348
17349 /* Return true if the loc_list has only a single element and thus can be
17350 represented as a location description. */
17351
17352 static bool
17353 single_element_loc_list_p (dw_loc_list_ref list)
17354 {
17355 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17356 return !list->ll_symbol;
17357 }
17358
17359 /* Duplicate a single element of a location list. */
17360
17361 static inline dw_loc_descr_ref
17362 copy_loc_descr (dw_loc_descr_ref ref)
17363 {
17364 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17365 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17366 return copy;
17367 }
17368
17369 /* To each location in list LIST append loc descr REF. */
17370
17371 static void
17372 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17373 {
17374 dw_loc_descr_ref copy;
17375 add_loc_descr (&list->expr, ref);
17376 list = list->dw_loc_next;
17377 while (list)
17378 {
17379 copy = copy_loc_descr (ref);
17380 add_loc_descr (&list->expr, copy);
17381 while (copy->dw_loc_next)
17382 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17383 list = list->dw_loc_next;
17384 }
17385 }
17386
17387 /* To each location in list LIST prepend loc descr REF. */
17388
17389 static void
17390 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17391 {
17392 dw_loc_descr_ref copy;
17393 dw_loc_descr_ref ref_end = list->expr;
17394 add_loc_descr (&ref, list->expr);
17395 list->expr = ref;
17396 list = list->dw_loc_next;
17397 while (list)
17398 {
17399 dw_loc_descr_ref end = list->expr;
17400 list->expr = copy = copy_loc_descr (ref);
17401 while (copy->dw_loc_next != ref_end)
17402 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17403 copy->dw_loc_next = end;
17404 list = list->dw_loc_next;
17405 }
17406 }
17407
17408 /* Given two lists RET and LIST
17409 produce location list that is result of adding expression in LIST
17410 to expression in RET on each position in program.
17411 Might be destructive on both RET and LIST.
17412
17413 TODO: We handle only simple cases of RET or LIST having at most one
17414 element. The general case would involve sorting the lists in program
17415 order and merging them, which will need some additional work.
17416 Adding that will improve the quality of debug info, especially for
17417 SRA-ed structures. */
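/* In the cases that are handled, the single-entry side is simply spliced
   into every entry of the other list: e.g. adding a one-entry LIST
   holding "DW_OP_plus_uconst 4" to a two-entry RET appends that operation
   to both of RET's expressions.  */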
17418
17419 static void
17420 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17421 {
17422 if (!list)
17423 return;
17424 if (!*ret)
17425 {
17426 *ret = list;
17427 return;
17428 }
17429 if (!list->dw_loc_next)
17430 {
17431 add_loc_descr_to_each (*ret, list->expr);
17432 return;
17433 }
17434 if (!(*ret)->dw_loc_next)
17435 {
17436 prepend_loc_descr_to_each (list, (*ret)->expr);
17437 *ret = list;
17438 return;
17439 }
17440 expansion_failed (NULL_TREE, NULL_RTX,
17441 "Don't know how to merge two non-trivial"
17442 " location lists.\n");
17443 *ret = NULL;
17444 return;
17445 }
17446
17447 /* LOC is a constant expression. Try our luck: look it up in the constant
17448 pool and return the loc_descr of its address. */
17449
17450 static dw_loc_descr_ref
17451 cst_pool_loc_descr (tree loc)
17452 {
17453 /* Get an RTL for this, if something has been emitted. */
17454 rtx rtl = lookup_constant_def (loc);
17455
17456 if (!rtl || !MEM_P (rtl))
17457 {
17458 gcc_assert (!rtl);
17459 return 0;
17460 }
17461 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17462
17463 /* TODO: We might get more coverage if we actually delayed expansion
17464 of all expressions till the end of compilation, when constant pools
17465 are fully populated. */
17466 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17467 {
17468 expansion_failed (loc, NULL_RTX,
17469 "CST value in contant pool but not marked.");
17470 return 0;
17471 }
17472 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17473 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17474 }
17475
17476 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17477 by looking for an inner INDIRECT_REF expression and turning
17478 it into simple arithmetic.
17479
17480 See loc_list_from_tree for the meaning of CONTEXT. */
17481
17482 static dw_loc_list_ref
17483 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17484 loc_descr_context *context)
17485 {
17486 tree obj, offset;
17487 poly_int64 bitsize, bitpos, bytepos;
17488 machine_mode mode;
17489 int unsignedp, reversep, volatilep = 0;
17490 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17491
17492 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17493 &bitsize, &bitpos, &offset, &mode,
17494 &unsignedp, &reversep, &volatilep);
17495 STRIP_NOPS (obj);
17496 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17497 {
17498 expansion_failed (loc, NULL_RTX, "bitfield access");
17499 return 0;
17500 }
17501 if (!INDIRECT_REF_P (obj))
17502 {
17503 expansion_failed (obj,
17504 NULL_RTX, "no indirect ref in inner refrence");
17505 return 0;
17506 }
17507 if (!offset && known_eq (bitpos, 0))
17508 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17509 context);
17510 else if (toplev
17511 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17512 && (dwarf_version >= 4 || !dwarf_strict))
17513 {
17514 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17515 if (!list_ret)
17516 return 0;
17517 if (offset)
17518 {
17519 /* Variable offset. */
17520 list_ret1 = loc_list_from_tree (offset, 0, context);
17521 if (list_ret1 == 0)
17522 return 0;
17523 add_loc_list (&list_ret, list_ret1);
17524 if (!list_ret)
17525 return 0;
17526 add_loc_descr_to_each (list_ret,
17527 new_loc_descr (DW_OP_plus, 0, 0));
17528 }
17529 HOST_WIDE_INT value;
17530 if (bytepos.is_constant (&value) && value > 0)
17531 add_loc_descr_to_each (list_ret,
17532 new_loc_descr (DW_OP_plus_uconst, value, 0));
17533 else if (maybe_ne (bytepos, 0))
17534 loc_list_plus_const (list_ret, bytepos);
17535 add_loc_descr_to_each (list_ret,
17536 new_loc_descr (DW_OP_stack_value, 0, 0));
17537 }
17538 return list_ret;
17539 }
17540
17541 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17542 all operations from LOC are nops, move to the last one. Insert in NOPS all
17543 operations that are skipped. */
17544
17545 static void
17546 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17547 hash_set<dw_loc_descr_ref> &nops)
17548 {
17549 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17550 {
17551 nops.add (loc);
17552 loc = loc->dw_loc_next;
17553 }
17554 }
17555
17556 /* Helper for loc_descr_without_nops: free the location description operation
17557 LOC. */
17558
17559 bool
17560 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17561 {
17562 ggc_free (loc);
17563 return true;
17564 }
17565
17566 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17567 finishes LOC. */
17568
17569 static void
17570 loc_descr_without_nops (dw_loc_descr_ref &loc)
17571 {
17572 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17573 return;
17574
17575 /* Set of all DW_OP_nop operations we remove. */
17576 hash_set<dw_loc_descr_ref> nops;
17577
17578 /* First, strip all prefix NOP operations in order to keep the head of the
17579 operations list. */
17580 loc_descr_to_next_no_nop (loc, nops);
17581
17582 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17583 {
17584 /* For control flow operations: strip "prefix" nops in destination
17585 labels. */
17586 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17587 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17588 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17589 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17590
17591 /* Do the same for the operations that follow, then move to the next
17592 iteration. */
17593 if (cur->dw_loc_next != NULL)
17594 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17595 cur = cur->dw_loc_next;
17596 }
17597
17598 nops.traverse<void *, free_loc_descr> (NULL);
17599 }
17600
17601
17602 struct dwarf_procedure_info;
17603
17604 /* Helper structure for location descriptions generation. */
17605 struct loc_descr_context
17606 {
17607 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17608 NULL_TREE if DW_OP_push_object_address is invalid for this location
17609 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17610 tree context_type;
17611 /* The ..._DECL node that should be translated as a
17612 DW_OP_push_object_address operation. */
17613 tree base_decl;
17614 /* Information about the DWARF procedure we are currently generating. NULL if
17615 we are not generating a DWARF procedure. */
17616 struct dwarf_procedure_info *dpi;
17617 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17618 by consumer. Used for DW_TAG_generic_subrange attributes. */
17619 bool placeholder_arg;
17620 /* True if PLACEHOLDER_EXPR has been seen. */
17621 bool placeholder_seen;
17622 };
17623
17624 /* DWARF procedures generation
17625
17626 DWARF expressions (aka. location descriptions) are used to encode things
17627 that can vary, such as sizes or offsets. Such computations can have redundant parts
17628 that can be factorized in order to reduce the size of the output debug
17629 information. This is the whole point of DWARF procedures.
17630
17631 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17632 already factorized into functions ("size functions") in order to handle very
17633 big and complex types. Such functions are quite simple: they have integral
17634 arguments, they return an integral result and their body contains only a
17635 return statement with arithmetic expressions. This is the only kind of
17636 function we are interested in translating into DWARF procedures, here.
17637
17638 DWARF expressions and DWARF procedures are executed using a stack, so we have
17639 to define some calling convention for them to interact. Let's say that:
17640
17641 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17642 all arguments in reverse order (right-to-left) so that when the DWARF
17643 procedure execution starts, the first argument is the top of the stack.
17644
17645 - Then, when returning, the DWARF procedure must have consumed all arguments
17646 on the stack, must have pushed the result and touched nothing else.
17647
17648 - Each integral argument and the result are of integral types that can be
17649 held in a single stack slot.
17650
17651 - We call "frame offset" the number of stack slots that are "under DWARF
17652 procedure control": it includes the arguments slots, the temporaries and
17653 the result slot. Thus, it is equal to the number of arguments when the
17654 procedure execution starts and must be equal to one (the result) when it
17655 returns. */
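
/* As a hypothetical illustration of this convention, consider a procedure of
   two integral arguments A and B that computes A + B * 4.  A caller pushes
   the arguments right-to-left and then calls it:

     DW_OP_lit<B>; DW_OP_lit<A>; DW_OP_call4 <proc DIE>

   so that when the procedure starts, the frame offset is 2 and A is on top.
   After argument references have been resolved (see resolve_args_picking),
   the body and epilogue could look like this (stack shown top first):

     DW_OP_dup        A A B          frame offset 3
     DW_OP_pick 2     B A A B        frame offset 4
     DW_OP_lit4       4 B A A B      frame offset 5
     DW_OP_mul        B*4 A A B      frame offset 4
     DW_OP_plus       A+B*4 A B      frame offset 3
     DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop
                      A+B*4          frame offset 1

   leaving only the result on the stack, as required.  */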
17656
17657 /* Helper structure used when generating operations for a DWARF procedure. */
17658 struct dwarf_procedure_info
17659 {
17660 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17661 currently translated. */
17662 tree fndecl;
17663 /* The number of arguments FNDECL takes. */
17664 unsigned args_count;
17665 };
17666
17667 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17668 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17669 equate it to this DIE. */
17670
17671 static dw_die_ref
17672 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17673 dw_die_ref parent_die)
17674 {
17675 dw_die_ref dwarf_proc_die;
17676
17677 if ((dwarf_version < 3 && dwarf_strict)
17678 || location == NULL)
17679 return NULL;
17680
17681 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17682 if (fndecl)
17683 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17684 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17685 return dwarf_proc_die;
17686 }
17687
17688 /* Return whether TYPE is a supported type as a DWARF procedure argument
17689 type or return type (we handle only scalar types and pointer types that
17690 aren't wider than the DWARF expression evaluation stack). */
17691
17692 static bool
17693 is_handled_procedure_type (tree type)
17694 {
17695 return ((INTEGRAL_TYPE_P (type)
17696 || TREE_CODE (type) == OFFSET_TYPE
17697 || TREE_CODE (type) == POINTER_TYPE)
17698 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17699 }
17700
17701 /* Helper for resolve_args_picking: do the same but stop when coming across
17702 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17703 offset *before* evaluating the corresponding operation. */
17704
17705 static bool
17706 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17707 struct dwarf_procedure_info *dpi,
17708 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17709 {
17710 /* The "frame_offset" identifier is already used to name a macro... */
17711 unsigned frame_offset_ = initial_frame_offset;
17712 dw_loc_descr_ref l;
17713
17714 for (l = loc; l != NULL;)
17715 {
17716 bool existed;
17717 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17718
17719 /* If we already met this node, there is nothing to compute anymore. */
17720 if (existed)
17721 {
17722 /* Make sure that the stack size is consistent wherever the execution
17723 flow comes from. */
17724 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17725 break;
17726 }
17727 l_frame_offset = frame_offset_;
17728
17729 /* If needed, relocate the picking offset with respect to the frame
17730 offset. */
17731 if (l->frame_offset_rel)
17732 {
17733 unsigned HOST_WIDE_INT off;
17734 switch (l->dw_loc_opc)
17735 {
17736 case DW_OP_pick:
17737 off = l->dw_loc_oprnd1.v.val_unsigned;
17738 break;
17739 case DW_OP_dup:
17740 off = 0;
17741 break;
17742 case DW_OP_over:
17743 off = 1;
17744 break;
17745 default:
17746 gcc_unreachable ();
17747 }
17748 /* frame_offset_ is the size of the current stack frame, including
17749 incoming arguments. Besides, the arguments are pushed
17750 right-to-left. Thus, in order to access the Nth argument from
17751 this operation node, the picking has to skip temporaries *plus*
17752 one stack slot per argument (0 for the first one, 1 for the second
17753 one, etc.).
17754
17755 The targeted argument number (N) is already set as the operand,
17756 and the number of temporaries can be computed with:
17757 frame_offset_ - dpi->args_count. */
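/* For instance (illustrative numbers): with dpi->args_count == 2 and
   frame_offset_ == 3 (both arguments plus one temporary on top), accessing
   argument 0 yields off = 0 + 3 - 2 = 1 (DW_OP_over) and accessing
   argument 1 yields off = 2 (DW_OP_pick 2).  */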
17758 off += frame_offset_ - dpi->args_count;
17759
17760 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17761 if (off > 255)
17762 return false;
17763
17764 if (off == 0)
17765 {
17766 l->dw_loc_opc = DW_OP_dup;
17767 l->dw_loc_oprnd1.v.val_unsigned = 0;
17768 }
17769 else if (off == 1)
17770 {
17771 l->dw_loc_opc = DW_OP_over;
17772 l->dw_loc_oprnd1.v.val_unsigned = 0;
17773 }
17774 else
17775 {
17776 l->dw_loc_opc = DW_OP_pick;
17777 l->dw_loc_oprnd1.v.val_unsigned = off;
17778 }
17779 }
17780
17781 /* Update frame_offset according to the effect the current operation has
17782 on the stack. */
17783 switch (l->dw_loc_opc)
17784 {
17785 case DW_OP_deref:
17786 case DW_OP_swap:
17787 case DW_OP_rot:
17788 case DW_OP_abs:
17789 case DW_OP_neg:
17790 case DW_OP_not:
17791 case DW_OP_plus_uconst:
17792 case DW_OP_skip:
17793 case DW_OP_reg0:
17794 case DW_OP_reg1:
17795 case DW_OP_reg2:
17796 case DW_OP_reg3:
17797 case DW_OP_reg4:
17798 case DW_OP_reg5:
17799 case DW_OP_reg6:
17800 case DW_OP_reg7:
17801 case DW_OP_reg8:
17802 case DW_OP_reg9:
17803 case DW_OP_reg10:
17804 case DW_OP_reg11:
17805 case DW_OP_reg12:
17806 case DW_OP_reg13:
17807 case DW_OP_reg14:
17808 case DW_OP_reg15:
17809 case DW_OP_reg16:
17810 case DW_OP_reg17:
17811 case DW_OP_reg18:
17812 case DW_OP_reg19:
17813 case DW_OP_reg20:
17814 case DW_OP_reg21:
17815 case DW_OP_reg22:
17816 case DW_OP_reg23:
17817 case DW_OP_reg24:
17818 case DW_OP_reg25:
17819 case DW_OP_reg26:
17820 case DW_OP_reg27:
17821 case DW_OP_reg28:
17822 case DW_OP_reg29:
17823 case DW_OP_reg30:
17824 case DW_OP_reg31:
17825 case DW_OP_bregx:
17826 case DW_OP_piece:
17827 case DW_OP_deref_size:
17828 case DW_OP_nop:
17829 case DW_OP_bit_piece:
17830 case DW_OP_implicit_value:
17831 case DW_OP_stack_value:
17832 break;
17833
17834 case DW_OP_addr:
17835 case DW_OP_const1u:
17836 case DW_OP_const1s:
17837 case DW_OP_const2u:
17838 case DW_OP_const2s:
17839 case DW_OP_const4u:
17840 case DW_OP_const4s:
17841 case DW_OP_const8u:
17842 case DW_OP_const8s:
17843 case DW_OP_constu:
17844 case DW_OP_consts:
17845 case DW_OP_dup:
17846 case DW_OP_over:
17847 case DW_OP_pick:
17848 case DW_OP_lit0:
17849 case DW_OP_lit1:
17850 case DW_OP_lit2:
17851 case DW_OP_lit3:
17852 case DW_OP_lit4:
17853 case DW_OP_lit5:
17854 case DW_OP_lit6:
17855 case DW_OP_lit7:
17856 case DW_OP_lit8:
17857 case DW_OP_lit9:
17858 case DW_OP_lit10:
17859 case DW_OP_lit11:
17860 case DW_OP_lit12:
17861 case DW_OP_lit13:
17862 case DW_OP_lit14:
17863 case DW_OP_lit15:
17864 case DW_OP_lit16:
17865 case DW_OP_lit17:
17866 case DW_OP_lit18:
17867 case DW_OP_lit19:
17868 case DW_OP_lit20:
17869 case DW_OP_lit21:
17870 case DW_OP_lit22:
17871 case DW_OP_lit23:
17872 case DW_OP_lit24:
17873 case DW_OP_lit25:
17874 case DW_OP_lit26:
17875 case DW_OP_lit27:
17876 case DW_OP_lit28:
17877 case DW_OP_lit29:
17878 case DW_OP_lit30:
17879 case DW_OP_lit31:
17880 case DW_OP_breg0:
17881 case DW_OP_breg1:
17882 case DW_OP_breg2:
17883 case DW_OP_breg3:
17884 case DW_OP_breg4:
17885 case DW_OP_breg5:
17886 case DW_OP_breg6:
17887 case DW_OP_breg7:
17888 case DW_OP_breg8:
17889 case DW_OP_breg9:
17890 case DW_OP_breg10:
17891 case DW_OP_breg11:
17892 case DW_OP_breg12:
17893 case DW_OP_breg13:
17894 case DW_OP_breg14:
17895 case DW_OP_breg15:
17896 case DW_OP_breg16:
17897 case DW_OP_breg17:
17898 case DW_OP_breg18:
17899 case DW_OP_breg19:
17900 case DW_OP_breg20:
17901 case DW_OP_breg21:
17902 case DW_OP_breg22:
17903 case DW_OP_breg23:
17904 case DW_OP_breg24:
17905 case DW_OP_breg25:
17906 case DW_OP_breg26:
17907 case DW_OP_breg27:
17908 case DW_OP_breg28:
17909 case DW_OP_breg29:
17910 case DW_OP_breg30:
17911 case DW_OP_breg31:
17912 case DW_OP_fbreg:
17913 case DW_OP_push_object_address:
17914 case DW_OP_call_frame_cfa:
17915 case DW_OP_GNU_variable_value:
17916 case DW_OP_GNU_addr_index:
17917 case DW_OP_GNU_const_index:
17918 ++frame_offset_;
17919 break;
17920
17921 case DW_OP_drop:
17922 case DW_OP_xderef:
17923 case DW_OP_and:
17924 case DW_OP_div:
17925 case DW_OP_minus:
17926 case DW_OP_mod:
17927 case DW_OP_mul:
17928 case DW_OP_or:
17929 case DW_OP_plus:
17930 case DW_OP_shl:
17931 case DW_OP_shr:
17932 case DW_OP_shra:
17933 case DW_OP_xor:
17934 case DW_OP_bra:
17935 case DW_OP_eq:
17936 case DW_OP_ge:
17937 case DW_OP_gt:
17938 case DW_OP_le:
17939 case DW_OP_lt:
17940 case DW_OP_ne:
17941 case DW_OP_regx:
17942 case DW_OP_xderef_size:
17943 --frame_offset_;
17944 break;
17945
17946 case DW_OP_call2:
17947 case DW_OP_call4:
17948 case DW_OP_call_ref:
17949 {
17950 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17951 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17952
17953 if (stack_usage == NULL)
17954 return false;
17955 frame_offset_ += *stack_usage;
17956 break;
17957 }
17958
17959 case DW_OP_implicit_pointer:
17960 case DW_OP_entry_value:
17961 case DW_OP_const_type:
17962 case DW_OP_regval_type:
17963 case DW_OP_deref_type:
17964 case DW_OP_convert:
17965 case DW_OP_reinterpret:
17966 case DW_OP_form_tls_address:
17967 case DW_OP_GNU_push_tls_address:
17968 case DW_OP_GNU_uninit:
17969 case DW_OP_GNU_encoded_addr:
17970 case DW_OP_GNU_implicit_pointer:
17971 case DW_OP_GNU_entry_value:
17972 case DW_OP_GNU_const_type:
17973 case DW_OP_GNU_regval_type:
17974 case DW_OP_GNU_deref_type:
17975 case DW_OP_GNU_convert:
17976 case DW_OP_GNU_reinterpret:
17977 case DW_OP_GNU_parameter_ref:
17978 /* loc_list_from_tree will probably not output these operations for
17979 size functions, so assume they will not appear here. */
17980 /* Fall through... */
17981
17982 default:
17983 gcc_unreachable ();
17984 }
17985
17986 /* Now, follow the control flow (except subroutine calls). */
17987 switch (l->dw_loc_opc)
17988 {
17989 case DW_OP_bra:
17990 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17991 frame_offsets))
17992 return false;
17993 /* Fall through. */
17994
17995 case DW_OP_skip:
17996 l = l->dw_loc_oprnd1.v.val_loc;
17997 break;
17998
17999 case DW_OP_stack_value:
18000 return true;
18001
18002 default:
18003 l = l->dw_loc_next;
18004 break;
18005 }
18006 }
18007
18008 return true;
18009 }
18010
18011 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18012 operations) in order to resolve the operand of DW_OP_pick operations that
18013 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18014 offset *before* LOC is executed. Return whether all relocations were
18015 successful. */
18016
18017 static bool
18018 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18019 struct dwarf_procedure_info *dpi)
18020 {
18021 /* Associate to all visited operations the frame offset *before* evaluating
18022 this operation. */
18023 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18024
18025 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18026 frame_offsets);
18027 }
18028
18029 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18030 Return NULL if it is not possible. */
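
/* As a rough illustration, the kind of function this aims at is a
   stor-layout size function along the lines of

     static unsigned long foo__size (int n) { return 8 + n * 4; }

   (a made-up example), whose saved tree body is a single
   RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)) as matched below.  */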
18031
18032 static dw_die_ref
18033 function_to_dwarf_procedure (tree fndecl)
18034 {
18035 struct loc_descr_context ctx;
18036 struct dwarf_procedure_info dpi;
18037 dw_die_ref dwarf_proc_die;
18038 tree tree_body = DECL_SAVED_TREE (fndecl);
18039 dw_loc_descr_ref loc_body, epilogue;
18040
18041 tree cursor;
18042 unsigned i;
18043
18044 /* Do not generate multiple DWARF procedures for the same function
18045 declaration. */
18046 dwarf_proc_die = lookup_decl_die (fndecl);
18047 if (dwarf_proc_die != NULL)
18048 return dwarf_proc_die;
18049
18050 /* DWARF procedures are available starting with the DWARFv3 standard. */
18051 if (dwarf_version < 3 && dwarf_strict)
18052 return NULL;
18053
18054 /* We handle only functions for which we still have a body, that return a
18055 supported type and that take arguments with supported types. Note that
18056 there is no point in translating functions that return nothing. */
18057 if (tree_body == NULL_TREE
18058 || DECL_RESULT (fndecl) == NULL_TREE
18059 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18060 return NULL;
18061
18062 for (cursor = DECL_ARGUMENTS (fndecl);
18063 cursor != NULL_TREE;
18064 cursor = TREE_CHAIN (cursor))
18065 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18066 return NULL;
18067
18068 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18069 if (TREE_CODE (tree_body) != RETURN_EXPR)
18070 return NULL;
18071 tree_body = TREE_OPERAND (tree_body, 0);
18072 if (TREE_CODE (tree_body) != MODIFY_EXPR
18073 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18074 return NULL;
18075 tree_body = TREE_OPERAND (tree_body, 1);
18076
18077 /* Try to translate the body expression itself. Note that this will probably
18078 cause an infinite recursion if its call graph has a cycle. This is very
18079 unlikely for size functions, however, so don't bother with such things at
18080 the moment. */
18081 ctx.context_type = NULL_TREE;
18082 ctx.base_decl = NULL_TREE;
18083 ctx.dpi = &dpi;
18084 ctx.placeholder_arg = false;
18085 ctx.placeholder_seen = false;
18086 dpi.fndecl = fndecl;
18087 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18088 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18089 if (!loc_body)
18090 return NULL;
18091
18092 /* After evaluating all operands in "loc_body", we should still have on the
18093 stack all arguments plus the desired function result (top of the stack).
18094 Generate code in order to keep only the result in our stack frame. */
18095 epilogue = NULL;
18096 for (i = 0; i < dpi.args_count; ++i)
18097 {
18098 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18099 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18100 op_couple->dw_loc_next->dw_loc_next = epilogue;
18101 epilogue = op_couple;
18102 }
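/* For instance, with two arguments the epilogue built above is
   DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop, which removes both
   argument slots from under the result.  */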
18103 add_loc_descr (&loc_body, epilogue);
18104 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18105 return NULL;
18106
18107 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18108 because they are considered useful. Now that there is an epilogue, they
18109 are not useful anymore, so give it another try. */
18110 loc_descr_without_nops (loc_body);
18111
18112 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18113 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
18114 though, given that size functions do not come from source, so they should
18115 not have a dedicated DW_TAG_subprogram DIE. */
18116 dwarf_proc_die
18117 = new_dwarf_proc_die (loc_body, fndecl,
18118 get_context_die (DECL_CONTEXT (fndecl)));
18119
18120 /* The called DWARF procedure consumes one stack slot per argument and
18121 returns one stack slot. */
18122 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18123
18124 return dwarf_proc_die;
18125 }
18126
18127
18128 /* Generate a Dwarf location list representing LOC.
18129 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18130 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18131 If WANT_ADDRESS is 2, an expression computing an address usable in a
18132 location expression is returned (i.e. DW_OP_reg can be used
18133 to refer to register values).
18134
18135 CONTEXT provides information to customize the location descriptions
18136 generation. Its context_type field specifies what type is implicitly
18137 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18138 will not be generated.
18139
18140 Its DPI field determines whether we are generating a DWARF expression for a
18141 DWARF procedure, so PARM_DECL references are processed specifically.
18142
18143 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18144 and dpi fields were null. */
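
/* For instance, for a variable living in memory at some address <addr>
   (a made-up example), WANT_ADDRESS == 1 would typically produce just
   DW_OP_addr <addr>, while WANT_ADDRESS == 0 would produce
   DW_OP_addr <addr>; DW_OP_deref (or DW_OP_deref_size for smaller objects)
   so that the value itself ends up on the stack.  */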
18145
18146 static dw_loc_list_ref
18147 loc_list_from_tree_1 (tree loc, int want_address,
18148 struct loc_descr_context *context)
18149 {
18150 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18151 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18152 int have_address = 0;
18153 enum dwarf_location_atom op;
18154
18155 /* ??? Most of the time we do not take proper care to sign/zero
18156 extend the values. Hopefully this won't be a real
18157 problem... */
18158
18159 if (context != NULL
18160 && context->base_decl == loc
18161 && want_address == 0)
18162 {
18163 if (dwarf_version >= 3 || !dwarf_strict)
18164 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18165 NULL, 0, NULL, 0, NULL);
18166 else
18167 return NULL;
18168 }
18169
18170 switch (TREE_CODE (loc))
18171 {
18172 case ERROR_MARK:
18173 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18174 return 0;
18175
18176 case PLACEHOLDER_EXPR:
18177 /* This case involves extracting fields from an object to determine the
18178 position of other fields. It is supposed to appear only as the first
18179 operand of COMPONENT_REF nodes and to reference precisely the type
18180 that the context allows. */
18181 if (context != NULL
18182 && TREE_TYPE (loc) == context->context_type
18183 && want_address >= 1)
18184 {
18185 if (dwarf_version >= 3 || !dwarf_strict)
18186 {
18187 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18188 have_address = 1;
18189 break;
18190 }
18191 else
18192 return NULL;
18193 }
18194 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18195 the single argument passed by consumer. */
18196 else if (context != NULL
18197 && context->placeholder_arg
18198 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18199 && want_address == 0)
18200 {
18201 ret = new_loc_descr (DW_OP_pick, 0, 0);
18202 ret->frame_offset_rel = 1;
18203 context->placeholder_seen = true;
18204 break;
18205 }
18206 else
18207 expansion_failed (loc, NULL_RTX,
18208 "PLACEHOLDER_EXPR for an unexpected type");
18209 break;
18210
18211 case CALL_EXPR:
18212 {
18213 const int nargs = call_expr_nargs (loc);
18214 tree callee = get_callee_fndecl (loc);
18215 int i;
18216 dw_die_ref dwarf_proc;
18217
18218 if (callee == NULL_TREE)
18219 goto call_expansion_failed;
18220
18221 /* We handle only functions that return an integer. */
18222 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18223 goto call_expansion_failed;
18224
18225 dwarf_proc = function_to_dwarf_procedure (callee);
18226 if (dwarf_proc == NULL)
18227 goto call_expansion_failed;
18228
18229 /* Evaluate arguments right-to-left so that the first argument will
18230 be the top-most one on the stack. */
18231 for (i = nargs - 1; i >= 0; --i)
18232 {
18233 dw_loc_descr_ref loc_descr
18234 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18235 context);
18236
18237 if (loc_descr == NULL)
18238 goto call_expansion_failed;
18239
18240 add_loc_descr (&ret, loc_descr);
18241 }
18242
18243 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18244 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18245 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18246 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18247 add_loc_descr (&ret, ret1);
18248 break;
18249
18250 call_expansion_failed:
18251 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18252 /* There are no opcodes for these operations. */
18253 return 0;
18254 }
18255
18256 case PREINCREMENT_EXPR:
18257 case PREDECREMENT_EXPR:
18258 case POSTINCREMENT_EXPR:
18259 case POSTDECREMENT_EXPR:
18260 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18261 /* There are no opcodes for these operations. */
18262 return 0;
18263
18264 case ADDR_EXPR:
18265 /* If we already want an address, see if there is an INDIRECT_REF inside,
18266 e.g. for &this->field. */
18267 if (want_address)
18268 {
18269 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18270 (loc, want_address == 2, context);
18271 if (list_ret)
18272 have_address = 1;
18273 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18274 && (ret = cst_pool_loc_descr (loc)))
18275 have_address = 1;
18276 }
18277 /* Otherwise, process the argument and look for the address. */
18278 if (!list_ret && !ret)
18279 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18280 else
18281 {
18282 if (want_address)
18283 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18284 return NULL;
18285 }
18286 break;
18287
18288 case VAR_DECL:
18289 if (DECL_THREAD_LOCAL_P (loc))
18290 {
18291 rtx rtl;
18292 enum dwarf_location_atom tls_op;
18293 enum dtprel_bool dtprel = dtprel_false;
18294
18295 if (targetm.have_tls)
18296 {
18297 /* If this is not defined, we have no way to emit the
18298 data. */
18299 if (!targetm.asm_out.output_dwarf_dtprel)
18300 return 0;
18301
18302 /* The way DW_OP_GNU_push_tls_address is specified, we
18303 can only look up addresses of objects in the current
18304 module. We used DW_OP_addr as first op, but that's
18305 wrong, because DW_OP_addr is relocated by the debug
18306 info consumer, while DW_OP_GNU_push_tls_address
18307 operand shouldn't be. */
18308 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18309 return 0;
18310 dtprel = dtprel_true;
18311 /* We check for DWARF 5 here because gdb did not implement
18312 DW_OP_form_tls_address until after 7.12. */
18313 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18314 : DW_OP_GNU_push_tls_address);
18315 }
18316 else
18317 {
18318 if (!targetm.emutls.debug_form_tls_address
18319 || !(dwarf_version >= 3 || !dwarf_strict))
18320 return 0;
18321 /* We stuffed the control variable into the DECL_VALUE_EXPR
18322 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18323 no longer appear in gimple code. We used the control
18324 variable in specific so that we could pick it up here. */
18325 loc = DECL_VALUE_EXPR (loc);
18326 tls_op = DW_OP_form_tls_address;
18327 }
18328
18329 rtl = rtl_for_decl_location (loc);
18330 if (rtl == NULL_RTX)
18331 return 0;
18332
18333 if (!MEM_P (rtl))
18334 return 0;
18335 rtl = XEXP (rtl, 0);
18336 if (! CONSTANT_P (rtl))
18337 return 0;
18338
18339 ret = new_addr_loc_descr (rtl, dtprel);
18340 ret1 = new_loc_descr (tls_op, 0, 0);
18341 add_loc_descr (&ret, ret1);
18342
18343 have_address = 1;
18344 break;
18345 }
18346 /* FALLTHRU */
18347
18348 case PARM_DECL:
18349 if (context != NULL && context->dpi != NULL
18350 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18351 {
18352 /* We are generating code for a DWARF procedure and we want to access
18353 one of its arguments: find the appropriate argument offset and let
18354 the resolve_args_picking pass compute the offset that complies
18355 with the stack frame size. */
18356 unsigned i = 0;
18357 tree cursor;
18358
18359 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18360 cursor != NULL_TREE && cursor != loc;
18361 cursor = TREE_CHAIN (cursor), ++i)
18362 ;
18363 /* If we are translating a DWARF procedure, all referenced parameters
18364 must belong to the current function. */
18365 gcc_assert (cursor != NULL_TREE);
18366
18367 ret = new_loc_descr (DW_OP_pick, i, 0);
18368 ret->frame_offset_rel = 1;
18369 break;
18370 }
18371 /* FALLTHRU */
18372
18373 case RESULT_DECL:
18374 if (DECL_HAS_VALUE_EXPR_P (loc))
18375 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18376 want_address, context);
18377 /* FALLTHRU */
18378
18379 case FUNCTION_DECL:
18380 {
18381 rtx rtl;
18382 var_loc_list *loc_list = lookup_decl_loc (loc);
18383
18384 if (loc_list && loc_list->first)
18385 {
18386 list_ret = dw_loc_list (loc_list, loc, want_address);
18387 have_address = want_address != 0;
18388 break;
18389 }
18390 rtl = rtl_for_decl_location (loc);
18391 if (rtl == NULL_RTX)
18392 {
18393 if (TREE_CODE (loc) != FUNCTION_DECL
18394 && early_dwarf
18395 && current_function_decl
18396 && want_address != 1
18397 && ! DECL_IGNORED_P (loc)
18398 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18399 || POINTER_TYPE_P (TREE_TYPE (loc)))
18400 && DECL_CONTEXT (loc) == current_function_decl
18401 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18402 <= DWARF2_ADDR_SIZE))
18403 {
18404 dw_die_ref ref = lookup_decl_die (loc);
18405 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18406 if (ref)
18407 {
18408 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18409 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18410 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18411 }
18412 else
18413 {
18414 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18415 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18416 }
18417 break;
18418 }
18419 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18420 return 0;
18421 }
18422 else if (CONST_INT_P (rtl))
18423 {
18424 HOST_WIDE_INT val = INTVAL (rtl);
18425 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18426 val &= GET_MODE_MASK (DECL_MODE (loc));
18427 ret = int_loc_descriptor (val);
18428 }
18429 else if (GET_CODE (rtl) == CONST_STRING)
18430 {
18431 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18432 return 0;
18433 }
18434 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18435 ret = new_addr_loc_descr (rtl, dtprel_false);
18436 else
18437 {
18438 machine_mode mode, mem_mode;
18439
18440 /* Certain constructs can only be represented at top-level. */
18441 if (want_address == 2)
18442 {
18443 ret = loc_descriptor (rtl, VOIDmode,
18444 VAR_INIT_STATUS_INITIALIZED);
18445 have_address = 1;
18446 }
18447 else
18448 {
18449 mode = GET_MODE (rtl);
18450 mem_mode = VOIDmode;
18451 if (MEM_P (rtl))
18452 {
18453 mem_mode = mode;
18454 mode = get_address_mode (rtl);
18455 rtl = XEXP (rtl, 0);
18456 have_address = 1;
18457 }
18458 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18459 VAR_INIT_STATUS_INITIALIZED);
18460 }
18461 if (!ret)
18462 expansion_failed (loc, rtl,
18463 "failed to produce loc descriptor for rtl");
18464 }
18465 }
18466 break;
18467
18468 case MEM_REF:
18469 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18470 {
18471 have_address = 1;
18472 goto do_plus;
18473 }
18474 /* Fallthru. */
18475 case INDIRECT_REF:
18476 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18477 have_address = 1;
18478 break;
18479
18480 case TARGET_MEM_REF:
18481 case SSA_NAME:
18482 case DEBUG_EXPR_DECL:
18483 return NULL;
18484
18485 case COMPOUND_EXPR:
18486 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18487 context);
18488
18489 CASE_CONVERT:
18490 case VIEW_CONVERT_EXPR:
18491 case SAVE_EXPR:
18492 case MODIFY_EXPR:
18493 case NON_LVALUE_EXPR:
18494 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18495 context);
18496
18497 case COMPONENT_REF:
18498 case BIT_FIELD_REF:
18499 case ARRAY_REF:
18500 case ARRAY_RANGE_REF:
18501 case REALPART_EXPR:
18502 case IMAGPART_EXPR:
18503 {
18504 tree obj, offset;
18505 poly_int64 bitsize, bitpos, bytepos;
18506 machine_mode mode;
18507 int unsignedp, reversep, volatilep = 0;
18508
18509 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18510 &unsignedp, &reversep, &volatilep);
18511
18512 gcc_assert (obj != loc);
18513
18514 list_ret = loc_list_from_tree_1 (obj,
18515 want_address == 2
18516 && known_eq (bitpos, 0)
18517 && !offset ? 2 : 1,
18518 context);
18519 /* TODO: We can extract the value of the small expression via shifting even
18520 for a nonzero bitpos. */
18521 if (list_ret == 0)
18522 return 0;
18523 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18524 || !multiple_p (bitsize, BITS_PER_UNIT))
18525 {
18526 expansion_failed (loc, NULL_RTX,
18527 "bitfield access");
18528 return 0;
18529 }
18530
18531 if (offset != NULL_TREE)
18532 {
18533 /* Variable offset. */
18534 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18535 if (list_ret1 == 0)
18536 return 0;
18537 add_loc_list (&list_ret, list_ret1);
18538 if (!list_ret)
18539 return 0;
18540 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18541 }
18542
18543 HOST_WIDE_INT value;
18544 if (bytepos.is_constant (&value) && value > 0)
18545 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18546 value, 0));
18547 else if (maybe_ne (bytepos, 0))
18548 loc_list_plus_const (list_ret, bytepos);
18549
18550 have_address = 1;
18551 break;
18552 }
18553
18554 case INTEGER_CST:
18555 if ((want_address || !tree_fits_shwi_p (loc))
18556 && (ret = cst_pool_loc_descr (loc)))
18557 have_address = 1;
18558 else if (want_address == 2
18559 && tree_fits_shwi_p (loc)
18560 && (ret = address_of_int_loc_descriptor
18561 (int_size_in_bytes (TREE_TYPE (loc)),
18562 tree_to_shwi (loc))))
18563 have_address = 1;
18564 else if (tree_fits_shwi_p (loc))
18565 ret = int_loc_descriptor (tree_to_shwi (loc));
18566 else if (tree_fits_uhwi_p (loc))
18567 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18568 else
18569 {
18570 expansion_failed (loc, NULL_RTX,
18571 "Integer operand is not host integer");
18572 return 0;
18573 }
18574 break;
18575
18576 case POLY_INT_CST:
18577 {
18578 if (want_address)
18579 {
18580 expansion_failed (loc, NULL_RTX,
18581 "constant address with a runtime component");
18582 return 0;
18583 }
18584 poly_int64 value;
18585 if (!poly_int_tree_p (loc, &value))
18586 {
18587 expansion_failed (loc, NULL_RTX, "constant too big");
18588 return 0;
18589 }
18590 ret = int_loc_descriptor (value);
18591 }
18592 break;
18593
18594 case CONSTRUCTOR:
18595 case REAL_CST:
18596 case STRING_CST:
18597 case COMPLEX_CST:
18598 if ((ret = cst_pool_loc_descr (loc)))
18599 have_address = 1;
18600 else if (TREE_CODE (loc) == CONSTRUCTOR)
18601 {
18602 tree type = TREE_TYPE (loc);
18603 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18604 unsigned HOST_WIDE_INT offset = 0;
18605 unsigned HOST_WIDE_INT cnt;
18606 constructor_elt *ce;
18607
18608 if (TREE_CODE (type) == RECORD_TYPE)
18609 {
18610 /* This is very limited, but it's enough to output
18611 pointers to member functions, as long as the
18612 referenced function is defined in the current
18613 translation unit. */
18614 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18615 {
18616 tree val = ce->value;
18617
18618 tree field = ce->index;
18619
18620 if (val)
18621 STRIP_NOPS (val);
18622
18623 if (!field || DECL_BIT_FIELD (field))
18624 {
18625 expansion_failed (loc, NULL_RTX,
18626 "bitfield in record type constructor");
18627 size = offset = (unsigned HOST_WIDE_INT)-1;
18628 ret = NULL;
18629 break;
18630 }
18631
18632 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18633 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18634 gcc_assert (pos + fieldsize <= size);
18635 if (pos < offset)
18636 {
18637 expansion_failed (loc, NULL_RTX,
18638 "out-of-order fields in record constructor");
18639 size = offset = (unsigned HOST_WIDE_INT)-1;
18640 ret = NULL;
18641 break;
18642 }
18643 if (pos > offset)
18644 {
18645 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18646 add_loc_descr (&ret, ret1);
18647 offset = pos;
18648 }
18649 if (val && fieldsize != 0)
18650 {
18651 ret1 = loc_descriptor_from_tree (val, want_address, context);
18652 if (!ret1)
18653 {
18654 expansion_failed (loc, NULL_RTX,
18655 "unsupported expression in field");
18656 size = offset = (unsigned HOST_WIDE_INT)-1;
18657 ret = NULL;
18658 break;
18659 }
18660 add_loc_descr (&ret, ret1);
18661 }
18662 if (fieldsize)
18663 {
18664 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18665 add_loc_descr (&ret, ret1);
18666 offset = pos + fieldsize;
18667 }
18668 }
18669
18670 if (offset != size)
18671 {
18672 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18673 add_loc_descr (&ret, ret1);
18674 offset = size;
18675 }
18676
18677 have_address = !!want_address;
18678 }
18679 else
18680 expansion_failed (loc, NULL_RTX,
18681 "constructor of non-record type");
18682 }
18683 else
18684 /* We could construct small constants here using int_loc_descriptor. */
18685 expansion_failed (loc, NULL_RTX,
18686 "constructor or constant not in constant pool");
18687 break;
18688
18689 case TRUTH_AND_EXPR:
18690 case TRUTH_ANDIF_EXPR:
18691 case BIT_AND_EXPR:
18692 op = DW_OP_and;
18693 goto do_binop;
18694
18695 case TRUTH_XOR_EXPR:
18696 case BIT_XOR_EXPR:
18697 op = DW_OP_xor;
18698 goto do_binop;
18699
18700 case TRUTH_OR_EXPR:
18701 case TRUTH_ORIF_EXPR:
18702 case BIT_IOR_EXPR:
18703 op = DW_OP_or;
18704 goto do_binop;
18705
18706 case FLOOR_DIV_EXPR:
18707 case CEIL_DIV_EXPR:
18708 case ROUND_DIV_EXPR:
18709 case TRUNC_DIV_EXPR:
18710 case EXACT_DIV_EXPR:
18711 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18712 return 0;
18713 op = DW_OP_div;
18714 goto do_binop;
18715
18716 case MINUS_EXPR:
18717 op = DW_OP_minus;
18718 goto do_binop;
18719
18720 case FLOOR_MOD_EXPR:
18721 case CEIL_MOD_EXPR:
18722 case ROUND_MOD_EXPR:
18723 case TRUNC_MOD_EXPR:
18724 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18725 {
18726 op = DW_OP_mod;
18727 goto do_binop;
18728 }
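/* DW_OP_mod is commonly interpreted as an unsigned operation, which would
   explain why it is only used for unsigned types just above; the signed case
   is open-coded below as a - (a / b) * b using the signed DW_OP_div.  With a
   and b on the stack (shown bottom to top), the appended sequence evolves it
   as follows:

     initial                  a b
     DW_OP_over; DW_OP_over   a b a b
     DW_OP_div                a b a/b
     DW_OP_mul                a (a/b)*b
     DW_OP_minus              a - (a/b)*b  */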
18729 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18730 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18731 if (list_ret == 0 || list_ret1 == 0)
18732 return 0;
18733
18734 add_loc_list (&list_ret, list_ret1);
18735 if (list_ret == 0)
18736 return 0;
18737 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18738 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18739 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18740 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18741 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18742 break;
18743
18744 case MULT_EXPR:
18745 op = DW_OP_mul;
18746 goto do_binop;
18747
18748 case LSHIFT_EXPR:
18749 op = DW_OP_shl;
18750 goto do_binop;
18751
18752 case RSHIFT_EXPR:
18753 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18754 goto do_binop;
18755
18756 case POINTER_PLUS_EXPR:
18757 case PLUS_EXPR:
18758 do_plus:
18759 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18760 {
18761 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18762 smarter to encode their opposite. The DW_OP_plus_uconst operation
18763 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18764 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18765 bytes, Y being the size of the operation that pushes the opposite
18766 of the addend. So let's choose the smallest representation. */
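/* For instance, assuming DWARF2_ADDR_SIZE == 4 (illustrative), an addend of
   0xfffffff0 would need DW_OP_plus_uconst followed by a 5-byte ULEB128
   (6 bytes in total), whereas pushing its opposite, 16, costs only
   DW_OP_lit16 followed by DW_OP_minus (2 bytes).  */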
18767 const tree tree_addend = TREE_OPERAND (loc, 1);
18768 offset_int wi_addend;
18769 HOST_WIDE_INT shwi_addend;
18770 dw_loc_descr_ref loc_naddend;
18771
18772 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18773 if (list_ret == 0)
18774 return 0;
18775
18776 /* Try to get the literal to push. It is the opposite of the addend,
18777 so as we rely on wrapping during DWARF evaluation, first decode
18778 the literal as a "DWARF-sized" signed number. */
18779 wi_addend = wi::to_offset (tree_addend);
18780 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18781 shwi_addend = wi_addend.to_shwi ();
18782 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18783 ? int_loc_descriptor (-shwi_addend)
18784 : NULL;
18785
18786 if (loc_naddend != NULL
18787 && ((unsigned) size_of_uleb128 (shwi_addend)
18788 > size_of_loc_descr (loc_naddend)))
18789 {
18790 add_loc_descr_to_each (list_ret, loc_naddend);
18791 add_loc_descr_to_each (list_ret,
18792 new_loc_descr (DW_OP_minus, 0, 0));
18793 }
18794 else
18795 {
18796 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18797 {
18798 loc_naddend = loc_cur;
18799 loc_cur = loc_cur->dw_loc_next;
18800 ggc_free (loc_naddend);
18801 }
18802 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18803 }
18804 break;
18805 }
18806
18807 op = DW_OP_plus;
18808 goto do_binop;
18809
18810 case LE_EXPR:
18811 op = DW_OP_le;
18812 goto do_comp_binop;
18813
18814 case GE_EXPR:
18815 op = DW_OP_ge;
18816 goto do_comp_binop;
18817
18818 case LT_EXPR:
18819 op = DW_OP_lt;
18820 goto do_comp_binop;
18821
18822 case GT_EXPR:
18823 op = DW_OP_gt;
18824 goto do_comp_binop;
18825
18826 do_comp_binop:
18827 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18828 {
18829 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18830 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18831 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18832 TREE_CODE (loc));
18833 break;
18834 }
18835 else
18836 goto do_binop;
18837
18838 case EQ_EXPR:
18839 op = DW_OP_eq;
18840 goto do_binop;
18841
18842 case NE_EXPR:
18843 op = DW_OP_ne;
18844 goto do_binop;
18845
18846 do_binop:
18847 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18848 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18849 if (list_ret == 0 || list_ret1 == 0)
18850 return 0;
18851
18852 add_loc_list (&list_ret, list_ret1);
18853 if (list_ret == 0)
18854 return 0;
18855 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18856 break;
18857
18858 case TRUTH_NOT_EXPR:
18859 case BIT_NOT_EXPR:
18860 op = DW_OP_not;
18861 goto do_unop;
18862
18863 case ABS_EXPR:
18864 op = DW_OP_abs;
18865 goto do_unop;
18866
18867 case NEGATE_EXPR:
18868 op = DW_OP_neg;
18869 goto do_unop;
18870
18871 do_unop:
18872 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18873 if (list_ret == 0)
18874 return 0;
18875
18876 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18877 break;
18878
18879 case MIN_EXPR:
18880 case MAX_EXPR:
18881 {
18882 const enum tree_code code =
18883 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18884
18885 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18886 build2 (code, integer_type_node,
18887 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18888 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18889 }
18890
18891 /* fall through */
18892
18893 case COND_EXPR:
18894 {
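/* The generated shape is, roughly:

     <condition>; DW_OP_bra L1; <else-expr>; DW_OP_skip L2;
     L1: <then-expr>; L2: DW_OP_nop

   where L1/L2 simply name the operations the branch operands point at
   (the labels are illustrative).  */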
18895 dw_loc_descr_ref lhs
18896 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18897 dw_loc_list_ref rhs
18898 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18899 dw_loc_descr_ref bra_node, jump_node, tmp;
18900
18901 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18902 if (list_ret == 0 || lhs == 0 || rhs == 0)
18903 return 0;
18904
18905 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18906 add_loc_descr_to_each (list_ret, bra_node);
18907
18908 add_loc_list (&list_ret, rhs);
18909 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18910 add_loc_descr_to_each (list_ret, jump_node);
18911
18912 add_loc_descr_to_each (list_ret, lhs);
18913 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18914 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18915
18916 /* ??? Need a node to point the skip at. Use a nop. */
18917 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18918 add_loc_descr_to_each (list_ret, tmp);
18919 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18920 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18921 }
18922 break;
18923
18924 case FIX_TRUNC_EXPR:
18925 return 0;
18926
18927 default:
18928 /* Leave front-end specific codes as simply unknown. This comes
18929 up, for instance, with the C STMT_EXPR. */
18930 if ((unsigned int) TREE_CODE (loc)
18931 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18932 {
18933 expansion_failed (loc, NULL_RTX,
18934 "language specific tree node");
18935 return 0;
18936 }
18937
18938 /* Otherwise this is a generic code; we should just list all of
18939 these explicitly. We forgot one. */
18940 if (flag_checking)
18941 gcc_unreachable ();
18942
18943 /* In a release build, we want to degrade gracefully: better to
18944 generate incomplete debugging information than to crash. */
18945 return NULL;
18946 }
18947
18948 if (!ret && !list_ret)
18949 return 0;
18950
18951 if (want_address == 2 && !have_address
18952 && (dwarf_version >= 4 || !dwarf_strict))
18953 {
18954 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18955 {
18956 expansion_failed (loc, NULL_RTX,
18957 "DWARF address size mismatch");
18958 return 0;
18959 }
18960 if (ret)
18961 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18962 else
18963 add_loc_descr_to_each (list_ret,
18964 new_loc_descr (DW_OP_stack_value, 0, 0));
18965 have_address = 1;
18966 }
18967 /* Show if we can't fill the request for an address. */
18968 if (want_address && !have_address)
18969 {
18970 expansion_failed (loc, NULL_RTX,
18971 "Want address and only have value");
18972 return 0;
18973 }
18974
18975 gcc_assert (!ret || !list_ret);
18976
18977 /* If we've got an address and don't want one, dereference. */
18978 if (!want_address && have_address)
18979 {
18980 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18981
18982 if (size > DWARF2_ADDR_SIZE || size == -1)
18983 {
18984 expansion_failed (loc, NULL_RTX,
18985 "DWARF address size mismatch");
18986 return 0;
18987 }
18988 else if (size == DWARF2_ADDR_SIZE)
18989 op = DW_OP_deref;
18990 else
18991 op = DW_OP_deref_size;
18992
18993 if (ret)
18994 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18995 else
18996 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18997 }
18998 if (ret)
18999 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19000
19001 return list_ret;
19002 }
19003
19004 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19005 expressions. */
19006
19007 static dw_loc_list_ref
19008 loc_list_from_tree (tree loc, int want_address,
19009 struct loc_descr_context *context)
19010 {
19011 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19012
19013 for (dw_loc_list_ref loc_cur = result;
19014 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19015 loc_descr_without_nops (loc_cur->expr);
19016 return result;
19017 }
19018
19019 /* Same as above but return only a single location expression. */
19020 static dw_loc_descr_ref
19021 loc_descriptor_from_tree (tree loc, int want_address,
19022 struct loc_descr_context *context)
19023 {
19024 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19025 if (!ret)
19026 return NULL;
19027 if (ret->dw_loc_next)
19028 {
19029 expansion_failed (loc, NULL_RTX,
19030 "Location list where only loc descriptor needed");
19031 return NULL;
19032 }
19033 return ret->expr;
19034 }
19035
19036 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19037 pointer to the declared type for the relevant field variable, or return
19038 `integer_type_node' if the given node turns out to be an
19039 ERROR_MARK node. */
19040
19041 static inline tree
19042 field_type (const_tree decl)
19043 {
19044 tree type;
19045
19046 if (TREE_CODE (decl) == ERROR_MARK)
19047 return integer_type_node;
19048
19049 type = DECL_BIT_FIELD_TYPE (decl);
19050 if (type == NULL_TREE)
19051 type = TREE_TYPE (decl);
19052
19053 return type;
19054 }
19055
19056 /* Given a pointer to a tree node, return the alignment in bits for
19057 it, or else return BITS_PER_WORD if the node actually turns out to
19058 be an ERROR_MARK node. */
19059
19060 static inline unsigned
19061 simple_type_align_in_bits (const_tree type)
19062 {
19063 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19064 }
19065
19066 static inline unsigned
19067 simple_decl_align_in_bits (const_tree decl)
19068 {
19069 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19070 }
19071
19072 /* Return the result of rounding T up to ALIGN. */
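/* For instance, round_up_to_align (13, 8) and round_up_to_align (16, 8)
   both yield 16.  */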
19073
19074 static inline offset_int
19075 round_up_to_align (const offset_int &t, unsigned int align)
19076 {
19077 return wi::udiv_trunc (t + align - 1, align) * align;
19078 }
19079
19080 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19081 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19082 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19083 if we fail to return the size in one of these two forms. */
19084
19085 static dw_loc_descr_ref
19086 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19087 {
19088 tree tree_size;
19089 struct loc_descr_context ctx;
19090
19091 /* Prefer to return a constant integer, if possible. */
19092 *cst_size = int_size_in_bytes (type);
19093 if (*cst_size != -1)
19094 return NULL;
19095
19096 ctx.context_type = const_cast<tree> (type);
19097 ctx.base_decl = NULL_TREE;
19098 ctx.dpi = NULL;
19099 ctx.placeholder_arg = false;
19100 ctx.placeholder_seen = false;
19101
19102 type = TYPE_MAIN_VARIANT (type);
19103 tree_size = TYPE_SIZE_UNIT (type);
19104 return ((tree_size != NULL_TREE)
19105 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19106 : NULL);
19107 }
19108
19109 /* Helper structure for RECORD_TYPE processing. */
19110 struct vlr_context
19111 {
19112 /* Root RECORD_TYPE. It is needed to generate data member location
19113 descriptions in variable-length records (VLR), but also to cope with
19114 variants, which are composed of nested structures multiplexed with
19115 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19116 function processing a FIELD_DECL, it is required to be non-null. */
19117 tree struct_type;
19118 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19119 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19120 this variant part as part of the root record (in storage units). For
19121 regular records, it must be NULL_TREE. */
19122 tree variant_part_offset;
19123 };
19124
19125 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19126 addressed byte of the "containing object" for the given FIELD_DECL. If
19127 possible, return a native constant through CST_OFFSET (in which case NULL is
19128 returned); otherwise return a DWARF expression that computes the offset.
19129
19130 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19131 that offset is, either because the argument turns out to be a pointer to an
19132 ERROR_MARK node, or because the offset expression is too complex for us.
19133
19134 CTX is required: see the comment for VLR_CONTEXT. */
19135
19136 static dw_loc_descr_ref
19137 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19138 HOST_WIDE_INT *cst_offset)
19139 {
19140 tree tree_result;
19141 dw_loc_list_ref loc_result;
19142
19143 *cst_offset = 0;
19144
19145 if (TREE_CODE (decl) == ERROR_MARK)
19146 return NULL;
19147 else
19148 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19149
19150 /* We cannot handle variable bit offsets at the moment, so abort if that is
19151 the case. */
19152 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19153 return NULL;
19154
19155 /* We used to handle only constant offsets in all cases. Now, we handle
19156 dynamic byte offsets properly only when the PCC bitfield layout rules
19157 don't matter. */
19158 if (PCC_BITFIELD_TYPE_MATTERS
19159 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19160 {
19161 offset_int object_offset_in_bits;
19162 offset_int object_offset_in_bytes;
19163 offset_int bitpos_int;
19164 tree type;
19165 tree field_size_tree;
19166 offset_int deepest_bitpos;
19167 offset_int field_size_in_bits;
19168 unsigned int type_align_in_bits;
19169 unsigned int decl_align_in_bits;
19170 offset_int type_size_in_bits;
19171
19172 bitpos_int = wi::to_offset (bit_position (decl));
19173 type = field_type (decl);
19174 type_size_in_bits = offset_int_type_size_in_bits (type);
19175 type_align_in_bits = simple_type_align_in_bits (type);
19176
19177 field_size_tree = DECL_SIZE (decl);
19178
19179 /* The size could be unspecified if there was an error, or for
19180 a flexible array member. */
19181 if (!field_size_tree)
19182 field_size_tree = bitsize_zero_node;
19183
19184 /* If the size of the field is not constant, use the type size. */
19185 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19186 field_size_in_bits = wi::to_offset (field_size_tree);
19187 else
19188 field_size_in_bits = type_size_in_bits;
19189
19190 decl_align_in_bits = simple_decl_align_in_bits (decl);
19191
19192 /* The GCC front-end doesn't make any attempt to keep track of the
19193 starting bit offset (relative to the start of the containing
19194 structure type) of the hypothetical "containing object" for a
19195 bit-field. Thus, when computing the byte offset value for the
19196 start of the "containing object" of a bit-field, we must deduce
19197 this information on our own. This can be rather tricky to do in
19198 some cases. For example, handling the following structure type
19199 definition when compiling for an i386/i486 target (which only
19200 aligns long long's to 32-bit boundaries) can be very tricky:
19201
19202 struct S { int field1; long long field2:31; };
19203
19204 Fortunately, there is a simple rule-of-thumb which can be used
19205 in such cases. When compiling for an i386/i486, GCC will
19206 allocate 8 bytes for the structure shown above. It decides to
19207 do this based upon one simple rule for bit-field allocation.
19208 GCC allocates each "containing object" for each bit-field at
19209 the first (i.e. lowest addressed) legitimate alignment boundary
19210 (based upon the required minimum alignment for the declared
19211 type of the field) which it can possibly use, subject to the
19212 condition that there is still enough available space remaining
19213 in the containing object (when allocated at the selected point)
19214 to fully accommodate all of the bits of the bit-field itself.
19215
19216 This simple rule makes it obvious why GCC allocates 8 bytes for
19217 each object of the structure type shown above. When looking
19218 for a place to allocate the "containing object" for `field2',
19219 the compiler simply tries to allocate a 64-bit "containing
19220 object" at each successive 32-bit boundary (starting at zero)
19221 until it finds a place to allocate that 64-bit field such that
19222 at least 31 contiguous (and previously unallocated) bits remain
19223 within that selected 64 bit field. (As it turns out, for the
19224 example above, the compiler finds it is OK to allocate the
19225 "containing object" 64-bit field at bit-offset zero within the
19226 structure type.)
19227
19228 Here we attempt to work backwards from the limited set of facts
19229 we're given, and we try to deduce from those facts, where GCC
19230 must have believed that the containing object started (within
19231 the structure type). The value we deduce is then used (by the
19232 callers of this routine) to generate DW_AT_location and
19233 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19234 the case of DW_AT_location, regular fields as well). */
19235
19236 /* Figure out the bit-distance from the start of the structure to
19237 the "deepest" bit of the bit-field. */
19238 deepest_bitpos = bitpos_int + field_size_in_bits;
19239
19240 /* This is the tricky part. Use some fancy footwork to deduce
19241 where the lowest addressed bit of the containing object must
19242 be. */
19243 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19244
19245 /* Round up to type_align by default. This works best for
19246 bitfields. */
19247 object_offset_in_bits
19248 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19249
19250 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19251 {
19252 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19253
19254 /* Round up to decl_align instead. */
19255 object_offset_in_bits
19256 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19257 }
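/* Worked example for the struct S above on an i386 target (long long
   aligned to 32 bits): bitpos_int = 32, field_size_in_bits = 31,
   type_size_in_bits = 64, type_align_in_bits = 32.  Then deepest_bitpos
   is 63, the initial guess 63 - 64 = -1 rounds up to 0, and since 0 does
   not exceed bitpos_int the containing object is deduced to start at bit
   offset 0 within the structure, i.e. at byte offset 0.  */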
19258
19259 object_offset_in_bytes
19260 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19261 if (ctx->variant_part_offset == NULL_TREE)
19262 {
19263 *cst_offset = object_offset_in_bytes.to_shwi ();
19264 return NULL;
19265 }
19266 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19267 }
19268 else
19269 tree_result = byte_position (decl);
19270
19271 if (ctx->variant_part_offset != NULL_TREE)
19272 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19273 ctx->variant_part_offset, tree_result);
19274
19275 /* If the byte offset is a constant, it's simpler to handle a native
19276 constant rather than a DWARF expression. */
19277 if (TREE_CODE (tree_result) == INTEGER_CST)
19278 {
19279 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19280 return NULL;
19281 }
19282 struct loc_descr_context loc_ctx = {
19283 ctx->struct_type, /* context_type */
19284 NULL_TREE, /* base_decl */
19285 NULL, /* dpi */
19286 false, /* placeholder_arg */
19287 false /* placeholder_seen */
19288 };
19289 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19290
19291 /* We want a DWARF expression: abort if we only have a location list with
19292 multiple elements. */
19293 if (!loc_result || !single_element_loc_list_p (loc_result))
19294 return NULL;
19295 else
19296 return loc_result->expr;
19297 }
19298 \f
19299 /* The following routines define various Dwarf attributes and any data
19300 associated with them. */
19301
19302 /* Add a location description attribute value to a DIE.
19303
19304 This emits location attributes suitable for whole variables and
19305 whole parameters. Note that the location attributes for struct fields are
19306 generated by the routine `data_member_location_attribute' below. */
19307
19308 static inline void
19309 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19310 dw_loc_list_ref descr)
19311 {
19312 bool check_no_locviews = true;
19313 if (descr == 0)
19314 return;
19315 if (single_element_loc_list_p (descr))
19316 add_AT_loc (die, attr_kind, descr->expr);
19317 else
19318 {
19319 add_AT_loc_list (die, attr_kind, descr);
19320 gcc_assert (descr->ll_symbol);
19321 if (attr_kind == DW_AT_location && descr->vl_symbol
19322 && dwarf2out_locviews_in_attribute ())
19323 {
19324 add_AT_view_list (die, DW_AT_GNU_locviews);
19325 check_no_locviews = false;
19326 }
19327 }
19328
19329 if (check_no_locviews)
19330 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19331 }
19332
19333 /* Add DW_AT_accessibility attribute to DIE if needed. */
19334
19335 static void
19336 add_accessibility_attribute (dw_die_ref die, tree decl)
19337 {
19338 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19339 children, otherwise the default is DW_ACCESS_public. In DWARF2
19340 the default has always been DW_ACCESS_public. */
19341 if (TREE_PROTECTED (decl))
19342 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19343 else if (TREE_PRIVATE (decl))
19344 {
19345 if (dwarf_version == 2
19346 || die->die_parent == NULL
19347 || die->die_parent->die_tag != DW_TAG_class_type)
19348 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19349 }
19350 else if (dwarf_version > 2
19351 && die->die_parent
19352 && die->die_parent->die_tag == DW_TAG_class_type)
19353 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19354 }
19355
19356 /* Attach the specialized form of location attribute used for data members of
19357 struct and union types. In the special case of a FIELD_DECL node which
19358 represents a bit-field, the "offset" part of this special location
19359 descriptor must indicate the distance in bytes from the lowest-addressed
19360 byte of the containing struct or union type to the lowest-addressed byte of
19361 the "containing object" for the bit-field. (See the `field_byte_offset'
19362 function above).
19363
19364 For any given bit-field, the "containing object" is a hypothetical object
19365 (of some integral or enum type) within which the given bit-field lives. The
19366 type of this hypothetical "containing object" is always the same as the
19367 declared type of the individual bit-field itself (for GCC anyway... the
19368 DWARF spec doesn't actually mandate this). Note that it is the size (in
19369 bytes) of the hypothetical "containing object" which will be given in the
19370 DW_AT_byte_size attribute for this bit-field. (See the
19371 `byte_size_attribute' function below.) It is also used when calculating the
19372 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19373 function below.)
19374
19375 CTX is required: see the comment for VLR_CONTEXT. */
19376
19377 static void
19378 add_data_member_location_attribute (dw_die_ref die,
19379 tree decl,
19380 struct vlr_context *ctx)
19381 {
19382 HOST_WIDE_INT offset;
19383 dw_loc_descr_ref loc_descr = 0;
19384
19385 if (TREE_CODE (decl) == TREE_BINFO)
19386 {
19387 /* We're working on the TAG_inheritance for a base class. */
19388 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19389 {
19390 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19391 aren't at a fixed offset from all (sub)objects of the same
19392 type. We need to extract the appropriate offset from our
19393 vtable. The following dwarf expression means
19394
19395 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19396
19397 This is specific to the V3 ABI, of course. */
19398
19399 dw_loc_descr_ref tmp;
19400
19401 /* Make a copy of the object address. */
19402 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19403 add_loc_descr (&loc_descr, tmp);
19404
19405 /* Extract the vtable address. */
19406 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19407 add_loc_descr (&loc_descr, tmp);
19408
19409 /* Calculate the address of the offset. */
19410 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19411 gcc_assert (offset < 0);
19412
19413 tmp = int_loc_descriptor (-offset);
19414 add_loc_descr (&loc_descr, tmp);
19415 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19416 add_loc_descr (&loc_descr, tmp);
19417
19418 /* Extract the offset. */
19419 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19420 add_loc_descr (&loc_descr, tmp);
19421
19422 /* Add it to the object address. */
19423 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19424 add_loc_descr (&loc_descr, tmp);
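/* The emitted sequence is thus DW_OP_dup, DW_OP_deref, a constant push
   of -offset (DW_OP_lit* or DW_OP_const*), DW_OP_minus, DW_OP_deref,
   DW_OP_plus: starting with the object address on the stack, it leaves
   the address of the virtual base on the stack.  */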
19425 }
19426 else
19427 offset = tree_to_shwi (BINFO_OFFSET (decl));
19428 }
19429 else
19430 {
19431 loc_descr = field_byte_offset (decl, ctx, &offset);
19432
19433 /* If loc_descr is available then we know the field offset is dynamic.
19434 However, GDB does not handle dynamic field offsets very well at the
19435 moment. */
19436 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19437 {
19438 loc_descr = NULL;
19439 offset = 0;
19440 }
19441
19442 /* Data member location evaluation starts with the base address on the
19443 stack. Compute the field offset and add it to this base address. */
19444 else if (loc_descr != NULL)
19445 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19446 }
19447
19448 if (! loc_descr)
19449 {
19450 /* While DW_AT_data_bit_offset was already added in DWARF4,
19451 GDB, for example, only added support for it in November 2016. For DWARF5
19452 we need newer debug info consumers anyway. We might change this
19453 to dwarf_version >= 4 once most consumers have caught up. */
19454 if (dwarf_version >= 5
19455 && TREE_CODE (decl) == FIELD_DECL
19456 && DECL_BIT_FIELD_TYPE (decl))
19457 {
19458 tree off = bit_position (decl);
19459 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19460 {
19461 remove_AT (die, DW_AT_byte_size);
19462 remove_AT (die, DW_AT_bit_offset);
19463 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19464 return;
19465 }
19466 }
19467 if (dwarf_version > 2)
19468 {
19469 /* Don't need to output a location expression, just the constant. */
19470 if (offset < 0)
19471 add_AT_int (die, DW_AT_data_member_location, offset);
19472 else
19473 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19474 return;
19475 }
19476 else
19477 {
19478 enum dwarf_location_atom op;
19479
19480 /* The DWARF2 standard says that we should assume that the structure
19481 address is already on the stack, so we can specify a structure
19482 field address by using DW_OP_plus_uconst. */
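/* For example, a member at byte offset 8 gets
   DW_AT_data_member_location: DW_OP_plus_uconst 8.  */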
19483 op = DW_OP_plus_uconst;
19484 loc_descr = new_loc_descr (op, offset, 0);
19485 }
19486 }
19487
19488 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19489 }
19490
19491 /* Writes integer values to dw_vec_const array. */
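/* For example, insert_int (0x1234, 2, dest) stores dest[0] = 0x34 and
   dest[1] = 0x12; bytes are always written least-significant first,
   independently of the host byte order.  */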
19492
19493 static void
19494 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19495 {
19496 while (size != 0)
19497 {
19498 *dest++ = val & 0xff;
19499 val >>= 8;
19500 --size;
19501 }
19502 }
19503
19504 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19505
19506 static HOST_WIDE_INT
19507 extract_int (const unsigned char *src, unsigned int size)
19508 {
19509 HOST_WIDE_INT val = 0;
19510
19511 src += size;
19512 while (size != 0)
19513 {
19514 val <<= 8;
19515 val |= *--src & 0xff;
19516 --size;
19517 }
19518 return val;
19519 }
19520
19521 /* Writes wide_int values to dw_vec_const array. */
19522
19523 static void
19524 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19525 {
19526 int i;
19527
19528 if (elt_size <= HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
19529 {
19530 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19531 return;
19532 }
19533
19534 /* We'd have to extend this code to support odd sizes. */
19535 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19536
19537 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19538
19539 if (WORDS_BIG_ENDIAN)
19540 for (i = n - 1; i >= 0; i--)
19541 {
19542 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19543 dest += sizeof (HOST_WIDE_INT);
19544 }
19545 else
19546 for (i = 0; i < n; i++)
19547 {
19548 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19549 dest += sizeof (HOST_WIDE_INT);
19550 }
19551 }
19552
19553 /* Writes floating point values to dw_vec_const array. */
19554
19555 static void
19556 insert_float (const_rtx rtl, unsigned char *array)
19557 {
19558 long val[4];
19559 int i;
19560 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19561
19562 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19563
19564 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19565 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19566 {
19567 insert_int (val[i], 4, array);
19568 array += 4;
19569 }
19570 }
19571
19572 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19573 does not have a "location" either in memory or in a register. These
19574 things can arise in GNU C when a constant is passed as an actual parameter
19575 to an inlined function. They can also arise in C++ where declared
19576 constants do not necessarily get memory "homes". */
19577
19578 static bool
19579 add_const_value_attribute (dw_die_ref die, rtx rtl)
19580 {
19581 switch (GET_CODE (rtl))
19582 {
19583 case CONST_INT:
19584 {
19585 HOST_WIDE_INT val = INTVAL (rtl);
19586
19587 if (val < 0)
19588 add_AT_int (die, DW_AT_const_value, val);
19589 else
19590 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19591 }
19592 return true;
19593
19594 case CONST_WIDE_INT:
19595 {
19596 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19597 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19598 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19599 wide_int w = wi::zext (w1, prec);
19600 add_AT_wide (die, DW_AT_const_value, w);
19601 }
19602 return true;
19603
19604 case CONST_DOUBLE:
19605 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19606 floating-point constant. A CONST_DOUBLE is used whenever the
19607 constant requires more than one word in order to be adequately
19608 represented. */
19609 if (TARGET_SUPPORTS_WIDE_INT == 0
19610 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19611 add_AT_double (die, DW_AT_const_value,
19612 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19613 else
19614 {
19615 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19616 unsigned int length = GET_MODE_SIZE (mode);
19617 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19618
19619 insert_float (rtl, array);
19620 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19621 }
19622 return true;
19623
19624 case CONST_VECTOR:
19625 {
19626 unsigned int length;
19627 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19628 return false;
19629
19630 machine_mode mode = GET_MODE (rtl);
19631 /* The combination of a length and byte elt_size doesn't extend
19632 naturally to boolean vectors, where several elements are packed
19633 into the same byte. */
19634 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19635 return false;
19636
19637 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19638 unsigned char *array
19639 = ggc_vec_alloc<unsigned char> (length * elt_size);
19640 unsigned int i;
19641 unsigned char *p;
19642 machine_mode imode = GET_MODE_INNER (mode);
19643
19644 switch (GET_MODE_CLASS (mode))
19645 {
19646 case MODE_VECTOR_INT:
19647 for (i = 0, p = array; i < length; i++, p += elt_size)
19648 {
19649 rtx elt = CONST_VECTOR_ELT (rtl, i);
19650 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19651 }
19652 break;
19653
19654 case MODE_VECTOR_FLOAT:
19655 for (i = 0, p = array; i < length; i++, p += elt_size)
19656 {
19657 rtx elt = CONST_VECTOR_ELT (rtl, i);
19658 insert_float (elt, p);
19659 }
19660 break;
19661
19662 default:
19663 gcc_unreachable ();
19664 }
19665
19666 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19667 }
19668 return true;
19669
19670 case CONST_STRING:
19671 if (dwarf_version >= 4 || !dwarf_strict)
19672 {
19673 dw_loc_descr_ref loc_result;
19674 resolve_one_addr (&rtl);
19675 rtl_addr:
19676 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19677 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19678 add_AT_loc (die, DW_AT_location, loc_result);
19679 vec_safe_push (used_rtx_array, rtl);
19680 return true;
19681 }
19682 return false;
19683
19684 case CONST:
19685 if (CONSTANT_P (XEXP (rtl, 0)))
19686 return add_const_value_attribute (die, XEXP (rtl, 0));
19687 /* FALLTHROUGH */
19688 case SYMBOL_REF:
19689 if (!const_ok_for_output (rtl))
19690 return false;
19691 /* FALLTHROUGH */
19692 case LABEL_REF:
19693 if (dwarf_version >= 4 || !dwarf_strict)
19694 goto rtl_addr;
19695 return false;
19696
19697 case PLUS:
19698 /* In cases where an inlined instance of an inline function is passed
19699 the address of an `auto' variable (which is local to the caller) we
19700 can get a situation where the DECL_RTL of the artificial local
19701 variable (for the inlining) which acts as a stand-in for the
19702 corresponding formal parameter (of the inline function) will look
19703 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19704 exactly a compile-time constant expression, but it isn't the address
19705 of the (artificial) local variable either. Rather, it represents the
19706 *value* which the artificial local variable always has during its
19707 lifetime. We currently have no way to represent such quasi-constant
19708 values in Dwarf, so for now we just punt and generate nothing. */
19709 return false;
19710
19711 case HIGH:
19712 case CONST_FIXED:
19713 case MINUS:
19714 case SIGN_EXTEND:
19715 case ZERO_EXTEND:
19716 case CONST_POLY_INT:
19717 return false;
19718
19719 case MEM:
19720 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19721 && MEM_READONLY_P (rtl)
19722 && GET_MODE (rtl) == BLKmode)
19723 {
19724 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19725 return true;
19726 }
19727 return false;
19728
19729 default:
19730 /* No other kinds of rtx should be possible here. */
19731 gcc_unreachable ();
19732 }
19733 return false;
19734 }
19735
19736 /* Determine whether the evaluation of EXPR references any variables
19737 or functions which aren't otherwise used (and therefore may not be
19738 output). */
19739 static tree
19740 reference_to_unused (tree * tp, int * walk_subtrees,
19741 void * data ATTRIBUTE_UNUSED)
19742 {
19743 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19744 *walk_subtrees = 0;
19745
19746 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19747 && ! TREE_ASM_WRITTEN (*tp))
19748 return *tp;
19749 /* ??? The C++ FE emits debug information for using decls, so
19750 putting gcc_unreachable here falls over. See PR31899. For now
19751 be conservative. */
19752 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19753 return *tp;
19754 else if (VAR_P (*tp))
19755 {
19756 varpool_node *node = varpool_node::get (*tp);
19757 if (!node || !node->definition)
19758 return *tp;
19759 }
19760 else if (TREE_CODE (*tp) == FUNCTION_DECL
19761 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19762 {
19763 /* The call graph machinery must have finished analyzing,
19764 optimizing and gimplifying the CU by now.
19765 So if *TP has no call graph node associated
19766 to it, it means *TP will not be emitted. */
19767 if (!cgraph_node::get (*tp))
19768 return *tp;
19769 }
19770 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19771 return *tp;
19772
19773 return NULL_TREE;
19774 }
19775
19776 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19777 for use in a later add_const_value_attribute call. */
19778
19779 static rtx
19780 rtl_for_decl_init (tree init, tree type)
19781 {
19782 rtx rtl = NULL_RTX;
19783
19784 STRIP_NOPS (init);
19785
19786 /* If a variable is initialized with a string constant without embedded
19787 zeros, build CONST_STRING. */
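/* For example, 'char s[6] = "hello"' qualifies: the string length
   including the terminating NUL matches both strlen + 1 and the domain
   0 .. 5.  An array larger than its initializer, or a string with
   embedded NULs, does not and falls through to the generic handling.  */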
19788 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19789 {
19790 tree enttype = TREE_TYPE (type);
19791 tree domain = TYPE_DOMAIN (type);
19792 scalar_int_mode mode;
19793
19794 if (is_int_mode (TYPE_MODE (enttype), &mode)
19795 && GET_MODE_SIZE (mode) == 1
19796 && domain
19797 && TYPE_MAX_VALUE (domain)
19798 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19799 && integer_zerop (TYPE_MIN_VALUE (domain))
19800 && compare_tree_int (TYPE_MAX_VALUE (domain),
19801 TREE_STRING_LENGTH (init) - 1) == 0
19802 && ((size_t) TREE_STRING_LENGTH (init)
19803 == strlen (TREE_STRING_POINTER (init)) + 1))
19804 {
19805 rtl = gen_rtx_CONST_STRING (VOIDmode,
19806 ggc_strdup (TREE_STRING_POINTER (init)));
19807 rtl = gen_rtx_MEM (BLKmode, rtl);
19808 MEM_READONLY_P (rtl) = 1;
19809 }
19810 }
19811 /* Other aggregates, and complex values, could be represented using
19812 CONCAT: FIXME! */
19813 else if (AGGREGATE_TYPE_P (type)
19814 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19815 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19816 || TREE_CODE (type) == COMPLEX_TYPE)
19817 ;
19818 /* Vectors only work if their mode is supported by the target.
19819 FIXME: generic vectors ought to work too. */
19820 else if (TREE_CODE (type) == VECTOR_TYPE
19821 && !VECTOR_MODE_P (TYPE_MODE (type)))
19822 ;
19823 /* If the initializer is something that we know will expand into an
19824 immediate RTL constant, expand it now. We must be careful not to
19825 reference variables which won't be output. */
19826 else if (initializer_constant_valid_p (init, type)
19827 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19828 {
19829 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19830 possible. */
19831 if (TREE_CODE (type) == VECTOR_TYPE)
19832 switch (TREE_CODE (init))
19833 {
19834 case VECTOR_CST:
19835 break;
19836 case CONSTRUCTOR:
19837 if (TREE_CONSTANT (init))
19838 {
19839 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19840 bool constant_p = true;
19841 tree value;
19842 unsigned HOST_WIDE_INT ix;
19843
19844 /* Even when ctor is constant, it might contain non-*_CST
19845 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19846 belong in VECTOR_CST nodes. */
19847 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19848 if (!CONSTANT_CLASS_P (value))
19849 {
19850 constant_p = false;
19851 break;
19852 }
19853
19854 if (constant_p)
19855 {
19856 init = build_vector_from_ctor (type, elts);
19857 break;
19858 }
19859 }
19860 /* FALLTHRU */
19861
19862 default:
19863 return NULL;
19864 }
19865
19866 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19867
19868 /* If expand_expr returns a MEM, it wasn't immediate. */
19869 gcc_assert (!rtl || !MEM_P (rtl));
19870 }
19871
19872 return rtl;
19873 }
19874
19875 /* Generate RTL for the variable DECL to represent its location. */
19876
19877 static rtx
19878 rtl_for_decl_location (tree decl)
19879 {
19880 rtx rtl;
19881
19882 /* Here we have to decide where we are going to say the parameter "lives"
19883 (as far as the debugger is concerned). We only have a couple of
19884 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19885
19886 DECL_RTL normally indicates where the parameter lives during most of the
19887 activation of the function. If optimization is enabled however, this
19888 could be either NULL or else a pseudo-reg. Both of those cases indicate
19889 that the parameter doesn't really live anywhere (as far as the code
19890 generation parts of GCC are concerned) during most of the function's
19891 activation. That will happen (for example) if the parameter is never
19892 referenced within the function.
19893
19894 We could just generate a location descriptor here for all non-NULL
19895 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19896 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19897 where DECL_RTL is NULL or is a pseudo-reg.
19898
19899 Note however that we can only get away with using DECL_INCOMING_RTL as
19900 a backup substitute for DECL_RTL in certain limited cases. In cases
19901 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19902 we can be sure that the parameter was passed using the same type as it is
19903 declared to have within the function, and that its DECL_INCOMING_RTL
19904 points us to a place where a value of that type is passed.
19905
19906 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19907 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19908 because in these cases DECL_INCOMING_RTL points us to a value of some
19909 type which is *different* from the type of the parameter itself. Thus,
19910 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19911 such cases, the debugger would end up (for example) trying to fetch a
19912 `float' from a place which actually contains the first part of a
19913 `double'. That would lead to really incorrect and confusing
19914 output at debug-time.
19915
19916 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19917 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19918 are a couple of exceptions however. On little-endian machines we can
19919 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19920 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19921 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19922 when (on a little-endian machine) a non-prototyped function has a
19923 parameter declared to be of type `short' or `char'. In such cases,
19924 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19925 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19926 passed `int' value. If the debugger then uses that address to fetch
19927 a `short' or a `char' (on a little-endian machine) the result will be
19928 the correct data, so we allow for such exceptional cases below.
19929
19930 Note that our goal here is to describe the place where the given formal
19931 parameter lives during most of the function's activation (i.e. between the
19932 end of the prologue and the start of the epilogue). We'll do that as best
19933 as we can. Note however that if the given formal parameter is modified
19934 sometime during the execution of the function, then a stack backtrace (at
19935 debug-time) will show the function as having been called with the *new*
19936 value rather than the value which was originally passed in. This happens
19937 rarely enough that it is not a major problem, but it *is* a problem, and
19938 I'd like to fix it.
19939
19940 A future version of dwarf2out.c may generate two additional attributes for
19941 any given DW_TAG_formal_parameter DIE which will describe the "passed
19942 type" and the "passed location" for the given formal parameter in addition
19943 to the attributes we now generate to indicate the "declared type" and the
19944 "active location" for each parameter. This additional set of attributes
19945 could be used by debuggers for stack backtraces. Separately, note that
19946 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19947 This happens (for example) for inlined-instances of inline function formal
19948 parameters which are never referenced. This really shouldn't be
19949 happening. All PARM_DECL nodes should get valid non-NULL
19950 DECL_INCOMING_RTL values. FIXME. */
19951
19952 /* Use DECL_RTL as the "location" unless we find something better. */
19953 rtl = DECL_RTL_IF_SET (decl);
19954
19955 /* When generating abstract instances, ignore everything except
19956 constants, symbols living in memory, and symbols living in
19957 fixed registers. */
19958 if (! reload_completed)
19959 {
19960 if (rtl
19961 && (CONSTANT_P (rtl)
19962 || (MEM_P (rtl)
19963 && CONSTANT_P (XEXP (rtl, 0)))
19964 || (REG_P (rtl)
19965 && VAR_P (decl)
19966 && TREE_STATIC (decl))))
19967 {
19968 rtl = targetm.delegitimize_address (rtl);
19969 return rtl;
19970 }
19971 rtl = NULL_RTX;
19972 }
19973 else if (TREE_CODE (decl) == PARM_DECL)
19974 {
19975 if (rtl == NULL_RTX
19976 || is_pseudo_reg (rtl)
19977 || (MEM_P (rtl)
19978 && is_pseudo_reg (XEXP (rtl, 0))
19979 && DECL_INCOMING_RTL (decl)
19980 && MEM_P (DECL_INCOMING_RTL (decl))
19981 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19982 {
19983 tree declared_type = TREE_TYPE (decl);
19984 tree passed_type = DECL_ARG_TYPE (decl);
19985 machine_mode dmode = TYPE_MODE (declared_type);
19986 machine_mode pmode = TYPE_MODE (passed_type);
19987
19988 /* This decl represents a formal parameter which was optimized out.
19989 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19990 all cases where (rtl == NULL_RTX) just below. */
19991 if (dmode == pmode)
19992 rtl = DECL_INCOMING_RTL (decl);
19993 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19994 && SCALAR_INT_MODE_P (dmode)
19995 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19996 && DECL_INCOMING_RTL (decl))
19997 {
19998 rtx inc = DECL_INCOMING_RTL (decl);
19999 if (REG_P (inc))
20000 rtl = inc;
20001 else if (MEM_P (inc))
20002 {
20003 if (BYTES_BIG_ENDIAN)
20004 rtl = adjust_address_nv (inc, dmode,
20005 GET_MODE_SIZE (pmode)
20006 - GET_MODE_SIZE (dmode));
20007 else
20008 rtl = inc;
20009 }
20010 }
20011 }
20012
20013 /* If the parm was passed in registers, but lives on the stack, then
20014 make a big endian correction if the mode of the type of the
20015 parameter is not the same as the mode of the rtl. */
20016 /* ??? This is the same series of checks that are made in dbxout.c before
20017 we reach the big endian correction code there. It isn't clear if all
20018 of these checks are necessary here, but keeping them all is the safe
20019 thing to do. */
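/* For example, on a 32-bit big-endian target (UNITS_PER_WORD == 4), a
   'short' parameter living in a word-sized stack slot occupies the last
   two bytes of that slot, so its address is advanced by 4 - 2 = 2 bytes
   below.  */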
20020 else if (MEM_P (rtl)
20021 && XEXP (rtl, 0) != const0_rtx
20022 && ! CONSTANT_P (XEXP (rtl, 0))
20023 /* Not passed in memory. */
20024 && !MEM_P (DECL_INCOMING_RTL (decl))
20025 /* Not passed by invisible reference. */
20026 && (!REG_P (XEXP (rtl, 0))
20027 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20028 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20029 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20030 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20031 #endif
20032 )
20033 /* Big endian correction check. */
20034 && BYTES_BIG_ENDIAN
20035 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20036 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20037 UNITS_PER_WORD))
20038 {
20039 machine_mode addr_mode = get_address_mode (rtl);
20040 poly_int64 offset = (UNITS_PER_WORD
20041 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20042
20043 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20044 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20045 }
20046 }
20047 else if (VAR_P (decl)
20048 && rtl
20049 && MEM_P (rtl)
20050 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20051 {
20052 machine_mode addr_mode = get_address_mode (rtl);
20053 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20054 GET_MODE (rtl));
20055
20056 /* If a variable is declared "register" yet is smaller than
20057 a register, then if we store the variable to memory, it
20058 looks like we're storing a register-sized value, when in
20059 fact we are not. We need to adjust the offset of the
20060 storage location to reflect the actual value's bytes,
20061 else gdb will not be able to display it. */
20062 if (maybe_ne (offset, 0))
20063 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20064 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20065 }
20066
20067 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20068 and will have been substituted directly into all expressions that use it.
20069 C does not have such a concept, but C++ and other languages do. */
20070 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20071 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20072
20073 if (rtl)
20074 rtl = targetm.delegitimize_address (rtl);
20075
20076 /* If we don't look past the constant pool, we risk emitting a
20077 reference to a constant pool entry that isn't referenced from
20078 code, and thus is not emitted. */
20079 if (rtl)
20080 rtl = avoid_constant_pool_reference (rtl);
20081
20082 /* Try harder to get a rtl. If this symbol ends up not being emitted
20083 in the current CU, resolve_addr will remove the expression referencing
20084 it. */
20085 if (rtl == NULL_RTX
20086 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20087 && VAR_P (decl)
20088 && !DECL_EXTERNAL (decl)
20089 && TREE_STATIC (decl)
20090 && DECL_NAME (decl)
20091 && !DECL_HARD_REGISTER (decl)
20092 && DECL_MODE (decl) != VOIDmode)
20093 {
20094 rtl = make_decl_rtl_for_debug (decl);
20095 if (!MEM_P (rtl)
20096 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20097 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20098 rtl = NULL_RTX;
20099 }
20100
20101 return rtl;
20102 }
20103
20104 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20105 returned. If so, the decl for the COMMON block is returned, and the
20106 value is the offset into the common block for the symbol. */
20107
20108 static tree
20109 fortran_common (tree decl, HOST_WIDE_INT *value)
20110 {
20111 tree val_expr, cvar;
20112 machine_mode mode;
20113 poly_int64 bitsize, bitpos;
20114 tree offset;
20115 HOST_WIDE_INT cbitpos;
20116 int unsignedp, reversep, volatilep = 0;
20117
20118 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20119 it does not have a value (the offset into the common area), or if it
20120 is thread local (as opposed to global) then it isn't common, and shouldn't
20121 be handled as such. */
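/* For instance, given 'COMMON /blk/ i, j' with default-kind integers,
   looking up 'j' would typically return the decl for the common block
   'blk' with *value set to 4.  */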
20122 if (!VAR_P (decl)
20123 || !TREE_STATIC (decl)
20124 || !DECL_HAS_VALUE_EXPR_P (decl)
20125 || !is_fortran ())
20126 return NULL_TREE;
20127
20128 val_expr = DECL_VALUE_EXPR (decl);
20129 if (TREE_CODE (val_expr) != COMPONENT_REF)
20130 return NULL_TREE;
20131
20132 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20133 &unsignedp, &reversep, &volatilep);
20134
20135 if (cvar == NULL_TREE
20136 || !VAR_P (cvar)
20137 || DECL_ARTIFICIAL (cvar)
20138 || !TREE_PUBLIC (cvar)
20139 /* We don't expect to have to cope with variable offsets,
20140 since at present all static data must have a constant size. */
20141 || !bitpos.is_constant (&cbitpos))
20142 return NULL_TREE;
20143
20144 *value = 0;
20145 if (offset != NULL)
20146 {
20147 if (!tree_fits_shwi_p (offset))
20148 return NULL_TREE;
20149 *value = tree_to_shwi (offset);
20150 }
20151 if (cbitpos != 0)
20152 *value += cbitpos / BITS_PER_UNIT;
20153
20154 return cvar;
20155 }
20156
20157 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20158 data attribute for a variable or a parameter. We generate the
20159 DW_AT_const_value attribute only in those cases where the given variable
20160 or parameter does not have a true "location" either in memory or in a
20161 register. This can happen (for example) when a constant is passed as an
20162 actual argument in a call to an inline function. (It's possible that
20163 these things can crop up in other ways also.) Note that one type of
20164 constant value which can be passed into an inlined function is a constant
20165 pointer. This can happen for example if an actual argument in an inlined
20166 function call evaluates to a compile-time constant address.
20167
20168 CACHE_P is true if it is worth caching the location list for DECL,
20169 so that future calls can reuse it rather than regenerate it from scratch.
20170 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20171 since we will need to refer to them each time the function is inlined. */
20172
20173 static bool
20174 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20175 {
20176 rtx rtl;
20177 dw_loc_list_ref list;
20178 var_loc_list *loc_list;
20179 cached_dw_loc_list *cache;
20180
20181 if (early_dwarf)
20182 return false;
20183
20184 if (TREE_CODE (decl) == ERROR_MARK)
20185 return false;
20186
20187 if (get_AT (die, DW_AT_location)
20188 || get_AT (die, DW_AT_const_value))
20189 return true;
20190
20191 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20192 || TREE_CODE (decl) == RESULT_DECL);
20193
20194 /* Try to get some constant RTL for this decl, and use that as the value of
20195 the location. */
20196
20197 rtl = rtl_for_decl_location (decl);
20198 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20199 && add_const_value_attribute (die, rtl))
20200 return true;
20201
20202 /* See if we have a single-element location list that is equivalent to
20203 a constant value. That way we are better off using add_const_value_attribute
20204 rather than expanding the constant value equivalent. */
20205 loc_list = lookup_decl_loc (decl);
20206 if (loc_list
20207 && loc_list->first
20208 && loc_list->first->next == NULL
20209 && NOTE_P (loc_list->first->loc)
20210 && NOTE_VAR_LOCATION (loc_list->first->loc)
20211 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20212 {
20213 struct var_loc_node *node;
20214
20215 node = loc_list->first;
20216 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20217 if (GET_CODE (rtl) == EXPR_LIST)
20218 rtl = XEXP (rtl, 0);
20219 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20220 && add_const_value_attribute (die, rtl))
20221 return true;
20222 }
20223 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20224 list several times. See if we've already cached the contents. */
20225 list = NULL;
20226 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20227 cache_p = false;
20228 if (cache_p)
20229 {
20230 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20231 if (cache)
20232 list = cache->loc_list;
20233 }
20234 if (list == NULL)
20235 {
20236 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20237 NULL);
20238 /* It is usually worth caching this result if the decl is from
20239 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20240 if (cache_p && list && list->dw_loc_next)
20241 {
20242 cached_dw_loc_list **slot
20243 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20244 DECL_UID (decl),
20245 INSERT);
20246 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20247 cache->decl_id = DECL_UID (decl);
20248 cache->loc_list = list;
20249 *slot = cache;
20250 }
20251 }
20252 if (list)
20253 {
20254 add_AT_location_description (die, DW_AT_location, list);
20255 return true;
20256 }
20257 /* None of that worked, so it must not really have a location;
20258 try adding a constant value attribute from the DECL_INITIAL. */
20259 return tree_add_const_value_attribute_for_decl (die, decl);
20260 }
20261
20262 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20263 attribute is the const value T. */
20264
20265 static bool
20266 tree_add_const_value_attribute (dw_die_ref die, tree t)
20267 {
20268 tree init;
20269 tree type = TREE_TYPE (t);
20270 rtx rtl;
20271
20272 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20273 return false;
20274
20275 init = t;
20276 gcc_assert (!DECL_P (init));
20277
20278 if (TREE_CODE (init) == INTEGER_CST)
20279 {
20280 if (tree_fits_uhwi_p (init))
20281 {
20282 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20283 return true;
20284 }
20285 if (tree_fits_shwi_p (init))
20286 {
20287 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20288 return true;
20289 }
20290 }
20291 if (! early_dwarf)
20292 {
20293 rtl = rtl_for_decl_init (init, type);
20294 if (rtl)
20295 return add_const_value_attribute (die, rtl);
20296 }
20297 /* If the host and target are sane, try harder. */
20298 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20299 && initializer_constant_valid_p (init, type))
20300 {
20301 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20302 if (size > 0 && (int) size == size)
20303 {
20304 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20305
20306 if (native_encode_initializer (init, array, size) == size)
20307 {
20308 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20309 return true;
20310 }
20311 ggc_free (array);
20312 }
20313 }
20314 return false;
20315 }
20316
20317 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20318 attribute is the const value of T, where T is an integral constant
20319 variable with static storage duration
20320 (so it can't be a PARM_DECL or a RESULT_DECL). */
20321
20322 static bool
20323 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20324 {
20325
20326 if (!decl
20327 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20328 || (VAR_P (decl) && !TREE_STATIC (decl)))
20329 return false;
20330
20331 if (TREE_READONLY (decl)
20332 && ! TREE_THIS_VOLATILE (decl)
20333 && DECL_INITIAL (decl))
20334 /* OK */;
20335 else
20336 return false;
20337
20338 /* Don't add DW_AT_const_value if abstract origin already has one. */
20339 if (get_AT (var_die, DW_AT_const_value))
20340 return false;
20341
20342 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20343 }
20344
20345 /* Convert the CFI instructions for the current function into a
20346 location list. This is used for DW_AT_frame_base when we are targeting
20347 a dwarf2 consumer that does not support the dwarf3
20348 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20349 expressions. */
20350
20351 static dw_loc_list_ref
20352 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20353 {
20354 int ix;
20355 dw_fde_ref fde;
20356 dw_loc_list_ref list, *list_tail;
20357 dw_cfi_ref cfi;
20358 dw_cfa_location last_cfa, next_cfa;
20359 const char *start_label, *last_label, *section;
20360 dw_cfa_location remember;
20361
20362 fde = cfun->fde;
20363 gcc_assert (fde != NULL);
20364
20365 section = secname_for_decl (current_function_decl);
20366 list_tail = &list;
20367 list = NULL;
20368
20369 memset (&next_cfa, 0, sizeof (next_cfa));
20370 next_cfa.reg = INVALID_REGNUM;
20371 remember = next_cfa;
20372
20373 start_label = fde->dw_fde_begin;
20374
20375 /* ??? Bald assumption that the CIE opcode list does not contain
20376 advance opcodes. */
20377 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20378 lookup_cfa_1 (cfi, &next_cfa, &remember);
20379
20380 last_cfa = next_cfa;
20381 last_label = start_label;
20382
20383 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20384 {
20385 /* If the first partition contained no CFI adjustments, the
20386 CIE opcodes apply to the whole first partition. */
20387 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20388 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20389 list_tail = &(*list_tail)->dw_loc_next;
20390 start_label = last_label = fde->dw_fde_second_begin;
20391 }
20392
20393 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20394 {
20395 switch (cfi->dw_cfi_opc)
20396 {
20397 case DW_CFA_set_loc:
20398 case DW_CFA_advance_loc1:
20399 case DW_CFA_advance_loc2:
20400 case DW_CFA_advance_loc4:
20401 if (!cfa_equal_p (&last_cfa, &next_cfa))
20402 {
20403 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20404 start_label, 0, last_label, 0, section);
20405
20406 list_tail = &(*list_tail)->dw_loc_next;
20407 last_cfa = next_cfa;
20408 start_label = last_label;
20409 }
20410 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20411 break;
20412
20413 case DW_CFA_advance_loc:
20414 /* The encoding is complex enough that we should never emit this. */
20415 gcc_unreachable ();
20416
20417 default:
20418 lookup_cfa_1 (cfi, &next_cfa, &remember);
20419 break;
20420 }
20421 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20422 {
20423 if (!cfa_equal_p (&last_cfa, &next_cfa))
20424 {
20425 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20426 start_label, 0, last_label, 0, section);
20427
20428 list_tail = &(*list_tail)->dw_loc_next;
20429 last_cfa = next_cfa;
20430 start_label = last_label;
20431 }
20432 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20433 start_label, 0, fde->dw_fde_end, 0, section);
20434 list_tail = &(*list_tail)->dw_loc_next;
20435 start_label = last_label = fde->dw_fde_second_begin;
20436 }
20437 }
20438
20439 if (!cfa_equal_p (&last_cfa, &next_cfa))
20440 {
20441 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20442 start_label, 0, last_label, 0, section);
20443 list_tail = &(*list_tail)->dw_loc_next;
20444 start_label = last_label;
20445 }
20446
20447 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20448 start_label, 0,
20449 fde->dw_fde_second_begin
20450 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20451 section);
20452
20453 maybe_gen_llsym (list);
20454
20455 return list;
20456 }
20457
20458 /* Compute a displacement from the "steady-state frame pointer" to the
20459 frame base (often the same as the CFA), and store it in
20460 frame_pointer_fb_offset. OFFSET is added to the displacement
20461 before the latter is negated. */
20462
20463 static void
20464 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20465 {
20466 rtx reg, elim;
20467
20468 #ifdef FRAME_POINTER_CFA_OFFSET
20469 reg = frame_pointer_rtx;
20470 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20471 #else
20472 reg = arg_pointer_rtx;
20473 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20474 #endif
20475
20476 elim = (ira_use_lra_p
20477 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20478 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20479 elim = strip_offset_and_add (elim, &offset);
20480
20481 frame_pointer_fb_offset = -offset;
20482
20483 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20484 in which to eliminate. This is because its stack pointer isn't
20485 directly accessible as a register within the ISA. To work around
20486 this, assume that while we cannot provide a proper value for
20487 frame_pointer_fb_offset, we won't need one either. We can use the
20488 hard frame pointer in debug info even if the frame pointer isn't used,
20489 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20490 which uses the DW_AT_frame_base attribute, not the hard frame pointer
20491 directly. */
20492 frame_pointer_fb_offset_valid
20493 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20494 }
20495
20496 /* Generate a DW_AT_name attribute given some string value to be included as
20497 the value of the attribute. */
20498
20499 static void
20500 add_name_attribute (dw_die_ref die, const char *name_string)
20501 {
20502 if (name_string != NULL && *name_string != 0)
20503 {
20504 if (demangle_name_func)
20505 name_string = (*demangle_name_func) (name_string);
20506
20507 add_AT_string (die, DW_AT_name, name_string);
20508 }
20509 }
20510
20511 /* Generate a DW_AT_description attribute given some string value to be included
20512 as the value of the attribute. */
20513
20514 static void
20515 add_desc_attribute (dw_die_ref die, const char *name_string)
20516 {
20517 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20518 return;
20519
20520 if (name_string == NULL || *name_string == 0)
20521 return;
20522
20523 if (demangle_name_func)
20524 name_string = (*demangle_name_func) (name_string);
20525
20526 add_AT_string (die, DW_AT_description, name_string);
20527 }
20528
20529 /* Generate a DW_AT_description attribute given some decl to be included
20530 as the value of the attribute. */
20531
20532 static void
20533 add_desc_attribute (dw_die_ref die, tree decl)
20534 {
20535 tree decl_name;
20536
20537 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20538 return;
20539
20540 if (decl == NULL_TREE || !DECL_P (decl))
20541 return;
20542 decl_name = DECL_NAME (decl);
20543
20544 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20545 {
20546 const char *name = dwarf2_name (decl, 0);
20547 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20548 }
20549 else
20550 {
20551 char *desc = print_generic_expr_to_str (decl);
20552 add_desc_attribute (die, desc);
20553 free (desc);
20554 }
20555 }
20556
20557 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20558 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20559 of TYPE accordingly.
20560
20561 ??? This is a temporary measure until after we're able to generate
20562 regular DWARF for the complex Ada type system. */
20563
20564 static void
20565 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20566 dw_die_ref context_die)
20567 {
20568 tree dtype;
20569 dw_die_ref dtype_die;
20570
20571 if (!lang_hooks.types.descriptive_type)
20572 return;
20573
20574 dtype = lang_hooks.types.descriptive_type (type);
20575 if (!dtype)
20576 return;
20577
20578 dtype_die = lookup_type_die (dtype);
20579 if (!dtype_die)
20580 {
20581 gen_type_die (dtype, context_die);
20582 dtype_die = lookup_type_die (dtype);
20583 gcc_assert (dtype_die);
20584 }
20585
20586 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20587 }
20588
20589 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20590
20591 static const char *
20592 comp_dir_string (void)
20593 {
20594 const char *wd;
20595 char *wd_plus_sep = NULL;
20596 static const char *cached_wd = NULL;
20597
20598 if (cached_wd != NULL)
20599 return cached_wd;
20600
20601 wd = get_src_pwd ();
20602 if (wd == NULL)
20603 return NULL;
20604
20605 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20606 {
20607 size_t wdlen = strlen (wd);
20608 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20609 strcpy (wd_plus_sep, wd);
20610 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20611 wd_plus_sep [wdlen + 1] = 0;
20612 wd = wd_plus_sep;
20613 }
20614
20615 cached_wd = remap_debug_filename (wd);
20616
20617 /* remap_debug_filename can either pass through wd unchanged or return a new
20618 GC string. These two kinds can't both be stored in a GTY(())-tagged string,
20619 but since the cached value lives forever, just copy it if needed. */
20620 if (cached_wd != wd)
20621 {
20622 cached_wd = xstrdup (cached_wd);
20623 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20624 free (wd_plus_sep);
20625 }
20626
20627 return cached_wd;
20628 }
20629
20630 /* Generate a DW_AT_comp_dir attribute for DIE. */
20631
20632 static void
20633 add_comp_dir_attribute (dw_die_ref die)
20634 {
20635 const char * wd = comp_dir_string ();
20636 if (wd != NULL)
20637 add_AT_string (die, DW_AT_comp_dir, wd);
20638 }
20639
20640 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20641 pointer computation, ...), output a representation for that bound according
20642 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20643 loc_list_from_tree for the meaning of CONTEXT. */
20644
20645 static void
20646 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20647 int forms, struct loc_descr_context *context)
20648 {
20649 dw_die_ref context_die, decl_die = NULL;
20650 dw_loc_list_ref list;
20651 bool strip_conversions = true;
20652 bool placeholder_seen = false;
20653
20654 while (strip_conversions)
20655 switch (TREE_CODE (value))
20656 {
20657 case ERROR_MARK:
20658 case SAVE_EXPR:
20659 return;
20660
20661 CASE_CONVERT:
20662 case VIEW_CONVERT_EXPR:
20663 value = TREE_OPERAND (value, 0);
20664 break;
20665
20666 default:
20667 strip_conversions = false;
20668 break;
20669 }
20670
20671 /* If possible and permitted, output the attribute as a constant. */
20672 if ((forms & dw_scalar_form_constant) != 0
20673 && TREE_CODE (value) == INTEGER_CST)
20674 {
20675 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20676
20677 /* If HOST_WIDE_INT is big enough then represent the bound as
20678 a constant value. We need to choose a form based on
20679 whether the type is signed or unsigned. We cannot just
20680 call add_AT_unsigned if the value itself is positive
20681 (add_AT_unsigned might add the unsigned value encoded as
20682 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20683 bounds type and then sign extend any unsigned values found
20684 for signed types. This is needed only for
20685 DW_AT_{lower,upper}_bound, since for most other attributes,
20686 consumers will treat DW_FORM_data[1248] as unsigned values,
20687 regardless of the underlying type. */
20688 if (prec <= HOST_BITS_PER_WIDE_INT
20689 || tree_fits_uhwi_p (value))
20690 {
20691 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20692 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20693 else
20694 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20695 }
20696 else
20697 /* Otherwise represent the bound as an unsigned value with
20698 the precision of its type. The precision and signedness
20699 of the type will be necessary to re-interpret it
20700 unambiguously. */
20701 add_AT_wide (die, attr, wi::to_wide (value));
20702 return;
20703 }
20704
20705 /* Otherwise, if it's possible and permitted too, output a reference to
20706 another DIE. */
20707 if ((forms & dw_scalar_form_reference) != 0)
20708 {
20709 tree decl = NULL_TREE;
20710
20711 /* Some type attributes reference an outer type. For instance, the upper
20712 bound of an array may reference an embedding record (this happens in
20713 Ada). */
20714 if (TREE_CODE (value) == COMPONENT_REF
20715 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20716 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20717 decl = TREE_OPERAND (value, 1);
20718
20719 else if (VAR_P (value)
20720 || TREE_CODE (value) == PARM_DECL
20721 || TREE_CODE (value) == RESULT_DECL)
20722 decl = value;
20723
20724 if (decl != NULL_TREE)
20725 {
20726 decl_die = lookup_decl_die (decl);
20727
20728 /* ??? Can this happen, or should the variable have been bound
20729 first? Probably it can, since I imagine that we try to create
20730 the types of parameters in the order in which they exist in
20731 the list, and won't have created a forward reference to a
20732 later parameter. */
20733 if (decl_die != NULL)
20734 {
20735 if (get_AT (decl_die, DW_AT_location)
20736 || get_AT (decl_die, DW_AT_data_member_location)
20737 || get_AT (decl_die, DW_AT_const_value))
20738 {
20739 add_AT_die_ref (die, attr, decl_die);
20740 return;
20741 }
20742 }
20743 }
20744 }
20745
20746 /* Last chance: try to create a stack operation procedure to evaluate the
20747 value. Do nothing if even that is not possible or permitted. */
20748 if ((forms & dw_scalar_form_exprloc) == 0)
20749 return;
20750
20751 list = loc_list_from_tree (value, 2, context);
20752 if (context && context->placeholder_arg)
20753 {
20754 placeholder_seen = context->placeholder_seen;
20755 context->placeholder_seen = false;
20756 }
20757 if (list == NULL || single_element_loc_list_p (list))
20758 {
20759 /* If this attribute is not a reference nor constant, it is
20760 a DWARF expression rather than location description. For that
20761 loc_list_from_tree (value, 0, &context) is needed. */
20762 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20763 if (list2 && single_element_loc_list_p (list2))
20764 {
20765 if (placeholder_seen)
20766 {
20767 struct dwarf_procedure_info dpi;
20768 dpi.fndecl = NULL_TREE;
20769 dpi.args_count = 1;
20770 if (!resolve_args_picking (list2->expr, 1, &dpi))
20771 return;
20772 }
20773 add_AT_loc (die, attr, list2->expr);
20774 return;
20775 }
20776 }
20777
20778 /* If that failed to give a single element location list, fall back to
20779 outputting this as a reference... still if permitted. */
20780 if (list == NULL
20781 || (forms & dw_scalar_form_reference) == 0
20782 || placeholder_seen)
20783 return;
20784
20785 if (!decl_die)
20786 {
20787 if (current_function_decl == 0)
20788 context_die = comp_unit_die ();
20789 else
20790 context_die = lookup_decl_die (current_function_decl);
20791
20792 decl_die = new_die (DW_TAG_variable, context_die, value);
20793 add_AT_flag (decl_die, DW_AT_artificial, 1);
20794 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20795 context_die);
20796 }
20797
20798 add_AT_location_description (decl_die, DW_AT_location, list);
20799 add_AT_die_ref (die, attr, decl_die);
20800 }
20801
20802 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20803 default. */
20804
20805 static int
20806 lower_bound_default (void)
20807 {
20808 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20809 {
20810 case DW_LANG_C:
20811 case DW_LANG_C89:
20812 case DW_LANG_C99:
20813 case DW_LANG_C11:
20814 case DW_LANG_C_plus_plus:
20815 case DW_LANG_C_plus_plus_11:
20816 case DW_LANG_C_plus_plus_14:
20817 case DW_LANG_ObjC:
20818 case DW_LANG_ObjC_plus_plus:
20819 return 0;
20820 case DW_LANG_Fortran77:
20821 case DW_LANG_Fortran90:
20822 case DW_LANG_Fortran95:
20823 case DW_LANG_Fortran03:
20824 case DW_LANG_Fortran08:
20825 return 1;
20826 case DW_LANG_UPC:
20827 case DW_LANG_D:
20828 case DW_LANG_Python:
20829 return dwarf_version >= 4 ? 0 : -1;
20830 case DW_LANG_Ada95:
20831 case DW_LANG_Ada83:
20832 case DW_LANG_Cobol74:
20833 case DW_LANG_Cobol85:
20834 case DW_LANG_Modula2:
20835 case DW_LANG_PLI:
20836 return dwarf_version >= 4 ? 1 : -1;
20837 default:
20838 return -1;
20839 }
20840 }
20841
20842 /* Given a tree node describing an array bound (either lower or upper) output
20843 a representation for that bound. */
20844
20845 static void
20846 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20847 tree bound, struct loc_descr_context *context)
20848 {
20849 int dflt;
20850
20851 while (1)
20852 switch (TREE_CODE (bound))
20853 {
20854 /* Strip all conversions. */
20855 CASE_CONVERT:
20856 case VIEW_CONVERT_EXPR:
20857 bound = TREE_OPERAND (bound, 0);
20858 break;
20859
20860 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20861 are even omitted when they are the default. */
20862 case INTEGER_CST:
20863 /* If the value for this bound is the default one, we can even omit the
20864 attribute. */
20865 if (bound_attr == DW_AT_lower_bound
20866 && tree_fits_shwi_p (bound)
20867 && (dflt = lower_bound_default ()) != -1
20868 && tree_to_shwi (bound) == dflt)
20869 return;
20870
20871 /* FALLTHRU */
20872
20873 default:
20874 /* Because of the complex interactions there can be with other GNAT
20875 encodings, GDB isn't ready yet to handle a proper DWARF description
20876 for self-referential subrange bounds: let GNAT encodings do the
20877 magic in such a case.
20878 if (is_ada ()
20879 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20880 && contains_placeholder_p (bound))
20881 return;
20882
20883 add_scalar_info (subrange_die, bound_attr, bound,
20884 dw_scalar_form_constant
20885 | dw_scalar_form_exprloc
20886 | dw_scalar_form_reference,
20887 context);
20888 return;
20889 }
20890 }
20891
20892 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20893 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20894 Note that the block of subscript information for an array type also
20895 includes information about the element type of the given array type.
20896
20897 This function reuses previously set type and bound information if
20898 available. */
20899
20900 static void
20901 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20902 {
20903 unsigned dimension_number;
20904 tree lower, upper;
20905 dw_die_ref child = type_die->die_child;
20906
20907 for (dimension_number = 0;
20908 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20909 type = TREE_TYPE (type), dimension_number++)
20910 {
20911 tree domain = TYPE_DOMAIN (type);
20912
20913 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20914 break;
20915
20916 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20917 and (in GNU C only) variable bounds. Handle all three forms
20918 here. */
20919
20920 /* Find and reuse a previously generated DW_TAG_subrange_type if
20921 available.
20922
20923 For multi-dimensional arrays, as we iterate through the
20924 various dimensions in the enclosing for loop above, we also
20925 iterate through the DIE children and pick up each
20926 DW_TAG_subrange_type previously generated (if available).
20927 Each child DW_TAG_subrange_type DIE describes the range of
20928 the current dimension. At this point we should have as many
20929 DW_TAG_subrange_type's as we have dimensions in the
20930 array. */
20931 dw_die_ref subrange_die = NULL;
20932 if (child)
20933 while (1)
20934 {
20935 child = child->die_sib;
20936 if (child->die_tag == DW_TAG_subrange_type)
20937 subrange_die = child;
20938 if (child == type_die->die_child)
20939 {
20940 /* If we wrapped around, stop looking next time. */
20941 child = NULL;
20942 break;
20943 }
20944 if (child->die_tag == DW_TAG_subrange_type)
20945 break;
20946 }
20947 if (!subrange_die)
20948 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20949
20950 if (domain)
20951 {
20952 /* We have an array type with specified bounds. */
20953 lower = TYPE_MIN_VALUE (domain);
20954 upper = TYPE_MAX_VALUE (domain);
20955
20956 /* Define the index type. */
20957 if (TREE_TYPE (domain)
20958 && !get_AT (subrange_die, DW_AT_type))
20959 {
20960 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20961 TREE_TYPE field. We can't emit debug info for this
20962 because it is an unnamed integral type. */
20963 if (TREE_CODE (domain) == INTEGER_TYPE
20964 && TYPE_NAME (domain) == NULL_TREE
20965 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20966 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20967 ;
20968 else
20969 add_type_attribute (subrange_die, TREE_TYPE (domain),
20970 TYPE_UNQUALIFIED, false, type_die);
20971 }
20972
20973 /* ??? If upper is NULL, the array has unspecified length,
20974 but it does have a lower bound. This happens with Fortran
20975 dimension arr(N:*)
20976 Since the debugger is definitely going to need to know N
20977 to produce useful results, go ahead and output the lower
20978 bound solo, and hope the debugger can cope. */
20979
20980 if (!get_AT (subrange_die, DW_AT_lower_bound))
20981 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20982 if (!get_AT (subrange_die, DW_AT_upper_bound)
20983 && !get_AT (subrange_die, DW_AT_count))
20984 {
20985 if (upper)
20986 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20987 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20988 /* Zero-length array. */
20989 add_bound_info (subrange_die, DW_AT_count,
20990 build_int_cst (TREE_TYPE (lower), 0), NULL);
20991 }
20992 }
20993
20994 /* Otherwise we have an array type with an unspecified length. The
20995 DWARF-2 spec does not say how to handle this; let's just leave out the
20996 bounds. */
20997 }
20998 }
20999
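/* Illustrative sketch, not part of the compiler: with COLLAPSE_P true (the
   non-Ada case), a nested C array type is described by a single
   DW_TAG_array_type DIE whose subscript block carries one
   DW_TAG_subrange_type child per dimension.  */
#if 0
int m[2][3];   /* One DW_TAG_array_type with two DW_TAG_subrange_type
                  children, DW_AT_upper_bound 1 and DW_AT_upper_bound 2;
                  the default C lower bound 0 is omitted from both.  */
#endif
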
21000 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21001
21002 static void
21003 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21004 {
21005 dw_die_ref decl_die;
21006 HOST_WIDE_INT size;
21007 dw_loc_descr_ref size_expr = NULL;
21008
21009 switch (TREE_CODE (tree_node))
21010 {
21011 case ERROR_MARK:
21012 size = 0;
21013 break;
21014 case ENUMERAL_TYPE:
21015 case RECORD_TYPE:
21016 case UNION_TYPE:
21017 case QUAL_UNION_TYPE:
21018 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21019 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21020 {
21021 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21022 return;
21023 }
21024 size_expr = type_byte_size (tree_node, &size);
21025 break;
21026 case FIELD_DECL:
21027 /* For a data member of a struct or union, the DW_AT_byte_size is
21028 generally given as the number of bytes normally allocated for an
21029 object of the *declared* type of the member itself. This is true
21030 even for bit-fields. */
21031 size = int_size_in_bytes (field_type (tree_node));
21032 break;
21033 default:
21034 gcc_unreachable ();
21035 }
21036
21037 /* Support for dynamically-sized objects was introduced by DWARFv3.
21038 At the moment, GDB does not handle variable byte sizes very well,
21039 though. */
21040 if ((dwarf_version >= 3 || !dwarf_strict)
21041 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21042 && size_expr != NULL)
21043 add_AT_loc (die, DW_AT_byte_size, size_expr);
21044
21045 /* Note that `size' might be -1 when we get to this point. If it is, that
21046 indicates that the byte size of the entity in question is variable and
21047 that we could not generate a DWARF expression that computes it. */
21048 if (size >= 0)
21049 add_AT_unsigned (die, DW_AT_byte_size, size);
21050 }
21051
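/* Illustrative sketch, not part of the compiler, assuming a target with
   4-byte int and 4-byte int alignment: the aggregate DIE gets the padded
   size of the whole object, while a bit-field member's byte size (when
   emitted) reflects its declared type, as noted in the FIELD_DECL case
   above.  */
#if 0
struct pair
{
  char tag;    /* followed by 3 bytes of padding on the assumed target  */
  int value;
};               /* DW_TAG_structure_type: DW_AT_byte_size 8  */
#endif
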
21052 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21053 alignment. */
21054
21055 static void
21056 add_alignment_attribute (dw_die_ref die, tree tree_node)
21057 {
21058 if (dwarf_version < 5 && dwarf_strict)
21059 return;
21060
21061 unsigned align;
21062
21063 if (DECL_P (tree_node))
21064 {
21065 if (!DECL_USER_ALIGN (tree_node))
21066 return;
21067
21068 align = DECL_ALIGN_UNIT (tree_node);
21069 }
21070 else if (TYPE_P (tree_node))
21071 {
21072 if (!TYPE_USER_ALIGN (tree_node))
21073 return;
21074
21075 align = TYPE_ALIGN_UNIT (tree_node);
21076 }
21077 else
21078 gcc_unreachable ();
21079
21080 add_AT_unsigned (die, DW_AT_alignment, align);
21081 }
21082
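/* Illustrative sketch, not part of the compiler: only user-specified
   alignment (DECL_USER_ALIGN / TYPE_USER_ALIGN) is recorded, and only for
   DWARF 5 or when not in strict DWARF mode.  */
#if 0
struct plain { int i; };                                  /* no DW_AT_alignment  */
struct padded { int i; } __attribute__ ((aligned (16)));  /* DW_AT_alignment 16  */
#endif
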
21083 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21084 which specifies the distance in bits from the highest order bit of the
21085 "containing object" for the bit-field to the highest order bit of the
21086 bit-field itself.
21087
21088 For any given bit-field, the "containing object" is a hypothetical object
21089 (of some integral or enum type) within which the given bit-field lives. The
21090 type of this hypothetical "containing object" is always the same as the
21091 declared type of the individual bit-field itself. The determination of the
21092 exact location of the "containing object" for a bit-field is rather
21093 complicated. It's handled by the `field_byte_offset' function (above).
21094
21095 CTX is required: see the comment for VLR_CONTEXT.
21096
21097 Note that it is the size (in bytes) of the hypothetical "containing object"
21098 which will be given in the DW_AT_byte_size attribute for this bit-field.
21099 (See `byte_size_attribute' above). */
21100
21101 static inline void
21102 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21103 {
21104 HOST_WIDE_INT object_offset_in_bytes;
21105 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21106 HOST_WIDE_INT bitpos_int;
21107 HOST_WIDE_INT highest_order_object_bit_offset;
21108 HOST_WIDE_INT highest_order_field_bit_offset;
21109 HOST_WIDE_INT bit_offset;
21110
21111 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21112
21113 /* Must be a field and a bit field. */
21114 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21115
21116 /* We can't yet handle bit-fields whose offsets are variable, so if we
21117 encounter such things, just return without generating any attribute
21118 whatsoever. Likewise for variable or too large size. */
21119 if (! tree_fits_shwi_p (bit_position (decl))
21120 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21121 return;
21122
21123 bitpos_int = int_bit_position (decl);
21124
21125 /* Note that the bit offset is always the distance (in bits) from the
21126 highest-order bit of the "containing object" to the highest-order bit of
21127 the bit-field itself. Since the "high-order end" of any object or field
21128 is different on big-endian and little-endian machines, the computation
21129 below must take account of these differences. */
21130 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21131 highest_order_field_bit_offset = bitpos_int;
21132
21133 if (! BYTES_BIG_ENDIAN)
21134 {
21135 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21136 highest_order_object_bit_offset +=
21137 simple_type_size_in_bits (original_type);
21138 }
21139
21140 bit_offset
21141 = (! BYTES_BIG_ENDIAN
21142 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21143 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21144
21145 if (bit_offset < 0)
21146 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21147 else
21148 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21149 }
21150
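/* Illustrative sketch, not part of the compiler, assuming a little-endian
   target with 32-bit int where both bit-fields share one containing object
   at byte offset 0: DW_AT_bit_offset counts from the high-order bit of that
   containing object, so the values come out as shown.  */
#if 0
struct flags
{
  unsigned a : 3;   /* DW_AT_bit_size 3, DW_AT_bit_offset 32 - (0 + 3) = 29  */
  unsigned b : 5;   /* DW_AT_bit_size 5, DW_AT_bit_offset 32 - (3 + 5) = 24  */
};
#endif
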
21151 /* For a FIELD_DECL node which represents a bit field, output an attribute
21152 which specifies the length in bits of the given field. */
21153
21154 static inline void
21155 add_bit_size_attribute (dw_die_ref die, tree decl)
21156 {
21157 /* Must be a field and a bit field. */
21158 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21159 && DECL_BIT_FIELD_TYPE (decl));
21160
21161 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21162 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21163 }
21164
21165 /* If the compiled language is ANSI C, then add a 'prototyped'
21166 attribute if argument types are given for the parameters of a function. */
21167
21168 static inline void
21169 add_prototyped_attribute (dw_die_ref die, tree func_type)
21170 {
21171 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21172 {
21173 case DW_LANG_C:
21174 case DW_LANG_C89:
21175 case DW_LANG_C99:
21176 case DW_LANG_C11:
21177 case DW_LANG_ObjC:
21178 if (prototype_p (func_type))
21179 add_AT_flag (die, DW_AT_prototyped, 1);
21180 break;
21181 default:
21182 break;
21183 }
21184 }
21185
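/* Illustrative sketch, not part of the compiler: for C (before the C2X
   change that makes an empty parameter list a prototype), only the
   prototyped declaration receives the attribute.  */
#if 0
int f (void);   /* prototype_p is true: DW_AT_prototyped 1  */
int g ();       /* old-style (unprototyped) declaration: no attribute  */
#endif
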
21186 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21187 by looking in the type declaration, the object declaration equate table or
21188 the block mapping. */
21189
21190 static inline void
21191 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21192 {
21193 dw_die_ref origin_die = NULL;
21194
21195 /* For late LTO debug output we want to refer directly to the abstract
21196 DIE in the early debug rather than to the possibly existing concrete
21197 instance and avoid creating that just for this purpose. */
21198 sym_off_pair *desc;
21199 if (in_lto_p
21200 && external_die_map
21201 && (desc = external_die_map->get (origin)))
21202 {
21203 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21204 desc->sym, desc->off);
21205 return;
21206 }
21207
21208 if (DECL_P (origin))
21209 origin_die = lookup_decl_die (origin);
21210 else if (TYPE_P (origin))
21211 origin_die = lookup_type_die (origin);
21212 else if (TREE_CODE (origin) == BLOCK)
21213 origin_die = lookup_block_die (origin);
21214
21215 /* XXX: Functions that are never lowered don't always have correct block
21216 trees (in the case of Java they simply have no block tree; the same happens
21217 in some other languages). For these functions, there is nothing we can really do to
21218 output correct debug info for inlined functions in all cases. Rather
21219 than die, we'll just produce deficient debug info now, in that we will
21220 have variables without a proper abstract origin. In the future, when all
21221 functions are lowered, we should re-add a gcc_assert (origin_die)
21222 here. */
21223
21224 if (origin_die)
21225 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21226 }
21227
21228 /* We do not currently support the pure_virtual attribute. */
21229
21230 static inline void
21231 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21232 {
21233 if (DECL_VINDEX (func_decl))
21234 {
21235 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21236
21237 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21238 add_AT_loc (die, DW_AT_vtable_elem_location,
21239 new_loc_descr (DW_OP_constu,
21240 tree_to_shwi (DECL_VINDEX (func_decl)),
21241 0));
21242
21243 /* GNU extension: Record what type this method came from originally. */
21244 if (debug_info_level > DINFO_LEVEL_TERSE
21245 && DECL_CONTEXT (func_decl))
21246 add_AT_die_ref (die, DW_AT_containing_type,
21247 lookup_type_die (DECL_CONTEXT (func_decl)));
21248 }
21249 }
21250 \f
21251 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21252 given decl. This used to be a vendor extension until after DWARF 4
21253 standardized it. */
21254
21255 static void
21256 add_linkage_attr (dw_die_ref die, tree decl)
21257 {
21258 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21259
21260 /* Mimic what assemble_name_raw does with a leading '*'. */
21261 if (name[0] == '*')
21262 name = &name[1];
21263
21264 if (dwarf_version >= 4)
21265 add_AT_string (die, DW_AT_linkage_name, name);
21266 else
21267 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21268 }
21269
21270 /* Add source coordinate attributes for the given decl. */
21271
21272 static void
21273 add_src_coords_attributes (dw_die_ref die, tree decl)
21274 {
21275 expanded_location s;
21276
21277 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21278 return;
21279 s = expand_location (DECL_SOURCE_LOCATION (decl));
21280 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21281 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21282 if (debug_column_info && s.column)
21283 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21284 }
21285
21286 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21287
21288 static void
21289 add_linkage_name_raw (dw_die_ref die, tree decl)
21290 {
21291 /* Defer until we have an assembler name set. */
21292 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21293 {
21294 limbo_die_node *asm_name;
21295
21296 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21297 asm_name->die = die;
21298 asm_name->created_for = decl;
21299 asm_name->next = deferred_asm_name;
21300 deferred_asm_name = asm_name;
21301 }
21302 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21303 add_linkage_attr (die, decl);
21304 }
21305
21306 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21307
21308 static void
21309 add_linkage_name (dw_die_ref die, tree decl)
21310 {
21311 if (debug_info_level > DINFO_LEVEL_NONE
21312 && VAR_OR_FUNCTION_DECL_P (decl)
21313 && TREE_PUBLIC (decl)
21314 && !(VAR_P (decl) && DECL_REGISTER (decl))
21315 && die->die_tag != DW_TAG_member)
21316 add_linkage_name_raw (die, decl);
21317 }
21318
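/* Illustrative sketch, not part of the compiler: a public decl whose
   assembler name differs from its source name gets a linkage-name
   attribute.  The asm-renamed variable below is one such case; C++
   mangled names are the more common one.  */
#if 0
int counter __asm__ ("fast_counter");   /* DW_AT_linkage_name "fast_counter"
                                           (DW_AT_MIPS_linkage_name before
                                           DWARF 4)  */
#endif
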
21319 /* Add a DW_AT_name attribute and source coordinate attribute for the
21320 given decl, but only if it actually has a name. */
21321
21322 static void
21323 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21324 bool no_linkage_name)
21325 {
21326 tree decl_name;
21327
21328 decl_name = DECL_NAME (decl);
21329 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21330 {
21331 const char *name = dwarf2_name (decl, 0);
21332 if (name)
21333 add_name_attribute (die, name);
21334 else
21335 add_desc_attribute (die, decl);
21336
21337 if (! DECL_ARTIFICIAL (decl))
21338 add_src_coords_attributes (die, decl);
21339
21340 if (!no_linkage_name)
21341 add_linkage_name (die, decl);
21342 }
21343 else
21344 add_desc_attribute (die, decl);
21345
21346 #ifdef VMS_DEBUGGING_INFO
21347 /* Get the function's name, as described by its RTL. This may be different
21348 from the DECL_NAME name used in the source file. */
21349 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21350 {
21351 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21352 XEXP (DECL_RTL (decl), 0), false);
21353 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21354 }
21355 #endif /* VMS_DEBUGGING_INFO */
21356 }
21357
21358 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21359
21360 static void
21361 add_discr_value (dw_die_ref die, dw_discr_value *value)
21362 {
21363 dw_attr_node attr;
21364
21365 attr.dw_attr = DW_AT_discr_value;
21366 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21367 attr.dw_attr_val.val_entry = NULL;
21368 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21369 if (value->pos)
21370 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21371 else
21372 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21373 add_dwarf_attr (die, &attr);
21374 }
21375
21376 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21377
21378 static void
21379 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21380 {
21381 dw_attr_node attr;
21382
21383 attr.dw_attr = DW_AT_discr_list;
21384 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21385 attr.dw_attr_val.val_entry = NULL;
21386 attr.dw_attr_val.v.val_discr_list = discr_list;
21387 add_dwarf_attr (die, &attr);
21388 }
21389
21390 static inline dw_discr_list_ref
21391 AT_discr_list (dw_attr_node *attr)
21392 {
21393 return attr->dw_attr_val.v.val_discr_list;
21394 }
21395
21396 #ifdef VMS_DEBUGGING_INFO
21397 /* Output the debug main pointer die for VMS. */
21398
21399 void
21400 dwarf2out_vms_debug_main_pointer (void)
21401 {
21402 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21403 dw_die_ref die;
21404
21405 /* Allocate the VMS debug main subprogram die. */
21406 die = new_die_raw (DW_TAG_subprogram);
21407 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21408 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21409 current_function_funcdef_no);
21410 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21411
21412 /* Make it the first child of comp_unit_die (). */
21413 die->die_parent = comp_unit_die ();
21414 if (comp_unit_die ()->die_child)
21415 {
21416 die->die_sib = comp_unit_die ()->die_child->die_sib;
21417 comp_unit_die ()->die_child->die_sib = die;
21418 }
21419 else
21420 {
21421 die->die_sib = die;
21422 comp_unit_die ()->die_child = die;
21423 }
21424 }
21425 #endif /* VMS_DEBUGGING_INFO */
21426
21427 /* walk_tree helper function for uses_local_type, below. */
21428
21429 static tree
21430 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21431 {
21432 if (!TYPE_P (*tp))
21433 *walk_subtrees = 0;
21434 else
21435 {
21436 tree name = TYPE_NAME (*tp);
21437 if (name && DECL_P (name) && decl_function_context (name))
21438 return *tp;
21439 }
21440 return NULL_TREE;
21441 }
21442
21443 /* If TYPE involves a function-local type (including a local typedef to a
21444 non-local type), returns that type; otherwise returns NULL_TREE. */
21445
21446 static tree
21447 uses_local_type (tree type)
21448 {
21449 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21450 return used;
21451 }
21452
21453 /* Return the DIE for the scope that immediately contains this type.
21454 Non-named types that do not involve a function-local type get global
21455 scope. Named types nested in namespaces or other types get their
21456 containing scope. All other types (i.e. function-local named types) get
21457 the current active scope. */
21458
21459 static dw_die_ref
21460 scope_die_for (tree t, dw_die_ref context_die)
21461 {
21462 dw_die_ref scope_die = NULL;
21463 tree containing_scope;
21464
21465 /* Non-types always go in the current scope. */
21466 gcc_assert (TYPE_P (t));
21467
21468 /* Use the scope of the typedef, rather than the scope of the type
21469 it refers to. */
21470 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21471 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21472 else
21473 containing_scope = TYPE_CONTEXT (t);
21474
21475 /* Use the containing namespace if there is one. */
21476 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21477 {
21478 if (context_die == lookup_decl_die (containing_scope))
21479 /* OK */;
21480 else if (debug_info_level > DINFO_LEVEL_TERSE)
21481 context_die = get_context_die (containing_scope);
21482 else
21483 containing_scope = NULL_TREE;
21484 }
21485
21486 /* Ignore function type "scopes" from the C frontend. They mean that
21487 a tagged type is local to a parmlist of a function declarator, but
21488 that isn't useful to DWARF. */
21489 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21490 containing_scope = NULL_TREE;
21491
21492 if (SCOPE_FILE_SCOPE_P (containing_scope))
21493 {
21494 /* If T uses a local type keep it local as well, to avoid references
21495 to function-local DIEs from outside the function. */
21496 if (current_function_decl && uses_local_type (t))
21497 scope_die = context_die;
21498 else
21499 scope_die = comp_unit_die ();
21500 }
21501 else if (TYPE_P (containing_scope))
21502 {
21503 /* For types, we can just look up the appropriate DIE. */
21504 if (debug_info_level > DINFO_LEVEL_TERSE)
21505 scope_die = get_context_die (containing_scope);
21506 else
21507 {
21508 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21509 if (scope_die == NULL)
21510 scope_die = comp_unit_die ();
21511 }
21512 }
21513 else
21514 scope_die = context_die;
21515
21516 return scope_die;
21517 }
21518
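/* Illustrative sketch, not part of the compiler: per the rules above, a
   named type declared inside a function is placed under the currently
   active scope rather than at the compilation-unit level.  */
#if 0
void
f (void)
{
  struct local { int i; } x;   /* DIE for 'struct local' becomes a child of
                                  the scope DIE active inside 'f'  */
  (void) x;
}
#endif
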
21519 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21520
21521 static inline int
21522 local_scope_p (dw_die_ref context_die)
21523 {
21524 for (; context_die; context_die = context_die->die_parent)
21525 if (context_die->die_tag == DW_TAG_inlined_subroutine
21526 || context_die->die_tag == DW_TAG_subprogram)
21527 return 1;
21528
21529 return 0;
21530 }
21531
21532 /* Returns nonzero if CONTEXT_DIE is a class. */
21533
21534 static inline int
21535 class_scope_p (dw_die_ref context_die)
21536 {
21537 return (context_die
21538 && (context_die->die_tag == DW_TAG_structure_type
21539 || context_die->die_tag == DW_TAG_class_type
21540 || context_die->die_tag == DW_TAG_interface_type
21541 || context_die->die_tag == DW_TAG_union_type));
21542 }
21543
21544 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21545 whether or not to treat a DIE in this context as a declaration. */
21546
21547 static inline int
21548 class_or_namespace_scope_p (dw_die_ref context_die)
21549 {
21550 return (class_scope_p (context_die)
21551 || (context_die && context_die->die_tag == DW_TAG_namespace));
21552 }
21553
21554 /* Many forms of DIEs require a "type description" attribute. This
21555 routine locates the proper "type descriptor" die for the type given
21556 by 'type' plus any additional qualifiers given by 'cv_quals', and
21557 adds a DW_AT_type attribute below the given die. */
21558
21559 static void
21560 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21561 bool reverse, dw_die_ref context_die)
21562 {
21563 enum tree_code code = TREE_CODE (type);
21564 dw_die_ref type_die = NULL;
21565
21566 if (debug_info_level <= DINFO_LEVEL_TERSE)
21567 return;
21568
21569 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21570 or fixed-point type, use the inner type. This is because we have no
21571 support for unnamed types in base_type_die. This can happen if this is
21572 an Ada subrange type. The correct solution is to emit a subrange type die. */
21573 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21574 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21575 type = TREE_TYPE (type), code = TREE_CODE (type);
21576
21577 if (code == ERROR_MARK
21578 /* Handle a special case. For functions whose return type is void, we
21579 generate *no* type attribute. (Note that no object may have type
21580 `void', so this only applies to function return types). */
21581 || code == VOID_TYPE)
21582 return;
21583
21584 type_die = modified_type_die (type,
21585 cv_quals | TYPE_QUALS (type),
21586 reverse,
21587 context_die);
21588
21589 if (type_die != NULL)
21590 add_AT_die_ref (object_die, DW_AT_type, type_die);
21591 }
21592
21593 /* Given an object die, add the calling convention attribute for the
21594 function call type. */
21595 static void
21596 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21597 {
21598 enum dwarf_calling_convention value = DW_CC_normal;
21599
21600 value = ((enum dwarf_calling_convention)
21601 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21602
21603 if (is_fortran ()
21604 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21605 {
21606 /* DWARF 2 doesn't provide a way to identify a program's source-level
21607 entry point. DW_AT_calling_convention attributes are only meant
21608 to describe functions' calling conventions. However, lacking a
21609 better way to signal the Fortran main program, we used this for
21610 a long time, following existing custom. Now, DWARF 4 has
21611 DW_AT_main_subprogram, which we add below, but some tools still
21612 rely on the old way, which we thus keep. */
21613 value = DW_CC_program;
21614
21615 if (dwarf_version >= 4 || !dwarf_strict)
21616 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21617 }
21618
21619 /* Only add the attribute if the backend requests it, and
21620 the value is not DW_CC_normal. */
21621 if (value && (value != DW_CC_normal))
21622 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21623 }
21624
21625 /* Given a tree pointer to a struct, class, union, or enum type node, return
21626 a pointer to the (string) tag name for the given type, or zero if the type
21627 was declared without a tag. */
21628
21629 static const char *
21630 type_tag (const_tree type)
21631 {
21632 const char *name = 0;
21633
21634 if (TYPE_NAME (type) != 0)
21635 {
21636 tree t = 0;
21637
21638 /* Find the IDENTIFIER_NODE for the type name. */
21639 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21640 && !TYPE_NAMELESS (type))
21641 t = TYPE_NAME (type);
21642
21643 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21644 a TYPE_DECL node, regardless of whether or not a `typedef' was
21645 involved. */
21646 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21647 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21648 {
21649 /* We want to be extra verbose. Don't call dwarf_name if
21650 DECL_NAME isn't set. The default hook for decl_printable_name
21651 doesn't like that, and in this context it's correct to return
21652 0, instead of "<anonymous>" or the like. */
21653 if (DECL_NAME (TYPE_NAME (type))
21654 && !DECL_NAMELESS (TYPE_NAME (type)))
21655 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21656 }
21657
21658 /* Now get the name as a string, or invent one. */
21659 if (!name && t != 0)
21660 name = IDENTIFIER_POINTER (t);
21661 }
21662
21663 return (name == 0 || *name == '\0') ? 0 : name;
21664 }
21665
21666 /* Return the type associated with a data member, making a special check
21667 for bit field types. */
21668
21669 static inline tree
21670 member_declared_type (const_tree member)
21671 {
21672 return (DECL_BIT_FIELD_TYPE (member)
21673 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21674 }
21675
21676 /* Get the decl's label, as described by its RTL. This may be different
21677 from the DECL_NAME name used in the source file. */
21678
21679 #if 0
21680 static const char *
21681 decl_start_label (tree decl)
21682 {
21683 rtx x;
21684 const char *fnname;
21685
21686 x = DECL_RTL (decl);
21687 gcc_assert (MEM_P (x));
21688
21689 x = XEXP (x, 0);
21690 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21691
21692 fnname = XSTR (x, 0);
21693 return fnname;
21694 }
21695 #endif
21696 \f
21697 /* For variable-length arrays that have been previously generated, but
21698 may be incomplete due to missing subscript info, fill the subscript
21699 info. Return TRUE if this is one of those cases. */
21700 static bool
21701 fill_variable_array_bounds (tree type)
21702 {
21703 if (TREE_ASM_WRITTEN (type)
21704 && TREE_CODE (type) == ARRAY_TYPE
21705 && variably_modified_type_p (type, NULL))
21706 {
21707 dw_die_ref array_die = lookup_type_die (type);
21708 if (!array_die)
21709 return false;
21710 add_subscript_info (array_die, type, !is_ada ());
21711 return true;
21712 }
21713 return false;
21714 }
21715
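/* Illustrative sketch, not part of the compiler: a C99 variable-length
   array type is variably modified, so its DW_TAG_array_type DIE may first
   be emitted without complete subscript info and have the bounds filled in
   by the function above on a later encounter.  */
#if 0
void
scale (int n)
{
  double a[n];   /* 'double[n]' is a variably modified array type  */
  a[0] = 0.0;
}
#endif
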
21716 /* These routines generate the internal representation of the DIE's for
21717 the compilation unit. Debugging information is collected by walking
21718 the declaration trees passed in from dwarf2out_decl(). */
21719
21720 static void
21721 gen_array_type_die (tree type, dw_die_ref context_die)
21722 {
21723 dw_die_ref array_die;
21724
21725 /* GNU compilers represent multidimensional array types as sequences of one
21726 dimensional array types whose element types are themselves array types.
21727 We sometimes squish that down to a single array_type DIE with multiple
21728 subscripts in the Dwarf debugging info. The draft Dwarf specification
21729 says that we are allowed to do this kind of compression in C, because
21730 there is no difference between an array of arrays and a multidimensional
21731 array. We don't do this for Ada to remain as close as possible to the
21732 actual representation, which is especially important given the language's
21733 flexibility with respect to arrays of variable size. */
21734
21735 bool collapse_nested_arrays = !is_ada ();
21736
21737 if (fill_variable_array_bounds (type))
21738 return;
21739
21740 dw_die_ref scope_die = scope_die_for (type, context_die);
21741 tree element_type;
21742
21743 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21744 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21745 if (TREE_CODE (type) == ARRAY_TYPE
21746 && TYPE_STRING_FLAG (type)
21747 && is_fortran ()
21748 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21749 {
21750 HOST_WIDE_INT size;
21751
21752 array_die = new_die (DW_TAG_string_type, scope_die, type);
21753 add_name_attribute (array_die, type_tag (type));
21754 equate_type_number_to_die (type, array_die);
21755 size = int_size_in_bytes (type);
21756 if (size >= 0)
21757 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21758 /* ??? We can't annotate types late, but for LTO we may not
21759 generate a location early either (gfortran.dg/save_6.f90). */
21760 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21761 && TYPE_DOMAIN (type) != NULL_TREE
21762 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21763 {
21764 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21765 tree rszdecl = szdecl;
21766
21767 size = int_size_in_bytes (TREE_TYPE (szdecl));
21768 if (!DECL_P (szdecl))
21769 {
21770 if (TREE_CODE (szdecl) == INDIRECT_REF
21771 && DECL_P (TREE_OPERAND (szdecl, 0)))
21772 {
21773 rszdecl = TREE_OPERAND (szdecl, 0);
21774 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21775 != DWARF2_ADDR_SIZE)
21776 size = 0;
21777 }
21778 else
21779 size = 0;
21780 }
21781 if (size > 0)
21782 {
21783 dw_loc_list_ref loc
21784 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21785 NULL);
21786 if (loc)
21787 {
21788 add_AT_location_description (array_die, DW_AT_string_length,
21789 loc);
21790 if (size != DWARF2_ADDR_SIZE)
21791 add_AT_unsigned (array_die, dwarf_version >= 5
21792 ? DW_AT_string_length_byte_size
21793 : DW_AT_byte_size, size);
21794 }
21795 }
21796 }
21797 return;
21798 }
21799
21800 array_die = new_die (DW_TAG_array_type, scope_die, type);
21801 add_name_attribute (array_die, type_tag (type));
21802 equate_type_number_to_die (type, array_die);
21803
21804 if (TREE_CODE (type) == VECTOR_TYPE)
21805 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21806
21807 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21808 if (is_fortran ()
21809 && TREE_CODE (type) == ARRAY_TYPE
21810 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21811 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21812 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21813
21814 #if 0
21815 /* We default the array ordering. Debuggers will probably do the right
21816 things even if DW_AT_ordering is not present. It's not even an issue
21817 until we start to get into multidimensional arrays anyway. If a debugger
21818 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21819 then we'll have to put the DW_AT_ordering attribute back in. (But if
21820 and when we find out that we need to put these in, we will only do so
21821 for multidimensional arrays.) */
21822 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21823 #endif
21824
21825 if (TREE_CODE (type) == VECTOR_TYPE)
21826 {
21827 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21828 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21829 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21830 add_bound_info (subrange_die, DW_AT_upper_bound,
21831 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21832 }
21833 else
21834 add_subscript_info (array_die, type, collapse_nested_arrays);
21835
21836 /* Add representation of the type of the elements of this array type and
21837 emit the corresponding DIE if we haven't done it already. */
21838 element_type = TREE_TYPE (type);
21839 if (collapse_nested_arrays)
21840 while (TREE_CODE (element_type) == ARRAY_TYPE)
21841 {
21842 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21843 break;
21844 element_type = TREE_TYPE (element_type);
21845 }
21846
21847 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21848 TREE_CODE (type) == ARRAY_TYPE
21849 && TYPE_REVERSE_STORAGE_ORDER (type),
21850 context_die);
21851
21852 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21853 if (TYPE_ARTIFICIAL (type))
21854 add_AT_flag (array_die, DW_AT_artificial, 1);
21855
21856 if (get_AT (array_die, DW_AT_name))
21857 add_pubtype (type, array_die);
21858
21859 add_alignment_attribute (array_die, type);
21860 }
21861
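/* Illustrative sketch, not part of the compiler, assuming 4-byte int: a
   GNU vector type is also emitted as a DW_TAG_array_type, flagged with
   DW_AT_GNU_vector and given a single subrange covering its lanes.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
/* DW_TAG_array_type with DW_AT_GNU_vector, element type 'int' and one
   DW_TAG_subrange_type with bounds 0 .. 3.  */
#endif
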
21862 /* This routine generates a DIE for an array with a hidden descriptor; the
21863 details are filled into *info by a langhook. */
21864
21865 static void
21866 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21867 dw_die_ref context_die)
21868 {
21869 const dw_die_ref scope_die = scope_die_for (type, context_die);
21870 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21871 struct loc_descr_context context = { type, info->base_decl, NULL,
21872 false, false };
21873 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21874 int dim;
21875
21876 add_name_attribute (array_die, type_tag (type));
21877 equate_type_number_to_die (type, array_die);
21878
21879 if (info->ndimensions > 1)
21880 switch (info->ordering)
21881 {
21882 case array_descr_ordering_row_major:
21883 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21884 break;
21885 case array_descr_ordering_column_major:
21886 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21887 break;
21888 default:
21889 break;
21890 }
21891
21892 if (dwarf_version >= 3 || !dwarf_strict)
21893 {
21894 if (info->data_location)
21895 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21896 dw_scalar_form_exprloc, &context);
21897 if (info->associated)
21898 add_scalar_info (array_die, DW_AT_associated, info->associated,
21899 dw_scalar_form_constant
21900 | dw_scalar_form_exprloc
21901 | dw_scalar_form_reference, &context);
21902 if (info->allocated)
21903 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21904 dw_scalar_form_constant
21905 | dw_scalar_form_exprloc
21906 | dw_scalar_form_reference, &context);
21907 if (info->stride)
21908 {
21909 const enum dwarf_attribute attr
21910 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21911 const int forms
21912 = (info->stride_in_bits)
21913 ? dw_scalar_form_constant
21914 : (dw_scalar_form_constant
21915 | dw_scalar_form_exprloc
21916 | dw_scalar_form_reference);
21917
21918 add_scalar_info (array_die, attr, info->stride, forms, &context);
21919 }
21920 }
21921 if (dwarf_version >= 5)
21922 {
21923 if (info->rank)
21924 {
21925 add_scalar_info (array_die, DW_AT_rank, info->rank,
21926 dw_scalar_form_constant
21927 | dw_scalar_form_exprloc, &context);
21928 subrange_tag = DW_TAG_generic_subrange;
21929 context.placeholder_arg = true;
21930 }
21931 }
21932
21933 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21934
21935 for (dim = 0; dim < info->ndimensions; dim++)
21936 {
21937 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21938
21939 if (info->dimen[dim].bounds_type)
21940 add_type_attribute (subrange_die,
21941 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21942 false, context_die);
21943 if (info->dimen[dim].lower_bound)
21944 add_bound_info (subrange_die, DW_AT_lower_bound,
21945 info->dimen[dim].lower_bound, &context);
21946 if (info->dimen[dim].upper_bound)
21947 add_bound_info (subrange_die, DW_AT_upper_bound,
21948 info->dimen[dim].upper_bound, &context);
21949 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21950 add_scalar_info (subrange_die, DW_AT_byte_stride,
21951 info->dimen[dim].stride,
21952 dw_scalar_form_constant
21953 | dw_scalar_form_exprloc
21954 | dw_scalar_form_reference,
21955 &context);
21956 }
21957
21958 gen_type_die (info->element_type, context_die);
21959 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21960 TREE_CODE (type) == ARRAY_TYPE
21961 && TYPE_REVERSE_STORAGE_ORDER (type),
21962 context_die);
21963
21964 if (get_AT (array_die, DW_AT_name))
21965 add_pubtype (type, array_die);
21966
21967 add_alignment_attribute (array_die, type);
21968 }
21969
21970 #if 0
21971 static void
21972 gen_entry_point_die (tree decl, dw_die_ref context_die)
21973 {
21974 tree origin = decl_ultimate_origin (decl);
21975 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21976
21977 if (origin != NULL)
21978 add_abstract_origin_attribute (decl_die, origin);
21979 else
21980 {
21981 add_name_and_src_coords_attributes (decl_die, decl);
21982 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21983 TYPE_UNQUALIFIED, false, context_die);
21984 }
21985
21986 if (DECL_ABSTRACT_P (decl))
21987 equate_decl_number_to_die (decl, decl_die);
21988 else
21989 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21990 }
21991 #endif
21992
21993 /* Walk through the list of incomplete types again, trying once more to
21994 emit full debugging info for them. */
21995
21996 static void
21997 retry_incomplete_types (void)
21998 {
21999 set_early_dwarf s;
22000 int i;
22001
22002 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22003 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22004 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22005 vec_safe_truncate (incomplete_types, 0);
22006 }
22007
22008 /* Determine what tag to use for a record type. */
22009
22010 static enum dwarf_tag
22011 record_type_tag (tree type)
22012 {
22013 if (! lang_hooks.types.classify_record)
22014 return DW_TAG_structure_type;
22015
22016 switch (lang_hooks.types.classify_record (type))
22017 {
22018 case RECORD_IS_STRUCT:
22019 return DW_TAG_structure_type;
22020
22021 case RECORD_IS_CLASS:
22022 return DW_TAG_class_type;
22023
22024 case RECORD_IS_INTERFACE:
22025 if (dwarf_version >= 3 || !dwarf_strict)
22026 return DW_TAG_interface_type;
22027 return DW_TAG_structure_type;
22028
22029 default:
22030 gcc_unreachable ();
22031 }
22032 }
22033
22034 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22035 include all of the information about the enumeration values also. Each
22036 enumerated type name/value is listed as a child of the enumerated type
22037 DIE. */
22038
22039 static dw_die_ref
22040 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22041 {
22042 dw_die_ref type_die = lookup_type_die (type);
22043 dw_die_ref orig_type_die = type_die;
22044
22045 if (type_die == NULL)
22046 {
22047 type_die = new_die (DW_TAG_enumeration_type,
22048 scope_die_for (type, context_die), type);
22049 equate_type_number_to_die (type, type_die);
22050 add_name_attribute (type_die, type_tag (type));
22051 if ((dwarf_version >= 4 || !dwarf_strict)
22052 && ENUM_IS_SCOPED (type))
22053 add_AT_flag (type_die, DW_AT_enum_class, 1);
22054 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22055 add_AT_flag (type_die, DW_AT_declaration, 1);
22056 if (!dwarf_strict)
22057 add_AT_unsigned (type_die, DW_AT_encoding,
22058 TYPE_UNSIGNED (type)
22059 ? DW_ATE_unsigned
22060 : DW_ATE_signed);
22061 }
22062 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22063 return type_die;
22064 else
22065 remove_AT (type_die, DW_AT_declaration);
22066
22067 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22068 given enum type is incomplete, do not generate the DW_AT_byte_size
22069 attribute or the DW_AT_element_list attribute. */
22070 if (TYPE_SIZE (type))
22071 {
22072 tree link;
22073
22074 if (!ENUM_IS_OPAQUE (type))
22075 TREE_ASM_WRITTEN (type) = 1;
22076 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22077 add_byte_size_attribute (type_die, type);
22078 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22079 add_alignment_attribute (type_die, type);
22080 if ((dwarf_version >= 3 || !dwarf_strict)
22081 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22082 {
22083 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22084 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22085 context_die);
22086 }
22087 if (TYPE_STUB_DECL (type) != NULL_TREE)
22088 {
22089 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22090 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22091 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22092 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22093 }
22094
22095 /* If the first reference to this type was as the return type of an
22096 inline function, then it may not have a parent. Fix this now. */
22097 if (type_die->die_parent == NULL)
22098 add_child_die (scope_die_for (type, context_die), type_die);
22099
22100 for (link = TYPE_VALUES (type);
22101 link != NULL; link = TREE_CHAIN (link))
22102 {
22103 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22104 tree value = TREE_VALUE (link);
22105
22106 gcc_assert (!ENUM_IS_OPAQUE (type));
22107 add_name_attribute (enum_die,
22108 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22109
22110 if (TREE_CODE (value) == CONST_DECL)
22111 value = DECL_INITIAL (value);
22112
22113 if (simple_type_size_in_bits (TREE_TYPE (value))
22114 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22115 {
22116 /* For constant forms created by add_AT_unsigned, DWARF
22117 consumers (GDB, elfutils, etc.) always zero-extend
22118 the value. Only when the actual value is negative
22119 do we need to use add_AT_int to generate a constant
22120 form that can represent negative values. */
22121 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22122 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22123 add_AT_unsigned (enum_die, DW_AT_const_value,
22124 (unsigned HOST_WIDE_INT) val);
22125 else
22126 add_AT_int (enum_die, DW_AT_const_value, val);
22127 }
22128 else
22129 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22130 that here. TODO: This should be re-worked to use correct
22131 signed/unsigned double tags for all cases. */
22132 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22133 }
22134
22135 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22136 if (TYPE_ARTIFICIAL (type)
22137 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22138 add_AT_flag (type_die, DW_AT_artificial, 1);
22139 }
22140 else
22141 add_AT_flag (type_die, DW_AT_declaration, 1);
22142
22143 add_pubtype (type, type_die);
22144
22145 return type_die;
22146 }
22147
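/* Illustrative sketch, not part of the compiler: a complete C enumeration
   yields one enumerator child per value; with non-strict DWARF the type DIE
   also carries DW_AT_encoding, and with DWARF 3+ a DW_AT_type for the
   underlying base type.  */
#if 0
enum color { RED = 1, GREEN = 2, BLUE = 256 };
/* DW_TAG_enumeration_type (with the enum type's DW_AT_byte_size) whose
   children are:
     DW_TAG_enumerator "RED"   DW_AT_const_value 1
     DW_TAG_enumerator "GREEN" DW_AT_const_value 2
     DW_TAG_enumerator "BLUE"  DW_AT_const_value 256  */
#endif
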
22148 /* Generate a DIE to represent either a real live formal parameter decl or to
22149 represent just the type of some formal parameter position in some function
22150 type.
22151
22152 Note that this routine is a bit unusual because its argument may be a
22153 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22154 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22155 node. If it's the former then this function is being called to output a
22156 DIE to represent a formal parameter object (or some inlining thereof). If
22157 it's the latter, then this function is only being called to output a
22158 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22159 argument type of some subprogram type.
22160 If EMIT_NAME_P is true, name and source coordinate attributes
22161 are emitted. */
22162
22163 static dw_die_ref
22164 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22165 dw_die_ref context_die)
22166 {
22167 tree node_or_origin = node ? node : origin;
22168 tree ultimate_origin;
22169 dw_die_ref parm_die = NULL;
22170
22171 if (DECL_P (node_or_origin))
22172 {
22173 parm_die = lookup_decl_die (node);
22174
22175 /* If the contexts differ, we may not be talking about the same
22176 thing.
22177 ??? When in LTO the DIE parent is the "abstract" copy and the
22178 context_die is the specification "copy". */
22179 if (parm_die
22180 && parm_die->die_parent != context_die
22181 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22182 || parm_die->die_parent->die_parent != context_die)
22183 && !in_lto_p)
22184 {
22185 gcc_assert (!DECL_ABSTRACT_P (node));
22186 /* This can happen when creating a concrete instance, in
22187 which case we need to create a new DIE that will get
22188 annotated with DW_AT_abstract_origin. */
22189 parm_die = NULL;
22190 }
22191
22192 if (parm_die && parm_die->die_parent == NULL)
22193 {
22194 /* Check that parm_die already has the right attributes that
22195 we would have added below. If any attributes are
22196 missing, fall through to add them. */
22197 if (! DECL_ABSTRACT_P (node_or_origin)
22198 && !get_AT (parm_die, DW_AT_location)
22199 && !get_AT (parm_die, DW_AT_const_value))
22200 /* We are missing location info, and are about to add it. */
22201 ;
22202 else
22203 {
22204 add_child_die (context_die, parm_die);
22205 return parm_die;
22206 }
22207 }
22208 }
22209
22210 /* If we have a previously generated DIE, use it, unless this is a
22211 concrete instance (origin != NULL), in which case we need a new
22212 DIE with a corresponding DW_AT_abstract_origin. */
22213 bool reusing_die;
22214 if (parm_die && origin == NULL)
22215 reusing_die = true;
22216 else
22217 {
22218 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22219 reusing_die = false;
22220 }
22221
22222 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22223 {
22224 case tcc_declaration:
22225 ultimate_origin = decl_ultimate_origin (node_or_origin);
22226 if (node || ultimate_origin)
22227 origin = ultimate_origin;
22228
22229 if (reusing_die)
22230 goto add_location;
22231
22232 if (origin != NULL)
22233 add_abstract_origin_attribute (parm_die, origin);
22234 else if (emit_name_p)
22235 add_name_and_src_coords_attributes (parm_die, node);
22236 if (origin == NULL
22237 || (! DECL_ABSTRACT_P (node_or_origin)
22238 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22239 decl_function_context
22240 (node_or_origin))))
22241 {
22242 tree type = TREE_TYPE (node_or_origin);
22243 if (decl_by_reference_p (node_or_origin))
22244 add_type_attribute (parm_die, TREE_TYPE (type),
22245 TYPE_UNQUALIFIED,
22246 false, context_die);
22247 else
22248 add_type_attribute (parm_die, type,
22249 decl_quals (node_or_origin),
22250 false, context_die);
22251 }
22252 if (origin == NULL && DECL_ARTIFICIAL (node))
22253 add_AT_flag (parm_die, DW_AT_artificial, 1);
22254 add_location:
22255 if (node && node != origin)
22256 equate_decl_number_to_die (node, parm_die);
22257 if (! DECL_ABSTRACT_P (node_or_origin))
22258 add_location_or_const_value_attribute (parm_die, node_or_origin,
22259 node == NULL);
22260
22261 break;
22262
22263 case tcc_type:
22264 /* We were called with some kind of a ..._TYPE node. */
22265 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22266 context_die);
22267 break;
22268
22269 default:
22270 gcc_unreachable ();
22271 }
22272
22273 return parm_die;
22274 }
22275
22276 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22277 children DW_TAG_formal_parameter DIEs representing the arguments of the
22278 parameter pack.
22279
22280 PARM_PACK must be a function parameter pack.
22281 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22282 must point to the subsequent arguments of the function PACK_ARG belongs to.
22283 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22284 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22285 following the last one for which a DIE was generated. */
22286
22287 static dw_die_ref
22288 gen_formal_parameter_pack_die (tree parm_pack,
22289 tree pack_arg,
22290 dw_die_ref subr_die,
22291 tree *next_arg)
22292 {
22293 tree arg;
22294 dw_die_ref parm_pack_die;
22295
22296 gcc_assert (parm_pack
22297 && lang_hooks.function_parameter_pack_p (parm_pack)
22298 && subr_die);
22299
22300 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22301 add_src_coords_attributes (parm_pack_die, parm_pack);
22302
22303 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22304 {
22305 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22306 parm_pack))
22307 break;
22308 gen_formal_parameter_die (arg, NULL,
22309 false /* Don't emit name attribute. */,
22310 parm_pack_die);
22311 }
22312 if (next_arg)
22313 *next_arg = arg;
22314 return parm_pack_die;
22315 }
22316
22317 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22318 at the end of an (ANSI prototyped) formal parameters list. */
22319
22320 static void
22321 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22322 {
22323 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22324 }
22325
22326 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22327 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22328 parameters as specified in some function type specification (except for
22329 those which appear as part of a function *definition*). */
22330
22331 static void
22332 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22333 {
22334 tree link;
22335 tree formal_type = NULL;
22336 tree first_parm_type;
22337 tree arg;
22338
22339 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22340 {
22341 arg = DECL_ARGUMENTS (function_or_method_type);
22342 function_or_method_type = TREE_TYPE (function_or_method_type);
22343 }
22344 else
22345 arg = NULL_TREE;
22346
22347 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22348
22349 /* Make our first pass over the list of formal parameter types and output a
22350 DW_TAG_formal_parameter DIE for each one. */
22351 for (link = first_parm_type; link; )
22352 {
22353 dw_die_ref parm_die;
22354
22355 formal_type = TREE_VALUE (link);
22356 if (formal_type == void_type_node)
22357 break;
22358
22359 /* Output a (nameless) DIE to represent the formal parameter itself. */
22360 parm_die = gen_formal_parameter_die (formal_type, NULL,
22361 true /* Emit name attribute. */,
22362 context_die);
22363 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22364 && link == first_parm_type)
22365 {
22366 add_AT_flag (parm_die, DW_AT_artificial, 1);
22367 if (dwarf_version >= 3 || !dwarf_strict)
22368 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22369 }
22370 else if (arg && DECL_ARTIFICIAL (arg))
22371 add_AT_flag (parm_die, DW_AT_artificial, 1);
22372
22373 link = TREE_CHAIN (link);
22374 if (arg)
22375 arg = DECL_CHAIN (arg);
22376 }
22377
22378 /* If this function type has an ellipsis, add a
22379 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22380 if (formal_type != void_type_node)
22381 gen_unspecified_parameters_die (function_or_method_type, context_die);
22382
22383 /* Make our second (and final) pass over the list of formal parameter types
22384 and output DIEs to represent those types (as necessary). */
22385 for (link = TYPE_ARG_TYPES (function_or_method_type);
22386 link && TREE_VALUE (link);
22387 link = TREE_CHAIN (link))
22388 gen_type_die (TREE_VALUE (link), context_die);
22389 }
22390
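/* Illustrative sketch, not part of the compiler: for the function type
   below, the DW_TAG_subroutine_type DIE gets a nameless
   DW_TAG_formal_parameter child for 'const char *' followed by a
   DW_TAG_unspecified_parameters child standing in for the ellipsis.  */
#if 0
int (*handler) (const char *, ...);
#endif
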
22391 /* We want to generate the DIE for TYPE so that we can generate the
22392 die for MEMBER, which has been defined; we will need to refer back
22393 to the member declaration nested within TYPE. If we're trying to
22394 generate minimal debug info for TYPE, processing TYPE won't do the
22395 trick; we need to attach the member declaration by hand. */
22396
22397 static void
22398 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22399 {
22400 gen_type_die (type, context_die);
22401
22402 /* If we're trying to avoid duplicate debug info, we may not have
22403 emitted the member decl for this function. Emit it now. */
22404 if (TYPE_STUB_DECL (type)
22405 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22406 && ! lookup_decl_die (member))
22407 {
22408 dw_die_ref type_die;
22409 gcc_assert (!decl_ultimate_origin (member));
22410
22411 type_die = lookup_type_die_strip_naming_typedef (type);
22412 if (TREE_CODE (member) == FUNCTION_DECL)
22413 gen_subprogram_die (member, type_die);
22414 else if (TREE_CODE (member) == FIELD_DECL)
22415 {
22416 /* Ignore the nameless fields that are used to skip bits but handle
22417 C++ anonymous unions and structs. */
22418 if (DECL_NAME (member) != NULL_TREE
22419 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22420 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22421 {
22422 struct vlr_context vlr_ctx = {
22423 DECL_CONTEXT (member), /* struct_type */
22424 NULL_TREE /* variant_part_offset */
22425 };
22426 gen_type_die (member_declared_type (member), type_die);
22427 gen_field_die (member, &vlr_ctx, type_die);
22428 }
22429 }
22430 else
22431 gen_variable_die (member, NULL_TREE, type_die);
22432 }
22433 }
22434 \f
22435 /* Forward declare these functions, because they are mutually recursive
22436 with their set_block_* pairing functions. */
22437 static void set_decl_origin_self (tree);
22438
22439 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22440 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22441 that it points to the node itself, thus indicating that the node is its
22442 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22443 the given node is NULL, recursively descend the decl/block tree which
22444 it is the root of, and for each other ..._DECL or BLOCK node contained
22445 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22446 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22447 values to point to themselves. */
22448
22449 static void
22450 set_block_origin_self (tree stmt)
22451 {
22452 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22453 {
22454 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22455
22456 {
22457 tree local_decl;
22458
22459 for (local_decl = BLOCK_VARS (stmt);
22460 local_decl != NULL_TREE;
22461 local_decl = DECL_CHAIN (local_decl))
22462 /* Do not recurse on nested functions since the inlining status
22463 of parent and child can be different as per the DWARF spec. */
22464 if (TREE_CODE (local_decl) != FUNCTION_DECL
22465 && !DECL_EXTERNAL (local_decl))
22466 set_decl_origin_self (local_decl);
22467 }
22468
22469 {
22470 tree subblock;
22471
22472 for (subblock = BLOCK_SUBBLOCKS (stmt);
22473 subblock != NULL_TREE;
22474 subblock = BLOCK_CHAIN (subblock))
22475 set_block_origin_self (subblock); /* Recurse. */
22476 }
22477 }
22478 }
22479
22480 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22481 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22482 node so that it points to the node itself, thus indicating that the
22483 node represents its own (abstract) origin. Additionally, if the
22484 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22485 the decl/block tree of which the given node is the root, and for
22486 each other ..._DECL or BLOCK node contained therein whose
22487 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22488 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22489 point to themselves. */
22490
22491 static void
22492 set_decl_origin_self (tree decl)
22493 {
22494 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22495 {
22496 DECL_ABSTRACT_ORIGIN (decl) = decl;
22497 if (TREE_CODE (decl) == FUNCTION_DECL)
22498 {
22499 tree arg;
22500
22501 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22502 DECL_ABSTRACT_ORIGIN (arg) = arg;
22503 if (DECL_INITIAL (decl) != NULL_TREE
22504 && DECL_INITIAL (decl) != error_mark_node)
22505 set_block_origin_self (DECL_INITIAL (decl));
22506 }
22507 }
22508 }
22509 \f
22510 /* Mark the early DIE for DECL as the abstract instance. */
22511
22512 static void
22513 dwarf2out_abstract_function (tree decl)
22514 {
22515 dw_die_ref old_die;
22516
22517 /* Make sure we have the actual abstract inline, not a clone. */
22518 decl = DECL_ORIGIN (decl);
22519
22520 if (DECL_IGNORED_P (decl))
22521 return;
22522
22523 /* In LTO we're all set. We already created abstract instances
22524 early and we want to avoid creating a concrete instance of it
22525 if we don't output it. */
22526 if (in_lto_p)
22527 return;
22528
22529 old_die = lookup_decl_die (decl);
22530 gcc_assert (old_die != NULL);
22531 if (get_AT (old_die, DW_AT_inline))
22532 /* We've already generated the abstract instance. */
22533 return;
22534
22535 /* Go ahead and put DW_AT_inline on the DIE. */
22536 if (DECL_DECLARED_INLINE_P (decl))
22537 {
22538 if (cgraph_function_possibly_inlined_p (decl))
22539 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22540 else
22541 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22542 }
22543 else
22544 {
22545 if (cgraph_function_possibly_inlined_p (decl))
22546 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22547 else
22548 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22549 }
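/* In other words, the mapping chosen above is roughly:
     declared inline, possibly inlined      -> DW_INL_declared_inlined
     declared inline, not inlined           -> DW_INL_declared_not_inlined
     not declared inline, possibly inlined  -> DW_INL_inlined
     not declared inline, not inlined       -> DW_INL_not_inlined  */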
22550
22551 if (DECL_DECLARED_INLINE_P (decl)
22552 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22553 add_AT_flag (old_die, DW_AT_artificial, 1);
22554
22555 set_decl_origin_self (decl);
22556 }
22557
22558 /* Helper function of premark_used_types() which gets called through
22559 htab_traverse.
22560
22561 Marks the DIE of the given TYPE as perennial, so it never gets
22562 marked as unused by prune_unused_types. */
22563
22564 bool
22565 premark_used_types_helper (tree const &type, void *)
22566 {
22567 dw_die_ref die;
22568
22569 die = lookup_type_die (type);
22570 if (die != NULL)
22571 die->die_perennial_p = 1;
22572 return true;
22573 }
22574
22575 /* Helper function of premark_types_used_by_global_vars which gets called
22576 through htab_traverse.
22577
22578 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22579 marked as unused by prune_unused_types. The DIE of the type is marked
22580 only if the global variable using the type will actually be emitted. */
22581
22582 int
22583 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22584 void *)
22585 {
22586 struct types_used_by_vars_entry *entry;
22587 dw_die_ref die;
22588
22589 entry = (struct types_used_by_vars_entry *) *slot;
22590 gcc_assert (entry->type != NULL
22591 && entry->var_decl != NULL);
22592 die = lookup_type_die (entry->type);
22593 if (die)
22594 {
22595 /* Ask cgraph if the global variable really is to be emitted.
22596 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22597 varpool_node *node = varpool_node::get (entry->var_decl);
22598 if (node && node->definition)
22599 {
22600 die->die_perennial_p = 1;
22601 /* Keep the parent DIEs as well. */
22602 while ((die = die->die_parent) && die->die_perennial_p == 0)
22603 die->die_perennial_p = 1;
22604 }
22605 }
22606 return 1;
22607 }
22608
22609 /* Mark all members of used_types_hash as perennial. */
22610
22611 static void
22612 premark_used_types (struct function *fun)
22613 {
22614 if (fun && fun->used_types_hash)
22615 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22616 }
22617
22618 /* Mark all members of types_used_by_vars_entry as perennial. */
22619
22620 static void
22621 premark_types_used_by_global_vars (void)
22622 {
22623 if (types_used_by_vars_hash)
22624 types_used_by_vars_hash
22625 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22626 }
22627
22628 /* Mark all variables used by the symtab as perennial. */
22629
22630 static void
22631 premark_used_variables (void)
22632 {
22633 /* Mark DIEs in the symtab as used. */
22634 varpool_node *var;
22635 FOR_EACH_VARIABLE (var)
22636 {
22637 dw_die_ref die = lookup_decl_die (var->decl);
22638 if (die)
22639 die->die_perennial_p = 1;
22640 }
22641 }
22642
22643 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22644 for CA_LOC call arg loc node. */
22645
22646 static dw_die_ref
22647 gen_call_site_die (tree decl, dw_die_ref subr_die,
22648 struct call_arg_loc_node *ca_loc)
22649 {
22650 dw_die_ref stmt_die = NULL, die;
22651 tree block = ca_loc->block;
22652
22653 while (block
22654 && block != DECL_INITIAL (decl)
22655 && TREE_CODE (block) == BLOCK)
22656 {
22657 stmt_die = lookup_block_die (block);
22658 if (stmt_die)
22659 break;
22660 block = BLOCK_SUPERCONTEXT (block);
22661 }
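/* The walk above looked for the innermost enclosing BLOCK that already
   has a DIE; if none was found, the call site DIE is parented directly
   under SUBR_DIE below.  */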
22662 if (stmt_die == NULL)
22663 stmt_die = subr_die;
22664 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22665 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22666 if (ca_loc->tail_call_p)
22667 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22668 if (ca_loc->symbol_ref)
22669 {
22670 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22671 if (tdie)
22672 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22673 else
22674 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22675 false);
22676 }
22677 return die;
22678 }
22679
22680 /* Generate a DIE to represent a declared function (either file-scope or
22681 block-local). */
22682
22683 static void
22684 gen_subprogram_die (tree decl, dw_die_ref context_die)
22685 {
22686 tree origin = decl_ultimate_origin (decl);
22687 dw_die_ref subr_die;
22688 dw_die_ref old_die = lookup_decl_die (decl);
22689
22690 /* This function gets called multiple times for different stages of
22691 the debug process. For example, for func() in this code:
22692
22693 namespace S
22694 {
22695 void func() { ... }
22696 }
22697
22698 ...we get called 4 times. Twice in early debug and twice in
22699 late debug:
22700
22701 Early debug
22702 -----------
22703
22704 1. Once while generating func() within the namespace. This is
22705 the declaration. The declaration bit below is set, as the
22706 context is the namespace.
22707
22708 A new DIE will be generated with DW_AT_declaration set.
22709
22710 2. Once for func() itself. This is the specification. The
22711 declaration bit below is clear as the context is the CU.
22712
22713 We will use the cached DIE from (1) to create a new DIE with
22714 DW_AT_specification pointing to the declaration in (1).
22715
22716 Late debug via rest_of_handle_final()
22717 -------------------------------------
22718
22719 3. Once generating func() within the namespace. This is also the
22720 declaration, as in (1), but this time we will early exit below
22721 as we have a cached DIE and a declaration needs no additional
22722 annotations (no locations), as the source declaration line
22723 info is enough.
22724
22725 4. Once for func() itself. As in (2), this is the specification,
22726 but this time we will re-use the cached DIE, and just annotate
22727 it with the location information that should now be available.
22728
22729 For something without namespaces, but with abstract instances, we
22730 are also called multiple times:
22731
22732 class Base
22733 {
22734 public:
22735 Base (); // constructor declaration (1)
22736 };
22737
22738 Base::Base () { } // constructor specification (2)
22739
22740 Early debug
22741 -----------
22742
22743 1. Once for the Base() constructor by virtue of it being a
22744 member of the Base class. This is done via
22745 rest_of_type_compilation.
22746
22747 This is a declaration, so a new DIE will be created with
22748 DW_AT_declaration.
22749
22750 2. Once for the Base() constructor definition, but this time
22751 while generating the abstract instance of the base
22752 constructor (__base_ctor) which is being generated via early
22753 debug of reachable functions.
22754
22755 Even though we have a cached version of the declaration (1),
22756 we will create a DW_AT_specification of the declaration DIE
22757 in (1).
22758
22759 3. Once for the __base_ctor itself, but this time we generate
22760 a DW_AT_abstract_origin version of the DW_AT_specification in
22761 (2).
22762
22763 Late debug via rest_of_handle_final
22764 -----------------------------------
22765
22766 4. One final time for the __base_ctor (which will have a cached
22767 DIE with DW_AT_abstract_origin created in (3)). This time,
22768 we will just annotate the location information now
22769 available.
22770 */
22771 int declaration = (current_function_decl != decl
22772 || class_or_namespace_scope_p (context_die));
22773
22774 /* A declaration that has been previously dumped needs no
22775 additional information. */
22776 if (old_die && declaration)
22777 return;
22778
22779 /* Now that the C++ front end lazily declares artificial member fns, we
22780 might need to retrofit the declaration into its class. */
22781 if (!declaration && !origin && !old_die
22782 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22783 && !class_or_namespace_scope_p (context_die)
22784 && debug_info_level > DINFO_LEVEL_TERSE)
22785 old_die = force_decl_die (decl);
22786
22787 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22788 if (origin != NULL)
22789 {
22790 gcc_assert (!declaration || local_scope_p (context_die));
22791
22792 /* Fix up die_parent for the abstract instance of a nested
22793 inline function. */
22794 if (old_die && old_die->die_parent == NULL)
22795 add_child_die (context_die, old_die);
22796
22797 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22798 {
22799 /* If we have a DW_AT_abstract_origin we have a working
22800 cached version. */
22801 subr_die = old_die;
22802 }
22803 else
22804 {
22805 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22806 add_abstract_origin_attribute (subr_die, origin);
22807 /* This is where the actual code for a cloned function is.
22808 Emit the linkage name attribute for it. This helps
22809 debuggers to, e.g., set breakpoints in
22810 constructors/destructors when the user asks "break
22811 K::K". */
22812 add_linkage_name (subr_die, decl);
22813 }
22814 }
22815 /* A cached copy, possibly from early dwarf generation. Reuse as
22816 much as possible. */
22817 else if (old_die)
22818 {
22819 if (!get_AT_flag (old_die, DW_AT_declaration)
22820 /* We can have a normal definition following an inline one in the
22821 case of redefinition of GNU C extern inlines.
22822 It seems reasonable to use AT_specification in this case. */
22823 && !get_AT (old_die, DW_AT_inline))
22824 {
22825 /* Detect and ignore this case, where we are trying to output
22826 something we have already output. */
22827 if (get_AT (old_die, DW_AT_low_pc)
22828 || get_AT (old_die, DW_AT_ranges))
22829 return;
22830
22831 /* If we have no location information, this must be a
22832 partially generated DIE from early dwarf generation.
22833 Fall through and generate it. */
22834 }
22835
22836 /* If the definition comes from the same place as the declaration,
22837 maybe use the old DIE. We always want the DIE for this function
22838 that has the *_pc attributes to be under comp_unit_die so the
22839 debugger can find it. We also need to do this for abstract
22840 instances of inlines, since the spec requires the out-of-line copy
22841 to have the same parent. For local class methods, this doesn't
22842 apply; we just use the old DIE. */
22843 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22844 struct dwarf_file_data * file_index = lookup_filename (s.file);
22845 if (((is_unit_die (old_die->die_parent)
22846 /* This condition fixes the inconsistency/ICE with the
22847 following Fortran test (or some derivative thereof) while
22848 building libgfortran:
22849
22850 module some_m
22851 contains
22852 logical function funky (FLAG)
22853 funky = .true.
22854 end function
22855 end module
22856 */
22857 || (old_die->die_parent
22858 && old_die->die_parent->die_tag == DW_TAG_module)
22859 || local_scope_p (old_die->die_parent)
22860 || context_die == NULL)
22861 && (DECL_ARTIFICIAL (decl)
22862 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22863 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22864 == (unsigned) s.line)
22865 && (!debug_column_info
22866 || s.column == 0
22867 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22868 == (unsigned) s.column)))))
22869 /* With LTO, if there's an abstract instance for
22870 the old DIE, this is a concrete instance, so
22871 re-use the DIE. */
22872 || get_AT (old_die, DW_AT_abstract_origin))
22873 {
22874 subr_die = old_die;
22875
22876 /* Clear out the declaration attribute, but leave the
22877 parameters so they can be augmented with location
22878 information later. Unless this was a declaration, in
22879 which case wipe out the nameless parameters and recreate
22880 them further down. */
22881 if (remove_AT (subr_die, DW_AT_declaration))
22882 {
22883
22884 remove_AT (subr_die, DW_AT_object_pointer);
22885 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22886 }
22887 }
22888 /* Make a specification pointing to the previously built
22889 declaration. */
22890 else
22891 {
22892 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22893 add_AT_specification (subr_die, old_die);
22894 add_pubname (decl, subr_die);
22895 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22896 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22897 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22898 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22899 if (debug_column_info
22900 && s.column
22901 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22902 != (unsigned) s.column))
22903 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22904
22905 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22906 emit the real type on the definition die. */
22907 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22908 {
22909 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22910 if (die == auto_die || die == decltype_auto_die)
22911 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22912 TYPE_UNQUALIFIED, false, context_die);
22913 }
22914
22915 /* When we process the method declaration, we haven't seen
22916 the out-of-class defaulted definition yet, so we have to
22917 recheck now. */
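/* For example (a hypothetical C++ snippet):
     struct S { S (); };
     S::S () = default;
   Only the out-of-class definition reveals that the constructor is
   defaulted, which is why DW_AT_defaulted may still be missing on the
   cached declaration DIE at this point.  */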
22918 if ((dwarf_version >= 5 || ! dwarf_strict)
22919 && !get_AT (subr_die, DW_AT_defaulted))
22920 {
22921 int defaulted
22922 = lang_hooks.decls.decl_dwarf_attribute (decl,
22923 DW_AT_defaulted);
22924 if (defaulted != -1)
22925 {
22926 /* Other values must have been handled before. */
22927 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22928 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22929 }
22930 }
22931 }
22932 }
22933 /* Create a fresh DIE for anything else. */
22934 else
22935 {
22936 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22937
22938 if (TREE_PUBLIC (decl))
22939 add_AT_flag (subr_die, DW_AT_external, 1);
22940
22941 add_name_and_src_coords_attributes (subr_die, decl);
22942 add_pubname (decl, subr_die);
22943 if (debug_info_level > DINFO_LEVEL_TERSE)
22944 {
22945 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22946 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22947 TYPE_UNQUALIFIED, false, context_die);
22948 }
22949
22950 add_pure_or_virtual_attribute (subr_die, decl);
22951 if (DECL_ARTIFICIAL (decl))
22952 add_AT_flag (subr_die, DW_AT_artificial, 1);
22953
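/* Note: on a FUNCTION_DECL, TREE_THIS_VOLATILE means the function does
   not return (e.g. it was declared noreturn), hence the DW_AT_noreturn
   flag below.  */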
22954 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22955 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22956
22957 add_alignment_attribute (subr_die, decl);
22958
22959 add_accessibility_attribute (subr_die, decl);
22960 }
22961
22962 /* Unless we have an existing non-declaration DIE, equate the new
22963 DIE. */
22964 if (!old_die || is_declaration_die (old_die))
22965 equate_decl_number_to_die (decl, subr_die);
22966
22967 if (declaration)
22968 {
22969 if (!old_die || !get_AT (old_die, DW_AT_inline))
22970 {
22971 add_AT_flag (subr_die, DW_AT_declaration, 1);
22972
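/* The decl_dwarf_attribute language hook queries below let the C++
   front end report properties of the declaration.  For illustration,
   with hypothetical members such as
     struct A {
       explicit A (int);        // DW_AT_explicit
       A (const A &) = delete;  // DW_AT_deleted
       void f () &;             // DW_AT_reference
       void g () &&;            // DW_AT_rvalue_reference
     };
   each declaration gets the attribute noted in its comment.  */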
22973 /* If this is an explicit function declaration then generate
22974 a DW_AT_explicit attribute. */
22975 if ((dwarf_version >= 3 || !dwarf_strict)
22976 && lang_hooks.decls.decl_dwarf_attribute (decl,
22977 DW_AT_explicit) == 1)
22978 add_AT_flag (subr_die, DW_AT_explicit, 1);
22979
22980 /* If this is a C++11 deleted special function member then generate
22981 a DW_AT_deleted attribute. */
22982 if ((dwarf_version >= 5 || !dwarf_strict)
22983 && lang_hooks.decls.decl_dwarf_attribute (decl,
22984 DW_AT_deleted) == 1)
22985 add_AT_flag (subr_die, DW_AT_deleted, 1);
22986
22987 /* If this is a C++11 defaulted special function member then
22988 generate a DW_AT_defaulted attribute. */
22989 if (dwarf_version >= 5 || !dwarf_strict)
22990 {
22991 int defaulted
22992 = lang_hooks.decls.decl_dwarf_attribute (decl,
22993 DW_AT_defaulted);
22994 if (defaulted != -1)
22995 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22996 }
22997
22998 /* If this is a C++11 non-static member function with & ref-qualifier
22999 then generate a DW_AT_reference attribute. */
23000 if ((dwarf_version >= 5 || !dwarf_strict)
23001 && lang_hooks.decls.decl_dwarf_attribute (decl,
23002 DW_AT_reference) == 1)
23003 add_AT_flag (subr_die, DW_AT_reference, 1);
23004
23005 /* If this is a C++11 non-static member function with &&
23006 ref-qualifier then generate a DW_AT_reference attribute. */
23007 if ((dwarf_version >= 5 || !dwarf_strict)
23008 && lang_hooks.decls.decl_dwarf_attribute (decl,
23009 DW_AT_rvalue_reference)
23010 == 1)
23011 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23012 }
23013 }
23014 /* For non-DECL_EXTERNAL decls, if range information is available, fill
23015 the DIE with it. */
23016 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23017 {
23018 HOST_WIDE_INT cfa_fb_offset;
23019
23020 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23021
23022 if (!crtl->has_bb_partition)
23023 {
23024 dw_fde_ref fde = fun->fde;
23025 if (fde->dw_fde_begin)
23026 {
23027 /* We have already generated the labels. */
23028 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23029 fde->dw_fde_end, false);
23030 }
23031 else
23032 {
23033 /* Create start/end labels and add the range. */
23034 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23035 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23036 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23037 current_function_funcdef_no);
23038 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23039 current_function_funcdef_no);
23040 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23041 false);
23042 }
23043
23044 #if VMS_DEBUGGING_INFO
23045 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23046 Section 2.3 Prologue and Epilogue Attributes:
23047 When a breakpoint is set on entry to a function, it is generally
23048 desirable for execution to be suspended, not on the very first
23049 instruction of the function, but rather at a point after the
23050 function's frame has been set up, after any language defined local
23051 declaration processing has been completed, and before execution of
23052 the first statement of the function begins. Debuggers generally
23053 cannot properly determine where this point is. Similarly for a
23054 breakpoint set on exit from a function. The prologue and epilogue
23055 attributes allow a compiler to communicate the location(s) to use. */
23056
23057 {
23058 if (fde->dw_fde_vms_end_prologue)
23059 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23060 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23061
23062 if (fde->dw_fde_vms_begin_epilogue)
23063 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23064 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23065 }
23066 #endif
23067
23068 }
23069 else
23070 {
23071 /* Generate pubnames entries for the split function code ranges. */
23072 dw_fde_ref fde = fun->fde;
23073
23074 if (fde->dw_fde_second_begin)
23075 {
23076 if (dwarf_version >= 3 || !dwarf_strict)
23077 {
23078 /* We should use ranges for non-contiguous code section
23079 addresses. Use the actual code range for the initial
23080 section, since the HOT/COLD labels might precede an
23081 alignment offset. */
23082 bool range_list_added = false;
23083 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23084 fde->dw_fde_end, &range_list_added,
23085 false);
23086 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23087 fde->dw_fde_second_end,
23088 &range_list_added, false);
23089 if (range_list_added)
23090 add_ranges (NULL);
23091 }
23092 else
23093 {
23094 /* There is no real support in DWARF 2 for this, so we make
23095 a workaround. First, emit the pubname for the segment
23096 containing the function label. Then make and emit a
23097 simplified subprogram DIE for the second segment with the
23098 name prefixed by __second_sect_of_. We use the same
23099 linkage name for the second DIE so that gdb will find both
23100 sections when given "b foo". */
23101 const char *name = NULL;
23102 tree decl_name = DECL_NAME (decl);
23103 dw_die_ref seg_die;
23104
23105 /* Do the 'primary' section. */
23106 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23107 fde->dw_fde_end, false);
23108
23109 /* Build a minimal DIE for the secondary section. */
23110 seg_die = new_die (DW_TAG_subprogram,
23111 subr_die->die_parent, decl);
23112
23113 if (TREE_PUBLIC (decl))
23114 add_AT_flag (seg_die, DW_AT_external, 1);
23115
23116 if (decl_name != NULL
23117 && IDENTIFIER_POINTER (decl_name) != NULL)
23118 {
23119 name = dwarf2_name (decl, 1);
23120 if (! DECL_ARTIFICIAL (decl))
23121 add_src_coords_attributes (seg_die, decl);
23122
23123 add_linkage_name (seg_die, decl);
23124 }
23125 gcc_assert (name != NULL);
23126 add_pure_or_virtual_attribute (seg_die, decl);
23127 if (DECL_ARTIFICIAL (decl))
23128 add_AT_flag (seg_die, DW_AT_artificial, 1);
23129
23130 name = concat ("__second_sect_of_", name, NULL);
23131 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23132 fde->dw_fde_second_end, false);
23133 add_name_attribute (seg_die, name);
23134 if (want_pubnames ())
23135 add_pubname_string (name, seg_die);
23136 }
23137 }
23138 else
23139 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23140 false);
23141 }
23142
23143 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23144
23145 /* We define the "frame base" as the function's CFA. This is more
23146 convenient for several reasons: (1) It's stable across the prologue
23147 and epilogue, which makes it better than just a frame pointer,
23148 (2) With dwarf3, there exists a one-byte encoding that allows us
23149 to reference the .debug_frame data by proxy, but failing that,
23150 (3) We can at least reuse the code inspection and interpretation
23151 code that determines the CFA position at various points in the
23152 function. */
23153 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23154 {
23155 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23156 add_AT_loc (subr_die, DW_AT_frame_base, op);
23157 }
23158 else
23159 {
23160 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23161 if (list->dw_loc_next)
23162 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23163 else
23164 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23165 }
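/* For illustration, with DWARF 3+ and DWARF-based unwind info the
   attribute added above typically shows up in readelf output as
     DW_AT_frame_base : 1 byte block: 9c (DW_OP_call_frame_cfa)
   (0x9c being the DW_OP_call_frame_cfa opcode); otherwise a location
   list describing the CFA in terms of frame/stack registers is used.  */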
23166
23167 /* Compute a displacement from the "steady-state frame pointer" to
23168 the CFA. The former is what all stack slots and argument slots
23169 will reference in the rtl; the latter is what we've told the
23170 debugger about. We'll need to adjust all frame_base references
23171 by this displacement. */
23172 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23173
23174 if (fun->static_chain_decl)
23175 {
23176 /* DWARF requires here a location expression that computes the
23177 address of the enclosing subprogram's frame base. The machinery
23178 in tree-nested.c is supposed to store this specific address in the
23179 last field of the FRAME record. */
23180 const tree frame_type
23181 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23182 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23183
23184 tree fb_expr
23185 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23186 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23187 fb_expr, fb_decl, NULL_TREE);
23188
23189 add_AT_location_description (subr_die, DW_AT_static_link,
23190 loc_list_from_tree (fb_expr, 0, NULL));
23191 }
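/* For illustration (a rough sketch; the details are up to tree-nested.c):
   for nested-function code such as
     void outer (void)
     {
       int x = 0;
       void inner (void) { x++; }
       inner ();
     }
   the FRAME struct built for OUTER carries the enclosing frame base in
   its last field, and the DW_AT_static_link expression built above
   loads it through INNER's static chain.  */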
23192
23193 resolve_variable_values ();
23194 }
23195
23196 /* Generate child DIEs for template parameters. */
23197 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23198 gen_generic_params_dies (decl);
23199
23200 /* Now output descriptions of the arguments for this function. This gets
23201 (unnecessarily?) complex because the DECL_ARGUMENTS list
23202 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23203 `...' at the end of the formal parameter list. In order to find out if
23204 there was a trailing ellipsis or not, we must instead look at the type
23205 associated with the FUNCTION_DECL. This will be a node of type
23206 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23207 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23208 an ellipsis at the end. */
23209
23210 /* In the case where we are describing a mere function declaration, all we
23211 need to do here (and all we *can* do here) is to describe the *types* of
23212 its formal parameters. */
23213 if (debug_info_level <= DINFO_LEVEL_TERSE)
23214 ;
23215 else if (declaration)
23216 gen_formal_types_die (decl, subr_die);
23217 else
23218 {
23219 /* Generate DIEs to represent all known formal parameters. */
23220 tree parm = DECL_ARGUMENTS (decl);
23221 tree generic_decl = early_dwarf
23222 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23223 tree generic_decl_parm = generic_decl
23224 ? DECL_ARGUMENTS (generic_decl)
23225 : NULL;
23226
23227 /* Now we want to walk the list of parameters of the function and
23228 emit their relevant DIEs.
23229
23230 We consider the case of DECL being an instance of a generic function
23231 as well as it being a normal function.
23232
23233 If DECL is an instance of a generic function we walk the
23234 parameters of the generic function declaration _and_ the parameters of
23235 DECL itself. This is useful because we want to emit specific DIEs for
23236 function parameter packs and those are declared as part of the
23237 generic function declaration. In that particular case,
23238 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23239 That DIE has child DIEs representing the set of arguments
23240 of the pack. Note that the set of pack arguments can be empty.
23241 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23242 child DIEs.
23243
23244 Otherwise, we just consider the parameters of DECL. */
23245 while (generic_decl_parm || parm)
23246 {
23247 if (generic_decl_parm
23248 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23249 gen_formal_parameter_pack_die (generic_decl_parm,
23250 parm, subr_die,
23251 &parm);
23252 else if (parm)
23253 {
23254 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23255
23256 if (early_dwarf
23257 && parm == DECL_ARGUMENTS (decl)
23258 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23259 && parm_die
23260 && (dwarf_version >= 3 || !dwarf_strict))
23261 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23262
23263 parm = DECL_CHAIN (parm);
23264 }
23265
23266 if (generic_decl_parm)
23267 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23268 }
23269
23270 /* Decide whether we need an unspecified_parameters DIE at the end.
23271 There are two cases where we do this: 1) the ANSI `...' declaration,
23272 which is detectable when the end of the arg list is not a
23273 void_type_node; 2) an unprototyped function declaration (not a
23274 definition), which just means that we have no info about the
23275 parameters at all. */
23276 if (early_dwarf)
23277 {
23278 if (prototype_p (TREE_TYPE (decl)))
23279 {
23280 /* This is the prototyped case; check for a trailing ellipsis. */
23281 if (stdarg_p (TREE_TYPE (decl)))
23282 gen_unspecified_parameters_die (decl, subr_die);
23283 }
23284 else if (DECL_INITIAL (decl) == NULL_TREE)
23285 gen_unspecified_parameters_die (decl, subr_die);
23286 }
23287 }
23288
23289 if (subr_die != old_die)
23290 /* Add the calling convention attribute if requested. */
23291 add_calling_convention_attribute (subr_die, decl);
23292
23293 /* Output Dwarf info for all of the stuff within the body of the function
23294 (if it has one - it may be just a declaration).
23295
23296 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23297 a function. This BLOCK actually represents the outermost binding contour
23298 for the function, i.e. the contour in which the function's formal
23299 parameters and labels get declared. Curiously, it appears that the front
23300 end doesn't actually put the PARM_DECL nodes for the current function onto
23301 the BLOCK_VARS list for this outer scope; they are strung off of the
23302 DECL_ARGUMENTS list for the function instead.
23303
23304 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23305 the LABEL_DECL nodes for the function, however, and we output DWARF info
23306 for those in decls_for_scope. Just within the `outer_scope' there will be
23307 a BLOCK node representing the function's outermost pair of curly braces,
23308 and any blocks used for the base and member initializers of a C++
23309 constructor function. */
23310 tree outer_scope = DECL_INITIAL (decl);
23311 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23312 {
23313 int call_site_note_count = 0;
23314 int tail_call_site_note_count = 0;
23315
23316 /* Emit a DW_TAG_variable DIE for a named return value. */
23317 if (DECL_NAME (DECL_RESULT (decl)))
23318 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23319
23320 /* The first time through decls_for_scope we will generate the
23321 DIEs for the locals. The second time, we fill in the
23322 location info. */
23323 decls_for_scope (outer_scope, subr_die);
23324
23325 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23326 {
23327 struct call_arg_loc_node *ca_loc;
23328 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23329 {
23330 dw_die_ref die = NULL;
23331 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23332 rtx arg, next_arg;
23333 tree arg_decl = NULL_TREE;
23334
23335 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23336 ? XEXP (ca_loc->call_arg_loc_note, 0)
23337 : NULL_RTX);
23338 arg; arg = next_arg)
23339 {
23340 dw_loc_descr_ref reg, val;
23341 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23342 dw_die_ref cdie, tdie = NULL;
23343
23344 next_arg = XEXP (arg, 1);
23345 if (REG_P (XEXP (XEXP (arg, 0), 0))
23346 && next_arg
23347 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23348 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23349 && REGNO (XEXP (XEXP (arg, 0), 0))
23350 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23351 next_arg = XEXP (next_arg, 1);
23352 if (mode == VOIDmode)
23353 {
23354 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23355 if (mode == VOIDmode)
23356 mode = GET_MODE (XEXP (arg, 0));
23357 }
23358 if (mode == VOIDmode || mode == BLKmode)
23359 continue;
23360 /* Get dynamic information about the call target only if we
23361 have no static information: we cannot generate both
23362 DW_AT_call_origin and DW_AT_call_target
23363 attributes. */
23364 if (ca_loc->symbol_ref == NULL_RTX)
23365 {
23366 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23367 {
23368 tloc = XEXP (XEXP (arg, 0), 1);
23369 continue;
23370 }
23371 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23372 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23373 {
23374 tlocc = XEXP (XEXP (arg, 0), 1);
23375 continue;
23376 }
23377 }
23378 reg = NULL;
23379 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23380 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23381 VAR_INIT_STATUS_INITIALIZED);
23382 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23383 {
23384 rtx mem = XEXP (XEXP (arg, 0), 0);
23385 reg = mem_loc_descriptor (XEXP (mem, 0),
23386 get_address_mode (mem),
23387 GET_MODE (mem),
23388 VAR_INIT_STATUS_INITIALIZED);
23389 }
23390 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23391 == DEBUG_PARAMETER_REF)
23392 {
23393 tree tdecl
23394 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23395 tdie = lookup_decl_die (tdecl);
23396 if (tdie == NULL)
23397 continue;
23398 arg_decl = tdecl;
23399 }
23400 else
23401 continue;
23402 if (reg == NULL
23403 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23404 != DEBUG_PARAMETER_REF)
23405 continue;
23406 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23407 VOIDmode,
23408 VAR_INIT_STATUS_INITIALIZED);
23409 if (val == NULL)
23410 continue;
23411 if (die == NULL)
23412 die = gen_call_site_die (decl, subr_die, ca_loc);
23413 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23414 NULL_TREE);
23415 add_desc_attribute (cdie, arg_decl);
23416 if (reg != NULL)
23417 add_AT_loc (cdie, DW_AT_location, reg);
23418 else if (tdie != NULL)
23419 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23420 tdie);
23421 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23422 if (next_arg != XEXP (arg, 1))
23423 {
23424 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23425 if (mode == VOIDmode)
23426 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23427 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23428 0), 1),
23429 mode, VOIDmode,
23430 VAR_INIT_STATUS_INITIALIZED);
23431 if (val != NULL)
23432 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23433 val);
23434 }
23435 }
23436 if (die == NULL
23437 && (ca_loc->symbol_ref || tloc))
23438 die = gen_call_site_die (decl, subr_die, ca_loc);
23439 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23440 {
23441 dw_loc_descr_ref tval = NULL;
23442
23443 if (tloc != NULL_RTX)
23444 tval = mem_loc_descriptor (tloc,
23445 GET_MODE (tloc) == VOIDmode
23446 ? Pmode : GET_MODE (tloc),
23447 VOIDmode,
23448 VAR_INIT_STATUS_INITIALIZED);
23449 if (tval)
23450 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23451 else if (tlocc != NULL_RTX)
23452 {
23453 tval = mem_loc_descriptor (tlocc,
23454 GET_MODE (tlocc) == VOIDmode
23455 ? Pmode : GET_MODE (tlocc),
23456 VOIDmode,
23457 VAR_INIT_STATUS_INITIALIZED);
23458 if (tval)
23459 add_AT_loc (die,
23460 dwarf_AT (DW_AT_call_target_clobbered),
23461 tval);
23462 }
23463 }
23464 if (die != NULL)
23465 {
23466 call_site_note_count++;
23467 if (ca_loc->tail_call_p)
23468 tail_call_site_note_count++;
23469 }
23470 }
23471 }
23472 call_arg_locations = NULL;
23473 call_arg_loc_last = NULL;
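/* If a DW_TAG_call_site DIE was produced for every call site counted
   in this function, advertise that with DW_AT_call_all_calls; if only
   the tail calls are fully covered, fall back to
   DW_AT_call_all_tail_calls.  */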
23474 if (tail_call_site_count >= 0
23475 && tail_call_site_count == tail_call_site_note_count
23476 && (!dwarf_strict || dwarf_version >= 5))
23477 {
23478 if (call_site_count >= 0
23479 && call_site_count == call_site_note_count)
23480 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23481 else
23482 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23483 }
23484 call_site_count = -1;
23485 tail_call_site_count = -1;
23486 }
23487
23488 /* Mark used types after we have created DIEs for the functions scopes. */
23489 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23490 }
23491
23492 /* Returns a hash value for X (which really is a die_struct). */
23493
23494 hashval_t
23495 block_die_hasher::hash (die_struct *d)
23496 {
23497 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23498 }
23499
23500 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23501 as decl_id and die_parent of die_struct Y. */
23502
23503 bool
23504 block_die_hasher::equal (die_struct *x, die_struct *y)
23505 {
23506 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23507 }
23508
23509 /* Hold information about markers for inlined entry points. */
23510 struct GTY ((for_user)) inline_entry_data
23511 {
23512 /* The block that's the inlined_function_outer_scope for an inlined
23513 function. */
23514 tree block;
23515
23516 /* The label at the inlined entry point. */
23517 const char *label_pfx;
23518 unsigned int label_num;
23519
23520 /* The view number to be used as the inlined entry point. */
23521 var_loc_view view;
23522 };
23523
23524 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23525 {
23526 typedef tree compare_type;
23527 static inline hashval_t hash (const inline_entry_data *);
23528 static inline bool equal (const inline_entry_data *, const_tree);
23529 };
23530
23531 /* Hash table routines for inline_entry_data. */
23532
23533 inline hashval_t
23534 inline_entry_data_hasher::hash (const inline_entry_data *data)
23535 {
23536 return htab_hash_pointer (data->block);
23537 }
23538
23539 inline bool
23540 inline_entry_data_hasher::equal (const inline_entry_data *data,
23541 const_tree block)
23542 {
23543 return data->block == block;
23544 }
23545
23546 /* Inlined entry points pending DIE creation in this compilation unit. */
23547
23548 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23549
23550
23551 /* Return TRUE if DECL, which may have been previously generated as
23552 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23553 true if decl (or its origin) is either an extern declaration or a
23554 class/namespace scoped declaration.
23555
23556 The declare_in_namespace support causes us to get two DIEs for one
23557 variable, both of which are declarations. We want to avoid
23558 considering one to be a specification, so we must test for
23559 DECLARATION and DW_AT_declaration. */
23560 static inline bool
23561 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23562 {
23563 return (old_die && TREE_STATIC (decl) && !declaration
23564 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23565 }
23566
23567 /* Return true if DECL is a local static. */
23568
23569 static inline bool
23570 local_function_static (tree decl)
23571 {
23572 gcc_assert (VAR_P (decl));
23573 return TREE_STATIC (decl)
23574 && DECL_CONTEXT (decl)
23575 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23576 }
23577
23578 /* Return true iff DECL overrides (presumably completes) the type of
23579 OLD_DIE within CONTEXT_DIE. */
23580
23581 static bool
23582 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23583 dw_die_ref context_die)
23584 {
23585 tree type = TREE_TYPE (decl);
23586 int cv_quals;
23587
23588 if (decl_by_reference_p (decl))
23589 {
23590 type = TREE_TYPE (type);
23591 cv_quals = TYPE_UNQUALIFIED;
23592 }
23593 else
23594 cv_quals = decl_quals (decl);
23595
23596 dw_die_ref type_die = modified_type_die (type,
23597 cv_quals | TYPE_QUALS (type),
23598 false,
23599 context_die);
23600
23601 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23602
23603 return type_die != old_type_die;
23604 }
23605
23606 /* Generate a DIE to represent a declared data object.
23607 Either DECL or ORIGIN must be non-null. */
23608
23609 static void
23610 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23611 {
23612 HOST_WIDE_INT off = 0;
23613 tree com_decl;
23614 tree decl_or_origin = decl ? decl : origin;
23615 tree ultimate_origin;
23616 dw_die_ref var_die;
23617 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23618 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23619 || class_or_namespace_scope_p (context_die));
23620 bool specialization_p = false;
23621 bool no_linkage_name = false;
23622
23623 /* While C++ inline static data members have definitions inside the
23624 class, force the first DIE to be a declaration, then let gen_member_die
23625 reparent it to the class context and call gen_variable_die again
23626 to create the outside-of-class DIE for the definition. */
23627 if (!declaration
23628 && old_die == NULL
23629 && decl
23630 && DECL_CONTEXT (decl)
23631 && TYPE_P (DECL_CONTEXT (decl))
23632 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23633 {
23634 declaration = true;
23635 if (dwarf_version < 5)
23636 no_linkage_name = true;
23637 }
23638
23639 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23640 if (decl || ultimate_origin)
23641 origin = ultimate_origin;
23642 com_decl = fortran_common (decl_or_origin, &off);
23643
23644 /* A symbol in a common block gets emitted as a child of the common block
23645 DIE, in the form of a data member. */
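/* For illustration (a hypothetical Fortran fragment):
     integer a, b
     common /blk/ a, b
   A and B each get a DW_TAG_variable DIE nested under the
   DW_TAG_common_block DIE for BLK, located at an offset from the
   block's address.  */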
23646 if (com_decl)
23647 {
23648 dw_die_ref com_die;
23649 dw_loc_list_ref loc = NULL;
23650 die_node com_die_arg;
23651
23652 var_die = lookup_decl_die (decl_or_origin);
23653 if (var_die)
23654 {
23655 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23656 {
23657 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23658 if (loc)
23659 {
23660 if (off)
23661 {
23662 /* Optimize the common case. */
23663 if (single_element_loc_list_p (loc)
23664 && loc->expr->dw_loc_opc == DW_OP_addr
23665 && loc->expr->dw_loc_next == NULL
23666 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23667 == SYMBOL_REF)
23668 {
23669 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23670 loc->expr->dw_loc_oprnd1.v.val_addr
23671 = plus_constant (GET_MODE (x), x , off);
23672 }
23673 else
23674 loc_list_plus_const (loc, off);
23675 }
23676 add_AT_location_description (var_die, DW_AT_location, loc);
23677 remove_AT (var_die, DW_AT_declaration);
23678 }
23679 }
23680 return;
23681 }
23682
23683 if (common_block_die_table == NULL)
23684 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23685
23686 com_die_arg.decl_id = DECL_UID (com_decl);
23687 com_die_arg.die_parent = context_die;
23688 com_die = common_block_die_table->find (&com_die_arg);
23689 if (! early_dwarf)
23690 loc = loc_list_from_tree (com_decl, 2, NULL);
23691 if (com_die == NULL)
23692 {
23693 const char *cnam
23694 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23695 die_node **slot;
23696
23697 com_die = new_die (DW_TAG_common_block, context_die, decl);
23698 add_name_and_src_coords_attributes (com_die, com_decl);
23699 if (loc)
23700 {
23701 add_AT_location_description (com_die, DW_AT_location, loc);
23702 /* Avoid sharing the same loc descriptor between
23703 DW_TAG_common_block and DW_TAG_variable. */
23704 loc = loc_list_from_tree (com_decl, 2, NULL);
23705 }
23706 else if (DECL_EXTERNAL (decl_or_origin))
23707 add_AT_flag (com_die, DW_AT_declaration, 1);
23708 if (want_pubnames ())
23709 add_pubname_string (cnam, com_die); /* ??? needed? */
23710 com_die->decl_id = DECL_UID (com_decl);
23711 slot = common_block_die_table->find_slot (com_die, INSERT);
23712 *slot = com_die;
23713 }
23714 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23715 {
23716 add_AT_location_description (com_die, DW_AT_location, loc);
23717 loc = loc_list_from_tree (com_decl, 2, NULL);
23718 remove_AT (com_die, DW_AT_declaration);
23719 }
23720 var_die = new_die (DW_TAG_variable, com_die, decl);
23721 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23722 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23723 decl_quals (decl_or_origin), false,
23724 context_die);
23725 add_alignment_attribute (var_die, decl);
23726 add_AT_flag (var_die, DW_AT_external, 1);
23727 if (loc)
23728 {
23729 if (off)
23730 {
23731 /* Optimize the common case. */
23732 if (single_element_loc_list_p (loc)
23733 && loc->expr->dw_loc_opc == DW_OP_addr
23734 && loc->expr->dw_loc_next == NULL
23735 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23736 {
23737 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23738 loc->expr->dw_loc_oprnd1.v.val_addr
23739 = plus_constant (GET_MODE (x), x, off);
23740 }
23741 else
23742 loc_list_plus_const (loc, off);
23743 }
23744 add_AT_location_description (var_die, DW_AT_location, loc);
23745 }
23746 else if (DECL_EXTERNAL (decl_or_origin))
23747 add_AT_flag (var_die, DW_AT_declaration, 1);
23748 if (decl)
23749 equate_decl_number_to_die (decl, var_die);
23750 return;
23751 }
23752
23753 if (old_die)
23754 {
23755 if (declaration)
23756 {
23757 /* A declaration that has been previously dumped needs no
23758 further annotations, since it doesn't need location info on
23759 the second pass. */
23760 return;
23761 }
23762 else if (decl_will_get_specification_p (old_die, decl, declaration)
23763 && !get_AT (old_die, DW_AT_specification))
23764 {
23765 /* Fall-thru so we can make a new variable die along with a
23766 DW_AT_specification. */
23767 }
23768 else if (origin && old_die->die_parent != context_die)
23769 {
23770 /* If we will be creating an inlined instance, we need a
23771 new DIE that will get annotated with
23772 DW_AT_abstract_origin. */
23773 gcc_assert (!DECL_ABSTRACT_P (decl));
23774 }
23775 else
23776 {
23777 /* If a DIE was dumped early, it still needs location info.
23778 Skip to where we fill the location bits. */
23779 var_die = old_die;
23780
23781 /* ??? In LTRANS we cannot annotate early-created variably
23782 modified type DIEs without copying them and adjusting all
23783 references to them, so we dump them again. Also add a
23784 reference to them, but beware of a -g0 compile and -g link,
23785 in which case the reference will already be present. */
23786 tree type = TREE_TYPE (decl_or_origin);
23787 if (in_lto_p
23788 && ! get_AT (var_die, DW_AT_type)
23789 && variably_modified_type_p
23790 (type, decl_function_context (decl_or_origin)))
23791 {
23792 if (decl_by_reference_p (decl_or_origin))
23793 add_type_attribute (var_die, TREE_TYPE (type),
23794 TYPE_UNQUALIFIED, false, context_die);
23795 else
23796 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23797 false, context_die);
23798 }
23799
23800 goto gen_variable_die_location;
23801 }
23802 }
23803
23804 /* For static data members, the declaration in the class is supposed
23805 to have a DW_TAG_member tag in DWARF{3,4}, and we emit it for compatibility
23806 also in DWARF2; the specification should still be DW_TAG_variable
23807 referencing the DW_TAG_member DIE. */
23808 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23809 var_die = new_die (DW_TAG_member, context_die, decl);
23810 else
23811 var_die = new_die (DW_TAG_variable, context_die, decl);
23812
23813 if (origin != NULL)
23814 add_abstract_origin_attribute (var_die, origin);
23815
23816 /* Loop unrolling can create multiple blocks that refer to the same
23817 static variable, so we must test for the DW_AT_declaration flag.
23818
23819 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23820 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23821 sharing them.
23822
23823 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23824 else if (decl_will_get_specification_p (old_die, decl, declaration))
23825 {
23826 /* This is a definition of a C++ class level static. */
23827 add_AT_specification (var_die, old_die);
23828 specialization_p = true;
23829 if (DECL_NAME (decl))
23830 {
23831 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23832 struct dwarf_file_data * file_index = lookup_filename (s.file);
23833
23834 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23835 add_AT_file (var_die, DW_AT_decl_file, file_index);
23836
23837 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23838 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23839
23840 if (debug_column_info
23841 && s.column
23842 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23843 != (unsigned) s.column))
23844 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23845
23846 if (old_die->die_tag == DW_TAG_member)
23847 add_linkage_name (var_die, decl);
23848 }
23849 }
23850 else
23851 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23852
23853 if ((origin == NULL && !specialization_p)
23854 || (origin != NULL
23855 && !DECL_ABSTRACT_P (decl_or_origin)
23856 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23857 decl_function_context
23858 (decl_or_origin)))
23859 || (old_die && specialization_p
23860 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
23861 {
23862 tree type = TREE_TYPE (decl_or_origin);
23863
23864 if (decl_by_reference_p (decl_or_origin))
23865 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23866 context_die);
23867 else
23868 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23869 context_die);
23870 }
23871
23872 if (origin == NULL && !specialization_p)
23873 {
23874 if (TREE_PUBLIC (decl))
23875 add_AT_flag (var_die, DW_AT_external, 1);
23876
23877 if (DECL_ARTIFICIAL (decl))
23878 add_AT_flag (var_die, DW_AT_artificial, 1);
23879
23880 add_alignment_attribute (var_die, decl);
23881
23882 add_accessibility_attribute (var_die, decl);
23883 }
23884
23885 if (declaration)
23886 add_AT_flag (var_die, DW_AT_declaration, 1);
23887
23888 if (decl && (DECL_ABSTRACT_P (decl)
23889 || !old_die || is_declaration_die (old_die)))
23890 equate_decl_number_to_die (decl, var_die);
23891
23892 gen_variable_die_location:
23893 if (! declaration
23894 && (! DECL_ABSTRACT_P (decl_or_origin)
23895 /* Local static vars are shared between all clones/inlines,
23896 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23897 already set. */
23898 || (VAR_P (decl_or_origin)
23899 && TREE_STATIC (decl_or_origin)
23900 && DECL_RTL_SET_P (decl_or_origin))))
23901 {
23902 if (early_dwarf)
23903 add_pubname (decl_or_origin, var_die);
23904 else
23905 add_location_or_const_value_attribute (var_die, decl_or_origin,
23906 decl == NULL);
23907 }
23908 else
23909 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23910
23911 if ((dwarf_version >= 4 || !dwarf_strict)
23912 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23913 DW_AT_const_expr) == 1
23914 && !get_AT (var_die, DW_AT_const_expr)
23915 && !specialization_p)
23916 add_AT_flag (var_die, DW_AT_const_expr, 1);
23917
23918 if (!dwarf_strict)
23919 {
23920 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23921 DW_AT_inline);
23922 if (inl != -1
23923 && !get_AT (var_die, DW_AT_inline)
23924 && !specialization_p)
23925 add_AT_unsigned (var_die, DW_AT_inline, inl);
23926 }
23927 }
23928
23929 /* Generate a DIE to represent a named constant. */
23930
23931 static void
23932 gen_const_die (tree decl, dw_die_ref context_die)
23933 {
23934 dw_die_ref const_die;
23935 tree type = TREE_TYPE (decl);
23936
23937 const_die = lookup_decl_die (decl);
23938 if (const_die)
23939 return;
23940
23941 const_die = new_die (DW_TAG_constant, context_die, decl);
23942 equate_decl_number_to_die (decl, const_die);
23943 add_name_and_src_coords_attributes (const_die, decl);
23944 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23945 if (TREE_PUBLIC (decl))
23946 add_AT_flag (const_die, DW_AT_external, 1);
23947 if (DECL_ARTIFICIAL (decl))
23948 add_AT_flag (const_die, DW_AT_artificial, 1);
23949 tree_add_const_value_attribute_for_decl (const_die, decl);
23950 }
23951
23952 /* Generate a DIE to represent a label identifier. */
23953
23954 static void
23955 gen_label_die (tree decl, dw_die_ref context_die)
23956 {
23957 tree origin = decl_ultimate_origin (decl);
23958 dw_die_ref lbl_die = lookup_decl_die (decl);
23959 rtx insn;
23960 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23961
23962 if (!lbl_die)
23963 {
23964 lbl_die = new_die (DW_TAG_label, context_die, decl);
23965 equate_decl_number_to_die (decl, lbl_die);
23966
23967 if (origin != NULL)
23968 add_abstract_origin_attribute (lbl_die, origin);
23969 else
23970 add_name_and_src_coords_attributes (lbl_die, decl);
23971 }
23972
23973 if (DECL_ABSTRACT_P (decl))
23974 equate_decl_number_to_die (decl, lbl_die);
23975 else if (! early_dwarf)
23976 {
23977 insn = DECL_RTL_IF_SET (decl);
23978
23979 /* Deleted labels are programmer-specified labels which have been
23980 eliminated because of various optimizations. We still emit them
23981 here so that it is possible to put breakpoints on them. */
23982 if (insn
23983 && (LABEL_P (insn)
23984 || ((NOTE_P (insn)
23985 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23986 {
23987 /* When optimization is enabled (via -O) some parts of the compiler
23988 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23989 represent source-level labels which were explicitly declared by
23990 the user. This really shouldn't be happening though, so catch
23991 it if it ever does happen. */
23992 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23993
23994 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23995 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23996 }
23997 else if (insn
23998 && NOTE_P (insn)
23999 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24000 && CODE_LABEL_NUMBER (insn) != -1)
24001 {
24002 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24003 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24004 }
24005 }
24006 }
24007
24008 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24009 attributes to the DIE for a block STMT, to describe where the inlined
24010 function was called from. This is similar to add_src_coords_attributes. */
24011
24012 static inline void
24013 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24014 {
24015 /* We can end up with BUILTINS_LOCATION here. */
24016 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24017 return;
24018
24019 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24020
24021 if (dwarf_version >= 3 || !dwarf_strict)
24022 {
24023 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24024 add_AT_unsigned (die, DW_AT_call_line, s.line);
24025 if (debug_column_info && s.column)
24026 add_AT_unsigned (die, DW_AT_call_column, s.column);
24027 }
24028 }
24029
24030
24031 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24032 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24033
24034 static inline void
24035 add_high_low_attributes (tree stmt, dw_die_ref die)
24036 {
24037 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24038
24039 if (inline_entry_data **iedp
24040 = !inline_entry_data_table ? NULL
24041 : inline_entry_data_table->find_slot_with_hash (stmt,
24042 htab_hash_pointer (stmt),
24043 NO_INSERT))
24044 {
24045 inline_entry_data *ied = *iedp;
24046 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24047 gcc_assert (debug_inline_points);
24048 gcc_assert (inlined_function_outer_scope_p (stmt));
24049
24050 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24051 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24052
24053 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24054 && !dwarf_strict)
24055 {
24056 if (!output_asm_line_debug_info ())
24057 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24058 else
24059 {
24060 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24061 /* FIXME: this will resolve to a small number. Could we
24062 possibly emit smaller data? Ideally we'd emit a
24063 uleb128, but that would make the size of DIEs
24064 impossible for the compiler to compute, since it's
24065 the assembler that computes the value of the view
24066 label in this case. Ideally, we'd have a single form
24067 encompassing both the address and the view, and
24068 indirecting them through a table might make things
24069 easier, but even that would be more wasteful,
24070 space-wise, than what we have now. */
24071 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24072 }
24073 }
24074
24075 inline_entry_data_table->clear_slot (iedp);
24076 }
24077
24078 if (BLOCK_FRAGMENT_CHAIN (stmt)
24079 && (dwarf_version >= 3 || !dwarf_strict))
24080 {
24081 tree chain, superblock = NULL_TREE;
24082 dw_die_ref pdie;
24083 dw_attr_node *attr = NULL;
24084
24085 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24086 {
24087 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24088 BLOCK_NUMBER (stmt));
24089 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24090 }
24091
24092 /* Optimize duplicate .debug_ranges lists or even tails of
24093 lists. If this BLOCK has same ranges as its supercontext,
24094 lookup DW_AT_ranges attribute in the supercontext (and
24095 recursively so), verify that the ranges_table contains the
24096 right values and use it instead of adding a new .debug_ranges entry. */
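/* A rough sketch of the ranges_table layout the checks below rely on,
   with OFF being the supercontext's DW_AT_ranges offset:
     [OFF]              head entry, .num == BLOCK_NUMBER (superblock)
     [OFF+1 .. OFF+N]   one entry per fragment in the superblock's
                        BLOCK_FRAGMENT_CHAIN, .num == that fragment's number
     [OFF+N+1]          terminator, .num == 0
   If this BLOCK's M fragments form a tail of that chain, we can reuse
   offset OFF + N - M instead of emitting a new list.  */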
24097 for (chain = stmt, pdie = die;
24098 BLOCK_SAME_RANGE (chain);
24099 chain = BLOCK_SUPERCONTEXT (chain))
24100 {
24101 dw_attr_node *new_attr;
24102
24103 pdie = pdie->die_parent;
24104 if (pdie == NULL)
24105 break;
24106 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24107 break;
24108 new_attr = get_AT (pdie, DW_AT_ranges);
24109 if (new_attr == NULL
24110 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24111 break;
24112 attr = new_attr;
24113 superblock = BLOCK_SUPERCONTEXT (chain);
24114 }
24115 if (attr != NULL
24116 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24117 == (int)BLOCK_NUMBER (superblock))
24118 && BLOCK_FRAGMENT_CHAIN (superblock))
24119 {
24120 unsigned long off = attr->dw_attr_val.v.val_offset;
24121 unsigned long supercnt = 0, thiscnt = 0;
24122 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24123 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24124 {
24125 ++supercnt;
24126 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24127 == (int)BLOCK_NUMBER (chain));
24128 }
24129 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24130 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24131 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24132 ++thiscnt;
24133 gcc_assert (supercnt >= thiscnt);
24134 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24135 false);
24136 note_rnglist_head (off + supercnt - thiscnt);
24137 return;
24138 }
24139
24140 unsigned int offset = add_ranges (stmt, true);
24141 add_AT_range_list (die, DW_AT_ranges, offset, false);
24142 note_rnglist_head (offset);
24143
24144 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24145 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24146 do
24147 {
24148 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24149 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24150 chain = BLOCK_FRAGMENT_CHAIN (chain);
24151 }
24152 while (chain);
24153 add_ranges (NULL);
24154 }
24155 else
24156 {
24157 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24158 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24159 BLOCK_NUMBER (stmt));
24160 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24161 BLOCK_NUMBER (stmt));
24162 add_AT_low_high_pc (die, label, label_high, false);
24163 }
24164 }
24165
24166 /* Generate a DIE for a lexical block. */
24167
24168 static void
24169 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24170 {
24171 dw_die_ref old_die = lookup_block_die (stmt);
24172 dw_die_ref stmt_die = NULL;
24173 if (!old_die)
24174 {
24175 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24176 equate_block_to_die (stmt, stmt_die);
24177 }
24178
24179 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24180 {
24181 /* If this is an inlined or concrete instance, create a new lexical
24182 die for anything below to attach DW_AT_abstract_origin to. */
24183 if (old_die)
24184 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24185
24186 tree origin = block_ultimate_origin (stmt);
24187 if (origin != NULL_TREE && (origin != stmt || old_die))
24188 add_abstract_origin_attribute (stmt_die, origin);
24189
24190 old_die = NULL;
24191 }
24192
24193 if (old_die)
24194 stmt_die = old_die;
24195
24196 /* A non-abstract block whose blocks have already been reordered
24197 should have the instruction range for this block. If so, set the
24198 high/low attributes. */
24199 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24200 {
24201 gcc_assert (stmt_die);
24202 add_high_low_attributes (stmt, stmt_die);
24203 }
24204
24205 decls_for_scope (stmt, stmt_die);
24206 }
24207
24208 /* Generate a DIE for an inlined subprogram. */
24209
24210 static void
24211 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24212 {
24213 tree decl = block_ultimate_origin (stmt);
24214
24215 /* Make sure any inlined functions are known to be inlineable. */
24216 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24217 || cgraph_function_possibly_inlined_p (decl));
24218
24219 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24220
24221 if (call_arg_locations || debug_inline_points)
24222 equate_block_to_die (stmt, subr_die);
24223 add_abstract_origin_attribute (subr_die, decl);
24224 if (TREE_ASM_WRITTEN (stmt))
24225 add_high_low_attributes (stmt, subr_die);
24226 add_call_src_coords_attributes (stmt, subr_die);
24227
24228 /* The inliner creates an extra BLOCK for the parameter setup,
24229 we want to merge that with the actual outermost BLOCK of the
24230 inlined function to avoid duplicate locals in consumers.
24231 Do that by doing the recursion to subblocks on the single subblock
24232 of STMT. */
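/* Sketch of the shape handled here (illustrative): after inlining we may see

     STMT                -- extra BLOCK created for the parameter setup
       `-- SUBBLOCK      -- sole subblock, whose ultimate origin is the
                            outermost BLOCK of DECL itself

   in which case the two blocks are merged by recursing into SUBBLOCK
   directly rather than emitting a separate lexical block DIE for it.  */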
24233 bool unwrap_one = false;
24234 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24235 {
24236 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24237 if (origin
24238 && TREE_CODE (origin) == BLOCK
24239 && BLOCK_SUPERCONTEXT (origin) == decl)
24240 unwrap_one = true;
24241 }
24242 decls_for_scope (stmt, subr_die, !unwrap_one);
24243 if (unwrap_one)
24244 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24245 }
24246
24247 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24248 the comment for VLR_CONTEXT. */
24249
24250 static void
24251 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24252 {
24253 dw_die_ref decl_die;
24254
24255 if (TREE_TYPE (decl) == error_mark_node)
24256 return;
24257
24258 decl_die = new_die (DW_TAG_member, context_die, decl);
24259 add_name_and_src_coords_attributes (decl_die, decl);
24260 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24261 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24262 context_die);
24263
24264 if (DECL_BIT_FIELD_TYPE (decl))
24265 {
24266 add_byte_size_attribute (decl_die, decl);
24267 add_bit_size_attribute (decl_die, decl);
24268 add_bit_offset_attribute (decl_die, decl, ctx);
24269 }
24270
24271 add_alignment_attribute (decl_die, decl);
24272
24273 /* If we have a variant part offset, then we are supposed to process a member
24274 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24275 trees. */
24276 gcc_assert (ctx->variant_part_offset == NULL_TREE
24277 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24278 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24279 add_data_member_location_attribute (decl_die, decl, ctx);
24280
24281 if (DECL_ARTIFICIAL (decl))
24282 add_AT_flag (decl_die, DW_AT_artificial, 1);
24283
24284 add_accessibility_attribute (decl_die, decl);
24285
24286 /* Equate decl number to die, so that we can look up this decl later on. */
24287 equate_decl_number_to_die (decl, decl_die);
24288 }
24289
24290 /* Generate a DIE for a pointer to a member type. TYPE can be an
24291 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24292 pointer to member function. */
24293
24294 static void
24295 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24296 {
24297 if (lookup_type_die (type))
24298 return;
24299
24300 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24301 scope_die_for (type, context_die), type);
24302
24303 equate_type_number_to_die (type, ptr_die);
24304 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24305 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24306 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24307 context_die);
24308 add_alignment_attribute (ptr_die, type);
24309
24310 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24311 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24312 {
24313 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24314 add_AT_loc (ptr_die, DW_AT_use_location, op);
24315 }
24316 }
24317
24318 static char *producer_string;
24319
24320 /* Return a heap-allocated producer string, including the command-line
24321 options when -grecord-gcc-switches is in effect. */
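/* For example (illustrative values only), the resulting string may look like
     "GNU C17 10.1.0 -march=x86-64 -O2 -g"
   i.e. "<language> <version>" followed by the recorded switches, separated
   by single spaces.  */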
24322
24323 static char *
24324 gen_producer_string (void)
24325 {
24326 size_t j;
24327 auto_vec<const char *> switches;
24328 const char *language_string = lang_hooks.name;
24329 char *producer, *tail;
24330 const char *p;
24331 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24332 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24333
24334 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24335 switch (save_decoded_options[j].opt_index)
24336 {
24337 case OPT_o:
24338 case OPT_d:
24339 case OPT_dumpbase:
24340 case OPT_dumpdir:
24341 case OPT_auxbase:
24342 case OPT_auxbase_strip:
24343 case OPT_quiet:
24344 case OPT_version:
24345 case OPT_v:
24346 case OPT_w:
24347 case OPT_L:
24348 case OPT_D:
24349 case OPT_I:
24350 case OPT_U:
24351 case OPT_SPECIAL_unknown:
24352 case OPT_SPECIAL_ignore:
24353 case OPT_SPECIAL_warn_removed:
24354 case OPT_SPECIAL_program_name:
24355 case OPT_SPECIAL_input_file:
24356 case OPT_grecord_gcc_switches:
24357 case OPT__output_pch_:
24358 case OPT_fdiagnostics_show_location_:
24359 case OPT_fdiagnostics_show_option:
24360 case OPT_fdiagnostics_show_caret:
24361 case OPT_fdiagnostics_show_labels:
24362 case OPT_fdiagnostics_show_line_numbers:
24363 case OPT_fdiagnostics_color_:
24364 case OPT_fdiagnostics_format_:
24365 case OPT_fverbose_asm:
24366 case OPT____:
24367 case OPT__sysroot_:
24368 case OPT_nostdinc:
24369 case OPT_nostdinc__:
24370 case OPT_fpreprocessed:
24371 case OPT_fltrans_output_list_:
24372 case OPT_fresolution_:
24373 case OPT_fdebug_prefix_map_:
24374 case OPT_fmacro_prefix_map_:
24375 case OPT_ffile_prefix_map_:
24376 case OPT_fcompare_debug:
24377 case OPT_fchecking:
24378 case OPT_fchecking_:
24379 /* Ignore these. */
24380 continue;
24381 case OPT_flto_:
24382 {
24383 const char *lto_canonical = "-flto";
24384 switches.safe_push (lto_canonical);
24385 len += strlen (lto_canonical) + 1;
24386 break;
24387 }
24388 default:
24389 if (cl_options[save_decoded_options[j].opt_index].flags
24390 & CL_NO_DWARF_RECORD)
24391 continue;
24392 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24393 == '-');
24394 switch (save_decoded_options[j].canonical_option[0][1])
24395 {
24396 case 'M':
24397 case 'i':
24398 case 'W':
24399 continue;
24400 case 'f':
24401 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24402 "dump", 4) == 0)
24403 continue;
24404 break;
24405 default:
24406 break;
24407 }
24408 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24409 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24410 break;
24411 }
24412
24413 producer = XNEWVEC (char, plen + 1 + len + 1);
24414 tail = producer;
24415 sprintf (tail, "%s %s", language_string, version_string);
24416 tail += plen;
24417
24418 FOR_EACH_VEC_ELT (switches, j, p)
24419 {
24420 len = strlen (p);
24421 *tail = ' ';
24422 memcpy (tail + 1, p, len);
24423 tail += len + 1;
24424 }
24425
24426 *tail = '\0';
24427 return producer;
24428 }
24429
24430 /* Given two C and/or C++ language/version strings, return the "highest".
24431 C++ is assumed to be "higher" than C in this case. Used for merging
24432 LTO translation unit languages. */
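/* For example, highest_c_language ("GNU C11", "GNU C++14") returns
   "GNU C++14", and highest_c_language ("GNU C89", "GNU C99") returns
   "GNU C99".  */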
24433 static const char *
24434 highest_c_language (const char *lang1, const char *lang2)
24435 {
24436 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24437 return "GNU C++17";
24438 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24439 return "GNU C++14";
24440 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24441 return "GNU C++11";
24442 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24443 return "GNU C++98";
24444
24445 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24446 return "GNU C2X";
24447 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24448 return "GNU C17";
24449 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24450 return "GNU C11";
24451 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24452 return "GNU C99";
24453 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24454 return "GNU C89";
24455
24456 gcc_unreachable ();
24457 }
24458
24459
24460 /* Generate the DIE for the compilation unit. */
24461
24462 static dw_die_ref
24463 gen_compile_unit_die (const char *filename)
24464 {
24465 dw_die_ref die;
24466 const char *language_string = lang_hooks.name;
24467 int language;
24468
24469 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24470
24471 if (filename)
24472 {
24473 add_name_attribute (die, filename);
24474 /* Don't add cwd for <built-in>. */
24475 if (filename[0] != '<')
24476 add_comp_dir_attribute (die);
24477 }
24478
24479 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24480
24481 /* If our producer is LTO, try to figure out a common language to use
24482 from the global list of translation units. */
24483 if (strcmp (language_string, "GNU GIMPLE") == 0)
24484 {
24485 unsigned i;
24486 tree t;
24487 const char *common_lang = NULL;
24488
24489 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24490 {
24491 if (!TRANSLATION_UNIT_LANGUAGE (t))
24492 continue;
24493 if (!common_lang)
24494 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24495 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24496 ;
24497 else if (strncmp (common_lang, "GNU C", 5) == 0
24498 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24499 /* Mixing C and C++ is ok, use C++ in that case. */
24500 common_lang = highest_c_language (common_lang,
24501 TRANSLATION_UNIT_LANGUAGE (t));
24502 else
24503 {
24504 /* Fall back to C. */
24505 common_lang = NULL;
24506 break;
24507 }
24508 }
24509
24510 if (common_lang)
24511 language_string = common_lang;
24512 }
24513
24514 language = DW_LANG_C;
24515 if (strncmp (language_string, "GNU C", 5) == 0
24516 && ISDIGIT (language_string[5]))
24517 {
24518 language = DW_LANG_C89;
24519 if (dwarf_version >= 3 || !dwarf_strict)
24520 {
24521 if (strcmp (language_string, "GNU C89") != 0)
24522 language = DW_LANG_C99;
24523
24524 if (dwarf_version >= 5 /* || !dwarf_strict */)
24525 if (strcmp (language_string, "GNU C11") == 0
24526 || strcmp (language_string, "GNU C17") == 0
24527 || strcmp (language_string, "GNU C2X") == 0)
24528 language = DW_LANG_C11;
24529 }
24530 }
24531 else if (strncmp (language_string, "GNU C++", 7) == 0)
24532 {
24533 language = DW_LANG_C_plus_plus;
24534 if (dwarf_version >= 5 /* || !dwarf_strict */)
24535 {
24536 if (strcmp (language_string, "GNU C++11") == 0)
24537 language = DW_LANG_C_plus_plus_11;
24538 else if (strcmp (language_string, "GNU C++14") == 0)
24539 language = DW_LANG_C_plus_plus_14;
24540 else if (strcmp (language_string, "GNU C++17") == 0)
24541 /* For now. */
24542 language = DW_LANG_C_plus_plus_14;
24543 }
24544 }
24545 else if (strcmp (language_string, "GNU F77") == 0)
24546 language = DW_LANG_Fortran77;
24547 else if (dwarf_version >= 3 || !dwarf_strict)
24548 {
24549 if (strcmp (language_string, "GNU Ada") == 0)
24550 language = DW_LANG_Ada95;
24551 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24552 {
24553 language = DW_LANG_Fortran95;
24554 if (dwarf_version >= 5 /* || !dwarf_strict */)
24555 {
24556 if (strcmp (language_string, "GNU Fortran2003") == 0)
24557 language = DW_LANG_Fortran03;
24558 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24559 language = DW_LANG_Fortran08;
24560 }
24561 }
24562 else if (strcmp (language_string, "GNU Objective-C") == 0)
24563 language = DW_LANG_ObjC;
24564 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24565 language = DW_LANG_ObjC_plus_plus;
24566 else if (strcmp (language_string, "GNU D") == 0)
24567 language = DW_LANG_D;
24568 else if (dwarf_version >= 5 || !dwarf_strict)
24569 {
24570 if (strcmp (language_string, "GNU Go") == 0)
24571 language = DW_LANG_Go;
24572 }
24573 }
24574 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24575 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24576 language = DW_LANG_Fortran90;
24577 /* Likewise for Ada. */
24578 else if (strcmp (language_string, "GNU Ada") == 0)
24579 language = DW_LANG_Ada83;
24580
24581 add_AT_unsigned (die, DW_AT_language, language);
24582
24583 switch (language)
24584 {
24585 case DW_LANG_Fortran77:
24586 case DW_LANG_Fortran90:
24587 case DW_LANG_Fortran95:
24588 case DW_LANG_Fortran03:
24589 case DW_LANG_Fortran08:
24590 /* Fortran has case insensitive identifiers and the front-end
24591 lowercases everything. */
24592 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24593 break;
24594 default:
24595 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24596 break;
24597 }
24598 return die;
24599 }
24600
24601 /* Generate the DIE for a base class. */
24602
24603 static void
24604 gen_inheritance_die (tree binfo, tree access, tree type,
24605 dw_die_ref context_die)
24606 {
24607 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24608 struct vlr_context ctx = { type, NULL };
24609
24610 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24611 context_die);
24612 add_data_member_location_attribute (die, binfo, &ctx);
24613
24614 if (BINFO_VIRTUAL_P (binfo))
24615 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24616
24617 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24618 children, otherwise the default is DW_ACCESS_public. In DWARF2
24619 the default has always been DW_ACCESS_private. */
24620 if (access == access_public_node)
24621 {
24622 if (dwarf_version == 2
24623 || context_die->die_tag == DW_TAG_class_type)
24624 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24625 }
24626 else if (access == access_protected_node)
24627 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24628 else if (dwarf_version > 2
24629 && context_die->die_tag != DW_TAG_class_type)
24630 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24631 }
24632
24633 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24634 structure. */
24635
24636 static bool
24637 is_variant_part (tree decl)
24638 {
24639 return (TREE_CODE (decl) == FIELD_DECL
24640 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24641 }
24642
24643 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24644 return the FIELD_DECL. Return NULL_TREE otherwise. */
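/* For instance (illustrative), given an operand of the form

     (some_integer_type) <PLACEHOLDER_EXPR struct_type>.discr

   the conversion is stripped and, because the COMPONENT_REF accesses a
   FIELD_DECL of STRUCT_TYPE through a PLACEHOLDER_EXPR, the FIELD_DECL
   for "discr" is returned.  */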
24645
24646 static tree
24647 analyze_discr_in_predicate (tree operand, tree struct_type)
24648 {
24649 while (CONVERT_EXPR_P (operand))
24650 operand = TREE_OPERAND (operand, 0);
24651
24652 /* Match field access to members of struct_type only. */
24653 if (TREE_CODE (operand) == COMPONENT_REF
24654 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24655 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24656 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24657 return TREE_OPERAND (operand, 1);
24658 else
24659 return NULL_TREE;
24660 }
24661
24662 /* Check that SRC is a constant integer that can be represented as a native
24663 integer constant (either signed or unsigned). If so, store it into DEST and
24664 return true. Return false otherwise. */
24665
24666 static bool
24667 get_discr_value (tree src, dw_discr_value *dest)
24668 {
24669 tree discr_type = TREE_TYPE (src);
24670
24671 if (lang_hooks.types.get_debug_type)
24672 {
24673 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24674 if (debug_type != NULL)
24675 discr_type = debug_type;
24676 }
24677
24678 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24679 return false;
24680
24681 /* Signedness can vary between the original type and the debug type. This
24682 can happen for character types in Ada for instance: the character type
24683 used for code generation can be signed, to be compatible with the C one,
24684 but from a debugger point of view, it must be unsigned. */
24685 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24686 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24687
24688 if (is_orig_unsigned != is_debug_unsigned)
24689 src = fold_convert (discr_type, src);
24690
24691 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24692 return false;
24693
24694 dest->pos = is_debug_unsigned;
24695 if (is_debug_unsigned)
24696 dest->v.uval = tree_to_uhwi (src);
24697 else
24698 dest->v.sval = tree_to_shwi (src);
24699
24700 return true;
24701 }
24702
24703 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24704 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24705 store NULL_TREE in DISCR_DECL. Otherwise:
24706
24707 - store the discriminant field in STRUCT_TYPE that controls the variant
24708 part to *DISCR_DECL
24709
24710 - put in *DISCR_LISTS_P an array in which each item represents the
24711 matching list of discriminant values for the corresponding variant.
24712
24713 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24714 the above array.
24715
24716 Note that when the array is allocated (i.e. when the analysis is
24717 successful), it is up to the caller to free the array. */
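/* Roughly, the DECL_QUALIFIER predicates this analysis knows how to decode
   are, for a discriminant field D of STRUCT_TYPE:

     D == CST                    a single matching value
     D >= CST1 && D <= CST2      a range (strict GT_EXPR/LT_EXPR bounds are
                                 adjusted to be inclusive)
     <pred> || <pred> || ...     a list of the above
     D                           shorthand for D == true when D is boolean

   Any other shape makes the whole analysis give up.  */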
24718
24719 static void
24720 analyze_variants_discr (tree variant_part_decl,
24721 tree struct_type,
24722 tree *discr_decl,
24723 dw_discr_list_ref **discr_lists_p,
24724 unsigned *discr_lists_length)
24725 {
24726 tree variant_part_type = TREE_TYPE (variant_part_decl);
24727 tree variant;
24728 dw_discr_list_ref *discr_lists;
24729 unsigned i;
24730
24731 /* Compute how many variants there are in this variant part. */
24732 *discr_lists_length = 0;
24733 for (variant = TYPE_FIELDS (variant_part_type);
24734 variant != NULL_TREE;
24735 variant = DECL_CHAIN (variant))
24736 ++*discr_lists_length;
24737
24738 *discr_decl = NULL_TREE;
24739 *discr_lists_p
24740 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24741 sizeof (**discr_lists_p));
24742 discr_lists = *discr_lists_p;
24743
24744 /* And then analyze all variants to extract discriminant information for all
24745 of them. This analysis is conservative: as soon as we detect something we
24746 do not support, abort everything and pretend we found nothing. */
24747 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24748 variant != NULL_TREE;
24749 variant = DECL_CHAIN (variant), ++i)
24750 {
24751 tree match_expr = DECL_QUALIFIER (variant);
24752
24753 /* Now, try to analyze the predicate and deduce a discriminant for
24754 it. */
24755 if (match_expr == boolean_true_node)
24756 /* Typically happens for the default variant: it matches all cases that
24757 previous variants rejected. Don't output any matching value for
24758 this one. */
24759 continue;
24760
24761 /* The following loop tries to iterate over each discriminant
24762 possibility: single values or ranges. */
24763 while (match_expr != NULL_TREE)
24764 {
24765 tree next_round_match_expr;
24766 tree candidate_discr = NULL_TREE;
24767 dw_discr_list_ref new_node = NULL;
24768
24769 /* Possibilities are matched one after the other by nested
24770 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24771 continue with the rest at next iteration. */
24772 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24773 {
24774 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24775 match_expr = TREE_OPERAND (match_expr, 1);
24776 }
24777 else
24778 next_round_match_expr = NULL_TREE;
24779
24780 if (match_expr == boolean_false_node)
24781 /* This sub-expression matches nothing: just wait for the next
24782 one. */
24783 ;
24784
24785 else if (TREE_CODE (match_expr) == EQ_EXPR)
24786 {
24787 /* We are matching: <discr_field> == <integer_cst>
24788 This sub-expression matches a single value. */
24789 tree integer_cst = TREE_OPERAND (match_expr, 1);
24790
24791 candidate_discr
24792 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24793 struct_type);
24794
24795 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24796 if (!get_discr_value (integer_cst,
24797 &new_node->dw_discr_lower_bound))
24798 goto abort;
24799 new_node->dw_discr_range = false;
24800 }
24801
24802 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24803 {
24804 /* We are matching:
24805 <discr_field> > <integer_cst>
24806 && <discr_field> < <integer_cst>.
24807 This sub-expression matches the range of values between the
24808 two matched integer constants. Note that comparisons can be
24809 inclusive or exclusive. */
24810 tree candidate_discr_1, candidate_discr_2;
24811 tree lower_cst, upper_cst;
24812 bool lower_cst_included, upper_cst_included;
24813 tree lower_op = TREE_OPERAND (match_expr, 0);
24814 tree upper_op = TREE_OPERAND (match_expr, 1);
24815
24816 /* When the comparison is exclusive, the integer constant is not
24817 the discriminant range bound we are looking for: we will have
24818 to increment or decrement it. */
24819 if (TREE_CODE (lower_op) == GE_EXPR)
24820 lower_cst_included = true;
24821 else if (TREE_CODE (lower_op) == GT_EXPR)
24822 lower_cst_included = false;
24823 else
24824 goto abort;
24825
24826 if (TREE_CODE (upper_op) == LE_EXPR)
24827 upper_cst_included = true;
24828 else if (TREE_CODE (upper_op) == LT_EXPR)
24829 upper_cst_included = false;
24830 else
24831 goto abort;
24832
24833 /* Extract the discriminant from the first operand and check it
24834 is consistent with the same analysis in the second
24835 operand. */
24836 candidate_discr_1
24837 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24838 struct_type);
24839 candidate_discr_2
24840 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24841 struct_type);
24842 if (candidate_discr_1 == candidate_discr_2)
24843 candidate_discr = candidate_discr_1;
24844 else
24845 goto abort;
24846
24847 /* Extract bounds from both. */
24848 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24849 lower_cst = TREE_OPERAND (lower_op, 1);
24850 upper_cst = TREE_OPERAND (upper_op, 1);
24851
24852 if (!lower_cst_included)
24853 lower_cst
24854 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24855 build_int_cst (TREE_TYPE (lower_cst), 1));
24856 if (!upper_cst_included)
24857 upper_cst
24858 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24859 build_int_cst (TREE_TYPE (upper_cst), 1));
24860
24861 if (!get_discr_value (lower_cst,
24862 &new_node->dw_discr_lower_bound)
24863 || !get_discr_value (upper_cst,
24864 &new_node->dw_discr_upper_bound))
24865 goto abort;
24866
24867 new_node->dw_discr_range = true;
24868 }
24869
24870 else if ((candidate_discr
24871 = analyze_discr_in_predicate (match_expr, struct_type))
24872 && TREE_TYPE (candidate_discr) == boolean_type_node)
24873 {
24874 /* We are matching: <discr_field> for a boolean discriminant.
24875 This sub-expression matches boolean_true_node. */
24876 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24877 if (!get_discr_value (boolean_true_node,
24878 &new_node->dw_discr_lower_bound))
24879 goto abort;
24880 new_node->dw_discr_range = false;
24881 }
24882
24883 else
24884 /* Unsupported sub-expression: we cannot determine the set of
24885 matching discriminant values. Abort everything. */
24886 goto abort;
24887
24888 /* If the discriminant info is not consistent with what we saw so
24889 far, consider the analysis failed and abort everything. */
24890 if (candidate_discr == NULL_TREE
24891 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24892 goto abort;
24893 else
24894 *discr_decl = candidate_discr;
24895
24896 if (new_node != NULL)
24897 {
24898 new_node->dw_discr_next = discr_lists[i];
24899 discr_lists[i] = new_node;
24900 }
24901 match_expr = next_round_match_expr;
24902 }
24903 }
24904
24905 /* If we reach this point, we could match everything we were interested
24906 in. */
24907 return;
24908
24909 abort:
24910 /* Clean all data structure and return no result. */
24911 free (*discr_lists_p);
24912 *discr_lists_p = NULL;
24913 *discr_decl = NULL_TREE;
24914 }
24915
24916 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24917 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24918 under CONTEXT_DIE.
24919
24920 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24921 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24922 this type, which are record types, represent the available variants and each
24923 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24924 values are inferred from these attributes.
24925
24926 In trees, the offsets for the fields inside these sub-records are relative
24927 to the variant part itself, whereas the corresponding DIEs should have
24928 offset attributes that are relative to the embedding record base address.
24929 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24930 must be an expression that computes the offset of the variant part to
24931 describe in DWARF. */
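/* As an illustration (not tied to a particular front end), an Ada record

     type Rec (D : Integer) is record
        case D is
           when 1 .. 9 => X : Integer;
           when others => Y : Boolean;
        end case;
     end record;

   is expected to reach this point as a FIELD_DECL whose type is a
   QUAL_UNION_TYPE with one RECORD_TYPE member per variant, each member
   carrying a DECL_QUALIFIER predicate such as D >= 1 && D <= 9.  */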
24932
24933 static void
24934 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24935 dw_die_ref context_die)
24936 {
24937 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24938 tree variant_part_offset = vlr_ctx->variant_part_offset;
24939 struct loc_descr_context ctx = {
24940 vlr_ctx->struct_type, /* context_type */
24941 NULL_TREE, /* base_decl */
24942 NULL, /* dpi */
24943 false, /* placeholder_arg */
24944 false /* placeholder_seen */
24945 };
24946
24947 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24948 NULL_TREE if there is no such field. */
24949 tree discr_decl = NULL_TREE;
24950 dw_discr_list_ref *discr_lists;
24951 unsigned discr_lists_length = 0;
24952 unsigned i;
24953
24954 dw_die_ref dwarf_proc_die = NULL;
24955 dw_die_ref variant_part_die
24956 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24957
24958 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24959
24960 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24961 &discr_decl, &discr_lists, &discr_lists_length);
24962
24963 if (discr_decl != NULL_TREE)
24964 {
24965 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24966
24967 if (discr_die)
24968 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24969 else
24970 /* We have no DIE for the discriminant, so just discard all
24971 discriminant information in the output. */
24972 discr_decl = NULL_TREE;
24973 }
24974
24975 /* If the offset for this variant part is more complex than a constant,
24976 create a DWARF procedure for it so that we will not have to generate DWARF
24977 expressions for it for each member. */
24978 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24979 && (dwarf_version >= 3 || !dwarf_strict))
24980 {
24981 const tree dwarf_proc_fndecl
24982 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24983 build_function_type (TREE_TYPE (variant_part_offset),
24984 NULL_TREE));
24985 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24986 const dw_loc_descr_ref dwarf_proc_body
24987 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24988
24989 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24990 dwarf_proc_fndecl, context_die);
24991 if (dwarf_proc_die != NULL)
24992 variant_part_offset = dwarf_proc_call;
24993 }
24994
24995 /* Output DIEs for all variants. */
24996 i = 0;
24997 for (tree variant = TYPE_FIELDS (variant_part_type);
24998 variant != NULL_TREE;
24999 variant = DECL_CHAIN (variant), ++i)
25000 {
25001 tree variant_type = TREE_TYPE (variant);
25002 dw_die_ref variant_die;
25003
25004 /* All variants (i.e. members of a variant part) are supposed to be
25005 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25006 under these records. */
25007 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25008
25009 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25010 equate_decl_number_to_die (variant, variant_die);
25011
25012 /* Output discriminant values this variant matches, if any. */
25013 if (discr_decl == NULL || discr_lists[i] == NULL)
25014 /* If we have no discriminant information at all, or no matching values
25015 for this variant, this is probably the default variant: as the standard
25016 says, don't output any discriminant value/list attribute. */
25017 ;
25018 else if (discr_lists[i]->dw_discr_next == NULL
25019 && !discr_lists[i]->dw_discr_range)
25020 /* If there is only one accepted value, don't bother outputting a
25021 list. */
25022 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25023 else
25024 add_discr_list (variant_die, discr_lists[i]);
25025
25026 for (tree member = TYPE_FIELDS (variant_type);
25027 member != NULL_TREE;
25028 member = DECL_CHAIN (member))
25029 {
25030 struct vlr_context vlr_sub_ctx = {
25031 vlr_ctx->struct_type, /* struct_type */
25032 NULL /* variant_part_offset */
25033 };
25034 if (is_variant_part (member))
25035 {
25036 /* All offsets for fields inside variant parts are relative to
25037 the top-level embedding RECORD_TYPE's base address. On the
25038 other hand, offsets in GCC's types are relative to the
25039 nested-most variant part. So we have to sum offsets each time
25040 we recurse. */
25041
25042 vlr_sub_ctx.variant_part_offset
25043 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25044 variant_part_offset, byte_position (member));
25045 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25046 }
25047 else
25048 {
25049 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25050 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25051 }
25052 }
25053 }
25054
25055 free (discr_lists);
25056 }
25057
25058 /* Generate a DIE for a class member. */
25059
25060 static void
25061 gen_member_die (tree type, dw_die_ref context_die)
25062 {
25063 tree member;
25064 tree binfo = TYPE_BINFO (type);
25065
25066 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25067
25068 /* If this is not an incomplete type, output descriptions of each of its
25069 members. Note that as we output the DIEs necessary to represent the
25070 members of this record or union type, we will also be trying to output
25071 DIEs to represent the *types* of those members. However the `type'
25072 function (above) will specifically avoid generating type DIEs for member
25073 types *within* the list of member DIEs for this (containing) type except
25074 for those types (of members) which are explicitly marked as also being
25075 members of this (containing) type themselves. The g++ front-end can
25076 force any given type to be treated as a member of some other (containing)
25077 type by setting the TYPE_CONTEXT of the given (member) type to point to
25078 the TREE node representing the appropriate (containing) type. */
25079
25080 /* First output info about the base classes. */
25081 if (binfo && early_dwarf)
25082 {
25083 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25084 int i;
25085 tree base;
25086
25087 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25088 gen_inheritance_die (base,
25089 (accesses ? (*accesses)[i] : access_public_node),
25090 type,
25091 context_die);
25092 }
25093
25094 /* Now output info about the members. */
25095 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25096 {
25097 /* Ignore clones. */
25098 if (DECL_ABSTRACT_ORIGIN (member))
25099 continue;
25100
25101 struct vlr_context vlr_ctx = { type, NULL_TREE };
25102 bool static_inline_p
25103 = (VAR_P (member)
25104 && TREE_STATIC (member)
25105 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25106 != -1));
25107
25108 /* If we thought we were generating minimal debug info for TYPE
25109 and then changed our minds, some of the member declarations
25110 may have already been defined. Don't define them again, but
25111 do put them in the right order. */
25112
25113 if (dw_die_ref child = lookup_decl_die (member))
25114 {
25115 /* Handle inline static data members, which only have in-class
25116 declarations. */
25117 bool splice = true;
25118
25119 dw_die_ref ref = NULL;
25120 if (child->die_tag == DW_TAG_variable
25121 && child->die_parent == comp_unit_die ())
25122 {
25123 ref = get_AT_ref (child, DW_AT_specification);
25124
25125 /* For C++17 inline static data members followed by redundant
25126 out of class redeclaration, we might get here with
25127 child being the DIE created for the out of class
25128 redeclaration and with its DW_AT_specification being
25129 the DIE created for in-class definition. We want to
25130 reparent the latter, and don't want to create another
25131 DIE with DW_AT_specification in that case, because
25132 we already have one. */
25133 if (ref
25134 && static_inline_p
25135 && ref->die_tag == DW_TAG_variable
25136 && ref->die_parent == comp_unit_die ()
25137 && get_AT (ref, DW_AT_specification) == NULL)
25138 {
25139 child = ref;
25140 ref = NULL;
25141 static_inline_p = false;
25142 }
25143
25144 if (!ref)
25145 {
25146 reparent_child (child, context_die);
25147 if (dwarf_version < 5)
25148 child->die_tag = DW_TAG_member;
25149 splice = false;
25150 }
25151 }
25152
25153 if (splice)
25154 splice_child_die (context_die, child);
25155 }
25156
25157 /* Do not generate standard DWARF for variant parts if we are generating
25158 the corresponding GNAT encodings: DIEs generated for both would
25159 conflict in our mappings. */
25160 else if (is_variant_part (member)
25161 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25162 {
25163 vlr_ctx.variant_part_offset = byte_position (member);
25164 gen_variant_part (member, &vlr_ctx, context_die);
25165 }
25166 else
25167 {
25168 vlr_ctx.variant_part_offset = NULL_TREE;
25169 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25170 }
25171
25172 /* For C++ inline static data members emit immediately a DW_TAG_variable
25173 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25174 DW_AT_specification. */
25175 if (static_inline_p)
25176 {
25177 int old_extern = DECL_EXTERNAL (member);
25178 DECL_EXTERNAL (member) = 0;
25179 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25180 DECL_EXTERNAL (member) = old_extern;
25181 }
25182 }
25183 }
25184
25185 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25186 is set, we pretend that the type was never defined, so we only get the
25187 member DIEs needed by later specification DIEs. */
25188
25189 static void
25190 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25191 enum debug_info_usage usage)
25192 {
25193 if (TREE_ASM_WRITTEN (type))
25194 {
25195 /* Fill in the bounds of variable-length fields in late DWARF if
25196 still incomplete. */
25197 if (!early_dwarf && variably_modified_type_p (type, NULL))
25198 for (tree member = TYPE_FIELDS (type);
25199 member;
25200 member = DECL_CHAIN (member))
25201 fill_variable_array_bounds (TREE_TYPE (member));
25202 return;
25203 }
25204
25205 dw_die_ref type_die = lookup_type_die (type);
25206 dw_die_ref scope_die = 0;
25207 int nested = 0;
25208 int complete = (TYPE_SIZE (type)
25209 && (! TYPE_STUB_DECL (type)
25210 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25211 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25212 complete = complete && should_emit_struct_debug (type, usage);
25213
25214 if (type_die && ! complete)
25215 return;
25216
25217 if (TYPE_CONTEXT (type) != NULL_TREE
25218 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25219 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25220 nested = 1;
25221
25222 scope_die = scope_die_for (type, context_die);
25223
25224 /* Generate child DIEs for template parameters. */
25225 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25226 schedule_generic_params_dies_gen (type);
25227
25228 if (! type_die || (nested && is_cu_die (scope_die)))
25229 /* First occurrence of type or toplevel definition of nested class. */
25230 {
25231 dw_die_ref old_die = type_die;
25232
25233 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25234 ? record_type_tag (type) : DW_TAG_union_type,
25235 scope_die, type);
25236 equate_type_number_to_die (type, type_die);
25237 if (old_die)
25238 add_AT_specification (type_die, old_die);
25239 else
25240 add_name_attribute (type_die, type_tag (type));
25241 }
25242 else
25243 remove_AT (type_die, DW_AT_declaration);
25244
25245 /* If this type has been completed, then give it a byte_size attribute and
25246 then give a list of members. */
25247 if (complete && !ns_decl)
25248 {
25249 /* Prevent infinite recursion in cases where the type of some member of
25250 this type is expressed in terms of this type itself. */
25251 TREE_ASM_WRITTEN (type) = 1;
25252 add_byte_size_attribute (type_die, type);
25253 add_alignment_attribute (type_die, type);
25254 if (TYPE_STUB_DECL (type) != NULL_TREE)
25255 {
25256 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25257 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25258 }
25259
25260 /* If the first reference to this type was as the return type of an
25261 inline function, then it may not have a parent. Fix this now. */
25262 if (type_die->die_parent == NULL)
25263 add_child_die (scope_die, type_die);
25264
25265 gen_member_die (type, type_die);
25266
25267 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25268 if (TYPE_ARTIFICIAL (type))
25269 add_AT_flag (type_die, DW_AT_artificial, 1);
25270
25271 /* GNU extension: Record what type our vtable lives in. */
25272 if (TYPE_VFIELD (type))
25273 {
25274 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25275
25276 gen_type_die (vtype, context_die);
25277 add_AT_die_ref (type_die, DW_AT_containing_type,
25278 lookup_type_die (vtype));
25279 }
25280 }
25281 else
25282 {
25283 add_AT_flag (type_die, DW_AT_declaration, 1);
25284
25285 /* We don't need to do this for function-local types. */
25286 if (TYPE_STUB_DECL (type)
25287 && ! decl_function_context (TYPE_STUB_DECL (type)))
25288 vec_safe_push (incomplete_types, type);
25289 }
25290
25291 if (get_AT (type_die, DW_AT_name))
25292 add_pubtype (type, type_die);
25293 }
25294
25295 /* Generate a DIE for a subroutine _type_. */
25296
25297 static void
25298 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25299 {
25300 tree return_type = TREE_TYPE (type);
25301 dw_die_ref subr_die
25302 = new_die (DW_TAG_subroutine_type,
25303 scope_die_for (type, context_die), type);
25304
25305 equate_type_number_to_die (type, subr_die);
25306 add_prototyped_attribute (subr_die, type);
25307 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25308 context_die);
25309 add_alignment_attribute (subr_die, type);
25310 gen_formal_types_die (type, subr_die);
25311
25312 if (get_AT (subr_die, DW_AT_name))
25313 add_pubtype (type, subr_die);
25314 if ((dwarf_version >= 5 || !dwarf_strict)
25315 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25316 add_AT_flag (subr_die, DW_AT_reference, 1);
25317 if ((dwarf_version >= 5 || !dwarf_strict)
25318 && lang_hooks.types.type_dwarf_attribute (type,
25319 DW_AT_rvalue_reference) != -1)
25320 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25321 }
25322
25323 /* Generate a DIE for a type definition. */
25324
25325 static void
25326 gen_typedef_die (tree decl, dw_die_ref context_die)
25327 {
25328 dw_die_ref type_die;
25329 tree type;
25330
25331 if (TREE_ASM_WRITTEN (decl))
25332 {
25333 if (DECL_ORIGINAL_TYPE (decl))
25334 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25335 return;
25336 }
25337
25338 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25339 checks in process_scope_var and modified_type_die), this should be called
25340 only for original types. */
25341 gcc_assert (decl_ultimate_origin (decl) == NULL
25342 || decl_ultimate_origin (decl) == decl);
25343
25344 TREE_ASM_WRITTEN (decl) = 1;
25345 type_die = new_die (DW_TAG_typedef, context_die, decl);
25346
25347 add_name_and_src_coords_attributes (type_die, decl);
25348 if (DECL_ORIGINAL_TYPE (decl))
25349 {
25350 type = DECL_ORIGINAL_TYPE (decl);
25351 if (type == error_mark_node)
25352 return;
25353
25354 gcc_assert (type != TREE_TYPE (decl));
25355 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25356 }
25357 else
25358 {
25359 type = TREE_TYPE (decl);
25360 if (type == error_mark_node)
25361 return;
25362
25363 if (is_naming_typedef_decl (TYPE_NAME (type)))
25364 {
25365 /* Here, we are in the case of decl being a typedef naming
25366 an anonymous type, e.g:
25367 typedef struct {...} foo;
25368 In that case TREE_TYPE (decl) is not a typedef variant
25369 type and TYPE_NAME of the anonymous type is set to the
25370 TYPE_DECL of the typedef. This construct is emitted by
25371 the C++ FE.
25372
25373 TYPE is the anonymous struct named by the typedef
25374 DECL. As we need the DW_AT_type attribute of the
25375 DW_TAG_typedef to point to the DIE of TYPE, let's
25376 generate that DIE right away. add_type_attribute
25377 called below will then pick (via lookup_type_die) that
25378 anonymous struct DIE. */
25379 if (!TREE_ASM_WRITTEN (type))
25380 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25381
25382 /* This is a GNU Extension. We are adding a
25383 DW_AT_linkage_name attribute to the DIE of the
25384 anonymous struct TYPE. The value of that attribute
25385 is the name of the typedef decl naming the anonymous
25386 struct. This greatly eases the work of consumers of
25387 this debug info. */
25388 add_linkage_name_raw (lookup_type_die (type), decl);
25389 }
25390 }
25391
25392 add_type_attribute (type_die, type, decl_quals (decl), false,
25393 context_die);
25394
25395 if (is_naming_typedef_decl (decl))
25396 /* We want that all subsequent calls to lookup_type_die with
25397 TYPE in argument yield the DW_TAG_typedef we have just
25398 created. */
25399 equate_type_number_to_die (type, type_die);
25400
25401 add_alignment_attribute (type_die, TREE_TYPE (decl));
25402
25403 add_accessibility_attribute (type_die, decl);
25404
25405 if (DECL_ABSTRACT_P (decl))
25406 equate_decl_number_to_die (decl, type_die);
25407
25408 if (get_AT (type_die, DW_AT_name))
25409 add_pubtype (decl, type_die);
25410 }
25411
25412 /* Generate a DIE for a struct, class, enum or union type. */
25413
25414 static void
25415 gen_tagged_type_die (tree type,
25416 dw_die_ref context_die,
25417 enum debug_info_usage usage)
25418 {
25419 if (type == NULL_TREE
25420 || !is_tagged_type (type))
25421 return;
25422
25423 if (TREE_ASM_WRITTEN (type))
25424 ;
25425 /* If this is a nested type whose containing class hasn't been written
25426 out yet, writing it out will cover this one, too. This does not apply
25427 to instantiations of member class templates; they need to be added to
25428 the containing class as they are generated. FIXME: This hurts the
25429 idea of combining type decls from multiple TUs, since we can't predict
25430 what set of template instantiations we'll get. */
25431 else if (TYPE_CONTEXT (type)
25432 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25433 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25434 {
25435 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25436
25437 if (TREE_ASM_WRITTEN (type))
25438 return;
25439
25440 /* If that failed, attach ourselves to the stub. */
25441 context_die = lookup_type_die (TYPE_CONTEXT (type));
25442 }
25443 else if (TYPE_CONTEXT (type) != NULL_TREE
25444 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25445 {
25446 /* If this type is local to a function that hasn't been written
25447 out yet, use a NULL context for now; it will be fixed up in
25448 decls_for_scope. */
25449 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25450 /* A declaration DIE doesn't count; nested types need to go in the
25451 specification. */
25452 if (context_die && is_declaration_die (context_die))
25453 context_die = NULL;
25454 }
25455 else
25456 context_die = declare_in_namespace (type, context_die);
25457
25458 if (TREE_CODE (type) == ENUMERAL_TYPE)
25459 {
25460 /* This might have been written out by the call to
25461 declare_in_namespace. */
25462 if (!TREE_ASM_WRITTEN (type))
25463 gen_enumeration_type_die (type, context_die);
25464 }
25465 else
25466 gen_struct_or_union_type_die (type, context_die, usage);
25467
25468 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25469 it up if it is ever completed. gen_*_type_die will set it for us
25470 when appropriate. */
25471 }
25472
25473 /* Generate a type description DIE. */
25474
25475 static void
25476 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25477 enum debug_info_usage usage)
25478 {
25479 struct array_descr_info info;
25480
25481 if (type == NULL_TREE || type == error_mark_node)
25482 return;
25483
25484 if (flag_checking && type)
25485 verify_type (type);
25486
25487 if (TYPE_NAME (type) != NULL_TREE
25488 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25489 && is_redundant_typedef (TYPE_NAME (type))
25490 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25491 /* The DECL of this type is a typedef we don't want to emit debug
25492 info for but we want debug info for its underlying typedef.
25493 This can happen for, e.g., the injected-class-name of a C++
25494 type. */
25495 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25496
25497 /* If TYPE is a typedef type variant, let's generate debug info
25498 for the parent typedef which TYPE is a type of. */
25499 if (typedef_variant_p (type))
25500 {
25501 if (TREE_ASM_WRITTEN (type))
25502 return;
25503
25504 tree name = TYPE_NAME (type);
25505 tree origin = decl_ultimate_origin (name);
25506 if (origin != NULL && origin != name)
25507 {
25508 gen_decl_die (origin, NULL, NULL, context_die);
25509 return;
25510 }
25511
25512 /* Prevent broken recursion; we can't hand off to the same type. */
25513 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25514
25515 /* Give typedefs the right scope. */
25516 context_die = scope_die_for (type, context_die);
25517
25518 TREE_ASM_WRITTEN (type) = 1;
25519
25520 gen_decl_die (name, NULL, NULL, context_die);
25521 return;
25522 }
25523
25524 /* If type is an anonymous tagged type named by a typedef, let's
25525 generate debug info for the typedef. */
25526 if (is_naming_typedef_decl (TYPE_NAME (type)))
25527 {
25528 /* Give typedefs the right scope. */
25529 context_die = scope_die_for (type, context_die);
25530
25531 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25532 return;
25533 }
25534
25535 if (lang_hooks.types.get_debug_type)
25536 {
25537 tree debug_type = lang_hooks.types.get_debug_type (type);
25538
25539 if (debug_type != NULL_TREE && debug_type != type)
25540 {
25541 gen_type_die_with_usage (debug_type, context_die, usage);
25542 return;
25543 }
25544 }
25545
25546 /* We are going to output a DIE to represent the unqualified version
25547 of this type (i.e. without any const or volatile qualifiers) so
25548 get the main variant (i.e. the unqualified version) of this type
25549 now. (Vectors and arrays are special because the debugging info is in the
25550 cloned type itself. Similarly function/method types can contain extra
25551 ref-qualification). */
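  /* For instance, a C++ ref-qualified method type such as "void () &" is
     typically represented as a variant of the unqualified type, so taking
     TYPE_MAIN_VARIANT directly would drop the ref-qualifier; hence the scan
     over the variant list below.  */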
25552 if (TREE_CODE (type) == FUNCTION_TYPE
25553 || TREE_CODE (type) == METHOD_TYPE)
25554 {
25555 /* For function/method types, can't use type_main_variant here,
25556 because that can have different ref-qualifiers for C++,
25557 but try to canonicalize. */
25558 tree main = TYPE_MAIN_VARIANT (type);
25559 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25560 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25561 && check_base_type (t, main)
25562 && check_lang_type (t, type))
25563 {
25564 type = t;
25565 break;
25566 }
25567 }
25568 else if (TREE_CODE (type) != VECTOR_TYPE
25569 && TREE_CODE (type) != ARRAY_TYPE)
25570 type = type_main_variant (type);
25571
25572 /* If this is an array type with hidden descriptor, handle it first. */
25573 if (!TREE_ASM_WRITTEN (type)
25574 && lang_hooks.types.get_array_descr_info)
25575 {
25576 memset (&info, 0, sizeof (info));
25577 if (lang_hooks.types.get_array_descr_info (type, &info))
25578 {
25579 /* Fortran sometimes emits array types with no dimension. */
25580 gcc_assert (info.ndimensions >= 0
25581 && (info.ndimensions
25582 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25583 gen_descr_array_type_die (type, &info, context_die);
25584 TREE_ASM_WRITTEN (type) = 1;
25585 return;
25586 }
25587 }
25588
25589 if (TREE_ASM_WRITTEN (type))
25590 {
25591 /* Variable-length types may be incomplete even if
25592 TREE_ASM_WRITTEN. For such types, fall through to
25593 gen_array_type_die() and possibly fill in
25594 DW_AT_{upper,lower}_bound attributes. */
25595 if ((TREE_CODE (type) != ARRAY_TYPE
25596 && TREE_CODE (type) != RECORD_TYPE
25597 && TREE_CODE (type) != UNION_TYPE
25598 && TREE_CODE (type) != QUAL_UNION_TYPE)
25599 || !variably_modified_type_p (type, NULL))
25600 return;
25601 }
25602
25603 switch (TREE_CODE (type))
25604 {
25605 case ERROR_MARK:
25606 break;
25607
25608 case POINTER_TYPE:
25609 case REFERENCE_TYPE:
25610 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25611 ensures that the gen_type_die recursion will terminate even if the
25612 type is recursive. Recursive types are possible in Ada. */
25613 /* ??? We could perhaps do this for all types before the switch
25614 statement. */
25615 TREE_ASM_WRITTEN (type) = 1;
25616
25617 /* For these types, all that is required is that we output a DIE (or a
25618 set of DIEs) to represent the "basis" type. */
25619 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25620 DINFO_USAGE_IND_USE);
25621 break;
25622
25623 case OFFSET_TYPE:
25624 /* This code is used for C++ pointer-to-data-member types.
25625 Output a description of the relevant class type. */
25626 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25627 DINFO_USAGE_IND_USE);
25628
25629 /* Output a description of the type of the object pointed to. */
25630 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25631 DINFO_USAGE_IND_USE);
25632
25633 /* Now output a DIE to represent this pointer-to-data-member type
25634 itself. */
25635 gen_ptr_to_mbr_type_die (type, context_die);
25636 break;
25637
25638 case FUNCTION_TYPE:
25639 /* Force out return type (in case it wasn't forced out already). */
25640 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25641 DINFO_USAGE_DIR_USE);
25642 gen_subroutine_type_die (type, context_die);
25643 break;
25644
25645 case METHOD_TYPE:
25646 /* Force out return type (in case it wasn't forced out already). */
25647 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25648 DINFO_USAGE_DIR_USE);
25649 gen_subroutine_type_die (type, context_die);
25650 break;
25651
25652 case ARRAY_TYPE:
25653 case VECTOR_TYPE:
25654 gen_array_type_die (type, context_die);
25655 break;
25656
25657 case ENUMERAL_TYPE:
25658 case RECORD_TYPE:
25659 case UNION_TYPE:
25660 case QUAL_UNION_TYPE:
25661 gen_tagged_type_die (type, context_die, usage);
25662 return;
25663
25664 case VOID_TYPE:
25665 case INTEGER_TYPE:
25666 case REAL_TYPE:
25667 case FIXED_POINT_TYPE:
25668 case COMPLEX_TYPE:
25669 case BOOLEAN_TYPE:
25670 /* No DIEs needed for fundamental types. */
25671 break;
25672
25673 case NULLPTR_TYPE:
25674 case LANG_TYPE:
25675 /* Just use DW_TAG_unspecified_type. */
25676 {
25677 dw_die_ref type_die = lookup_type_die (type);
25678 if (type_die == NULL)
25679 {
25680 tree name = TYPE_IDENTIFIER (type);
25681 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25682 type);
25683 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25684 equate_type_number_to_die (type, type_die);
25685 }
25686 }
25687 break;
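/* Illustration only (a hedged sketch): in C++, decltype(nullptr),
   i.e. std::nullptr_t, reaches this case; it is described solely by a
   DW_TAG_unspecified_type DIE carrying its name, with no size, encoding
   or other attributes.  */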
25688
25689 default:
25690 if (is_cxx_auto (type))
25691 {
25692 tree name = TYPE_IDENTIFIER (type);
25693 dw_die_ref *die = (name == get_identifier ("auto")
25694 ? &auto_die : &decltype_auto_die);
25695 if (!*die)
25696 {
25697 *die = new_die (DW_TAG_unspecified_type,
25698 comp_unit_die (), NULL_TREE);
25699 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25700 }
25701 equate_type_number_to_die (type, *die);
25702 break;
25703 }
25704 gcc_unreachable ();
25705 }
25706
25707 TREE_ASM_WRITTEN (type) = 1;
25708 }
25709
25710 static void
25711 gen_type_die (tree type, dw_die_ref context_die)
25712 {
25713 if (type != error_mark_node)
25714 {
25715 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25716 if (flag_checking)
25717 {
25718 dw_die_ref die = lookup_type_die (type);
25719 if (die)
25720 check_die (die);
25721 }
25722 }
25723 }
25724
25725 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25726 things which are local to the given block. */
25727
25728 static void
25729 gen_block_die (tree stmt, dw_die_ref context_die)
25730 {
25731 int must_output_die = 0;
25732 bool inlined_func;
25733
25734 /* Ignore blocks that are NULL. */
25735 if (stmt == NULL_TREE)
25736 return;
25737
25738 inlined_func = inlined_function_outer_scope_p (stmt);
25739
25740 /* If the block is one fragment of a non-contiguous block, do not
25741 process the variables, since they will have been done by the
25742 origin block. Do process subblocks. */
25743 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25744 {
25745 tree sub;
25746
25747 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25748 gen_block_die (sub, context_die);
25749
25750 return;
25751 }
25752
25753 /* Determine if we need to output any Dwarf DIEs at all to represent this
25754 block. */
25755 if (inlined_func)
25756 /* The outer scopes for inlinings *must* always be represented. We
25757 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25758 must_output_die = 1;
25759 else if (lookup_block_die (stmt))
25760 /* If we already have a DIE then it was filled early. Meanwhile
25761 we might have pruned all BLOCK_VARS as optimized out, but we
25762 still want to generate high/low PC attributes, so output it. */
25763 must_output_die = 1;
25764 else if (TREE_USED (stmt)
25765 || TREE_ASM_WRITTEN (stmt))
25766 {
25767 /* Determine if this block directly contains any "significant"
25768 local declarations which we will need to output DIEs for. */
25769 if (debug_info_level > DINFO_LEVEL_TERSE)
25770 {
25771 /* We are not in terse mode so any local declaration that
25772 is not ignored for debug purposes counts as being a
25773 "significant" one. */
25774 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25775 must_output_die = 1;
25776 else
25777 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25778 if (!DECL_IGNORED_P (var))
25779 {
25780 must_output_die = 1;
25781 break;
25782 }
25783 }
25784 else if (!dwarf2out_ignore_block (stmt))
25785 must_output_die = 1;
25786 }
25787
25788 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25789 DIE for any block which contains no significant local declarations at
25790 all. Rather, in such cases we just call `decls_for_scope' so that any
25791 needed Dwarf info for any sub-blocks will get properly generated. Note
25792 that in terse mode, our definition of what constitutes a "significant"
25793 local declaration gets restricted to include only inlined function
25794 instances and local (nested) function definitions. */
25795 if (must_output_die)
25796 {
25797 if (inlined_func)
25798 gen_inlined_subroutine_die (stmt, context_die);
25799 else
25800 gen_lexical_block_die (stmt, context_die);
25801 }
25802 else
25803 decls_for_scope (stmt, context_die);
25804 }
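/* Illustration only (hypothetical C source): in

       void f (void) { { int i = 0; g (&i); } }

   the inner braces form a BLOCK whose variable i is not DECL_IGNORED_P,
   so a DW_TAG_lexical_block DIE is emitted for it, whereas a block with
   no significant locals would just fall through to decls_for_scope.
   Had the inner scope come from an inlined call, the same BLOCK would
   instead yield a DW_TAG_inlined_subroutine DIE.  */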
25805
25806 /* Process variable DECL (or variable with origin ORIGIN) within
25807 block STMT and add it to CONTEXT_DIE. */
25808 static void
25809 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25810 {
25811 dw_die_ref die;
25812 tree decl_or_origin = decl ? decl : origin;
25813
25814 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25815 die = lookup_decl_die (decl_or_origin);
25816 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25817 {
25818 if (TYPE_DECL_IS_STUB (decl_or_origin))
25819 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25820 else
25821 die = lookup_decl_die (decl_or_origin);
25822 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25823 if (! die && ! early_dwarf)
25824 return;
25825 }
25826 else
25827 die = NULL;
25828
25829 /* Avoid creating DIEs for local typedefs and concrete static variables that
25830 will only be pruned later. */
25831 if ((origin || decl_ultimate_origin (decl))
25832 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25833 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25834 {
25835 origin = decl_ultimate_origin (decl_or_origin);
25836 if (decl && VAR_P (decl) && die != NULL)
25837 {
25838 die = lookup_decl_die (origin);
25839 if (die != NULL)
25840 equate_decl_number_to_die (decl, die);
25841 }
25842 return;
25843 }
25844
25845 if (die != NULL && die->die_parent == NULL)
25846 add_child_die (context_die, die);
25847 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25848 {
25849 if (early_dwarf)
25850 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25851 stmt, context_die);
25852 }
25853 else
25854 {
25855 if (decl && DECL_P (decl))
25856 {
25857 die = lookup_decl_die (decl);
25858
25859 /* Early created DIEs do not have a parent as the decls refer
25860 to the function as DECL_CONTEXT rather than the BLOCK. */
25861 if (die && die->die_parent == NULL)
25862 {
25863 gcc_assert (in_lto_p);
25864 add_child_die (context_die, die);
25865 }
25866 }
25867
25868 gen_decl_die (decl, origin, NULL, context_die);
25869 }
25870 }
25871
25872 /* Generate all of the decls declared within a given scope and (recursively)
25873 all of its sub-blocks. */
25874
25875 static void
25876 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25877 {
25878 tree decl;
25879 unsigned int i;
25880 tree subblocks;
25881
25882 /* Ignore NULL blocks. */
25883 if (stmt == NULL_TREE)
25884 return;
25885
25886 /* Output the DIEs to represent all of the data objects and typedefs
25887 declared directly within this block but not within any nested
25888 sub-blocks. Also, nested function and tag DIEs have been
25889 generated with a parent of NULL; fix that up now. We don't
25890 have to do this if we're at -g1. */
25891 if (debug_info_level > DINFO_LEVEL_TERSE)
25892 {
25893 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25894 process_scope_var (stmt, decl, NULL_TREE, context_die);
25895 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with an abstract
25896 origin; avoid doing this twice, as we have no good way to tell
25897 whether we've already done it. */
25898 if (! early_dwarf)
25899 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25900 {
25901 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25902 if (decl == current_function_decl)
25903 /* Ignore declarations of the current function: although they
25904 are declarations, gen_subprogram_die would treat them
25905 as definitions again because they are equal to
25906 current_function_decl, and would endlessly recurse. */;
25907 else if (TREE_CODE (decl) == FUNCTION_DECL)
25908 process_scope_var (stmt, decl, NULL_TREE, context_die);
25909 else
25910 process_scope_var (stmt, NULL_TREE, decl, context_die);
25911 }
25912 }
25913
25914 /* Even if we're at -g1, we need to process the subblocks in order to get
25915 inlined call information. */
25916
25917 /* Output the DIEs to represent all sub-blocks (and the items declared
25918 therein) of this block. */
25919 if (recurse)
25920 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25921 subblocks != NULL;
25922 subblocks = BLOCK_CHAIN (subblocks))
25923 gen_block_die (subblocks, context_die);
25924 }
25925
25926 /* Is this a typedef we can avoid emitting? */
25927
25928 static bool
25929 is_redundant_typedef (const_tree decl)
25930 {
25931 if (TYPE_DECL_IS_STUB (decl))
25932 return true;
25933
25934 if (DECL_ARTIFICIAL (decl)
25935 && DECL_CONTEXT (decl)
25936 && is_tagged_type (DECL_CONTEXT (decl))
25937 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25938 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25939 /* Also ignore the artificial member typedef for the class name. */
25940 return true;
25941
25942 return false;
25943 }
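/* Illustration only (hypothetical C++ source): for

       class C { };

   the front end creates an artificial member TYPE_DECL named C inside C
   (the injected class name); is_redundant_typedef returns true for it,
   so no separate DW_TAG_typedef DIE is emitted for that member.  */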
25944
25945 /* Return TRUE if TYPE is a typedef that names a type for linkage
25946 purposes. Such typedefs are produced by the C++ FE for
25947 constructs like:
25948
25949 typedef struct {...} foo;
25950
25951 In that case, there is no typedef variant type produced for foo.
25952 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25953 struct type. */
25954
25955 static bool
25956 is_naming_typedef_decl (const_tree decl)
25957 {
25958 if (decl == NULL_TREE
25959 || TREE_CODE (decl) != TYPE_DECL
25960 || DECL_NAMELESS (decl)
25961 || !is_tagged_type (TREE_TYPE (decl))
25962 || DECL_IS_BUILTIN (decl)
25963 || is_redundant_typedef (decl)
25964 /* It looks like Ada produces TYPE_DECLs that are very similar
25965 to C++ naming typedefs but that have different
25966 semantics. Let's be specific to C++ for now. */
25967 || !is_cxx (decl))
25968 return FALSE;
25969
25970 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25971 && TYPE_NAME (TREE_TYPE (decl)) == decl
25972 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25973 != TYPE_NAME (TREE_TYPE (decl))));
25974 }
25975
25976 /* Looks up the DIE for a context. */
25977
25978 static inline dw_die_ref
25979 lookup_context_die (tree context)
25980 {
25981 if (context)
25982 {
25983 /* Find die that represents this context. */
25984 if (TYPE_P (context))
25985 {
25986 context = TYPE_MAIN_VARIANT (context);
25987 dw_die_ref ctx = lookup_type_die (context);
25988 if (!ctx)
25989 return NULL;
25990 return strip_naming_typedef (context, ctx);
25991 }
25992 else
25993 return lookup_decl_die (context);
25994 }
25995 return comp_unit_die ();
25996 }
25997
25998 /* Returns the DIE for a context. */
25999
26000 static inline dw_die_ref
26001 get_context_die (tree context)
26002 {
26003 if (context)
26004 {
26005 /* Find die that represents this context. */
26006 if (TYPE_P (context))
26007 {
26008 context = TYPE_MAIN_VARIANT (context);
26009 return strip_naming_typedef (context, force_type_die (context));
26010 }
26011 else
26012 return force_decl_die (context);
26013 }
26014 return comp_unit_die ();
26015 }
26016
26017 /* Returns the DIE for decl. A DIE will always be returned. */
26018
26019 static dw_die_ref
26020 force_decl_die (tree decl)
26021 {
26022 dw_die_ref decl_die;
26023 unsigned saved_external_flag;
26024 tree save_fn = NULL_TREE;
26025 decl_die = lookup_decl_die (decl);
26026 if (!decl_die)
26027 {
26028 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26029
26030 decl_die = lookup_decl_die (decl);
26031 if (decl_die)
26032 return decl_die;
26033
26034 switch (TREE_CODE (decl))
26035 {
26036 case FUNCTION_DECL:
26037 /* Clear current_function_decl, so that gen_subprogram_die thinks
26038 that this is a declaration. At this point, we just want to force
26039 a declaration DIE. */
26040 save_fn = current_function_decl;
26041 current_function_decl = NULL_TREE;
26042 gen_subprogram_die (decl, context_die);
26043 current_function_decl = save_fn;
26044 break;
26045
26046 case VAR_DECL:
26047 /* Set the external flag to force a declaration DIE. Restore it after
26048 the gen_decl_die() call. */
26049 saved_external_flag = DECL_EXTERNAL (decl);
26050 DECL_EXTERNAL (decl) = 1;
26051 gen_decl_die (decl, NULL, NULL, context_die);
26052 DECL_EXTERNAL (decl) = saved_external_flag;
26053 break;
26054
26055 case NAMESPACE_DECL:
26056 if (dwarf_version >= 3 || !dwarf_strict)
26057 dwarf2out_decl (decl);
26058 else
26059 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26060 decl_die = comp_unit_die ();
26061 break;
26062
26063 case TRANSLATION_UNIT_DECL:
26064 decl_die = comp_unit_die ();
26065 break;
26066
26067 default:
26068 gcc_unreachable ();
26069 }
26070
26071 /* We should be able to find the DIE now. */
26072 if (!decl_die)
26073 decl_die = lookup_decl_die (decl);
26074 gcc_assert (decl_die);
26075 }
26076
26077 return decl_die;
26078 }
26079
26080 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26081 always returned. */
26082
26083 static dw_die_ref
26084 force_type_die (tree type)
26085 {
26086 dw_die_ref type_die;
26087
26088 type_die = lookup_type_die (type);
26089 if (!type_die)
26090 {
26091 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26092
26093 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26094 false, context_die);
26095 gcc_assert (type_die);
26096 }
26097 return type_die;
26098 }
26099
26100 /* Force out any required namespaces to be able to output DECL,
26101 and return the new context_die for it, if it's changed. */
26102
26103 static dw_die_ref
26104 setup_namespace_context (tree thing, dw_die_ref context_die)
26105 {
26106 tree context = (DECL_P (thing)
26107 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26108 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26109 /* Force out the namespace. */
26110 context_die = force_decl_die (context);
26111
26112 return context_die;
26113 }
26114
26115 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26116 type) within its namespace, if appropriate.
26117
26118 For compatibility with older debuggers, namespace DIEs only contain
26119 declarations; all definitions are emitted at CU scope, with
26120 DW_AT_specification pointing to the declaration (like with class
26121 members). */
26122
26123 static dw_die_ref
26124 declare_in_namespace (tree thing, dw_die_ref context_die)
26125 {
26126 dw_die_ref ns_context;
26127
26128 if (debug_info_level <= DINFO_LEVEL_TERSE)
26129 return context_die;
26130
26131 /* External declarations in the local scope only need to be emitted
26132 once, not once in the namespace and once in the scope.
26133
26134 This avoids declaring the `extern' below in the
26135 namespace DIE as well as in the innermost scope:
26136
26137 namespace S
26138 {
26139 int i=5;
26140 int foo()
26141 {
26142 int i=8;
26143 extern int i;
26144 return i;
26145 }
26146 }
26147 */
26148 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26149 return context_die;
26150
26151 /* If this decl is from an inlined function, then don't try to emit it in its
26152 namespace, as we will get confused. It would have already been emitted
26153 when the abstract instance of the inline function was emitted anyway. */
26154 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26155 return context_die;
26156
26157 ns_context = setup_namespace_context (thing, context_die);
26158
26159 if (ns_context != context_die)
26160 {
26161 if (is_fortran () || is_dlang ())
26162 return ns_context;
26163 if (DECL_P (thing))
26164 gen_decl_die (thing, NULL, NULL, ns_context);
26165 else
26166 gen_type_die (thing, ns_context);
26167 }
26168 return context_die;
26169 }
26170
26171 /* Generate a DIE for a namespace or namespace alias. */
26172
26173 static void
26174 gen_namespace_die (tree decl, dw_die_ref context_die)
26175 {
26176 dw_die_ref namespace_die;
26177
26178 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26179 they are an alias of. */
26180 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26181 {
26182 /* Output a real namespace or module. */
26183 context_die = setup_namespace_context (decl, comp_unit_die ());
26184 namespace_die = new_die (is_fortran () || is_dlang ()
26185 ? DW_TAG_module : DW_TAG_namespace,
26186 context_die, decl);
26187 /* For Fortran modules defined in a different CU, don't add src coords. */
26188 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26189 {
26190 const char *name = dwarf2_name (decl, 0);
26191 if (name)
26192 add_name_attribute (namespace_die, name);
26193 }
26194 else
26195 add_name_and_src_coords_attributes (namespace_die, decl);
26196 if (DECL_EXTERNAL (decl))
26197 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26198 equate_decl_number_to_die (decl, namespace_die);
26199 }
26200 else
26201 {
26202 /* Output a namespace alias. */
26203
26204 /* Force out the namespace we are an alias of, if necessary. */
26205 dw_die_ref origin_die
26206 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26207
26208 if (DECL_FILE_SCOPE_P (decl)
26209 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26210 context_die = setup_namespace_context (decl, comp_unit_die ());
26211 /* Now create the namespace alias DIE. */
26212 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26213 add_name_and_src_coords_attributes (namespace_die, decl);
26214 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26215 equate_decl_number_to_die (decl, namespace_die);
26216 }
26217 if ((dwarf_version >= 5 || !dwarf_strict)
26218 && lang_hooks.decls.decl_dwarf_attribute (decl,
26219 DW_AT_export_symbols) == 1)
26220 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26221
26222 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26223 if (want_pubnames ())
26224 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26225 }
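/* Illustration only (hypothetical C++ source, not compiled here):

       namespace real { int v; }
       namespace alias = real;

   "real" is emitted above as a DW_TAG_namespace DIE, while "alias" has
   DECL_ABSTRACT_ORIGIN pointing at "real" and therefore becomes a
   DW_TAG_imported_declaration whose DW_AT_import refers to the DIE of
   "real".  */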
26226
26227 /* Generate Dwarf debug information for a decl described by DECL.
26228 The return value is currently only meaningful for PARM_DECLs,
26229 for all other decls it returns NULL.
26230
26231 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26232 It can be NULL otherwise. */
26233
26234 static dw_die_ref
26235 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26236 dw_die_ref context_die)
26237 {
26238 tree decl_or_origin = decl ? decl : origin;
26239 tree class_origin = NULL, ultimate_origin;
26240
26241 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26242 return NULL;
26243
26244 switch (TREE_CODE (decl_or_origin))
26245 {
26246 case ERROR_MARK:
26247 break;
26248
26249 case CONST_DECL:
26250 if (!is_fortran () && !is_ada () && !is_dlang ())
26251 {
26252 /* The individual enumerators of an enum type get output when we output
26253 the Dwarf representation of the relevant enum type itself. */
26254 break;
26255 }
26256
26257 /* Emit its type. */
26258 gen_type_die (TREE_TYPE (decl), context_die);
26259
26260 /* And its containing namespace. */
26261 context_die = declare_in_namespace (decl, context_die);
26262
26263 gen_const_die (decl, context_die);
26264 break;
26265
26266 case FUNCTION_DECL:
26267 #if 0
26268 /* FIXME */
26269 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26270 on local redeclarations of global functions. That seems broken. */
26271 if (current_function_decl != decl)
26272 /* This is only a declaration. */;
26273 #endif
26274
26275 /* We should have abstract copies already and should not generate
26276 stray type DIEs in late LTO dumping. */
26277 if (! early_dwarf)
26278 ;
26279
26280 /* If we're emitting a clone, emit info for the abstract instance. */
26281 else if (origin || DECL_ORIGIN (decl) != decl)
26282 dwarf2out_abstract_function (origin
26283 ? DECL_ORIGIN (origin)
26284 : DECL_ABSTRACT_ORIGIN (decl));
26285
26286 /* If we're emitting a possibly inlined function emit it as
26287 abstract instance. */
26288 else if (cgraph_function_possibly_inlined_p (decl)
26289 && ! DECL_ABSTRACT_P (decl)
26290 && ! class_or_namespace_scope_p (context_die)
26291 /* dwarf2out_abstract_function won't emit a die if this is just
26292 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26293 that case, because that works only if we have a die. */
26294 && DECL_INITIAL (decl) != NULL_TREE)
26295 dwarf2out_abstract_function (decl);
26296
26297 /* Otherwise we're emitting the primary DIE for this decl. */
26298 else if (debug_info_level > DINFO_LEVEL_TERSE)
26299 {
26300 /* Before we describe the FUNCTION_DECL itself, make sure that we
26301 have its containing type. */
26302 if (!origin)
26303 origin = decl_class_context (decl);
26304 if (origin != NULL_TREE)
26305 gen_type_die (origin, context_die);
26306
26307 /* And its return type. */
26308 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26309
26310 /* And its virtual context. */
26311 if (DECL_VINDEX (decl) != NULL_TREE)
26312 gen_type_die (DECL_CONTEXT (decl), context_die);
26313
26314 /* Make sure we have a member DIE for decl. */
26315 if (origin != NULL_TREE)
26316 gen_type_die_for_member (origin, decl, context_die);
26317
26318 /* And its containing namespace. */
26319 context_die = declare_in_namespace (decl, context_die);
26320 }
26321
26322 /* Now output a DIE to represent the function itself. */
26323 if (decl)
26324 gen_subprogram_die (decl, context_die);
26325 break;
26326
26327 case TYPE_DECL:
26328 /* If we are in terse mode, don't generate any DIEs to represent any
26329 actual typedefs. */
26330 if (debug_info_level <= DINFO_LEVEL_TERSE)
26331 break;
26332
26333 /* In the special case of a TYPE_DECL node representing the declaration
26334 of some type tag, if the given TYPE_DECL is marked as having been
26335 instantiated from some other (original) TYPE_DECL node (e.g. one which
26336 was generated within the original definition of an inline function) we
26337 used to generate a special (abbreviated) DW_TAG_structure_type,
26338 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26339 should actually be referencing those DIEs, as variable DIEs with that
26340 type would already be emitted in the abstract origin, so it was always
26341 removed during unused type pruning. Don't add anything in this
26342 case. */
26343 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26344 break;
26345
26346 if (is_redundant_typedef (decl))
26347 gen_type_die (TREE_TYPE (decl), context_die);
26348 else
26349 /* Output a DIE to represent the typedef itself. */
26350 gen_typedef_die (decl, context_die);
26351 break;
26352
26353 case LABEL_DECL:
26354 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26355 gen_label_die (decl, context_die);
26356 break;
26357
26358 case VAR_DECL:
26359 case RESULT_DECL:
26360 /* If we are in terse mode, don't generate any DIEs to represent any
26361 variable declarations or definitions unless the variable is external. */
26362 if (debug_info_level < DINFO_LEVEL_TERSE
26363 || (debug_info_level == DINFO_LEVEL_TERSE
26364 && !TREE_PUBLIC (decl_or_origin)))
26365 break;
26366
26367 if (debug_info_level > DINFO_LEVEL_TERSE)
26368 {
26369 /* Avoid generating stray type DIEs during late dwarf dumping.
26370 All types have been dumped early. */
26371 if (early_dwarf
26372 /* ??? But in LTRANS we cannot annotate early created variably
26373 modified type DIEs without copying them and adjusting all
26374 references to them. Dump them again as happens for inlining
26375 which copies both the decl and the types. */
26376 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26377 in VLA bound information for example. */
26378 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26379 current_function_decl)))
26380 {
26381 /* Output any DIEs that are needed to specify the type of this data
26382 object. */
26383 if (decl_by_reference_p (decl_or_origin))
26384 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26385 else
26386 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26387 }
26388
26389 if (early_dwarf)
26390 {
26391 /* And its containing type. */
26392 class_origin = decl_class_context (decl_or_origin);
26393 if (class_origin != NULL_TREE)
26394 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26395
26396 /* And its containing namespace. */
26397 context_die = declare_in_namespace (decl_or_origin, context_die);
26398 }
26399 }
26400
26401 /* Now output the DIE to represent the data object itself. This gets
26402 complicated because of the possibility that the VAR_DECL really
26403 represents an inlined instance of a formal parameter for an inline
26404 function. */
26405 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26406 if (ultimate_origin != NULL_TREE
26407 && TREE_CODE (ultimate_origin) == PARM_DECL)
26408 gen_formal_parameter_die (decl, origin,
26409 true /* Emit name attribute. */,
26410 context_die);
26411 else
26412 gen_variable_die (decl, origin, context_die);
26413 break;
26414
26415 case FIELD_DECL:
26416 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26417 /* Ignore the nameless fields that are used to skip bits, but handle C++
26418 anonymous unions and structs. */
26419 if (DECL_NAME (decl) != NULL_TREE
26420 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26421 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26422 {
26423 gen_type_die (member_declared_type (decl), context_die);
26424 gen_field_die (decl, ctx, context_die);
26425 }
26426 break;
26427
26428 case PARM_DECL:
26429 /* Avoid generating stray type DIEs during late dwarf dumping.
26430 All types have been dumped early. */
26431 if (early_dwarf
26432 /* ??? But in LTRANS we cannot annotate early created variably
26433 modified type DIEs without copying them and adjusting all
26434 references to them. Dump them again as happens for inlining
26435 which copies both the decl and the types. */
26436 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26437 in VLA bound information for example. */
26438 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26439 current_function_decl)))
26440 {
26441 if (DECL_BY_REFERENCE (decl_or_origin))
26442 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26443 else
26444 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26445 }
26446 return gen_formal_parameter_die (decl, origin,
26447 true /* Emit name attribute. */,
26448 context_die);
26449
26450 case NAMESPACE_DECL:
26451 if (dwarf_version >= 3 || !dwarf_strict)
26452 gen_namespace_die (decl, context_die);
26453 break;
26454
26455 case IMPORTED_DECL:
26456 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26457 DECL_CONTEXT (decl), context_die);
26458 break;
26459
26460 case NAMELIST_DECL:
26461 gen_namelist_decl (DECL_NAME (decl), context_die,
26462 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26463 break;
26464
26465 default:
26466 /* Probably some frontend-internal decl. Assume we don't care. */
26467 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26468 break;
26469 }
26470
26471 return NULL;
26472 }
26473 \f
26474 /* Output initial debug information for global DECL. Called at the
26475 end of the parsing process.
26476
26477 This is the initial debug generation process. As such, the DIEs
26478 generated may be incomplete. A later debug generation pass
26479 (dwarf2out_late_global_decl) will augment the information generated
26480 in this pass (e.g., with complete location info). */
26481
26482 static void
26483 dwarf2out_early_global_decl (tree decl)
26484 {
26485 set_early_dwarf s;
26486
26487 /* gen_decl_die() will set DECL_ABSTRACT because
26488 cgraph_function_possibly_inlined_p() returns true. This in
26489 turn will cause DW_AT_inline attributes to be set.
26490
26491 This happens because at early dwarf generation, there is no
26492 cgraph information, causing cgraph_function_possibly_inlined_p()
26493 to return true. Trick cgraph_function_possibly_inlined_p()
26494 while we generate dwarf early. */
26495 bool save = symtab->global_info_ready;
26496 symtab->global_info_ready = true;
26497
26498 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26499 other DECLs and they can point to template types or other things
26500 that dwarf2out can't handle when done via dwarf2out_decl. */
26501 if (TREE_CODE (decl) != TYPE_DECL
26502 && TREE_CODE (decl) != PARM_DECL)
26503 {
26504 if (TREE_CODE (decl) == FUNCTION_DECL)
26505 {
26506 tree save_fndecl = current_function_decl;
26507
26508 /* For nested functions, make sure we have DIEs for the parents first
26509 so that all nested DIEs are generated at the proper scope in the
26510 first shot. */
26511 tree context = decl_function_context (decl);
26512 if (context != NULL)
26513 {
26514 dw_die_ref context_die = lookup_decl_die (context);
26515 current_function_decl = context;
26516
26517 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26518 enough so that it lands in its own context. This avoids type
26519 pruning issues later on. */
26520 if (context_die == NULL || is_declaration_die (context_die))
26521 dwarf2out_early_global_decl (context);
26522 }
26523
26524 /* Emit an abstract origin of a function first. This happens
26525 with C++ constructor clones, for example, and keeps
26526 dwarf2out_abstract_function happy, which requires the early
26527 DIE of the abstract instance to be present. */
26528 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26529 dw_die_ref origin_die;
26530 if (origin != NULL
26531 /* Do not emit the DIE multiple times but make sure to
26532 process it fully here in case we just saw a declaration. */
26533 && ((origin_die = lookup_decl_die (origin)) == NULL
26534 || is_declaration_die (origin_die)))
26535 {
26536 current_function_decl = origin;
26537 dwarf2out_decl (origin);
26538 }
26539
26540 /* Emit the DIE for decl but avoid doing that multiple times. */
26541 dw_die_ref old_die;
26542 if ((old_die = lookup_decl_die (decl)) == NULL
26543 || is_declaration_die (old_die))
26544 {
26545 current_function_decl = decl;
26546 dwarf2out_decl (decl);
26547 }
26548
26549 current_function_decl = save_fndecl;
26550 }
26551 else
26552 dwarf2out_decl (decl);
26553 }
26554 symtab->global_info_ready = save;
26555 }
26556
26557 /* Return whether EXPR is an expression with the following pattern:
26558 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26559
26560 static bool
26561 is_trivial_indirect_ref (tree expr)
26562 {
26563 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26564 return false;
26565
26566 tree nop = TREE_OPERAND (expr, 0);
26567 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26568 return false;
26569
26570 tree int_cst = TREE_OPERAND (nop, 0);
26571 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26572 }
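/* Illustration only (a hedged sketch): the shape accepted above is a
   dereference of an integer constant converted to a pointer, roughly

       *(TYPE *) CONSTANT

   provided the conversion survives as a NOP_EXPR around an INTEGER_CST.
   Such an expression references no text symbols, so it is safe to keep
   for otherwise optimized-out variables even in LTO object files.  */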
26573
26574 /* Output debug information for global decl DECL. Called from
26575 toplev.c after compilation proper has finished. */
26576
26577 static void
26578 dwarf2out_late_global_decl (tree decl)
26579 {
26580 /* Fill-in any location information we were unable to determine
26581 on the first pass. */
26582 if (VAR_P (decl))
26583 {
26584 dw_die_ref die = lookup_decl_die (decl);
26585
26586 /* We may have to generate full debug late for LTO in case debug
26587 was not enabled at compile-time or the target doesn't support
26588 the LTO early debug scheme. */
26589 if (! die && in_lto_p)
26590 dwarf2out_decl (decl);
26591 else if (die)
26592 {
26593 /* We get called via the symtab code invoking late_global_decl
26594 for symbols that are optimized out.
26595
26596 Do not add locations for those, except if they have a
26597 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26598 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26599 INDIRECT_REF expression, as this could generate relocations to
26600 text symbols in LTO object files, which is invalid. */
26601 varpool_node *node = varpool_node::get (decl);
26602 if ((! node || ! node->definition)
26603 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26604 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26605 tree_add_const_value_attribute_for_decl (die, decl);
26606 else
26607 add_location_or_const_value_attribute (die, decl, false);
26608 }
26609 }
26610 }
26611
26612 /* Output debug information for type decl DECL. Called from toplev.c
26613 and from language front ends (to record built-in types). */
26614 static void
26615 dwarf2out_type_decl (tree decl, int local)
26616 {
26617 if (!local)
26618 {
26619 set_early_dwarf s;
26620 dwarf2out_decl (decl);
26621 }
26622 }
26623
26624 /* Output debug information for imported module or decl DECL.
26625 NAME is non-NULL name in the lexical block if the decl has been renamed.
26626 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26627 that DECL belongs to.
26628 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26629 static void
26630 dwarf2out_imported_module_or_decl_1 (tree decl,
26631 tree name,
26632 tree lexical_block,
26633 dw_die_ref lexical_block_die)
26634 {
26635 expanded_location xloc;
26636 dw_die_ref imported_die = NULL;
26637 dw_die_ref at_import_die;
26638
26639 if (TREE_CODE (decl) == IMPORTED_DECL)
26640 {
26641 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26642 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26643 gcc_assert (decl);
26644 }
26645 else
26646 xloc = expand_location (input_location);
26647
26648 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26649 {
26650 at_import_die = force_type_die (TREE_TYPE (decl));
26651 /* For namespace N { typedef void T; } using N::T; base_type_die
26652 returns NULL, but DW_TAG_imported_declaration requires
26653 the DW_AT_import attribute. Force creation of a DW_TAG_typedef DIE. */
26654 if (!at_import_die)
26655 {
26656 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26657 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26658 at_import_die = lookup_type_die (TREE_TYPE (decl));
26659 gcc_assert (at_import_die);
26660 }
26661 }
26662 else
26663 {
26664 at_import_die = lookup_decl_die (decl);
26665 if (!at_import_die)
26666 {
26667 /* If we're trying to avoid duplicate debug info, we may not have
26668 emitted the member decl for this field. Emit it now. */
26669 if (TREE_CODE (decl) == FIELD_DECL)
26670 {
26671 tree type = DECL_CONTEXT (decl);
26672
26673 if (TYPE_CONTEXT (type)
26674 && TYPE_P (TYPE_CONTEXT (type))
26675 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26676 DINFO_USAGE_DIR_USE))
26677 return;
26678 gen_type_die_for_member (type, decl,
26679 get_context_die (TYPE_CONTEXT (type)));
26680 }
26681 if (TREE_CODE (decl) == NAMELIST_DECL)
26682 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26683 get_context_die (DECL_CONTEXT (decl)),
26684 NULL_TREE);
26685 else
26686 at_import_die = force_decl_die (decl);
26687 }
26688 }
26689
26690 if (TREE_CODE (decl) == NAMESPACE_DECL)
26691 {
26692 if (dwarf_version >= 3 || !dwarf_strict)
26693 imported_die = new_die (DW_TAG_imported_module,
26694 lexical_block_die,
26695 lexical_block);
26696 else
26697 return;
26698 }
26699 else
26700 imported_die = new_die (DW_TAG_imported_declaration,
26701 lexical_block_die,
26702 lexical_block);
26703
26704 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26705 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26706 if (debug_column_info && xloc.column)
26707 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26708 if (name)
26709 add_AT_string (imported_die, DW_AT_name,
26710 IDENTIFIER_POINTER (name));
26711 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26712 }
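/* Illustration only (hypothetical C++ source): directives such as

       using namespace other;      // -> DW_TAG_imported_module
       using other::widget;        // -> DW_TAG_imported_declaration

   reach the function above; the resulting DIE gets DW_AT_decl_file and
   DW_AT_decl_line (plus DW_AT_decl_column when enabled), an optional
   DW_AT_name for renamings, and a DW_AT_import reference to the DIE of
   the imported namespace or declaration.  */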
26713
26714 /* Output debug information for imported module or decl DECL.
26715 NAME is non-NULL name in context if the decl has been renamed.
26716 CHILD is true if decl is one of the renamed decls as part of
26717 importing whole module.
26718 IMPLICIT is set if this hook is called for an implicit import
26719 such as inline namespace. */
26720
26721 static void
26722 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26723 bool child, bool implicit)
26724 {
26725 /* dw_die_ref at_import_die; */
26726 dw_die_ref scope_die;
26727
26728 if (debug_info_level <= DINFO_LEVEL_TERSE)
26729 return;
26730
26731 gcc_assert (decl);
26732
26733 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26734 should be enough; for DWARF4 and older, even if we emit it as an
26735 extension, add the implicit DW_TAG_imported_module anyway for the
26736 benefit of consumers unaware of DW_AT_export_symbols. */
26737 if (implicit
26738 && dwarf_version >= 5
26739 && lang_hooks.decls.decl_dwarf_attribute (decl,
26740 DW_AT_export_symbols) == 1)
26741 return;
26742
26743 set_early_dwarf s;
26744
26745 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26746 two DIEs: the decl DIE for the reference and the scope DIE. First, get
26747 the DIE for the decl itself. */
26748
26749 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26750 module or decl. If no DIE is found for a non-global, force a new one. */
26751 if (context
26752 && TYPE_P (context)
26753 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26754 return;
26755
26756 scope_die = get_context_die (context);
26757
26758 if (child)
26759 {
26760 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26761 there is nothing we can do here. */
26762 if (dwarf_version < 3 && dwarf_strict)
26763 return;
26764
26765 gcc_assert (scope_die->die_child);
26766 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26767 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26768 scope_die = scope_die->die_child;
26769 }
26770
26771 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26772 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26773 }
26774
26775 /* Output debug information for namelists. */
26776
26777 static dw_die_ref
26778 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26779 {
26780 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26781 tree value;
26782 unsigned i;
26783
26784 if (debug_info_level <= DINFO_LEVEL_TERSE)
26785 return NULL;
26786
26787 gcc_assert (scope_die != NULL);
26788 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26789 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26790
26791 /* If there are no item_decls, we have a nondefining namelist, e.g.
26792 with USE association; hence, set DW_AT_declaration. */
26793 if (item_decls == NULL_TREE)
26794 {
26795 add_AT_flag (nml_die, DW_AT_declaration, 1);
26796 return nml_die;
26797 }
26798
26799 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26800 {
26801 nml_item_ref_die = lookup_decl_die (value);
26802 if (!nml_item_ref_die)
26803 nml_item_ref_die = force_decl_die (value);
26804
26805 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26806 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26807 }
26808 return nml_die;
26809 }
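/* Illustration only (hypothetical Fortran source, handled via the
   Fortran FE):

       NAMELIST /CASEDATA/ A, B

   yields a DW_TAG_namelist DIE named CASEDATA whose children are
   DW_TAG_namelist_item DIEs, each carrying a DW_AT_namelist_items
   reference to the DIE of A or B, as built in the loop above.  A
   use-associated namelist with no item decls gets only
   DW_AT_declaration.  */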
26810
26811
26812 /* Write the debugging output for DECL. */
26813
26814 static void
26815 dwarf2out_decl (tree decl)
26816 {
26817 dw_die_ref context_die = comp_unit_die ();
26818
26819 switch (TREE_CODE (decl))
26820 {
26821 case ERROR_MARK:
26822 return;
26823
26824 case FUNCTION_DECL:
26825 /* If we're a nested function, initially use a parent of NULL; if we're
26826 a plain function, this will be fixed up in decls_for_scope. If
26827 we're a method, it will be ignored, since we already have a DIE.
26828 Avoid doing this late though since clones of class methods may
26829 otherwise end up in limbo and create type DIEs late. */
26830 if (early_dwarf
26831 && decl_function_context (decl)
26832 /* But if we're in terse mode, we don't care about scope. */
26833 && debug_info_level > DINFO_LEVEL_TERSE)
26834 context_die = NULL;
26835 break;
26836
26837 case VAR_DECL:
26838 /* For local statics lookup proper context die. */
26839 if (local_function_static (decl))
26840 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26841
26842 /* If we are in terse mode, don't generate any DIEs to represent any
26843 variable declarations or definitions unless the variable is external. */
26844 if (debug_info_level < DINFO_LEVEL_TERSE
26845 || (debug_info_level == DINFO_LEVEL_TERSE
26846 && !TREE_PUBLIC (decl)))
26847 return;
26848 break;
26849
26850 case CONST_DECL:
26851 if (debug_info_level <= DINFO_LEVEL_TERSE)
26852 return;
26853 if (!is_fortran () && !is_ada () && !is_dlang ())
26854 return;
26855 if (TREE_STATIC (decl) && decl_function_context (decl))
26856 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26857 break;
26858
26859 case NAMESPACE_DECL:
26860 case IMPORTED_DECL:
26861 if (debug_info_level <= DINFO_LEVEL_TERSE)
26862 return;
26863 if (lookup_decl_die (decl) != NULL)
26864 return;
26865 break;
26866
26867 case TYPE_DECL:
26868 /* Don't emit stubs for types unless they are needed by other DIEs. */
26869 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26870 return;
26871
26872 /* Don't bother trying to generate any DIEs to represent any of the
26873 normal built-in types for the language we are compiling. */
26874 if (DECL_IS_BUILTIN (decl))
26875 return;
26876
26877 /* If we are in terse mode, don't generate any DIEs for types. */
26878 if (debug_info_level <= DINFO_LEVEL_TERSE)
26879 return;
26880
26881 /* If we're a function-scope tag, initially use a parent of NULL;
26882 this will be fixed up in decls_for_scope. */
26883 if (decl_function_context (decl))
26884 context_die = NULL;
26885
26886 break;
26887
26888 case NAMELIST_DECL:
26889 break;
26890
26891 default:
26892 return;
26893 }
26894
26895 gen_decl_die (decl, NULL, NULL, context_die);
26896
26897 if (flag_checking)
26898 {
26899 dw_die_ref die = lookup_decl_die (decl);
26900 if (die)
26901 check_die (die);
26902 }
26903 }
26904
26905 /* Write the debugging output for DECL. */
26906
26907 static void
26908 dwarf2out_function_decl (tree decl)
26909 {
26910 dwarf2out_decl (decl);
26911 call_arg_locations = NULL;
26912 call_arg_loc_last = NULL;
26913 call_site_count = -1;
26914 tail_call_site_count = -1;
26915 decl_loc_table->empty ();
26916 cached_dw_loc_list_table->empty ();
26917 }
26918
26919 /* Output a marker (i.e. a label) for the beginning of the generated code for
26920 a lexical block. */
26921
26922 static void
26923 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26924 unsigned int blocknum)
26925 {
26926 switch_to_section (current_function_section ());
26927 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26928 }
26929
26930 /* Output a marker (i.e. a label) for the end of the generated code for a
26931 lexical block. */
26932
26933 static void
26934 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26935 {
26936 switch_to_section (current_function_section ());
26937 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26938 }
26939
26940 /* Returns nonzero if it is appropriate not to emit any debugging
26941 information for BLOCK, because it doesn't contain any instructions.
26942
26943 Don't allow this for blocks with nested functions or local classes
26944 as we would end up with orphans, and in the presence of scheduling
26945 we may end up calling them anyway. */
26946
26947 static bool
26948 dwarf2out_ignore_block (const_tree block)
26949 {
26950 tree decl;
26951 unsigned int i;
26952
26953 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26954 if (TREE_CODE (decl) == FUNCTION_DECL
26955 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26956 return 0;
26957 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26958 {
26959 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26960 if (TREE_CODE (decl) == FUNCTION_DECL
26961 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26962 return 0;
26963 }
26964
26965 return 1;
26966 }
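/* Illustration only (hypothetical GNU C source): a block like

       { void nested (void) { }  nested (); }

   contains a FUNCTION_DECL, so dwarf2out_ignore_block returns 0 and the
   enclosing scope is kept even in terse mode; dropping it would leave
   the nested subprogram DIE as an orphan.  */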
26967
26968 /* Hash table routines for file_hash. */
26969
26970 bool
26971 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26972 {
26973 return filename_cmp (p1->filename, p2) == 0;
26974 }
26975
26976 hashval_t
26977 dwarf_file_hasher::hash (dwarf_file_data *p)
26978 {
26979 return htab_hash_string (p->filename);
26980 }
26981
26982 /* Look up FILE_NAME (in the list of filenames that we know about here in
26983 dwarf2out.c) and return its "index". The index of each (known) filename is
26984 just a unique number which is associated with only that one filename. We
26985 need such numbers for the sake of generating labels (in the .debug_sfnames
26986 section) and references to those file numbers (in the .debug_srcinfo
26987 and .debug_macinfo sections). If the filename given as an argument is not
26988 found in our current list, add it to the list and assign it the next
26989 available unique index number. */
26990
26991 static struct dwarf_file_data *
26992 lookup_filename (const char *file_name)
26993 {
26994 struct dwarf_file_data * created;
26995
26996 if (!file_name)
26997 return NULL;
26998
26999 if (!file_name[0])
27000 file_name = "<stdin>";
27001
27002 dwarf_file_data **slot
27003 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27004 INSERT);
27005 if (*slot)
27006 return *slot;
27007
27008 created = ggc_alloc<dwarf_file_data> ();
27009 created->filename = file_name;
27010 created->emitted_number = 0;
27011 *slot = created;
27012 return created;
27013 }
27014
27015 /* If the assembler will construct the file table, then translate the compiler
27016 internal file table number into the assembler file table number, and emit
27017 a .file directive if we haven't already emitted one yet. The file table
27018 numbers are different because we prune debug info for unused variables and
27019 types, which may include filenames. */
27020
27021 static int
27022 maybe_emit_file (struct dwarf_file_data * fd)
27023 {
27024 if (! fd->emitted_number)
27025 {
27026 if (last_emitted_file)
27027 fd->emitted_number = last_emitted_file->emitted_number + 1;
27028 else
27029 fd->emitted_number = 1;
27030 last_emitted_file = fd;
27031
27032 if (output_asm_line_debug_info ())
27033 {
27034 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27035 output_quoted_string (asm_out_file,
27036 remap_debug_filename (fd->filename));
27037 fputc ('\n', asm_out_file);
27038 }
27039 }
27040
27041 return fd->emitted_number;
27042 }
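/* Illustration only (assembler output sketch; the actual numbering
   depends on the order in which files are first referenced): when the
   assembler owns the line table, the first use of a file emits e.g.

       .file 2 "lib/util.c"

   and subsequent references simply reuse the returned number 2.  */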
27043
27044 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27045 That generation should happen after function debug info has been
27046 generated. The value of the attribute is the constant value of ARG. */
27047
27048 static void
27049 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27050 {
27051 die_arg_entry entry;
27052
27053 if (!die || !arg)
27054 return;
27055
27056 gcc_assert (early_dwarf);
27057
27058 if (!tmpl_value_parm_die_table)
27059 vec_alloc (tmpl_value_parm_die_table, 32);
27060
27061 entry.die = die;
27062 entry.arg = arg;
27063 vec_safe_push (tmpl_value_parm_die_table, entry);
27064 }
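/* Illustration only (hypothetical C++ source): for an instantiation like

       template <int N> struct fixed { };
       fixed<4> f;

   the DIE of the template value parameter N is queued here together
   with the argument 4; gen_remaining_tmpl_value_param_die_attribute
   later adds DW_AT_const_value 4, or a DW_AT_location when the argument
   is only symbolically known at that point.  */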
27065
27066 /* Return TRUE if T is an instance of a generic type, FALSE
27067 otherwise. */
27068
27069 static bool
27070 generic_type_p (tree t)
27071 {
27072 if (t == NULL_TREE || !TYPE_P (t))
27073 return false;
27074 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27075 }
27076
27077 /* Schedule the generation of the generic parameter dies for the
27078 instance of generic type T. The proper generation itself is later
27079 done by gen_scheduled_generic_parms_dies. */
27080
27081 static void
27082 schedule_generic_params_dies_gen (tree t)
27083 {
27084 if (!generic_type_p (t))
27085 return;
27086
27087 gcc_assert (early_dwarf);
27088
27089 if (!generic_type_instances)
27090 vec_alloc (generic_type_instances, 256);
27091
27092 vec_safe_push (generic_type_instances, t);
27093 }
27094
27095 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27096 by append_entry_to_tmpl_value_parm_die_table. This function must
27097 be called after function DIEs have been generated. */
27098
27099 static void
27100 gen_remaining_tmpl_value_param_die_attribute (void)
27101 {
27102 if (tmpl_value_parm_die_table)
27103 {
27104 unsigned i, j;
27105 die_arg_entry *e;
27106
27107 /* We do this in two phases - first get the cases we can
27108 handle during early-finish, preserving those we cannot
27109 (containing symbolic constants where we don't yet know
27110 whether we are going to output the referenced symbols).
27111 For those we try again at late-finish. */
27112 j = 0;
27113 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27114 {
27115 if (!e->die->removed
27116 && !tree_add_const_value_attribute (e->die, e->arg))
27117 {
27118 dw_loc_descr_ref loc = NULL;
27119 if (! early_dwarf
27120 && (dwarf_version >= 5 || !dwarf_strict))
27121 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27122 if (loc)
27123 add_AT_loc (e->die, DW_AT_location, loc);
27124 else
27125 (*tmpl_value_parm_die_table)[j++] = *e;
27126 }
27127 }
27128 tmpl_value_parm_die_table->truncate (j);
27129 }
27130 }
27131
27132 /* Generate generic parameters DIEs for instances of generic types
27133 that have been previously scheduled by
27134 schedule_generic_params_dies_gen. This function must be called
27135 after all the types of the CU have been laid out. */
27136
27137 static void
27138 gen_scheduled_generic_parms_dies (void)
27139 {
27140 unsigned i;
27141 tree t;
27142
27143 if (!generic_type_instances)
27144 return;
27145
27146 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27147 if (COMPLETE_TYPE_P (t))
27148 gen_generic_params_dies (t);
27149
27150 generic_type_instances = NULL;
27151 }
27152
27153
27154 /* Replace DW_AT_name for DECL with NAME. */
27155
27156 static void
27157 dwarf2out_set_name (tree decl, tree name)
27158 {
27159 dw_die_ref die;
27160 dw_attr_node *attr;
27161 const char *dname;
27162
27163 die = TYPE_SYMTAB_DIE (decl);
27164 if (!die)
27165 return;
27166
27167 dname = dwarf2_name (name, 0);
27168 if (!dname)
27169 return;
27170
27171 attr = get_AT (die, DW_AT_name);
27172 if (attr)
27173 {
27174 struct indirect_string_node *node;
27175
27176 node = find_AT_string (dname);
27177 /* Replace the string. */
27178 attr->dw_attr_val.v.val_str = node;
27179 }
27180
27181 else
27182 add_name_attribute (die, dname);
27183 }
27184
27185 /* True if before or during processing of the first function being emitted. */
27186 static bool in_first_function_p = true;
27187 /* True if the loc_note seen during a dwarf2out_var_location call might
27188 still be before the first real instruction, i.e. at the address of .Ltext0. */
27189 static bool maybe_at_text_label_p = true;
27190 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27191 static unsigned int first_loclabel_num_not_at_text_label;
27192
27193 /* Look ahead for a real insn, or for a begin stmt marker. */
27194
27195 static rtx_insn *
27196 dwarf2out_next_real_insn (rtx_insn *loc_note)
27197 {
27198 rtx_insn *next_real = NEXT_INSN (loc_note);
27199
27200 while (next_real)
27201 if (INSN_P (next_real))
27202 break;
27203 else
27204 next_real = NEXT_INSN (next_real);
27205
27206 return next_real;
27207 }
27208
27209 /* Called by the final INSN scan whenever we see a var location. We
27210 use it to drop labels in the right places, and throw the location in
27211 our lookup table. */
27212
27213 static void
27214 dwarf2out_var_location (rtx_insn *loc_note)
27215 {
27216 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27217 struct var_loc_node *newloc;
27218 rtx_insn *next_real, *next_note;
27219 rtx_insn *call_insn = NULL;
27220 static const char *last_label;
27221 static const char *last_postcall_label;
27222 static bool last_in_cold_section_p;
27223 static rtx_insn *expected_next_loc_note;
27224 tree decl;
27225 bool var_loc_p;
27226 var_loc_view view = 0;
27227
27228 if (!NOTE_P (loc_note))
27229 {
27230 if (CALL_P (loc_note))
27231 {
27232 maybe_reset_location_view (loc_note, cur_line_info_table);
27233 call_site_count++;
27234 if (SIBLING_CALL_P (loc_note))
27235 tail_call_site_count++;
27236 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27237 {
27238 call_insn = loc_note;
27239 loc_note = NULL;
27240 var_loc_p = false;
27241
27242 next_real = dwarf2out_next_real_insn (call_insn);
27243 next_note = NULL;
27244 cached_next_real_insn = NULL;
27245 goto create_label;
27246 }
27247 if (optimize == 0 && !flag_var_tracking)
27248 {
27249 /* When the var-tracking pass is not running, there is no note
27250 for indirect calls whose target is compile-time known. In this
27251 case, process such calls specifically so that we generate call
27252 sites for them anyway. */
27253 rtx x = PATTERN (loc_note);
27254 if (GET_CODE (x) == PARALLEL)
27255 x = XVECEXP (x, 0, 0);
27256 if (GET_CODE (x) == SET)
27257 x = SET_SRC (x);
27258 if (GET_CODE (x) == CALL)
27259 x = XEXP (x, 0);
27260 if (!MEM_P (x)
27261 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27262 || !SYMBOL_REF_DECL (XEXP (x, 0))
27263 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27264 != FUNCTION_DECL))
27265 {
27266 call_insn = loc_note;
27267 loc_note = NULL;
27268 var_loc_p = false;
27269
27270 next_real = dwarf2out_next_real_insn (call_insn);
27271 next_note = NULL;
27272 cached_next_real_insn = NULL;
27273 goto create_label;
27274 }
27275 }
27276 }
27277 else if (!debug_variable_location_views)
27278 gcc_unreachable ();
27279 else
27280 maybe_reset_location_view (loc_note, cur_line_info_table);
27281
27282 return;
27283 }
27284
27285 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27286 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27287 return;
27288
27289 /* Optimize processing a large consecutive sequence of location
27290 notes so we don't spend too much time in next_real_insn. If the
27291 next insn is another location note, remember the next_real_insn
27292 calculation for next time. */
27293 next_real = cached_next_real_insn;
27294 if (next_real)
27295 {
27296 if (expected_next_loc_note != loc_note)
27297 next_real = NULL;
27298 }
27299
27300 next_note = NEXT_INSN (loc_note);
27301 if (! next_note
27302 || next_note->deleted ()
27303 || ! NOTE_P (next_note)
27304 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27305 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27306 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27307 next_note = NULL;
27308
27309 if (! next_real)
27310 next_real = dwarf2out_next_real_insn (loc_note);
27311
27312 if (next_note)
27313 {
27314 expected_next_loc_note = next_note;
27315 cached_next_real_insn = next_real;
27316 }
27317 else
27318 cached_next_real_insn = NULL;
27319
27320 /* If there are no instructions which would be affected by this note,
27321 don't do anything. */
27322 if (var_loc_p
27323 && next_real == NULL_RTX
27324 && !NOTE_DURING_CALL_P (loc_note))
27325 return;
27326
27327 create_label:
27328
27329 if (next_real == NULL_RTX)
27330 next_real = get_last_insn ();
27331
27332 /* If there were any real insns between the note we processed last time
27333 and this note (or if it is the first note), clear
27334 last_{,postcall_}label so that they are not reused this time. */
27335 if (last_var_location_insn == NULL_RTX
27336 || last_var_location_insn != next_real
27337 || last_in_cold_section_p != in_cold_section_p)
27338 {
27339 last_label = NULL;
27340 last_postcall_label = NULL;
27341 }
27342
27343 if (var_loc_p)
27344 {
27345 const char *label
27346 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27347 view = cur_line_info_table->view;
27348 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27349 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27350 if (newloc == NULL)
27351 return;
27352 }
27353 else
27354 {
27355 decl = NULL_TREE;
27356 newloc = NULL;
27357 }
27358
27359 /* If there were no real insns between the note we processed last time
27360 and this note, use the label we emitted last time. Otherwise
27361 create a new label and emit it. */
27362 if (last_label == NULL)
27363 {
27364 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27365 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27366 loclabel_num++;
27367 last_label = ggc_strdup (loclabel);
27368 /* See if loclabel might be equal to .Ltext0. If yes,
27369 bump first_loclabel_num_not_at_text_label. */
27370 if (!have_multiple_function_sections
27371 && in_first_function_p
27372 && maybe_at_text_label_p)
27373 {
27374 static rtx_insn *last_start;
27375 rtx_insn *insn;
27376 for (insn = loc_note; insn; insn = previous_insn (insn))
27377 if (insn == last_start)
27378 break;
27379 else if (!NONDEBUG_INSN_P (insn))
27380 continue;
27381 else
27382 {
27383 rtx body = PATTERN (insn);
27384 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27385 continue;
27386 /* Inline asm could occupy zero bytes. */
27387 else if (GET_CODE (body) == ASM_INPUT
27388 || asm_noperands (body) >= 0)
27389 continue;
27390 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27391 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27392 continue;
27393 #endif
27394 else
27395 {
27396 /* Assume insn has non-zero length. */
27397 maybe_at_text_label_p = false;
27398 break;
27399 }
27400 }
27401 if (maybe_at_text_label_p)
27402 {
27403 last_start = loc_note;
27404 first_loclabel_num_not_at_text_label = loclabel_num;
27405 }
27406 }
27407 }
27408
27409 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27410 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27411
27412 if (!var_loc_p)
27413 {
27414 struct call_arg_loc_node *ca_loc
27415 = ggc_cleared_alloc<call_arg_loc_node> ();
27416 rtx_insn *prev = call_insn;
27417
27418 ca_loc->call_arg_loc_note
27419 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27420 ca_loc->next = NULL;
27421 ca_loc->label = last_label;
27422 gcc_assert (prev
27423 && (CALL_P (prev)
27424 || (NONJUMP_INSN_P (prev)
27425 && GET_CODE (PATTERN (prev)) == SEQUENCE
27426 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27427 if (!CALL_P (prev))
27428 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27429 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27430
27431 /* Look for a SYMBOL_REF in the "prev" instruction. */
27432 rtx x = get_call_rtx_from (prev);
27433 if (x)
27434 {
27435 /* Try to get the call symbol, if any. */
27436 if (MEM_P (XEXP (x, 0)))
27437 x = XEXP (x, 0);
27438 /* First, look for a memory access to a symbol_ref. */
27439 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27440 && SYMBOL_REF_DECL (XEXP (x, 0))
27441 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27442 ca_loc->symbol_ref = XEXP (x, 0);
27443 /* Otherwise, look for a user-level function declaration that is
27444 known at compile time. */
27445 else if (MEM_P (x)
27446 && MEM_EXPR (x)
27447 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27448 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27449 }
27450
27451 ca_loc->block = insn_scope (prev);
27452 if (call_arg_locations)
27453 call_arg_loc_last->next = ca_loc;
27454 else
27455 call_arg_locations = ca_loc;
27456 call_arg_loc_last = ca_loc;
27457 }
27458 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27459 {
27460 newloc->label = last_label;
27461 newloc->view = view;
27462 }
27463 else
27464 {
27465 if (!last_postcall_label)
27466 {
27467 sprintf (loclabel, "%s-1", last_label);
27468 last_postcall_label = ggc_strdup (loclabel);
27469 }
27470 newloc->label = last_postcall_label;
27471 /* ??? This view is at last_label, not last_label-1, but we
27472 could only assume view at last_label-1 is zero if we could
27473 assume calls always have length greater than one. This is
27474 probably true in general, though there might be a rare
27475 exception to this rule, e.g. if a call insn is optimized out
27476 by target magic. Then, even the -1 in the label will be
27477 wrong, which might invalidate the range. Anyway, using view,
27478 though technically possibly incorrect, will work as far as
27479 ranges go: since L-1 is in the middle of the call insn,
27480 (L-1).0 and (L-1).V shouldn't make any difference, and having
27481 the loclist entry refer to the .loc entry might be useful, so
27482 leave it like this. */
27483 newloc->view = view;
27484 }
27485
27486 if (var_loc_p && flag_debug_asm)
27487 {
27488 const char *name, *sep, *patstr;
27489 if (decl && DECL_NAME (decl))
27490 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27491 else
27492 name = "";
27493 if (NOTE_VAR_LOCATION_LOC (loc_note))
27494 {
27495 sep = " => ";
27496 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27497 }
27498 else
27499 {
27500 sep = " ";
27501 patstr = "RESET";
27502 }
27503 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27504 name, sep, patstr);
27505 }
27506
27507 last_var_location_insn = next_real;
27508 last_in_cold_section_p = in_cold_section_p;
27509 }
27510
27511 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27512 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27513 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27514 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27515 BLOCK_FRAGMENT_ORIGIN links. */
27516 static bool
27517 block_within_block_p (tree block, tree outer, bool bothways)
27518 {
27519 if (block == outer)
27520 return true;
27521
27522 /* Quickly check that OUTER is somewhere up BLOCK's supercontext chain. */
27523 for (tree context = BLOCK_SUPERCONTEXT (block);
27524 context != outer;
27525 context = BLOCK_SUPERCONTEXT (context))
27526 if (!context || TREE_CODE (context) != BLOCK)
27527 return false;
27528
27529 if (!bothways)
27530 return true;
27531
27532 /* Now check that each block is actually referenced by its
27533 parent. */
27534 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27535 context = BLOCK_SUPERCONTEXT (context))
27536 {
27537 if (BLOCK_FRAGMENT_ORIGIN (context))
27538 {
27539 gcc_assert (!BLOCK_SUBBLOCKS (context));
27540 context = BLOCK_FRAGMENT_ORIGIN (context);
27541 }
27542 for (tree sub = BLOCK_SUBBLOCKS (context);
27543 sub != block;
27544 sub = BLOCK_CHAIN (sub))
27545 if (!sub)
27546 return false;
27547 if (context == outer)
27548 return true;
27549 else
27550 block = context;
27551 }
27552 }
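
/* A minimal stand-alone sketch of the same two-way containment check on a
   simplified block structure.  `example_block' and the helper name are
   hypothetical (plain C, not the GCC tree API), and the
   BLOCK_FRAGMENT_ORIGIN handling of the real function is omitted.
   Guarded with #if 0 so it is not compiled.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

/* Simplified stand-in for a lexical BLOCK: a parent link plus a singly
   linked child list.  */
struct example_block
{
  struct example_block *parent;      /* like BLOCK_SUPERCONTEXT */
  struct example_block *subblocks;   /* like BLOCK_SUBBLOCKS */
  struct example_block *chain;       /* like BLOCK_CHAIN */
};

static bool
example_block_within_block_p (struct example_block *block,
                              struct example_block *outer, bool bothways)
{
  if (block == outer)
    return true;

  /* Upward check: OUTER must be reachable through parent links.  */
  for (struct example_block *ctx = block->parent; ctx != outer;
       ctx = ctx->parent)
    if (!ctx)
      return false;

  if (!bothways)
    return true;

  /* Downward check: every block on the path must also be listed among
     its parent's children.  */
  for (struct example_block *ctx = block->parent; ; ctx = ctx->parent)
    {
      struct example_block *sub;
      for (sub = ctx->subblocks; sub != block; sub = sub->chain)
        if (!sub)
          return false;
      if (ctx == outer)
        return true;
      block = ctx;
    }
}
#endif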
27553
27554 /* Called during final while assembling the marker of the entry point
27555 for an inlined function. */
27556
27557 static void
27558 dwarf2out_inline_entry (tree block)
27559 {
27560 gcc_assert (debug_inline_points);
27561
27562 /* If we can't represent it, don't bother. */
27563 if (!(dwarf_version >= 3 || !dwarf_strict))
27564 return;
27565
27566 gcc_assert (DECL_P (block_ultimate_origin (block)));
27567
27568 /* Sanity check the block tree. This would catch a case in which
27569 BLOCK got removed from the tree reachable from the outermost
27570 lexical block, but got retained in markers. It would still link
27571 back to its parents, but some ancestor would be missing a link
27572 down the path to the sub BLOCK. If the block got removed, its
27573 BLOCK_NUMBER will not be a usable value. */
27574 if (flag_checking)
27575 gcc_assert (block_within_block_p (block,
27576 DECL_INITIAL (current_function_decl),
27577 true));
27578
27579 gcc_assert (inlined_function_outer_scope_p (block));
27580 gcc_assert (!lookup_block_die (block));
27581
27582 if (BLOCK_FRAGMENT_ORIGIN (block))
27583 block = BLOCK_FRAGMENT_ORIGIN (block);
27584 /* Can the entry point ever not be at the beginning of an
27585 unfragmented lexical block? */
27586 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27587 || (cur_line_info_table
27588 && !ZERO_VIEW_P (cur_line_info_table->view))))
27589 return;
27590
27591 if (!inline_entry_data_table)
27592 inline_entry_data_table
27593 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27594
27595
27596 inline_entry_data **iedp
27597 = inline_entry_data_table->find_slot_with_hash (block,
27598 htab_hash_pointer (block),
27599 INSERT);
27600 if (*iedp)
27601 /* ??? Ideally, we'd record all entry points for the same inlined
27602 function (some may have been duplicated by e.g. unrolling), but
27603 we have no way to represent that ATM. */
27604 return;
27605
27606 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27607 ied->block = block;
27608 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27609 ied->label_num = BLOCK_NUMBER (block);
27610 if (cur_line_info_table)
27611 ied->view = cur_line_info_table->view;
27612
27613 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27614 BLOCK_NUMBER (block));
27615 }
27616
27617 /* Called from finalize_size_functions for size functions so that their body
27618 can be encoded in the debug info to describe the layout of variable-length
27619 structures. */
27620
27621 static void
27622 dwarf2out_size_function (tree decl)
27623 {
27624 set_early_dwarf s;
27625 function_to_dwarf_procedure (decl);
27626 }
27627
27628 /* Note in one location list that text section has changed. */
27629
27630 int
27631 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27632 {
27633 var_loc_list *list = *slot;
27634 if (list->first)
27635 list->last_before_switch
27636 = list->last->next ? list->last->next : list->last;
27637 return 1;
27638 }
27639
27640 /* Note in all location lists that text section has changed. */
27641
27642 static void
27643 var_location_switch_text_section (void)
27644 {
27645 if (decl_loc_table == NULL)
27646 return;
27647
27648 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27649 }
27650
27651 /* Create a new line number table. */
27652
27653 static dw_line_info_table *
27654 new_line_info_table (void)
27655 {
27656 dw_line_info_table *table;
27657
27658 table = ggc_cleared_alloc<dw_line_info_table> ();
27659 table->file_num = 1;
27660 table->line_num = 1;
27661 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27662 FORCE_RESET_NEXT_VIEW (table->view);
27663 table->symviews_since_reset = 0;
27664
27665 return table;
27666 }
27667
27668 /* Look up the "current" table into which we emit line info, so
27669 that we don't have to do it for every source line. */
27670
27671 static void
27672 set_cur_line_info_table (section *sec)
27673 {
27674 dw_line_info_table *table;
27675
27676 if (sec == text_section)
27677 table = text_section_line_info;
27678 else if (sec == cold_text_section)
27679 {
27680 table = cold_text_section_line_info;
27681 if (!table)
27682 {
27683 cold_text_section_line_info = table = new_line_info_table ();
27684 table->end_label = cold_end_label;
27685 }
27686 }
27687 else
27688 {
27689 const char *end_label;
27690
27691 if (crtl->has_bb_partition)
27692 {
27693 if (in_cold_section_p)
27694 end_label = crtl->subsections.cold_section_end_label;
27695 else
27696 end_label = crtl->subsections.hot_section_end_label;
27697 }
27698 else
27699 {
27700 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27701 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27702 current_function_funcdef_no);
27703 end_label = ggc_strdup (label);
27704 }
27705
27706 table = new_line_info_table ();
27707 table->end_label = end_label;
27708
27709 vec_safe_push (separate_line_info, table);
27710 }
27711
27712 if (output_asm_line_debug_info ())
27713 table->is_stmt = (cur_line_info_table
27714 ? cur_line_info_table->is_stmt
27715 : DWARF_LINE_DEFAULT_IS_STMT_START);
27716 cur_line_info_table = table;
27717 }
27718
27719
27720 /* We need to reset the locations at the beginning of each
27721 function. We can't do this in the end_function hook, because the
27722 declarations that use the locations won't have been output when
27723 that hook is called. Also compute have_multiple_function_sections here. */
27724
27725 static void
27726 dwarf2out_begin_function (tree fun)
27727 {
27728 section *sec = function_section (fun);
27729
27730 if (sec != text_section)
27731 have_multiple_function_sections = true;
27732
27733 if (crtl->has_bb_partition && !cold_text_section)
27734 {
27735 gcc_assert (current_function_decl == fun);
27736 cold_text_section = unlikely_text_section ();
27737 switch_to_section (cold_text_section);
27738 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27739 switch_to_section (sec);
27740 }
27741
27742 dwarf2out_note_section_used ();
27743 call_site_count = 0;
27744 tail_call_site_count = 0;
27745
27746 set_cur_line_info_table (sec);
27747 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27748 }
27749
27750 /* Helper function of dwarf2out_end_function, called only after emitting
27751 the very first function into assembly. Check if some .debug_loc range
27752 might end with a .LVL* label that could be equal to .Ltext0.
27753 In that case we must force using absolute addresses in .debug_loc ranges,
27754 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27755 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27756 list terminator.
27757 Set have_multiple_function_sections to true in that case and
27758 terminate htab traversal. */
27759
27760 int
27761 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27762 {
27763 var_loc_list *entry = *slot;
27764 struct var_loc_node *node;
27765
27766 node = entry->first;
27767 if (node && node->next && node->next->label)
27768 {
27769 unsigned int i;
27770 const char *label = node->next->label;
27771 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27772
27773 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27774 {
27775 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27776 if (strcmp (label, loclabel) == 0)
27777 {
27778 have_multiple_function_sections = true;
27779 return 0;
27780 }
27781 }
27782 }
27783 return 1;
27784 }
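
/* A stand-alone sketch of the label comparison above: regenerate the
   first N "LVL" label names and see whether LABEL matches one of them.
   The ".LVL%u" spelling and the helper name are illustrative
   assumptions; the real spelling comes from ASM_GENERATE_INTERNAL_LABEL
   and is target-specific.  Guarded with #if 0 so it is not compiled.  */
#if 0
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static bool
example_label_maybe_at_text_start (const char *label, unsigned n_candidates)
{
  char buf[32];
  for (unsigned i = 0; i < n_candidates; i++)
    {
      snprintf (buf, sizeof buf, ".LVL%u", i);
      /* If LABEL is one of these, .LVLi - .Ltext0 may be 0, and a
         0 .. 0 range would read as a .debug_loc list terminator.  */
      if (strcmp (label, buf) == 0)
        return true;
    }
  return false;
}
#endif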
27785
27786 /* Hook called after emitting a function into assembly.
27787 This does something only for the very first function emitted. */
27788
27789 static void
27790 dwarf2out_end_function (unsigned int)
27791 {
27792 if (in_first_function_p
27793 && !have_multiple_function_sections
27794 && first_loclabel_num_not_at_text_label
27795 && decl_loc_table)
27796 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27797 in_first_function_p = false;
27798 maybe_at_text_label_p = false;
27799 }
27800
27801 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27802 front-ends register a translation unit even before dwarf2out_init is
27803 called. */
27804 static tree main_translation_unit = NULL_TREE;
27805
27806 /* Hook called by front-ends after they have built their main translation
27807 unit. Associate comp_unit_die with UNIT. */
27808
27809 static void
27810 dwarf2out_register_main_translation_unit (tree unit)
27811 {
27812 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27813 && main_translation_unit == NULL_TREE);
27814 main_translation_unit = unit;
27815 /* If dwarf2out_init has not been called yet, it will perform the association
27816 itself looking at main_translation_unit. */
27817 if (decl_die_table != NULL)
27818 equate_decl_number_to_die (unit, comp_unit_die ());
27819 }
27820
27821 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27822
27823 static void
27824 push_dw_line_info_entry (dw_line_info_table *table,
27825 enum dw_line_info_opcode opcode, unsigned int val)
27826 {
27827 dw_line_info_entry e;
27828 e.opcode = opcode;
27829 e.val = val;
27830 vec_safe_push (table->entries, e);
27831 }
27832
27833 /* Output a label to mark the beginning of a source code line entry
27834 and record information relating to this source line, in
27835 'line_info_table' for later output of the .debug_line section. */
27836 /* ??? The discriminator parameter ought to be unsigned. */
27837
27838 static void
27839 dwarf2out_source_line (unsigned int line, unsigned int column,
27840 const char *filename,
27841 int discriminator, bool is_stmt)
27842 {
27843 unsigned int file_num;
27844 dw_line_info_table *table;
27845 static var_loc_view lvugid;
27846
27847 if (debug_info_level < DINFO_LEVEL_TERSE)
27848 return;
27849
27850 table = cur_line_info_table;
27851
27852 if (line == 0)
27853 {
27854 if (debug_variable_location_views
27855 && output_asm_line_debug_info ()
27856 && table && !RESETTING_VIEW_P (table->view))
27857 {
27858 /* If we're using the assembler to compute view numbers, we
27859 can't issue a .loc directive for line zero, so we can't
27860 get a view number at this point. We might attempt to
27861 compute it from the previous view, or equate it to a
27862 subsequent view (though it might not be there!), but
27863 since we're omitting the line number entry, we might as
27864 well omit the view number as well. That means pretending
27865 it's a view number zero, which might very well turn out
27866 to be correct. ??? Extend the assembler so that the
27867 compiler could emit e.g. ".locview .LVU#", to output a
27868 view without changing line number information. We'd then
27869 have to count it in symviews_since_reset; when it's omitted,
27870 it doesn't count. */
27871 if (!zero_view_p)
27872 zero_view_p = BITMAP_GGC_ALLOC ();
27873 bitmap_set_bit (zero_view_p, table->view);
27874 if (flag_debug_asm)
27875 {
27876 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27877 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27878 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27879 ASM_COMMENT_START);
27880 assemble_name (asm_out_file, label);
27881 putc ('\n', asm_out_file);
27882 }
27883 table->view = ++lvugid;
27884 }
27885 return;
27886 }
27887
27888 /* The discriminator column was added in DWARF 4. Simplify the code
27889 below by removing the discriminator if we're not supposed to output it. */
27890 if (dwarf_version < 4 && dwarf_strict)
27891 discriminator = 0;
27892
27893 if (!debug_column_info)
27894 column = 0;
27895
27896 file_num = maybe_emit_file (lookup_filename (filename));
27897
27898 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27899 the debugger has used the second (possibly duplicate) line number
27900 at the beginning of the function to mark the end of the prologue.
27901 We could eliminate any other duplicates within the function. For
27902 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27903 that second line number entry. */
27904 /* Recall that this end-of-prologue indication is *not* the same thing
27905 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27906 to which the hook corresponds, follows the last insn that was
27907 emitted by gen_prologue. What we need is to precede the first insn
27908 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27909 insn that corresponds to something the user wrote. These may be
27910 very different locations once scheduling is enabled. */
27911
27912 if (0 && file_num == table->file_num
27913 && line == table->line_num
27914 && column == table->column_num
27915 && discriminator == table->discrim_num
27916 && is_stmt == table->is_stmt)
27917 return;
27918
27919 switch_to_section (current_function_section ());
27920
27921 /* If requested, emit something human-readable. */
27922 if (flag_debug_asm)
27923 {
27924 if (debug_column_info)
27925 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27926 filename, line, column);
27927 else
27928 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27929 filename, line);
27930 }
27931
27932 if (output_asm_line_debug_info ())
27933 {
27934 /* Emit the .loc directive understood by GNU as. */
27935 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27936 file_num, line, is_stmt, discriminator */
27937 fputs ("\t.loc ", asm_out_file);
27938 fprint_ul (asm_out_file, file_num);
27939 putc (' ', asm_out_file);
27940 fprint_ul (asm_out_file, line);
27941 putc (' ', asm_out_file);
27942 fprint_ul (asm_out_file, column);
27943
27944 if (is_stmt != table->is_stmt)
27945 {
27946 #if HAVE_GAS_LOC_STMT
27947 fputs (" is_stmt ", asm_out_file);
27948 putc (is_stmt ? '1' : '0', asm_out_file);
27949 #endif
27950 }
27951 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27952 {
27953 gcc_assert (discriminator > 0);
27954 fputs (" discriminator ", asm_out_file);
27955 fprint_ul (asm_out_file, (unsigned long) discriminator);
27956 }
27957 if (debug_variable_location_views)
27958 {
27959 if (!RESETTING_VIEW_P (table->view))
27960 {
27961 table->symviews_since_reset++;
27962 if (table->symviews_since_reset > symview_upper_bound)
27963 symview_upper_bound = table->symviews_since_reset;
27964 /* When we're using the assembler to compute view
27965 numbers, we output symbolic labels after "view" in
27966 .loc directives, and the assembler will set them for
27967 us, so that we can refer to the view numbers in
27968 location lists. The only exceptions are when we know
27969 a view will be zero: "-0" is a forced reset, used
27970 e.g. in the beginning of functions, whereas "0" tells
27971 the assembler to check that there was a PC change
27972 since the previous view, in a way that implicitly
27973 resets the next view. */
27974 fputs (" view ", asm_out_file);
27975 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27976 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27977 assemble_name (asm_out_file, label);
27978 table->view = ++lvugid;
27979 }
27980 else
27981 {
27982 table->symviews_since_reset = 0;
27983 if (FORCE_RESETTING_VIEW_P (table->view))
27984 fputs (" view -0", asm_out_file);
27985 else
27986 fputs (" view 0", asm_out_file);
27987 /* Mark the present view as a zero view. Earlier debug
27988 binds may have already added its id to loclists to be
27989 emitted later, so we can't reuse the id for something
27990 else. However, it's good to know whether a view is
27991 known to be zero, because then we may be able to
27992 optimize out locviews that are all zeros, so take
27993 note of it in zero_view_p. */
27994 if (!zero_view_p)
27995 zero_view_p = BITMAP_GGC_ALLOC ();
27996 bitmap_set_bit (zero_view_p, lvugid);
27997 table->view = ++lvugid;
27998 }
27999 }
28000 putc ('\n', asm_out_file);
28001 }
28002 else
28003 {
28004 unsigned int label_num = ++line_info_label_num;
28005
28006 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28007
28008 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28009 push_dw_line_info_entry (table, LI_adv_address, label_num);
28010 else
28011 push_dw_line_info_entry (table, LI_set_address, label_num);
28012 if (debug_variable_location_views)
28013 {
28014 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28015 if (resetting)
28016 table->view = 0;
28017
28018 if (flag_debug_asm)
28019 fprintf (asm_out_file, "\t%s view %s%d\n",
28020 ASM_COMMENT_START,
28021 resetting ? "-" : "",
28022 table->view);
28023
28024 table->view++;
28025 }
28026 if (file_num != table->file_num)
28027 push_dw_line_info_entry (table, LI_set_file, file_num);
28028 if (discriminator != table->discrim_num)
28029 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28030 if (is_stmt != table->is_stmt)
28031 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28032 push_dw_line_info_entry (table, LI_set_line, line);
28033 if (debug_column_info)
28034 push_dw_line_info_entry (table, LI_set_column, column);
28035 }
28036
28037 table->file_num = file_num;
28038 table->line_num = line;
28039 table->column_num = column;
28040 table->discrim_num = discriminator;
28041 table->is_stmt = is_stmt;
28042 table->in_use = true;
28043 }
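
/* A stand-alone sketch of the .loc directive text that the code above
   streams out piecewise, e.g.
       .loc 1 42 7 is_stmt 1 discriminator 3 view .LVU5
   The helper name is illustrative; unlike the real code it prints
   is_stmt unconditionally (the code above emits it only when it changes,
   and only if the assembler supports it).  VIEW may be a symbolic ".LVU"
   label, "0" (checked reset) or "-0" (forced reset); pass NULL to omit
   it.  Guarded with #if 0 so it is not compiled.  */
#if 0
#include <stdio.h>

static void
example_print_loc (FILE *f, unsigned file_num, unsigned line,
                   unsigned column, int is_stmt,
                   unsigned discriminator, const char *view)
{
  fprintf (f, "\t.loc %u %u %u is_stmt %d", file_num, line, column,
           is_stmt ? 1 : 0);
  if (discriminator)
    fprintf (f, " discriminator %u", discriminator);
  if (view)
    fprintf (f, " view %s", view);
  fputc ('\n', f);
}
#endif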
28044
28045 /* Record the beginning of a new source file. */
28046
28047 static void
28048 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28049 {
28050 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28051 {
28052 macinfo_entry e;
28053 e.code = DW_MACINFO_start_file;
28054 e.lineno = lineno;
28055 e.info = ggc_strdup (filename);
28056 vec_safe_push (macinfo_table, e);
28057 }
28058 }
28059
28060 /* Record the end of a source file. */
28061
28062 static void
28063 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28064 {
28065 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28066 {
28067 macinfo_entry e;
28068 e.code = DW_MACINFO_end_file;
28069 e.lineno = lineno;
28070 e.info = NULL;
28071 vec_safe_push (macinfo_table, e);
28072 }
28073 }
28074
28075 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28076 the tail part of the directive line, i.e. the part past the initial
28077 `whitespace, #, whitespace, directive-name, whitespace' prefix. */
28078
28079 static void
28080 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28081 const char *buffer ATTRIBUTE_UNUSED)
28082 {
28083 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28084 {
28085 macinfo_entry e;
28086 /* Insert a dummy first entry to be able to optimize the whole
28087 predefined macro block using DW_MACRO_import. */
28088 if (macinfo_table->is_empty () && lineno <= 1)
28089 {
28090 e.code = 0;
28091 e.lineno = 0;
28092 e.info = NULL;
28093 vec_safe_push (macinfo_table, e);
28094 }
28095 e.code = DW_MACINFO_define;
28096 e.lineno = lineno;
28097 e.info = ggc_strdup (buffer);
28098 vec_safe_push (macinfo_table, e);
28099 }
28100 }
28101
28102 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28103 the tail part of the directive line, i.e. the part past the initial
28104 `whitespace, #, whitespace, directive-name, whitespace' prefix. */
28105
28106 static void
28107 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28108 const char *buffer ATTRIBUTE_UNUSED)
28109 {
28110 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28111 {
28112 macinfo_entry e;
28113 /* Insert a dummy first entry to be able to optimize the whole
28114 predefined macro block using DW_MACRO_import. */
28115 if (macinfo_table->is_empty () && lineno <= 1)
28116 {
28117 e.code = 0;
28118 e.lineno = 0;
28119 e.info = NULL;
28120 vec_safe_push (macinfo_table, e);
28121 }
28122 e.code = DW_MACINFO_undef;
28123 e.lineno = lineno;
28124 e.info = ggc_strdup (buffer);
28125 vec_safe_push (macinfo_table, e);
28126 }
28127 }
28128
28129 /* Helpers for the hash table of macinfo entries used to avoid emitting duplicate DW_MACRO_import groups. */
28130
28131 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28132 {
28133 static inline hashval_t hash (const macinfo_entry *);
28134 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28135 };
28136
28137 inline hashval_t
28138 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28139 {
28140 return htab_hash_string (entry->info);
28141 }
28142
28143 inline bool
28144 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28145 const macinfo_entry *entry2)
28146 {
28147 return !strcmp (entry1->info, entry2->info);
28148 }
28149
28150 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28151
28152 /* Output a single .debug_macinfo entry. */
28153
28154 static void
28155 output_macinfo_op (macinfo_entry *ref)
28156 {
28157 int file_num;
28158 size_t len;
28159 struct indirect_string_node *node;
28160 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28161 struct dwarf_file_data *fd;
28162
28163 switch (ref->code)
28164 {
28165 case DW_MACINFO_start_file:
28166 fd = lookup_filename (ref->info);
28167 file_num = maybe_emit_file (fd);
28168 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28169 dw2_asm_output_data_uleb128 (ref->lineno,
28170 "Included from line number %lu",
28171 (unsigned long) ref->lineno);
28172 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28173 break;
28174 case DW_MACINFO_end_file:
28175 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28176 break;
28177 case DW_MACINFO_define:
28178 case DW_MACINFO_undef:
28179 len = strlen (ref->info) + 1;
28180 if (!dwarf_strict
28181 && len > DWARF_OFFSET_SIZE
28182 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28183 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28184 {
28185 ref->code = ref->code == DW_MACINFO_define
28186 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28187 output_macinfo_op (ref);
28188 return;
28189 }
28190 dw2_asm_output_data (1, ref->code,
28191 ref->code == DW_MACINFO_define
28192 ? "Define macro" : "Undefine macro");
28193 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28194 (unsigned long) ref->lineno);
28195 dw2_asm_output_nstring (ref->info, -1, "The macro");
28196 break;
28197 case DW_MACRO_define_strp:
28198 case DW_MACRO_undef_strp:
28199 /* NB: dwarf2out_finish performs:
28200 1. save_macinfo_strings
28201 2. hash table traverse of index_string
28202 3. output_macinfo -> output_macinfo_op
28203 4. output_indirect_strings
28204 -> hash table traverse of output_index_string
28205
28206 When output_macinfo_op is called, all index strings have already
28207 been added to the hash table by save_macinfo_strings, so we must
28208 not pass INSERT to find_slot_with_hash: INSERT may expand the hash
28209 table even if no insertion is needed, changing the hash table
28210 traversal order between index_string and output_index_string. */
28211 node = find_AT_string (ref->info, NO_INSERT);
28212 gcc_assert (node
28213 && (node->form == DW_FORM_strp
28214 || node->form == dwarf_FORM (DW_FORM_strx)));
28215 dw2_asm_output_data (1, ref->code,
28216 ref->code == DW_MACRO_define_strp
28217 ? "Define macro strp"
28218 : "Undefine macro strp");
28219 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28220 (unsigned long) ref->lineno);
28221 if (node->form == DW_FORM_strp)
28222 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28223 debug_str_section, "The macro: \"%s\"",
28224 ref->info);
28225 else
28226 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28227 ref->info);
28228 break;
28229 case DW_MACRO_import:
28230 dw2_asm_output_data (1, ref->code, "Import");
28231 ASM_GENERATE_INTERNAL_LABEL (label,
28232 DEBUG_MACRO_SECTION_LABEL,
28233 ref->lineno + macinfo_label_base);
28234 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28235 break;
28236 default:
28237 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28238 ASM_COMMENT_START, (unsigned long) ref->code);
28239 break;
28240 }
28241 }
28242
28243 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28244 other compilation units' .debug_macinfo sections. IDX is the index of
28245 the first define/undef op; return the number of ops that should be
28246 emitted in a comdat .debug_macinfo section and emit
28247 a DW_MACRO_import entry referencing it.
28248 If the define/undef entry should be emitted normally, return 0. */
28249
28250 static unsigned
28251 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28252 macinfo_hash_type **macinfo_htab)
28253 {
28254 macinfo_entry *first, *second, *cur, *inc;
28255 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28256 unsigned char checksum[16];
28257 struct md5_ctx ctx;
28258 char *grp_name, *tail;
28259 const char *base;
28260 unsigned int i, count, encoded_filename_len, linebuf_len;
28261 macinfo_entry **slot;
28262
28263 first = &(*macinfo_table)[idx];
28264 second = &(*macinfo_table)[idx + 1];
28265
28266 /* Optimize only if there are at least two consecutive define/undef ops,
28267 and either all of them appear before the first DW_MACINFO_start_file
28268 with lineno {0,1} (i.e. the predefined macro block), or all of them
28269 are in some included header file. */
28270 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28271 return 0;
28272 if (vec_safe_is_empty (files))
28273 {
28274 if (first->lineno > 1 || second->lineno > 1)
28275 return 0;
28276 }
28277 else if (first->lineno == 0)
28278 return 0;
28279
28280 /* Find the last define/undef entry that can be grouped together
28281 with FIRST, and at the same time compute the MD5 checksum of their
28282 codes, line numbers and strings. */
28283 md5_init_ctx (&ctx);
28284 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28285 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28286 break;
28287 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28288 break;
28289 else
28290 {
28291 unsigned char code = cur->code;
28292 md5_process_bytes (&code, 1, &ctx);
28293 checksum_uleb128 (cur->lineno, &ctx);
28294 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28295 }
28296 md5_finish_ctx (&ctx, checksum);
28297 count = i - idx;
28298
28299 /* From the containing include filename (if any) pick up just
28300 the usable characters of its basename. */
28301 if (vec_safe_is_empty (files))
28302 base = "";
28303 else
28304 base = lbasename (files->last ().info);
28305 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28306 if (ISIDNUM (base[i]) || base[i] == '.')
28307 encoded_filename_len++;
28308 /* Account for the '.' appended at the end. */
28309 if (encoded_filename_len)
28310 encoded_filename_len++;
28311
28312 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28313 linebuf_len = strlen (linebuf);
28314
28315 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28316 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28317 + 16 * 2 + 1);
28318 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28319 tail = grp_name + 4;
28320 if (encoded_filename_len)
28321 {
28322 for (i = 0; base[i]; i++)
28323 if (ISIDNUM (base[i]) || base[i] == '.')
28324 *tail++ = base[i];
28325 *tail++ = '.';
28326 }
28327 memcpy (tail, linebuf, linebuf_len);
28328 tail += linebuf_len;
28329 *tail++ = '.';
28330 for (i = 0; i < 16; i++)
28331 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28332
28333 /* Construct a macinfo_entry for DW_MACRO_import
28334 in the empty vector entry before the first define/undef. */
28335 inc = &(*macinfo_table)[idx - 1];
28336 inc->code = DW_MACRO_import;
28337 inc->lineno = 0;
28338 inc->info = ggc_strdup (grp_name);
28339 if (!*macinfo_htab)
28340 *macinfo_htab = new macinfo_hash_type (10);
28341 /* Avoid emitting duplicates. */
28342 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28343 if (*slot != NULL)
28344 {
28345 inc->code = 0;
28346 inc->info = NULL;
28347 /* If such an entry has been used before, just emit
28348 a DW_MACRO_import op. */
28349 inc = *slot;
28350 output_macinfo_op (inc);
28351 /* And clear all macinfo_entry in the range to avoid emitting them
28352 in the second pass. */
28353 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28354 {
28355 cur->code = 0;
28356 cur->info = NULL;
28357 }
28358 }
28359 else
28360 {
28361 *slot = inc;
28362 inc->lineno = (*macinfo_htab)->elements ();
28363 output_macinfo_op (inc);
28364 }
28365 return count;
28366 }
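
/* A stand-alone sketch of the comdat group name construction described
   above, wmN.[<encoded filename>.]<lineno>.<md5sum>, producing e.g.
   "wm4.stdio.h.34.<32 hex digits>".  The helper name is illustrative
   and the 16-byte CHECKSUM is taken as precomputed (the code above
   computes it with md5_process_bytes).  BUF is assumed to be large
   enough.  Guarded with #if 0 so it is not compiled.  */
#if 0
#include <ctype.h>
#include <stdio.h>
#include <string.h>

static void
example_macinfo_group_name (char *buf, int offset_size, const char *base,
                            unsigned long lineno,
                            const unsigned char checksum[16])
{
  char *tail = buf;

  memcpy (tail, offset_size == 4 ? "wm4." : "wm8.", 4);
  tail += 4;
  /* Keep only identifier characters and '.' from the basename, then a
     separating '.' if anything was kept.  */
  for (const char *p = base; *p; p++)
    if (isalnum ((unsigned char) *p) || *p == '_' || *p == '.')
      *tail++ = *p;
  if (tail != buf + 4)
    *tail++ = '.';
  tail += sprintf (tail, "%lu.", lineno);
  for (int i = 0; i < 16; i++)
    tail += sprintf (tail, "%02x", (unsigned) checksum[i]);
}
#endif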
28367
28368 /* Save any strings needed by the macinfo table in the debug str
28369 table. All strings must be collected into the table by the time
28370 index_string is called. */
28371
28372 static void
28373 save_macinfo_strings (void)
28374 {
28375 unsigned len;
28376 unsigned i;
28377 macinfo_entry *ref;
28378
28379 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28380 {
28381 switch (ref->code)
28382 {
28383 /* Match the logic in output_macinfo_op to decide on
28384 indirect strings. */
28385 case DW_MACINFO_define:
28386 case DW_MACINFO_undef:
28387 len = strlen (ref->info) + 1;
28388 if (!dwarf_strict
28389 && len > DWARF_OFFSET_SIZE
28390 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28391 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28392 set_indirect_string (find_AT_string (ref->info));
28393 break;
28394 case DW_MACINFO_start_file:
28395 /* -gsplit-dwarf -g3 will also output the filename as an indirect
28396 string. */
28397 if (!dwarf_split_debug_info)
28398 break;
28399 /* Fall through. */
28400 case DW_MACRO_define_strp:
28401 case DW_MACRO_undef_strp:
28402 set_indirect_string (find_AT_string (ref->info));
28403 break;
28404 default:
28405 break;
28406 }
28407 }
28408 }
28409
28410 /* Output macinfo section(s). */
28411
28412 static void
28413 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28414 {
28415 unsigned i;
28416 unsigned long length = vec_safe_length (macinfo_table);
28417 macinfo_entry *ref;
28418 vec<macinfo_entry, va_gc> *files = NULL;
28419 macinfo_hash_type *macinfo_htab = NULL;
28420 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28421
28422 if (! length)
28423 return;
28424
28425 /* output_macinfo* uses these interchangeably. */
28426 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28427 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28428 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28429 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28430
28431 /* AIX Assembler inserts the length, so adjust the reference to match the
28432 offset expected by debuggers. */
28433 strcpy (dl_section_ref, debug_line_label);
28434 if (XCOFF_DEBUGGING_INFO)
28435 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28436
28437 /* For .debug_macro emit the section header. */
28438 if (!dwarf_strict || dwarf_version >= 5)
28439 {
28440 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28441 "DWARF macro version number");
28442 if (DWARF_OFFSET_SIZE == 8)
28443 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28444 else
28445 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28446 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28447 debug_line_section, NULL);
28448 }
28449
28450 /* The first loop emits the primary .debug_macinfo section; after
28451 each op is emitted, its macinfo_entry is cleared.
28452 If a longer range of define/undef ops can be optimized using
28453 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28454 the vector before the first define/undef in the range, and the
28455 whole range is not emitted here but kept for the second loop. */
28456 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28457 {
28458 switch (ref->code)
28459 {
28460 case DW_MACINFO_start_file:
28461 vec_safe_push (files, *ref);
28462 break;
28463 case DW_MACINFO_end_file:
28464 if (!vec_safe_is_empty (files))
28465 files->pop ();
28466 break;
28467 case DW_MACINFO_define:
28468 case DW_MACINFO_undef:
28469 if ((!dwarf_strict || dwarf_version >= 5)
28470 && HAVE_COMDAT_GROUP
28471 && vec_safe_length (files) != 1
28472 && i > 0
28473 && i + 1 < length
28474 && (*macinfo_table)[i - 1].code == 0)
28475 {
28476 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28477 if (count)
28478 {
28479 i += count - 1;
28480 continue;
28481 }
28482 }
28483 break;
28484 case 0:
28485 /* A dummy entry may be inserted at the beginning to be able
28486 to optimize the whole block of predefined macros. */
28487 if (i == 0)
28488 continue;
28489 default:
28490 break;
28491 }
28492 output_macinfo_op (ref);
28493 ref->info = NULL;
28494 ref->code = 0;
28495 }
28496
28497 if (!macinfo_htab)
28498 return;
28499
28500 /* Save the number of transparent includes so we can adjust the
28501 label number for the fat LTO object DWARF. */
28502 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28503
28504 delete macinfo_htab;
28505 macinfo_htab = NULL;
28506
28507 /* If any DW_MACRO_import ops were used, then at each kept
28508 DW_MACRO_import entry terminate the current chain, switch to a new
28509 comdat .debug_macinfo section and emit the define/undef entries in it. */
28510 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28511 switch (ref->code)
28512 {
28513 case 0:
28514 continue;
28515 case DW_MACRO_import:
28516 {
28517 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28518 tree comdat_key = get_identifier (ref->info);
28519 /* Terminate the previous .debug_macinfo section. */
28520 dw2_asm_output_data (1, 0, "End compilation unit");
28521 targetm.asm_out.named_section (debug_macinfo_section_name,
28522 SECTION_DEBUG
28523 | SECTION_LINKONCE
28524 | (early_lto_debug
28525 ? SECTION_EXCLUDE : 0),
28526 comdat_key);
28527 ASM_GENERATE_INTERNAL_LABEL (label,
28528 DEBUG_MACRO_SECTION_LABEL,
28529 ref->lineno + macinfo_label_base);
28530 ASM_OUTPUT_LABEL (asm_out_file, label);
28531 ref->code = 0;
28532 ref->info = NULL;
28533 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28534 "DWARF macro version number");
28535 if (DWARF_OFFSET_SIZE == 8)
28536 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28537 else
28538 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28539 }
28540 break;
28541 case DW_MACINFO_define:
28542 case DW_MACINFO_undef:
28543 output_macinfo_op (ref);
28544 ref->code = 0;
28545 ref->info = NULL;
28546 break;
28547 default:
28548 gcc_unreachable ();
28549 }
28550
28551 macinfo_label_base += macinfo_label_base_adj;
28552 }
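
/* A condensed stand-alone sketch of the two-pass shape described in the
   comments above, using a plain array instead of GCC's vec and hash
   table: pass 1 emits and clears every op that was not deferred into a
   shareable define/undef run (the emission of the import op itself into
   the primary section, done by optimize_macinfo_range above, is elided);
   pass 2 re-walks the table, opening a new comdat section at each kept
   import op and emitting the run that follows it.  `example_macinfo',
   EXAMPLE_IMPORT (an arbitrary marker standing in for DW_MACRO_import)
   and the callbacks are all hypothetical.  Guarded with #if 0 so it is
   not compiled.  */
#if 0
#include <stddef.h>

struct example_macinfo { int code; const char *info; };
enum { EXAMPLE_IMPORT = -1 };

static void
example_two_pass_emit (struct example_macinfo *tab, size_t n,
                       int (*deferred_p) (size_t),
                       void (*emit_op) (struct example_macinfo *),
                       void (*start_comdat) (const char *group))
{
  /* Pass 1: the primary section.  */
  for (size_t i = 0; i < n; i++)
    if (tab[i].code != 0 && !deferred_p (i))
      {
        emit_op (&tab[i]);
        tab[i].code = 0;
        tab[i].info = NULL;
      }

  /* Pass 2: whatever is still non-zero was deferred.  */
  for (size_t i = 0; i < n; i++)
    {
      if (tab[i].code == 0)
        continue;
      if (tab[i].code == EXAMPLE_IMPORT)
        start_comdat (tab[i].info);
      else
        emit_op (&tab[i]);
      tab[i].code = 0;
      tab[i].info = NULL;
    }
}
#endif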
28553
28554 /* Initialize the various sections and labels for DWARF output, for
28555 either normal or early LTO debug info (EARLY_LTO_DEBUG). Returns the
28556 generation (zero-based count of how many times this function has been called). */
28557
28558 static unsigned
28559 init_sections_and_labels (bool early_lto_debug)
28560 {
28561 /* As we may get called multiple times have a generation count for
28562 labels. */
28563 static unsigned generation = 0;
28564
28565 if (early_lto_debug)
28566 {
28567 if (!dwarf_split_debug_info)
28568 {
28569 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28570 SECTION_DEBUG | SECTION_EXCLUDE,
28571 NULL);
28572 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28573 SECTION_DEBUG | SECTION_EXCLUDE,
28574 NULL);
28575 debug_macinfo_section_name
28576 = ((dwarf_strict && dwarf_version < 5)
28577 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28578 debug_macinfo_section = get_section (debug_macinfo_section_name,
28579 SECTION_DEBUG
28580 | SECTION_EXCLUDE, NULL);
28581 }
28582 else
28583 {
28584 /* ??? Which of the following do we need early? */
28585 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28586 SECTION_DEBUG | SECTION_EXCLUDE,
28587 NULL);
28588 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28589 SECTION_DEBUG | SECTION_EXCLUDE,
28590 NULL);
28591 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28592 SECTION_DEBUG
28593 | SECTION_EXCLUDE, NULL);
28594 debug_skeleton_abbrev_section
28595 = get_section (DEBUG_LTO_ABBREV_SECTION,
28596 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28597 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28598 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28599 generation);
28600
28601 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28602 stay in the main .o, but the skeleton_line goes into the
28603 split-off .dwo. */
28604 debug_skeleton_line_section
28605 = get_section (DEBUG_LTO_LINE_SECTION,
28606 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28607 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28608 DEBUG_SKELETON_LINE_SECTION_LABEL,
28609 generation);
28610 debug_str_offsets_section
28611 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28612 SECTION_DEBUG | SECTION_EXCLUDE,
28613 NULL);
28614 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28615 DEBUG_SKELETON_INFO_SECTION_LABEL,
28616 generation);
28617 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28618 DEBUG_STR_DWO_SECTION_FLAGS,
28619 NULL);
28620 debug_macinfo_section_name
28621 = ((dwarf_strict && dwarf_version < 5)
28622 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28623 debug_macinfo_section = get_section (debug_macinfo_section_name,
28624 SECTION_DEBUG | SECTION_EXCLUDE,
28625 NULL);
28626 }
28627 /* For macro info and the file table we have to refer to a
28628 debug_line section. */
28629 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28630 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28631 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28632 DEBUG_LINE_SECTION_LABEL, generation);
28633
28634 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28635 DEBUG_STR_SECTION_FLAGS
28636 | SECTION_EXCLUDE, NULL);
28637 if (!dwarf_split_debug_info)
28638 debug_line_str_section
28639 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28640 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28641 }
28642 else
28643 {
28644 if (!dwarf_split_debug_info)
28645 {
28646 debug_info_section = get_section (DEBUG_INFO_SECTION,
28647 SECTION_DEBUG, NULL);
28648 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28649 SECTION_DEBUG, NULL);
28650 debug_loc_section = get_section (dwarf_version >= 5
28651 ? DEBUG_LOCLISTS_SECTION
28652 : DEBUG_LOC_SECTION,
28653 SECTION_DEBUG, NULL);
28654 debug_macinfo_section_name
28655 = ((dwarf_strict && dwarf_version < 5)
28656 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28657 debug_macinfo_section = get_section (debug_macinfo_section_name,
28658 SECTION_DEBUG, NULL);
28659 }
28660 else
28661 {
28662 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28663 SECTION_DEBUG | SECTION_EXCLUDE,
28664 NULL);
28665 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28666 SECTION_DEBUG | SECTION_EXCLUDE,
28667 NULL);
28668 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28669 SECTION_DEBUG, NULL);
28670 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28671 SECTION_DEBUG, NULL);
28672 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28673 SECTION_DEBUG, NULL);
28674 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28675 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28676 generation);
28677
28678 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28679 stay in the main .o, but the skeleton_line goes into the
28680 split-off .dwo. */
28681 debug_skeleton_line_section
28682 = get_section (DEBUG_DWO_LINE_SECTION,
28683 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28684 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28685 DEBUG_SKELETON_LINE_SECTION_LABEL,
28686 generation);
28687 debug_str_offsets_section
28688 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28689 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28690 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28691 DEBUG_SKELETON_INFO_SECTION_LABEL,
28692 generation);
28693 debug_loc_section = get_section (dwarf_version >= 5
28694 ? DEBUG_DWO_LOCLISTS_SECTION
28695 : DEBUG_DWO_LOC_SECTION,
28696 SECTION_DEBUG | SECTION_EXCLUDE,
28697 NULL);
28698 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28699 DEBUG_STR_DWO_SECTION_FLAGS,
28700 NULL);
28701 debug_macinfo_section_name
28702 = ((dwarf_strict && dwarf_version < 5)
28703 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28704 debug_macinfo_section = get_section (debug_macinfo_section_name,
28705 SECTION_DEBUG | SECTION_EXCLUDE,
28706 NULL);
28707 }
28708 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28709 SECTION_DEBUG, NULL);
28710 debug_line_section = get_section (DEBUG_LINE_SECTION,
28711 SECTION_DEBUG, NULL);
28712 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28713 SECTION_DEBUG, NULL);
28714 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28715 SECTION_DEBUG, NULL);
28716 debug_str_section = get_section (DEBUG_STR_SECTION,
28717 DEBUG_STR_SECTION_FLAGS, NULL);
28718 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28719 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28720 DEBUG_STR_SECTION_FLAGS, NULL);
28721
28722 debug_ranges_section = get_section (dwarf_version >= 5
28723 ? DEBUG_RNGLISTS_SECTION
28724 : DEBUG_RANGES_SECTION,
28725 SECTION_DEBUG, NULL);
28726 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28727 SECTION_DEBUG, NULL);
28728 }
28729
28730 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28731 DEBUG_ABBREV_SECTION_LABEL, generation);
28732 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28733 DEBUG_INFO_SECTION_LABEL, generation);
28734 info_section_emitted = false;
28735 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28736 DEBUG_LINE_SECTION_LABEL, generation);
28737 /* There are up to 4 unique ranges labels per generation.
28738 See also output_rnglists. */
28739 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28740 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28741 if (dwarf_version >= 5 && dwarf_split_debug_info)
28742 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28743 DEBUG_RANGES_SECTION_LABEL,
28744 1 + generation * 4);
28745 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28746 DEBUG_ADDR_SECTION_LABEL, generation);
28747 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28748 (dwarf_strict && dwarf_version < 5)
28749 ? DEBUG_MACINFO_SECTION_LABEL
28750 : DEBUG_MACRO_SECTION_LABEL, generation);
28751 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28752 generation);
28753
28754 ++generation;
28755 return generation - 1;
28756 }
28757
28758 /* Set up for Dwarf output at the start of compilation. */
28759
28760 static void
28761 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28762 {
28763 /* Allocate the file_table. */
28764 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28765
28766 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28767 /* Allocate the decl_die_table. */
28768 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28769
28770 /* Allocate the decl_loc_table. */
28771 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28772
28773 /* Allocate the cached_dw_loc_list_table. */
28774 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28775
28776 /* Allocate the initial hunk of the abbrev_die_table. */
28777 vec_alloc (abbrev_die_table, 256);
28778 /* Zero-th entry is allocated, but unused. */
28779 abbrev_die_table->quick_push (NULL);
28780
28781 /* Allocate the dwarf_proc_stack_usage_map. */
28782 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28783
28784 /* Allocate the pubtypes and pubnames vectors. */
28785 vec_alloc (pubname_table, 32);
28786 vec_alloc (pubtype_table, 32);
28787
28788 vec_alloc (incomplete_types, 64);
28789
28790 vec_alloc (used_rtx_array, 32);
28791
28792 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28793 vec_alloc (macinfo_table, 64);
28794 #endif
28795
28796 /* If front-ends already registered a main translation unit but we were not
28797 ready to perform the association, do this now. */
28798 if (main_translation_unit != NULL_TREE)
28799 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28800 }
28801
28802 /* Called before compile () starts outputting functions, variables
28803 and toplevel asms into assembly. */
28804
28805 static void
28806 dwarf2out_assembly_start (void)
28807 {
28808 if (text_section_line_info)
28809 return;
28810
28811 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28812 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28813 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28814 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28815 COLD_TEXT_SECTION_LABEL, 0);
28816 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28817
28818 switch_to_section (text_section);
28819 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28820 #endif
28821
28822 /* Make sure the line number table for .text always exists. */
28823 text_section_line_info = new_line_info_table ();
28824 text_section_line_info->end_label = text_end_label;
28825
28826 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28827 cur_line_info_table = text_section_line_info;
28828 #endif
28829
28830 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28831 && dwarf2out_do_cfi_asm ()
28832 && !dwarf2out_do_eh_frame ())
28833 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28834 }
28835
28836 /* A helper function for dwarf2out_finish called through
28837 htab_traverse. Assign a string its index. All strings must be
28838 collected into the table by the time index_string is called,
28839 because the indexing code relies on htab_traverse to traverse nodes
28840 in the same order for each run. */
28841
28842 int
28843 index_string (indirect_string_node **h, unsigned int *index)
28844 {
28845 indirect_string_node *node = *h;
28846
28847 find_string_form (node);
28848 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28849 {
28850 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28851 node->index = *index;
28852 *index += 1;
28853 }
28854 return 1;
28855 }
28856
28857 /* A helper function for output_indirect_strings called through
28858 htab_traverse. Output the offset to a string and update the
28859 current offset. */
28860
28861 int
28862 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28863 {
28864 indirect_string_node *node = *h;
28865
28866 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28867 {
28868 /* Assert that this node has been assigned an index. */
28869 gcc_assert (node->index != NO_INDEX_ASSIGNED
28870 && node->index != NOT_INDEXED);
28871 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28872 "indexed string 0x%x: %s", node->index, node->str);
28873 *offset += strlen (node->str) + 1;
28874 }
28875 return 1;
28876 }
28877
28878 /* A helper function for dwarf2out_finish called through
28879 htab_traverse. Output the indexed string. */
28880
28881 int
28882 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28883 {
28884 struct indirect_string_node *node = *h;
28885
28886 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28887 {
28888 /* Assert that the strings are output in the same order as their
28889 indexes were assigned. */
28890 gcc_assert (*cur_idx == node->index);
28891 assemble_string (node->str, strlen (node->str) + 1);
28892 *cur_idx += 1;
28893 }
28894 return 1;
28895 }
28896
28897 /* A helper function for output_indirect_strings. Counts the number
28898 of indexed string offsets. Must match the logic of the functions
28899 output_index_string[_offset] above. */
28900 int
28901 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28902 {
28903 struct indirect_string_node *node = *h;
28904
28905 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28906 *last_idx += 1;
28907 return 1;
28908 }
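
/* A stand-alone sketch of the DW_FORM_strx bookkeeping that the
   traversals above rely on, using a plain array instead of a hash-table
   walk: entry i of the offsets table is the running sum of strlen + 1
   of the strings before it, and the strings are then emitted in the
   same order.  The 4-byte offset size and the ".4byte"/".asciz"
   spellings are illustrative assumptions, as is the helper name.
   Guarded with #if 0 so it is not compiled.  */
#if 0
#include <stdio.h>
#include <string.h>

static void
example_emit_indexed_strings (FILE *f, const char *const *strs, size_t n)
{
  unsigned long offset = 0;

  /* The offsets table: one entry per indexed string.  */
  for (size_t i = 0; i < n; i++)
    {
      fprintf (f, "\t.4byte 0x%lx\t/* indexed string %u: %s */\n",
               offset, (unsigned) i, strs[i]);
      offset += strlen (strs[i]) + 1;
    }

  /* The string pool itself, in the same order as the indexes.  */
  for (size_t i = 0; i < n; i++)
    fprintf (f, "\t.asciz \"%s\"\n", strs[i]);
}
#endif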
28909
28910 /* A helper function for dwarf2out_finish called through
28911 htab_traverse. Emit one queued .debug_str string. */
28912
28913 int
28914 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28915 {
28916 struct indirect_string_node *node = *h;
28917
28918 node->form = find_string_form (node);
28919 if (node->form == form && node->refcount > 0)
28920 {
28921 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28922 assemble_string (node->str, strlen (node->str) + 1);
28923 }
28924
28925 return 1;
28926 }
28927
28928 /* Output the indexed string table. */
28929
28930 static void
28931 output_indirect_strings (void)
28932 {
28933 switch_to_section (debug_str_section);
28934 if (!dwarf_split_debug_info)
28935 debug_str_hash->traverse<enum dwarf_form,
28936 output_indirect_string> (DW_FORM_strp);
28937 else
28938 {
28939 unsigned int offset = 0;
28940 unsigned int cur_idx = 0;
28941
28942 if (skeleton_debug_str_hash)
28943 skeleton_debug_str_hash->traverse<enum dwarf_form,
28944 output_indirect_string> (DW_FORM_strp);
28945
28946 switch_to_section (debug_str_offsets_section);
28947 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28948 header. Note that we don't need to generate a label for the
28949 actual index table following the header here, because this is
28950 for the split DWARF case only. In a .dwo file there is only
28951 one string offsets table (and one debug info section). But
28952 if we were to start using string offset tables for the main (or
28953 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28954 pointing to the actual index after the header. Split DWARF
28955 units will never have a string offsets base attribute. When
28956 a split unit is moved into a .dwp file the string offsets can
28957 be found through the .debug_cu_index section table. */
28958 if (dwarf_version >= 5)
28959 {
28960 unsigned int last_idx = 0;
28961 unsigned long str_offsets_length;
28962
28963 debug_str_hash->traverse_noresize
28964 <unsigned int *, count_index_strings> (&last_idx);
28965 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28966 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28967 dw2_asm_output_data (4, 0xffffffff,
28968 "Escape value for 64-bit DWARF extension");
28969 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28970 "Length of string offsets unit");
28971 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28972 dw2_asm_output_data (2, 0, "Header zero padding");
28973 }
28974 debug_str_hash->traverse_noresize
28975 <unsigned int *, output_index_string_offset> (&offset);
28976 switch_to_section (debug_str_dwo_section);
28977 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28978 (&cur_idx);
28979 }
28980 }
28981
28982 /* Callback for htab_traverse to assign an index to an entry in the
28983 table, and to write that entry to the .debug_addr section. */
28984
28985 int
28986 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28987 {
28988 addr_table_entry *entry = *slot;
28989
28990 if (entry->refcount == 0)
28991 {
28992 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28993 || entry->index == NOT_INDEXED);
28994 return 1;
28995 }
28996
28997 gcc_assert (entry->index == *cur_index);
28998 (*cur_index)++;
28999
29000 switch (entry->kind)
29001 {
29002 case ate_kind_rtx:
29003 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29004 "0x%x", entry->index);
29005 break;
29006 case ate_kind_rtx_dtprel:
29007 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29008 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29009 DWARF2_ADDR_SIZE,
29010 entry->addr.rtl);
29011 fputc ('\n', asm_out_file);
29012 break;
29013 case ate_kind_label:
29014 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29015 "0x%x", entry->index);
29016 break;
29017 default:
29018 gcc_unreachable ();
29019 }
29020 return 1;
29021 }
29022
29023 /* A helper function for dwarf2out_finish. Counts the number
29024 of indexed addresses. Must match the logic of the function
29025 output_addr_table_entry above. */
29026 int
29027 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29028 {
29029 addr_table_entry *entry = *slot;
29030
29031 if (entry->refcount > 0)
29032 *last_idx += 1;
29033 return 1;
29034 }
29035
29036 /* Produce the .debug_addr section. */
29037
29038 static void
29039 output_addr_table (void)
29040 {
29041 unsigned int index = 0;
29042 if (addr_index_table == NULL || addr_index_table->size () == 0)
29043 return;
29044
29045 switch_to_section (debug_addr_section);
29046 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29047 which GCC uses to implement -gsplit-dwarf as a GNU extension to
29048 DWARF before DWARF5, didn't have a header for .debug_addr units.
29049 DWARF5 specifies a small header when address tables are used. */
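/* In outline, that header is:
     unit_length            (DWARF_OFFSET_SIZE bytes, excluding itself)
     version                (2 bytes, 5)
     address_size           (1 byte)
     segment_selector_size  (1 byte, 0)
   followed by the address entries themselves, which is why addrs_length
   below adds 4 to the size of the entries.  */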
29050 if (dwarf_version >= 5)
29051 {
29052 unsigned int last_idx = 0;
29053 unsigned long addrs_length;
29054
29055 addr_index_table->traverse_noresize
29056 <unsigned int *, count_index_addrs> (&last_idx);
29057 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29058
29059 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29060 dw2_asm_output_data (4, 0xffffffff,
29061 "Escape value for 64-bit DWARF extension");
29062 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
29063 "Length of Address Unit");
29064 dw2_asm_output_data (2, 5, "DWARF addr version");
29065 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29066 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29067 }
29068 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29069
29070 addr_index_table
29071 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29072 }
29073
29074 #if ENABLE_ASSERT_CHECKING
29075 /* Verify that all marks are clear. */
29076
29077 static void
29078 verify_marks_clear (dw_die_ref die)
29079 {
29080 dw_die_ref c;
29081
29082 gcc_assert (! die->die_mark);
29083 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29084 }
29085 #endif /* ENABLE_ASSERT_CHECKING */
29086
29087 /* Clear the marks for a die and its children.
29088 Do nothing special if a mark isn't set. */
29089
29090 static void
29091 prune_unmark_dies (dw_die_ref die)
29092 {
29093 dw_die_ref c;
29094
29095 if (die->die_mark)
29096 die->die_mark = 0;
29097 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29098 }
29099
29100 /* Given LOC that is referenced by a DIE we're marking as used, find all
29101 DWARF procedures it references and mark them as used too. */
29102
29103 static void
29104 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29105 {
29106 for (; loc != NULL; loc = loc->dw_loc_next)
29107 switch (loc->dw_loc_opc)
29108 {
29109 case DW_OP_implicit_pointer:
29110 case DW_OP_convert:
29111 case DW_OP_reinterpret:
29112 case DW_OP_GNU_implicit_pointer:
29113 case DW_OP_GNU_convert:
29114 case DW_OP_GNU_reinterpret:
29115 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29116 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29117 break;
29118 case DW_OP_GNU_variable_value:
29119 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29120 {
29121 dw_die_ref ref
29122 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29123 if (ref == NULL)
29124 break;
29125 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29126 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29127 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29128 }
29129 /* FALLTHRU */
29130 case DW_OP_call2:
29131 case DW_OP_call4:
29132 case DW_OP_call_ref:
29133 case DW_OP_const_type:
29134 case DW_OP_GNU_const_type:
29135 case DW_OP_GNU_parameter_ref:
29136 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29137 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29138 break;
29139 case DW_OP_regval_type:
29140 case DW_OP_deref_type:
29141 case DW_OP_GNU_regval_type:
29142 case DW_OP_GNU_deref_type:
29143 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29144 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29145 break;
29146 case DW_OP_entry_value:
29147 case DW_OP_GNU_entry_value:
29148 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29149 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29150 break;
29151 default:
29152 break;
29153 }
29154 }
29155
29156 /* Given DIE that we're marking as used, find any other dies
29157 it references as attributes and mark them as used. */
29158
29159 static void
29160 prune_unused_types_walk_attribs (dw_die_ref die)
29161 {
29162 dw_attr_node *a;
29163 unsigned ix;
29164
29165 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29166 {
29167 switch (AT_class (a))
29168 {
29169 /* Make sure DWARF procedures referenced by location descriptions will
29170 get emitted. */
29171 case dw_val_class_loc:
29172 prune_unused_types_walk_loc_descr (AT_loc (a));
29173 break;
29174 case dw_val_class_loc_list:
29175 for (dw_loc_list_ref list = AT_loc_list (a);
29176 list != NULL;
29177 list = list->dw_loc_next)
29178 prune_unused_types_walk_loc_descr (list->expr);
29179 break;
29180
29181 case dw_val_class_view_list:
29182 /* This points to a loc_list in another attribute, so it's
29183 already covered. */
29184 break;
29185
29186 case dw_val_class_die_ref:
29187 /* A reference to another DIE.
29188 Make sure that it will get emitted.
29189 If it was broken out into a comdat group, don't follow it. */
29190 if (! AT_ref (a)->comdat_type_p
29191 || a->dw_attr == DW_AT_specification)
29192 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29193 break;
29194
29195 case dw_val_class_str:
29196 /* Set the string's refcount to 0 so that prune_unused_types_mark
29197 accounts properly for it. */
29198 a->dw_attr_val.v.val_str->refcount = 0;
29199 break;
29200
29201 default:
29202 break;
29203 }
29204 }
29205 }
29206
29207 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29208
29209 static void
29210 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29211 {
29212 dw_die_ref c;
29213
29214 if (die == NULL || die->die_child == NULL)
29215 return;
29216 c = die->die_child;
29217 do
29218 {
29219 if (is_template_parameter (c))
29220 prune_unused_types_mark (c, 1);
29221 c = c->die_sib;
29222 } while (c && c != die->die_child);
29223 }
29224
29225 /* Mark DIE as being used. If DOKIDS is true, then walk down
29226 to DIE's children. */
29227
29228 static void
29229 prune_unused_types_mark (dw_die_ref die, int dokids)
29230 {
29231 dw_die_ref c;
29232
29233 if (die->die_mark == 0)
29234 {
29235 /* We haven't done this node yet. Mark it as used. */
29236 die->die_mark = 1;
29237 /* If this is the DIE of a generic type instantiation,
29238 mark the children DIEs that describe its generic parms and
29239 args. */
29240 prune_unused_types_mark_generic_parms_dies (die);
29241
29242 /* We also have to mark its parents as used.
29243 (But we don't want to mark our parent's kids due to this,
29244 unless it is a class.) */
29245 if (die->die_parent)
29246 prune_unused_types_mark (die->die_parent,
29247 class_scope_p (die->die_parent));
29248
29249 /* Mark any referenced nodes. */
29250 prune_unused_types_walk_attribs (die);
29251
29252 /* If this node is a specification,
29253 also mark the definition, if it exists. */
29254 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29255 prune_unused_types_mark (die->die_definition, 1);
29256 }
29257
29258 if (dokids && die->die_mark != 2)
29259 {
29260 /* We need to walk the children, but haven't done so yet.
29261 Remember that we've walked the kids. */
29262 die->die_mark = 2;
29263
29264 /* If this is an array type, we need to make sure our
29265 kids get marked, even if they're types. If we're
29266 breaking out types into comdat sections, do this
29267 for all type definitions. */
29268 if (die->die_tag == DW_TAG_array_type
29269 || (use_debug_types
29270 && is_type_die (die) && ! is_declaration_die (die)))
29271 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29272 else
29273 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29274 }
29275 }
29276
29277 /* For local classes, check whether any static member functions were
29278 emitted and if so, mark them. */
29279
29280 static void
29281 prune_unused_types_walk_local_classes (dw_die_ref die)
29282 {
29283 dw_die_ref c;
29284
29285 if (die->die_mark == 2)
29286 return;
29287
29288 switch (die->die_tag)
29289 {
29290 case DW_TAG_structure_type:
29291 case DW_TAG_union_type:
29292 case DW_TAG_class_type:
29293 case DW_TAG_interface_type:
29294 break;
29295
29296 case DW_TAG_subprogram:
29297 if (!get_AT_flag (die, DW_AT_declaration)
29298 || die->die_definition != NULL)
29299 prune_unused_types_mark (die, 1);
29300 return;
29301
29302 default:
29303 return;
29304 }
29305
29306 /* Mark children. */
29307 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29308 }
29309
29310 /* Walk the tree DIE and mark types that we actually use. */
29311
29312 static void
29313 prune_unused_types_walk (dw_die_ref die)
29314 {
29315 dw_die_ref c;
29316
29317 /* Don't do anything if this node is already marked and
29318 children have been marked as well. */
29319 if (die->die_mark == 2)
29320 return;
29321
29322 switch (die->die_tag)
29323 {
29324 case DW_TAG_structure_type:
29325 case DW_TAG_union_type:
29326 case DW_TAG_class_type:
29327 case DW_TAG_interface_type:
29328 if (die->die_perennial_p)
29329 break;
29330
29331 for (c = die->die_parent; c; c = c->die_parent)
29332 if (c->die_tag == DW_TAG_subprogram)
29333 break;
29334
29335 /* Finding used static member functions inside of classes
29336 is needed just for local classes, because for other classes
29337 static member function DIEs with DW_AT_specification
29338 are emitted outside of the DW_TAG_*_type. If we ever change
29339 it, we'd need to call this even for non-local classes. */
29340 if (c)
29341 prune_unused_types_walk_local_classes (die);
29342
29343 /* It's a type node --- don't mark it. */
29344 return;
29345
29346 case DW_TAG_const_type:
29347 case DW_TAG_packed_type:
29348 case DW_TAG_pointer_type:
29349 case DW_TAG_reference_type:
29350 case DW_TAG_rvalue_reference_type:
29351 case DW_TAG_volatile_type:
29352 case DW_TAG_typedef:
29353 case DW_TAG_array_type:
29354 case DW_TAG_friend:
29355 case DW_TAG_enumeration_type:
29356 case DW_TAG_subroutine_type:
29357 case DW_TAG_string_type:
29358 case DW_TAG_set_type:
29359 case DW_TAG_subrange_type:
29360 case DW_TAG_ptr_to_member_type:
29361 case DW_TAG_file_type:
29362 /* Type nodes are useful only when other DIEs reference them --- don't
29363 mark them. */
29364 /* FALLTHROUGH */
29365
29366 case DW_TAG_dwarf_procedure:
29367 /* Likewise for DWARF procedures. */
29368
29369 if (die->die_perennial_p)
29370 break;
29371
29372 return;
29373
29374 case DW_TAG_variable:
29375 if (flag_debug_only_used_symbols)
29376 {
29377 if (die->die_perennial_p)
29378 break;
29379
29380 /* premark_used_variables marks external variables --- don't mark
29381 them here. But function-local externals are always considered
29382 used. */
29383 if (get_AT (die, DW_AT_external))
29384 {
29385 for (c = die->die_parent; c; c = c->die_parent)
29386 if (c->die_tag == DW_TAG_subprogram)
29387 break;
29388 if (!c)
29389 return;
29390 }
29391 }
29392 /* FALLTHROUGH */
29393
29394 default:
29395 /* Mark everything else. */
29396 break;
29397 }
29398
29399 if (die->die_mark == 0)
29400 {
29401 die->die_mark = 1;
29402
29403 /* Now, mark any dies referenced from here. */
29404 prune_unused_types_walk_attribs (die);
29405 }
29406
29407 die->die_mark = 2;
29408
29409 /* Mark children. */
29410 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29411 }
29412
29413 /* Increment the string counts on strings referred to from DIE's
29414 attributes. */
29415
29416 static void
29417 prune_unused_types_update_strings (dw_die_ref die)
29418 {
29419 dw_attr_node *a;
29420 unsigned ix;
29421
29422 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29423 if (AT_class (a) == dw_val_class_str)
29424 {
29425 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29426 s->refcount++;
29427 /* Avoid unnecessarily putting strings that are used less than
29428 twice in the hash table. */
29429 if (s->refcount
29430 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29431 {
29432 indirect_string_node **slot
29433 = debug_str_hash->find_slot_with_hash (s->str,
29434 htab_hash_string (s->str),
29435 INSERT);
29436 gcc_assert (*slot == NULL);
29437 *slot = s;
29438 }
29439 }
29440 }
29441
29442 /* Mark DIE and its children as removed. */
29443
29444 static void
29445 mark_removed (dw_die_ref die)
29446 {
29447 dw_die_ref c;
29448 die->removed = true;
29449 FOR_EACH_CHILD (die, c, mark_removed (c));
29450 }
29451
29452 /* Remove from the tree DIE any dies that aren't marked. */
29453
29454 static void
29455 prune_unused_types_prune (dw_die_ref die)
29456 {
29457 dw_die_ref c;
29458
29459 gcc_assert (die->die_mark);
29460 prune_unused_types_update_strings (die);
29461
29462 if (! die->die_child)
29463 return;
29464
29465 c = die->die_child;
29466 do {
29467 dw_die_ref prev = c, next;
29468 for (c = c->die_sib; ! c->die_mark; c = next)
29469 if (c == die->die_child)
29470 {
29471 /* No marked children between 'prev' and the end of the list. */
29472 if (prev == c)
29473 /* No marked children at all. */
29474 die->die_child = NULL;
29475 else
29476 {
29477 prev->die_sib = c->die_sib;
29478 die->die_child = prev;
29479 }
29480 c->die_sib = NULL;
29481 mark_removed (c);
29482 return;
29483 }
29484 else
29485 {
29486 next = c->die_sib;
29487 c->die_sib = NULL;
29488 mark_removed (c);
29489 }
29490
29491 if (c != prev->die_sib)
29492 prev->die_sib = c;
29493 prune_unused_types_prune (c);
29494 } while (c != die->die_child);
29495 }
29496
29497 /* Remove dies representing declarations that we never use. */
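/* In outline this is a mark-and-sweep pass: premark types used by global
   variables (and, if flag_debug_only_used_symbols, variables used in the
   symtab), walk the DIE trees setting die_mark on everything reachable,
   prune the unmarked children, and finally clear the marks again so later
   passes start from a clean state.  */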
29498
29499 static void
29500 prune_unused_types (void)
29501 {
29502 unsigned int i;
29503 limbo_die_node *node;
29504 comdat_type_node *ctnode;
29505 pubname_entry *pub;
29506 dw_die_ref base_type;
29507
29508 #if ENABLE_ASSERT_CHECKING
29509 /* All the marks should already be clear. */
29510 verify_marks_clear (comp_unit_die ());
29511 for (node = limbo_die_list; node; node = node->next)
29512 verify_marks_clear (node->die);
29513 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29514 verify_marks_clear (ctnode->root_die);
29515 #endif /* ENABLE_ASSERT_CHECKING */
29516
29517 /* Mark types that are used in global variables. */
29518 premark_types_used_by_global_vars ();
29519
29520 /* Mark variables used in the symtab. */
29521 if (flag_debug_only_used_symbols)
29522 premark_used_variables ();
29523
29524 /* Set the mark on nodes that are actually used. */
29525 prune_unused_types_walk (comp_unit_die ());
29526 for (node = limbo_die_list; node; node = node->next)
29527 prune_unused_types_walk (node->die);
29528 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29529 {
29530 prune_unused_types_walk (ctnode->root_die);
29531 prune_unused_types_mark (ctnode->type_die, 1);
29532 }
29533
29534 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29535 are unusual in that they are pubnames that are the children of pubtypes.
29536 They should only be marked via their parent DW_TAG_enumeration_type die,
29537 not as roots in themselves. */
29538 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29539 if (pub->die->die_tag != DW_TAG_enumerator)
29540 prune_unused_types_mark (pub->die, 1);
29541 for (i = 0; base_types.iterate (i, &base_type); i++)
29542 prune_unused_types_mark (base_type, 1);
29543
29544 /* Also set the mark on nodes that could be referenced by
29545 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29546 by DW_TAG_inlined_subroutine origins. */
29547 cgraph_node *cnode;
29548 FOR_EACH_FUNCTION (cnode)
29549 if (cnode->referred_to_p (false))
29550 {
29551 dw_die_ref die = lookup_decl_die (cnode->decl);
29552 if (die == NULL || die->die_mark)
29553 continue;
29554 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29555 if (e->caller != cnode)
29556 {
29557 prune_unused_types_mark (die, 1);
29558 break;
29559 }
29560 }
29561
29562 if (debug_str_hash)
29563 debug_str_hash->empty ();
29564 if (skeleton_debug_str_hash)
29565 skeleton_debug_str_hash->empty ();
29566 prune_unused_types_prune (comp_unit_die ());
29567 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29568 {
29569 node = *pnode;
29570 if (!node->die->die_mark)
29571 *pnode = node->next;
29572 else
29573 {
29574 prune_unused_types_prune (node->die);
29575 pnode = &node->next;
29576 }
29577 }
29578 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29579 prune_unused_types_prune (ctnode->root_die);
29580
29581 /* Leave the marks clear. */
29582 prune_unmark_dies (comp_unit_die ());
29583 for (node = limbo_die_list; node; node = node->next)
29584 prune_unmark_dies (node->die);
29585 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29586 prune_unmark_dies (ctnode->root_die);
29587 }
29588
29589 /* Helpers to manipulate hash table of comdat type units. */
29590
29591 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29592 {
29593 static inline hashval_t hash (const comdat_type_node *);
29594 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29595 };
29596
29597 inline hashval_t
29598 comdat_type_hasher::hash (const comdat_type_node *type_node)
29599 {
29600 hashval_t h;
29601 memcpy (&h, type_node->signature, sizeof (h));
29602 return h;
29603 }
29604
29605 inline bool
29606 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29607 const comdat_type_node *type_node_2)
29608 {
29609 return (! memcmp (type_node_1->signature, type_node_2->signature,
29610 DWARF_TYPE_SIGNATURE_SIZE));
29611 }
29612
29613 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to DIE
29614 to the location where it would have been added had we known its
29615 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29616 likely improve the compactness of debug info by removing equivalent
29617 abbrevs, and hide any differences caused by deferring the
29618 computation of the assembler name, triggered by e.g. PCH. */
29619
29620 static inline void
29621 move_linkage_attr (dw_die_ref die)
29622 {
29623 unsigned ix = vec_safe_length (die->die_attr);
29624 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29625
29626 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29627 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29628
29629 while (--ix > 0)
29630 {
29631 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29632
29633 if (prev->dw_attr == DW_AT_decl_line
29634 || prev->dw_attr == DW_AT_decl_column
29635 || prev->dw_attr == DW_AT_name)
29636 break;
29637 }
29638
29639 if (ix != vec_safe_length (die->die_attr) - 1)
29640 {
29641 die->die_attr->pop ();
29642 die->die_attr->quick_insert (ix, linkage);
29643 }
29644 }
29645
29646 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29647 referenced from typed stack ops and count how often they are used. */
29648
29649 static void
29650 mark_base_types (dw_loc_descr_ref loc)
29651 {
29652 dw_die_ref base_type = NULL;
29653
29654 for (; loc; loc = loc->dw_loc_next)
29655 {
29656 switch (loc->dw_loc_opc)
29657 {
29658 case DW_OP_regval_type:
29659 case DW_OP_deref_type:
29660 case DW_OP_GNU_regval_type:
29661 case DW_OP_GNU_deref_type:
29662 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29663 break;
29664 case DW_OP_convert:
29665 case DW_OP_reinterpret:
29666 case DW_OP_GNU_convert:
29667 case DW_OP_GNU_reinterpret:
29668 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29669 continue;
29670 /* FALLTHRU */
29671 case DW_OP_const_type:
29672 case DW_OP_GNU_const_type:
29673 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29674 break;
29675 case DW_OP_entry_value:
29676 case DW_OP_GNU_entry_value:
29677 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29678 continue;
29679 default:
29680 continue;
29681 }
29682 gcc_assert (base_type->die_parent == comp_unit_die ());
29683 if (base_type->die_mark)
29684 base_type->die_mark++;
29685 else
29686 {
29687 base_types.safe_push (base_type);
29688 base_type->die_mark = 1;
29689 }
29690 }
29691 }
29692
29693 /* Comparison function for sorting marked base types. */
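/* More frequently used base types sort first; ties are broken by
   decreasing DW_AT_byte_size, then DW_AT_encoding, then DW_AT_alignment.  */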
29694
29695 static int
29696 base_type_cmp (const void *x, const void *y)
29697 {
29698 dw_die_ref dx = *(const dw_die_ref *) x;
29699 dw_die_ref dy = *(const dw_die_ref *) y;
29700 unsigned int byte_size1, byte_size2;
29701 unsigned int encoding1, encoding2;
29702 unsigned int align1, align2;
29703 if (dx->die_mark > dy->die_mark)
29704 return -1;
29705 if (dx->die_mark < dy->die_mark)
29706 return 1;
29707 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29708 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29709 if (byte_size1 < byte_size2)
29710 return 1;
29711 if (byte_size1 > byte_size2)
29712 return -1;
29713 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29714 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29715 if (encoding1 < encoding2)
29716 return 1;
29717 if (encoding1 > encoding2)
29718 return -1;
29719 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29720 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29721 if (align1 < align2)
29722 return 1;
29723 if (align1 > align2)
29724 return -1;
29725 return 0;
29726 }
29727
29728 /* Move base types marked by mark_base_types as early as possible
29729 in the CU, sorted by decreasing usage count both to make the
29730 uleb128 references as small as possible and to make sure they
29731 will have die_offset already computed by calc_die_sizes when
29732 sizes of typed stack loc ops are computed. */
29733
29734 static void
29735 move_marked_base_types (void)
29736 {
29737 unsigned int i;
29738 dw_die_ref base_type, die, c;
29739
29740 if (base_types.is_empty ())
29741 return;
29742
29743 /* Sort by decreasing usage count; they will be added again in that
29744 order later on. */
29745 base_types.qsort (base_type_cmp);
29746 die = comp_unit_die ();
29747 c = die->die_child;
29748 do
29749 {
29750 dw_die_ref prev = c;
29751 c = c->die_sib;
29752 while (c->die_mark)
29753 {
29754 remove_child_with_prev (c, prev);
29755 /* Since base types got marked, there must be at least
29756 one node other than DW_TAG_base_type. */
29757 gcc_assert (die->die_child != NULL);
29758 c = prev->die_sib;
29759 }
29760 }
29761 while (c != die->die_child);
29762 gcc_assert (die->die_child);
29763 c = die->die_child;
29764 for (i = 0; base_types.iterate (i, &base_type); i++)
29765 {
29766 base_type->die_mark = 0;
29767 base_type->die_sib = c->die_sib;
29768 c->die_sib = base_type;
29769 c = base_type;
29770 }
29771 }
29772
29773 /* Helper function for resolve_addr: attempt to resolve
29774 one CONST_STRING and return true if successful. Similarly verify that
29775 SYMBOL_REFs refer to variables emitted in the current CU. */
29776
29777 static bool
29778 resolve_one_addr (rtx *addr)
29779 {
29780 rtx rtl = *addr;
29781
29782 if (GET_CODE (rtl) == CONST_STRING)
29783 {
29784 size_t len = strlen (XSTR (rtl, 0)) + 1;
29785 tree t = build_string (len, XSTR (rtl, 0));
29786 tree tlen = size_int (len - 1);
29787 TREE_TYPE (t)
29788 = build_array_type (char_type_node, build_index_type (tlen));
29789 rtl = lookup_constant_def (t);
29790 if (!rtl || !MEM_P (rtl))
29791 return false;
29792 rtl = XEXP (rtl, 0);
29793 if (GET_CODE (rtl) == SYMBOL_REF
29794 && SYMBOL_REF_DECL (rtl)
29795 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29796 return false;
29797 vec_safe_push (used_rtx_array, rtl);
29798 *addr = rtl;
29799 return true;
29800 }
29801
29802 if (GET_CODE (rtl) == SYMBOL_REF
29803 && SYMBOL_REF_DECL (rtl))
29804 {
29805 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29806 {
29807 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29808 return false;
29809 }
29810 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29811 return false;
29812 }
29813
29814 if (GET_CODE (rtl) == CONST)
29815 {
29816 subrtx_ptr_iterator::array_type array;
29817 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29818 if (!resolve_one_addr (*iter))
29819 return false;
29820 }
29821
29822 return true;
29823 }
29824
29825 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29826 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29827 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29828
29829 static rtx
29830 string_cst_pool_decl (tree t)
29831 {
29832 rtx rtl = output_constant_def (t, 1);
29833 unsigned char *array;
29834 dw_loc_descr_ref l;
29835 tree decl;
29836 size_t len;
29837 dw_die_ref ref;
29838
29839 if (!rtl || !MEM_P (rtl))
29840 return NULL_RTX;
29841 rtl = XEXP (rtl, 0);
29842 if (GET_CODE (rtl) != SYMBOL_REF
29843 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29844 return NULL_RTX;
29845
29846 decl = SYMBOL_REF_DECL (rtl);
29847 if (!lookup_decl_die (decl))
29848 {
29849 len = TREE_STRING_LENGTH (t);
29850 vec_safe_push (used_rtx_array, rtl);
29851 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29852 array = ggc_vec_alloc<unsigned char> (len);
29853 memcpy (array, TREE_STRING_POINTER (t), len);
29854 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29855 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29856 l->dw_loc_oprnd2.v.val_vec.length = len;
29857 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29858 l->dw_loc_oprnd2.v.val_vec.array = array;
29859 add_AT_loc (ref, DW_AT_location, l);
29860 equate_decl_number_to_die (decl, ref);
29861 }
29862 return rtl;
29863 }
29864
29865 /* Helper function of resolve_addr_in_expr. LOC is
29866 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29867 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29868 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29869 with DW_OP_implicit_pointer if possible
29870 and return true; if unsuccessful, return false. */
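/* For example, DW_OP_addr <sym> DW_OP_stack_value, where <sym> is a
   non-external variable that wasn't emitted but whose DIE still has
   DW_AT_location or DW_AT_const_value, becomes
   DW_OP_implicit_pointer <that DIE> <offset>, with the offset taken from
   a wrapping CONST PLUS if present and 0 otherwise.  */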
29871
29872 static bool
29873 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29874 {
29875 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29876 HOST_WIDE_INT offset = 0;
29877 dw_die_ref ref = NULL;
29878 tree decl;
29879
29880 if (GET_CODE (rtl) == CONST
29881 && GET_CODE (XEXP (rtl, 0)) == PLUS
29882 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29883 {
29884 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29885 rtl = XEXP (XEXP (rtl, 0), 0);
29886 }
29887 if (GET_CODE (rtl) == CONST_STRING)
29888 {
29889 size_t len = strlen (XSTR (rtl, 0)) + 1;
29890 tree t = build_string (len, XSTR (rtl, 0));
29891 tree tlen = size_int (len - 1);
29892
29893 TREE_TYPE (t)
29894 = build_array_type (char_type_node, build_index_type (tlen));
29895 rtl = string_cst_pool_decl (t);
29896 if (!rtl)
29897 return false;
29898 }
29899 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29900 {
29901 decl = SYMBOL_REF_DECL (rtl);
29902 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29903 {
29904 ref = lookup_decl_die (decl);
29905 if (ref && (get_AT (ref, DW_AT_location)
29906 || get_AT (ref, DW_AT_const_value)))
29907 {
29908 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29909 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29910 loc->dw_loc_oprnd1.val_entry = NULL;
29911 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29912 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29913 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29914 loc->dw_loc_oprnd2.v.val_int = offset;
29915 return true;
29916 }
29917 }
29918 }
29919 return false;
29920 }
29921
29922 /* Helper function for resolve_addr, handle one location
29923 expression, return false if at least one CONST_STRING or SYMBOL_REF in
29924 the location list couldn't be resolved. */
29925
29926 static bool
29927 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29928 {
29929 dw_loc_descr_ref keep = NULL;
29930 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29931 switch (loc->dw_loc_opc)
29932 {
29933 case DW_OP_addr:
29934 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29935 {
29936 if ((prev == NULL
29937 || prev->dw_loc_opc == DW_OP_piece
29938 || prev->dw_loc_opc == DW_OP_bit_piece)
29939 && loc->dw_loc_next
29940 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29941 && (!dwarf_strict || dwarf_version >= 5)
29942 && optimize_one_addr_into_implicit_ptr (loc))
29943 break;
29944 return false;
29945 }
29946 break;
29947 case DW_OP_GNU_addr_index:
29948 case DW_OP_addrx:
29949 case DW_OP_GNU_const_index:
29950 case DW_OP_constx:
29951 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29952 || loc->dw_loc_opc == DW_OP_addrx)
29953 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29954 || loc->dw_loc_opc == DW_OP_constx)
29955 && loc->dtprel))
29956 {
29957 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29958 if (!resolve_one_addr (&rtl))
29959 return false;
29960 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29961 loc->dw_loc_oprnd1.val_entry
29962 = add_addr_table_entry (rtl, ate_kind_rtx);
29963 }
29964 break;
29965 case DW_OP_const4u:
29966 case DW_OP_const8u:
29967 if (loc->dtprel
29968 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29969 return false;
29970 break;
29971 case DW_OP_plus_uconst:
29972 if (size_of_loc_descr (loc)
29973 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29974 + 1
29975 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29976 {
29977 dw_loc_descr_ref repl
29978 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29979 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29980 add_loc_descr (&repl, loc->dw_loc_next);
29981 *loc = *repl;
29982 }
29983 break;
29984 case DW_OP_implicit_value:
29985 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29986 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29987 return false;
29988 break;
29989 case DW_OP_implicit_pointer:
29990 case DW_OP_GNU_implicit_pointer:
29991 case DW_OP_GNU_parameter_ref:
29992 case DW_OP_GNU_variable_value:
29993 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29994 {
29995 dw_die_ref ref
29996 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29997 if (ref == NULL)
29998 return false;
29999 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30000 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30001 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30002 }
30003 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30004 {
30005 if (prev == NULL
30006 && loc->dw_loc_next == NULL
30007 && AT_class (a) == dw_val_class_loc)
30008 switch (a->dw_attr)
30009 {
30010 /* The following attributes allow both exprloc and reference
30011 forms, so if the whole expression is DW_OP_GNU_variable_value
30012 alone we can transform it into a reference. */
30013 case DW_AT_byte_size:
30014 case DW_AT_bit_size:
30015 case DW_AT_lower_bound:
30016 case DW_AT_upper_bound:
30017 case DW_AT_bit_stride:
30018 case DW_AT_count:
30019 case DW_AT_allocated:
30020 case DW_AT_associated:
30021 case DW_AT_byte_stride:
30022 a->dw_attr_val.val_class = dw_val_class_die_ref;
30023 a->dw_attr_val.val_entry = NULL;
30024 a->dw_attr_val.v.val_die_ref.die
30025 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30026 a->dw_attr_val.v.val_die_ref.external = 0;
30027 return true;
30028 default:
30029 break;
30030 }
30031 if (dwarf_strict)
30032 return false;
30033 }
30034 break;
30035 case DW_OP_const_type:
30036 case DW_OP_regval_type:
30037 case DW_OP_deref_type:
30038 case DW_OP_convert:
30039 case DW_OP_reinterpret:
30040 case DW_OP_GNU_const_type:
30041 case DW_OP_GNU_regval_type:
30042 case DW_OP_GNU_deref_type:
30043 case DW_OP_GNU_convert:
30044 case DW_OP_GNU_reinterpret:
30045 while (loc->dw_loc_next
30046 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30047 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30048 {
30049 dw_die_ref base1, base2;
30050 unsigned enc1, enc2, size1, size2;
30051 if (loc->dw_loc_opc == DW_OP_regval_type
30052 || loc->dw_loc_opc == DW_OP_deref_type
30053 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30054 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30055 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30056 else if (loc->dw_loc_oprnd1.val_class
30057 == dw_val_class_unsigned_const)
30058 break;
30059 else
30060 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30061 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30062 == dw_val_class_unsigned_const)
30063 break;
30064 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30065 gcc_assert (base1->die_tag == DW_TAG_base_type
30066 && base2->die_tag == DW_TAG_base_type);
30067 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30068 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30069 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30070 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30071 if (size1 == size2
30072 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30073 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30074 && loc != keep)
30075 || enc1 == enc2))
30076 {
30077 /* Optimize away next DW_OP_convert after
30078 adjusting LOC's base type die reference. */
30079 if (loc->dw_loc_opc == DW_OP_regval_type
30080 || loc->dw_loc_opc == DW_OP_deref_type
30081 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30082 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30083 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30084 else
30085 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30086 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30087 continue;
30088 }
30089 /* Don't change integer DW_OP_convert after e.g. floating
30090 point typed stack entry. */
30091 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30092 keep = loc->dw_loc_next;
30093 break;
30094 }
30095 break;
30096 default:
30097 break;
30098 }
30099 return true;
30100 }
30101
30102 /* Helper function of resolve_addr. DIE had DW_AT_location of
30103 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
30104 and DW_OP_addr couldn't be resolved. resolve_addr has already
30105 removed the DW_AT_location attribute. This function attempts to
30106 add a new DW_AT_location attribute with DW_OP_implicit_pointer
30107 to it or DW_AT_const_value attribute, if possible. */
30108
30109 static void
30110 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30111 {
30112 if (!VAR_P (decl)
30113 || lookup_decl_die (decl) != die
30114 || DECL_EXTERNAL (decl)
30115 || !TREE_STATIC (decl)
30116 || DECL_INITIAL (decl) == NULL_TREE
30117 || DECL_P (DECL_INITIAL (decl))
30118 || get_AT (die, DW_AT_const_value))
30119 return;
30120
30121 tree init = DECL_INITIAL (decl);
30122 HOST_WIDE_INT offset = 0;
30123 /* For variables that have been optimized away and thus
30124 don't have a memory location, see if we can emit
30125 DW_AT_const_value instead. */
30126 if (tree_add_const_value_attribute (die, init))
30127 return;
30128 if (dwarf_strict && dwarf_version < 5)
30129 return;
30130 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30131 and ADDR_EXPR refers to a decl that has DW_AT_location or
30132 DW_AT_const_value (but isn't addressable, otherwise
30133 resolving the original DW_OP_addr wouldn't fail), see if
30134 we can add DW_OP_implicit_pointer. */
30135 STRIP_NOPS (init);
30136 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30137 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30138 {
30139 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30140 init = TREE_OPERAND (init, 0);
30141 STRIP_NOPS (init);
30142 }
30143 if (TREE_CODE (init) != ADDR_EXPR)
30144 return;
30145 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30146 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30147 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30148 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30149 && TREE_OPERAND (init, 0) != decl))
30150 {
30151 dw_die_ref ref;
30152 dw_loc_descr_ref l;
30153
30154 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30155 {
30156 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30157 if (!rtl)
30158 return;
30159 decl = SYMBOL_REF_DECL (rtl);
30160 }
30161 else
30162 decl = TREE_OPERAND (init, 0);
30163 ref = lookup_decl_die (decl);
30164 if (ref == NULL
30165 || (!get_AT (ref, DW_AT_location)
30166 && !get_AT (ref, DW_AT_const_value)))
30167 return;
30168 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30169 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30170 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30171 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30172 add_AT_loc (die, DW_AT_location, l);
30173 }
30174 }
30175
30176 /* Return NULL if L is a valid DWARF expression, otherwise return
30177 the first op in it that is not valid in a DWARF expression. */
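/* For example, for DW_OP_fbreg <-8> DW_OP_stack_value this returns the
   DW_OP_stack_value op, while for DW_OP_fbreg <-8> DW_OP_plus_uconst <4>
   it returns NULL.  */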
30178
30179 static dw_loc_descr_ref
30180 non_dwarf_expression (dw_loc_descr_ref l)
30181 {
30182 while (l)
30183 {
30184 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30185 return l;
30186 switch (l->dw_loc_opc)
30187 {
30188 case DW_OP_regx:
30189 case DW_OP_implicit_value:
30190 case DW_OP_stack_value:
30191 case DW_OP_implicit_pointer:
30192 case DW_OP_GNU_implicit_pointer:
30193 case DW_OP_GNU_parameter_ref:
30194 case DW_OP_piece:
30195 case DW_OP_bit_piece:
30196 return l;
30197 default:
30198 break;
30199 }
30200 l = l->dw_loc_next;
30201 }
30202 return NULL;
30203 }
30204
30205 /* Return adjusted copy of EXPR:
30206 If it is empty DWARF expression, return it.
30207 If it is valid non-empty DWARF expression,
30208 return copy of EXPR with DW_OP_deref appended to it.
30209 If it is DWARF expression followed by DW_OP_reg{N,x}, return
30210 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30211 If it is DWARF expression followed by DW_OP_stack_value, return
30212 copy of the DWARF expression without anything appended.
30213 Otherwise, return NULL. */
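/* For example, DW_OP_fbreg <-16> becomes DW_OP_fbreg <-16> DW_OP_deref,
   DW_OP_reg3 becomes DW_OP_breg3 <0>, and
   DW_OP_fbreg <-16> DW_OP_stack_value becomes just DW_OP_fbreg <-16>.  */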
30214
30215 static dw_loc_descr_ref
30216 copy_deref_exprloc (dw_loc_descr_ref expr)
30217 {
30218 dw_loc_descr_ref tail = NULL;
30219
30220 if (expr == NULL)
30221 return NULL;
30222
30223 dw_loc_descr_ref l = non_dwarf_expression (expr);
30224 if (l && l->dw_loc_next)
30225 return NULL;
30226
30227 if (l)
30228 {
30229 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30230 tail = new_loc_descr ((enum dwarf_location_atom)
30231 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30232 0, 0);
30233 else
30234 switch (l->dw_loc_opc)
30235 {
30236 case DW_OP_regx:
30237 tail = new_loc_descr (DW_OP_bregx,
30238 l->dw_loc_oprnd1.v.val_unsigned, 0);
30239 break;
30240 case DW_OP_stack_value:
30241 break;
30242 default:
30243 return NULL;
30244 }
30245 }
30246 else
30247 tail = new_loc_descr (DW_OP_deref, 0, 0);
30248
30249 dw_loc_descr_ref ret = NULL, *p = &ret;
30250 while (expr != l)
30251 {
30252 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30253 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30254 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30255 p = &(*p)->dw_loc_next;
30256 expr = expr->dw_loc_next;
30257 }
30258 *p = tail;
30259 return ret;
30260 }
30261
30262 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
30263 reference to a variable or argument, adjust it if needed and return:
30264 -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size
30265 attribute if present should be removed
30266 0 keep the attribute perhaps with minor modifications, no need to rescan
30267 1 if the attribute has been successfully adjusted. */
30268
30269 static int
30270 optimize_string_length (dw_attr_node *a)
30271 {
30272 dw_loc_descr_ref l = AT_loc (a), lv;
30273 dw_die_ref die;
30274 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30275 {
30276 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30277 die = lookup_decl_die (decl);
30278 if (die)
30279 {
30280 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30281 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30282 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30283 }
30284 else
30285 return -1;
30286 }
30287 else
30288 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30289
30290 /* DWARF5 allows the reference class here, so we can then reference the DIE.
30291 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30292 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30293 {
30294 a->dw_attr_val.val_class = dw_val_class_die_ref;
30295 a->dw_attr_val.val_entry = NULL;
30296 a->dw_attr_val.v.val_die_ref.die = die;
30297 a->dw_attr_val.v.val_die_ref.external = 0;
30298 return 0;
30299 }
30300
30301 dw_attr_node *av = get_AT (die, DW_AT_location);
30302 dw_loc_list_ref d;
30303 bool non_dwarf_expr = false;
30304
30305 if (av == NULL)
30306 return dwarf_strict ? -1 : 0;
30307 switch (AT_class (av))
30308 {
30309 case dw_val_class_loc_list:
30310 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30311 if (d->expr && non_dwarf_expression (d->expr))
30312 non_dwarf_expr = true;
30313 break;
30314 case dw_val_class_view_list:
30315 gcc_unreachable ();
30316 case dw_val_class_loc:
30317 lv = AT_loc (av);
30318 if (lv == NULL)
30319 return dwarf_strict ? -1 : 0;
30320 if (non_dwarf_expression (lv))
30321 non_dwarf_expr = true;
30322 break;
30323 default:
30324 return dwarf_strict ? -1 : 0;
30325 }
30326
30327 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30328 into DW_OP_call4 or DW_OP_GNU_variable_value into
30329 DW_OP_call4 DW_OP_deref, do so. */
30330 if (!non_dwarf_expr
30331 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30332 {
30333 l->dw_loc_opc = DW_OP_call4;
30334 if (l->dw_loc_next)
30335 l->dw_loc_next = NULL;
30336 else
30337 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30338 return 0;
30339 }
30340
30341 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30342 copy over the DW_AT_location attribute from die to a. */
30343 if (l->dw_loc_next != NULL)
30344 {
30345 a->dw_attr_val = av->dw_attr_val;
30346 return 1;
30347 }
30348
30349 dw_loc_list_ref list, *p;
30350 switch (AT_class (av))
30351 {
30352 case dw_val_class_loc_list:
30353 p = &list;
30354 list = NULL;
30355 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30356 {
30357 lv = copy_deref_exprloc (d->expr);
30358 if (lv)
30359 {
30360 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30361 p = &(*p)->dw_loc_next;
30362 }
30363 else if (!dwarf_strict && d->expr)
30364 return 0;
30365 }
30366 if (list == NULL)
30367 return dwarf_strict ? -1 : 0;
30368 a->dw_attr_val.val_class = dw_val_class_loc_list;
30369 gen_llsym (list);
30370 *AT_loc_list_ptr (a) = list;
30371 return 1;
30372 case dw_val_class_loc:
30373 lv = copy_deref_exprloc (AT_loc (av));
30374 if (lv == NULL)
30375 return dwarf_strict ? -1 : 0;
30376 a->dw_attr_val.v.val_loc = lv;
30377 return 1;
30378 default:
30379 gcc_unreachable ();
30380 }
30381 }
30382
30383 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30384 an address in .rodata section if the string literal is emitted there,
30385 or remove the containing location list or replace DW_AT_const_value
30386 with DW_AT_location and empty location expression, if it isn't found
30387 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30388 to something that has been emitted in the current CU. */
30389
30390 static void
30391 resolve_addr (dw_die_ref die)
30392 {
30393 dw_die_ref c;
30394 dw_attr_node *a;
30395 dw_loc_list_ref *curr, *start, loc;
30396 unsigned ix;
30397 bool remove_AT_byte_size = false;
30398
30399 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30400 switch (AT_class (a))
30401 {
30402 case dw_val_class_loc_list:
30403 start = curr = AT_loc_list_ptr (a);
30404 loc = *curr;
30405 gcc_assert (loc);
30406 /* The same list can be referenced more than once. See if we have
30407 already recorded the result from a previous pass. */
30408 if (loc->replaced)
30409 *curr = loc->dw_loc_next;
30410 else if (!loc->resolved_addr)
30411 {
30412 /* As things stand, we do not expect or allow one die to
30413 reference a suffix of another die's location list chain.
30414 References must be identical or completely separate.
30415 There is therefore no need to cache the result of this
30416 pass on any list other than the first; doing so
30417 would lead to unnecessary writes. */
30418 while (*curr)
30419 {
30420 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30421 if (!resolve_addr_in_expr (a, (*curr)->expr))
30422 {
30423 dw_loc_list_ref next = (*curr)->dw_loc_next;
30424 dw_loc_descr_ref l = (*curr)->expr;
30425
30426 if (next && (*curr)->ll_symbol)
30427 {
30428 gcc_assert (!next->ll_symbol);
30429 next->ll_symbol = (*curr)->ll_symbol;
30430 next->vl_symbol = (*curr)->vl_symbol;
30431 }
30432 if (dwarf_split_debug_info)
30433 remove_loc_list_addr_table_entries (l);
30434 *curr = next;
30435 }
30436 else
30437 {
30438 mark_base_types ((*curr)->expr);
30439 curr = &(*curr)->dw_loc_next;
30440 }
30441 }
30442 if (loc == *start)
30443 loc->resolved_addr = 1;
30444 else
30445 {
30446 loc->replaced = 1;
30447 loc->dw_loc_next = *start;
30448 }
30449 }
30450 if (!*start)
30451 {
30452 remove_AT (die, a->dw_attr);
30453 ix--;
30454 }
30455 break;
30456 case dw_val_class_view_list:
30457 {
30458 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30459 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30460 dw_val_node *llnode
30461 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30462 /* If we no longer have a loclist, or it no longer needs
30463 views, drop this attribute. */
30464 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30465 {
30466 remove_AT (die, a->dw_attr);
30467 ix--;
30468 }
30469 break;
30470 }
30471 case dw_val_class_loc:
30472 {
30473 dw_loc_descr_ref l = AT_loc (a);
30474 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30475 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30476 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30477 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30478 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30479 with DW_FORM_ref referencing the same DIE as
30480 DW_OP_GNU_variable_value used to reference. */
30481 if (a->dw_attr == DW_AT_string_length
30482 && l
30483 && l->dw_loc_opc == DW_OP_GNU_variable_value
30484 && (l->dw_loc_next == NULL
30485 || (l->dw_loc_next->dw_loc_next == NULL
30486 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30487 {
30488 switch (optimize_string_length (a))
30489 {
30490 case -1:
30491 remove_AT (die, a->dw_attr);
30492 ix--;
30493 /* If we drop DW_AT_string_length, we need to drop also
30494 DW_AT_{string_length_,}byte_size. */
30495 remove_AT_byte_size = true;
30496 continue;
30497 default:
30498 break;
30499 case 1:
30500 /* Even if we keep the optimized DW_AT_string_length,
30501 it might have changed AT_class, so process it again. */
30502 ix--;
30503 continue;
30504 }
30505 }
30506 /* For -gdwarf-2 don't attempt to optimize
30507 DW_AT_data_member_location containing
30508 DW_OP_plus_uconst - older consumers might
30509 rely on it being that op instead of a more complex,
30510 but shorter, location description. */
30511 if ((dwarf_version > 2
30512 || a->dw_attr != DW_AT_data_member_location
30513 || l == NULL
30514 || l->dw_loc_opc != DW_OP_plus_uconst
30515 || l->dw_loc_next != NULL)
30516 && !resolve_addr_in_expr (a, l))
30517 {
30518 if (dwarf_split_debug_info)
30519 remove_loc_list_addr_table_entries (l);
30520 if (l != NULL
30521 && l->dw_loc_next == NULL
30522 && l->dw_loc_opc == DW_OP_addr
30523 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30524 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30525 && a->dw_attr == DW_AT_location)
30526 {
30527 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30528 remove_AT (die, a->dw_attr);
30529 ix--;
30530 optimize_location_into_implicit_ptr (die, decl);
30531 break;
30532 }
30533 if (a->dw_attr == DW_AT_string_length)
30534 /* If we drop DW_AT_string_length, we need to drop also
30535 DW_AT_{string_length_,}byte_size. */
30536 remove_AT_byte_size = true;
30537 remove_AT (die, a->dw_attr);
30538 ix--;
30539 }
30540 else
30541 mark_base_types (l);
30542 }
30543 break;
30544 case dw_val_class_addr:
30545 if (a->dw_attr == DW_AT_const_value
30546 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30547 {
30548 if (AT_index (a) != NOT_INDEXED)
30549 remove_addr_table_entry (a->dw_attr_val.val_entry);
30550 remove_AT (die, a->dw_attr);
30551 ix--;
30552 }
30553 if ((die->die_tag == DW_TAG_call_site
30554 && a->dw_attr == DW_AT_call_origin)
30555 || (die->die_tag == DW_TAG_GNU_call_site
30556 && a->dw_attr == DW_AT_abstract_origin))
30557 {
30558 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30559 dw_die_ref tdie = lookup_decl_die (tdecl);
30560 dw_die_ref cdie;
30561 if (tdie == NULL
30562 && DECL_EXTERNAL (tdecl)
30563 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30564 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30565 {
30566 dw_die_ref pdie = cdie;
30567 /* Make sure we don't add these DIEs into type units.
30568 We could emit skeleton DIEs for context (namespaces,
30569 outer structs/classes) and a skeleton DIE for the
30570 innermost context with DW_AT_signature pointing to the
30571 type unit. See PR78835. */
30572 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30573 pdie = pdie->die_parent;
30574 if (pdie == NULL)
30575 {
30576 /* Creating a full DIE for tdecl is overly expensive and,
30577 in the LTO phase, at this point even wrong,
30578 as it can end up generating new type DIEs we didn't
30579 output, and optimize_external_refs would then crash. */
30580 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30581 add_AT_flag (tdie, DW_AT_external, 1);
30582 add_AT_flag (tdie, DW_AT_declaration, 1);
30583 add_linkage_attr (tdie, tdecl);
30584 add_name_and_src_coords_attributes (tdie, tdecl, true);
30585 equate_decl_number_to_die (tdecl, tdie);
30586 }
30587 }
30588 if (tdie)
30589 {
30590 a->dw_attr_val.val_class = dw_val_class_die_ref;
30591 a->dw_attr_val.v.val_die_ref.die = tdie;
30592 a->dw_attr_val.v.val_die_ref.external = 0;
30593 }
30594 else
30595 {
30596 if (AT_index (a) != NOT_INDEXED)
30597 remove_addr_table_entry (a->dw_attr_val.val_entry);
30598 remove_AT (die, a->dw_attr);
30599 ix--;
30600 }
30601 }
30602 break;
30603 default:
30604 break;
30605 }
30606
30607 if (remove_AT_byte_size)
30608 remove_AT (die, dwarf_version >= 5
30609 ? DW_AT_string_length_byte_size
30610 : DW_AT_byte_size);
30611
30612 FOR_EACH_CHILD (die, c, resolve_addr (c));
30613 }
30614 \f
30615 /* Helper routines for optimize_location_lists.
30616 This pass tries to share identical location lists in the .debug_loc
30617 section. */
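/* Each list is hashed over its range labels, views, section and location
   expressions (hash_loc_list, hash_locs and hash_loc_operands below), and
   compare_loc_operands provides the exact operand comparison used to
   confirm that two lists with equal hashes really are identical.  */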
30618
30619 /* Iteratively hash operands of LOC opcode into HSTATE. */
30620
30621 static void
30622 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30623 {
30624 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30625 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30626
30627 switch (loc->dw_loc_opc)
30628 {
30629 case DW_OP_const4u:
30630 case DW_OP_const8u:
30631 if (loc->dtprel)
30632 goto hash_addr;
30633 /* FALLTHRU */
30634 case DW_OP_const1u:
30635 case DW_OP_const1s:
30636 case DW_OP_const2u:
30637 case DW_OP_const2s:
30638 case DW_OP_const4s:
30639 case DW_OP_const8s:
30640 case DW_OP_constu:
30641 case DW_OP_consts:
30642 case DW_OP_pick:
30643 case DW_OP_plus_uconst:
30644 case DW_OP_breg0:
30645 case DW_OP_breg1:
30646 case DW_OP_breg2:
30647 case DW_OP_breg3:
30648 case DW_OP_breg4:
30649 case DW_OP_breg5:
30650 case DW_OP_breg6:
30651 case DW_OP_breg7:
30652 case DW_OP_breg8:
30653 case DW_OP_breg9:
30654 case DW_OP_breg10:
30655 case DW_OP_breg11:
30656 case DW_OP_breg12:
30657 case DW_OP_breg13:
30658 case DW_OP_breg14:
30659 case DW_OP_breg15:
30660 case DW_OP_breg16:
30661 case DW_OP_breg17:
30662 case DW_OP_breg18:
30663 case DW_OP_breg19:
30664 case DW_OP_breg20:
30665 case DW_OP_breg21:
30666 case DW_OP_breg22:
30667 case DW_OP_breg23:
30668 case DW_OP_breg24:
30669 case DW_OP_breg25:
30670 case DW_OP_breg26:
30671 case DW_OP_breg27:
30672 case DW_OP_breg28:
30673 case DW_OP_breg29:
30674 case DW_OP_breg30:
30675 case DW_OP_breg31:
30676 case DW_OP_regx:
30677 case DW_OP_fbreg:
30678 case DW_OP_piece:
30679 case DW_OP_deref_size:
30680 case DW_OP_xderef_size:
30681 hstate.add_object (val1->v.val_int);
30682 break;
30683 case DW_OP_skip:
30684 case DW_OP_bra:
30685 {
30686 int offset;
30687
30688 gcc_assert (val1->val_class == dw_val_class_loc);
30689 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30690 hstate.add_object (offset);
30691 }
30692 break;
30693 case DW_OP_implicit_value:
30694 hstate.add_object (val1->v.val_unsigned);
30695 switch (val2->val_class)
30696 {
30697 case dw_val_class_const:
30698 hstate.add_object (val2->v.val_int);
30699 break;
30700 case dw_val_class_vec:
30701 {
30702 unsigned int elt_size = val2->v.val_vec.elt_size;
30703 unsigned int len = val2->v.val_vec.length;
30704
30705 hstate.add_int (elt_size);
30706 hstate.add_int (len);
30707 hstate.add (val2->v.val_vec.array, len * elt_size);
30708 }
30709 break;
30710 case dw_val_class_const_double:
30711 hstate.add_object (val2->v.val_double.low);
30712 hstate.add_object (val2->v.val_double.high);
30713 break;
30714 case dw_val_class_wide_int:
30715 hstate.add (val2->v.val_wide->get_val (),
30716 get_full_len (*val2->v.val_wide)
30717 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30718 break;
30719 case dw_val_class_addr:
30720 inchash::add_rtx (val2->v.val_addr, hstate);
30721 break;
30722 default:
30723 gcc_unreachable ();
30724 }
30725 break;
30726 case DW_OP_bregx:
30727 case DW_OP_bit_piece:
30728 hstate.add_object (val1->v.val_int);
30729 hstate.add_object (val2->v.val_int);
30730 break;
30731 case DW_OP_addr:
30732 hash_addr:
30733 if (loc->dtprel)
30734 {
30735 unsigned char dtprel = 0xd1;
30736 hstate.add_object (dtprel);
30737 }
30738 inchash::add_rtx (val1->v.val_addr, hstate);
30739 break;
30740 case DW_OP_GNU_addr_index:
30741 case DW_OP_addrx:
30742 case DW_OP_GNU_const_index:
30743 case DW_OP_constx:
30744 {
30745 if (loc->dtprel)
30746 {
30747 unsigned char dtprel = 0xd1;
30748 hstate.add_object (dtprel);
30749 }
30750 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30751 }
30752 break;
30753 case DW_OP_implicit_pointer:
30754 case DW_OP_GNU_implicit_pointer:
30755 hstate.add_int (val2->v.val_int);
30756 break;
30757 case DW_OP_entry_value:
30758 case DW_OP_GNU_entry_value:
30759 hstate.add_object (val1->v.val_loc);
30760 break;
30761 case DW_OP_regval_type:
30762 case DW_OP_deref_type:
30763 case DW_OP_GNU_regval_type:
30764 case DW_OP_GNU_deref_type:
30765 {
30766 unsigned int byte_size
30767 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30768 unsigned int encoding
30769 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30770 hstate.add_object (val1->v.val_int);
30771 hstate.add_object (byte_size);
30772 hstate.add_object (encoding);
30773 }
30774 break;
30775 case DW_OP_convert:
30776 case DW_OP_reinterpret:
30777 case DW_OP_GNU_convert:
30778 case DW_OP_GNU_reinterpret:
30779 if (val1->val_class == dw_val_class_unsigned_const)
30780 {
30781 hstate.add_object (val1->v.val_unsigned);
30782 break;
30783 }
30784 /* FALLTHRU */
30785 case DW_OP_const_type:
30786 case DW_OP_GNU_const_type:
30787 {
30788 unsigned int byte_size
30789 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30790 unsigned int encoding
30791 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30792 hstate.add_object (byte_size);
30793 hstate.add_object (encoding);
30794 if (loc->dw_loc_opc != DW_OP_const_type
30795 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30796 break;
30797 hstate.add_object (val2->val_class);
30798 switch (val2->val_class)
30799 {
30800 case dw_val_class_const:
30801 hstate.add_object (val2->v.val_int);
30802 break;
30803 case dw_val_class_vec:
30804 {
30805 unsigned int elt_size = val2->v.val_vec.elt_size;
30806 unsigned int len = val2->v.val_vec.length;
30807
30808 hstate.add_object (elt_size);
30809 hstate.add_object (len);
30810 hstate.add (val2->v.val_vec.array, len * elt_size);
30811 }
30812 break;
30813 case dw_val_class_const_double:
30814 hstate.add_object (val2->v.val_double.low);
30815 hstate.add_object (val2->v.val_double.high);
30816 break;
30817 case dw_val_class_wide_int:
30818 hstate.add (val2->v.val_wide->get_val (),
30819 get_full_len (*val2->v.val_wide)
30820 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30821 break;
30822 default:
30823 gcc_unreachable ();
30824 }
30825 }
30826 break;
30827
30828 default:
30829 /* Other codes have no operands. */
30830 break;
30831 }
30832 }
30833
30834 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30835
30836 static inline void
30837 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30838 {
30839 dw_loc_descr_ref l;
30840 bool sizes_computed = false;
30841 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30842 size_of_locs (loc);
30843
30844 for (l = loc; l != NULL; l = l->dw_loc_next)
30845 {
30846 enum dwarf_location_atom opc = l->dw_loc_opc;
30847 hstate.add_object (opc);
30848 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30849 {
30850 size_of_locs (loc);
30851 sizes_computed = true;
30852 }
30853 hash_loc_operands (l, hstate);
30854 }
30855 }
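/* Note that the operands of DW_OP_skip and DW_OP_bra are checksummed via
   the dw_loc_addr byte offset of their target (compare_loc_operands below
   compares that same field), and dw_loc_addr is only valid after
   size_of_locs has run; hence the recomputation inside the loop above.  */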
30856
30857 /* Compute hash of the whole location list LIST_HEAD. */
30858
30859 static inline void
30860 hash_loc_list (dw_loc_list_ref list_head)
30861 {
30862 dw_loc_list_ref curr = list_head;
30863 inchash::hash hstate;
30864
30865 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30866 {
30867 hstate.add (curr->begin, strlen (curr->begin) + 1);
30868 hstate.add (curr->end, strlen (curr->end) + 1);
30869 hstate.add_object (curr->vbegin);
30870 hstate.add_object (curr->vend);
30871 if (curr->section)
30872 hstate.add (curr->section, strlen (curr->section) + 1);
30873 hash_locs (curr->expr, hstate);
30874 }
30875 list_head->hash = hstate.end ();
30876 }
30877
30878 /* Return true if X and Y opcodes have the same operands. */
30879
30880 static inline bool
30881 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30882 {
30883 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30884 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30885 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30886 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30887
30888 switch (x->dw_loc_opc)
30889 {
30890 case DW_OP_const4u:
30891 case DW_OP_const8u:
30892 if (x->dtprel)
30893 goto hash_addr;
30894 /* FALLTHRU */
30895 case DW_OP_const1u:
30896 case DW_OP_const1s:
30897 case DW_OP_const2u:
30898 case DW_OP_const2s:
30899 case DW_OP_const4s:
30900 case DW_OP_const8s:
30901 case DW_OP_constu:
30902 case DW_OP_consts:
30903 case DW_OP_pick:
30904 case DW_OP_plus_uconst:
30905 case DW_OP_breg0:
30906 case DW_OP_breg1:
30907 case DW_OP_breg2:
30908 case DW_OP_breg3:
30909 case DW_OP_breg4:
30910 case DW_OP_breg5:
30911 case DW_OP_breg6:
30912 case DW_OP_breg7:
30913 case DW_OP_breg8:
30914 case DW_OP_breg9:
30915 case DW_OP_breg10:
30916 case DW_OP_breg11:
30917 case DW_OP_breg12:
30918 case DW_OP_breg13:
30919 case DW_OP_breg14:
30920 case DW_OP_breg15:
30921 case DW_OP_breg16:
30922 case DW_OP_breg17:
30923 case DW_OP_breg18:
30924 case DW_OP_breg19:
30925 case DW_OP_breg20:
30926 case DW_OP_breg21:
30927 case DW_OP_breg22:
30928 case DW_OP_breg23:
30929 case DW_OP_breg24:
30930 case DW_OP_breg25:
30931 case DW_OP_breg26:
30932 case DW_OP_breg27:
30933 case DW_OP_breg28:
30934 case DW_OP_breg29:
30935 case DW_OP_breg30:
30936 case DW_OP_breg31:
30937 case DW_OP_regx:
30938 case DW_OP_fbreg:
30939 case DW_OP_piece:
30940 case DW_OP_deref_size:
30941 case DW_OP_xderef_size:
30942 return valx1->v.val_int == valy1->v.val_int;
30943 case DW_OP_skip:
30944 case DW_OP_bra:
30945 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30946 can cause irrelevant differences in dw_loc_addr. */
30947 gcc_assert (valx1->val_class == dw_val_class_loc
30948 && valy1->val_class == dw_val_class_loc
30949 && (dwarf_split_debug_info
30950 || x->dw_loc_addr == y->dw_loc_addr));
30951 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30952 case DW_OP_implicit_value:
30953 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30954 || valx2->val_class != valy2->val_class)
30955 return false;
30956 switch (valx2->val_class)
30957 {
30958 case dw_val_class_const:
30959 return valx2->v.val_int == valy2->v.val_int;
30960 case dw_val_class_vec:
30961 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30962 && valx2->v.val_vec.length == valy2->v.val_vec.length
30963 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30964 valx2->v.val_vec.elt_size
30965 * valx2->v.val_vec.length) == 0;
30966 case dw_val_class_const_double:
30967 return valx2->v.val_double.low == valy2->v.val_double.low
30968 && valx2->v.val_double.high == valy2->v.val_double.high;
30969 case dw_val_class_wide_int:
30970 return *valx2->v.val_wide == *valy2->v.val_wide;
30971 case dw_val_class_addr:
30972 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30973 default:
30974 gcc_unreachable ();
30975 }
30976 case DW_OP_bregx:
30977 case DW_OP_bit_piece:
30978 return valx1->v.val_int == valy1->v.val_int
30979 && valx2->v.val_int == valy2->v.val_int;
30980 case DW_OP_addr:
30981 hash_addr:
30982 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30983 case DW_OP_GNU_addr_index:
30984 case DW_OP_addrx:
30985 case DW_OP_GNU_const_index:
30986 case DW_OP_constx:
30987 {
30988 rtx ax1 = valx1->val_entry->addr.rtl;
30989 rtx ay1 = valy1->val_entry->addr.rtl;
30990 return rtx_equal_p (ax1, ay1);
30991 }
30992 case DW_OP_implicit_pointer:
30993 case DW_OP_GNU_implicit_pointer:
30994 return valx1->val_class == dw_val_class_die_ref
30995 && valx1->val_class == valy1->val_class
30996 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30997 && valx2->v.val_int == valy2->v.val_int;
30998 case DW_OP_entry_value:
30999 case DW_OP_GNU_entry_value:
31000 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31001 case DW_OP_const_type:
31002 case DW_OP_GNU_const_type:
31003 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31004 || valx2->val_class != valy2->val_class)
31005 return false;
31006 switch (valx2->val_class)
31007 {
31008 case dw_val_class_const:
31009 return valx2->v.val_int == valy2->v.val_int;
31010 case dw_val_class_vec:
31011 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31012 && valx2->v.val_vec.length == valy2->v.val_vec.length
31013 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31014 valx2->v.val_vec.elt_size
31015 * valx2->v.val_vec.length) == 0;
31016 case dw_val_class_const_double:
31017 return valx2->v.val_double.low == valy2->v.val_double.low
31018 && valx2->v.val_double.high == valy2->v.val_double.high;
31019 case dw_val_class_wide_int:
31020 return *valx2->v.val_wide == *valy2->v.val_wide;
31021 default:
31022 gcc_unreachable ();
31023 }
31024 case DW_OP_regval_type:
31025 case DW_OP_deref_type:
31026 case DW_OP_GNU_regval_type:
31027 case DW_OP_GNU_deref_type:
31028 return valx1->v.val_int == valy1->v.val_int
31029 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31030 case DW_OP_convert:
31031 case DW_OP_reinterpret:
31032 case DW_OP_GNU_convert:
31033 case DW_OP_GNU_reinterpret:
31034 if (valx1->val_class != valy1->val_class)
31035 return false;
31036 if (valx1->val_class == dw_val_class_unsigned_const)
31037 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31038 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31039 case DW_OP_GNU_parameter_ref:
31040 return valx1->val_class == dw_val_class_die_ref
31041 && valx1->val_class == valy1->val_class
31042 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31043 default:
31044 /* Other codes have no operands. */
31045 return true;
31046 }
31047 }
31048
31049 /* Return true if DWARF location expressions X and Y are the same. */
31050
31051 static inline bool
31052 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31053 {
31054 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31055 if (x->dw_loc_opc != y->dw_loc_opc
31056 || x->dtprel != y->dtprel
31057 || !compare_loc_operands (x, y))
31058 break;
31059 return x == NULL && y == NULL;
31060 }
31061
31062 /* Hashtable helpers. */
31063
31064 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31065 {
31066 static inline hashval_t hash (const dw_loc_list_struct *);
31067 static inline bool equal (const dw_loc_list_struct *,
31068 const dw_loc_list_struct *);
31069 };
31070
31071 /* Return precomputed hash of location list X. */
31072
31073 inline hashval_t
31074 loc_list_hasher::hash (const dw_loc_list_struct *x)
31075 {
31076 return x->hash;
31077 }
31078
31079 /* Return true if location lists A and B are the same. */
31080
31081 inline bool
31082 loc_list_hasher::equal (const dw_loc_list_struct *a,
31083 const dw_loc_list_struct *b)
31084 {
31085 if (a == b)
31086     return true;
31087   if (a->hash != b->hash)
31088     return false;
31089 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31090 if (strcmp (a->begin, b->begin) != 0
31091 || strcmp (a->end, b->end) != 0
31092 || (a->section == NULL) != (b->section == NULL)
31093 || (a->section && strcmp (a->section, b->section) != 0)
31094 || a->vbegin != b->vbegin || a->vend != b->vend
31095 || !compare_locs (a->expr, b->expr))
31096 break;
31097 return a == NULL && b == NULL;
31098 }
31099
31100 typedef hash_table<loc_list_hasher> loc_list_hash_type;
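/* Taken together, hash_loc_list, compare_locs and loc_list_hasher let
   optimize_location_lists_1 below share identical location lists.  As a
   made-up example (the labels are hypothetical), if two locals both end
   up described by
       [.LVL1, .LVL4): DW_OP_reg5
       [.LVL4, .LFE0): DW_OP_fbreg -16
   their DW_AT_location attributes are redirected to a single
   dw_loc_list_struct, so the list body need only be emitted once.  */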
31101
31102
31103 /* Recursively optimize location lists referenced from DIE
31104 children and share them whenever possible. */
31105
31106 static void
31107 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31108 {
31109 dw_die_ref c;
31110 dw_attr_node *a;
31111 unsigned ix;
31112 dw_loc_list_struct **slot;
31113 bool drop_locviews = false;
31114 bool has_locviews = false;
31115
31116 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31117 if (AT_class (a) == dw_val_class_loc_list)
31118 {
31119 dw_loc_list_ref list = AT_loc_list (a);
31120 /* TODO: perform some optimizations here, before hashing
31121 it and storing it into the hash table. */
31122 hash_loc_list (list);
31123 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31124 if (*slot == NULL)
31125 {
31126 *slot = list;
31127 if (loc_list_has_views (list))
31128 gcc_assert (list->vl_symbol);
31129 else if (list->vl_symbol)
31130 {
31131 drop_locviews = true;
31132 list->vl_symbol = NULL;
31133 }
31134 }
31135 else
31136 {
31137 if (list->vl_symbol && !(*slot)->vl_symbol)
31138 drop_locviews = true;
31139 a->dw_attr_val.v.val_loc_list = *slot;
31140 }
31141 }
31142 else if (AT_class (a) == dw_val_class_view_list)
31143 {
31144 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31145 has_locviews = true;
31146 }
31147
31148
31149 if (drop_locviews && has_locviews)
31150 remove_AT (die, DW_AT_GNU_locviews);
31151
31152 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31153 }
31154
31155
31156 /* Recursively assign each location list a unique index into the debug_addr
31157 section. */
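/* For -gsplit-dwarf, each location list entry's begin label gets an
   ate_kind_label entry in the address table here; the actual index is
   assigned later in dwarf2out_finish, when the table is traversed with
   index_addr_table_entry.  The split-DWARF location list can then refer
   to the start address by that index rather than needing a relocation
   against the text section.  */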
31158
31159 static void
31160 index_location_lists (dw_die_ref die)
31161 {
31162 dw_die_ref c;
31163 dw_attr_node *a;
31164 unsigned ix;
31165
31166 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31167 if (AT_class (a) == dw_val_class_loc_list)
31168 {
31169 dw_loc_list_ref list = AT_loc_list (a);
31170 dw_loc_list_ref curr;
31171 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31172 {
31173 /* Don't index an entry that has already been indexed
31174 or won't be output. Make sure skip_loc_list_entry doesn't
31175 call size_of_locs, because that might cause a circular dependency:
31176 index_location_lists would then require address table indexes to
31177 be computed already, yet it adds new entries through
31178 add_addr_table_entry, while address table index computation
31179 requires that no new entries be added to the hash table. In the
31180 rare case of a DWARF[234] location expression >= 64KB, we'll just
31181 waste an unused address table entry for it. */
31182 if (curr->begin_entry != NULL
31183 || skip_loc_list_entry (curr))
31184 continue;
31185
31186 curr->begin_entry
31187 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31188 }
31189 }
31190
31191 FOR_EACH_CHILD (die, c, index_location_lists (c));
31192 }
31193
31194 /* Optimize location lists referenced from DIE
31195 children and share them whenever possible. */
31196
31197 static void
31198 optimize_location_lists (dw_die_ref die)
31199 {
31200 loc_list_hash_type htab (500);
31201 optimize_location_lists_1 (die, &htab);
31202 }
31203 \f
31204 /* Traverse the limbo die list, and add parent/child links. The only
31205 dies without parents that should be here are concrete instances of
31206 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31207 For concrete instances, we can get the parent die from the abstract
31208 instance. */
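/* As a made-up example of the interesting case handled below: with GNU C
   nested functions,
       void f (void) { { void g (void) { ... } } }
   the lexical block holding G may be optimized away, leaving G's DIE
   parentless on the limbo list; it is then reattached under F's DIE, or
   under the compile unit DIE as a last resort.  */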
31209
31210 static void
31211 flush_limbo_die_list (void)
31212 {
31213 limbo_die_node *node;
31214
31215 /* get_context_die calls force_decl_die, which can put new DIEs on the
31216 limbo list in LTO mode when nested functions are put in a different
31217 partition than that of their parent function. */
31218 while ((node = limbo_die_list))
31219 {
31220 dw_die_ref die = node->die;
31221 limbo_die_list = node->next;
31222
31223 if (die->die_parent == NULL)
31224 {
31225 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31226
31227 if (origin && origin->die_parent)
31228 add_child_die (origin->die_parent, die);
31229 else if (is_cu_die (die))
31230 ;
31231 else if (seen_error ())
31232 /* It's OK to be confused by errors in the input. */
31233 add_child_die (comp_unit_die (), die);
31234 else
31235 {
31236 /* In certain situations, the lexical block containing a
31237 nested function can be optimized away, which results
31238 in the nested function die being orphaned. Likewise
31239 with the return type of that nested function. Force
31240 this to be a child of the containing function.
31241
31242 It may happen that even the containing function got fully
31243 inlined and optimized out. In that case we are lost and
31244 assign the empty child. This should not be a big issue as
31245 the function is likely unreachable too. */
31246 gcc_assert (node->created_for);
31247
31248 if (DECL_P (node->created_for))
31249 origin = get_context_die (DECL_CONTEXT (node->created_for));
31250 else if (TYPE_P (node->created_for))
31251 origin = scope_die_for (node->created_for, comp_unit_die ());
31252 else
31253 origin = comp_unit_die ();
31254
31255 add_child_die (origin, die);
31256 }
31257 }
31258 }
31259 }
31260
31261 /* Reset DIEs so we can output them again. */
31262
31263 static void
31264 reset_dies (dw_die_ref die)
31265 {
31266 dw_die_ref c;
31267
31268 /* Remove stuff we re-generate. */
31269 die->die_mark = 0;
31270 die->die_offset = 0;
31271 die->die_abbrev = 0;
31272 remove_AT (die, DW_AT_sibling);
31273
31274 FOR_EACH_CHILD (die, c, reset_dies (c));
31275 }
31276
31277 /* Output stuff that dwarf requires at the end of every file,
31278 and generate the DWARF-2 debugging info. */
31279
31280 static void
31281 dwarf2out_finish (const char *filename)
31282 {
31283 comdat_type_node *ctnode;
31284 dw_die_ref main_comp_unit_die;
31285 unsigned char checksum[16];
31286 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31287
31288 /* Flush out any latecomers to the limbo party. */
31289 flush_limbo_die_list ();
31290
31291 if (inline_entry_data_table)
31292 gcc_assert (inline_entry_data_table->is_empty ());
31293
31294 if (flag_checking)
31295 {
31296 verify_die (comp_unit_die ());
31297 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31298 verify_die (node->die);
31299 }
31300
31301 /* We shouldn't have any symbols with delayed asm names for
31302 DIEs generated after early finish. */
31303 gcc_assert (deferred_asm_name == NULL);
31304
31305 gen_remaining_tmpl_value_param_die_attribute ();
31306
31307 if (flag_generate_lto || flag_generate_offload)
31308 {
31309 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31310
31311 /* Prune stuff so that dwarf2out_finish runs successfully
31312 for the fat part of the object. */
31313 reset_dies (comp_unit_die ());
31314 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31315 reset_dies (node->die);
31316
31317 hash_table<comdat_type_hasher> comdat_type_table (100);
31318 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31319 {
31320 comdat_type_node **slot
31321 = comdat_type_table.find_slot (ctnode, INSERT);
31322
31323 /* Don't reset types twice. */
31324 if (*slot != HTAB_EMPTY_ENTRY)
31325 continue;
31326
31327 /* Remove the pointer to the line table. */
31328 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31329
31330 if (debug_info_level >= DINFO_LEVEL_TERSE)
31331 reset_dies (ctnode->root_die);
31332
31333 *slot = ctnode;
31334 }
31335
31336 /* Reset die CU symbol so we don't output it twice. */
31337 comp_unit_die ()->die_id.die_symbol = NULL;
31338
31339 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31340 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31341 if (have_macinfo)
31342 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31343
31344 /* Remove indirect string decisions. */
31345 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31346 if (debug_line_str_hash)
31347 {
31348 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31349 debug_line_str_hash = NULL;
31350 }
31351 }
31352
31353 #if ENABLE_ASSERT_CHECKING
31354 {
31355 dw_die_ref die = comp_unit_die (), c;
31356 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31357 }
31358 #endif
31359 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31360 resolve_addr (ctnode->root_die);
31361 resolve_addr (comp_unit_die ());
31362 move_marked_base_types ();
31363
31364 if (dump_file)
31365 {
31366 fprintf (dump_file, "DWARF for %s\n", filename);
31367 print_die (comp_unit_die (), dump_file);
31368 }
31369
31370 /* Initialize sections and labels used for actual assembler output. */
31371 unsigned generation = init_sections_and_labels (false);
31372
31373 /* Traverse the DIEs and add sibling attributes to those DIEs that
31374 have children. */
31375 add_sibling_attributes (comp_unit_die ());
31376 limbo_die_node *node;
31377 for (node = cu_die_list; node; node = node->next)
31378 add_sibling_attributes (node->die);
31379 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31380 add_sibling_attributes (ctnode->root_die);
31381
31382 /* When splitting DWARF info, we put some attributes in the
31383 skeleton compile_unit DIE that remains in the .o, while
31384 most attributes go in the DWO compile_unit_die. */
31385 if (dwarf_split_debug_info)
31386 {
31387 limbo_die_node *cu;
31388 main_comp_unit_die = gen_compile_unit_die (NULL);
31389 if (dwarf_version >= 5)
31390 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31391 cu = limbo_die_list;
31392 gcc_assert (cu->die == main_comp_unit_die);
31393 limbo_die_list = limbo_die_list->next;
31394 cu->next = cu_die_list;
31395 cu_die_list = cu;
31396 }
31397 else
31398 main_comp_unit_die = comp_unit_die ();
31399
31400 /* Output a terminator label for the .text section. */
31401 switch_to_section (text_section);
31402 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31403 if (cold_text_section)
31404 {
31405 switch_to_section (cold_text_section);
31406 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31407 }
31408
31409 /* We can only use the low/high_pc attributes if all of the code was
31410 in .text. */
31411 if (!have_multiple_function_sections
31412 || (dwarf_version < 3 && dwarf_strict))
31413 {
31414 /* Don't add if the CU has no associated code. */
31415 if (text_section_used)
31416 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31417 text_end_label, true);
31418 }
31419 else
31420 {
31421 unsigned fde_idx;
31422 dw_fde_ref fde;
31423 bool range_list_added = false;
31424
31425 if (text_section_used)
31426 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31427 text_end_label, &range_list_added, true);
31428 if (cold_text_section_used)
31429 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31430 cold_end_label, &range_list_added, true);
31431
31432 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31433 {
31434 if (DECL_IGNORED_P (fde->decl))
31435 continue;
31436 if (!fde->in_std_section)
31437 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31438 fde->dw_fde_end, &range_list_added,
31439 true);
31440 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31441 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31442 fde->dw_fde_second_end, &range_list_added,
31443 true);
31444 }
31445
31446 if (range_list_added)
31447 {
31448 /* We need to give .debug_loc and .debug_ranges an appropriate
31449 "base address". Use zero so that these addresses become
31450 absolute. Historically, we've emitted the unexpected
31451 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31452 Emit both to give time for other tools to adapt. */
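      /* Concretely, when any ranges were added above, the main (or
	 skeleton) compile unit DIE ends up with DW_AT_low_pc 0, possibly
	 also DW_AT_entry_pc 0 for older consumers, and the DW_AT_ranges
	 attribute installed by add_ranges_by_labels, with add_ranges
	 (NULL) below terminating the range list.  */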
31453 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31454 if (! dwarf_strict && dwarf_version < 4)
31455 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31456
31457 add_ranges (NULL);
31458 }
31459 }
31460
31461 /* The AIX assembler inserts the length, so adjust the reference to match the
31462 offset expected by debuggers. */
31463 strcpy (dl_section_ref, debug_line_section_label);
31464 if (XCOFF_DEBUGGING_INFO)
31465 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31466
31467 if (debug_info_level >= DINFO_LEVEL_TERSE)
31468 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31469 dl_section_ref);
31470
31471 if (have_macinfo)
31472 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31473 macinfo_section_label);
31474
31475 if (dwarf_split_debug_info)
31476 {
31477 if (have_location_lists)
31478 {
31479 /* Since we generate the loclists in the split DWARF .dwo
31480 file itself, we don't need to generate a loclists_base
31481 attribute for the split compile unit DIE. That attribute
31482 (and using relocatable sec_offset FORMs) isn't allowed
31483 for a split compile unit. Only if the .debug_loclists
31484 section was in the main file, would we need to generate a
31485 loclists_base attribute here (for the full or skeleton
31486 unit DIE). */
31487
31488 /* optimize_location_lists calculates the size of the lists,
31489 so index them first, and assign indices to the entries.
31490 Although optimize_location_lists will remove entries from
31491 the table, it only does so for duplicates, and therefore
31492 only reduces ref_counts to 1. */
31493 index_location_lists (comp_unit_die ());
31494 }
31495
31496 if (addr_index_table != NULL)
31497 {
31498 unsigned int index = 0;
31499 addr_index_table
31500 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31501 (&index);
31502 }
31503 }
31504
31505 loc_list_idx = 0;
31506 if (have_location_lists)
31507 {
31508 optimize_location_lists (comp_unit_die ());
31509 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31510 if (dwarf_version >= 5 && dwarf_split_debug_info)
31511 assign_location_list_indexes (comp_unit_die ());
31512 }
31513
31514 save_macinfo_strings ();
31515
31516 if (dwarf_split_debug_info)
31517 {
31518 unsigned int index = 0;
31519
31520 /* Add attributes common to skeleton compile_units and
31521 type_units. Because these attributes include strings, it
31522 must be done before freezing the string table. Top-level
31523 skeleton die attrs are added when the skeleton type unit is
31524 created, so ensure it is created by this point. */
31525 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31526 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31527 }
31528
31529 /* Output all of the compilation units. We put the main one last so that
31530 the offsets are available to output_pubnames. */
31531 for (node = cu_die_list; node; node = node->next)
31532 output_comp_unit (node->die, 0, NULL);
31533
31534 hash_table<comdat_type_hasher> comdat_type_table (100);
31535 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31536 {
31537 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31538
31539 /* Don't output duplicate types. */
31540 if (*slot != HTAB_EMPTY_ENTRY)
31541 continue;
31542
31543 /* Add a pointer to the line table for the main compilation unit
31544 so that the debugger can make sense of DW_AT_decl_file
31545 attributes. */
31546 if (debug_info_level >= DINFO_LEVEL_TERSE)
31547 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31548 (!dwarf_split_debug_info
31549 ? dl_section_ref
31550 : debug_skeleton_line_section_label));
31551
31552 output_comdat_type_unit (ctnode, false);
31553 *slot = ctnode;
31554 }
31555
31556 if (dwarf_split_debug_info)
31557 {
31558 int mark;
31559 struct md5_ctx ctx;
31560
31561 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31562 index_rnglists ();
31563
31564 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31565 md5_init_ctx (&ctx);
31566 mark = 0;
31567 die_checksum (comp_unit_die (), &ctx, &mark);
31568 unmark_all_dies (comp_unit_die ());
31569 md5_finish_ctx (&ctx, checksum);
31570
31571 if (dwarf_version < 5)
31572 {
31573 /* Use the first 8 bytes of the checksum as the dwo_id,
31574 and add it to both comp-unit DIEs. */
31575 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31576 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31577 }
31578
31579 /* Add the base offset of the ranges table to the skeleton
31580 comp-unit DIE. */
31581 if (!vec_safe_is_empty (ranges_table))
31582 {
31583 if (dwarf_version >= 5)
31584 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31585 ranges_base_label);
31586 else
31587 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31588 ranges_section_label);
31589 }
31590
31591 output_addr_table ();
31592 }
31593
31594 /* Output the main compilation unit if non-empty or if .debug_macinfo
31595 or .debug_macro will be emitted. */
31596 output_comp_unit (comp_unit_die (), have_macinfo,
31597 dwarf_split_debug_info ? checksum : NULL);
31598
31599 if (dwarf_split_debug_info && info_section_emitted)
31600 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31601
31602 /* Output the abbreviation table. */
31603 if (vec_safe_length (abbrev_die_table) != 1)
31604 {
31605 switch_to_section (debug_abbrev_section);
31606 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31607 output_abbrev_section ();
31608 }
31609
31610 /* Output location list section if necessary. */
31611 if (have_location_lists)
31612 {
31613 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31614 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31615 /* Output the location lists info. */
31616 switch_to_section (debug_loc_section);
31617 if (dwarf_version >= 5)
31618 {
31619 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31620 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31621 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31622 dw2_asm_output_data (4, 0xffffffff,
31623 "Initial length escape value indicating "
31624 "64-bit DWARF extension");
31625 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31626 "Length of Location Lists");
31627 ASM_OUTPUT_LABEL (asm_out_file, l1);
31628 output_dwarf_version ();
31629 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31630 dw2_asm_output_data (1, 0, "Segment Size");
31631 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31632 "Offset Entry Count");
31633 }
31634 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31635 if (dwarf_version >= 5 && dwarf_split_debug_info)
31636 {
31637 unsigned int save_loc_list_idx = loc_list_idx;
31638 loc_list_idx = 0;
31639 output_loclists_offsets (comp_unit_die ());
31640 gcc_assert (save_loc_list_idx == loc_list_idx);
31641 }
31642 output_location_lists (comp_unit_die ());
31643 if (dwarf_version >= 5)
31644 ASM_OUTPUT_LABEL (asm_out_file, l2);
31645 }
31646
31647 output_pubtables ();
31648
31649 /* Output the address range information if a CU (.debug_info section)
31650 was emitted. We output an empty table even if we had no functions
31651 to put in it. This is because the consumer has no way to tell the
31652 difference between an empty table that we omitted and failure to
31653 generate a table that would have contained data. */
31654 if (info_section_emitted)
31655 {
31656 switch_to_section (debug_aranges_section);
31657 output_aranges ();
31658 }
31659
31660 /* Output ranges section if necessary. */
31661 if (!vec_safe_is_empty (ranges_table))
31662 {
31663 if (dwarf_version >= 5)
31664 output_rnglists (generation);
31665 else
31666 output_ranges ();
31667 }
31668
31669 /* Have to end the macro section. */
31670 if (have_macinfo)
31671 {
31672 switch_to_section (debug_macinfo_section);
31673 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31674 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31675 : debug_skeleton_line_section_label, false);
31676 dw2_asm_output_data (1, 0, "End compilation unit");
31677 }
31678
31679 /* Output the source line correspondence table. We must do this
31680 even if there is no line information. Otherwise, on an empty
31681 translation unit, we will generate a present, but empty,
31682 .debug_info section. IRIX 6.5 `nm' will then complain when
31683 examining the file. This is done late so that any filenames
31684 used by the debug_info section are marked as 'used'. */
31685 switch_to_section (debug_line_section);
31686 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31687 if (! output_asm_line_debug_info ())
31688 output_line_info (false);
31689
31690 if (dwarf_split_debug_info && info_section_emitted)
31691 {
31692 switch_to_section (debug_skeleton_line_section);
31693 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31694 output_line_info (true);
31695 }
31696
31697 /* If we emitted any indirect strings, output the string table too. */
31698 if (debug_str_hash || skeleton_debug_str_hash)
31699 output_indirect_strings ();
31700 if (debug_line_str_hash)
31701 {
31702 switch_to_section (debug_line_str_section);
31703 const enum dwarf_form form = DW_FORM_line_strp;
31704 debug_line_str_hash->traverse<enum dwarf_form,
31705 output_indirect_string> (form);
31706 }
31707
31708 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31709 symview_upper_bound = 0;
31710 if (zero_view_p)
31711 bitmap_clear (zero_view_p);
31712 }
31713
31714 /* Returns a hash value for X (which really is a variable_value_struct). */
31715
31716 inline hashval_t
31717 variable_value_hasher::hash (variable_value_struct *x)
31718 {
31719 return (hashval_t) x->decl_id;
31720 }
31721
31722 /* Return nonzero if decl_id of variable_value_struct X is the same as
31723 UID of decl Y. */
31724
31725 inline bool
31726 variable_value_hasher::equal (variable_value_struct *x, tree y)
31727 {
31728 return x->decl_id == DECL_UID (y);
31729 }
31730
31731 /* Helper function for resolve_variable_value, handle
31732 DW_OP_GNU_variable_value in one location expression.
31733 Return true if exprloc has been changed into loclist. */
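/* For each DW_OP_GNU_variable_value <decl> found below there are three
   possible outcomes: if a DIE for DECL already exists, the operand is
   rewritten into a die_ref; if loc_list_from_tree yields a single
   expression, that expression is spliced in place of the operation; and
   if it yields a full location list, either the whole attribute is turned
   into a loclist (for attributes that permit it) or a DW_TAG_variable DIE
   is created for DECL so that it can be referenced.  */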
31734
31735 static bool
31736 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31737 {
31738 dw_loc_descr_ref next;
31739 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31740 {
31741 next = loc->dw_loc_next;
31742 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31743 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31744 continue;
31745
31746 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31747 if (DECL_CONTEXT (decl) != current_function_decl)
31748 continue;
31749
31750 dw_die_ref ref = lookup_decl_die (decl);
31751 if (ref)
31752 {
31753 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31754 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31755 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31756 continue;
31757 }
31758 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31759 if (l == NULL)
31760 continue;
31761 if (l->dw_loc_next)
31762 {
31763 if (AT_class (a) != dw_val_class_loc)
31764 continue;
31765 switch (a->dw_attr)
31766 {
31767 /* The following attributes allow both exprloc and loclist
31768 classes, so we can change them into a loclist. */
31769 case DW_AT_location:
31770 case DW_AT_string_length:
31771 case DW_AT_return_addr:
31772 case DW_AT_data_member_location:
31773 case DW_AT_frame_base:
31774 case DW_AT_segment:
31775 case DW_AT_static_link:
31776 case DW_AT_use_location:
31777 case DW_AT_vtable_elem_location:
31778 if (prev)
31779 {
31780 prev->dw_loc_next = NULL;
31781 prepend_loc_descr_to_each (l, AT_loc (a));
31782 }
31783 if (next)
31784 add_loc_descr_to_each (l, next);
31785 a->dw_attr_val.val_class = dw_val_class_loc_list;
31786 a->dw_attr_val.val_entry = NULL;
31787 a->dw_attr_val.v.val_loc_list = l;
31788 have_location_lists = true;
31789 return true;
31790 /* The following attributes allow both exprloc and reference,
31791 so if the whole expression is DW_OP_GNU_variable_value alone
31792 we could transform it into a reference. */
31793 case DW_AT_byte_size:
31794 case DW_AT_bit_size:
31795 case DW_AT_lower_bound:
31796 case DW_AT_upper_bound:
31797 case DW_AT_bit_stride:
31798 case DW_AT_count:
31799 case DW_AT_allocated:
31800 case DW_AT_associated:
31801 case DW_AT_byte_stride:
31802 if (prev == NULL && next == NULL)
31803 break;
31804 /* FALLTHRU */
31805 default:
31806 if (dwarf_strict)
31807 continue;
31808 break;
31809 }
31810 /* Create DW_TAG_variable that we can refer to. */
31811 gen_decl_die (decl, NULL_TREE, NULL,
31812 lookup_decl_die (current_function_decl));
31813 ref = lookup_decl_die (decl);
31814 if (ref)
31815 {
31816 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31817 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31818 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31819 }
31820 continue;
31821 }
31822 if (prev)
31823 {
31824 prev->dw_loc_next = l->expr;
31825 add_loc_descr (&prev->dw_loc_next, next);
31826 free_loc_descr (loc, NULL);
31827 next = prev->dw_loc_next;
31828 }
31829 else
31830 {
31831 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31832 add_loc_descr (&loc, next);
31833 next = loc;
31834 }
31835 loc = prev;
31836 }
31837 return false;
31838 }
31839
31840 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31841
31842 static void
31843 resolve_variable_value (dw_die_ref die)
31844 {
31845 dw_attr_node *a;
31846 dw_loc_list_ref loc;
31847 unsigned ix;
31848
31849 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31850 switch (AT_class (a))
31851 {
31852 case dw_val_class_loc:
31853 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31854 break;
31855 /* FALLTHRU */
31856 case dw_val_class_loc_list:
31857 loc = AT_loc_list (a);
31858 gcc_assert (loc);
31859 for (; loc; loc = loc->dw_loc_next)
31860 resolve_variable_value_in_expr (a, loc->expr);
31861 break;
31862 default:
31863 break;
31864 }
31865 }
31866
31867 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31868 temporaries in the current function. */
31869
31870 static void
31871 resolve_variable_values (void)
31872 {
31873 if (!variable_value_hash || !current_function_decl)
31874 return;
31875
31876 struct variable_value_struct *node
31877 = variable_value_hash->find_with_hash (current_function_decl,
31878 DECL_UID (current_function_decl));
31879
31880 if (node == NULL)
31881 return;
31882
31883 unsigned int i;
31884 dw_die_ref die;
31885 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31886 resolve_variable_value (die);
31887 }
31888
31889 /* Helper function for note_variable_value, handle one location
31890 expression. */
31891
31892 static void
31893 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31894 {
31895 for (; loc; loc = loc->dw_loc_next)
31896 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31897 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31898 {
31899 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31900 dw_die_ref ref = lookup_decl_die (decl);
31901 if (! ref && (flag_generate_lto || flag_generate_offload))
31902 {
31903 /* ??? This is somewhat a hack because we do not create DIEs
31904 for variables not in BLOCK trees early, but when generating
31905 early LTO output we need the dw_val_class_decl_ref to be
31906 fully resolved. For fat LTO objects we'd also like to
31907 undo this after LTO dwarf output. */
31908 gcc_assert (DECL_CONTEXT (decl));
31909 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31910 gcc_assert (ctx != NULL);
31911 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31912 ref = lookup_decl_die (decl);
31913 gcc_assert (ref != NULL);
31914 }
31915 if (ref)
31916 {
31917 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31918 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31919 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31920 continue;
31921 }
31922 if (VAR_P (decl)
31923 && DECL_CONTEXT (decl)
31924 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31925 && lookup_decl_die (DECL_CONTEXT (decl)))
31926 {
31927 if (!variable_value_hash)
31928 variable_value_hash
31929 = hash_table<variable_value_hasher>::create_ggc (10);
31930
31931 tree fndecl = DECL_CONTEXT (decl);
31932 struct variable_value_struct *node;
31933 struct variable_value_struct **slot
31934 = variable_value_hash->find_slot_with_hash (fndecl,
31935 DECL_UID (fndecl),
31936 INSERT);
31937 if (*slot == NULL)
31938 {
31939 node = ggc_cleared_alloc<variable_value_struct> ();
31940 node->decl_id = DECL_UID (fndecl);
31941 *slot = node;
31942 }
31943 else
31944 node = *slot;
31945
31946 vec_safe_push (node->dies, die);
31947 }
31948 }
31949 }
31950
31951 /* Walk the tree DIE and note DIEs whose DW_OP_GNU_variable_value still
31952 has a dw_val_class_decl_ref operand. */
31953
31954 static void
31955 note_variable_value (dw_die_ref die)
31956 {
31957 dw_die_ref c;
31958 dw_attr_node *a;
31959 dw_loc_list_ref loc;
31960 unsigned ix;
31961
31962 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31963 switch (AT_class (a))
31964 {
31965 case dw_val_class_loc_list:
31966 loc = AT_loc_list (a);
31967 gcc_assert (loc);
31968 if (!loc->noted_variable_value)
31969 {
31970 loc->noted_variable_value = 1;
31971 for (; loc; loc = loc->dw_loc_next)
31972 note_variable_value_in_expr (die, loc->expr);
31973 }
31974 break;
31975 case dw_val_class_loc:
31976 note_variable_value_in_expr (die, AT_loc (a));
31977 break;
31978 default:
31979 break;
31980 }
31981
31982 /* Mark children. */
31983 FOR_EACH_CHILD (die, c, note_variable_value (c));
31984 }
31985
31986 /* Perform any cleanups needed after the early debug generation pass
31987 has run. */
31988
31989 static void
31990 dwarf2out_early_finish (const char *filename)
31991 {
31992 set_early_dwarf s;
31993 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31994
31995 /* PCH might result in DW_AT_producer string being restored from the
31996 header compilation, so always fill it with an empty string initially
31997 and overwrite only here. */
31998 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31999 producer_string = gen_producer_string ();
32000 producer->dw_attr_val.v.val_str->refcount--;
32001 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32002
32003 /* Add the name for the main input file now. We delayed this from
32004 dwarf2out_init to avoid complications with PCH. */
32005 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32006 add_comp_dir_attribute (comp_unit_die ());
32007
32008 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32009 DW_AT_comp_dir into .debug_line_str section. */
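  /* Only strings longer than DWARF_OFFSET_SIZE are worth moving: a
     DW_FORM_line_strp reference costs DWARF_OFFSET_SIZE bytes in
     .debug_info, so any string that fits in that many bytes (including
     the terminating NUL) is at least as cheap to keep inline, which is
     what the length check below implements.  */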
32010 if (!output_asm_line_debug_info ()
32011 && dwarf_version >= 5
32012 && DWARF5_USE_DEBUG_LINE_STR)
32013 {
32014 for (int i = 0; i < 2; i++)
32015 {
32016 dw_attr_node *a = get_AT (comp_unit_die (),
32017 i ? DW_AT_comp_dir : DW_AT_name);
32018 if (a == NULL
32019 || AT_class (a) != dw_val_class_str
32020 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32021 continue;
32022
32023 if (! debug_line_str_hash)
32024 debug_line_str_hash
32025 = hash_table<indirect_string_hasher>::create_ggc (10);
32026
32027 struct indirect_string_node *node
32028 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32029 set_indirect_string (node);
32030 node->form = DW_FORM_line_strp;
32031 a->dw_attr_val.v.val_str->refcount--;
32032 a->dw_attr_val.v.val_str = node;
32033 }
32034 }
32035
32036 /* With LTO, early dwarf was really finished at compile time, so make
32037 sure to adjust the phase after annotating the LTRANS CU DIE. */
32038 if (in_lto_p)
32039 {
32040 /* Force DW_TAG_imported_unit to be created now; otherwise
32041 we might end up without it, or with it ordered after a
32042 DW_TAG_inlined_subroutine that references DIEs from it. */
32043 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32044 {
32045 unsigned i;
32046 tree tu;
32047 if (external_die_map)
32048 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32049 if (sym_off_pair *desc = external_die_map->get (tu))
32050 {
32051 dw_die_ref import = new_die (DW_TAG_imported_unit,
32052 comp_unit_die (), NULL_TREE);
32053 add_AT_external_die_ref (import, DW_AT_import,
32054 desc->sym, desc->off);
32055 }
32056 }
32057
32058 early_dwarf_finished = true;
32059 if (dump_file)
32060 {
32061 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32062 print_die (comp_unit_die (), dump_file);
32063 }
32064 return;
32065 }
32066
32067 /* Walk through the list of incomplete types again, trying once more to
32068 emit full debugging info for them. */
32069 retry_incomplete_types ();
32070
32071 /* The point here is to flush out the limbo list so that it is empty
32072 and we don't need to stream it for LTO. */
32073 flush_limbo_die_list ();
32074
32075 gen_scheduled_generic_parms_dies ();
32076 gen_remaining_tmpl_value_param_die_attribute ();
32077
32078 /* Add DW_AT_linkage_name for all deferred DIEs. */
32079 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32080 {
32081 tree decl = node->created_for;
32082 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32083 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32084 ended up in deferred_asm_name before we knew it was
32085 constant and never written to disk. */
32086 && DECL_ASSEMBLER_NAME (decl))
32087 {
32088 add_linkage_attr (node->die, decl);
32089 move_linkage_attr (node->die);
32090 }
32091 }
32092 deferred_asm_name = NULL;
32093
32094 if (flag_eliminate_unused_debug_types)
32095 prune_unused_types ();
32096
32097 /* Generate separate COMDAT sections for type DIEs. */
32098 if (use_debug_types)
32099 {
32100 break_out_comdat_types (comp_unit_die ());
32101
32102 /* Each new type_unit DIE was added to the limbo die list when created.
32103 Since these have all been added to comdat_type_list, clear the
32104 limbo die list. */
32105 limbo_die_list = NULL;
32106
32107 /* For each new comdat type unit, copy declarations for incomplete
32108 types to make the new unit self-contained (i.e., no direct
32109 references to the main compile unit). */
32110 for (comdat_type_node *ctnode = comdat_type_list;
32111 ctnode != NULL; ctnode = ctnode->next)
32112 copy_decls_for_unworthy_types (ctnode->root_die);
32113 copy_decls_for_unworthy_types (comp_unit_die ());
32114
32115 /* In the process of copying declarations from one unit to another,
32116 we may have left some declarations behind that are no longer
32117 referenced. Prune them. */
32118 prune_unused_types ();
32119 }
32120
32121 /* Traverse the DIEs and note those whose DW_OP_GNU_variable_value still
32122 has a dw_val_class_decl_ref operand. */
32123 note_variable_value (comp_unit_die ());
32124 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32125 note_variable_value (node->die);
32126 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32127 ctnode = ctnode->next)
32128 note_variable_value (ctnode->root_die);
32129 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32130 note_variable_value (node->die);
32131
32132 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32133 both the main_cu and all skeleton TUs. Making this call unconditional
32134 would end up either adding a second copy of the AT_pubnames attribute, or
32135 requiring a special case in add_top_level_skeleton_die_attrs. */
32136 if (!dwarf_split_debug_info)
32137 add_AT_pubnames (comp_unit_die ());
32138
32139 /* The early debug phase is now finished. */
32140 early_dwarf_finished = true;
32141 if (dump_file)
32142 {
32143 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32144 print_die (comp_unit_die (), dump_file);
32145 }
32146
32147 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32148 if ((!flag_generate_lto && !flag_generate_offload)
32149 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32150 copy_lto_debug_sections operation of the simple object support in
32151 libiberty is not implemented for them yet. */
32152 || TARGET_PECOFF || TARGET_COFF)
32153 return;
32154
32155 /* Now, as we are going to output for LTO, initialize sections and
32156 labels to the LTO variants. We don't need a random-seed postfix as
32157 for other LTO sections, since linking the LTO debug sections into
32158 one in a partial link is fine. */
32159 init_sections_and_labels (true);
32160
32161 /* The output below is modeled after dwarf2out_finish with all
32162 location related output removed and some LTO specific changes.
32163 Some refactoring might make both smaller and easier to match up. */
32164
32165 /* Traverse the DIEs and add sibling attributes to those DIEs
32166 that have children. */
32167 add_sibling_attributes (comp_unit_die ());
32168 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32169 add_sibling_attributes (node->die);
32170 for (comdat_type_node *ctnode = comdat_type_list;
32171 ctnode != NULL; ctnode = ctnode->next)
32172 add_sibling_attributes (ctnode->root_die);
32173
32174 /* The AIX assembler inserts the length, so adjust the reference to match the
32175 offset expected by debuggers. */
32176 strcpy (dl_section_ref, debug_line_section_label);
32177 if (XCOFF_DEBUGGING_INFO)
32178 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32179
32180 if (debug_info_level >= DINFO_LEVEL_TERSE)
32181 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32182
32183 if (have_macinfo)
32184 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32185 macinfo_section_label);
32186
32187 save_macinfo_strings ();
32188
32189 if (dwarf_split_debug_info)
32190 {
32191 unsigned int index = 0;
32192 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32193 }
32194
32195 /* Output all of the compilation units. We put the main one last so that
32196 the offsets are available to output_pubnames. */
32197 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32198 output_comp_unit (node->die, 0, NULL);
32199
32200 hash_table<comdat_type_hasher> comdat_type_table (100);
32201 for (comdat_type_node *ctnode = comdat_type_list;
32202 ctnode != NULL; ctnode = ctnode->next)
32203 {
32204 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32205
32206 /* Don't output duplicate types. */
32207 if (*slot != HTAB_EMPTY_ENTRY)
32208 continue;
32209
32210 /* Add a pointer to the line table for the main compilation unit
32211 so that the debugger can make sense of DW_AT_decl_file
32212 attributes. */
32213 if (debug_info_level >= DINFO_LEVEL_TERSE)
32214 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32215 (!dwarf_split_debug_info
32216 ? debug_line_section_label
32217 : debug_skeleton_line_section_label));
32218
32219 output_comdat_type_unit (ctnode, true);
32220 *slot = ctnode;
32221 }
32222
32223 /* Stick a unique symbol to the main debuginfo section. */
32224 compute_comp_unit_symbol (comp_unit_die ());
32225
32226 /* Output the main compilation unit. We always need it if only for
32227 the CU symbol. */
32228 output_comp_unit (comp_unit_die (), true, NULL);
32229
32230 /* Output the abbreviation table. */
32231 if (vec_safe_length (abbrev_die_table) != 1)
32232 {
32233 switch_to_section (debug_abbrev_section);
32234 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32235 output_abbrev_section ();
32236 }
32237
32238 /* Have to end the macro section. */
32239 if (have_macinfo)
32240 {
32241 /* We have to save macinfo state if we need to output it again
32242 for the FAT part of the object. */
32243 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32244 if (flag_fat_lto_objects)
32245 macinfo_table = macinfo_table->copy ();
32246
32247 switch_to_section (debug_macinfo_section);
32248 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32249 output_macinfo (debug_line_section_label, true);
32250 dw2_asm_output_data (1, 0, "End compilation unit");
32251
32252 if (flag_fat_lto_objects)
32253 {
32254 vec_free (macinfo_table);
32255 macinfo_table = saved_macinfo_table;
32256 }
32257 }
32258
32259 /* Emit a skeleton debug_line section. */
32260 switch_to_section (debug_line_section);
32261 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32262 output_line_info (true);
32263
32264 /* If we emitted any indirect strings, output the string table too. */
32265 if (debug_str_hash || skeleton_debug_str_hash)
32266 output_indirect_strings ();
32267 if (debug_line_str_hash)
32268 {
32269 switch_to_section (debug_line_str_section);
32270 const enum dwarf_form form = DW_FORM_line_strp;
32271 debug_line_str_hash->traverse<enum dwarf_form,
32272 output_indirect_string> (form);
32273 }
32274
32275 /* Switch back to the text section. */
32276 switch_to_section (text_section);
32277 }
32278
32279 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32280 within the same process. For use by toplev::finalize. */
32281
32282 void
32283 dwarf2out_c_finalize (void)
32284 {
32285 last_var_location_insn = NULL;
32286 cached_next_real_insn = NULL;
32287 used_rtx_array = NULL;
32288 incomplete_types = NULL;
32289 debug_info_section = NULL;
32290 debug_skeleton_info_section = NULL;
32291 debug_abbrev_section = NULL;
32292 debug_skeleton_abbrev_section = NULL;
32293 debug_aranges_section = NULL;
32294 debug_addr_section = NULL;
32295 debug_macinfo_section = NULL;
32296 debug_line_section = NULL;
32297 debug_skeleton_line_section = NULL;
32298 debug_loc_section = NULL;
32299 debug_pubnames_section = NULL;
32300 debug_pubtypes_section = NULL;
32301 debug_str_section = NULL;
32302 debug_line_str_section = NULL;
32303 debug_str_dwo_section = NULL;
32304 debug_str_offsets_section = NULL;
32305 debug_ranges_section = NULL;
32306 debug_frame_section = NULL;
32307 fde_vec = NULL;
32308 debug_str_hash = NULL;
32309 debug_line_str_hash = NULL;
32310 skeleton_debug_str_hash = NULL;
32311 dw2_string_counter = 0;
32312 have_multiple_function_sections = false;
32313 text_section_used = false;
32314 cold_text_section_used = false;
32315 cold_text_section = NULL;
32316 current_unit_personality = NULL;
32317
32318 early_dwarf = false;
32319 early_dwarf_finished = false;
32320
32321 next_die_offset = 0;
32322 single_comp_unit_die = NULL;
32323 comdat_type_list = NULL;
32324 limbo_die_list = NULL;
32325 file_table = NULL;
32326 decl_die_table = NULL;
32327 common_block_die_table = NULL;
32328 decl_loc_table = NULL;
32329 call_arg_locations = NULL;
32330 call_arg_loc_last = NULL;
32331 call_site_count = -1;
32332 tail_call_site_count = -1;
32333 cached_dw_loc_list_table = NULL;
32334 abbrev_die_table = NULL;
32335 delete dwarf_proc_stack_usage_map;
32336 dwarf_proc_stack_usage_map = NULL;
32337 line_info_label_num = 0;
32338 cur_line_info_table = NULL;
32339 text_section_line_info = NULL;
32340 cold_text_section_line_info = NULL;
32341 separate_line_info = NULL;
32342 info_section_emitted = false;
32343 pubname_table = NULL;
32344 pubtype_table = NULL;
32345 macinfo_table = NULL;
32346 ranges_table = NULL;
32347 ranges_by_label = NULL;
32348 rnglist_idx = 0;
32349 have_location_lists = false;
32350 loclabel_num = 0;
32351 poc_label_num = 0;
32352 last_emitted_file = NULL;
32353 label_num = 0;
32354 tmpl_value_parm_die_table = NULL;
32355 generic_type_instances = NULL;
32356 frame_pointer_fb_offset = 0;
32357 frame_pointer_fb_offset_valid = false;
32358 base_types.release ();
32359 XDELETEVEC (producer_string);
32360 producer_string = NULL;
32361 }
32362
32363 #include "gt-dwarf2out.h"